import json
from uuid import uuid4
import boto3
import pytest
from moto import mock_aws
from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID
from tests.markers import requires_docker
from tests.test_awslambda.utilities import (
get_role_name,
get_test_zip_file_print_event,
wait_for_log_msg,
)
# Default region for every test in this module (except the parametrized
# partition test, which supplies its own region).
REGION_NAME = "us-east-1"
@mock_aws
@pytest.mark.parametrize(
    "match_events,actual_event",
    [
        (["s3:ObjectCreated:Put"], "ObjectCreated:Put"),
        (["s3:ObjectCreated:*"], "ObjectCreated:Put"),
        (["s3:ObjectCreated:Post"], None),
        (["s3:ObjectCreated:Post", "s3:ObjectCreated:*"], "ObjectCreated:Put"),
    ],
)
@requires_docker
def test_objectcreated_put__invokes_lambda(match_events, actual_event):
    """PutObject invokes the configured Lambda iff an event pattern matches.

    :param match_events: event patterns configured on the bucket notification
    :param actual_event: expected ``eventName`` in the Lambda's event payload,
        or ``None`` when the notification should not fire at all
    """
    s3_res = boto3.resource("s3", region_name=REGION_NAME)
    s3_client = boto3.client("s3", region_name=REGION_NAME)
    lambda_client = boto3.client("lambda", REGION_NAME)
    # Create S3 bucket
    bucket_name = str(uuid4())
    s3_res.create_bucket(Bucket=bucket_name)
    # Create AWSLambda function that prints the event it receives to its log
    function_name = str(uuid4())[0:6]
    fn_arn = lambda_client.create_function(
        FunctionName=function_name,
        Runtime="python3.11",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file_print_event()},
    )["FunctionArn"]
    # Put Notification; the unrelated config proves that only the matching
    # configuration's Lambda is invoked
    s3_client.put_bucket_notification_configuration(
        Bucket=bucket_name,
        NotificationConfiguration={
            "LambdaFunctionConfigurations": [
                {
                    "Id": "unrelated",
                    "LambdaFunctionArn": f"arn:aws:lambda:us-east-1:{ACCOUNT_ID}:function:n/a",
                    "Events": ["s3:ReducedRedundancyLostObject"],
                },
                {
                    "Id": "s3eventtriggerslambda",
                    "LambdaFunctionArn": fn_arn,
                    "Events": match_events,
                },
            ]
        },
    )
    # Put Object - the action that may trigger the notification
    s3_client.put_object(Bucket=bucket_name, Key="key name", Body="bodyofnewobject")
    # Find the output of AWSLambda in its CloudWatch log group
    expected_msg = "FINISHED_PRINTING_EVENT"
    log_group = f"/aws/lambda/{function_name}"
    msg_showed_up, all_logs = wait_for_log_msg(expected_msg, log_group, wait_time=10)
    if actual_event is None:
        # The event should not be fired on POST, as we've only PUT an object
        assert not msg_showed_up
        return
    # If we do have an actual event, verify the Lambda was invoked with the correct event
    # (message previously referred to SQS - a copy-paste error; this test puts an S3 object)
    assert msg_showed_up, (
        expected_msg
        + " was not found after putting an S3 object. All logs: "
        + str(all_logs)
    )
    records = [line for line in all_logs if line.startswith("{'Records'")][0]
    records = json.loads(records.replace("'", '"'))["Records"]
    assert len(records) == 1
    assert records[0]["awsRegion"] == REGION_NAME
    assert records[0]["eventName"] == actual_event
    assert records[0]["eventSource"] == "aws:s3"
    assert "eventTime" in records[0]
    assert "s3" in records[0]
    assert "bucket" in records[0]["s3"]
    assert records[0]["s3"]["bucket"]["arn"] == f"arn:aws:s3:::{bucket_name}"
    assert records[0]["s3"]["bucket"]["name"] == bucket_name
    assert records[0]["s3"]["configurationId"] == "s3eventtriggerslambda"
    assert "object" in records[0]["s3"]
    assert records[0]["s3"]["object"]["eTag"] == "61ea96c3c8d2c76fc5a42bfccb6affd9"
    # The key is URL-encoded in the event payload ("key name" -> "key+name")
    assert records[0]["s3"]["object"]["key"] == "key+name"
    assert records[0]["s3"]["object"]["size"] == 15
@mock_aws
def test_objectcreated_put__unknown_lambda_is_handled_gracefully():
    """A notification pointing at a non-existent Lambda must not break PutObject."""
    resource = boto3.resource("s3", region_name=REGION_NAME)
    client = boto3.client("s3", region_name=REGION_NAME)

    # Bucket whose notification targets a Lambda ARN that does not exist
    bucket = str(uuid4())
    resource.create_bucket(Bucket=bucket)
    client.put_bucket_notification_configuration(
        Bucket=bucket,
        NotificationConfiguration={
            "LambdaFunctionConfigurations": [
                {
                    "Id": "unrelated",
                    "LambdaFunctionArn": f"arn:aws:lambda:us-east-1:{ACCOUNT_ID}:function:n/a",
                    "Events": ["s3:ObjectCreated:Put"],
                }
            ]
        },
    )

    # Uploading should succeed despite the broken notification target
    client.put_object(Bucket=bucket, Key="keyname", Body="bodyofnewobject")

    # The object was persisted successfully and is retrievable
    fetched = client.get_object(Bucket=bucket, Key="keyname")
    assert fetched["ContentLength"] == 15
    assert fetched["Body"].read() == b"bodyofnewobject"
@mock_aws
def test_object_copy__sends_to_queue():
    """CopyObject emits an ObjectCreated:Copy event to the configured SQS queue."""
    resource = boto3.resource("s3", region_name=REGION_NAME)
    s3 = boto3.client("s3", region_name=REGION_NAME)
    sqs = boto3.client("sqs", region_name=REGION_NAME)

    # Bucket + queue under test
    bucket_name = str(uuid4())
    resource.create_bucket(Bucket=bucket_name)
    queue_url = sqs.create_queue(QueueName=str(uuid4())[0:6])["QueueUrl"]
    attrs = sqs.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["QueueArn"])
    queue_arn = attrs["Attributes"]["QueueArn"]

    # Route ObjectCreated:Copy events to the queue
    s3.put_bucket_notification_configuration(
        Bucket=bucket_name,
        NotificationConfiguration={
            "QueueConfigurations": [
                {
                    "Id": "queue_config",
                    "QueueArn": queue_arn,
                    "Events": ["s3:ObjectCreated:Copy"],
                }
            ]
        },
    )

    # Configuring the notification immediately publishes an s3:TestEvent
    test_messages = sqs.receive_message(QueueUrl=queue_url)["Messages"]
    assert len(test_messages) == 1
    test_event = json.loads(test_messages[0]["Body"])
    assert test_event["Service"] == "Amazon S3"
    assert test_event["Event"] == "s3:TestEvent"
    assert "Time" in test_event
    assert test_event["Bucket"] == bucket_name

    # Create a source object, then copy it to trigger the notification
    s3.put_object(Bucket=bucket_name, Key="keyname", Body="bodyofnewobject")
    s3.copy_object(
        Bucket=bucket_name, CopySource=f"{bucket_name}/keyname", Key="key!"
    )

    # Exactly one Copy-event record should now be on the queue
    copy_resp = sqs.receive_message(QueueUrl=queue_url)
    assert len(copy_resp["Messages"]) == 1
    records = json.loads(copy_resp["Messages"][0]["Body"])["Records"]
    assert len(records) == 1
    record = records[0]
    assert record["awsRegion"] == REGION_NAME
    assert record["eventName"] == "ObjectCreated:Copy"
    assert record["eventSource"] == "aws:s3"
    assert "eventTime" in record
    assert "s3" in record
    assert "bucket" in record["s3"]
    assert record["s3"]["bucket"]["arn"] == f"arn:aws:s3:::{bucket_name}"
    assert record["s3"]["bucket"]["name"] == bucket_name
    assert record["s3"]["configurationId"] == "queue_config"
    assert "object" in record["s3"]
    assert record["s3"]["object"]["eTag"] == "61ea96c3c8d2c76fc5a42bfccb6affd9"
    # The key is URL-encoded in the event payload ("key!" -> "key%21")
    assert record["s3"]["object"]["key"] == "key%21"
    assert record["s3"]["object"]["size"] == 15
@mock_aws
def test_object_put__sends_to_queue__using_filter():
    """Key filters (prefix/suffix) decide which PUT events reach the queue."""
    resource = boto3.resource("s3", region_name=REGION_NAME)
    s3 = boto3.client("s3", region_name=REGION_NAME)
    sqs = boto3.resource("sqs", region_name=REGION_NAME)

    # Bucket + queue under test
    bucket_name = str(uuid4())
    resource.create_bucket(Bucket=bucket_name)
    queue = sqs.create_queue(QueueName=f"{str(uuid4())[0:6]}")
    queue_arn = queue.attributes["QueueArn"]

    # Two configs on the same queue: prefix-only, and prefix + suffix
    s3.put_bucket_notification_configuration(
        Bucket=bucket_name,
        NotificationConfiguration={
            "QueueConfigurations": [
                {
                    "Id": "prefixed",
                    "QueueArn": queue_arn,
                    "Events": ["s3:ObjectCreated:Put"],
                    "Filter": {
                        "Key": {"FilterRules": [{"Name": "prefix", "Value": "aa"}]}
                    },
                },
                {
                    "Id": "images_only",
                    "QueueArn": queue_arn,
                    "Events": ["s3:ObjectCreated:Put"],
                    "Filter": {
                        "Key": {
                            "FilterRules": [
                                {"Name": "prefix", "Value": "image/"},
                                {"Name": "suffix", "Value": "jpg"},
                            ]
                        }
                    },
                },
            ]
        },
    )

    def _expect_messages(count):
        # Drain the queue and check how many notifications arrived.
        received = queue.receive_messages()
        assert len(received) == count
        for message in received:
            message.delete()

    # Discard the initial s3:TestEvent
    for message in queue.receive_messages():
        message.delete()

    # Object that matches neither filter
    s3.put_object(Bucket=bucket_name, Key="bb", Body="sth")
    _expect_messages(0)

    # Object that matches the prefix-only filter
    s3.put_object(Bucket=bucket_name, Key="aafilter", Body="sth")
    _expect_messages(1)

    # Object that matches the prefix + suffix filter
    s3.put_object(Bucket=bucket_name, Key="image/yes.jpg", Body="img")
    _expect_messages(1)

    # Prefix matches but suffix does not
    s3.put_object(Bucket=bucket_name, Key="image/no.gif", Body="img")
    _expect_messages(0)

    # Suffix matches but prefix does not
    s3.put_object(Bucket=bucket_name, Key="nonimages/yes.jpg", Body="img")
    _expect_messages(0)
@mock_aws
@pytest.mark.parametrize(
    "region,partition", [("us-west-2", "aws"), ("cn-north-1", "aws-cn")]
)
def test_put_bucket_notification_sns_sqs(region, partition):
    """S3 -> SNS -> SQS fan-out delivers both the test event and PUT notifications.

    Parametrized over regular and China partitions to verify the bucket ARN
    carries the correct partition prefix.
    """
    s3 = boto3.client("s3", region_name=region)
    s3.create_bucket(
        Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": region}
    )

    sqs = boto3.client("sqs", region_name=region)
    queue_url = sqs.create_queue(QueueName="queue")["QueueUrl"]
    queue_arn = sqs.get_queue_attributes(
        QueueUrl=queue_url, AttributeNames=["QueueArn"]
    )["Attributes"]["QueueArn"]

    sns = boto3.client("sns", region_name=region)
    topic_arn = sns.create_topic(Name="topic")["TopicArn"]

    # Subscribe SQS queue to SNS topic
    sns.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=queue_arn)

    # Set S3 to send ObjectCreated to SNS
    s3.put_bucket_notification_configuration(
        Bucket="bucket",
        NotificationConfiguration={
            "TopicConfigurations": [
                {
                    "Id": "SomeID",
                    "TopicArn": topic_arn,
                    "Events": ["s3:ObjectCreated:*"],
                }
            ]
        },
    )

    # Configuring the notification immediately publishes an s3:TestEvent
    received = sqs.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=10)
    assert len(received["Messages"]) == 1
    sqs.delete_message(
        QueueUrl=queue_url,
        ReceiptHandle=received["Messages"][0]["ReceiptHandle"],
    )
    envelope = json.loads(received["Messages"][0]["Body"])
    assert envelope["Type"] == "Notification"
    # The S3 event is JSON nested inside the SNS envelope's Message field
    inner = json.loads(envelope["Message"])
    assert inner["Event"] == "s3:TestEvent"

    # Upload a file to trigger the real notification
    s3.put_object(Bucket="bucket", Key="myfile", Body=b"asdf1324")

    # Verify the queue received exactly one SNS-wrapped S3 record
    received = sqs.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=10)
    assert len(received["Messages"]) == 1
    envelope = json.loads(received["Messages"][0]["Body"])
    assert envelope["Type"] == "Notification"
    record = json.loads(envelope["Message"])["Records"][0]
    assert record["eventName"] == "ObjectCreated:Put"
    assert record["awsRegion"] == region
    assert record["s3"]["bucket"]["arn"] == f"arn:{partition}:s3:::bucket"
@mock_aws
def test_put_bucket_notification_sns_error():
    """A notification targeting a non-existent SNS topic must not break PutObject."""
    client = boto3.client("s3", region_name=REGION_NAME)
    client.create_bucket(Bucket="bucket")

    # Point the bucket's ObjectCreated notifications at a topic that doesn't exist
    client.put_bucket_notification_configuration(
        Bucket="bucket",
        NotificationConfiguration={
            "TopicConfigurations": [
                {
                    "Id": "SomeID",
                    "TopicArn": "arn:aws:sns:us-east-1:012345678910:notexistingtopic",
                    "Events": ["s3:ObjectCreated:*"],
                }
            ]
        },
    )

    # This should not throw an exception
    client.put_object(Bucket="bucket", Key="myfile", Body=b"asdf1324")