
Commit ffc40e5

feat(s3): add skip destination validation property (#30916)
### Issue #30914

Closes #30914.

### Reason for this change

When customers call this API to set up an S3 notification configuration for SQS/SNS/Lambda, S3 sends an `s3:TestEvent` to validate permissions (for Lambda it performs a dry-run function invocation instead). However, some customers do not want S3 to send these test events and validate permissions during CDK deployment. Internal reference: `49359101-0e5e-43f3-99eb-3c6c5ed68db1`

For example, one customer does not want these test events because they have an alarm on unconsumed messages in SQS and no SQS consumers, and they update the notification configuration frequently, which leads to many test events in the queue. See internal ticket: `P142186522`

### Description of changes

Expose a skip destination validation property in Bucket props that is passed along when calling the PutBucketNotification API.

### Description of how you validated changes

Updated unit tests and integration tests.

Note that I had to fix two integration tests and run them with the `--disable-update-workflow` flag because they were failing:

- `integ.s3.imported-bucket.js` failed because someone had already created a bucket named `cdk-integration-test-s3-imported-bucket-name`
- `integ.bucket-notifications.js` failed because of an overlapping-suffix error (not sure how it was passing previously):

```
❌ cdk-integ-lambda-bucket-s3-notifications failed: Error: The stack named cdk-integ-lambda-bucket-s3-notifications failed to deploy: UPDATE_FAILED (The following resource(s) failed to update: [Construct1IntegUnmanagedBucket1Notifications4A1599D7]. ): Received response status [FAILED] from custom resource. Message returned: Error: An error occurred (InvalidArgument) when calling the PutBucketNotificationConfiguration operation: Configuration is ambiguously defined. Cannot have overlapping suffixes in two rules if the prefixes are overlapping for the same event type.. See the details in CloudWatch Log Stream: 2024/07/22/[$LATEST]e6a16cf979dd4671998e7d911769ff42 (RequestId: 19f6fcd7-d31d-4fbf-9f4a-e3b7cba1cd2b), Rolling back the failed resource only., Received response status [FAILED] from custom resource. Message returned: Error: An error occurred (InvalidArgument) when calling the PutBucketNotificationConfiguration operation: Configuration is ambiguously defined. Cannot have overlapping suffixes in two rules if the prefixes are overlapping for the same event type.. See the details in CloudWatch Log Stream: 2024/07/22/[$LATEST]c30efd0375d64b8088e0ee64d63ce4db (RequestId: 19f6fcd7-d31d-4fbf-9f4a-e3b7cba1cd2b)
```

### Checklist

- [X] My code adheres to the [CONTRIBUTING GUIDE](https://github.com/aws/aws-cdk/blob/main/CONTRIBUTING.md) and [DESIGN GUIDELINES](https://github.com/aws/aws-cdk/blob/main/docs/DESIGN_GUIDELINES.md)

----

*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license*
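For illustration, a minimal sketch of how a CDK app might opt out of destination validation, assuming the new setting is exposed on `BucketProps` as `notificationsSkipDestinationValidation` (the exact property name should be verified against the released `aws-cdk-lib` API):

```ts
import * as cdk from 'aws-cdk-lib';
import * as s3 from 'aws-cdk-lib/aws-s3';
import * as sqs from 'aws-cdk-lib/aws-sqs';
import * as s3n from 'aws-cdk-lib/aws-s3-notifications';

const app = new cdk.App();
const stack = new cdk.Stack(app, 'NotificationsStack');

const queue = new sqs.Queue(stack, 'Queue');

// Assumed property name: skips the s3:TestEvent / dry-run invocation that S3
// normally performs when the notification configuration is applied.
const bucket = new s3.Bucket(stack, 'Bucket', {
  notificationsSkipDestinationValidation: true,
});

// The bucket notifications custom resource forwards the flag to
// PutBucketNotificationConfiguration when this notification is applied.
bucket.addEventNotification(s3.EventType.OBJECT_CREATED, new s3n.SqsDestination(queue));
```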
1 parent c1c800e commit ffc40e5

File tree: 53 files changed (+2231, -2806 lines)

```diff
@@ -2,11 +2,18 @@
   "Resources": {
     "bucket43879C71": {
       "Type": "AWS::S3::Bucket",
-      "Properties": {
-        "BucketName": "cdk-integration-test-s3-imported-bucket-name"
+      "UpdateReplacePolicy": "Delete",
+      "DeletionPolicy": "Delete"
+    }
+  },
+  "Outputs": {
+    "ExportsOutputRefbucket43879C716CF1CFA3": {
+      "Value": {
+        "Ref": "bucket43879C71"
       },
-      "UpdateReplacePolicy": "Retain",
-      "DeletionPolicy": "Retain"
+      "Export": {
+        "Name": "TestStack1:ExportsOutputRefbucket43879C716CF1CFA3"
+      }
     }
   },
   "Parameters": {
```
```diff
@@ -1,5 +1,55 @@
 {
   "Resources": {
+    "FServiceRole3AC82EE1": {
+      "Type": "AWS::IAM::Role",
+      "Properties": {
+        "AssumeRolePolicyDocument": {
+          "Statement": [
+            {
+              "Action": "sts:AssumeRole",
+              "Effect": "Allow",
+              "Principal": {
+                "Service": "lambda.amazonaws.com"
+              }
+            }
+          ],
+          "Version": "2012-10-17"
+        },
+        "ManagedPolicyArns": [
+          {
+            "Fn::Join": [
+              "",
+              [
+                "arn:",
+                {
+                  "Ref": "AWS::Partition"
+                },
+                ":iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
+              ]
+            ]
+          }
+        ]
+      }
+    },
+    "FC4345940": {
+      "Type": "AWS::Lambda::Function",
+      "Properties": {
+        "Code": {
+          "ZipFile": "exports.handler = async function handler(event) {\n console.log('event:', JSON.stringify(event, undefined, 2));\n return { event };\n}"
+        },
+        "Handler": "index.handler",
+        "Role": {
+          "Fn::GetAtt": [
+            "FServiceRole3AC82EE1",
+            "Arn"
+          ]
+        },
+        "Runtime": "nodejs18.x"
+      },
+      "DependsOn": [
+        "FServiceRole3AC82EE1"
+      ]
+    },
     "ImportedNotificationsDB5DE386": {
       "Type": "Custom::S3BucketNotifications",
       "Properties": {
@@ -9,7 +59,9 @@
             "Arn"
           ]
         },
-        "BucketName": "cdk-integration-test-s3-imported-bucket-name",
+        "BucketName": {
+          "Fn::ImportValue": "TestStack1:ExportsOutputRefbucket43879C716CF1CFA3"
+        },
         "NotificationConfiguration": {
           "LambdaFunctionConfigurations": [
             {
@@ -25,13 +77,14 @@
             }
           ]
         },
-        "Managed": false
+        "Managed": false,
+        "SkipDestinationValidation": false
       },
       "DependsOn": [
-        "ImportedAllowBucketNotificationsToTestStackF6B9A922242C13EDE"
+        "ImportedAllowBucketNotificationsToTestStack2F56424633CA7CA6E4"
       ]
     },
-    "ImportedAllowBucketNotificationsToTestStackF6B9A922242C13EDE": {
+    "ImportedAllowBucketNotificationsToTestStack2F56424633CA7CA6E4": {
       "Type": "AWS::Lambda::Permission",
       "Properties": {
         "Action": "lambda:InvokeFunction",
@@ -53,62 +106,15 @@
               {
                 "Ref": "AWS::Partition"
               },
-              ":s3:::cdk-integration-test-s3-imported-bucket-name"
+              ":s3:::",
+              {
+                "Fn::ImportValue": "TestStack1:ExportsOutputRefbucket43879C716CF1CFA3"
+              }
             ]
           ]
         }
       }
     },
-    "FServiceRole3AC82EE1": {
-      "Type": "AWS::IAM::Role",
-      "Properties": {
-        "AssumeRolePolicyDocument": {
-          "Statement": [
-            {
-              "Action": "sts:AssumeRole",
-              "Effect": "Allow",
-              "Principal": {
-                "Service": "lambda.amazonaws.com"
-              }
-            }
-          ],
-          "Version": "2012-10-17"
-        },
-        "ManagedPolicyArns": [
-          {
-            "Fn::Join": [
-              "",
-              [
-                "arn:",
-                {
-                  "Ref": "AWS::Partition"
-                },
-                ":iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
-              ]
-            ]
-          }
-        ]
-      }
-    },
-    "FC4345940": {
-      "Type": "AWS::Lambda::Function",
-      "Properties": {
-        "Code": {
-          "ZipFile": "exports.handler = async function handler(event) {\n console.log('event:', JSON.stringify(event, undefined, 2));\n return { event };\n}"
-        },
-        "Handler": "index.handler",
-        "Role": {
-          "Fn::GetAtt": [
-            "FServiceRole3AC82EE1",
-            "Arn"
-          ]
-        },
-        "Runtime": "nodejs18.x"
-      },
-      "DependsOn": [
-        "FServiceRole3AC82EE1"
-      ]
-    },
     "BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleB6FB88EC": {
       "Type": "AWS::IAM::Role",
       "Properties": {
@@ -169,7 +175,7 @@
       "Properties": {
         "Description": "AWS CloudFormation handler for \"Custom::S3BucketNotifications\" resources (@aws-cdk/aws-s3)",
         "Code": {
-          "ZipFile": "import boto3 # type: ignore\nimport json\nimport logging\nimport urllib.request\n\ns3 = boto3.client(\"s3\")\n\nEVENTBRIDGE_CONFIGURATION = 'EventBridgeConfiguration'\nCONFIGURATION_TYPES = [\"TopicConfigurations\", \"QueueConfigurations\", \"LambdaFunctionConfigurations\"]\n\ndef handler(event: dict, context):\n response_status = \"SUCCESS\"\n error_message = \"\"\n try:\n props = event[\"ResourceProperties\"]\n notification_configuration = props[\"NotificationConfiguration\"]\n managed = props.get('Managed', 'true').lower() == 'true'\n stack_id = event['StackId']\n old = event.get(\"OldResourceProperties\", {}).get(\"NotificationConfiguration\", {})\n if managed:\n config = handle_managed(event[\"RequestType\"], notification_configuration)\n else:\n config = handle_unmanaged(props[\"BucketName\"], stack_id, event[\"RequestType\"], notification_configuration, old)\n s3.put_bucket_notification_configuration(Bucket=props[\"BucketName\"], NotificationConfiguration=config)\n except Exception as e:\n logging.exception(\"Failed to put bucket notification configuration\")\n response_status = \"FAILED\"\n error_message = f\"Error: {str(e)}. \"\n finally:\n submit_response(event, context, response_status, error_message)\n\ndef handle_managed(request_type, notification_configuration):\n if request_type == 'Delete':\n return {}\n return notification_configuration\n\ndef handle_unmanaged(bucket, stack_id, request_type, notification_configuration, old):\n def get_id(n):\n n['Id'] = ''\n strToHash=json.dumps(n, sort_keys=True).replace('\"Name\": \"prefix\"', '\"Name\": \"Prefix\"').replace('\"Name\": \"suffix\"', '\"Name\": \"Suffix\"')\n return f\"{stack_id}-{hash(strToHash)}\"\n def with_id(n):\n n['Id'] = get_id(n)\n return n\n\n external_notifications = {}\n existing_notifications = s3.get_bucket_notification_configuration(Bucket=bucket)\n for t in CONFIGURATION_TYPES:\n if request_type == 'Update':\n old_incoming_ids = [get_id(n) for n in old.get(t, [])]\n external_notifications[t] = [n for n in existing_notifications.get(t, []) if not get_id(n) in old_incoming_ids] \n elif request_type == 'Delete':\n external_notifications[t] = [n for n in existing_notifications.get(t, []) if not n['Id'].startswith(f\"{stack_id}-\")]\n elif request_type == 'Create':\n external_notifications[t] = [n for n in existing_notifications.get(t, [])]\n if EVENTBRIDGE_CONFIGURATION in existing_notifications:\n external_notifications[EVENTBRIDGE_CONFIGURATION] = existing_notifications[EVENTBRIDGE_CONFIGURATION]\n\n if request_type == 'Delete':\n return external_notifications\n\n notifications = {}\n for t in CONFIGURATION_TYPES:\n external = external_notifications.get(t, [])\n incoming = [with_id(n) for n in notification_configuration.get(t, [])]\n notifications[t] = external + incoming\n\n if EVENTBRIDGE_CONFIGURATION in notification_configuration:\n notifications[EVENTBRIDGE_CONFIGURATION] = notification_configuration[EVENTBRIDGE_CONFIGURATION]\n elif EVENTBRIDGE_CONFIGURATION in external_notifications:\n notifications[EVENTBRIDGE_CONFIGURATION] = external_notifications[EVENTBRIDGE_CONFIGURATION]\n\n return notifications\n\ndef submit_response(event: dict, context, response_status: str, error_message: str):\n response_body = json.dumps(\n {\n \"Status\": response_status,\n \"Reason\": f\"{error_message}See the details in CloudWatch Log Stream: {context.log_stream_name}\",\n \"PhysicalResourceId\": event.get(\"PhysicalResourceId\") or event[\"LogicalResourceId\"],\n \"StackId\": event[\"StackId\"],\n \"RequestId\": event[\"RequestId\"],\n \"LogicalResourceId\": event[\"LogicalResourceId\"],\n \"NoEcho\": False,\n }\n ).encode(\"utf-8\")\n headers = {\"content-type\": \"\", \"content-length\": str(len(response_body))}\n try:\n req = urllib.request.Request(url=event[\"ResponseURL\"], headers=headers, data=response_body, method=\"PUT\")\n with urllib.request.urlopen(req) as response:\n print(response.read().decode(\"utf-8\"))\n print(\"Status code: \" + response.reason)\n except Exception as e:\n print(\"send(..) failed executing request.urlopen(..): \" + str(e))"
+          "ZipFile": "import boto3 # type: ignore\nimport json\nimport logging\nimport urllib.request\n\ns3 = boto3.client(\"s3\")\n\nEVENTBRIDGE_CONFIGURATION = 'EventBridgeConfiguration'\nCONFIGURATION_TYPES = [\"TopicConfigurations\", \"QueueConfigurations\", \"LambdaFunctionConfigurations\"]\n\ndef handler(event: dict, context):\n response_status = \"SUCCESS\"\n error_message = \"\"\n try:\n props = event[\"ResourceProperties\"]\n notification_configuration = props[\"NotificationConfiguration\"]\n managed = props.get('Managed', 'true').lower() == 'true'\n skipDestinationValidation = props.get('SkipDestinationValidation', 'false').lower() == 'true'\n stack_id = event['StackId']\n old = event.get(\"OldResourceProperties\", {}).get(\"NotificationConfiguration\", {})\n if managed:\n config = handle_managed(event[\"RequestType\"], notification_configuration)\n else:\n config = handle_unmanaged(props[\"BucketName\"], stack_id, event[\"RequestType\"], notification_configuration, old)\n s3.put_bucket_notification_configuration(Bucket=props[\"BucketName\"], NotificationConfiguration=config, SkipDestinationValidation=skipDestinationValidation)\n except Exception as e:\n logging.exception(\"Failed to put bucket notification configuration\")\n response_status = \"FAILED\"\n error_message = f\"Error: {str(e)}. \"\n finally:\n submit_response(event, context, response_status, error_message)\n\ndef handle_managed(request_type, notification_configuration):\n if request_type == 'Delete':\n return {}\n return notification_configuration\n\ndef handle_unmanaged(bucket, stack_id, request_type, notification_configuration, old):\n def get_id(n):\n n['Id'] = ''\n strToHash=json.dumps(n, sort_keys=True).replace('\"Name\": \"prefix\"', '\"Name\": \"Prefix\"').replace('\"Name\": \"suffix\"', '\"Name\": \"Suffix\"')\n return f\"{stack_id}-{hash(strToHash)}\"\n def with_id(n):\n n['Id'] = get_id(n)\n return n\n\n external_notifications = {}\n existing_notifications = s3.get_bucket_notification_configuration(Bucket=bucket)\n for t in CONFIGURATION_TYPES:\n if request_type == 'Update':\n old_incoming_ids = [get_id(n) for n in old.get(t, [])]\n external_notifications[t] = [n for n in existing_notifications.get(t, []) if not get_id(n) in old_incoming_ids] \n elif request_type == 'Delete':\n external_notifications[t] = [n for n in existing_notifications.get(t, []) if not n['Id'].startswith(f\"{stack_id}-\")]\n elif request_type == 'Create':\n external_notifications[t] = [n for n in existing_notifications.get(t, [])]\n if EVENTBRIDGE_CONFIGURATION in existing_notifications:\n external_notifications[EVENTBRIDGE_CONFIGURATION] = existing_notifications[EVENTBRIDGE_CONFIGURATION]\n\n if request_type == 'Delete':\n return external_notifications\n\n notifications = {}\n for t in CONFIGURATION_TYPES:\n external = external_notifications.get(t, [])\n incoming = [with_id(n) for n in notification_configuration.get(t, [])]\n notifications[t] = external + incoming\n\n if EVENTBRIDGE_CONFIGURATION in notification_configuration:\n notifications[EVENTBRIDGE_CONFIGURATION] = notification_configuration[EVENTBRIDGE_CONFIGURATION]\n elif EVENTBRIDGE_CONFIGURATION in external_notifications:\n notifications[EVENTBRIDGE_CONFIGURATION] = external_notifications[EVENTBRIDGE_CONFIGURATION]\n\n return notifications\n\ndef submit_response(event: dict, context, response_status: str, error_message: str):\n response_body = json.dumps(\n {\n \"Status\": response_status,\n \"Reason\": f\"{error_message}See the details in CloudWatch Log Stream: {context.log_stream_name}\",\n \"PhysicalResourceId\": event.get(\"PhysicalResourceId\") or event[\"LogicalResourceId\"],\n \"StackId\": event[\"StackId\"],\n \"RequestId\": event[\"RequestId\"],\n \"LogicalResourceId\": event[\"LogicalResourceId\"],\n \"NoEcho\": False,\n }\n ).encode(\"utf-8\")\n headers = {\"content-type\": \"\", \"content-length\": str(len(response_body))}\n try:\n req = urllib.request.Request(url=event[\"ResponseURL\"], headers=headers, data=response_body, method=\"PUT\")\n with urllib.request.urlopen(req) as response:\n print(response.read().decode(\"utf-8\"))\n print(\"Status code: \" + response.reason)\n except Exception as e:\n print(\"send(..) failed executing request.urlopen(..): \" + str(e))"
         },
         "Handler": "index.handler",
         "Role": {
```

packages/@aws-cdk-testing/framework-integ/test/aws-lambda-event-sources/test/integ.s3.imported-bucket.js.snapshot/integ.json (+1, -1)

0 commit comments
