Commit 22754d3

Author: Michael Brewer

feat(data-classes): support for code pipeline job event (#416)

1 parent 93fad02 commit 22754d3

File tree

6 files changed: +475 -7 lines changed

Diff for: aws_lambda_powertools/utilities/data_classes/__init__.py (+2)

@@ -6,6 +6,7 @@
 from .api_gateway_proxy_event import APIGatewayProxyEvent, APIGatewayProxyEventV2
 from .appsync_resolver_event import AppSyncResolverEvent
 from .cloud_watch_logs_event import CloudWatchLogsEvent
+from .code_pipeline_job_event import CodePipelineJobEvent
 from .connect_contact_flow_event import ConnectContactFlowEvent
 from .dynamo_db_stream_event import DynamoDBStreamEvent
 from .event_bridge_event import EventBridgeEvent
@@ -21,6 +22,7 @@
     "AppSyncResolverEvent",
     "ALBEvent",
     "CloudWatchLogsEvent",
+    "CodePipelineJobEvent",
     "ConnectContactFlowEvent",
     "DynamoDBStreamEvent",
     "EventBridgeEvent",
Diff for: aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py (new file, +236)

import json
import tempfile
import zipfile
from typing import Any, Dict, List, Optional
from urllib.parse import unquote_plus

import boto3

from aws_lambda_powertools.utilities.data_classes.common import DictWrapper


class CodePipelineConfiguration(DictWrapper):
    @property
    def function_name(self) -> str:
        """Function name"""
        return self["FunctionName"]

    @property
    def user_parameters(self) -> str:
        """User parameters"""
        return self["UserParameters"]

    @property
    def decoded_user_parameters(self) -> Dict[str, Any]:
        """JSON decoded user parameters"""
        return json.loads(self.user_parameters)


class CodePipelineActionConfiguration(DictWrapper):
    """CodePipeline Action Configuration"""

    @property
    def configuration(self) -> CodePipelineConfiguration:
        return CodePipelineConfiguration(self["configuration"])


class CodePipelineS3Location(DictWrapper):
    @property
    def bucket_name(self) -> str:
        return self["bucketName"]

    @property
    def key(self) -> str:
        """Raw S3 object key"""
        return self["objectKey"]

    @property
    def object_key(self) -> str:
        """Unquote plus of the S3 object key"""
        return unquote_plus(self["objectKey"])


class CodePipelineLocation(DictWrapper):
    @property
    def get_type(self) -> str:
        """Location type, e.g. S3"""
        return self["type"]

    @property
    def s3_location(self) -> CodePipelineS3Location:
        """S3 location"""
        return CodePipelineS3Location(self["s3Location"])


class CodePipelineArtifact(DictWrapper):
    @property
    def name(self) -> str:
        """Name"""
        return self["name"]

    @property
    def revision(self) -> Optional[str]:
        return self.get("revision")

    @property
    def location(self) -> CodePipelineLocation:
        return CodePipelineLocation(self["location"])


class CodePipelineArtifactCredentials(DictWrapper):
    @property
    def access_key_id(self) -> str:
        return self["accessKeyId"]

    @property
    def secret_access_key(self) -> str:
        return self["secretAccessKey"]

    @property
    def session_token(self) -> str:
        return self["sessionToken"]

    @property
    def expiration_time(self) -> Optional[int]:
        return self.get("expirationTime")


class CodePipelineData(DictWrapper):
    """CodePipeline Job Data"""

    @property
    def action_configuration(self) -> CodePipelineActionConfiguration:
        """CodePipeline action configuration"""
        return CodePipelineActionConfiguration(self["actionConfiguration"])

    @property
    def input_artifacts(self) -> List[CodePipelineArtifact]:
        """Represents a CodePipeline input artifact"""
        return [CodePipelineArtifact(item) for item in self["inputArtifacts"]]

    @property
    def output_artifacts(self) -> List[CodePipelineArtifact]:
        """Represents a CodePipeline output artifact"""
        return [CodePipelineArtifact(item) for item in self["outputArtifacts"]]

    @property
    def artifact_credentials(self) -> CodePipelineArtifactCredentials:
        """Represents the CodePipeline artifact credentials"""
        return CodePipelineArtifactCredentials(self["artifactCredentials"])

    @property
    def continuation_token(self) -> Optional[str]:
        """A continuation token if continuing job"""
        return self.get("continuationToken")


class CodePipelineJobEvent(DictWrapper):
    """AWS CodePipeline Job Event

    Documentation:
    -------------
    - https://docs.aws.amazon.com/codepipeline/latest/userguide/actions-invoke-lambda-function.html
    - https://docs.aws.amazon.com/lambda/latest/dg/services-codepipeline.html
    """

    def __init__(self, data: Dict[str, Any]):
        super().__init__(data)
        self._job = self["CodePipeline.job"]

    @property
    def get_id(self) -> str:
        """Job id"""
        return self._job["id"]

    @property
    def account_id(self) -> str:
        """Account id"""
        return self._job["accountId"]

    @property
    def data(self) -> CodePipelineData:
        """CodePipeline job data"""
        return CodePipelineData(self._job["data"])

    @property
    def user_parameters(self) -> str:
        """Action configuration user parameters"""
        return self.data.action_configuration.configuration.user_parameters

    @property
    def decoded_user_parameters(self) -> Dict[str, Any]:
        """JSON decoded action configuration user parameters"""
        return self.data.action_configuration.configuration.decoded_user_parameters

    @property
    def input_bucket_name(self) -> str:
        """Get the first input artifact bucket name"""
        return self.data.input_artifacts[0].location.s3_location.bucket_name

    @property
    def input_object_key(self) -> str:
        """Get the first input artifact object key, unquote plus decoded"""
        return self.data.input_artifacts[0].location.s3_location.object_key

    def setup_s3_client(self):
        """Creates an S3 client

        Uses the credentials passed in the event by CodePipeline. These
        credentials can be used to access the artifact bucket.

        Returns
        -------
        BaseClient
            An S3 client with the appropriate credentials
        """
        return boto3.client(
            "s3",
            aws_access_key_id=self.data.artifact_credentials.access_key_id,
            aws_secret_access_key=self.data.artifact_credentials.secret_access_key,
            aws_session_token=self.data.artifact_credentials.session_token,
        )

    def find_input_artifact(self, artifact_name: str) -> Optional[CodePipelineArtifact]:
        """Find an input artifact by artifact name

        Parameters
        ----------
        artifact_name : str
            The name of the input artifact to look for

        Returns
        -------
        CodePipelineArtifact, None
            Matching CodePipelineArtifact if found
        """
        for artifact in self.data.input_artifacts:
            if artifact.name == artifact_name:
                return artifact
        return None

    def get_artifact(self, artifact_name: str, filename: str) -> Optional[str]:
        """Get a file within an artifact zip on S3

        Parameters
        ----------
        artifact_name : str
            Name of the S3 artifact to download
        filename : str
            The file name within the artifact zip to extract as a string

        Returns
        -------
        str, None
            The file contents as a string
        """
        artifact = self.find_input_artifact(artifact_name)
        if artifact is None:
            return None

        with tempfile.NamedTemporaryFile() as tmp_file:
            s3 = self.setup_s3_client()
            bucket = artifact.location.s3_location.bucket_name
            key = artifact.location.s3_location.key
            s3.download_file(bucket, key, tmp_file.name)
            with zipfile.ZipFile(tmp_file.name, "r") as zip_file:
                return zip_file.read(filename).decode("UTF-8")
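
To see how these pieces fit together end to end, here is a minimal handler sketch (not part of this commit) that reads the action's user parameters with `decoded_user_parameters`, pulls a file out of an input artifact zip with `get_artifact`, and reports the result back to the pipeline with a plain boto3 `codepipeline` client. The `"artifact"` and `"file"` parameter keys are illustrative assumptions, not anything the data class requires.

```python
import boto3

from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent

codepipeline = boto3.client("codepipeline")


def lambda_handler(event, context):
    job = CodePipelineJobEvent(event)

    try:
        # User parameters are configured on the pipeline action as a JSON string,
        # e.g. {"artifact": "BuildOutput", "file": "template.yaml"} (illustrative keys)
        params = job.decoded_user_parameters
        contents = job.get_artifact(params["artifact"], params["file"])
        if contents is None:
            raise ValueError(f"Artifact {params['artifact']} not found in input artifacts")

        # ... act on the extracted file here ...

        # Tell CodePipeline the action succeeded so the pipeline can move on
        codepipeline.put_job_success_result(jobId=job.get_id)
    except Exception as exc:
        # Surface the error on the pipeline stage instead of silently timing out
        codepipeline.put_job_failure_result(
            jobId=job.get_id,
            failureDetails={"type": "JobFailed", "message": str(exc)},
        )
        raise
```

Reporting a success or failure result is what completes a Lambda invoke action; if neither call is made, the stage stays in progress until CodePipeline times the job out.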

Diff for: docs/utilities/data_classes.md (+53)

@@ -52,6 +52,7 @@ Event Source | Data_class
 [API Gateway Proxy event v2](#api-gateway-proxy-v2) | `APIGatewayProxyEventV2`
 [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent`
 [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent`
+[CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent`
 [Cognito User Pool](#cognito-user-pool) | Multiple available under `cognito_user_pool_event`
 [Connect Contact Flow](#connect-contact-flow) | `ConnectContactFlowEvent`
 [DynamoDB streams](#dynamodb-streams) | `DynamoDBStreamEvent`, `DynamoDBRecordEventName`
@@ -222,6 +223,58 @@ decompress and parse json data from the event.
         do_something_with(event.timestamp, event.message)
     ```

+### CodePipeline Job
+
+Data classes and utility functions to help create continuous delivery pipeline tasks with AWS Lambda.
+
+=== "app.py"
+
+    ```python
+    from aws_lambda_powertools import Logger
+    from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent
+
+    logger = Logger()
+
+
+    def lambda_handler(event, context):
+        """The Lambda function handler
+
+        If this is a continuing job, check the CloudFormation stack status
+        and update the job accordingly.
+
+        If this is a new job, kick off an update or creation of the target
+        CloudFormation stack.
+        """
+        event: CodePipelineJobEvent = CodePipelineJobEvent(event)
+
+        # Extract the Job ID
+        job_id = event.get_id
+
+        # Extract the params
+        params: dict = event.decoded_user_parameters
+        stack = params["stack"]
+        artifact_name = params["artifact"]
+        template_file = params["file"]
+
+        try:
+            if event.data.continuation_token:
+                # If we're continuing, the create/update has already been triggered;
+                # we just need to check whether it has finished.
+                check_stack_update_status(job_id, stack)
+            else:
+                template = event.get_artifact(artifact_name, template_file)
+                # Kick off a stack update or create
+                start_update_or_create(job_id, stack, template)
+        except Exception as e:
+            # If any other unexpected exceptions are raised,
+            # fail the job and log the exception message.
+            logger.exception("Function failed due to exception.")
+            put_job_failure(job_id, "Function exception: " + str(e))
+
+        logger.debug("Function complete.")
+        return "Complete."
+    ```
+
 ### Cognito User Pool

 Cognito User Pools have several [different Lambda trigger sources](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html#cognito-user-identity-pools-working-with-aws-lambda-trigger-sources), all of which map to a different data class, which
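
The helpers referenced in the documentation example above (`check_stack_update_status`, `start_update_or_create`, `put_job_failure`) are not part of this commit and are left to the reader. As a hedged sketch of the continuation pattern only: returning a continuation token via `put_job_success_result` makes CodePipeline invoke the function again with `event.data.continuation_token` set, so a long-running CloudFormation update can be polled across invocations.

```python
import boto3

codepipeline = boto3.client("codepipeline")
cloudformation = boto3.client("cloudformation")


def check_stack_update_status(job_id: str, stack: str) -> None:
    """Poll the stack and either finish the job or ask CodePipeline to call us again."""
    status = cloudformation.describe_stacks(StackName=stack)["Stacks"][0]["StackStatus"]

    if status in ("CREATE_COMPLETE", "UPDATE_COMPLETE"):
        codepipeline.put_job_success_result(jobId=job_id)
    elif status.endswith("_IN_PROGRESS"):
        # A continuation token tells CodePipeline to re-invoke this function later
        # with event.data.continuation_token populated, so we can keep polling.
        codepipeline.put_job_success_result(jobId=job_id, continuationToken=job_id)
    else:
        codepipeline.put_job_failure_result(
            jobId=job_id,
            failureDetails={"type": "JobFailed", "message": f"Stack status: {status}"},
        )
```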

Diff for: tests/events/codePipelineEvent.json (new file, +34)

{
  "CodePipeline.job": {
    "id": "11111111-abcd-1111-abcd-111111abcdef",
    "accountId": "111111111111",
    "data": {
      "actionConfiguration": {
        "configuration": {
          "FunctionName": "MyLambdaFunctionForAWSCodePipeline",
          "UserParameters": "some-input-such-as-a-URL"
        }
      },
      "inputArtifacts": [
        {
          "name": "ArtifactName",
          "revision": null,
          "location": {
            "type": "S3",
            "s3Location": {
              "bucketName": "the name of the bucket configured as the pipeline artifact store in Amazon S3, for example codepipeline-us-east-2-1234567890",
              "objectKey": "the name of the application, for example CodePipelineDemoApplication.zip"
            }
          }
        }
      ],
      "outputArtifacts": [],
      "artifactCredentials": {
        "accessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "secretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
        "sessionToken": "MIICiTCCAfICCQD6m7oRw0uXOjANBgkqhkiG9w0BAQUFADCBiDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAldBMRAwDgYDVQQHEwdTZWF0dGxlMQ8wDQYDVQQKEwZBbWF6b24xFDASBgNVBAsTC0lBTSBDb25zb2xlMRIwEAYDVQQDEwlUZXN0Q2lsYWMxHzAdBgkqhkiG9w0BCQEWEG5vb25lQGFtYXpvbi5jb20wHhcNMTEwNDI1MjA0NTIxWhcNMTIwNDI0MjA0NTIxWjCBiDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAldBMRAwDgYDVQQHEwdTZWF0dGxlMQ8wDQYDVQQKEwZBbWF6b24xFDASBgNVBAsTC0lBTSBDb25zb2xlMRIwEAYDVQQDEwlUZXN0Q2lsYWMxHzAdBgkqhkiG9w0BCQEWEG5vb25lQGFtYXpvbi5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMaK0dn+a4GmWIWJ21uUSfwfEvySWtC2XADZ4nB+BLYgVIk60CpiwsZ3G93vUEIO3IyNoH/f0wYK8m9TrDHudUZg3qX4waLG5M43q7Wgc/MbQITxOUSQv7c7ugFFDzQGBzZswY6786m86gpEIbb3OhjZnzcvQAaRHhdlQWIMm2nrAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEAtCu4nUhVVxYUntneD9+h8Mg9q6q+auNKyExzyLwaxlAoo7TJHidbtS4J5iNmZgXL0FkbFFBjvSfpJIlJ00zbhNYS5f6GuoEDmFJl0ZxBHjJnyp378OD8uTs7fLvjx79LjSTbNYiytVbZPQUQ5Yaxu2jXnimvw3rrszlaEXAMPLE="
      },
      "continuationToken": "A continuation token if continuing job"
    }
  }
}
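
The commit stats list 6 files changed, so the remaining changed files (most likely the unit tests) are not shown in this view. As a rough idea only, with the test module name and assertions being illustrative rather than taken from the commit, a test against this fixture might look like:

```python
import json

from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent


def test_code_pipeline_job_event():
    # Path is relative to the repository root; adjust to however the test suite loads fixtures
    with open("tests/events/codePipelineEvent.json") as f:
        event = CodePipelineJobEvent(json.load(f))

    assert event.get_id == "11111111-abcd-1111-abcd-111111abcdef"
    assert event.account_id == "111111111111"
    assert event.user_parameters == "some-input-such-as-a-URL"
    assert event.data.input_artifacts[0].name == "ArtifactName"
    assert event.data.input_artifacts[0].location.get_type == "S3"
    assert event.data.continuation_token == "A continuation token if continuing job"
```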
