From d52670fef82a8952d62fc5b12d49c08a8a9bafb6 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 3 Jul 2023 21:24:43 +0100 Subject: [PATCH 01/28] moving function tests to unit tests --- tests/functional/parser/test_alb.py | 44 -------- tests/unit/parser/conftest.py | 43 +++++++ tests/unit/parser/schemas.py | 106 ++++++++++++++++++ tests/unit/parser/test_alb.py | 30 +++++ .../{functional => unit}/parser/test_apigw.py | 0 5 files changed, 179 insertions(+), 44 deletions(-) delete mode 100644 tests/functional/parser/test_alb.py create mode 100644 tests/unit/parser/conftest.py create mode 100644 tests/unit/parser/schemas.py create mode 100644 tests/unit/parser/test_alb.py rename tests/{functional => unit}/parser/test_apigw.py (100%) diff --git a/tests/functional/parser/test_alb.py b/tests/functional/parser/test_alb.py deleted file mode 100644 index d48e39f1bab..00000000000 --- a/tests/functional/parser/test_alb.py +++ /dev/null @@ -1,44 +0,0 @@ -import pytest - -from aws_lambda_powertools.utilities.parser import ValidationError, event_parser -from aws_lambda_powertools.utilities.parser.models import AlbModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.utils import load_event - - -@event_parser(model=AlbModel) -def handle_alb(event: AlbModel, _: LambdaContext): - assert ( - event.requestContext.elb.targetGroupArn - == "arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a" # noqa E501 - ) - assert event.httpMethod == "GET" - assert event.path == "/lambda" - assert event.queryStringParameters == {"query": "1234ABCD"} - assert event.headers == { - "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", - "accept-encoding": "gzip", - "accept-language": "en-US,en;q=0.9", - "connection": "keep-alive", - "host": "lambda-alb-123578498.us-east-2.elb.amazonaws.com", - "upgrade-insecure-requests": "1", - "user-agent": "Mozilla/5.0 
(Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", # noqa E501 - "x-amzn-trace-id": "Root=1-5c536348-3d683b8b04734faae651f476", - "x-forwarded-for": "72.12.164.125", - "x-forwarded-port": "80", - "x-forwarded-proto": "http", - "x-imforwards": "20", - } - assert event.body == "Test" - assert not event.isBase64Encoded - - -def test_alb_trigger_event(): - event_dict = load_event("albEvent.json") - handle_alb(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model(): - event = {"invalid": "event"} - with pytest.raises(ValidationError): - handle_alb(event, LambdaContext()) diff --git a/tests/unit/parser/conftest.py b/tests/unit/parser/conftest.py new file mode 100644 index 00000000000..34199a322b2 --- /dev/null +++ b/tests/unit/parser/conftest.py @@ -0,0 +1,43 @@ +from typing import Any, Dict + +import pytest +from pydantic import BaseModel + +from aws_lambda_powertools.utilities.parser import BaseEnvelope + + +@pytest.fixture +def dummy_event(): + return {"payload": {"message": "hello world"}} + + +@pytest.fixture +def dummy_schema(): + """Wanted payload structure""" + + class MyDummyModel(BaseModel): + message: str + + return MyDummyModel + + +@pytest.fixture +def dummy_envelope_schema(): + """Event wrapper structure""" + + class MyDummyEnvelopeSchema(BaseModel): + payload: Dict + + return MyDummyEnvelopeSchema + + +@pytest.fixture +def dummy_envelope(dummy_envelope_schema): + class MyDummyEnvelope(BaseEnvelope): + """Unwrap dummy event within payload key""" + + def parse(self, data: Dict[str, Any], model: BaseModel): + parsed_enveloped = dummy_envelope_schema(**data) + return self._parse(data=parsed_enveloped.payload, model=model) + + return MyDummyEnvelope diff --git a/tests/unit/parser/schemas.py b/tests/unit/parser/schemas.py new file mode 100644 index 00000000000..1da0213ff45 --- /dev/null +++ b/tests/unit/parser/schemas.py @@ -0,0 +1,106 @@ +from typing import Dict, List, 
Optional + +from pydantic import BaseModel + +from aws_lambda_powertools.utilities.parser.models import ( + DynamoDBStreamChangedRecordModel, + DynamoDBStreamModel, + DynamoDBStreamRecordModel, + EventBridgeModel, + SnsModel, + SnsNotificationModel, + SnsRecordModel, + SqsModel, + SqsRecordModel, +) +from aws_lambda_powertools.utilities.parser.types import Literal + + +class MyDynamoBusiness(BaseModel): + Message: Dict[Literal["S"], str] + Id: Dict[Literal["N"], int] + + +class MyDynamoScheme(DynamoDBStreamChangedRecordModel): + NewImage: Optional[MyDynamoBusiness] + OldImage: Optional[MyDynamoBusiness] + + +class MyDynamoDBStreamRecordModel(DynamoDBStreamRecordModel): + dynamodb: MyDynamoScheme + + +class MyAdvancedDynamoBusiness(DynamoDBStreamModel): + Records: List[MyDynamoDBStreamRecordModel] + + +class MyEventbridgeBusiness(BaseModel): + instance_id: str + state: str + + +class MyAdvancedEventbridgeBusiness(EventBridgeModel): + detail: MyEventbridgeBusiness + + +class MySqsBusiness(BaseModel): + message: str + username: str + + +class MyAdvancedSqsRecordModel(SqsRecordModel): + body: str + + +class MyAdvancedSqsBusiness(SqsModel): + Records: List[MyAdvancedSqsRecordModel] + + +class MySnsBusiness(BaseModel): + message: str + username: str + + +class MySnsNotificationModel(SnsNotificationModel): + Message: str + + +class MyAdvancedSnsRecordModel(SnsRecordModel): + Sns: MySnsNotificationModel + + +class MyAdvancedSnsBusiness(SnsModel): + Records: List[MyAdvancedSnsRecordModel] + + +class MyKinesisBusiness(BaseModel): + message: str + username: str + + +class MyCloudWatchBusiness(BaseModel): + my_message: str + user: str + + +class MyApiGatewayBusiness(BaseModel): + message: str + username: str + + +class MyALambdaFuncUrlBusiness(BaseModel): + message: str + username: str + + +class MyLambdaKafkaBusiness(BaseModel): + key: str + + +class MyKinesisFirehoseBusiness(BaseModel): + Hello: str + + +class MyVpcLatticeBusiness(BaseModel): + username: str + name: str diff 
--git a/tests/unit/parser/test_alb.py b/tests/unit/parser/test_alb.py new file mode 100644 index 00000000000..38fab25e9c3 --- /dev/null +++ b/tests/unit/parser/test_alb.py @@ -0,0 +1,30 @@ +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError, event_parser +from aws_lambda_powertools.utilities.parser.models import AlbModel +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.utils import load_event + + +@event_parser(model=AlbModel) +def handle_alb(event: AlbModel, _: LambdaContext): + return event + + +def test_alb_trigger_event(): + raw_event = load_event("albEvent.json") + parsed_event = handle_alb(raw_event, LambdaContext()) + + assert parsed_event.requestContext.elb.targetGroupArn == raw_event["requestContext"]["elb"]["targetGroupArn"] + assert parsed_event.httpMethod == raw_event["httpMethod"] + assert parsed_event.path == raw_event["path"] + assert parsed_event.queryStringParameters == raw_event["queryStringParameters"] + assert parsed_event.headers == raw_event["headers"] + assert parsed_event.body == raw_event["body"] + assert not parsed_event.isBase64Encoded + + +def test_validate_event_does_not_conform_with_model(): + event = {"invalid": "event"} + with pytest.raises(ValidationError): + handle_alb(event, LambdaContext()) diff --git a/tests/functional/parser/test_apigw.py b/tests/unit/parser/test_apigw.py similarity index 100% rename from tests/functional/parser/test_apigw.py rename to tests/unit/parser/test_apigw.py From d01c54347168981448b399e976ff5d3cc420eb25 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 3 Jul 2023 21:46:16 +0100 Subject: [PATCH 02/28] moving + refactoring apigwv2 --- tests/functional/parser/test_apigwv2.py | 105 ---------------------- tests/unit/parser/test_alb.py | 2 +- tests/unit/parser/test_apigw.py | 104 +++++++++++----------- tests/unit/parser/test_apigwv2.py | 113 ++++++++++++++++++++++++ 4 files changed, 167 insertions(+), 157 deletions(-) delete 
mode 100644 tests/functional/parser/test_apigwv2.py create mode 100644 tests/unit/parser/test_apigwv2.py diff --git a/tests/functional/parser/test_apigwv2.py b/tests/functional/parser/test_apigwv2.py deleted file mode 100644 index d3510b185dd..00000000000 --- a/tests/functional/parser/test_apigwv2.py +++ /dev/null @@ -1,105 +0,0 @@ -from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse -from aws_lambda_powertools.utilities.parser.models import ( - APIGatewayProxyEventV2Model, - RequestContextV2, - RequestContextV2Authorizer, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyApiGatewayBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) -def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): - assert event.message == "Hello" - assert event.username == "Ran" - - -@event_parser(model=APIGatewayProxyEventV2Model) -def handle_apigw_event(event: APIGatewayProxyEventV2Model, _: LambdaContext): - return event - - -def test_apigw_v2_event_with_envelope(): - event = load_event("apiGatewayProxyV2Event.json") - event["body"] = '{"message": "Hello", "username": "Ran"}' - handle_apigw_with_envelope(event, LambdaContext()) - - -def test_apigw_v2_event_jwt_authorizer(): - event = load_event("apiGatewayProxyV2Event.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) - assert parsed_event.version == event["version"] - assert parsed_event.routeKey == event["routeKey"] - assert parsed_event.rawPath == event["rawPath"] - assert parsed_event.rawQueryString == event["rawQueryString"] - assert parsed_event.cookies == event["cookies"] - assert parsed_event.cookies[0] == "cookie1" - assert parsed_event.headers == event["headers"] - assert parsed_event.queryStringParameters == event["queryStringParameters"] - assert 
parsed_event.queryStringParameters["parameter2"] == "value" - - request_context = parsed_event.requestContext - assert request_context.accountId == event["requestContext"]["accountId"] - assert request_context.apiId == event["requestContext"]["apiId"] - assert request_context.authorizer.jwt.claims == event["requestContext"]["authorizer"]["jwt"]["claims"] - assert request_context.authorizer.jwt.scopes == event["requestContext"]["authorizer"]["jwt"]["scopes"] - assert request_context.domainName == event["requestContext"]["domainName"] - assert request_context.domainPrefix == event["requestContext"]["domainPrefix"] - - http = request_context.http - assert http.method == "POST" - assert http.path == "/my/path" - assert http.protocol == "HTTP/1.1" - assert str(http.sourceIp) == "192.168.0.1/32" - assert http.userAgent == "agent" - - assert request_context.requestId == event["requestContext"]["requestId"] - assert request_context.routeKey == event["requestContext"]["routeKey"] - assert request_context.stage == event["requestContext"]["stage"] - assert request_context.time == event["requestContext"]["time"] - convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) - assert convert_time == event["requestContext"]["timeEpoch"] - assert parsed_event.body == event["body"] - assert parsed_event.pathParameters == event["pathParameters"] - assert parsed_event.isBase64Encoded == event["isBase64Encoded"] - assert parsed_event.stageVariables == event["stageVariables"] - - -def test_api_gateway_proxy_v2_event_lambda_authorizer(): - event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) - request_context: RequestContextV2 = parsed_event.requestContext - assert request_context is not None - lambda_props: RequestContextV2Authorizer = request_context.authorizer.lambda_value - assert lambda_props is not None - assert lambda_props["key"] == "value" - - -def 
test_api_gateway_proxy_v2_event_iam_authorizer(): - event = load_event("apiGatewayProxyV2IamEvent.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) - iam = parsed_event.requestContext.authorizer.iam - assert iam is not None - assert iam.accessKey == "ARIA2ZJZYVUEREEIHAKY" - assert iam.accountId == "1234567890" - assert iam.callerId == "AROA7ZJZYVRE7C3DUXHH6:CognitoIdentityCredentials" - assert iam.cognitoIdentity.amr == ["foo"] - assert iam.cognitoIdentity.identityId == "us-east-1:3f291106-8703-466b-8f2b-3ecee1ca56ce" - assert iam.cognitoIdentity.identityPoolId == "us-east-1:4f291106-8703-466b-8f2b-3ecee1ca56ce" - assert iam.principalOrgId == "AwsOrgId" - assert iam.userArn == "arn:aws:iam::1234567890:user/Admin" - assert iam.userId == "AROA2ZJZYVRE7Y3TUXHH6" - - -def test_apigw_event_empty_body(): - event = load_event("apiGatewayProxyV2Event.json") - event.pop("body") # API GW v2 removes certain keys when no data is passed - parse(event=event, model=APIGatewayProxyEventV2Model) - - -def test_apigw_event_empty_query_strings(): - event = load_event("apiGatewayProxyV2Event.json") - event["rawQueryString"] = "" - event.pop("queryStringParameters") # API GW v2 removes certain keys when no data is passed - parse(event=event, model=APIGatewayProxyEventV2Model) diff --git a/tests/unit/parser/test_alb.py b/tests/unit/parser/test_alb.py index 38fab25e9c3..29dac72fa81 100644 --- a/tests/unit/parser/test_alb.py +++ b/tests/unit/parser/test_alb.py @@ -13,7 +13,7 @@ def handle_alb(event: AlbModel, _: LambdaContext): def test_alb_trigger_event(): raw_event = load_event("albEvent.json") - parsed_event = handle_alb(raw_event, LambdaContext()) + parsed_event: AlbModel = handle_alb(raw_event, LambdaContext()) assert parsed_event.requestContext.elb.targetGroupArn == raw_event["requestContext"]["elb"]["targetGroupArn"] assert parsed_event.httpMethod == raw_event["httpMethod"] diff --git a/tests/unit/parser/test_apigw.py 
b/tests/unit/parser/test_apigw.py index 35b2fdb1926..f31d8cc2e32 100644 --- a/tests/unit/parser/test_apigw.py +++ b/tests/unit/parser/test_apigw.py @@ -10,89 +10,91 @@ @event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayEnvelope) def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): - assert event.message == "Hello" - assert event.username == "Ran" + return event @event_parser(model=APIGatewayProxyEventModel) def handle_apigw_event(event: APIGatewayProxyEventModel, _: LambdaContext): - assert event.body == "Hello from Lambda!" return event def test_apigw_event_with_envelope(): - event = load_event("apiGatewayProxyEvent.json") - event["body"] = '{"message": "Hello", "username": "Ran"}' - handle_apigw_with_envelope(event, LambdaContext()) + raw_event = load_event("apiGatewayProxyEvent.json") + raw_event["body"] = '{"message": "Hello", "username": "Ran"}' + parsed_event: MyApiGatewayBusiness = handle_apigw_with_envelope(raw_event, LambdaContext()) + + assert parsed_event.message == "Hello" + assert parsed_event.username == "Ran" def test_apigw_event(): - event = load_event("apiGatewayProxyEvent.json") - parsed_event: APIGatewayProxyEventModel = handle_apigw_event(event, LambdaContext()) - assert parsed_event.version == event["version"] - assert parsed_event.resource == event["resource"] - assert parsed_event.path == event["path"] - assert parsed_event.headers == event["headers"] - assert parsed_event.multiValueHeaders == event["multiValueHeaders"] - assert parsed_event.queryStringParameters == event["queryStringParameters"] - assert parsed_event.multiValueQueryStringParameters == event["multiValueQueryStringParameters"] + raw_event = load_event("apiGatewayProxyEvent.json") + parsed_event: APIGatewayProxyEventModel = handle_apigw_event(raw_event, LambdaContext()) + assert parsed_event.version == raw_event["version"] + assert parsed_event.resource == raw_event["resource"] + assert parsed_event.path == raw_event["path"] + 
assert parsed_event.headers == raw_event["headers"] + assert parsed_event.multiValueHeaders == raw_event["multiValueHeaders"] + assert parsed_event.queryStringParameters == raw_event["queryStringParameters"] + assert parsed_event.multiValueQueryStringParameters == raw_event["multiValueQueryStringParameters"] request_context = parsed_event.requestContext - assert request_context.accountId == event["requestContext"]["accountId"] - assert request_context.apiId == event["requestContext"]["apiId"] + assert request_context.accountId == raw_event["requestContext"]["accountId"] + assert request_context.apiId == raw_event["requestContext"]["apiId"] authorizer = request_context.authorizer assert authorizer.claims is None assert authorizer.scopes is None - assert request_context.domainName == event["requestContext"]["domainName"] - assert request_context.domainPrefix == event["requestContext"]["domainPrefix"] - assert request_context.extendedRequestId == event["requestContext"]["extendedRequestId"] - assert request_context.httpMethod == event["requestContext"]["httpMethod"] + assert request_context.domainName == raw_event["requestContext"]["domainName"] + assert request_context.domainPrefix == raw_event["requestContext"]["domainPrefix"] + assert request_context.extendedRequestId == raw_event["requestContext"]["extendedRequestId"] + assert request_context.httpMethod == raw_event["requestContext"]["httpMethod"] identity = request_context.identity - assert identity.accessKey == event["requestContext"]["identity"]["accessKey"] - assert identity.accountId == event["requestContext"]["identity"]["accountId"] - assert identity.caller == event["requestContext"]["identity"]["caller"] + assert identity.accessKey == raw_event["requestContext"]["identity"]["accessKey"] + assert identity.accountId == raw_event["requestContext"]["identity"]["accountId"] + assert identity.caller == raw_event["requestContext"]["identity"]["caller"] assert ( - identity.cognitoAuthenticationProvider == 
event["requestContext"]["identity"]["cognitoAuthenticationProvider"] + identity.cognitoAuthenticationProvider + == raw_event["requestContext"]["identity"]["cognitoAuthenticationProvider"] ) - assert identity.cognitoAuthenticationType == event["requestContext"]["identity"]["cognitoAuthenticationType"] - assert identity.cognitoIdentityId == event["requestContext"]["identity"]["cognitoIdentityId"] - assert identity.cognitoIdentityPoolId == event["requestContext"]["identity"]["cognitoIdentityPoolId"] - assert identity.principalOrgId == event["requestContext"]["identity"]["principalOrgId"] - assert str(identity.sourceIp) == event["requestContext"]["identity"]["sourceIp"] - assert identity.user == event["requestContext"]["identity"]["user"] - assert identity.userAgent == event["requestContext"]["identity"]["userAgent"] - assert identity.userArn == event["requestContext"]["identity"]["userArn"] + assert identity.cognitoAuthenticationType == raw_event["requestContext"]["identity"]["cognitoAuthenticationType"] + assert identity.cognitoIdentityId == raw_event["requestContext"]["identity"]["cognitoIdentityId"] + assert identity.cognitoIdentityPoolId == raw_event["requestContext"]["identity"]["cognitoIdentityPoolId"] + assert identity.principalOrgId == raw_event["requestContext"]["identity"]["principalOrgId"] + assert str(identity.sourceIp) == raw_event["requestContext"]["identity"]["sourceIp"] + assert identity.user == raw_event["requestContext"]["identity"]["user"] + assert identity.userAgent == raw_event["requestContext"]["identity"]["userAgent"] + assert identity.userArn == raw_event["requestContext"]["identity"]["userArn"] assert identity.clientCert is not None - assert identity.clientCert.clientCertPem == event["requestContext"]["identity"]["clientCert"]["clientCertPem"] - assert identity.clientCert.subjectDN == event["requestContext"]["identity"]["clientCert"]["subjectDN"] - assert identity.clientCert.issuerDN == 
event["requestContext"]["identity"]["clientCert"]["issuerDN"] - assert identity.clientCert.serialNumber == event["requestContext"]["identity"]["clientCert"]["serialNumber"] + assert identity.clientCert.clientCertPem == raw_event["requestContext"]["identity"]["clientCert"]["clientCertPem"] + assert identity.clientCert.subjectDN == raw_event["requestContext"]["identity"]["clientCert"]["subjectDN"] + assert identity.clientCert.issuerDN == raw_event["requestContext"]["identity"]["clientCert"]["issuerDN"] + assert identity.clientCert.serialNumber == raw_event["requestContext"]["identity"]["clientCert"]["serialNumber"] assert ( identity.clientCert.validity.notBefore - == event["requestContext"]["identity"]["clientCert"]["validity"]["notBefore"] + == raw_event["requestContext"]["identity"]["clientCert"]["validity"]["notBefore"] ) assert ( identity.clientCert.validity.notAfter - == event["requestContext"]["identity"]["clientCert"]["validity"]["notAfter"] + == raw_event["requestContext"]["identity"]["clientCert"]["validity"]["notAfter"] ) - assert request_context.path == event["requestContext"]["path"] - assert request_context.protocol == event["requestContext"]["protocol"] - assert request_context.requestId == event["requestContext"]["requestId"] - assert request_context.requestTime == event["requestContext"]["requestTime"] + assert request_context.path == raw_event["requestContext"]["path"] + assert request_context.protocol == raw_event["requestContext"]["protocol"] + assert request_context.requestId == raw_event["requestContext"]["requestId"] + assert request_context.requestTime == raw_event["requestContext"]["requestTime"] convert_time = int(round(request_context.requestTimeEpoch.timestamp() * 1000)) assert convert_time == 1583349317135 - assert request_context.resourceId == event["requestContext"]["resourceId"] - assert request_context.resourcePath == event["requestContext"]["resourcePath"] - assert request_context.stage == event["requestContext"]["stage"] - - assert 
parsed_event.pathParameters == event["pathParameters"] - assert parsed_event.stageVariables == event["stageVariables"] - assert parsed_event.body == event["body"] - assert parsed_event.isBase64Encoded == event["isBase64Encoded"] + assert request_context.resourceId == raw_event["requestContext"]["resourceId"] + assert request_context.resourcePath == raw_event["requestContext"]["resourcePath"] + assert request_context.stage == raw_event["requestContext"]["stage"] + + assert parsed_event.pathParameters == raw_event["pathParameters"] + assert parsed_event.stageVariables == raw_event["stageVariables"] + assert parsed_event.body == raw_event["body"] + assert parsed_event.isBase64Encoded == raw_event["isBase64Encoded"] assert request_context.connectedAt is None assert request_context.connectionId is None diff --git a/tests/unit/parser/test_apigwv2.py b/tests/unit/parser/test_apigwv2.py new file mode 100644 index 00000000000..5a0efc1ff16 --- /dev/null +++ b/tests/unit/parser/test_apigwv2.py @@ -0,0 +1,113 @@ +from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse +from aws_lambda_powertools.utilities.parser.models import ( + APIGatewayProxyEventV2Model, + RequestContextV2, + RequestContextV2Authorizer, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyApiGatewayBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) +def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): + return event + + +@event_parser(model=APIGatewayProxyEventV2Model) +def handle_apigw_event(event: APIGatewayProxyEventV2Model, _: LambdaContext): + return event + + +def test_apigw_v2_event_with_envelope(): + raw_event = load_event("apiGatewayProxyV2Event.json") + raw_event["body"] = '{"message": "Hello", "username": "Ran"}' + parsed_event: MyApiGatewayBusiness = handle_apigw_with_envelope(raw_event, 
LambdaContext()) + + assert parsed_event.message == "Hello" + assert parsed_event.username == "Ran" + + +def test_apigw_v2_event_jwt_authorizer(): + raw_event = load_event("apiGatewayProxyV2Event.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(raw_event, LambdaContext()) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.routeKey == raw_event["routeKey"] + assert parsed_event.rawPath == raw_event["rawPath"] + assert parsed_event.rawQueryString == raw_event["rawQueryString"] + assert parsed_event.cookies == raw_event["cookies"] + assert parsed_event.cookies[0] == "cookie1" + assert parsed_event.headers == raw_event["headers"] + assert parsed_event.queryStringParameters == raw_event["queryStringParameters"] + assert parsed_event.queryStringParameters.get("parameter2") == raw_event["queryStringParameters"]["parameter2"] + + request_context = parsed_event.requestContext + assert request_context.accountId == raw_event["requestContext"]["accountId"] + assert request_context.apiId == raw_event["requestContext"]["apiId"] + assert request_context.authorizer.jwt.claims == raw_event["requestContext"]["authorizer"]["jwt"]["claims"] + assert request_context.authorizer.jwt.scopes == raw_event["requestContext"]["authorizer"]["jwt"]["scopes"] + assert request_context.domainName == raw_event["requestContext"]["domainName"] + assert request_context.domainPrefix == raw_event["requestContext"]["domainPrefix"] + + http = request_context.http + raw_http = raw_event["requestContext"]["http"] + assert http.method == raw_http["method"] + assert http.path == raw_http["path"] + assert http.protocol == raw_http["protocol"] + assert str(http.sourceIp) == raw_http["sourceIp"] + assert http.userAgent == raw_http["userAgent"] + + assert request_context.requestId == raw_event["requestContext"]["requestId"] + assert request_context.routeKey == raw_event["requestContext"]["routeKey"] + assert request_context.stage == 
raw_event["requestContext"]["stage"] + assert request_context.time == raw_event["requestContext"]["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == raw_event["requestContext"]["timeEpoch"] + assert parsed_event.body == raw_event["body"] + assert parsed_event.pathParameters == raw_event["pathParameters"] + assert parsed_event.isBase64Encoded == raw_event["isBase64Encoded"] + assert parsed_event.stageVariables == raw_event["stageVariables"] + + +def test_api_gateway_proxy_v2_event_lambda_authorizer(): + raw_event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(raw_event, LambdaContext()) + + request_context: RequestContextV2 = parsed_event.requestContext + assert request_context is not None + + lambda_props: RequestContextV2Authorizer = request_context.authorizer.lambda_value + assert lambda_props is not None + assert lambda_props["key"] == raw_event["requestContext"]["authorizer"]["lambda"]["key"] + + +def test_api_gateway_proxy_v2_event_iam_authorizer(): + raw_event = load_event("apiGatewayProxyV2IamEvent.json") + parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(raw_event, LambdaContext()) + + iam = parsed_event.requestContext.authorizer.iam + raw_iam = raw_event["requestContext"]["authorizer"]["iam"] + assert iam is not None + assert iam.accessKey == raw_iam["accessKey"] + assert iam.accountId == raw_iam["accountId"] + assert iam.callerId == raw_iam["callerId"] + assert iam.cognitoIdentity.amr == raw_iam["cognitoIdentity"]["amr"] + assert iam.cognitoIdentity.identityId == raw_iam["cognitoIdentity"]["identityId"] + assert iam.cognitoIdentity.identityPoolId == raw_iam["cognitoIdentity"]["identityPoolId"] + assert iam.principalOrgId == raw_iam["principalOrgId"] + assert iam.userArn == raw_iam["userArn"] + assert iam.userId == raw_iam["userId"] + + +def test_apigw_event_empty_body(): + raw_event = 
load_event("apiGatewayProxyV2Event.json") + raw_event.pop("body") # API GW v2 removes certain keys when no data is passed + parse(event=raw_event, model=APIGatewayProxyEventV2Model) + + +def test_apigw_event_empty_query_strings(): + raw_event = load_event("apiGatewayProxyV2Event.json") + raw_event["rawQueryString"] = "" + raw_event.pop("queryStringParameters") # API GW v2 removes certain keys when no data is passed + parse(event=raw_event, model=APIGatewayProxyEventV2Model) From 75f9eab78f300ee062ceee199fd5d0fd9eadc5fe Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 3 Jul 2023 23:38:13 +0100 Subject: [PATCH 03/28] moving + cloudwatch --- .../parser/test_cloudwatch.py | 40 ++++++++++--------- 1 file changed, 21 insertions(+), 19 deletions(-) rename tests/{functional => unit}/parser/test_cloudwatch.py (77%) diff --git a/tests/functional/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py similarity index 77% rename from tests/functional/parser/test_cloudwatch.py rename to tests/unit/parser/test_cloudwatch.py index 5fa197bb792..b249565a53f 100644 --- a/tests/functional/parser/test_cloudwatch.py +++ b/tests/unit/parser/test_cloudwatch.py @@ -29,23 +29,7 @@ def handle_cloudwatch_logs(event: List[MyCloudWatchBusiness], _: LambdaContext): @event_parser(model=CloudWatchLogsModel) def handle_cloudwatch_logs_no_envelope(event: CloudWatchLogsModel, _: LambdaContext): - assert event.awslogs.decoded_data.owner == "123456789123" - assert event.awslogs.decoded_data.logGroup == "testLogGroup" - assert event.awslogs.decoded_data.logStream == "testLogStream" - assert event.awslogs.decoded_data.subscriptionFilters == ["testFilter"] - assert event.awslogs.decoded_data.messageType == "DATA_MESSAGE" - - assert len(event.awslogs.decoded_data.logEvents) == 2 - log_record: CloudWatchLogsLogEvent = event.awslogs.decoded_data.logEvents[0] - assert log_record.id == "eventId1" - convert_time = int(round(log_record.timestamp.timestamp() * 1000)) - assert convert_time == 
1440442987000 - assert log_record.message == "[ERROR] First test message" - log_record: CloudWatchLogsLogEvent = event.awslogs.decoded_data.logEvents[1] - assert log_record.id == "eventId2" - convert_time = int(round(log_record.timestamp.timestamp() * 1000)) - assert convert_time == 1440442987001 - assert log_record.message == "[ERROR] Second test message" + return event def test_validate_event_user_model_with_envelope(): @@ -72,8 +56,26 @@ def test_validate_event_does_not_conform_with_user_dict_model(): def test_handle_cloudwatch_trigger_event_no_envelope(): - event_dict = load_event("cloudWatchLogEvent.json") - handle_cloudwatch_logs_no_envelope(event_dict, LambdaContext()) + raw_event = load_event("cloudWatchLogEvent.json") + parsed_event: CloudWatchLogsModel = handle_cloudwatch_logs_no_envelope(raw_event, LambdaContext()) + + assert parsed_event.awslogs.decoded_data.owner == "123456789123" + assert parsed_event.awslogs.decoded_data.logGroup == "testLogGroup" + assert parsed_event.awslogs.decoded_data.logStream == "testLogStream" + assert parsed_event.awslogs.decoded_data.subscriptionFilters == ["testFilter"] + assert parsed_event.awslogs.decoded_data.messageType == "DATA_MESSAGE" + + assert len(parsed_event.awslogs.decoded_data.logEvents) == 2 + log_record: CloudWatchLogsLogEvent = parsed_event.awslogs.decoded_data.logEvents[0] + assert log_record.id == "eventId1" + convert_time = int(round(log_record.timestamp.timestamp() * 1000)) + assert convert_time == 1440442987000 + assert log_record.message == "[ERROR] First test message" + log_record: CloudWatchLogsLogEvent = parsed_event.awslogs.decoded_data.logEvents[1] + assert log_record.id == "eventId2" + convert_time = int(round(log_record.timestamp.timestamp() * 1000)) + assert convert_time == 1440442987001 + assert log_record.message == "[ERROR] Second test message" def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): From dc7f5e9e7df626dd21ecb73aaa6e282a4eaab1eb Mon Sep 17 00:00:00 2001 From: Leandro 
Damascena Date: Tue, 4 Jul 2023 13:20:14 +0100 Subject: [PATCH 04/28] moving + cloudwatch logs --- tests/unit/parser/test_cloudwatch.py | 34 +++++++++++++++++++--------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py index b249565a53f..c063507efbb 100644 --- a/tests/unit/parser/test_cloudwatch.py +++ b/tests/unit/parser/test_cloudwatch.py @@ -19,6 +19,12 @@ from tests.functional.utils import load_event +def decode_cloudwatch_raw_event(event: dict): + payload = base64.b64decode(event) + uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32) + return json.loads(uncompressed.decode("utf-8")) + + @event_parser(model=MyCloudWatchBusiness, envelope=envelopes.CloudWatchLogsEnvelope) def handle_cloudwatch_logs(event: List[MyCloudWatchBusiness], _: LambdaContext): assert len(event) == 1 @@ -59,23 +65,29 @@ def test_handle_cloudwatch_trigger_event_no_envelope(): raw_event = load_event("cloudWatchLogEvent.json") parsed_event: CloudWatchLogsModel = handle_cloudwatch_logs_no_envelope(raw_event, LambdaContext()) - assert parsed_event.awslogs.decoded_data.owner == "123456789123" - assert parsed_event.awslogs.decoded_data.logGroup == "testLogGroup" - assert parsed_event.awslogs.decoded_data.logStream == "testLogStream" - assert parsed_event.awslogs.decoded_data.subscriptionFilters == ["testFilter"] - assert parsed_event.awslogs.decoded_data.messageType == "DATA_MESSAGE" + raw_event_decoded = decode_cloudwatch_raw_event(raw_event["awslogs"]["data"]) + + assert parsed_event.awslogs.decoded_data.owner == raw_event_decoded["owner"] + assert parsed_event.awslogs.decoded_data.logGroup == raw_event_decoded["logGroup"] + assert parsed_event.awslogs.decoded_data.logStream == raw_event_decoded["logStream"] + assert parsed_event.awslogs.decoded_data.subscriptionFilters == raw_event_decoded["subscriptionFilters"] + assert parsed_event.awslogs.decoded_data.messageType == 
raw_event_decoded["messageType"] assert len(parsed_event.awslogs.decoded_data.logEvents) == 2 + log_record: CloudWatchLogsLogEvent = parsed_event.awslogs.decoded_data.logEvents[0] - assert log_record.id == "eventId1" + raw_log_record = raw_event_decoded["logEvents"][0] + assert log_record.id == raw_log_record["id"] convert_time = int(round(log_record.timestamp.timestamp() * 1000)) - assert convert_time == 1440442987000 - assert log_record.message == "[ERROR] First test message" + assert convert_time == raw_log_record["timestamp"] + assert log_record.message == raw_log_record["message"] + log_record: CloudWatchLogsLogEvent = parsed_event.awslogs.decoded_data.logEvents[1] - assert log_record.id == "eventId2" + raw_log_record = raw_event_decoded["logEvents"][1] + assert log_record.id == raw_log_record["id"] convert_time = int(round(log_record.timestamp.timestamp() * 1000)) - assert convert_time == 1440442987001 - assert log_record.message == "[ERROR] Second test message" + assert convert_time == raw_log_record["timestamp"] + assert log_record.message == raw_log_record["message"] def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): From 9ed34360e1b02ae429d92ac9a2867259aa39e757 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 13:36:19 +0100 Subject: [PATCH 05/28] moving + dynamodb --- tests/functional/parser/test_dynamodb.py | 73 ------------------ tests/unit/parser/test_cloudwatch.py | 4 +- tests/unit/parser/test_dynamodb.py | 97 ++++++++++++++++++++++++ 3 files changed, 99 insertions(+), 75 deletions(-) delete mode 100644 tests/functional/parser/test_dynamodb.py create mode 100644 tests/unit/parser/test_dynamodb.py diff --git a/tests/functional/parser/test_dynamodb.py b/tests/functional/parser/test_dynamodb.py deleted file mode 100644 index e6238b00b83..00000000000 --- a/tests/functional/parser/test_dynamodb.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Any, Dict, List - -import pytest - -from 
aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) -def handle_dynamodb(event: List[Dict[str, MyDynamoBusiness]], _: LambdaContext): - assert len(event) == 2 - assert event[0]["OldImage"] is None - assert event[0]["NewImage"].Message["S"] == "New item!" - assert event[0]["NewImage"].Id["N"] == 101 - assert event[1]["OldImage"].Message["S"] == "New item!" - assert event[1]["OldImage"].Id["N"] == 101 - assert event[1]["NewImage"].Message["S"] == "This item has changed" - assert event[1]["NewImage"].Id["N"] == 101 - - -@event_parser(model=MyAdvancedDynamoBusiness) -def handle_dynamodb_no_envelope(event: MyAdvancedDynamoBusiness, _: LambdaContext): - records = event.Records - record = records[0] - assert record.awsRegion == "us-west-2" - dynamodb = record.dynamodb - assert dynamodb is not None - assert dynamodb.ApproximateCreationDateTime is None - keys = dynamodb.Keys - assert keys is not None - id_key = keys["Id"] - assert id_key["N"] == "101" - message_key = dynamodb.NewImage.Message - assert message_key is not None - assert message_key["S"] == "New item!" 
- assert dynamodb.OldImage is None - assert dynamodb.SequenceNumber == "111" - assert dynamodb.SizeBytes == 26 - assert dynamodb.StreamViewType == "NEW_AND_OLD_IMAGES" - assert record.eventID == "1" - assert record.eventName == "INSERT" - assert record.eventSource == "aws:dynamodb" - assert record.eventSourceARN == "eventsource_arn" - assert record.eventVersion == 1.0 - assert record.userIdentity is None - - -def test_dynamo_db_stream_trigger_event(): - event_dict = load_event("dynamoStreamEvent.json") - handle_dynamodb(event_dict, LambdaContext()) - - -def test_dynamo_db_stream_trigger_event_no_envelope(): - event_dict = load_event("dynamoStreamEvent.json") - handle_dynamodb_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model_no_envelope(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_dynamodb_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_dynamodb(event_dict, LambdaContext()) diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py index c063507efbb..fb0988368f5 100644 --- a/tests/unit/parser/test_cloudwatch.py +++ b/tests/unit/parser/test_cloudwatch.py @@ -91,9 +91,9 @@ def test_handle_cloudwatch_trigger_event_no_envelope(): def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): - event_dict: Any = {"awslogs": {"data": "invalid_data"}} + raw_event: Any = {"awslogs": {"data": "invalid_data"}} with pytest.raises(ValidationError) as context: - handle_cloudwatch_logs_no_envelope(event_dict, LambdaContext()) + handle_cloudwatch_logs_no_envelope(raw_event, LambdaContext()) assert context.value.errors()[0]["msg"] == "unable to decompress data" diff --git a/tests/unit/parser/test_dynamodb.py b/tests/unit/parser/test_dynamodb.py new file mode 100644 index 00000000000..45d153a9c8f --- /dev/null +++ 
b/tests/unit/parser/test_dynamodb.py @@ -0,0 +1,97 @@ +from typing import Any, Dict, List + +import pytest + +from aws_lambda_powertools.utilities.parser import ( + ValidationError, + envelopes, + event_parser, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) +def handle_dynamodb(event: List[Dict[str, MyDynamoBusiness]], _: LambdaContext): + return event + + +@event_parser(model=MyAdvancedDynamoBusiness) +def handle_dynamodb_no_envelope(event: MyAdvancedDynamoBusiness, _: LambdaContext): + return event + + +def test_dynamo_db_stream_trigger_event(): + raw_event = load_event("dynamoStreamEvent.json") + parserd_event: MyDynamoBusiness = handle_dynamodb(raw_event, LambdaContext()) + + assert len(parserd_event) == 2 + + # record index 0 + old_image = parserd_event[0]["OldImage"] + assert old_image is None + + new_image = parserd_event[0]["NewImage"] + new_image_raw = raw_event["Records"][0]["dynamodb"]["NewImage"] + assert new_image.Message["S"] == new_image_raw["Message"]["S"] + assert new_image.Id["N"] == float(new_image_raw["Id"]["N"]) + + # record index 1 + old_image = parserd_event[1]["OldImage"] + old_image_raw = raw_event["Records"][1]["dynamodb"]["OldImage"] + assert old_image.Message["S"] == old_image_raw["Message"]["S"] + assert old_image.Id["N"] == float(old_image_raw["Id"]["N"]) + + new_image = parserd_event[1]["NewImage"] + new_image_raw = raw_event["Records"][1]["dynamodb"]["NewImage"] + assert new_image.Message["S"] == new_image_raw["Message"]["S"] + assert new_image.Id["N"] == float(new_image_raw["Id"]["N"]) + + +def test_dynamo_db_stream_trigger_event_no_envelope(): + raw_event = load_event("dynamoStreamEvent.json") + parserd_event: MyAdvancedDynamoBusiness = handle_dynamodb_no_envelope(raw_event, LambdaContext()) + 
+ records = parserd_event.Records + record = records[0] + raw_record = raw_event["Records"][0] + + assert record.awsRegion == raw_record["awsRegion"] + assert record.eventID == raw_record["eventID"] + assert record.eventName == raw_record["eventName"] + assert record.eventSource == raw_record["eventSource"] + assert record.eventSourceARN == raw_record["eventSourceARN"] + assert record.eventVersion == float(raw_record["eventVersion"]) + assert record.userIdentity is None + + dynamodb = record.dynamodb + raw_dynamodb = raw_record["dynamodb"] + assert dynamodb is not None + assert dynamodb.ApproximateCreationDateTime is None + assert dynamodb.OldImage is None + assert dynamodb.SequenceNumber == raw_dynamodb["SequenceNumber"] + assert dynamodb.SizeBytes == raw_dynamodb["SizeBytes"] + assert dynamodb.StreamViewType == raw_dynamodb["StreamViewType"] + + keys = dynamodb.Keys + raw_keys = raw_dynamodb["Keys"] + assert keys is not None + id_key = keys["Id"] + assert id_key["N"] == raw_keys["Id"]["N"] + + message_key = dynamodb.NewImage.Message + assert message_key is not None + assert message_key["S"] == "New item!" 
+ + +def test_validate_event_does_not_conform_with_model_no_envelope(): + raw_event: Any = {"hello": "s"} + with pytest.raises(ValidationError): + handle_dynamodb_no_envelope(raw_event, LambdaContext()) + + +def test_validate_event_does_not_conform_with_model(): + raw_event: Any = {"hello": "s"} + with pytest.raises(ValidationError): + handle_dynamodb(raw_event, LambdaContext()) From f12d5fcc88b6650991599eb09cbb07a67a7075dd Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 14:11:46 +0100 Subject: [PATCH 06/28] moving + eventbridge --- tests/functional/parser/test_eventbridge.py | 69 --------------------- tests/unit/parser/test_eventbridge.py | 63 +++++++++++++++++++ 2 files changed, 63 insertions(+), 69 deletions(-) delete mode 100644 tests/functional/parser/test_eventbridge.py create mode 100644 tests/unit/parser/test_eventbridge.py diff --git a/tests/functional/parser/test_eventbridge.py b/tests/functional/parser/test_eventbridge.py deleted file mode 100644 index ca41e1a4bc5..00000000000 --- a/tests/functional/parser/test_eventbridge.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Any - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import ( - MyAdvancedEventbridgeBusiness, - MyEventbridgeBusiness, -) -from tests.functional.utils import load_event - - -@event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) -def handle_eventbridge(event: MyEventbridgeBusiness, _: LambdaContext): - assert event.instance_id == "i-1234567890abcdef0" - assert event.state == "terminated" - - -@event_parser(model=MyAdvancedEventbridgeBusiness) -def handle_eventbridge_no_envelope(event: MyAdvancedEventbridgeBusiness, _: LambdaContext): - assert event.detail.instance_id == "i-1234567890abcdef0" - assert event.detail.state == "terminated" - assert event.id == 
"6a7e8feb-b491-4cf7-a9f1-bf3703467718" - assert event.version == "0" - assert event.account == "111122223333" - time_str = event.time.strftime("%Y-%m-%dT%H:%M:%SZ") - assert time_str == "2017-12-22T18:43:48Z" - assert event.region == "us-west-1" - assert event.resources == ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"] - assert event.source == "aws.ec2" - assert event.detail_type == "EC2 Instance State-change Notification" - assert event.replay_name == "replay_archive" - - -def test_handle_eventbridge_trigger_event(): - event_dict = load_event("eventBridgeEvent.json") - handle_eventbridge(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_user_dict_model(): - event_dict: Any = { - "version": "0", - "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", - "detail-type": "EC2 Instance State-change Notification", - "source": "aws.ec2", - "account": "111122223333", - "time": "2017-12-22T18:43:48Z", - "region": "us-west-1", - "resources": ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"], - "detail": {}, - } - with pytest.raises(ValidationError) as e: - handle_eventbridge(event_dict, LambdaContext()) - print(e.exconly()) - - -def test_handle_eventbridge_trigger_event_no_envelope(): - event_dict = load_event("eventBridgeEvent.json") - handle_eventbridge_no_envelope(event_dict, LambdaContext()) - - -def test_handle_invalid_event_with_eventbridge_envelope(): - with pytest.raises(ValidationError): - handle_eventbridge(event={}, context=LambdaContext()) diff --git a/tests/unit/parser/test_eventbridge.py b/tests/unit/parser/test_eventbridge.py new file mode 100644 index 00000000000..d485deeb4e7 --- /dev/null +++ b/tests/unit/parser/test_eventbridge.py @@ -0,0 +1,63 @@ +import pytest + +from aws_lambda_powertools.utilities.parser import ( + ValidationError, + envelopes, + event_parser, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import ( + 
MyAdvancedEventbridgeBusiness, + MyEventbridgeBusiness, +) +from tests.functional.utils import load_event + + +@event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) +def handle_eventbridge(event: MyEventbridgeBusiness, _: LambdaContext): + return event + + +@event_parser(model=MyAdvancedEventbridgeBusiness) +def handle_eventbridge_no_envelope(event: MyAdvancedEventbridgeBusiness, _: LambdaContext): + return event + + +def test_handle_eventbridge_trigger_event(): + raw_event = load_event("eventBridgeEvent.json") + parsed_event: MyEventbridgeBusiness = handle_eventbridge(raw_event, LambdaContext()) + + assert parsed_event.instance_id == raw_event["detail"]["instance_id"] + assert parsed_event.state == raw_event["detail"]["state"] + + +def test_validate_event_does_not_conform_with_user_dict_model(): + raw_event = load_event("eventBridgeEvent.json") + + raw_event.pop("version") + + with pytest.raises(ValidationError): + handle_eventbridge(raw_event, LambdaContext()) + + +def test_handle_eventbridge_trigger_event_no_envelope(): + raw_event = load_event("eventBridgeEvent.json") + parsed_event: MyAdvancedEventbridgeBusiness = handle_eventbridge_no_envelope(raw_event, LambdaContext()) + + assert parsed_event.detail.instance_id == raw_event["detail"]["instance_id"] + assert parsed_event.detail.state == raw_event["detail"]["state"] + assert parsed_event.id == raw_event["id"] + assert parsed_event.version == raw_event["version"] + assert parsed_event.account == raw_event["account"] + time_str = parsed_event.time.strftime("%Y-%m-%dT%H:%M:%SZ") + assert time_str == raw_event["time"] + assert parsed_event.region == raw_event["region"] + assert parsed_event.resources == raw_event["resources"] + assert parsed_event.source == raw_event["source"] + assert parsed_event.detail_type == raw_event["detail-type"] + assert parsed_event.replay_name == raw_event["replay-name"] + + +def test_handle_invalid_event_with_eventbridge_envelope(): + with 
pytest.raises(ValidationError): + handle_eventbridge(event={}, context=LambdaContext()) From 471194ae2047fcefee25ee5a4eb10ea23d08ad13 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 14:29:07 +0100 Subject: [PATCH 07/28] moving + sqs --- tests/{functional => unit}/parser/test_sqs.py | 62 ++++++++++--------- 1 file changed, 34 insertions(+), 28 deletions(-) rename tests/{functional => unit}/parser/test_sqs.py (67%) diff --git a/tests/functional/parser/test_sqs.py b/tests/unit/parser/test_sqs.py similarity index 67% rename from tests/functional/parser/test_sqs.py rename to tests/unit/parser/test_sqs.py index 4f547ad4bcc..e01cb36f23e 100644 --- a/tests/functional/parser/test_sqs.py +++ b/tests/unit/parser/test_sqs.py @@ -25,10 +25,10 @@ def test_handle_sqs_trigger_event_json_body(sqs_event): # noqa: F811 def test_validate_event_does_not_conform_with_model(): - event: Any = {"invalid": "event"} + raw_event: dict = {"invalid": "event"} with pytest.raises(ValidationError): - handle_sqs_json_body(event, LambdaContext()) + handle_sqs_json_body(raw_event, LambdaContext()) def test_validate_event_does_not_conform_user_json_string_with_model(): @@ -61,36 +61,42 @@ def test_validate_event_does_not_conform_user_json_string_with_model(): @event_parser(model=MyAdvancedSqsBusiness) def handle_sqs_no_envelope(event: MyAdvancedSqsBusiness, _: LambdaContext): - records = event.Records - record = records[0] - attributes = record.attributes - message_attributes = record.messageAttributes - test_attr = message_attributes["testAttr"] + return event + +def test_handle_sqs_trigger_event_no_envelope(): + raw_event = load_event("sqsEvent.json") + parsed_event: MyAdvancedSqsBusiness = handle_sqs_no_envelope(raw_event, LambdaContext()) + + records = parsed_event.Records + record = records[0] + raw_record = raw_event["Records"][0] assert len(records) == 2 - assert record.messageId == "059f36b4-87a3-44ab-83d2-661975830a7d" - assert record.receiptHandle == 
"AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." - assert record.body == "Test message." + + assert record.messageId == raw_record["messageId"] + assert record.receiptHandle == raw_record["receiptHandle"] + assert record.body == raw_record["body"] + assert record.eventSource == raw_record["eventSource"] + assert record.eventSourceARN == raw_record["eventSourceARN"] + assert record.awsRegion == raw_record["awsRegion"] + assert record.md5OfBody == raw_record["md5OfBody"] + + attributes = record.attributes assert attributes.AWSTraceHeader is None - assert attributes.ApproximateReceiveCount == "1" - convert_time = int(round(attributes.SentTimestamp.timestamp() * 1000)) - assert convert_time == 1545082649183 - assert attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" - convert_time = int(round(attributes.ApproximateFirstReceiveTimestamp.timestamp() * 1000)) - assert convert_time == 1545082649185 + assert attributes.ApproximateReceiveCount == raw_record["attributes"]["ApproximateReceiveCount"] assert attributes.SequenceNumber is None assert attributes.MessageGroupId is None assert attributes.MessageDeduplicationId is None - assert message_attributes.get("NotFound") is None - assert test_attr.stringValue == "100" - assert test_attr.binaryValue == "base64Str" - assert test_attr.dataType == "Number" - assert record.md5OfBody == "e4e68fb7bd0e697a0ae8f1bb342846b3" - assert record.eventSource == "aws:sqs" - assert record.eventSourceARN == "arn:aws:sqs:us-east-2:123456789012:my-queue" - assert record.awsRegion == "us-east-2" - + assert attributes.SenderId == raw_record["attributes"]["SenderId"] + convert_time = int(round(attributes.ApproximateFirstReceiveTimestamp.timestamp() * 1000)) + assert convert_time == int(raw_record["attributes"]["ApproximateFirstReceiveTimestamp"]) + convert_time = int(round(attributes.SentTimestamp.timestamp() * 1000)) + assert convert_time == int(raw_record["attributes"]["SentTimestamp"]) -def test_handle_sqs_trigger_event_no_envelope(): - event_dict = 
load_event("sqsEvent.json") - handle_sqs_no_envelope(event_dict, LambdaContext()) + message_attributes = record.messageAttributes + message_attributes_raw = raw_record["messageAttributes"]["testAttr"] + test_attr = message_attributes["testAttr"] + assert message_attributes.get("NotFound") is None + assert test_attr.stringValue == message_attributes_raw["stringValue"] + assert test_attr.binaryValue == message_attributes_raw["binaryValue"] + assert test_attr.dataType == message_attributes_raw["dataType"] From 0bffaed76418b4b82fe83df02efe3b82955a69f3 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 15:03:10 +0100 Subject: [PATCH 08/28] moving + ses --- tests/functional/parser/test_ses.py | 49 ------------------------ tests/unit/parser/test_ses.py | 59 +++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 49 deletions(-) delete mode 100644 tests/functional/parser/test_ses.py create mode 100644 tests/unit/parser/test_ses.py diff --git a/tests/functional/parser/test_ses.py b/tests/functional/parser/test_ses.py deleted file mode 100644 index d434e2350f8..00000000000 --- a/tests/functional/parser/test_ses.py +++ /dev/null @@ -1,49 +0,0 @@ -from aws_lambda_powertools.utilities.parser import event_parser -from aws_lambda_powertools.utilities.parser.models import SesModel, SesRecordModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.utils import load_event - - -@event_parser(model=SesModel) -def handle_ses(event: SesModel, _: LambdaContext): - expected_address = "johndoe@example.com" - records = event.Records - record: SesRecordModel = records[0] - assert record.eventSource == "aws:ses" - assert record.eventVersion == "1.0" - mail = record.ses.mail - convert_time = int(round(mail.timestamp.timestamp() * 1000)) - assert convert_time == 0 - assert mail.source == "janedoe@example.com" - assert mail.messageId == "o3vrnil0e2ic28tr" - assert mail.destination == [expected_address] - assert 
mail.headersTruncated is False - headers = list(mail.headers) - assert len(headers) == 10 - assert headers[0].name == "Return-Path" - assert headers[0].value == "" - common_headers = mail.commonHeaders - assert common_headers.returnPath == "janedoe@example.com" - assert common_headers.header_from == ["Jane Doe "] - assert common_headers.date == "Wed, 7 Oct 2015 12:34:56 -0700" - assert common_headers.to == [expected_address] - assert common_headers.messageId == "<0123456789example.com>" - assert common_headers.subject == "Test Subject" - receipt = record.ses.receipt - convert_time = int(round(receipt.timestamp.timestamp() * 1000)) - assert convert_time == 0 - assert receipt.processingTimeMillis == 574 - assert receipt.recipients == [expected_address] - assert receipt.spamVerdict.status == "PASS" - assert receipt.virusVerdict.status == "PASS" - assert receipt.spfVerdict.status == "PASS" - assert receipt.dmarcVerdict.status == "PASS" - action = receipt.action - assert action.type == "Lambda" - assert action.functionArn == "arn:aws:lambda:us-west-2:012345678912:function:Example" - assert action.invocationType == "Event" - - -def test_ses_trigger_event(): - event_dict = load_event("sesEvent.json") - handle_ses(event_dict, LambdaContext()) diff --git a/tests/unit/parser/test_ses.py b/tests/unit/parser/test_ses.py new file mode 100644 index 00000000000..4742b8832ae --- /dev/null +++ b/tests/unit/parser/test_ses.py @@ -0,0 +1,59 @@ +from aws_lambda_powertools.utilities.parser import event_parser +from aws_lambda_powertools.utilities.parser.models import SesModel, SesRecordModel +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.utils import load_event + + +@event_parser(model=SesModel) +def handle_ses(event: SesModel, _: LambdaContext): + return event + + +def test_ses_trigger_event(): + raw_event = load_event("sesEvent.json") + parsed_event: SesModel = handle_ses(raw_event, LambdaContext()) + + records = parsed_event.Records + 
record: SesRecordModel = records[0] + raw_record = raw_event["Records"][0] + + assert record.eventSource == raw_record["eventSource"] + assert record.eventVersion == raw_record["eventVersion"] + + mail = record.ses.mail + raw_mail = raw_record["ses"]["mail"] + assert mail.source == raw_mail["source"] + assert mail.messageId == raw_mail["messageId"] + assert mail.destination == raw_mail["destination"] + assert mail.headersTruncated is False + convert_time = int(round(mail.timestamp.timestamp() * 1000)) + assert convert_time == 0 + + headers = list(mail.headers) + assert len(headers) == 10 + assert headers[0].name == raw_mail["headers"][0]["name"] + assert headers[0].value == raw_mail["headers"][0]["value"] + + common_headers = mail.commonHeaders + assert common_headers.returnPath == raw_mail["commonHeaders"]["returnPath"] + assert common_headers.header_from == raw_mail["commonHeaders"]["from"] + assert common_headers.date == raw_mail["commonHeaders"]["date"] + assert common_headers.to == raw_mail["commonHeaders"]["to"] + assert common_headers.messageId == raw_mail["commonHeaders"]["messageId"] + assert common_headers.subject == raw_mail["commonHeaders"]["subject"] + + receipt = record.ses.receipt + raw_receipt = raw_record["ses"]["receipt"] + convert_time = int(round(receipt.timestamp.timestamp() * 1000)) + assert convert_time == 0 + assert receipt.processingTimeMillis == raw_receipt["processingTimeMillis"] + assert receipt.recipients == raw_receipt["recipients"] + assert receipt.spamVerdict.status == raw_receipt["spamVerdict"]["status"] + assert receipt.virusVerdict.status == raw_receipt["virusVerdict"]["status"] + assert receipt.spfVerdict.status == raw_receipt["spfVerdict"]["status"] + assert receipt.dmarcVerdict.status == raw_receipt["dmarcVerdict"]["status"] + + action = receipt.action + assert action.type == raw_receipt["action"]["type"] + assert action.functionArn == raw_receipt["action"]["functionArn"] + assert action.invocationType == 
raw_receipt["action"]["invocationType"] From 59425378846ef71331408da53bf0aa843169f6af Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 15:20:10 +0100 Subject: [PATCH 09/28] moving + kafka --- tests/functional/parser/test_kafka.py | 93 --------------------------- tests/unit/parser/test_kafka.py | 92 ++++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 93 deletions(-) delete mode 100644 tests/functional/parser/test_kafka.py create mode 100644 tests/unit/parser/test_kafka.py diff --git a/tests/functional/parser/test_kafka.py b/tests/functional/parser/test_kafka.py deleted file mode 100644 index f764106add4..00000000000 --- a/tests/functional/parser/test_kafka.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import List - -from aws_lambda_powertools.utilities.parser import envelopes, event_parser -from aws_lambda_powertools.utilities.parser.models import ( - KafkaMskEventModel, - KafkaRecordModel, - KafkaSelfManagedEventModel, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyLambdaKafkaBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyLambdaKafkaBusiness, envelope=envelopes.KafkaEnvelope) -def handle_lambda_kafka_with_envelope(event: List[MyLambdaKafkaBusiness], _: LambdaContext): - assert event[0].key == "value" - assert len(event) == 1 - - -@event_parser(model=KafkaSelfManagedEventModel) -def handle_kafka_event(event: KafkaSelfManagedEventModel, _: LambdaContext): - return event - - -def test_kafka_msk_event_with_envelope(): - event = load_event("kafkaEventMsk.json") - handle_lambda_kafka_with_envelope(event, LambdaContext()) - - -def test_kafka_self_managed_event_with_envelope(): - event = load_event("kafkaEventSelfManaged.json") - handle_lambda_kafka_with_envelope(event, LambdaContext()) - - -def test_self_managed_kafka_event(): - json_event = load_event("kafkaEventSelfManaged.json") - event: KafkaSelfManagedEventModel = 
handle_kafka_event(json_event, LambdaContext()) - assert event.eventSource == "aws:SelfManagedKafka" - bootstrap_servers = [ - "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - ] - assert event.bootstrapServers == bootstrap_servers - - records = list(event.records["mytopic-0"]) - assert len(records) == 1 - record: KafkaRecordModel = records[0] - assert record.topic == "mytopic" - assert record.partition == 0 - assert record.offset == 15 - assert record.timestamp is not None - convert_time = int(round(record.timestamp.timestamp() * 1000)) - assert convert_time == 1545084650987 - assert record.timestampType == "CREATE_TIME" - assert record.key == b"recordKey" - assert record.value == '{"key":"value"}' - assert len(record.headers) == 1 - assert record.headers[0]["headerKey"] == b"headerValue" - - -@event_parser(model=KafkaMskEventModel) -def handle_msk_event(event: KafkaMskEventModel, _: LambdaContext): - return event - - -def test_kafka_msk_event(): - json_event = load_event("kafkaEventMsk.json") - event: KafkaMskEventModel = handle_msk_event(json_event, LambdaContext()) - assert event.eventSource == "aws:kafka" - bootstrap_servers = [ - "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - ] - assert event.bootstrapServers == bootstrap_servers - assert ( - event.eventSourceArn - == "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" - ) - - records = list(event.records["mytopic-0"]) - assert len(records) == 1 - record: KafkaRecordModel = records[0] - assert record.topic == "mytopic" - assert record.partition == 0 - assert record.offset == 15 - assert record.timestamp is not None - convert_time = int(round(record.timestamp.timestamp() * 1000)) - assert convert_time == 1545084650987 - assert record.timestampType == "CREATE_TIME" - assert record.key == 
b"recordKey" - assert record.value == '{"key":"value"}' - assert len(record.headers) == 1 - assert record.headers[0]["headerKey"] == b"headerValue" diff --git a/tests/unit/parser/test_kafka.py b/tests/unit/parser/test_kafka.py new file mode 100644 index 00000000000..59e2df622d3 --- /dev/null +++ b/tests/unit/parser/test_kafka.py @@ -0,0 +1,92 @@ +from typing import List + +from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser.models import ( + KafkaMskEventModel, + KafkaRecordModel, + KafkaSelfManagedEventModel, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyLambdaKafkaBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyLambdaKafkaBusiness, envelope=envelopes.KafkaEnvelope) +def handle_lambda_kafka_with_envelope(event: List[MyLambdaKafkaBusiness], _: LambdaContext): + return event + + +@event_parser(model=KafkaSelfManagedEventModel) +def handle_kafka_event(event: KafkaSelfManagedEventModel, _: LambdaContext): + return event + + +def test_kafka_msk_event_with_envelope(): + raw_event = load_event("kafkaEventMsk.json") + parsed_event: MyLambdaKafkaBusiness = handle_lambda_kafka_with_envelope(raw_event, LambdaContext()) + + assert parsed_event[0].key == "value" + assert len(parsed_event) == 1 + + +def test_kafka_self_managed_event_with_envelope(): + raw_event = load_event("kafkaEventSelfManaged.json") + parsed_event: MyLambdaKafkaBusiness = handle_lambda_kafka_with_envelope(raw_event, LambdaContext()) + + assert parsed_event[0].key == "value" + assert len(parsed_event) == 1 + + +def test_self_managed_kafka_event(): + raw_event = load_event("kafkaEventSelfManaged.json") + parsed_event: KafkaSelfManagedEventModel = handle_kafka_event(raw_event, LambdaContext()) + + assert parsed_event.eventSource == raw_event["eventSource"] + + assert parsed_event.bootstrapServers == raw_event["bootstrapServers"].split(",") 
+ + records = list(parsed_event.records["mytopic-0"]) + assert len(records) == 1 + record: KafkaRecordModel = records[0] + raw_record = raw_event["records"]["mytopic-0"][0] + assert record.topic == raw_record["topic"] + assert record.partition == raw_record["partition"] + assert record.offset == raw_record["offset"] + assert record.timestamp is not None + convert_time = int(round(record.timestamp.timestamp() * 1000)) + assert convert_time == raw_record["timestamp"] + assert record.timestampType == raw_record["timestampType"] + assert record.key == b"recordKey" + assert record.value == '{"key":"value"}' + assert len(record.headers) == 1 + assert record.headers[0]["headerKey"] == b"headerValue" + + +@event_parser(model=KafkaMskEventModel) +def handle_msk_event(event: KafkaMskEventModel, _: LambdaContext): + return event + + +def test_kafka_msk_event(): + raw_event = load_event("kafkaEventMsk.json") + parsed_event: KafkaMskEventModel = handle_msk_event(raw_event, LambdaContext()) + + assert parsed_event.eventSource == raw_event["eventSource"] + assert parsed_event.bootstrapServers == raw_event["bootstrapServers"].split(",") + assert parsed_event.eventSourceArn == raw_event["eventSourceArn"] + + records = list(parsed_event.records["mytopic-0"]) + assert len(records) == 1 + record: KafkaRecordModel = records[0] + raw_record = raw_event["records"]["mytopic-0"][0] + assert record.topic == raw_record["topic"] + assert record.partition == raw_record["partition"] + assert record.offset == raw_record["offset"] + assert record.timestamp is not None + convert_time = int(round(record.timestamp.timestamp() * 1000)) + assert convert_time == raw_record["timestamp"] + assert record.timestampType == raw_record["timestampType"] + assert record.key == b"recordKey" + assert record.value == '{"key":"value"}' + assert len(record.headers) == 1 + assert record.headers[0]["headerKey"] == b"headerValue" From 264ed990c0069c384a81be6835d9c6b688d5ce9d Mon Sep 17 00:00:00 2001 From: Leandro 
Damascena Date: Tue, 4 Jul 2023 15:39:24 +0100 Subject: [PATCH 10/28] moving + lambda url --- .../parser/test_lambda_function_url.py | 128 ----------------- tests/unit/parser/test_lambda_function_url.py | 134 ++++++++++++++++++ 2 files changed, 134 insertions(+), 128 deletions(-) delete mode 100644 tests/functional/parser/test_lambda_function_url.py create mode 100644 tests/unit/parser/test_lambda_function_url.py diff --git a/tests/functional/parser/test_lambda_function_url.py b/tests/functional/parser/test_lambda_function_url.py deleted file mode 100644 index a63a4e25884..00000000000 --- a/tests/functional/parser/test_lambda_function_url.py +++ /dev/null @@ -1,128 +0,0 @@ -from aws_lambda_powertools.utilities.parser import envelopes, event_parser -from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyALambdaFuncUrlBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyALambdaFuncUrlBusiness, envelope=envelopes.LambdaFunctionUrlEnvelope) -def handle_lambda_func_url_with_envelope(event: MyALambdaFuncUrlBusiness, _: LambdaContext): - assert event.message == "Hello" - assert event.username == "Ran" - - -@event_parser(model=LambdaFunctionUrlModel) -def handle_lambda_func_url_event(event: LambdaFunctionUrlModel, _: LambdaContext): - return event - - -def test_lambda_func_url_event_with_envelope(): - event = load_event("lambdaFunctionUrlEvent.json") - event["body"] = '{"message": "Hello", "username": "Ran"}' - handle_lambda_func_url_with_envelope(event, LambdaContext()) - - -def test_lambda_function_url_event(): - json_event = load_event("lambdaFunctionUrlEvent.json") - event: LambdaFunctionUrlModel = handle_lambda_func_url_event(json_event, LambdaContext()) - - assert event.version == "2.0" - assert event.routeKey == "$default" - - assert event.rawQueryString == "" - - assert event.cookies is None - - 
 headers = event.headers - assert len(headers) == 20 - - assert event.queryStringParameters is None - - assert event.isBase64Encoded is False - assert event.body is None - assert event.pathParameters is None - assert event.stageVariables is None - - request_context = event.requestContext - - assert request_context.accountId == "anonymous" - assert request_context.apiId is not None - assert request_context.domainName == ".lambda-url.us-east-1.on.aws" - assert request_context.domainPrefix == "" - assert request_context.requestId == "id" - assert request_context.routeKey == "$default" - assert request_context.stage == "$default" - assert request_context.time is not None - convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) - assert convert_time == 1659687279885 - assert request_context.authorizer is None - - http = request_context.http - assert http.method == "GET" - assert http.path == "/" - assert http.protocol == "HTTP/1.1" - assert str(http.sourceIp) == "123.123.123.123/32" - assert http.userAgent == "agent" - - assert request_context.authorizer is None - - -def test_lambda_function_url_event_iam(): - json_event = load_event("lambdaFunctionUrlIAMEvent.json") - event: LambdaFunctionUrlModel = handle_lambda_func_url_event(json_event, LambdaContext()) - - assert event.version == "2.0" - assert event.routeKey == "$default" - - assert event.rawQueryString == "parameter1=value1&parameter1=value2&parameter2=value" - - cookies = event.cookies - assert len(cookies) == 2 - assert cookies[0] == "cookie1" - - headers = event.headers - assert len(headers) == 2 - - query_string_parameters = event.queryStringParameters - assert len(query_string_parameters) == 2 - assert query_string_parameters.get("parameter2") == "value" - - assert event.isBase64Encoded is False - assert event.body == "Hello from client!" 
- assert event.pathParameters is None - assert event.stageVariables is None - - request_context = event.requestContext - - assert request_context.accountId == "123456789012" - assert request_context.apiId is not None - assert request_context.domainName == ".lambda-url.us-west-2.on.aws" - assert request_context.domainPrefix == "" - assert request_context.requestId == "id" - assert request_context.routeKey == "$default" - assert request_context.stage == "$default" - assert request_context.time is not None - convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) - assert convert_time == 1583348638390 - - http = request_context.http - assert http.method == "POST" - assert http.path == "/my/path" - assert http.protocol == "HTTP/1.1" - assert str(http.sourceIp) == "123.123.123.123/32" - assert http.userAgent == "agent" - - authorizer = request_context.authorizer - assert authorizer is not None - assert authorizer.jwt is None - assert authorizer.lambda_value is None - - iam = authorizer.iam - assert iam is not None - assert iam.accessKey == "AKIA..." - assert iam.accountId == "111122223333" - assert iam.callerId == "AIDA..." - assert iam.cognitoIdentity is None - assert iam.principalOrgId is None - assert iam.userId == "AIDA..." 
- assert iam.userArn == "arn:aws:iam::111122223333:user/example-user" diff --git a/tests/unit/parser/test_lambda_function_url.py b/tests/unit/parser/test_lambda_function_url.py new file mode 100644 index 00000000000..56022c40c2b --- /dev/null +++ b/tests/unit/parser/test_lambda_function_url.py @@ -0,0 +1,134 @@ +from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.schemas import MyALambdaFuncUrlBusiness +from tests.functional.utils import load_event + + +@event_parser(model=MyALambdaFuncUrlBusiness, envelope=envelopes.LambdaFunctionUrlEnvelope) +def handle_lambda_func_url_with_envelope(event: MyALambdaFuncUrlBusiness, _: LambdaContext): + return event + + +@event_parser(model=LambdaFunctionUrlModel) +def handle_lambda_func_url_event(event: LambdaFunctionUrlModel, _: LambdaContext): + return event + + +def test_lambda_func_url_event_with_envelope(): + raw_event = load_event("lambdaFunctionUrlEvent.json") + raw_event["body"] = '{"message": "Hello", "username": "Ran"}' + + parsed_event: MyALambdaFuncUrlBusiness = handle_lambda_func_url_with_envelope(raw_event, LambdaContext()) + + assert parsed_event.message == "Hello" + assert parsed_event.username == "Ran" + + +def test_lambda_function_url_event(): + raw_event = load_event("lambdaFunctionUrlEvent.json") + parsed_event: LambdaFunctionUrlModel = handle_lambda_func_url_event(raw_event, LambdaContext()) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.routeKey == raw_event["routeKey"] + + assert parsed_event.rawQueryString == raw_event["rawQueryString"] + + assert parsed_event.cookies is None + + headers = parsed_event.headers + assert len(headers) == 20 + + assert parsed_event.queryStringParameters is None + + assert parsed_event.isBase64Encoded is False + assert parsed_event.body is None + 
assert parsed_event.pathParameters is None + assert parsed_event.stageVariables is None + + request_context = parsed_event.requestContext + raw_request_context = raw_event["requestContext"] + + assert request_context.accountId == raw_request_context["accountId"] + assert request_context.apiId == raw_request_context["apiId"] + assert request_context.domainName == raw_request_context["domainName"] + assert request_context.domainPrefix == raw_request_context["domainPrefix"] + assert request_context.requestId == raw_request_context["requestId"] + assert request_context.routeKey == raw_request_context["routeKey"] + assert request_context.stage == raw_request_context["stage"] + assert request_context.time == raw_request_context["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == raw_request_context["timeEpoch"] + assert request_context.authorizer is None + + http = request_context.http + assert http.method == raw_request_context["http"]["method"] + assert http.path == raw_request_context["http"]["path"] + assert http.protocol == raw_request_context["http"]["protocol"] + assert str(http.sourceIp) == "123.123.123.123/32" + assert http.userAgent == raw_request_context["http"]["userAgent"] + + assert request_context.authorizer is None + + +def test_lambda_function_url_event_iam(): + raw_event = load_event("lambdaFunctionUrlIAMEvent.json") + parsed_event: LambdaFunctionUrlModel = handle_lambda_func_url_event(raw_event, LambdaContext()) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.routeKey == raw_event["routeKey"] + + assert parsed_event.rawQueryString == raw_event["rawQueryString"] + + cookies = parsed_event.cookies + assert len(cookies) == 2 + assert cookies[0] == raw_event["cookies"][0] + + headers = parsed_event.headers + assert len(headers) == 2 + + query_string_parameters = parsed_event.queryStringParameters + assert len(query_string_parameters) == 2 + assert 
query_string_parameters.get("parameter2") == raw_event["queryStringParameters"]["parameter2"] + + assert parsed_event.isBase64Encoded is False + assert parsed_event.body == raw_event["body"] + assert parsed_event.pathParameters is None + assert parsed_event.stageVariables is None + + request_context = parsed_event.requestContext + raw_request_context = raw_event["requestContext"] + assert request_context.accountId == raw_request_context["accountId"] + assert request_context.apiId == raw_request_context["apiId"] + assert request_context.domainName == raw_request_context["domainName"] + assert request_context.domainPrefix == raw_request_context["domainPrefix"] + assert request_context.requestId == raw_request_context["requestId"] + assert request_context.routeKey == raw_request_context["routeKey"] + assert request_context.stage == raw_request_context["stage"] + assert request_context.time == raw_request_context["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == raw_request_context["timeEpoch"] + assert request_context.authorizer is not None + + http = request_context.http + assert http.method == raw_request_context["http"]["method"] + assert http.path == raw_request_context["http"]["path"] + assert http.protocol == raw_request_context["http"]["protocol"] + assert str(http.sourceIp) == "123.123.123.123/32" + assert http.userAgent == raw_request_context["http"]["userAgent"] + + authorizer = request_context.authorizer + assert authorizer is not None + assert authorizer.jwt is None + assert authorizer.lambda_value is None + + iam = authorizer.iam + iam_raw = raw_event["requestContext"]["authorizer"]["iam"] + assert iam is not None + assert iam.accessKey == iam_raw["accessKey"] + assert iam.accountId == iam_raw["accountId"] + assert iam.callerId == iam_raw["callerId"] + assert iam.cognitoIdentity is None + assert iam.principalOrgId is None + assert iam.userId == iam_raw["userId"] + assert iam.userArn == 
iam_raw["userArn"] From 5b30d5d77dab4591e6c44c215639bca3b90be89b Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 16:01:22 +0100 Subject: [PATCH 11/28] moving + s3 --- .../parser/test_parser.py | 0 .../parser/test_s3 object_event.py | 19 +++++++++---------- 2 files changed, 9 insertions(+), 10 deletions(-) rename tests/{functional => unit}/parser/test_parser.py (100%) rename tests/{functional => unit}/parser/test_s3 object_event.py (78%) diff --git a/tests/functional/parser/test_parser.py b/tests/unit/parser/test_parser.py similarity index 100% rename from tests/functional/parser/test_parser.py rename to tests/unit/parser/test_parser.py diff --git a/tests/functional/parser/test_s3 object_event.py b/tests/unit/parser/test_s3 object_event.py similarity index 78% rename from tests/functional/parser/test_s3 object_event.py rename to tests/unit/parser/test_s3 object_event.py index 90c2555360d..1ab39b2c6ad 100644 --- a/tests/functional/parser/test_s3 object_event.py +++ b/tests/unit/parser/test_s3 object_event.py @@ -52,16 +52,15 @@ def test_s3_object_event_temp_credentials(): session_context = parsed_event.userIdentity.sessionContext assert session_context is not None session_issuer = session_context.sessionIssuer + session_issuer_raw = event["userIdentity"]["sessionContext"]["sessionIssuer"] assert session_issuer is not None - assert session_issuer.type == event["userIdentity"]["sessionContext"]["sessionIssuer"]["type"] - assert session_issuer.userName == event["userIdentity"]["sessionContext"]["sessionIssuer"]["userName"] - assert session_issuer.principalId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["principalId"] - assert session_issuer.arn == event["userIdentity"]["sessionContext"]["sessionIssuer"]["arn"] - assert session_issuer.accountId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["accountId"] + assert session_issuer.type == session_issuer_raw["type"] + assert session_issuer.userName == 
session_issuer_raw["userName"] + assert session_issuer.principalId == session_issuer_raw["principalId"] + assert session_issuer.arn == session_issuer_raw["arn"] + assert session_issuer.accountId == session_issuer_raw["accountId"] session_attributes = session_context.attributes + session_attributes_raw = event["userIdentity"]["sessionContext"]["attributes"] assert session_attributes is not None - assert ( - str(session_attributes.mfaAuthenticated).lower() - == event["userIdentity"]["sessionContext"]["attributes"]["mfaAuthenticated"] - ) - assert session_attributes.creationDate == event["userIdentity"]["sessionContext"]["attributes"]["creationDate"] + assert str(session_attributes.mfaAuthenticated).lower() == session_attributes_raw["mfaAuthenticated"] + assert session_attributes.creationDate == session_attributes_raw["creationDate"] From 1ef5b019b6e0b1f9b187e775894412edb3ffcd7e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 16:14:14 +0100 Subject: [PATCH 12/28] moving + sns --- tests/{functional => unit}/parser/test_sns.py | 97 +++++++++++-------- 1 file changed, 54 insertions(+), 43 deletions(-) rename tests/{functional => unit}/parser/test_sns.py (56%) diff --git a/tests/functional/parser/test_sns.py b/tests/unit/parser/test_sns.py similarity index 56% rename from tests/functional/parser/test_sns.py rename to tests/unit/parser/test_sns.py index 617de487748..efd3a45bc27 100644 --- a/tests/functional/parser/test_sns.py +++ b/tests/unit/parser/test_sns.py @@ -1,5 +1,5 @@ import json -from typing import Any, List +from typing import List import pytest @@ -16,9 +16,7 @@ @event_parser(model=MySnsBusiness, envelope=envelopes.SnsEnvelope) def handle_sns_json_body(event: List[MySnsBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].message == "hello world" - assert event[0].username == "lessa" + return event def test_handle_sns_trigger_event_json_body(sns_event): # noqa: F811 @@ -26,14 +24,14 @@ def 
test_handle_sns_trigger_event_json_body(sns_event): # noqa: F811 def test_validate_event_does_not_conform_with_model(): - event: Any = {"invalid": "event"} + raw_event: dict = {"invalid": "event"} with pytest.raises(ValidationError): - handle_sns_json_body(event, LambdaContext()) + handle_sns_json_body(raw_event, LambdaContext()) def test_validate_event_does_not_conform_user_json_string_with_model(): - event: Any = { + event: dict = { "Records": [ { "EventVersion": "1.0", @@ -62,52 +60,61 @@ def test_validate_event_does_not_conform_user_json_string_with_model(): @event_parser(model=MyAdvancedSnsBusiness) def handle_sns_no_envelope(event: MyAdvancedSnsBusiness, _: LambdaContext): - records = event.Records + return event + + +def test_handle_sns_trigger_event_no_envelope(): + raw_event = load_event("snsEvent.json") + parsed_event: MyAdvancedSnsBusiness = handle_sns_no_envelope(raw_event, LambdaContext()) + + records = parsed_event.Records record = records[0] + raw_record = raw_event["Records"][0] assert len(records) == 1 - assert record.EventVersion == "1.0" - assert record.EventSubscriptionArn == "arn:aws:sns:us-east-2:123456789012:sns-la ..." 
- assert record.EventSource == "aws:sns" - assert record.Sns.Type == "Notification" - assert record.Sns.UnsubscribeUrl.scheme == "https" - assert record.Sns.UnsubscribeUrl.host == "sns.us-east-2.amazonaws.com" - assert record.Sns.UnsubscribeUrl.query == "Action=Unsubscribe" - assert record.Sns.TopicArn == "arn:aws:sns:us-east-2:123456789012:sns-lambda" - assert record.Sns.Subject == "TestInvoke" - assert record.Sns.SignatureVersion == "1" - convert_time = int(round(record.Sns.Timestamp.timestamp() * 1000)) + assert record.EventVersion == raw_record["EventVersion"] + assert record.EventSubscriptionArn == raw_record["EventSubscriptionArn"] + assert record.EventSource == raw_record["EventSource"] + + sns = record.Sns + raw_sns = raw_record["Sns"] + assert sns.Type == raw_sns["Type"] + assert sns.UnsubscribeUrl.scheme == "https" + assert sns.UnsubscribeUrl.host == "sns.us-east-2.amazonaws.com" + assert sns.UnsubscribeUrl.query == "Action=Unsubscribe" + assert sns.TopicArn == raw_sns["TopicArn"] + assert sns.Subject == raw_sns["Subject"] + assert sns.SignatureVersion == raw_sns["SignatureVersion"] + convert_time = int(round(sns.Timestamp.timestamp() * 1000)) assert convert_time == 1546433107000 - assert record.Sns.Signature == "tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==" - assert record.Sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" - assert record.Sns.SigningCertUrl.scheme == "https" - assert record.Sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" - assert record.Sns.SigningCertUrl.path == "/SimpleNotification" - assert record.Sns.MessageId == "95df01b4-ee98-5cb9-9903-4c221d41eb5e" - assert record.Sns.Message == "Hello from SNS!" 
- attrib_dict = record.Sns.MessageAttributes + assert sns.Signature == raw_sns["Signature"] + assert sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" + assert sns.SigningCertUrl.scheme == "https" + assert sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" + assert sns.SigningCertUrl.path == "/SimpleNotification" + assert sns.MessageId == raw_sns["MessageId"] + assert sns.Message == raw_sns["Message"] + + attrib_dict = sns.MessageAttributes assert len(attrib_dict) == 2 - assert attrib_dict["Test"].Type == "String" - assert attrib_dict["Test"].Value == "TestString" - assert attrib_dict["TestBinary"].Type == "Binary" - assert attrib_dict["TestBinary"].Value == "TestBinary" - - -def test_handle_sns_trigger_event_no_envelope(): - event_dict = load_event("snsEvent.json") - handle_sns_no_envelope(event_dict, LambdaContext()) + assert attrib_dict["Test"].Type == raw_sns["MessageAttributes"]["Test"]["Type"] + assert attrib_dict["Test"].Value == raw_sns["MessageAttributes"]["Test"]["Value"] + assert attrib_dict["TestBinary"].Type == raw_sns["MessageAttributes"]["TestBinary"]["Type"] + assert attrib_dict["TestBinary"].Value == raw_sns["MessageAttributes"]["TestBinary"]["Value"] @event_parser(model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) def handle_sns_sqs_json_body(event: List[MySnsBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].message == "hello world" - assert event[0].username == "lessa" + return event def test_handle_sns_sqs_trigger_event_json_body(): # noqa: F811 - event_dict = load_event("snsSqsEvent.json") - handle_sns_sqs_json_body(event_dict, LambdaContext()) + raw_event = load_event("snsSqsEvent.json") + parsed_event: MySnsBusiness = handle_sns_sqs_json_body(raw_event, LambdaContext()) + + assert len(parsed_event) == 1 + assert parsed_event[0].message == "hello world" + assert parsed_event[0].username == "lessa" def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url(): @@ -124,5 +131,9 @@ def 
test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url(): def test_handle_sns_sqs_fifo_trigger_event_json_body(): - event_dict = load_event("snsSqsFifoEvent.json") - handle_sns_sqs_json_body(event_dict, LambdaContext()) + raw_event = load_event("snsSqsFifoEvent.json") + parsed_event: MySnsBusiness = handle_sns_sqs_json_body(raw_event, LambdaContext()) + + assert len(parsed_event) == 1 + assert parsed_event[0].message == "hello world" + assert parsed_event[0].username == "lessa" From 80f42a809ee0a1ab0978f80aa2d600513f445d08 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 16:20:36 +0100 Subject: [PATCH 13/28] moving + firehose --- .../parser/test_kinesis_firehose.py | 128 ++++++++++-------- 1 file changed, 68 insertions(+), 60 deletions(-) rename tests/{functional => unit}/parser/test_kinesis_firehose.py (61%) diff --git a/tests/functional/parser/test_kinesis_firehose.py b/tests/unit/parser/test_kinesis_firehose.py similarity index 61% rename from tests/functional/parser/test_kinesis_firehose.py rename to tests/unit/parser/test_kinesis_firehose.py index c0b71f80540..18be7e8397f 100644 --- a/tests/functional/parser/test_kinesis_firehose.py +++ b/tests/unit/parser/test_kinesis_firehose.py @@ -21,18 +21,62 @@ @event_parser(model=MyKinesisFirehoseBusiness, envelope=envelopes.KinesisFirehoseEnvelope) def handle_firehose(event: List[MyKinesisFirehoseBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].Hello == "World" + return event @event_parser(model=KinesisFirehoseModel) def handle_firehose_no_envelope_kinesis(event: KinesisFirehoseModel, _: LambdaContext): - assert event.region == "us-east-2" - assert event.invocationId == "2b4d1ad9-2f48-94bd-a088-767c317e994a" - assert event.deliveryStreamArn == "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name" - assert event.sourceKinesisStreamArn == "arn:aws:kinesis:us-east-1:123456789012:stream/kinesis-source" + return event - records = 
list(event.records) + +@event_parser(model=KinesisFirehoseModel) +def handle_firehose_no_envelope_put(event: KinesisFirehoseModel, _: LambdaContext): + return event + + +@event_parser(model=KinesisFirehoseSqsModel) +def handle_firehose_sqs_wrapped_message(event: KinesisFirehoseSqsModel, _: LambdaContext): + return event + + +def test_firehose_sqs_wrapped_message_event(): + raw_event = load_event("kinesisFirehoseSQSEvent.json") + parsed_event: KinesisFirehoseSqsModel = handle_firehose_sqs_wrapped_message(raw_event, LambdaContext()) + + assert parsed_event.region == raw_event["region"] + assert parsed_event.invocationId == raw_event["invocationId"] + assert parsed_event.deliveryStreamArn == raw_event["deliveryStreamArn"] + + records = list(parsed_event.records) + assert len(records) == 1 + + record_01: KinesisFirehoseSqsRecord = records[0] + assert record_01.data.messageId == "5ab807d4-5644-4c55-97a3-47396635ac74" + assert record_01.data.receiptHandle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." + assert record_01.data.body == "Test message." 
+ assert record_01.data.attributes.ApproximateReceiveCount == "1" + assert record_01.data.attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" + + +def test_firehose_trigger_event(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"].pop(0) # remove first item since the payload is bytes and we want to test payload json class + parsed_event: MyKinesisFirehoseBusiness = handle_firehose(raw_event, LambdaContext()) + + assert len(parsed_event) == 1 + assert parsed_event[0].Hello == "World" + + +def test_firehose_trigger_event_kinesis_no_envelope(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + parsed_event: KinesisFirehoseModel = handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) + + assert parsed_event.region == raw_event["region"] + assert parsed_event.invocationId == raw_event["invocationId"] + assert parsed_event.deliveryStreamArn == raw_event["deliveryStreamArn"] + assert parsed_event.sourceKinesisStreamArn == raw_event["sourceKinesisStreamArn"] + + records = list(parsed_event.records) assert len(records) == 2 record_01: KinesisFirehoseRecord = records[0] assert record_01.approximateArrivalTimestamp == 1664028820148 @@ -59,13 +103,15 @@ def handle_firehose_no_envelope_kinesis(event: KinesisFirehoseModel, _: LambdaCo assert metadata_02.sequenceNumber == "49546986683135544286507457936321625675700192471156785155" -@event_parser(model=KinesisFirehoseModel) -def handle_firehose_no_envelope_put(event: KinesisFirehoseModel, _: LambdaContext): - assert event.region == "us-east-2" - assert event.invocationId == "2b4d1ad9-2f48-94bd-a088-767c317e994a" - assert event.deliveryStreamArn == "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name" +def test_firehose_trigger_event_put_no_envelope(): + raw_event = load_event("kinesisFirehosePutEvent.json") + parsed_event: KinesisFirehoseModel = handle_firehose_no_envelope_put(raw_event, LambdaContext()) + + assert parsed_event.region == 
raw_event["region"] + assert parsed_event.invocationId == raw_event["invocationId"] + assert parsed_event.deliveryStreamArn == raw_event["deliveryStreamArn"] - records = list(event.records) + records = list(parsed_event.records) assert len(records) == 2 record_01: KinesisFirehoseRecord = records[0] @@ -79,60 +125,22 @@ def handle_firehose_no_envelope_put(event: KinesisFirehoseModel, _: LambdaContex assert record_02.data == b'{"Hello": "World"}' -@event_parser(model=KinesisFirehoseSqsModel) -def handle_firehose_sqs_wrapped_message(event: KinesisFirehoseSqsModel, _: LambdaContext): - assert event.region == "us-east-1" - assert event.invocationId == "556b67a3-48fc-4385-af49-e133aade9cb9" - assert event.deliveryStreamArn == "arn:aws:firehose:us-east-1:123456789012:deliverystream/PUT-S3-tdyyE" - - records = list(event.records) - assert len(records) == 1 - - record_01: KinesisFirehoseSqsRecord = records[0] - assert record_01.data.messageId == "5ab807d4-5644-4c55-97a3-47396635ac74" - assert record_01.data.receiptHandle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." - assert record_01.data.body == "Test message." 
- assert record_01.data.attributes.ApproximateReceiveCount == "1" - assert record_01.data.attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" - - -def test_firehose_sqs_wrapped_message_event(): - event_dict = load_event("kinesisFirehoseSQSEvent.json") - handle_firehose_sqs_wrapped_message(event_dict, LambdaContext()) - - -def test_firehose_trigger_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"].pop(0) # remove first item since the payload is bytes and we want to test payload json class - handle_firehose(event_dict, LambdaContext()) - - -def test_firehose_trigger_event_kinesis_no_envelope(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) - - -def test_firehose_trigger_event_put_no_envelope(): - event_dict = load_event("kinesisFirehosePutEvent.json") - handle_firehose_no_envelope_put(event_dict, LambdaContext()) - - def test_kinesis_trigger_bad_base64_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"][0]["data"] = {"bad base64"} + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"][0]["data"] = {"bad base64"} with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) + handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) def test_kinesis_trigger_bad_timestamp_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"][0]["approximateArrivalTimestamp"] = -1 + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"][0]["approximateArrivalTimestamp"] = -1 with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) + handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) def test_kinesis_trigger_bad_metadata_timestamp_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - 
event_dict["records"][0]["kinesisRecordMetadata"]["approximateArrivalTimestamp"] = "-1" + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"][0]["kinesisRecordMetadata"]["approximateArrivalTimestamp"] = "-1" with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) + handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) From b097c12100b84b85d8cf329eac29f7bc7f48db1e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 16:39:24 +0100 Subject: [PATCH 14/28] moving + s3 events --- tests/functional/parser/test_s3.py | 155 ----------- tests/unit/parser/test_s3.py | 298 ++++++++++++---------- tests/unit/parser/test_s3_notification.py | 145 +++++++++++ 3 files changed, 309 insertions(+), 289 deletions(-) delete mode 100644 tests/functional/parser/test_s3.py create mode 100644 tests/unit/parser/test_s3_notification.py diff --git a/tests/functional/parser/test_s3.py b/tests/functional/parser/test_s3.py deleted file mode 100644 index 4037790efc5..00000000000 --- a/tests/functional/parser/test_s3.py +++ /dev/null @@ -1,155 +0,0 @@ -import pytest - -from aws_lambda_powertools.utilities.parser import ValidationError, event_parser, parse -from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.utils import load_event - - -def assert_s3(event: S3Model): - records = list(event.Records) - assert len(records) == 1 - record: S3RecordModel = records[0] - assert record.eventVersion == "2.1" - assert record.eventSource == "aws:s3" - assert record.awsRegion == "us-east-2" - convert_time = int(round(record.eventTime.timestamp() * 1000)) - assert convert_time == 1567539447192 - assert record.eventName == "ObjectCreated:Put" - user_identity = record.userIdentity - assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2" - request_parameters = record.requestParameters - 
assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" - assert record.responseElements.x_amz_request_id == "D82B88E5F771F645" - assert ( - record.responseElements.x_amz_id_2 - == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" - ) - s3 = record.s3 - assert s3.s3SchemaVersion == "1.0" - assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1" - bucket = s3.bucket - assert bucket.name == "lambda-artifacts-deafc19498e3f2df" - assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E" - assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" - assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.size == 1305107 - assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.versionId is None - assert s3.object.sequencer == "0C0F6F405D6ED209E1" - assert record.glacierEventData is None - - -@event_parser(model=S3Model) -def handle_s3(event: S3Model, _: LambdaContext): - assert_s3(event) - - -@event_parser(model=S3Model) -def handle_s3_glacier(event: S3Model, _: LambdaContext): - records = list(event.Records) - assert len(records) == 1 - record: S3RecordModel = records[0] - assert record.eventVersion == "2.1" - assert record.eventSource == "aws:s3" - assert record.awsRegion == "us-east-2" - convert_time = int(round(record.eventTime.timestamp() * 1000)) - assert convert_time == 1567539447192 - assert record.eventName == "ObjectCreated:Put" - user_identity = record.userIdentity - assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2" - request_parameters = record.requestParameters - assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" - assert record.responseElements.x_amz_request_id == "D82B88E5F771F645" - assert ( - record.responseElements.x_amz_id_2 - == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" - ) - s3 = record.s3 - assert s3.s3SchemaVersion == "1.0" - assert s3.configurationId == 
"828aa6fc-f7b5-4305-8584-487c791949c1" - bucket = s3.bucket - assert bucket.name == "lambda-artifacts-deafc19498e3f2df" - assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E" - assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" - assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.size == 1305107 - assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.versionId is None - assert s3.object.sequencer == "0C0F6F405D6ED209E1" - assert record.glacierEventData is not None - convert_time = int( - round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000), - ) - assert convert_time == 60000 - assert record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass == "standard" - - -def test_s3_trigger_event(): - event_dict = load_event("s3Event.json") - handle_s3(event_dict, LambdaContext()) - - -def test_s3_glacier_trigger_event(): - event_dict = load_event("s3EventGlacier.json") - handle_s3_glacier(event_dict, LambdaContext()) - - -def test_s3_empty_object(): - event_dict = load_event("s3Event.json") - event_dict["Records"][0]["s3"]["object"]["size"] = 0 - parse(event=event_dict, model=S3Model) - - -def test_s3_none_object_size_failed_validation(): - event_dict = load_event("s3Event.json") - event_dict["Records"][0]["s3"]["object"]["size"] = None - with pytest.raises(ValidationError): - parse(event=event_dict, model=S3Model) - - -def test_s3_none_etag_value_failed_validation(): - event_dict = load_event("s3Event.json") - event_dict["Records"][0]["s3"]["object"]["eTag"] = None - with pytest.raises(ValidationError): - parse(event=event_dict, model=S3Model) - - -@event_parser(model=S3Model) -def handle_s3_delete_object(event: S3Model, _: LambdaContext): - records = list(event.Records) - assert len(records) == 1 - record: S3RecordModel = records[0] - assert record.eventVersion == "2.1" - assert record.eventSource == "aws:s3" - assert record.awsRegion == 
"us-east-2" - convert_time = int(round(record.eventTime.timestamp() * 1000)) - assert convert_time == 1567539447192 - assert record.eventName == "ObjectRemoved:Delete" - user_identity = record.userIdentity - assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2" - request_parameters = record.requestParameters - assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" - assert record.responseElements.x_amz_request_id == "D82B88E5F771F645" - assert ( - record.responseElements.x_amz_id_2 - == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" - ) - s3 = record.s3 - assert s3.s3SchemaVersion == "1.0" - assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1" - bucket = s3.bucket - assert bucket.name == "lambda-artifacts-deafc19498e3f2df" - assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E" - assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" - assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.size is None - assert s3.object.eTag is None - assert s3.object.versionId is None - assert s3.object.sequencer == "0C0F6F405D6ED209E1" - assert record.glacierEventData is None - - -def test_s3_trigger_event_delete_object(): - event_dict = load_event("s3EventDeleteObject.json") - handle_s3_delete_object(event_dict, LambdaContext()) diff --git a/tests/unit/parser/test_s3.py b/tests/unit/parser/test_s3.py index c77c70095a3..65c7e73ac06 100644 --- a/tests/unit/parser/test_s3.py +++ b/tests/unit/parser/test_s3.py @@ -1,145 +1,175 @@ -import json -from datetime import datetime - import pytest -from aws_lambda_powertools.utilities.parser import ValidationError -from aws_lambda_powertools.utilities.parser.models import ( - S3EventNotificationEventBridgeModel, - S3SqsEventNotificationModel, -) +from aws_lambda_powertools.utilities.parser import ValidationError, event_parser, parse +from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel +from 
aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event -def test_s3_eventbridge_notification_object_created_event(): - raw_event = load_event("s3EventBridgeNotificationObjectCreatedEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] - assert model.detail.reason == raw_event["detail"]["reason"] - - -def test_s3_eventbridge_notification_object_deleted_event(): - raw_event = load_event("s3EventBridgeNotificationObjectDeletedEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region 
== raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] - assert model.detail.reason == raw_event["detail"]["reason"] - assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] - - -def test_s3_eventbridge_notification_object_expired_event(): - raw_event = load_event("s3EventBridgeNotificationObjectExpiredEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == 
raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.reason == raw_event["detail"]["reason"] - assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] - - -def test_s3_eventbridge_notification_object_restore_completed_event(): - raw_event = load_event("s3EventBridgeNotificationObjectRestoreCompletedEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.restore_expiry_time == raw_event["detail"]["restore-expiry-time"] - assert model.detail.source_storage_class == raw_event["detail"]["source-storage-class"] - - -def test_s3_sqs_event_notification(): - raw_event = load_event("s3SqsEvent.json") - model = S3SqsEventNotificationModel(**raw_event) - - body = json.loads(raw_event["Records"][0]["body"]) - - assert model.Records[0].body.Records[0].eventVersion == body["Records"][0]["eventVersion"] - assert model.Records[0].body.Records[0].eventSource == 
body["Records"][0]["eventSource"] - assert model.Records[0].body.Records[0].eventTime == datetime.fromisoformat( - body["Records"][0]["eventTime"].replace("Z", "+00:00"), +@event_parser(model=S3Model) +def handle_s3(event: S3Model, _: LambdaContext): + return event + + +@event_parser(model=S3Model) +def handle_s3_delete_object(event: S3Model, _: LambdaContext): + return event + + +@event_parser(model=S3Model) +def handle_s3_glacier(event: S3Model, _: LambdaContext): + return event + + +def test_s3_trigger_event(): + raw_event = load_event("s3Event.json") + parsed_event: S3Model = handle_s3(raw_event, LambdaContext()) + + records = list(parsed_event.Records) + assert len(records) == 1 + + record: S3RecordModel = records[0] + raw_record = raw_event["Records"][0] + assert record.eventVersion == raw_record["eventVersion"] + assert record.eventSource == raw_record["eventSource"] + assert record.awsRegion == raw_record["awsRegion"] + convert_time = int(round(record.eventTime.timestamp() * 1000)) + assert convert_time == 1567539447192 + assert record.eventName == raw_record["eventName"] + assert record.glacierEventData is None + + user_identity = record.userIdentity + assert user_identity.principalId == raw_record["userIdentity"]["principalId"] + + request_parameters = record.requestParameters + assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" + assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"] + assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"] + + s3 = record.s3 + raw_s3 = raw_event["Records"][0]["s3"] + assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"] + assert s3.configurationId == raw_record["s3"]["configurationId"] + assert s3.object.key == raw_s3["object"]["key"] + assert s3.object.size == raw_s3["object"]["size"] + assert s3.object.eTag == raw_s3["object"]["eTag"] + assert s3.object.versionId is None + assert s3.object.sequencer == 
raw_s3["object"]["sequencer"] + + bucket = s3.bucket + raw_bucket = raw_record["s3"]["bucket"] + assert bucket.name == raw_bucket["name"] + assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"] + assert bucket.arn == raw_bucket["arn"] + + +def test_s3_glacier_trigger_event(): + raw_event = load_event("s3EventGlacier.json") + parsed_event: S3Model = handle_s3_glacier(raw_event, LambdaContext()) + + records = list(parsed_event.Records) + assert len(records) == 1 + + record: S3RecordModel = records[0] + raw_record = raw_event["Records"][0] + assert record.eventVersion == raw_record["eventVersion"] + assert record.eventSource == raw_record["eventSource"] + assert record.awsRegion == raw_record["awsRegion"] + convert_time = int(round(record.eventTime.timestamp() * 1000)) + assert convert_time == 1567539447192 + assert record.eventName == raw_record["eventName"] + assert record.glacierEventData is not None + convert_time = int( + round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000), + ) + assert convert_time == 60000 + assert ( + record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass + == raw_record["glacierEventData"]["restoreEventData"]["lifecycleRestoreStorageClass"] ) - assert model.Records[0].body.Records[0].eventName == body["Records"][0]["eventName"] - - -def test_s3_sqs_event_notification_body_invalid_json(): - raw_event = load_event("s3SqsEvent.json") - - for record in raw_event["Records"]: - record["body"] = "invalid body" + user_identity = record.userIdentity + assert user_identity.principalId == raw_record["userIdentity"]["principalId"] + + request_parameters = record.requestParameters + assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" + assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"] + assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"] + + s3 = record.s3 
+ raw_s3 = raw_event["Records"][0]["s3"] + assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"] + assert s3.configurationId == raw_record["s3"]["configurationId"] + assert s3.object.key == raw_s3["object"]["key"] + assert s3.object.size == raw_s3["object"]["size"] + assert s3.object.eTag == raw_s3["object"]["eTag"] + assert s3.object.versionId is None + assert s3.object.sequencer == raw_s3["object"]["sequencer"] + + bucket = s3.bucket + raw_bucket = raw_record["s3"]["bucket"] + assert bucket.name == raw_bucket["name"] + assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"] + assert bucket.arn == raw_bucket["arn"] + + +def test_s3_trigger_event_delete_object(): + raw_event = load_event("s3EventDeleteObject.json") + parsed_event: S3Model = handle_s3_delete_object(raw_event, LambdaContext()) + + records = list(parsed_event.Records) + assert len(records) == 1 + + record: S3RecordModel = records[0] + raw_record = raw_event["Records"][0] + assert record.eventVersion == raw_record["eventVersion"] + assert record.eventSource == raw_record["eventSource"] + assert record.awsRegion == raw_record["awsRegion"] + convert_time = int(round(record.eventTime.timestamp() * 1000)) + assert convert_time == 1567539447192 + assert record.eventName == raw_record["eventName"] + assert record.glacierEventData is None + + user_identity = record.userIdentity + assert user_identity.principalId == raw_record["userIdentity"]["principalId"] + + request_parameters = record.requestParameters + assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" + assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"] + assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"] + + s3 = record.s3 + raw_s3 = raw_event["Records"][0]["s3"] + assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"] + assert s3.configurationId == raw_record["s3"]["configurationId"] + assert 
s3.object.key == raw_s3["object"]["key"] + assert s3.object.size is None + assert s3.object.eTag is None + assert s3.object.versionId is None + assert s3.object.sequencer == raw_s3["object"]["sequencer"] + + bucket = s3.bucket + raw_bucket = raw_record["s3"]["bucket"] + assert bucket.name == raw_bucket["name"] + assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"] + assert bucket.arn == raw_bucket["arn"] + + +def test_s3_empty_object(): + raw_event = load_event("s3Event.json") + raw_event["Records"][0]["s3"]["object"]["size"] = 0 + parse(event=raw_event, model=S3Model) + + +def test_s3_none_object_size_failed_validation(): + raw_event = load_event("s3Event.json") + raw_event["Records"][0]["s3"]["object"]["size"] = None with pytest.raises(ValidationError): - S3SqsEventNotificationModel(**raw_event) - + parse(event=raw_event, model=S3Model) -def test_s3_sqs_event_notification_body_containing_arbitrary_json(): - raw_event = load_event("s3SqsEvent.json") - for record in raw_event["Records"]: - record["body"] = {"foo": "bar"} +def test_s3_none_etag_value_failed_validation(): + raw_event = load_event("s3Event.json") + raw_event["Records"][0]["s3"]["object"]["eTag"] = None with pytest.raises(ValidationError): - S3SqsEventNotificationModel(**raw_event) + parse(event=raw_event, model=S3Model) diff --git a/tests/unit/parser/test_s3_notification.py b/tests/unit/parser/test_s3_notification.py new file mode 100644 index 00000000000..c77c70095a3 --- /dev/null +++ b/tests/unit/parser/test_s3_notification.py @@ -0,0 +1,145 @@ +import json +from datetime import datetime + +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError +from aws_lambda_powertools.utilities.parser.models import ( + S3EventNotificationEventBridgeModel, + S3SqsEventNotificationModel, +) +from tests.functional.utils import load_event + + +def test_s3_eventbridge_notification_object_created_event(): + raw_event = 
load_event("s3EventBridgeNotificationObjectCreatedEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] + assert model.detail.reason == raw_event["detail"]["reason"] + + +def test_s3_eventbridge_notification_object_deleted_event(): + raw_event = load_event("s3EventBridgeNotificationObjectDeletedEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == 
raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] + assert model.detail.reason == raw_event["detail"]["reason"] + assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] + + +def test_s3_eventbridge_notification_object_expired_event(): + raw_event = load_event("s3EventBridgeNotificationObjectExpiredEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.reason == raw_event["detail"]["reason"] + assert 
model.detail.deletion_type == raw_event["detail"]["deletion-type"] + + +def test_s3_eventbridge_notification_object_restore_completed_event(): + raw_event = load_event("s3EventBridgeNotificationObjectRestoreCompletedEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.restore_expiry_time == raw_event["detail"]["restore-expiry-time"] + assert model.detail.source_storage_class == raw_event["detail"]["source-storage-class"] + + +def test_s3_sqs_event_notification(): + raw_event = load_event("s3SqsEvent.json") + model = S3SqsEventNotificationModel(**raw_event) + + body = json.loads(raw_event["Records"][0]["body"]) + + assert model.Records[0].body.Records[0].eventVersion == body["Records"][0]["eventVersion"] + assert model.Records[0].body.Records[0].eventSource == body["Records"][0]["eventSource"] + assert model.Records[0].body.Records[0].eventTime == datetime.fromisoformat( + body["Records"][0]["eventTime"].replace("Z", "+00:00"), + ) + assert 
model.Records[0].body.Records[0].eventName == body["Records"][0]["eventName"] + + +def test_s3_sqs_event_notification_body_invalid_json(): + raw_event = load_event("s3SqsEvent.json") + + for record in raw_event["Records"]: + record["body"] = "invalid body" + + with pytest.raises(ValidationError): + S3SqsEventNotificationModel(**raw_event) + + +def test_s3_sqs_event_notification_body_containing_arbitrary_json(): + raw_event = load_event("s3SqsEvent.json") + for record in raw_event["Records"]: + record["body"] = {"foo": "bar"} + + with pytest.raises(ValidationError): + S3SqsEventNotificationModel(**raw_event) From f76115bb8d7bb0308146fc603ff20de3c55dabfc Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 16:53:13 +0100 Subject: [PATCH 15/28] moving + kinesis + refactor --- tests/functional/parser/__init__.py | 0 tests/functional/parser/conftest.py | 43 ----- tests/functional/parser/schemas.py | 106 ------------ tests/functional/parser/test_kinesis.py | 153 ------------------ tests/unit/parser/test_apigw.py | 2 +- tests/unit/parser/test_apigwv2.py | 2 +- tests/unit/parser/test_cloudwatch.py | 2 +- tests/unit/parser/test_dynamodb.py | 2 +- tests/unit/parser/test_eventbridge.py | 4 +- tests/unit/parser/test_kafka.py | 2 +- tests/unit/parser/test_kinesis.py | 115 +++++++++++++ tests/unit/parser/test_kinesis_firehose.py | 2 +- tests/unit/parser/test_lambda_function_url.py | 2 +- tests/unit/parser/test_sns.py | 2 +- tests/unit/parser/test_sqs.py | 2 +- tests/unit/parser/test_vpc_lattice.py | 2 +- 16 files changed, 127 insertions(+), 314 deletions(-) delete mode 100644 tests/functional/parser/__init__.py delete mode 100644 tests/functional/parser/conftest.py delete mode 100644 tests/functional/parser/schemas.py delete mode 100644 tests/functional/parser/test_kinesis.py create mode 100644 tests/unit/parser/test_kinesis.py diff --git a/tests/functional/parser/__init__.py b/tests/functional/parser/__init__.py deleted file mode 100644 index 
e69de29bb2d..00000000000 diff --git a/tests/functional/parser/conftest.py b/tests/functional/parser/conftest.py deleted file mode 100644 index 34199a322b2..00000000000 --- a/tests/functional/parser/conftest.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import Any, Dict - -import pytest -from pydantic import BaseModel - -from aws_lambda_powertools.utilities.parser import BaseEnvelope - - -@pytest.fixture -def dummy_event(): - return {"payload": {"message": "hello world"}} - - -@pytest.fixture -def dummy_schema(): - """Wanted payload structure""" - - class MyDummyModel(BaseModel): - message: str - - return MyDummyModel - - -@pytest.fixture -def dummy_envelope_schema(): - """Event wrapper structure""" - - class MyDummyEnvelopeSchema(BaseModel): - payload: Dict - - return MyDummyEnvelopeSchema - - -@pytest.fixture -def dummy_envelope(dummy_envelope_schema): - class MyDummyEnvelope(BaseEnvelope): - """Unwrap dummy event within payload key""" - - def parse(self, data: Dict[str, Any], model: BaseModel): - parsed_enveloped = dummy_envelope_schema(**data) - return self._parse(data=parsed_enveloped.payload, model=model) - - return MyDummyEnvelope diff --git a/tests/functional/parser/schemas.py b/tests/functional/parser/schemas.py deleted file mode 100644 index 1da0213ff45..00000000000 --- a/tests/functional/parser/schemas.py +++ /dev/null @@ -1,106 +0,0 @@ -from typing import Dict, List, Optional - -from pydantic import BaseModel - -from aws_lambda_powertools.utilities.parser.models import ( - DynamoDBStreamChangedRecordModel, - DynamoDBStreamModel, - DynamoDBStreamRecordModel, - EventBridgeModel, - SnsModel, - SnsNotificationModel, - SnsRecordModel, - SqsModel, - SqsRecordModel, -) -from aws_lambda_powertools.utilities.parser.types import Literal - - -class MyDynamoBusiness(BaseModel): - Message: Dict[Literal["S"], str] - Id: Dict[Literal["N"], int] - - -class MyDynamoScheme(DynamoDBStreamChangedRecordModel): - NewImage: Optional[MyDynamoBusiness] - OldImage: 
Optional[MyDynamoBusiness] - - -class MyDynamoDBStreamRecordModel(DynamoDBStreamRecordModel): - dynamodb: MyDynamoScheme - - -class MyAdvancedDynamoBusiness(DynamoDBStreamModel): - Records: List[MyDynamoDBStreamRecordModel] - - -class MyEventbridgeBusiness(BaseModel): - instance_id: str - state: str - - -class MyAdvancedEventbridgeBusiness(EventBridgeModel): - detail: MyEventbridgeBusiness - - -class MySqsBusiness(BaseModel): - message: str - username: str - - -class MyAdvancedSqsRecordModel(SqsRecordModel): - body: str - - -class MyAdvancedSqsBusiness(SqsModel): - Records: List[MyAdvancedSqsRecordModel] - - -class MySnsBusiness(BaseModel): - message: str - username: str - - -class MySnsNotificationModel(SnsNotificationModel): - Message: str - - -class MyAdvancedSnsRecordModel(SnsRecordModel): - Sns: MySnsNotificationModel - - -class MyAdvancedSnsBusiness(SnsModel): - Records: List[MyAdvancedSnsRecordModel] - - -class MyKinesisBusiness(BaseModel): - message: str - username: str - - -class MyCloudWatchBusiness(BaseModel): - my_message: str - user: str - - -class MyApiGatewayBusiness(BaseModel): - message: str - username: str - - -class MyALambdaFuncUrlBusiness(BaseModel): - message: str - username: str - - -class MyLambdaKafkaBusiness(BaseModel): - key: str - - -class MyKinesisFirehoseBusiness(BaseModel): - Hello: str - - -class MyVpcLatticeBusiness(BaseModel): - username: str - name: str diff --git a/tests/functional/parser/test_kinesis.py b/tests/functional/parser/test_kinesis.py deleted file mode 100644 index 151102c34c8..00000000000 --- a/tests/functional/parser/test_kinesis.py +++ /dev/null @@ -1,153 +0,0 @@ -from typing import Any, List - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - BaseModel, - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.parser.models import ( - KinesisDataStreamModel, - KinesisDataStreamRecordPayload, -) -from aws_lambda_powertools.utilities.parser.models.cloudwatch 
import ( - CloudWatchLogsDecode, -) -from aws_lambda_powertools.utilities.parser.models.kinesis import ( - extract_cloudwatch_logs_from_event, - extract_cloudwatch_logs_from_record, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyKinesisBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope) -def handle_kinesis(event: List[MyKinesisBusiness], _: LambdaContext): - assert len(event) == 1 - record: KinesisDataStreamModel = event[0] - assert record.message == "test message" - assert record.username == "test" - - -@event_parser(model=KinesisDataStreamModel) -def handle_kinesis_no_envelope(event: KinesisDataStreamModel, _: LambdaContext): - records = event.Records - assert len(records) == 2 - record: KinesisDataStreamModel = records[0] - - assert record.awsRegion == "us-east-2" - assert record.eventID == "shardId-000000000006:49590338271490256608559692538361571095921575989136588898" - assert record.eventName == "aws:kinesis:record" - assert record.eventSource == "aws:kinesis" - assert record.eventSourceARN == "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" - assert record.eventVersion == "1.0" - assert record.invokeIdentityArn == "arn:aws:iam::123456789012:role/lambda-role" - - kinesis: KinesisDataStreamRecordPayload = record.kinesis - assert kinesis.approximateArrivalTimestamp == 1545084650.987 - assert kinesis.kinesisSchemaVersion == "1.0" - assert kinesis.partitionKey == "1" - assert kinesis.sequenceNumber == "49590338271490256608559692538361571095921575989136588898" - assert kinesis.data == b"Hello, this is a test." 
- - -def test_kinesis_trigger_event(): - event_dict = { - "Records": [ - { - "kinesis": { - "kinesisSchemaVersion": "1.0", - "partitionKey": "1", - "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", - "data": "eyJtZXNzYWdlIjogInRlc3QgbWVzc2FnZSIsICJ1c2VybmFtZSI6ICJ0ZXN0In0=", - "approximateArrivalTimestamp": 1545084650.987, - }, - "eventSource": "aws:kinesis", - "eventVersion": "1.0", - "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", - "eventName": "aws:kinesis:record", - "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", - "awsRegion": "us-east-2", - "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream", - }, - ], - } - - handle_kinesis(event_dict, LambdaContext()) - - -def test_kinesis_trigger_bad_base64_event(): - event_dict = { - "Records": [ - { - "kinesis": { - "kinesisSchemaVersion": "1.0", - "partitionKey": "1", - "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", - "data": "bad", - "approximateArrivalTimestamp": 1545084650.987, - }, - "eventSource": "aws:kinesis", - "eventVersion": "1.0", - "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", - "eventName": "aws:kinesis:record", - "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", - "awsRegion": "us-east-2", - "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream", - }, - ], - } - with pytest.raises(ValidationError): - handle_kinesis_no_envelope(event_dict, LambdaContext()) - - -def test_kinesis_trigger_event_no_envelope(): - event_dict = load_event("kinesisStreamEvent.json") - handle_kinesis_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model_no_envelope(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_kinesis_no_envelope(event_dict, LambdaContext()) - - -def 
test_validate_event_does_not_conform_with_model(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_kinesis(event_dict, LambdaContext()) - - -def test_kinesis_stream_event_cloudwatch_logs_data_extraction(): - # GIVEN a KinesisDataStreamModel is instantiated with CloudWatch Logs compressed data - event_dict = load_event("kinesisStreamCloudWatchLogsEvent.json") - stream_data = KinesisDataStreamModel(**event_dict) - single_record = stream_data.Records[0] - - # WHEN we try to extract CloudWatch Logs from KinesisDataStreamRecordPayload model - extracted_logs = extract_cloudwatch_logs_from_event(stream_data) - individual_logs = [extract_cloudwatch_logs_from_record(record) for record in stream_data.Records] - single_log = extract_cloudwatch_logs_from_record(single_record) - - # THEN we should have extracted any potential logs as CloudWatchLogsDecode models - assert len(extracted_logs) == len(individual_logs) - assert isinstance(single_log, CloudWatchLogsDecode) - - -def test_kinesis_stream_event_cloudwatch_logs_data_extraction_fails_with_custom_model(): - # GIVEN a custom model replaces Kinesis Record Data bytes - class DummyModel(BaseModel): - ... 
- - event_dict = load_event("kinesisStreamCloudWatchLogsEvent.json") - stream_data = KinesisDataStreamModel(**event_dict) - - # WHEN decompress_zlib_record_data_as_json is used - # THEN ValueError should be raised - with pytest.raises(ValueError, match="We can only decompress bytes data"): - for record in stream_data.Records: - record.kinesis.data = DummyModel() - record.decompress_zlib_record_data_as_json() diff --git a/tests/unit/parser/test_apigw.py b/tests/unit/parser/test_apigw.py index f31d8cc2e32..beaa86e5f6d 100644 --- a/tests/unit/parser/test_apigw.py +++ b/tests/unit/parser/test_apigw.py @@ -4,8 +4,8 @@ from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventModel from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyApiGatewayBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyApiGatewayBusiness @event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayEnvelope) diff --git a/tests/unit/parser/test_apigwv2.py b/tests/unit/parser/test_apigwv2.py index 5a0efc1ff16..c176a54b82b 100644 --- a/tests/unit/parser/test_apigwv2.py +++ b/tests/unit/parser/test_apigwv2.py @@ -5,8 +5,8 @@ RequestContextV2Authorizer, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyApiGatewayBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyApiGatewayBusiness @event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py index fb0988368f5..47c0312bf20 100644 --- a/tests/unit/parser/test_cloudwatch.py +++ b/tests/unit/parser/test_cloudwatch.py @@ -15,8 +15,8 @@ CloudWatchLogsModel, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from 
tests.functional.parser.schemas import MyCloudWatchBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyCloudWatchBusiness def decode_cloudwatch_raw_event(event: dict): diff --git a/tests/unit/parser/test_dynamodb.py b/tests/unit/parser/test_dynamodb.py index 45d153a9c8f..bf838e38cd5 100644 --- a/tests/unit/parser/test_dynamodb.py +++ b/tests/unit/parser/test_dynamodb.py @@ -8,8 +8,8 @@ event_parser, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness @event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) diff --git a/tests/unit/parser/test_eventbridge.py b/tests/unit/parser/test_eventbridge.py index d485deeb4e7..0cd522f02a7 100644 --- a/tests/unit/parser/test_eventbridge.py +++ b/tests/unit/parser/test_eventbridge.py @@ -6,11 +6,11 @@ event_parser, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import ( +from tests.functional.utils import load_event +from tests.unit.parser.schemas import ( MyAdvancedEventbridgeBusiness, MyEventbridgeBusiness, ) -from tests.functional.utils import load_event @event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) diff --git a/tests/unit/parser/test_kafka.py b/tests/unit/parser/test_kafka.py index 59e2df622d3..fa6493abd45 100644 --- a/tests/unit/parser/test_kafka.py +++ b/tests/unit/parser/test_kafka.py @@ -7,8 +7,8 @@ KafkaSelfManagedEventModel, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyLambdaKafkaBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyLambdaKafkaBusiness @event_parser(model=MyLambdaKafkaBusiness, 
envelope=envelopes.KafkaEnvelope) diff --git a/tests/unit/parser/test_kinesis.py b/tests/unit/parser/test_kinesis.py new file mode 100644 index 00000000000..d06d2f081d0 --- /dev/null +++ b/tests/unit/parser/test_kinesis.py @@ -0,0 +1,115 @@ +from typing import List + +import pytest + +from aws_lambda_powertools.utilities.parser import ( + BaseModel, + ValidationError, + envelopes, + event_parser, +) +from aws_lambda_powertools.utilities.parser.models import ( + KinesisDataStreamModel, + KinesisDataStreamRecordPayload, +) +from aws_lambda_powertools.utilities.parser.models.cloudwatch import ( + CloudWatchLogsDecode, +) +from aws_lambda_powertools.utilities.parser.models.kinesis import ( + extract_cloudwatch_logs_from_event, + extract_cloudwatch_logs_from_record, +) +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyKinesisBusiness + + +@event_parser(model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope) +def handle_kinesis(event: List[MyKinesisBusiness], _: LambdaContext): + assert len(event) == 1 + record: KinesisDataStreamModel = event[0] + assert record.message == "test message" + assert record.username == "test" + + +@event_parser(model=KinesisDataStreamModel) +def handle_kinesis_no_envelope(event: KinesisDataStreamModel, _: LambdaContext): + return event + + +def test_kinesis_trigger_bad_base64_event(): + raw_event = load_event("kinesisStreamEvent.json") + + raw_event["Records"][0]["kinesis"]["data"] = "bad" + + with pytest.raises(ValidationError): + handle_kinesis_no_envelope(raw_event, LambdaContext()) + + +def test_kinesis_trigger_event_no_envelope(): + raw_event = load_event("kinesisStreamEvent.json") + parsed_event: KinesisDataStreamModel = handle_kinesis_no_envelope(raw_event, LambdaContext()) + + records = parsed_event.Records + assert len(records) == 2 + record: KinesisDataStreamModel = records[0] + raw_record = raw_event["Records"][0] 
+ + assert record.awsRegion == raw_record["awsRegion"] + assert record.eventID == raw_record["eventID"] + assert record.eventName == raw_record["eventName"] + assert record.eventSource == raw_record["eventSource"] + assert record.eventSourceARN == raw_record["eventSourceARN"] + assert record.eventVersion == raw_record["eventVersion"] + assert record.invokeIdentityArn == raw_record["invokeIdentityArn"] + + kinesis: KinesisDataStreamRecordPayload = record.kinesis + assert kinesis.approximateArrivalTimestamp == raw_record["kinesis"]["approximateArrivalTimestamp"] + assert kinesis.kinesisSchemaVersion == raw_record["kinesis"]["kinesisSchemaVersion"] + assert kinesis.partitionKey == raw_record["kinesis"]["partitionKey"] + assert kinesis.sequenceNumber == raw_record["kinesis"]["sequenceNumber"] + assert kinesis.data == b"Hello, this is a test." + + +def test_validate_event_does_not_conform_with_model_no_envelope(): + raw_event: dict = {"hello": "s"} + with pytest.raises(ValidationError): + handle_kinesis_no_envelope(raw_event, LambdaContext()) + + +def test_validate_event_does_not_conform_with_model(): + raw_event: dict = {"hello": "s"} + with pytest.raises(ValidationError): + handle_kinesis(raw_event, LambdaContext()) + + +def test_kinesis_stream_event_cloudwatch_logs_data_extraction(): + # GIVEN a KinesisDataStreamModel is instantiated with CloudWatch Logs compressed data + raw_event = load_event("kinesisStreamCloudWatchLogsEvent.json") + stream_data = KinesisDataStreamModel(**raw_event) + single_record = stream_data.Records[0] + + # WHEN we try to extract CloudWatch Logs from KinesisDataStreamRecordPayload model + extracted_logs = extract_cloudwatch_logs_from_event(stream_data) + individual_logs = [extract_cloudwatch_logs_from_record(record) for record in stream_data.Records] + single_log = extract_cloudwatch_logs_from_record(single_record) + + # THEN we should have extracted any potential logs as CloudWatchLogsDecode models + assert len(extracted_logs) == 
len(individual_logs) + assert isinstance(single_log, CloudWatchLogsDecode) + + +def test_kinesis_stream_event_cloudwatch_logs_data_extraction_fails_with_custom_model(): + # GIVEN a custom model replaces Kinesis Record Data bytes + class DummyModel(BaseModel): + ... + + raw_event = load_event("kinesisStreamCloudWatchLogsEvent.json") + stream_data = KinesisDataStreamModel(**raw_event) + + # WHEN decompress_zlib_record_data_as_json is used + # THEN ValueError should be raised + with pytest.raises(ValueError, match="We can only decompress bytes data"): + for record in stream_data.Records: + record.kinesis.data = DummyModel() + record.decompress_zlib_record_data_as_json() diff --git a/tests/unit/parser/test_kinesis_firehose.py b/tests/unit/parser/test_kinesis_firehose.py index 18be7e8397f..c400d3260da 100644 --- a/tests/unit/parser/test_kinesis_firehose.py +++ b/tests/unit/parser/test_kinesis_firehose.py @@ -15,8 +15,8 @@ KinesisFirehoseSqsRecord, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyKinesisFirehoseBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyKinesisFirehoseBusiness @event_parser(model=MyKinesisFirehoseBusiness, envelope=envelopes.KinesisFirehoseEnvelope) diff --git a/tests/unit/parser/test_lambda_function_url.py b/tests/unit/parser/test_lambda_function_url.py index 56022c40c2b..a98a6ff00bc 100644 --- a/tests/unit/parser/test_lambda_function_url.py +++ b/tests/unit/parser/test_lambda_function_url.py @@ -1,8 +1,8 @@ from aws_lambda_powertools.utilities.parser import envelopes, event_parser from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyALambdaFuncUrlBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyALambdaFuncUrlBusiness 
@event_parser(model=MyALambdaFuncUrlBusiness, envelope=envelopes.LambdaFunctionUrlEnvelope) diff --git a/tests/unit/parser/test_sns.py b/tests/unit/parser/test_sns.py index efd3a45bc27..6b02409d711 100644 --- a/tests/unit/parser/test_sns.py +++ b/tests/unit/parser/test_sns.py @@ -9,9 +9,9 @@ event_parser, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness from tests.functional.utils import load_event from tests.functional.validator.conftest import sns_event # noqa: F401 +from tests.unit.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness @event_parser(model=MySnsBusiness, envelope=envelopes.SnsEnvelope) diff --git a/tests/unit/parser/test_sqs.py b/tests/unit/parser/test_sqs.py index e01cb36f23e..164bdcfafe9 100644 --- a/tests/unit/parser/test_sqs.py +++ b/tests/unit/parser/test_sqs.py @@ -8,9 +8,9 @@ event_parser, ) from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness from tests.functional.utils import load_event from tests.functional.validator.conftest import sqs_event # noqa: F401 +from tests.unit.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness @event_parser(model=MySqsBusiness, envelope=envelopes.SqsEnvelope) diff --git a/tests/unit/parser/test_vpc_lattice.py b/tests/unit/parser/test_vpc_lattice.py index f0476509cea..b319de612f7 100644 --- a/tests/unit/parser/test_vpc_lattice.py +++ b/tests/unit/parser/test_vpc_lattice.py @@ -7,8 +7,8 @@ ) from aws_lambda_powertools.utilities.parser.models import VpcLatticeModel from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyVpcLatticeBusiness from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyVpcLatticeBusiness @event_parser(model=MyVpcLatticeBusiness, envelope=envelopes.VpcLatticeEnvelope) From 
f3c9d9e04ad988f7dac6cb118be0fccfbbdf5d94 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 4 Jul 2023 17:14:04 +0100 Subject: [PATCH 16/28] code coverage --- tests/events/kinesisSingeEvent.json | 20 ++++++++++++++++++++ tests/unit/parser/test_kinesis.py | 15 +++++++++++---- 2 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 tests/events/kinesisSingeEvent.json diff --git a/tests/events/kinesisSingeEvent.json b/tests/events/kinesisSingeEvent.json new file mode 100644 index 00000000000..05fe2d297a9 --- /dev/null +++ b/tests/events/kinesisSingeEvent.json @@ -0,0 +1,20 @@ +{ + "Records": [ + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", + "data": "eyJtZXNzYWdlIjogInRlc3QgbWVzc2FnZSIsICJ1c2VybmFtZSI6ICJ0ZXN0In0=", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + } + ] +} diff --git a/tests/unit/parser/test_kinesis.py b/tests/unit/parser/test_kinesis.py index d06d2f081d0..e39b7a43196 100644 --- a/tests/unit/parser/test_kinesis.py +++ b/tests/unit/parser/test_kinesis.py @@ -26,10 +26,7 @@ @event_parser(model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope) def handle_kinesis(event: List[MyKinesisBusiness], _: LambdaContext): - assert len(event) == 1 - record: KinesisDataStreamModel = event[0] - assert record.message == "test message" - assert record.username == "test" + return event @event_parser(model=KinesisDataStreamModel) @@ -46,6 +43,16 @@ def test_kinesis_trigger_bad_base64_event(): handle_kinesis_no_envelope(raw_event, LambdaContext()) +def 
test_kinesis_trigger_event(): + raw_event = load_event("kinesisSingeEvent.json") + parsed_event: MyKinesisBusiness = handle_kinesis(raw_event, LambdaContext()) + + assert len(parsed_event) == 1 + record: KinesisDataStreamModel = parsed_event[0] + assert record.message == "test message" + assert record.username == "test" + + def test_kinesis_trigger_event_no_envelope(): raw_event = load_event("kinesisStreamEvent.json") parsed_event: KinesisDataStreamModel = handle_kinesis_no_envelope(raw_event, LambdaContext()) From 9e6e219bb44ec5efe5d5156100e1b87a1a0678f1 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 14:59:51 +0100 Subject: [PATCH 17/28] refactoring apigw --- tests/unit/parser/test_alb.py | 12 +++--------- tests/unit/parser/test_apigw.py | 24 +++++++++--------------- 2 files changed, 12 insertions(+), 24 deletions(-) diff --git a/tests/unit/parser/test_alb.py b/tests/unit/parser/test_alb.py index 29dac72fa81..6cd109941e9 100644 --- a/tests/unit/parser/test_alb.py +++ b/tests/unit/parser/test_alb.py @@ -1,19 +1,13 @@ import pytest -from aws_lambda_powertools.utilities.parser import ValidationError, event_parser +from aws_lambda_powertools.utilities.parser import ValidationError from aws_lambda_powertools.utilities.parser.models import AlbModel -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event -@event_parser(model=AlbModel) -def handle_alb(event: AlbModel, _: LambdaContext): - return event - - def test_alb_trigger_event(): raw_event = load_event("albEvent.json") - parsed_event: AlbModel = handle_alb(raw_event, LambdaContext()) + parsed_event: AlbModel = AlbModel(**raw_event) assert parsed_event.requestContext.elb.targetGroupArn == raw_event["requestContext"]["elb"]["targetGroupArn"] assert parsed_event.httpMethod == raw_event["httpMethod"] @@ -27,4 +21,4 @@ def test_alb_trigger_event(): def test_validate_event_does_not_conform_with_model(): event = {"invalid": "event"} with 
pytest.raises(ValidationError): - handle_alb(event, LambdaContext()) + AlbModel(**event) diff --git a/tests/unit/parser/test_apigw.py b/tests/unit/parser/test_apigw.py index beaa86e5f6d..a65d181cc54 100644 --- a/tests/unit/parser/test_apigw.py +++ b/tests/unit/parser/test_apigw.py @@ -1,27 +1,20 @@ import pytest from pydantic import ValidationError -from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse +from aws_lambda_powertools.utilities.parser import envelopes, parse from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventModel -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyApiGatewayBusiness -@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayEnvelope) -def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): - return event - - -@event_parser(model=APIGatewayProxyEventModel) -def handle_apigw_event(event: APIGatewayProxyEventModel, _: LambdaContext): - return event - - def test_apigw_event_with_envelope(): raw_event = load_event("apiGatewayProxyEvent.json") raw_event["body"] = '{"message": "Hello", "username": "Ran"}' - parsed_event: MyApiGatewayBusiness = handle_apigw_with_envelope(raw_event, LambdaContext()) + parsed_event: MyApiGatewayBusiness = parse( + event=raw_event, + model=MyApiGatewayBusiness, + envelope=envelopes.ApiGatewayEnvelope, + ) assert parsed_event.message == "Hello" assert parsed_event.username == "Ran" @@ -29,7 +22,8 @@ def test_apigw_event_with_envelope(): def test_apigw_event(): raw_event = load_event("apiGatewayProxyEvent.json") - parsed_event: APIGatewayProxyEventModel = handle_apigw_event(raw_event, LambdaContext()) + parsed_event: APIGatewayProxyEventModel = APIGatewayProxyEventModel(**raw_event) + assert parsed_event.version == raw_event["version"] assert parsed_event.resource == raw_event["resource"] assert parsed_event.path == 
raw_event["path"] @@ -138,7 +132,7 @@ def test_apigw_event_with_invalid_websocket_request(): # WHEN calling event_parser with APIGatewayProxyEventModel with pytest.raises(ValidationError) as err: - handle_apigw_event(event, LambdaContext()) + APIGatewayProxyEventModel(**event) # THEN raise TypeError for invalid event errors = err.value.errors() From 56ff353cd7380f961d2f89ce796a0eb5d2702b6f Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 15:12:14 +0100 Subject: [PATCH 18/28] refactoring apigwv2 --- tests/unit/parser/test_apigwv2.py | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/tests/unit/parser/test_apigwv2.py b/tests/unit/parser/test_apigwv2.py index c176a54b82b..9ffc7f525bc 100644 --- a/tests/unit/parser/test_apigwv2.py +++ b/tests/unit/parser/test_apigwv2.py @@ -1,28 +1,21 @@ -from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse +from aws_lambda_powertools.utilities.parser import envelopes, parse from aws_lambda_powertools.utilities.parser.models import ( APIGatewayProxyEventV2Model, RequestContextV2, RequestContextV2Authorizer, ) -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyApiGatewayBusiness -@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) -def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): - return event - - -@event_parser(model=APIGatewayProxyEventV2Model) -def handle_apigw_event(event: APIGatewayProxyEventV2Model, _: LambdaContext): - return event - - def test_apigw_v2_event_with_envelope(): raw_event = load_event("apiGatewayProxyV2Event.json") raw_event["body"] = '{"message": "Hello", "username": "Ran"}' - parsed_event: MyApiGatewayBusiness = handle_apigw_with_envelope(raw_event, LambdaContext()) + parsed_event: MyApiGatewayBusiness = parse( + event=raw_event, + model=MyApiGatewayBusiness, + 
envelope=envelopes.ApiGatewayV2Envelope, + ) assert parsed_event.message == "Hello" assert parsed_event.username == "Ran" @@ -30,7 +23,7 @@ def test_apigw_v2_event_with_envelope(): def test_apigw_v2_event_jwt_authorizer(): raw_event = load_event("apiGatewayProxyV2Event.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(raw_event, LambdaContext()) + parsed_event: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model(**raw_event) assert parsed_event.version == raw_event["version"] assert parsed_event.routeKey == raw_event["routeKey"] @@ -72,7 +65,7 @@ def test_apigw_v2_event_jwt_authorizer(): def test_api_gateway_proxy_v2_event_lambda_authorizer(): raw_event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(raw_event, LambdaContext()) + parsed_event: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model(**raw_event) request_context: RequestContextV2 = parsed_event.requestContext assert request_context is not None @@ -84,7 +77,7 @@ def test_api_gateway_proxy_v2_event_lambda_authorizer(): def test_api_gateway_proxy_v2_event_iam_authorizer(): raw_event = load_event("apiGatewayProxyV2IamEvent.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(raw_event, LambdaContext()) + parsed_event: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model(**raw_event) iam = parsed_event.requestContext.authorizer.iam raw_iam = raw_event["requestContext"]["authorizer"]["iam"] From 8ed4c4121f611044694a486bf030840b405cd8c7 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 15:17:08 +0100 Subject: [PATCH 19/28] refactoring cloudwatch --- tests/unit/parser/test_cloudwatch.py | 44 ++++++++++++---------------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py index 47c0312bf20..bc8bf0776f9 100644 --- a/tests/unit/parser/test_cloudwatch.py +++ 
b/tests/unit/parser/test_cloudwatch.py @@ -1,20 +1,15 @@ import base64 import json import zlib -from typing import Any, List +from typing import Any import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from aws_lambda_powertools.utilities.parser.models import ( CloudWatchLogsLogEvent, CloudWatchLogsModel, ) -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyCloudWatchBusiness @@ -25,19 +20,6 @@ def decode_cloudwatch_raw_event(event: dict): return json.loads(uncompressed.decode("utf-8")) -@event_parser(model=MyCloudWatchBusiness, envelope=envelopes.CloudWatchLogsEnvelope) -def handle_cloudwatch_logs(event: List[MyCloudWatchBusiness], _: LambdaContext): - assert len(event) == 1 - log: MyCloudWatchBusiness = event[0] - assert log.my_message == "hello" - assert log.user == "test" - - -@event_parser(model=CloudWatchLogsModel) -def handle_cloudwatch_logs_no_envelope(event: CloudWatchLogsModel, _: LambdaContext): - return event - - def test_validate_event_user_model_with_envelope(): my_log_message = {"my_message": "hello", "user": "test"} inner_event_dict = { @@ -50,20 +32,29 @@ def test_validate_event_user_model_with_envelope(): } dict_str = json.dumps(inner_event_dict) compressesd_str = zlib.compress(str.encode(dict_str), -1) - event_dict = {"awslogs": {"data": base64.b64encode(compressesd_str)}} + raw_event = {"awslogs": {"data": base64.b64encode(compressesd_str)}} + + parsed_event: MyCloudWatchBusiness = parse( + event=raw_event, + model=MyCloudWatchBusiness, + envelope=envelopes.CloudWatchLogsEnvelope, + ) - handle_cloudwatch_logs(event_dict, LambdaContext()) + assert len(parsed_event) == 1 + log: MyCloudWatchBusiness = parsed_event[0] + assert log.my_message == "hello" + assert log.user == "test" def 
test_validate_event_does_not_conform_with_user_dict_model(): event_dict = load_event("cloudWatchLogEvent.json") with pytest.raises(ValidationError): - handle_cloudwatch_logs(event_dict, LambdaContext()) + MyCloudWatchBusiness(**event_dict) def test_handle_cloudwatch_trigger_event_no_envelope(): raw_event = load_event("cloudWatchLogEvent.json") - parsed_event: CloudWatchLogsModel = handle_cloudwatch_logs_no_envelope(raw_event, LambdaContext()) + parsed_event: CloudWatchLogsModel = CloudWatchLogsModel(**raw_event) raw_event_decoded = decode_cloudwatch_raw_event(raw_event["awslogs"]["data"]) @@ -93,11 +84,12 @@ def test_handle_cloudwatch_trigger_event_no_envelope(): def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): raw_event: Any = {"awslogs": {"data": "invalid_data"}} with pytest.raises(ValidationError) as context: - handle_cloudwatch_logs_no_envelope(raw_event, LambdaContext()) + CloudWatchLogsModel(**raw_event) assert context.value.errors()[0]["msg"] == "unable to decompress data" def test_handle_invalid_event_with_envelope(): + empty_dict = {} with pytest.raises(ValidationError): - handle_cloudwatch_logs(event={}, context=LambdaContext()) + CloudWatchLogsModel(**empty_dict) From cf2c0dfb5997ff533826413bd176e9d918801074 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 15:21:13 +0100 Subject: [PATCH 20/28] refactoring dynamodb --- tests/unit/parser/test_dynamodb.py | 35 ++++++++++-------------------- 1 file changed, 11 insertions(+), 24 deletions(-) diff --git a/tests/unit/parser/test_dynamodb.py b/tests/unit/parser/test_dynamodb.py index bf838e38cd5..57bd2be5f0a 100644 --- a/tests/unit/parser/test_dynamodb.py +++ b/tests/unit/parser/test_dynamodb.py @@ -1,30 +1,17 @@ -from typing import Any, Dict, List - import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools.utilities.parser 
import ValidationError, envelopes, parse from tests.functional.utils import load_event from tests.unit.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness -@event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) -def handle_dynamodb(event: List[Dict[str, MyDynamoBusiness]], _: LambdaContext): - return event - - -@event_parser(model=MyAdvancedDynamoBusiness) -def handle_dynamodb_no_envelope(event: MyAdvancedDynamoBusiness, _: LambdaContext): - return event - - def test_dynamo_db_stream_trigger_event(): raw_event = load_event("dynamoStreamEvent.json") - parserd_event: MyDynamoBusiness = handle_dynamodb(raw_event, LambdaContext()) + parserd_event: MyDynamoBusiness = parse( + event=raw_event, + model=MyDynamoBusiness, + envelope=envelopes.DynamoDBStreamEnvelope, + ) assert len(parserd_event) == 2 @@ -51,7 +38,7 @@ def test_dynamo_db_stream_trigger_event(): def test_dynamo_db_stream_trigger_event_no_envelope(): raw_event = load_event("dynamoStreamEvent.json") - parserd_event: MyAdvancedDynamoBusiness = handle_dynamodb_no_envelope(raw_event, LambdaContext()) + parserd_event: MyAdvancedDynamoBusiness = MyAdvancedDynamoBusiness(**raw_event) records = parserd_event.Records record = records[0] @@ -86,12 +73,12 @@ def test_dynamo_db_stream_trigger_event_no_envelope(): def test_validate_event_does_not_conform_with_model_no_envelope(): - raw_event: Any = {"hello": "s"} + raw_event: dict = {"hello": "s"} with pytest.raises(ValidationError): - handle_dynamodb_no_envelope(raw_event, LambdaContext()) + MyAdvancedDynamoBusiness(**raw_event) def test_validate_event_does_not_conform_with_model(): - raw_event: Any = {"hello": "s"} + raw_event: dict = {"hello": "s"} with pytest.raises(ValidationError): - handle_dynamodb(raw_event, LambdaContext()) + parse(event=raw_event, model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) From 5e141e5809bed91a5453940a5f3cced75fb6300e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 
2023 15:25:03 +0100 Subject: [PATCH 21/28] refactoring eventbridge --- tests/unit/parser/test_eventbridge.py | 30 +++++++++------------------ 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/tests/unit/parser/test_eventbridge.py b/tests/unit/parser/test_eventbridge.py index 0cd522f02a7..7f250ecdb83 100644 --- a/tests/unit/parser/test_eventbridge.py +++ b/tests/unit/parser/test_eventbridge.py @@ -1,11 +1,6 @@ import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from tests.functional.utils import load_event from tests.unit.parser.schemas import ( MyAdvancedEventbridgeBusiness, @@ -13,19 +8,13 @@ ) -@event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) -def handle_eventbridge(event: MyEventbridgeBusiness, _: LambdaContext): - return event - - -@event_parser(model=MyAdvancedEventbridgeBusiness) -def handle_eventbridge_no_envelope(event: MyAdvancedEventbridgeBusiness, _: LambdaContext): - return event - - def test_handle_eventbridge_trigger_event(): raw_event = load_event("eventBridgeEvent.json") - parsed_event: MyEventbridgeBusiness = handle_eventbridge(raw_event, LambdaContext()) + parsed_event: MyEventbridgeBusiness = parse( + event=raw_event, + model=MyEventbridgeBusiness, + envelope=envelopes.EventBridgeEnvelope, + ) assert parsed_event.instance_id == raw_event["detail"]["instance_id"] assert parsed_event.state == raw_event["detail"]["state"] @@ -37,12 +26,12 @@ def test_validate_event_does_not_conform_with_user_dict_model(): raw_event.pop("version") with pytest.raises(ValidationError): - handle_eventbridge(raw_event, LambdaContext()) + parse(event=raw_event, model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) def test_handle_eventbridge_trigger_event_no_envelope(): raw_event = 
load_event("eventBridgeEvent.json") - parsed_event: MyAdvancedEventbridgeBusiness = handle_eventbridge_no_envelope(raw_event, LambdaContext()) + parsed_event: MyAdvancedEventbridgeBusiness = MyAdvancedEventbridgeBusiness(**raw_event) assert parsed_event.detail.instance_id == raw_event["detail"]["instance_id"] assert parsed_event.detail.state == raw_event["detail"]["state"] @@ -59,5 +48,6 @@ def test_handle_eventbridge_trigger_event_no_envelope(): def test_handle_invalid_event_with_eventbridge_envelope(): + empty_event = {} with pytest.raises(ValidationError): - handle_eventbridge(event={}, context=LambdaContext()) + parse(event=empty_event, model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) From 60c529f94cc73c968d381636f3daa8793b0ef376 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 15:28:36 +0100 Subject: [PATCH 22/28] refactoring kafka --- tests/unit/parser/test_kafka.py | 36 ++++++++++++--------------------- 1 file changed, 13 insertions(+), 23 deletions(-) diff --git a/tests/unit/parser/test_kafka.py b/tests/unit/parser/test_kafka.py index fa6493abd45..1f229c1db6e 100644 --- a/tests/unit/parser/test_kafka.py +++ b/tests/unit/parser/test_kafka.py @@ -1,29 +1,20 @@ -from typing import List - -from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser import envelopes, parse from aws_lambda_powertools.utilities.parser.models import ( KafkaMskEventModel, KafkaRecordModel, KafkaSelfManagedEventModel, ) -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyLambdaKafkaBusiness -@event_parser(model=MyLambdaKafkaBusiness, envelope=envelopes.KafkaEnvelope) -def handle_lambda_kafka_with_envelope(event: List[MyLambdaKafkaBusiness], _: LambdaContext): - return event - - -@event_parser(model=KafkaSelfManagedEventModel) -def handle_kafka_event(event: KafkaSelfManagedEventModel, 
_: LambdaContext): - return event - - def test_kafka_msk_event_with_envelope(): raw_event = load_event("kafkaEventMsk.json") - parsed_event: MyLambdaKafkaBusiness = handle_lambda_kafka_with_envelope(raw_event, LambdaContext()) + parsed_event: MyLambdaKafkaBusiness = parse( + event=raw_event, + model=MyLambdaKafkaBusiness, + envelope=envelopes.KafkaEnvelope, + ) assert parsed_event[0].key == "value" assert len(parsed_event) == 1 @@ -31,7 +22,11 @@ def test_kafka_msk_event_with_envelope(): def test_kafka_self_managed_event_with_envelope(): raw_event = load_event("kafkaEventSelfManaged.json") - parsed_event: MyLambdaKafkaBusiness = handle_lambda_kafka_with_envelope(raw_event, LambdaContext()) + parsed_event: MyLambdaKafkaBusiness = parse( + event=raw_event, + model=MyLambdaKafkaBusiness, + envelope=envelopes.KafkaEnvelope, + ) assert parsed_event[0].key == "value" assert len(parsed_event) == 1 @@ -39,7 +34,7 @@ def test_kafka_self_managed_event_with_envelope(): def test_self_managed_kafka_event(): raw_event = load_event("kafkaEventSelfManaged.json") - parsed_event: KafkaSelfManagedEventModel = handle_kafka_event(raw_event, LambdaContext()) + parsed_event: KafkaSelfManagedEventModel = KafkaSelfManagedEventModel(**raw_event) assert parsed_event.eventSource == raw_event["eventSource"] @@ -62,14 +57,9 @@ def test_self_managed_kafka_event(): assert record.headers[0]["headerKey"] == b"headerValue" -@event_parser(model=KafkaMskEventModel) -def handle_msk_event(event: KafkaMskEventModel, _: LambdaContext): - return event - - def test_kafka_msk_event(): raw_event = load_event("kafkaEventMsk.json") - parsed_event: KafkaMskEventModel = handle_msk_event(raw_event, LambdaContext()) + parsed_event: KafkaMskEventModel = KafkaMskEventModel(**raw_event) assert parsed_event.eventSource == raw_event["eventSource"] assert parsed_event.bootstrapServers == raw_event["bootstrapServers"].split(",") From 1ff0de9a0765d09820295848c46a768769690d0a Mon Sep 17 00:00:00 2001 From: Leandro Damascena 
Date: Wed, 5 Jul 2023 15:35:18 +0100 Subject: [PATCH 23/28] refactoring kinesis --- tests/unit/parser/test_kinesis_firehose.py | 47 ++++++---------------- 1 file changed, 12 insertions(+), 35 deletions(-) diff --git a/tests/unit/parser/test_kinesis_firehose.py b/tests/unit/parser/test_kinesis_firehose.py index c400d3260da..87eaef7ca9d 100644 --- a/tests/unit/parser/test_kinesis_firehose.py +++ b/tests/unit/parser/test_kinesis_firehose.py @@ -1,12 +1,6 @@ -from typing import List - import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from aws_lambda_powertools.utilities.parser.models import ( KinesisFirehoseModel, KinesisFirehoseRecord, @@ -14,34 +8,13 @@ KinesisFirehoseSqsModel, KinesisFirehoseSqsRecord, ) -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyKinesisFirehoseBusiness -@event_parser(model=MyKinesisFirehoseBusiness, envelope=envelopes.KinesisFirehoseEnvelope) -def handle_firehose(event: List[MyKinesisFirehoseBusiness], _: LambdaContext): - return event - - -@event_parser(model=KinesisFirehoseModel) -def handle_firehose_no_envelope_kinesis(event: KinesisFirehoseModel, _: LambdaContext): - return event - - -@event_parser(model=KinesisFirehoseModel) -def handle_firehose_no_envelope_put(event: KinesisFirehoseModel, _: LambdaContext): - return event - - -@event_parser(model=KinesisFirehoseSqsModel) -def handle_firehose_sqs_wrapped_message(event: KinesisFirehoseSqsModel, _: LambdaContext): - return event - - def test_firehose_sqs_wrapped_message_event(): raw_event = load_event("kinesisFirehoseSQSEvent.json") - parsed_event: KinesisFirehoseSqsModel = handle_firehose_sqs_wrapped_message(raw_event, LambdaContext()) + parsed_event: KinesisFirehoseSqsModel = KinesisFirehoseSqsModel(**raw_event) assert 
parsed_event.region == raw_event["region"] assert parsed_event.invocationId == raw_event["invocationId"] @@ -61,7 +34,11 @@ def test_firehose_sqs_wrapped_message_event(): def test_firehose_trigger_event(): raw_event = load_event("kinesisFirehoseKinesisEvent.json") raw_event["records"].pop(0) # remove first item since the payload is bytes and we want to test payload json class - parsed_event: MyKinesisFirehoseBusiness = handle_firehose(raw_event, LambdaContext()) + parsed_event: MyKinesisFirehoseBusiness = parse( + event=raw_event, + model=MyKinesisFirehoseBusiness, + envelope=envelopes.KinesisFirehoseEnvelope, + ) assert len(parsed_event) == 1 assert parsed_event[0].Hello == "World" @@ -69,7 +46,7 @@ def test_firehose_trigger_event(): def test_firehose_trigger_event_kinesis_no_envelope(): raw_event = load_event("kinesisFirehoseKinesisEvent.json") - parsed_event: KinesisFirehoseModel = handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) + parsed_event: KinesisFirehoseModel = KinesisFirehoseModel(**raw_event) assert parsed_event.region == raw_event["region"] assert parsed_event.invocationId == raw_event["invocationId"] @@ -105,7 +82,7 @@ def test_firehose_trigger_event_kinesis_no_envelope(): def test_firehose_trigger_event_put_no_envelope(): raw_event = load_event("kinesisFirehosePutEvent.json") - parsed_event: KinesisFirehoseModel = handle_firehose_no_envelope_put(raw_event, LambdaContext()) + parsed_event: KinesisFirehoseModel = KinesisFirehoseModel(**raw_event) assert parsed_event.region == raw_event["region"] assert parsed_event.invocationId == raw_event["invocationId"] @@ -129,18 +106,18 @@ def test_kinesis_trigger_bad_base64_event(): raw_event = load_event("kinesisFirehoseKinesisEvent.json") raw_event["records"][0]["data"] = {"bad base64"} with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) + KinesisFirehoseModel(**raw_event) def test_kinesis_trigger_bad_timestamp_event(): raw_event = 
load_event("kinesisFirehoseKinesisEvent.json") raw_event["records"][0]["approximateArrivalTimestamp"] = -1 with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) + KinesisFirehoseModel(**raw_event) def test_kinesis_trigger_bad_metadata_timestamp_event(): raw_event = load_event("kinesisFirehoseKinesisEvent.json") raw_event["records"][0]["kinesisRecordMetadata"]["approximateArrivalTimestamp"] = "-1" with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(raw_event, LambdaContext()) + KinesisFirehoseModel(**raw_event) From 8025b3c5af50f126b0a86448f1df9cb82449bdf5 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 15:42:22 +0100 Subject: [PATCH 24/28] refactoring kinesis --- tests/unit/parser/test_kinesis.py | 34 +++++++++---------------------- 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/tests/unit/parser/test_kinesis.py b/tests/unit/parser/test_kinesis.py index e39b7a43196..9f749cc2da2 100644 --- a/tests/unit/parser/test_kinesis.py +++ b/tests/unit/parser/test_kinesis.py @@ -1,13 +1,6 @@ -from typing import List - import pytest -from aws_lambda_powertools.utilities.parser import ( - BaseModel, - ValidationError, - envelopes, - event_parser, -) +from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError, envelopes, parse from aws_lambda_powertools.utilities.parser.models import ( KinesisDataStreamModel, KinesisDataStreamRecordPayload, @@ -19,33 +12,26 @@ extract_cloudwatch_logs_from_event, extract_cloudwatch_logs_from_record, ) -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyKinesisBusiness -@event_parser(model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope) -def handle_kinesis(event: List[MyKinesisBusiness], _: LambdaContext): - return event - - -@event_parser(model=KinesisDataStreamModel) -def 
handle_kinesis_no_envelope(event: KinesisDataStreamModel, _: LambdaContext): - return event - - def test_kinesis_trigger_bad_base64_event(): raw_event = load_event("kinesisStreamEvent.json") raw_event["Records"][0]["kinesis"]["data"] = "bad" with pytest.raises(ValidationError): - handle_kinesis_no_envelope(raw_event, LambdaContext()) + KinesisDataStreamModel(**raw_event) def test_kinesis_trigger_event(): raw_event = load_event("kinesisSingeEvent.json") - parsed_event: MyKinesisBusiness = handle_kinesis(raw_event, LambdaContext()) + parsed_event: MyKinesisBusiness = parse( + event=raw_event, + model=MyKinesisBusiness, + envelope=envelopes.KinesisDataStreamEnvelope, + ) assert len(parsed_event) == 1 record: KinesisDataStreamModel = parsed_event[0] @@ -55,7 +41,7 @@ def test_kinesis_trigger_event(): def test_kinesis_trigger_event_no_envelope(): raw_event = load_event("kinesisStreamEvent.json") - parsed_event: KinesisDataStreamModel = handle_kinesis_no_envelope(raw_event, LambdaContext()) + parsed_event: KinesisDataStreamModel = KinesisDataStreamModel(**raw_event) records = parsed_event.Records assert len(records) == 2 @@ -81,13 +67,13 @@ def test_kinesis_trigger_event_no_envelope(): def test_validate_event_does_not_conform_with_model_no_envelope(): raw_event: dict = {"hello": "s"} with pytest.raises(ValidationError): - handle_kinesis_no_envelope(raw_event, LambdaContext()) + KinesisDataStreamModel(**raw_event) def test_validate_event_does_not_conform_with_model(): raw_event: dict = {"hello": "s"} with pytest.raises(ValidationError): - handle_kinesis(raw_event, LambdaContext()) + parse(event=raw_event, model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope) def test_kinesis_stream_event_cloudwatch_logs_data_extraction(): From 37f7c4ba3421b8cdbcd583334d12a63cd0967179 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 15:46:16 +0100 Subject: [PATCH 25/28] refactoring lambdaurl + s3 --- tests/unit/parser/test_lambda_function_url.py | 23 
+++++++------------ tests/unit/parser/test_s3 object_event.py | 16 ++----------- 2 files changed, 10 insertions(+), 29 deletions(-) diff --git a/tests/unit/parser/test_lambda_function_url.py b/tests/unit/parser/test_lambda_function_url.py index a98a6ff00bc..3b1a7f259ec 100644 --- a/tests/unit/parser/test_lambda_function_url.py +++ b/tests/unit/parser/test_lambda_function_url.py @@ -1,25 +1,18 @@ -from aws_lambda_powertools.utilities.parser import envelopes, event_parser +from aws_lambda_powertools.utilities.parser import envelopes, parse from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyALambdaFuncUrlBusiness -@event_parser(model=MyALambdaFuncUrlBusiness, envelope=envelopes.LambdaFunctionUrlEnvelope) -def handle_lambda_func_url_with_envelope(event: MyALambdaFuncUrlBusiness, _: LambdaContext): - return event - - -@event_parser(model=LambdaFunctionUrlModel) -def handle_lambda_func_url_event(event: LambdaFunctionUrlModel, _: LambdaContext): - return event - - def test_lambda_func_url_event_with_envelope(): raw_event = load_event("lambdaFunctionUrlEvent.json") raw_event["body"] = '{"message": "Hello", "username": "Ran"}' - parsed_event: MyALambdaFuncUrlBusiness = handle_lambda_func_url_with_envelope(raw_event, LambdaContext()) + parsed_event: MyALambdaFuncUrlBusiness = parse( + event=raw_event, + model=MyALambdaFuncUrlBusiness, + envelope=envelopes.LambdaFunctionUrlEnvelope, + ) assert parsed_event.message == "Hello" assert parsed_event.username == "Ran" @@ -27,7 +20,7 @@ def test_lambda_func_url_event_with_envelope(): def test_lambda_function_url_event(): raw_event = load_event("lambdaFunctionUrlEvent.json") - parsed_event: LambdaFunctionUrlModel = handle_lambda_func_url_event(raw_event, LambdaContext()) + parsed_event: LambdaFunctionUrlModel = LambdaFunctionUrlModel(**raw_event) assert 
parsed_event.version == raw_event["version"] assert parsed_event.routeKey == raw_event["routeKey"] @@ -73,7 +66,7 @@ def test_lambda_function_url_event(): def test_lambda_function_url_event_iam(): raw_event = load_event("lambdaFunctionUrlIAMEvent.json") - parsed_event: LambdaFunctionUrlModel = handle_lambda_func_url_event(raw_event, LambdaContext()) + parsed_event: LambdaFunctionUrlModel = LambdaFunctionUrlModel(**raw_event) assert parsed_event.version == raw_event["version"] assert parsed_event.routeKey == raw_event["routeKey"] diff --git a/tests/unit/parser/test_s3 object_event.py b/tests/unit/parser/test_s3 object_event.py index 1ab39b2c6ad..c106a66c2a9 100644 --- a/tests/unit/parser/test_s3 object_event.py +++ b/tests/unit/parser/test_s3 object_event.py @@ -1,17 +1,10 @@ -from aws_lambda_powertools.utilities.parser import event_parser from aws_lambda_powertools.utilities.parser.models import S3ObjectLambdaEvent -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event -@event_parser(model=S3ObjectLambdaEvent) -def handle_s3_object_event_iam(event: S3ObjectLambdaEvent, _: LambdaContext): - return event - - def test_s3_object_event(): event = load_event("s3ObjectEventIAMUser.json") - parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_iam(event, LambdaContext()) + parsed_event: S3ObjectLambdaEvent = S3ObjectLambdaEvent(**event) assert parsed_event.xAmzRequestId == event["xAmzRequestId"] assert parsed_event.getObjectContext is not None object_context = parsed_event.getObjectContext @@ -40,14 +33,9 @@ def test_s3_object_event(): assert parsed_event.protocolVersion == event["protocolVersion"] -@event_parser(model=S3ObjectLambdaEvent) -def handle_s3_object_event_temp_creds(event: S3ObjectLambdaEvent, _: LambdaContext): - return event - - def test_s3_object_event_temp_credentials(): event = load_event("s3ObjectEventTempCredentials.json") - parsed_event: S3ObjectLambdaEvent = 
handle_s3_object_event_temp_creds(event, LambdaContext()) + parsed_event: S3ObjectLambdaEvent = S3ObjectLambdaEvent(**event) assert parsed_event.xAmzRequestId == event["xAmzRequestId"] session_context = parsed_event.userIdentity.sessionContext assert session_context is not None From 2d52c771ebfa424e7675d9b39430556c20dc7732 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 16:09:00 +0100 Subject: [PATCH 26/28] refactoring s3 + s3s + sns --- tests/unit/parser/test_s3.py | 30 ++++++----------------- tests/unit/parser/test_ses.py | 9 +------ tests/unit/parser/test_sns.py | 45 ++++++++++------------------------- 3 files changed, 20 insertions(+), 64 deletions(-) diff --git a/tests/unit/parser/test_s3.py b/tests/unit/parser/test_s3.py index 65c7e73ac06..1586f32d28e 100644 --- a/tests/unit/parser/test_s3.py +++ b/tests/unit/parser/test_s3.py @@ -1,29 +1,13 @@ import pytest -from aws_lambda_powertools.utilities.parser import ValidationError, event_parser, parse +from aws_lambda_powertools.utilities.parser import ValidationError from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event -@event_parser(model=S3Model) -def handle_s3(event: S3Model, _: LambdaContext): - return event - - -@event_parser(model=S3Model) -def handle_s3_delete_object(event: S3Model, _: LambdaContext): - return event - - -@event_parser(model=S3Model) -def handle_s3_glacier(event: S3Model, _: LambdaContext): - return event - - def test_s3_trigger_event(): raw_event = load_event("s3Event.json") - parsed_event: S3Model = handle_s3(raw_event, LambdaContext()) + parsed_event: S3Model = S3Model(**raw_event) records = list(parsed_event.Records) assert len(records) == 1 @@ -65,7 +49,7 @@ def test_s3_trigger_event(): def test_s3_glacier_trigger_event(): raw_event = load_event("s3EventGlacier.json") - parsed_event: S3Model = handle_s3_glacier(raw_event, 
LambdaContext()) + parsed_event: S3Model = S3Model(**raw_event) records = list(parsed_event.Records) assert len(records) == 1 @@ -115,7 +99,7 @@ def test_s3_glacier_trigger_event(): def test_s3_trigger_event_delete_object(): raw_event = load_event("s3EventDeleteObject.json") - parsed_event: S3Model = handle_s3_delete_object(raw_event, LambdaContext()) + parsed_event: S3Model = S3Model(**raw_event) records = list(parsed_event.Records) assert len(records) == 1 @@ -158,18 +142,18 @@ def test_s3_trigger_event_delete_object(): def test_s3_empty_object(): raw_event = load_event("s3Event.json") raw_event["Records"][0]["s3"]["object"]["size"] = 0 - parse(event=raw_event, model=S3Model) + S3Model(**raw_event) def test_s3_none_object_size_failed_validation(): raw_event = load_event("s3Event.json") raw_event["Records"][0]["s3"]["object"]["size"] = None with pytest.raises(ValidationError): - parse(event=raw_event, model=S3Model) + S3Model(**raw_event) def test_s3_none_etag_value_failed_validation(): raw_event = load_event("s3Event.json") raw_event["Records"][0]["s3"]["object"]["eTag"] = None with pytest.raises(ValidationError): - parse(event=raw_event, model=S3Model) + S3Model(**raw_event) diff --git a/tests/unit/parser/test_ses.py b/tests/unit/parser/test_ses.py index 4742b8832ae..34eb43bf5eb 100644 --- a/tests/unit/parser/test_ses.py +++ b/tests/unit/parser/test_ses.py @@ -1,17 +1,10 @@ -from aws_lambda_powertools.utilities.parser import event_parser from aws_lambda_powertools.utilities.parser.models import SesModel, SesRecordModel -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event -@event_parser(model=SesModel) -def handle_ses(event: SesModel, _: LambdaContext): - return event - - def test_ses_trigger_event(): raw_event = load_event("sesEvent.json") - parsed_event: SesModel = handle_ses(raw_event, LambdaContext()) + parsed_event: SesModel = SesModel(**raw_event) records = parsed_event.Records record: 
SesRecordModel = records[0] diff --git a/tests/unit/parser/test_sns.py b/tests/unit/parser/test_sns.py index 6b02409d711..9b925d5fa76 100644 --- a/tests/unit/parser/test_sns.py +++ b/tests/unit/parser/test_sns.py @@ -1,37 +1,26 @@ import json -from typing import List import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from tests.functional.utils import load_event from tests.functional.validator.conftest import sns_event # noqa: F401 from tests.unit.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness -@event_parser(model=MySnsBusiness, envelope=envelopes.SnsEnvelope) -def handle_sns_json_body(event: List[MySnsBusiness], _: LambdaContext): - return event - - def test_handle_sns_trigger_event_json_body(sns_event): # noqa: F811 - handle_sns_json_body(sns_event, LambdaContext()) + parse(event=sns_event, model=MySnsBusiness, envelope=envelopes.SnsEnvelope) def test_validate_event_does_not_conform_with_model(): raw_event: dict = {"invalid": "event"} with pytest.raises(ValidationError): - handle_sns_json_body(raw_event, LambdaContext()) + parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsEnvelope) def test_validate_event_does_not_conform_user_json_string_with_model(): - event: dict = { + raw_event: dict = { "Records": [ { "EventVersion": "1.0", @@ -55,17 +44,12 @@ def test_validate_event_does_not_conform_user_json_string_with_model(): } with pytest.raises(ValidationError): - handle_sns_json_body(event, LambdaContext()) - - -@event_parser(model=MyAdvancedSnsBusiness) -def handle_sns_no_envelope(event: MyAdvancedSnsBusiness, _: LambdaContext): - return event + parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsEnvelope) def test_handle_sns_trigger_event_no_envelope(): raw_event = load_event("snsEvent.json") - parsed_event: 
MyAdvancedSnsBusiness = handle_sns_no_envelope(raw_event, LambdaContext()) + parsed_event: MyAdvancedSnsBusiness = MyAdvancedSnsBusiness(**raw_event) records = parsed_event.Records record = records[0] @@ -103,14 +87,9 @@ def test_handle_sns_trigger_event_no_envelope(): assert attrib_dict["TestBinary"].Value == raw_sns["MessageAttributes"]["TestBinary"]["Value"] -@event_parser(model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) -def handle_sns_sqs_json_body(event: List[MySnsBusiness], _: LambdaContext): - return event - - def test_handle_sns_sqs_trigger_event_json_body(): # noqa: F811 raw_event = load_event("snsSqsEvent.json") - parsed_event: MySnsBusiness = handle_sns_sqs_json_body(raw_event, LambdaContext()) + parsed_event: MySnsBusiness = parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) assert len(parsed_event) == 1 assert parsed_event[0].message == "hello world" @@ -119,20 +98,20 @@ def test_handle_sns_sqs_trigger_event_json_body(): # noqa: F811 def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url(): # GIVEN an event is tampered with a missing UnsubscribeURL - event_dict = load_event("snsSqsEvent.json") - payload = json.loads(event_dict["Records"][0]["body"]) + raw_event = load_event("snsSqsEvent.json") + payload = json.loads(raw_event["Records"][0]["body"]) payload.pop("UnsubscribeURL") - event_dict["Records"][0]["body"] = json.dumps(payload) + raw_event["Records"][0]["body"] = json.dumps(payload) # WHEN parsing the payload # THEN raise a ValidationError error with pytest.raises(ValidationError): - handle_sns_sqs_json_body(event_dict, LambdaContext()) + parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) def test_handle_sns_sqs_fifo_trigger_event_json_body(): raw_event = load_event("snsSqsFifoEvent.json") - parsed_event: MySnsBusiness = handle_sns_sqs_json_body(raw_event, LambdaContext()) + parsed_event: MySnsBusiness = parse(event=raw_event, model=MySnsBusiness, 
envelope=envelopes.SnsSqsEnvelope) assert len(parsed_event) == 1 assert parsed_event[0].message == "hello world" From b716971d245c93260dee119c7f03b5889c44d710 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 16:24:46 +0100 Subject: [PATCH 27/28] refactoring ssqs + lattice --- tests/unit/parser/test_sqs.py | 35 ++++++++------------------- tests/unit/parser/test_vpc_lattice.py | 26 +++++++++----------- 2 files changed, 21 insertions(+), 40 deletions(-) diff --git a/tests/unit/parser/test_sqs.py b/tests/unit/parser/test_sqs.py index 164bdcfafe9..0d948acb39d 100644 --- a/tests/unit/parser/test_sqs.py +++ b/tests/unit/parser/test_sqs.py @@ -1,38 +1,28 @@ -from typing import Any, List - import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from tests.functional.utils import load_event from tests.functional.validator.conftest import sqs_event # noqa: F401 from tests.unit.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness -@event_parser(model=MySqsBusiness, envelope=envelopes.SqsEnvelope) -def handle_sqs_json_body(event: List[MySqsBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].message == "hello world" - assert event[0].username == "lessa" - - def test_handle_sqs_trigger_event_json_body(sqs_event): # noqa: F811 - handle_sqs_json_body(sqs_event, LambdaContext()) + parsed_event: MySqsBusiness = parse(event=sqs_event, model=MySqsBusiness, envelope=envelopes.SqsEnvelope) + + assert len(parsed_event) == 1 + assert parsed_event[0].message == "hello world" + assert parsed_event[0].username == "lessa" def test_validate_event_does_not_conform_with_model(): raw_event: dict = {"invalid": "event"} with pytest.raises(ValidationError): - handle_sqs_json_body(raw_event, LambdaContext()) + parse(event=raw_event, 
model=MySqsBusiness, envelope=envelopes.SqsEnvelope) def test_validate_event_does_not_conform_user_json_string_with_model(): - event: Any = { + raw_event: dict = { "Records": [ { "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", @@ -56,17 +46,12 @@ def test_validate_event_does_not_conform_user_json_string_with_model(): } with pytest.raises(ValidationError): - handle_sqs_json_body(event, LambdaContext()) - - -@event_parser(model=MyAdvancedSqsBusiness) -def handle_sqs_no_envelope(event: MyAdvancedSqsBusiness, _: LambdaContext): - return event + parse(event=raw_event, model=MySqsBusiness, envelope=envelopes.SqsEnvelope) def test_handle_sqs_trigger_event_no_envelope(): raw_event = load_event("sqsEvent.json") - parsed_event: MyAdvancedSqsBusiness = handle_sqs_no_envelope(raw_event, LambdaContext()) + parsed_event: MyAdvancedSqsBusiness = MyAdvancedSqsBusiness(**raw_event) records = parsed_event.Records record = records[0] diff --git a/tests/unit/parser/test_vpc_lattice.py b/tests/unit/parser/test_vpc_lattice.py index b319de612f7..e5dfedfb445 100644 --- a/tests/unit/parser/test_vpc_lattice.py +++ b/tests/unit/parser/test_vpc_lattice.py @@ -1,26 +1,22 @@ import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from aws_lambda_powertools.utilities.parser.models import VpcLatticeModel -from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.utils import load_event from tests.unit.parser.schemas import MyVpcLatticeBusiness -@event_parser(model=MyVpcLatticeBusiness, envelope=envelopes.VpcLatticeEnvelope) -def handle_lambda_vpclattice_with_envelope(event: MyVpcLatticeBusiness, context: LambdaContext): - assert event.username == "Leandro" - assert event.name == "Damascena" - - def test_vpc_lattice_event_with_envelope(): - event = load_event("vpcLatticeEvent.json") - event["body"] = '{"username": 
"Leandro", "name": "Damascena"}' - handle_lambda_vpclattice_with_envelope(event, LambdaContext()) + raw_event = load_event("vpcLatticeEvent.json") + raw_event["body"] = '{"username": "Leandro", "name": "Damascena"}' + parsed_event: MyVpcLatticeBusiness = parse( + event=raw_event, + model=MyVpcLatticeBusiness, + envelope=envelopes.VpcLatticeEnvelope, + ) + + assert parsed_event.username == "Leandro" + assert parsed_event.name == "Damascena" def test_vpc_lattice_event(): From 327e093f0298fe2d987d3c66d217964890c35725 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 5 Jul 2023 16:31:42 +0100 Subject: [PATCH 28/28] refactoring parser --- ...{kinesisSingeEvent.json => kinesisStreamEventOneRecord.json} | 0 tests/{unit => functional}/parser/conftest.py | 0 tests/{unit => functional}/parser/test_parser.py | 0 tests/unit/parser/test_kinesis.py | 2 +- 4 files changed, 1 insertion(+), 1 deletion(-) rename tests/events/{kinesisSingeEvent.json => kinesisStreamEventOneRecord.json} (100%) rename tests/{unit => functional}/parser/conftest.py (100%) rename tests/{unit => functional}/parser/test_parser.py (100%) diff --git a/tests/events/kinesisSingeEvent.json b/tests/events/kinesisStreamEventOneRecord.json similarity index 100% rename from tests/events/kinesisSingeEvent.json rename to tests/events/kinesisStreamEventOneRecord.json diff --git a/tests/unit/parser/conftest.py b/tests/functional/parser/conftest.py similarity index 100% rename from tests/unit/parser/conftest.py rename to tests/functional/parser/conftest.py diff --git a/tests/unit/parser/test_parser.py b/tests/functional/parser/test_parser.py similarity index 100% rename from tests/unit/parser/test_parser.py rename to tests/functional/parser/test_parser.py diff --git a/tests/unit/parser/test_kinesis.py b/tests/unit/parser/test_kinesis.py index 9f749cc2da2..e8b1ae87378 100644 --- a/tests/unit/parser/test_kinesis.py +++ b/tests/unit/parser/test_kinesis.py @@ -26,7 +26,7 @@ def 
test_kinesis_trigger_bad_base64_event(): def test_kinesis_trigger_event(): - raw_event = load_event("kinesisSingeEvent.json") + raw_event = load_event("kinesisStreamEventOneRecord.json") parsed_event: MyKinesisBusiness = parse( event=raw_event, model=MyKinesisBusiness,