diff --git a/tests/events/kinesisStreamEventOneRecord.json b/tests/events/kinesisStreamEventOneRecord.json new file mode 100644 index 00000000000..05fe2d297a9 --- /dev/null +++ b/tests/events/kinesisStreamEventOneRecord.json @@ -0,0 +1,20 @@ +{ + "Records": [ + { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", + "data": "eyJtZXNzYWdlIjogInRlc3QgbWVzc2FnZSIsICJ1c2VybmFtZSI6ICJ0ZXN0In0=", + "approximateArrivalTimestamp": 1545084650.987 + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" + } + ] +} diff --git a/tests/functional/parser/__init__.py b/tests/functional/parser/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/functional/parser/test_alb.py b/tests/functional/parser/test_alb.py deleted file mode 100644 index d48e39f1bab..00000000000 --- a/tests/functional/parser/test_alb.py +++ /dev/null @@ -1,44 +0,0 @@ -import pytest - -from aws_lambda_powertools.utilities.parser import ValidationError, event_parser -from aws_lambda_powertools.utilities.parser.models import AlbModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.utils import load_event - - -@event_parser(model=AlbModel) -def handle_alb(event: AlbModel, _: LambdaContext): - assert ( - event.requestContext.elb.targetGroupArn - == "arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-279XGJDqGZ5rsrHC2Fjr/49e9d65c45c6791a" # noqa E501 - ) - assert event.httpMethod == "GET" - assert event.path == "/lambda" - assert event.queryStringParameters == {"query": "1234ABCD"} - assert event.headers == { - "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", - "accept-encoding": "gzip", - "accept-language": "en-US,en;q=0.9", - "connection": "keep-alive", - "host": "lambda-alb-123578498.us-east-2.elb.amazonaws.com", - "upgrade-insecure-requests": "1", - "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36", # noqa E501 - "x-amzn-trace-id": "Root=1-5c536348-3d683b8b04734faae651f476", - "x-forwarded-for": "72.12.164.125", - "x-forwarded-port": "80", - "x-forwarded-proto": "http", - "x-imforwards": "20", - } - assert event.body == "Test" - assert not event.isBase64Encoded - - -def test_alb_trigger_event(): - event_dict = load_event("albEvent.json") - handle_alb(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model(): - event = {"invalid": "event"} - with pytest.raises(ValidationError): - handle_alb(event, LambdaContext()) diff --git a/tests/functional/parser/test_apigw.py b/tests/functional/parser/test_apigw.py deleted file mode 100644 index 35b2fdb1926..00000000000 --- a/tests/functional/parser/test_apigw.py +++ /dev/null @@ -1,152 +0,0 @@ -import pytest -from pydantic import ValidationError - -from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse -from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import 
MyApiGatewayBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayEnvelope) -def handle_apigw_with_envelope(event: MyApiGatewayBusiness, _: LambdaContext): - assert event.message == "Hello" - assert event.username == "Ran" - - -@event_parser(model=APIGatewayProxyEventModel) -def handle_apigw_event(event: APIGatewayProxyEventModel, _: LambdaContext): - assert event.body == "Hello from Lambda!" - return event - - -def test_apigw_event_with_envelope(): - event = load_event("apiGatewayProxyEvent.json") - event["body"] = '{"message": "Hello", "username": "Ran"}' - handle_apigw_with_envelope(event, LambdaContext()) - - -def test_apigw_event(): - event = load_event("apiGatewayProxyEvent.json") - parsed_event: APIGatewayProxyEventModel = handle_apigw_event(event, LambdaContext()) - assert parsed_event.version == event["version"] - assert parsed_event.resource == event["resource"] - assert parsed_event.path == event["path"] - assert parsed_event.headers == event["headers"] - assert parsed_event.multiValueHeaders == event["multiValueHeaders"] - assert parsed_event.queryStringParameters == event["queryStringParameters"] - assert parsed_event.multiValueQueryStringParameters == event["multiValueQueryStringParameters"] - - request_context = parsed_event.requestContext - assert request_context.accountId == event["requestContext"]["accountId"] - assert request_context.apiId == event["requestContext"]["apiId"] - - authorizer = request_context.authorizer - assert authorizer.claims is None - assert authorizer.scopes is None - - assert request_context.domainName == event["requestContext"]["domainName"] - assert request_context.domainPrefix == event["requestContext"]["domainPrefix"] - assert request_context.extendedRequestId == event["requestContext"]["extendedRequestId"] - assert request_context.httpMethod == event["requestContext"]["httpMethod"] - - identity = request_context.identity - assert identity.accessKey == event["requestContext"]["identity"]["accessKey"] - assert identity.accountId == event["requestContext"]["identity"]["accountId"] - assert identity.caller == event["requestContext"]["identity"]["caller"] - assert ( - identity.cognitoAuthenticationProvider == event["requestContext"]["identity"]["cognitoAuthenticationProvider"] - ) - assert identity.cognitoAuthenticationType == event["requestContext"]["identity"]["cognitoAuthenticationType"] - assert identity.cognitoIdentityId == event["requestContext"]["identity"]["cognitoIdentityId"] - assert identity.cognitoIdentityPoolId == event["requestContext"]["identity"]["cognitoIdentityPoolId"] - assert identity.principalOrgId == event["requestContext"]["identity"]["principalOrgId"] - assert str(identity.sourceIp) == event["requestContext"]["identity"]["sourceIp"] - assert identity.user == event["requestContext"]["identity"]["user"] - assert identity.userAgent == event["requestContext"]["identity"]["userAgent"] - assert identity.userArn == event["requestContext"]["identity"]["userArn"] - assert identity.clientCert is not None - assert identity.clientCert.clientCertPem == event["requestContext"]["identity"]["clientCert"]["clientCertPem"] - assert identity.clientCert.subjectDN == event["requestContext"]["identity"]["clientCert"]["subjectDN"] - assert identity.clientCert.issuerDN == event["requestContext"]["identity"]["clientCert"]["issuerDN"] - assert identity.clientCert.serialNumber == event["requestContext"]["identity"]["clientCert"]["serialNumber"] - assert ( - 
identity.clientCert.validity.notBefore - == event["requestContext"]["identity"]["clientCert"]["validity"]["notBefore"] - ) - assert ( - identity.clientCert.validity.notAfter - == event["requestContext"]["identity"]["clientCert"]["validity"]["notAfter"] - ) - - assert request_context.path == event["requestContext"]["path"] - assert request_context.protocol == event["requestContext"]["protocol"] - assert request_context.requestId == event["requestContext"]["requestId"] - assert request_context.requestTime == event["requestContext"]["requestTime"] - convert_time = int(round(request_context.requestTimeEpoch.timestamp() * 1000)) - assert convert_time == 1583349317135 - assert request_context.resourceId == event["requestContext"]["resourceId"] - assert request_context.resourcePath == event["requestContext"]["resourcePath"] - assert request_context.stage == event["requestContext"]["stage"] - - assert parsed_event.pathParameters == event["pathParameters"] - assert parsed_event.stageVariables == event["stageVariables"] - assert parsed_event.body == event["body"] - assert parsed_event.isBase64Encoded == event["isBase64Encoded"] - - assert request_context.connectedAt is None - assert request_context.connectionId is None - assert request_context.eventType is None - assert request_context.messageDirection is None - assert request_context.messageId is None - assert request_context.routeKey is None - assert request_context.operationName is None - assert identity.apiKey is None - assert identity.apiKeyId is None - - -def test_apigw_event_with_invalid_websocket_request(): - # GIVEN an event with an eventType != MESSAGE and has a messageId - event = { - "resource": "/", - "path": "/", - "httpMethod": "GET", - "headers": {}, - "multiValueHeaders": {}, - "isBase64Encoded": False, - "body": "Foo!", - "requestContext": { - "accountId": "1234", - "apiId": "myApi", - "httpMethod": "GET", - "identity": { - "sourceIp": "127.0.0.1", - }, - "path": "/", - "protocol": "Https", - "requestId": "1234", - "requestTime": "2018-09-07T16:20:46Z", - "requestTimeEpoch": 1536992496000, - "resourcePath": "/", - "stage": "test", - "eventType": "DISCONNECT", - "messageId": "messageId", - }, - } - - # WHEN calling event_parser with APIGatewayProxyEventModel - with pytest.raises(ValidationError) as err: - handle_apigw_event(event, LambdaContext()) - - # THEN raise TypeError for invalid event - errors = err.value.errors() - assert len(errors) == 1 - expected_msg = "messageId is available only when the `eventType` is `MESSAGE`" - assert errors[0]["msg"] == expected_msg - assert expected_msg in str(err.value) - - -def test_apigw_event_empty_body(): - event = load_event("apiGatewayProxyEvent.json") - event["body"] = None - parse(event=event, model=APIGatewayProxyEventModel) diff --git a/tests/functional/parser/test_apigwv2.py b/tests/functional/parser/test_apigwv2.py deleted file mode 100644 index d3510b185dd..00000000000 --- a/tests/functional/parser/test_apigwv2.py +++ /dev/null @@ -1,105 +0,0 @@ -from aws_lambda_powertools.utilities.parser import envelopes, event_parser, parse -from aws_lambda_powertools.utilities.parser.models import ( - APIGatewayProxyEventV2Model, - RequestContextV2, - RequestContextV2Authorizer, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyApiGatewayBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyApiGatewayBusiness, envelope=envelopes.ApiGatewayV2Envelope) -def handle_apigw_with_envelope(event: 
MyApiGatewayBusiness, _: LambdaContext): - assert event.message == "Hello" - assert event.username == "Ran" - - -@event_parser(model=APIGatewayProxyEventV2Model) -def handle_apigw_event(event: APIGatewayProxyEventV2Model, _: LambdaContext): - return event - - -def test_apigw_v2_event_with_envelope(): - event = load_event("apiGatewayProxyV2Event.json") - event["body"] = '{"message": "Hello", "username": "Ran"}' - handle_apigw_with_envelope(event, LambdaContext()) - - -def test_apigw_v2_event_jwt_authorizer(): - event = load_event("apiGatewayProxyV2Event.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) - assert parsed_event.version == event["version"] - assert parsed_event.routeKey == event["routeKey"] - assert parsed_event.rawPath == event["rawPath"] - assert parsed_event.rawQueryString == event["rawQueryString"] - assert parsed_event.cookies == event["cookies"] - assert parsed_event.cookies[0] == "cookie1" - assert parsed_event.headers == event["headers"] - assert parsed_event.queryStringParameters == event["queryStringParameters"] - assert parsed_event.queryStringParameters["parameter2"] == "value" - - request_context = parsed_event.requestContext - assert request_context.accountId == event["requestContext"]["accountId"] - assert request_context.apiId == event["requestContext"]["apiId"] - assert request_context.authorizer.jwt.claims == event["requestContext"]["authorizer"]["jwt"]["claims"] - assert request_context.authorizer.jwt.scopes == event["requestContext"]["authorizer"]["jwt"]["scopes"] - assert request_context.domainName == event["requestContext"]["domainName"] - assert request_context.domainPrefix == event["requestContext"]["domainPrefix"] - - http = request_context.http - assert http.method == "POST" - assert http.path == "/my/path" - assert http.protocol == "HTTP/1.1" - assert str(http.sourceIp) == "192.168.0.1/32" - assert http.userAgent == "agent" - - assert request_context.requestId == event["requestContext"]["requestId"] - assert request_context.routeKey == event["requestContext"]["routeKey"] - assert request_context.stage == event["requestContext"]["stage"] - assert request_context.time == event["requestContext"]["time"] - convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) - assert convert_time == event["requestContext"]["timeEpoch"] - assert parsed_event.body == event["body"] - assert parsed_event.pathParameters == event["pathParameters"] - assert parsed_event.isBase64Encoded == event["isBase64Encoded"] - assert parsed_event.stageVariables == event["stageVariables"] - - -def test_api_gateway_proxy_v2_event_lambda_authorizer(): - event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) - request_context: RequestContextV2 = parsed_event.requestContext - assert request_context is not None - lambda_props: RequestContextV2Authorizer = request_context.authorizer.lambda_value - assert lambda_props is not None - assert lambda_props["key"] == "value" - - -def test_api_gateway_proxy_v2_event_iam_authorizer(): - event = load_event("apiGatewayProxyV2IamEvent.json") - parsed_event: APIGatewayProxyEventV2Model = handle_apigw_event(event, LambdaContext()) - iam = parsed_event.requestContext.authorizer.iam - assert iam is not None - assert iam.accessKey == "ARIA2ZJZYVUEREEIHAKY" - assert iam.accountId == "1234567890" - assert iam.callerId == "AROA7ZJZYVRE7C3DUXHH6:CognitoIdentityCredentials" - assert iam.cognitoIdentity.amr 
== ["foo"] - assert iam.cognitoIdentity.identityId == "us-east-1:3f291106-8703-466b-8f2b-3ecee1ca56ce" - assert iam.cognitoIdentity.identityPoolId == "us-east-1:4f291106-8703-466b-8f2b-3ecee1ca56ce" - assert iam.principalOrgId == "AwsOrgId" - assert iam.userArn == "arn:aws:iam::1234567890:user/Admin" - assert iam.userId == "AROA2ZJZYVRE7Y3TUXHH6" - - -def test_apigw_event_empty_body(): - event = load_event("apiGatewayProxyV2Event.json") - event.pop("body") # API GW v2 removes certain keys when no data is passed - parse(event=event, model=APIGatewayProxyEventV2Model) - - -def test_apigw_event_empty_query_strings(): - event = load_event("apiGatewayProxyV2Event.json") - event["rawQueryString"] = "" - event.pop("queryStringParameters") # API GW v2 removes certain keys when no data is passed - parse(event=event, model=APIGatewayProxyEventV2Model) diff --git a/tests/functional/parser/test_cloudwatch.py b/tests/functional/parser/test_cloudwatch.py deleted file mode 100644 index 5fa197bb792..00000000000 --- a/tests/functional/parser/test_cloudwatch.py +++ /dev/null @@ -1,89 +0,0 @@ -import base64 -import json -import zlib -from typing import Any, List - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.parser.models import ( - CloudWatchLogsLogEvent, - CloudWatchLogsModel, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyCloudWatchBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyCloudWatchBusiness, envelope=envelopes.CloudWatchLogsEnvelope) -def handle_cloudwatch_logs(event: List[MyCloudWatchBusiness], _: LambdaContext): - assert len(event) == 1 - log: MyCloudWatchBusiness = event[0] - assert log.my_message == "hello" - assert log.user == "test" - - -@event_parser(model=CloudWatchLogsModel) -def handle_cloudwatch_logs_no_envelope(event: CloudWatchLogsModel, _: LambdaContext): - assert event.awslogs.decoded_data.owner == "123456789123" - assert event.awslogs.decoded_data.logGroup == "testLogGroup" - assert event.awslogs.decoded_data.logStream == "testLogStream" - assert event.awslogs.decoded_data.subscriptionFilters == ["testFilter"] - assert event.awslogs.decoded_data.messageType == "DATA_MESSAGE" - - assert len(event.awslogs.decoded_data.logEvents) == 2 - log_record: CloudWatchLogsLogEvent = event.awslogs.decoded_data.logEvents[0] - assert log_record.id == "eventId1" - convert_time = int(round(log_record.timestamp.timestamp() * 1000)) - assert convert_time == 1440442987000 - assert log_record.message == "[ERROR] First test message" - log_record: CloudWatchLogsLogEvent = event.awslogs.decoded_data.logEvents[1] - assert log_record.id == "eventId2" - convert_time = int(round(log_record.timestamp.timestamp() * 1000)) - assert convert_time == 1440442987001 - assert log_record.message == "[ERROR] Second test message" - - -def test_validate_event_user_model_with_envelope(): - my_log_message = {"my_message": "hello", "user": "test"} - inner_event_dict = { - "messageType": "DATA_MESSAGE", - "owner": "123456789123", - "logGroup": "testLogGroup", - "logStream": "testLogStream", - "subscriptionFilters": ["testFilter"], - "logEvents": [{"id": "eventId1", "timestamp": 1440442987000, "message": json.dumps(my_log_message)}], - } - dict_str = json.dumps(inner_event_dict) - compressesd_str = zlib.compress(str.encode(dict_str), -1) - event_dict = {"awslogs": {"data": base64.b64encode(compressesd_str)}} - - 
handle_cloudwatch_logs(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_user_dict_model(): - event_dict = load_event("cloudWatchLogEvent.json") - with pytest.raises(ValidationError): - handle_cloudwatch_logs(event_dict, LambdaContext()) - - -def test_handle_cloudwatch_trigger_event_no_envelope(): - event_dict = load_event("cloudWatchLogEvent.json") - handle_cloudwatch_logs_no_envelope(event_dict, LambdaContext()) - - -def test_handle_invalid_cloudwatch_trigger_event_no_envelope(): - event_dict: Any = {"awslogs": {"data": "invalid_data"}} - with pytest.raises(ValidationError) as context: - handle_cloudwatch_logs_no_envelope(event_dict, LambdaContext()) - - assert context.value.errors()[0]["msg"] == "unable to decompress data" - - -def test_handle_invalid_event_with_envelope(): - with pytest.raises(ValidationError): - handle_cloudwatch_logs(event={}, context=LambdaContext()) diff --git a/tests/functional/parser/test_dynamodb.py b/tests/functional/parser/test_dynamodb.py deleted file mode 100644 index e6238b00b83..00000000000 --- a/tests/functional/parser/test_dynamodb.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Any, Dict, List - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) -def handle_dynamodb(event: List[Dict[str, MyDynamoBusiness]], _: LambdaContext): - assert len(event) == 2 - assert event[0]["OldImage"] is None - assert event[0]["NewImage"].Message["S"] == "New item!" - assert event[0]["NewImage"].Id["N"] == 101 - assert event[1]["OldImage"].Message["S"] == "New item!" - assert event[1]["OldImage"].Id["N"] == 101 - assert event[1]["NewImage"].Message["S"] == "This item has changed" - assert event[1]["NewImage"].Id["N"] == 101 - - -@event_parser(model=MyAdvancedDynamoBusiness) -def handle_dynamodb_no_envelope(event: MyAdvancedDynamoBusiness, _: LambdaContext): - records = event.Records - record = records[0] - assert record.awsRegion == "us-west-2" - dynamodb = record.dynamodb - assert dynamodb is not None - assert dynamodb.ApproximateCreationDateTime is None - keys = dynamodb.Keys - assert keys is not None - id_key = keys["Id"] - assert id_key["N"] == "101" - message_key = dynamodb.NewImage.Message - assert message_key is not None - assert message_key["S"] == "New item!" 
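A note on the wire format these assertions depend on: `envelopes.DynamoDBStreamEnvelope` parses `NewImage` and `OldImage` separately, yielding a list of dicts of models, and DynamoDB attribute values stay wrapped, so numbers arrive as strings under `"N"`. A minimal sketch under those assumptions; the hand-rolled record and throwaway model below loosely mirror the `dynamoStreamEvent.json` fixture and are not part of this diff:

```python
from typing import Dict

from pydantic import BaseModel

from aws_lambda_powertools.utilities.parser import envelopes, parse


class MyDynamoBusinessSketch(BaseModel):
    Message: Dict[str, str]  # e.g. {"S": "New item!"}
    Id: Dict[str, str]       # e.g. {"N": "101"} - numbers are strings on the wire


event = {
    "Records": [
        {
            "eventID": "1",
            "eventName": "INSERT",
            "eventVersion": "1.0",
            "eventSource": "aws:dynamodb",
            "awsRegion": "us-west-2",
            "eventSourceARN": "eventsource_arn",
            "dynamodb": {
                "Keys": {"Id": {"N": "101"}},
                "NewImage": {"Message": {"S": "New item!"}, "Id": {"N": "101"}},
                "SequenceNumber": "111",
                "SizeBytes": 26,
                "StreamViewType": "NEW_AND_OLD_IMAGES",
            },
        },
    ],
}

parsed = parse(event=event, model=MyDynamoBusinessSketch, envelope=envelopes.DynamoDBStreamEnvelope)
assert parsed[0]["OldImage"] is None           # INSERT records carry no old image
assert parsed[0]["NewImage"].Id["N"] == "101"  # still a string at this layer
```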
- assert dynamodb.OldImage is None - assert dynamodb.SequenceNumber == "111" - assert dynamodb.SizeBytes == 26 - assert dynamodb.StreamViewType == "NEW_AND_OLD_IMAGES" - assert record.eventID == "1" - assert record.eventName == "INSERT" - assert record.eventSource == "aws:dynamodb" - assert record.eventSourceARN == "eventsource_arn" - assert record.eventVersion == 1.0 - assert record.userIdentity is None - - -def test_dynamo_db_stream_trigger_event(): - event_dict = load_event("dynamoStreamEvent.json") - handle_dynamodb(event_dict, LambdaContext()) - - -def test_dynamo_db_stream_trigger_event_no_envelope(): - event_dict = load_event("dynamoStreamEvent.json") - handle_dynamodb_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model_no_envelope(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_dynamodb_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_dynamodb(event_dict, LambdaContext()) diff --git a/tests/functional/parser/test_eventbridge.py b/tests/functional/parser/test_eventbridge.py deleted file mode 100644 index ca41e1a4bc5..00000000000 --- a/tests/functional/parser/test_eventbridge.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Any - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import ( - MyAdvancedEventbridgeBusiness, - MyEventbridgeBusiness, -) -from tests.functional.utils import load_event - - -@event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) -def handle_eventbridge(event: MyEventbridgeBusiness, _: LambdaContext): - assert event.instance_id == "i-1234567890abcdef0" - assert event.state == "terminated" - - -@event_parser(model=MyAdvancedEventbridgeBusiness) -def handle_eventbridge_no_envelope(event: MyAdvancedEventbridgeBusiness, _: LambdaContext): - assert event.detail.instance_id == "i-1234567890abcdef0" - assert event.detail.state == "terminated" - assert event.id == "6a7e8feb-b491-4cf7-a9f1-bf3703467718" - assert event.version == "0" - assert event.account == "111122223333" - time_str = event.time.strftime("%Y-%m-%dT%H:%M:%SZ") - assert time_str == "2017-12-22T18:43:48Z" - assert event.region == "us-west-1" - assert event.resources == ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"] - assert event.source == "aws.ec2" - assert event.detail_type == "EC2 Instance State-change Notification" - assert event.replay_name == "replay_archive" - - -def test_handle_eventbridge_trigger_event(): - event_dict = load_event("eventBridgeEvent.json") - handle_eventbridge(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_user_dict_model(): - event_dict: Any = { - "version": "0", - "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", - "detail-type": "EC2 Instance State-change Notification", - "source": "aws.ec2", - "account": "111122223333", - "time": "2017-12-22T18:43:48Z", - "region": "us-west-1", - "resources": ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"], - "detail": {}, - } - with pytest.raises(ValidationError) as e: - handle_eventbridge(event_dict, LambdaContext()) - print(e.exconly()) - - -def test_handle_eventbridge_trigger_event_no_envelope(): - event_dict = load_event("eventBridgeEvent.json") - 
handle_eventbridge_no_envelope(event_dict, LambdaContext()) - - -def test_handle_invalid_event_with_eventbridge_envelope(): - with pytest.raises(ValidationError): - handle_eventbridge(event={}, context=LambdaContext()) diff --git a/tests/functional/parser/test_kafka.py b/tests/functional/parser/test_kafka.py deleted file mode 100644 index f764106add4..00000000000 --- a/tests/functional/parser/test_kafka.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import List - -from aws_lambda_powertools.utilities.parser import envelopes, event_parser -from aws_lambda_powertools.utilities.parser.models import ( - KafkaMskEventModel, - KafkaRecordModel, - KafkaSelfManagedEventModel, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyLambdaKafkaBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyLambdaKafkaBusiness, envelope=envelopes.KafkaEnvelope) -def handle_lambda_kafka_with_envelope(event: List[MyLambdaKafkaBusiness], _: LambdaContext): - assert event[0].key == "value" - assert len(event) == 1 - - -@event_parser(model=KafkaSelfManagedEventModel) -def handle_kafka_event(event: KafkaSelfManagedEventModel, _: LambdaContext): - return event - - -def test_kafka_msk_event_with_envelope(): - event = load_event("kafkaEventMsk.json") - handle_lambda_kafka_with_envelope(event, LambdaContext()) - - -def test_kafka_self_managed_event_with_envelope(): - event = load_event("kafkaEventSelfManaged.json") - handle_lambda_kafka_with_envelope(event, LambdaContext()) - - -def test_self_managed_kafka_event(): - json_event = load_event("kafkaEventSelfManaged.json") - event: KafkaSelfManagedEventModel = handle_kafka_event(json_event, LambdaContext()) - assert event.eventSource == "aws:SelfManagedKafka" - bootstrap_servers = [ - "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - ] - assert event.bootstrapServers == bootstrap_servers - - records = list(event.records["mytopic-0"]) - assert len(records) == 1 - record: KafkaRecordModel = records[0] - assert record.topic == "mytopic" - assert record.partition == 0 - assert record.offset == 15 - assert record.timestamp is not None - convert_time = int(round(record.timestamp.timestamp() * 1000)) - assert convert_time == 1545084650987 - assert record.timestampType == "CREATE_TIME" - assert record.key == b"recordKey" - assert record.value == '{"key":"value"}' - assert len(record.headers) == 1 - assert record.headers[0]["headerKey"] == b"headerValue" - - -@event_parser(model=KafkaMskEventModel) -def handle_msk_event(event: KafkaMskEventModel, _: LambdaContext): - return event - - -def test_kafka_msk_event(): - json_event = load_event("kafkaEventMsk.json") - event: KafkaMskEventModel = handle_msk_event(json_event, LambdaContext()) - assert event.eventSource == "aws:kafka" - bootstrap_servers = [ - "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - ] - assert event.bootstrapServers == bootstrap_servers - assert ( - event.eventSourceArn - == "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4" - ) - - records = list(event.records["mytopic-0"]) - assert len(records) == 1 - record: KafkaRecordModel = records[0] - assert record.topic == "mytopic" - assert record.partition == 0 - assert record.offset == 15 - assert record.timestamp is not None - convert_time = 
int(round(record.timestamp.timestamp() * 1000)) - assert convert_time == 1545084650987 - assert record.timestampType == "CREATE_TIME" - assert record.key == b"recordKey" - assert record.value == '{"key":"value"}' - assert len(record.headers) == 1 - assert record.headers[0]["headerKey"] == b"headerValue" diff --git a/tests/functional/parser/test_kinesis.py b/tests/functional/parser/test_kinesis.py deleted file mode 100644 index 151102c34c8..00000000000 --- a/tests/functional/parser/test_kinesis.py +++ /dev/null @@ -1,153 +0,0 @@ -from typing import Any, List - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - BaseModel, - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.parser.models import ( - KinesisDataStreamModel, - KinesisDataStreamRecordPayload, -) -from aws_lambda_powertools.utilities.parser.models.cloudwatch import ( - CloudWatchLogsDecode, -) -from aws_lambda_powertools.utilities.parser.models.kinesis import ( - extract_cloudwatch_logs_from_event, - extract_cloudwatch_logs_from_record, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyKinesisBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope) -def handle_kinesis(event: List[MyKinesisBusiness], _: LambdaContext): - assert len(event) == 1 - record: KinesisDataStreamModel = event[0] - assert record.message == "test message" - assert record.username == "test" - - -@event_parser(model=KinesisDataStreamModel) -def handle_kinesis_no_envelope(event: KinesisDataStreamModel, _: LambdaContext): - records = event.Records - assert len(records) == 2 - record: KinesisDataStreamModel = records[0] - - assert record.awsRegion == "us-east-2" - assert record.eventID == "shardId-000000000006:49590338271490256608559692538361571095921575989136588898" - assert record.eventName == "aws:kinesis:record" - assert record.eventSource == "aws:kinesis" - assert record.eventSourceARN == "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream" - assert record.eventVersion == "1.0" - assert record.invokeIdentityArn == "arn:aws:iam::123456789012:role/lambda-role" - - kinesis: KinesisDataStreamRecordPayload = record.kinesis - assert kinesis.approximateArrivalTimestamp == 1545084650.987 - assert kinesis.kinesisSchemaVersion == "1.0" - assert kinesis.partitionKey == "1" - assert kinesis.sequenceNumber == "49590338271490256608559692538361571095921575989136588898" - assert kinesis.data == b"Hello, this is a test." 
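For reference, the `data` field in these Kinesis records is plain base64 that the model decodes to `bytes`; the one-record fixture added at the top of this diff round-trips with the standard library alone:

```python
import base64
import json

# the "data" value from kinesisStreamEventOneRecord.json
data = "eyJtZXNzYWdlIjogInRlc3QgbWVzc2FnZSIsICJ1c2VybmFtZSI6ICJ0ZXN0In0="
payload = json.loads(base64.b64decode(data))
assert payload == {"message": "test message", "username": "test"}
```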
- - -def test_kinesis_trigger_event(): - event_dict = { - "Records": [ - { - "kinesis": { - "kinesisSchemaVersion": "1.0", - "partitionKey": "1", - "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", - "data": "eyJtZXNzYWdlIjogInRlc3QgbWVzc2FnZSIsICJ1c2VybmFtZSI6ICJ0ZXN0In0=", - "approximateArrivalTimestamp": 1545084650.987, - }, - "eventSource": "aws:kinesis", - "eventVersion": "1.0", - "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", - "eventName": "aws:kinesis:record", - "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", - "awsRegion": "us-east-2", - "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream", - }, - ], - } - - handle_kinesis(event_dict, LambdaContext()) - - -def test_kinesis_trigger_bad_base64_event(): - event_dict = { - "Records": [ - { - "kinesis": { - "kinesisSchemaVersion": "1.0", - "partitionKey": "1", - "sequenceNumber": "49590338271490256608559692538361571095921575989136588898", - "data": "bad", - "approximateArrivalTimestamp": 1545084650.987, - }, - "eventSource": "aws:kinesis", - "eventVersion": "1.0", - "eventID": "shardId-000000000006:49590338271490256608559692538361571095921575989136588898", - "eventName": "aws:kinesis:record", - "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", - "awsRegion": "us-east-2", - "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream", - }, - ], - } - with pytest.raises(ValidationError): - handle_kinesis_no_envelope(event_dict, LambdaContext()) - - -def test_kinesis_trigger_event_no_envelope(): - event_dict = load_event("kinesisStreamEvent.json") - handle_kinesis_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model_no_envelope(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_kinesis_no_envelope(event_dict, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model(): - event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): - handle_kinesis(event_dict, LambdaContext()) - - -def test_kinesis_stream_event_cloudwatch_logs_data_extraction(): - # GIVEN a KinesisDataStreamModel is instantiated with CloudWatch Logs compressed data - event_dict = load_event("kinesisStreamCloudWatchLogsEvent.json") - stream_data = KinesisDataStreamModel(**event_dict) - single_record = stream_data.Records[0] - - # WHEN we try to extract CloudWatch Logs from KinesisDataStreamRecordPayload model - extracted_logs = extract_cloudwatch_logs_from_event(stream_data) - individual_logs = [extract_cloudwatch_logs_from_record(record) for record in stream_data.Records] - single_log = extract_cloudwatch_logs_from_record(single_record) - - # THEN we should have extracted any potential logs as CloudWatchLogsDecode models - assert len(extracted_logs) == len(individual_logs) - assert isinstance(single_log, CloudWatchLogsDecode) - - -def test_kinesis_stream_event_cloudwatch_logs_data_extraction_fails_with_custom_model(): - # GIVEN a custom model replaces Kinesis Record Data bytes - class DummyModel(BaseModel): - ... 
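`decompress_zlib_record_data_as_json`, exercised below, reverses the same encoding the deleted `test_cloudwatch.py` fixture built by hand: JSON, then zlib, then base64. A stdlib-only sketch of that round trip, with an illustrative payload:

```python
import base64
import json
import zlib

inner = {"messageType": "DATA_MESSAGE", "logEvents": [{"id": "eventId1", "message": "hello"}]}

# encode the way the deleted test_cloudwatch.py fixture did
blob = base64.b64encode(zlib.compress(json.dumps(inner).encode()))

# decode it back; wbits = zlib.MAX_WBITS | 32 auto-detects zlib- or
# gzip-framed data, and CloudWatch Logs ships the latter in practice
decoded = json.loads(zlib.decompress(base64.b64decode(blob), zlib.MAX_WBITS | 32))
assert decoded == inner
```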
- - event_dict = load_event("kinesisStreamCloudWatchLogsEvent.json") - stream_data = KinesisDataStreamModel(**event_dict) - - # WHEN decompress_zlib_record_data_as_json is used - # THEN ValueError should be raised - with pytest.raises(ValueError, match="We can only decompress bytes data"): - for record in stream_data.Records: - record.kinesis.data = DummyModel() - record.decompress_zlib_record_data_as_json() diff --git a/tests/functional/parser/test_kinesis_firehose.py b/tests/functional/parser/test_kinesis_firehose.py deleted file mode 100644 index c0b71f80540..00000000000 --- a/tests/functional/parser/test_kinesis_firehose.py +++ /dev/null @@ -1,138 +0,0 @@ -from typing import List - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.parser.models import ( - KinesisFirehoseModel, - KinesisFirehoseRecord, - KinesisFirehoseRecordMetadata, - KinesisFirehoseSqsModel, - KinesisFirehoseSqsRecord, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyKinesisFirehoseBusiness -from tests.functional.utils import load_event - - -@event_parser(model=MyKinesisFirehoseBusiness, envelope=envelopes.KinesisFirehoseEnvelope) -def handle_firehose(event: List[MyKinesisFirehoseBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].Hello == "World" - - -@event_parser(model=KinesisFirehoseModel) -def handle_firehose_no_envelope_kinesis(event: KinesisFirehoseModel, _: LambdaContext): - assert event.region == "us-east-2" - assert event.invocationId == "2b4d1ad9-2f48-94bd-a088-767c317e994a" - assert event.deliveryStreamArn == "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name" - assert event.sourceKinesisStreamArn == "arn:aws:kinesis:us-east-1:123456789012:stream/kinesis-source" - - records = list(event.records) - assert len(records) == 2 - record_01: KinesisFirehoseRecord = records[0] - assert record_01.approximateArrivalTimestamp == 1664028820148 - assert record_01.recordId == "record1" - assert record_01.data == b"Hello World" - - metadata_01: KinesisFirehoseRecordMetadata = record_01.kinesisRecordMetadata - assert metadata_01.partitionKey == "4d1ad2b9-24f8-4b9d-a088-76e9947c317a" - assert metadata_01.subsequenceNumber == "" - assert metadata_01.shardId == "shardId-000000000000" - assert metadata_01.approximateArrivalTimestamp == 1664028820148 - assert metadata_01.sequenceNumber == "49546986683135544286507457936321625675700192471156785154" - - record_02: KinesisFirehoseRecord = records[1] - assert record_02.approximateArrivalTimestamp == 1664028793294 - assert record_02.recordId == "record2" - assert record_02.data == b'{"Hello": "World"}' - - metadata_02: KinesisFirehoseRecordMetadata = record_02.kinesisRecordMetadata - assert metadata_02.partitionKey == "4d1ad2b9-24f8-4b9d-a088-76e9947c318a" - assert metadata_02.subsequenceNumber == "" - assert metadata_02.shardId == "shardId-000000000001" - assert metadata_02.approximateArrivalTimestamp == 1664028793294 - assert metadata_02.sequenceNumber == "49546986683135544286507457936321625675700192471156785155" - - -@event_parser(model=KinesisFirehoseModel) -def handle_firehose_no_envelope_put(event: KinesisFirehoseModel, _: LambdaContext): - assert event.region == "us-east-2" - assert event.invocationId == "2b4d1ad9-2f48-94bd-a088-767c317e994a" - assert event.deliveryStreamArn == "arn:aws:firehose:us-east-2:123456789012:deliverystream/delivery-stream-name" 
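As with Kinesis, Firehose record `data` travels as base64 and surfaces as `bytes` after parsing, hence `record.data == b'{"Hello": "World"}'` above. Both directions in plain stdlib:

```python
import base64
import json

wire = base64.b64encode(b'{"Hello": "World"}').decode()  # what the event carries
raw = base64.b64decode(wire)                             # what the model hands back
assert raw == b'{"Hello": "World"}'
assert json.loads(raw) == {"Hello": "World"}
```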
- - records = list(event.records) - assert len(records) == 2 - - record_01: KinesisFirehoseRecord = records[0] - assert record_01.approximateArrivalTimestamp == 1664029185290 - assert record_01.recordId == "record1" - assert record_01.data == b"Hello World" - - record_02: KinesisFirehoseRecord = records[1] - assert record_02.approximateArrivalTimestamp == 1664029186945 - assert record_02.recordId == "record2" - assert record_02.data == b'{"Hello": "World"}' - - -@event_parser(model=KinesisFirehoseSqsModel) -def handle_firehose_sqs_wrapped_message(event: KinesisFirehoseSqsModel, _: LambdaContext): - assert event.region == "us-east-1" - assert event.invocationId == "556b67a3-48fc-4385-af49-e133aade9cb9" - assert event.deliveryStreamArn == "arn:aws:firehose:us-east-1:123456789012:deliverystream/PUT-S3-tdyyE" - - records = list(event.records) - assert len(records) == 1 - - record_01: KinesisFirehoseSqsRecord = records[0] - assert record_01.data.messageId == "5ab807d4-5644-4c55-97a3-47396635ac74" - assert record_01.data.receiptHandle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." - assert record_01.data.body == "Test message." - assert record_01.data.attributes.ApproximateReceiveCount == "1" - assert record_01.data.attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" - - -def test_firehose_sqs_wrapped_message_event(): - event_dict = load_event("kinesisFirehoseSQSEvent.json") - handle_firehose_sqs_wrapped_message(event_dict, LambdaContext()) - - -def test_firehose_trigger_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"].pop(0) # remove first item since the payload is bytes and we want to test payload json class - handle_firehose(event_dict, LambdaContext()) - - -def test_firehose_trigger_event_kinesis_no_envelope(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) - - -def test_firehose_trigger_event_put_no_envelope(): - event_dict = load_event("kinesisFirehosePutEvent.json") - handle_firehose_no_envelope_put(event_dict, LambdaContext()) - - -def test_kinesis_trigger_bad_base64_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"][0]["data"] = {"bad base64"} - with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) - - -def test_kinesis_trigger_bad_timestamp_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"][0]["approximateArrivalTimestamp"] = -1 - with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) - - -def test_kinesis_trigger_bad_metadata_timestamp_event(): - event_dict = load_event("kinesisFirehoseKinesisEvent.json") - event_dict["records"][0]["kinesisRecordMetadata"]["approximateArrivalTimestamp"] = "-1" - with pytest.raises(ValidationError): - handle_firehose_no_envelope_kinesis(event_dict, LambdaContext()) diff --git a/tests/functional/parser/test_lambda_function_url.py b/tests/functional/parser/test_lambda_function_url.py deleted file mode 100644 index a63a4e25884..00000000000 --- a/tests/functional/parser/test_lambda_function_url.py +++ /dev/null @@ -1,128 +0,0 @@ -from aws_lambda_powertools.utilities.parser import envelopes, event_parser -from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyALambdaFuncUrlBusiness -from tests.functional.utils import 
load_event - - -@event_parser(model=MyALambdaFuncUrlBusiness, envelope=envelopes.LambdaFunctionUrlEnvelope) -def handle_lambda_func_url_with_envelope(event: MyALambdaFuncUrlBusiness, _: LambdaContext): - assert event.message == "Hello" - assert event.username == "Ran" - - -@event_parser(model=LambdaFunctionUrlModel) -def handle_lambda_func_url_event(event: LambdaFunctionUrlModel, _: LambdaContext): - return event - - -def test_lambda_func_url_event_with_envelope(): - event = load_event("lambdaFunctionUrlEvent.json") - event["body"] = '{"message": "Hello", "username": "Ran"}' - handle_lambda_func_url_with_envelope(event, LambdaContext()) - - -def test_lambda_function_url_event(): - json_event = load_event("lambdaFunctionUrlEvent.json") - event: LambdaFunctionUrlModel = handle_lambda_func_url_event(json_event, LambdaContext()) - - assert event.version == "2.0" - assert event.routeKey == "$default" - - assert event.rawQueryString == "" - - assert event.cookies is None - - headers = event.headers - assert len(headers) == 20 - - assert event.queryStringParameters is None - - assert event.isBase64Encoded is False - assert event.body is None - assert event.pathParameters is None - assert event.stageVariables is None - - request_context = event.requestContext - - assert request_context.accountId == "anonymous" - assert request_context.apiId is not None - assert request_context.domainName == ".lambda-url.us-east-1.on.aws" - assert request_context.domainPrefix == "" - assert request_context.requestId == "id" - assert request_context.routeKey == "$default" - assert request_context.stage == "$default" - assert request_context.time is not None - convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) - assert convert_time == 1659687279885 - assert request_context.authorizer is None - - http = request_context.http - assert http.method == "GET" - assert http.path == "/" - assert http.protocol == "HTTP/1.1" - assert str(http.sourceIp) == "123.123.123.123/32" - assert http.userAgent == "agent" - - assert request_context.authorizer is None - - -def test_lambda_function_url_event_iam(): - json_event = load_event("lambdaFunctionUrlIAMEvent.json") - event: LambdaFunctionUrlModel = handle_lambda_func_url_event(json_event, LambdaContext()) - - assert event.version == "2.0" - assert event.routeKey == "$default" - - assert event.rawQueryString == "parameter1=value1¶meter1=value2¶meter2=value" - - cookies = event.cookies - assert len(cookies) == 2 - assert cookies[0] == "cookie1" - - headers = event.headers - assert len(headers) == 2 - - query_string_parameters = event.queryStringParameters - assert len(query_string_parameters) == 2 - assert query_string_parameters.get("parameter2") == "value" - - assert event.isBase64Encoded is False - assert event.body == "Hello from client!" 
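These decorator-based Function URL tests map onto the unit style introduced later in this diff: call `parse()` directly instead of routing through a handler. A sketch of the envelope case rewritten that way, assuming the same repo fixtures and the relocated schemas module:

```python
from aws_lambda_powertools.utilities.parser import envelopes, parse
from tests.functional.utils import load_event
from tests.unit.parser.schemas import MyALambdaFuncUrlBusiness

raw_event = load_event("lambdaFunctionUrlEvent.json")
raw_event["body"] = '{"message": "Hello", "username": "Ran"}'

parsed = parse(
    event=raw_event,
    model=MyALambdaFuncUrlBusiness,
    envelope=envelopes.LambdaFunctionUrlEnvelope,
)
assert parsed.message == "Hello"
assert parsed.username == "Ran"
```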
- assert event.pathParameters is None - assert event.stageVariables is None - - request_context = event.requestContext - - assert request_context.accountId == "123456789012" - assert request_context.apiId is not None - assert request_context.domainName == ".lambda-url.us-west-2.on.aws" - assert request_context.domainPrefix == "" - assert request_context.requestId == "id" - assert request_context.routeKey == "$default" - assert request_context.stage == "$default" - assert request_context.time is not None - convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) - assert convert_time == 1583348638390 - - http = request_context.http - assert http.method == "POST" - assert http.path == "/my/path" - assert http.protocol == "HTTP/1.1" - assert str(http.sourceIp) == "123.123.123.123/32" - assert http.userAgent == "agent" - - authorizer = request_context.authorizer - assert authorizer is not None - assert authorizer.jwt is None - assert authorizer.lambda_value is None - - iam = authorizer.iam - assert iam is not None - assert iam.accessKey == "AKIA..." - assert iam.accountId == "111122223333" - assert iam.callerId == "AIDA..." - assert iam.cognitoIdentity is None - assert iam.principalOrgId is None - assert iam.userId == "AIDA..." - assert iam.userArn == "arn:aws:iam::111122223333:user/example-user" diff --git a/tests/functional/parser/test_s3.py b/tests/functional/parser/test_s3.py deleted file mode 100644 index 4037790efc5..00000000000 --- a/tests/functional/parser/test_s3.py +++ /dev/null @@ -1,155 +0,0 @@ -import pytest - -from aws_lambda_powertools.utilities.parser import ValidationError, event_parser, parse -from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.utils import load_event - - -def assert_s3(event: S3Model): - records = list(event.Records) - assert len(records) == 1 - record: S3RecordModel = records[0] - assert record.eventVersion == "2.1" - assert record.eventSource == "aws:s3" - assert record.awsRegion == "us-east-2" - convert_time = int(round(record.eventTime.timestamp() * 1000)) - assert convert_time == 1567539447192 - assert record.eventName == "ObjectCreated:Put" - user_identity = record.userIdentity - assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2" - request_parameters = record.requestParameters - assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" - assert record.responseElements.x_amz_request_id == "D82B88E5F771F645" - assert ( - record.responseElements.x_amz_id_2 - == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" - ) - s3 = record.s3 - assert s3.s3SchemaVersion == "1.0" - assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1" - bucket = s3.bucket - assert bucket.name == "lambda-artifacts-deafc19498e3f2df" - assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E" - assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" - assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.size == 1305107 - assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.versionId is None - assert s3.object.sequencer == "0C0F6F405D6ED209E1" - assert record.glacierEventData is None - - -@event_parser(model=S3Model) -def handle_s3(event: S3Model, _: LambdaContext): - assert_s3(event) - - -@event_parser(model=S3Model) -def handle_s3_glacier(event: S3Model, _: LambdaContext): - records = list(event.Records) - assert 
len(records) == 1 - record: S3RecordModel = records[0] - assert record.eventVersion == "2.1" - assert record.eventSource == "aws:s3" - assert record.awsRegion == "us-east-2" - convert_time = int(round(record.eventTime.timestamp() * 1000)) - assert convert_time == 1567539447192 - assert record.eventName == "ObjectCreated:Put" - user_identity = record.userIdentity - assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2" - request_parameters = record.requestParameters - assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" - assert record.responseElements.x_amz_request_id == "D82B88E5F771F645" - assert ( - record.responseElements.x_amz_id_2 - == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" - ) - s3 = record.s3 - assert s3.s3SchemaVersion == "1.0" - assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1" - bucket = s3.bucket - assert bucket.name == "lambda-artifacts-deafc19498e3f2df" - assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E" - assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" - assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.size == 1305107 - assert s3.object.eTag == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.versionId is None - assert s3.object.sequencer == "0C0F6F405D6ED209E1" - assert record.glacierEventData is not None - convert_time = int( - round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000), - ) - assert convert_time == 60000 - assert record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass == "standard" - - -def test_s3_trigger_event(): - event_dict = load_event("s3Event.json") - handle_s3(event_dict, LambdaContext()) - - -def test_s3_glacier_trigger_event(): - event_dict = load_event("s3EventGlacier.json") - handle_s3_glacier(event_dict, LambdaContext()) - - -def test_s3_empty_object(): - event_dict = load_event("s3Event.json") - event_dict["Records"][0]["s3"]["object"]["size"] = 0 - parse(event=event_dict, model=S3Model) - - -def test_s3_none_object_size_failed_validation(): - event_dict = load_event("s3Event.json") - event_dict["Records"][0]["s3"]["object"]["size"] = None - with pytest.raises(ValidationError): - parse(event=event_dict, model=S3Model) - - -def test_s3_none_etag_value_failed_validation(): - event_dict = load_event("s3Event.json") - event_dict["Records"][0]["s3"]["object"]["eTag"] = None - with pytest.raises(ValidationError): - parse(event=event_dict, model=S3Model) - - -@event_parser(model=S3Model) -def handle_s3_delete_object(event: S3Model, _: LambdaContext): - records = list(event.Records) - assert len(records) == 1 - record: S3RecordModel = records[0] - assert record.eventVersion == "2.1" - assert record.eventSource == "aws:s3" - assert record.awsRegion == "us-east-2" - convert_time = int(round(record.eventTime.timestamp() * 1000)) - assert convert_time == 1567539447192 - assert record.eventName == "ObjectRemoved:Delete" - user_identity = record.userIdentity - assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2" - request_parameters = record.requestParameters - assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" - assert record.responseElements.x_amz_request_id == "D82B88E5F771F645" - assert ( - record.responseElements.x_amz_id_2 - == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=" - ) - s3 = record.s3 - assert s3.s3SchemaVersion == "1.0" - assert s3.configurationId == 
"828aa6fc-f7b5-4305-8584-487c791949c1" - bucket = s3.bucket - assert bucket.name == "lambda-artifacts-deafc19498e3f2df" - assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E" - assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df" - assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b" - assert s3.object.size is None - assert s3.object.eTag is None - assert s3.object.versionId is None - assert s3.object.sequencer == "0C0F6F405D6ED209E1" - assert record.glacierEventData is None - - -def test_s3_trigger_event_delete_object(): - event_dict = load_event("s3EventDeleteObject.json") - handle_s3_delete_object(event_dict, LambdaContext()) diff --git a/tests/functional/parser/test_ses.py b/tests/functional/parser/test_ses.py deleted file mode 100644 index d434e2350f8..00000000000 --- a/tests/functional/parser/test_ses.py +++ /dev/null @@ -1,49 +0,0 @@ -from aws_lambda_powertools.utilities.parser import event_parser -from aws_lambda_powertools.utilities.parser.models import SesModel, SesRecordModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.utils import load_event - - -@event_parser(model=SesModel) -def handle_ses(event: SesModel, _: LambdaContext): - expected_address = "johndoe@example.com" - records = event.Records - record: SesRecordModel = records[0] - assert record.eventSource == "aws:ses" - assert record.eventVersion == "1.0" - mail = record.ses.mail - convert_time = int(round(mail.timestamp.timestamp() * 1000)) - assert convert_time == 0 - assert mail.source == "janedoe@example.com" - assert mail.messageId == "o3vrnil0e2ic28tr" - assert mail.destination == [expected_address] - assert mail.headersTruncated is False - headers = list(mail.headers) - assert len(headers) == 10 - assert headers[0].name == "Return-Path" - assert headers[0].value == "" - common_headers = mail.commonHeaders - assert common_headers.returnPath == "janedoe@example.com" - assert common_headers.header_from == ["Jane Doe "] - assert common_headers.date == "Wed, 7 Oct 2015 12:34:56 -0700" - assert common_headers.to == [expected_address] - assert common_headers.messageId == "<0123456789example.com>" - assert common_headers.subject == "Test Subject" - receipt = record.ses.receipt - convert_time = int(round(receipt.timestamp.timestamp() * 1000)) - assert convert_time == 0 - assert receipt.processingTimeMillis == 574 - assert receipt.recipients == [expected_address] - assert receipt.spamVerdict.status == "PASS" - assert receipt.virusVerdict.status == "PASS" - assert receipt.spfVerdict.status == "PASS" - assert receipt.dmarcVerdict.status == "PASS" - action = receipt.action - assert action.type == "Lambda" - assert action.functionArn == "arn:aws:lambda:us-west-2:012345678912:function:Example" - assert action.invocationType == "Event" - - -def test_ses_trigger_event(): - event_dict = load_event("sesEvent.json") - handle_ses(event_dict, LambdaContext()) diff --git a/tests/functional/parser/test_sns.py b/tests/functional/parser/test_sns.py deleted file mode 100644 index 617de487748..00000000000 --- a/tests/functional/parser/test_sns.py +++ /dev/null @@ -1,128 +0,0 @@ -import json -from typing import Any, List - -import pytest - -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness -from tests.functional.utils import load_event -from 
tests.functional.validator.conftest import sns_event # noqa: F401 - - -@event_parser(model=MySnsBusiness, envelope=envelopes.SnsEnvelope) -def handle_sns_json_body(event: List[MySnsBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].message == "hello world" - assert event[0].username == "lessa" - - -def test_handle_sns_trigger_event_json_body(sns_event): # noqa: F811 - handle_sns_json_body(sns_event, LambdaContext()) - - -def test_validate_event_does_not_conform_with_model(): - event: Any = {"invalid": "event"} - - with pytest.raises(ValidationError): - handle_sns_json_body(event, LambdaContext()) - - -def test_validate_event_does_not_conform_user_json_string_with_model(): - event: Any = { - "Records": [ - { - "EventVersion": "1.0", - "EventSubscriptionArn": "arn:aws:sns:us-east-2:123456789012:sns-la ...", - "EventSource": "aws:sns", - "Sns": { - "SignatureVersion": "1", - "Timestamp": "2019-01-02T12:45:07.000Z", - "Signature": "tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==", - "SigningCertUrl": "https://sns.us-east-2.amazonaws.com/SimpleNotificat ...", - "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", - "Message": "not a valid JSON!", - "MessageAttributes": {"Test": {"Type": "String", "Value": "TestString"}}, - "Type": "Notification", - "UnsubscribeUrl": "https://sns.us-east-2.amazonaws.com/?Action=Unsubscri ...", - "TopicArn": "arn:aws:sns:us-east-2:123456789012:sns-lambda", - "Subject": "TestInvoke", - }, - }, - ], - } - - with pytest.raises(ValidationError): - handle_sns_json_body(event, LambdaContext()) - - -@event_parser(model=MyAdvancedSnsBusiness) -def handle_sns_no_envelope(event: MyAdvancedSnsBusiness, _: LambdaContext): - records = event.Records - record = records[0] - - assert len(records) == 1 - assert record.EventVersion == "1.0" - assert record.EventSubscriptionArn == "arn:aws:sns:us-east-2:123456789012:sns-la ..." - assert record.EventSource == "aws:sns" - assert record.Sns.Type == "Notification" - assert record.Sns.UnsubscribeUrl.scheme == "https" - assert record.Sns.UnsubscribeUrl.host == "sns.us-east-2.amazonaws.com" - assert record.Sns.UnsubscribeUrl.query == "Action=Unsubscribe" - assert record.Sns.TopicArn == "arn:aws:sns:us-east-2:123456789012:sns-lambda" - assert record.Sns.Subject == "TestInvoke" - assert record.Sns.SignatureVersion == "1" - convert_time = int(round(record.Sns.Timestamp.timestamp() * 1000)) - assert convert_time == 1546433107000 - assert record.Sns.Signature == "tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==" - assert record.Sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" - assert record.Sns.SigningCertUrl.scheme == "https" - assert record.Sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" - assert record.Sns.SigningCertUrl.path == "/SimpleNotification" - assert record.Sns.MessageId == "95df01b4-ee98-5cb9-9903-4c221d41eb5e" - assert record.Sns.Message == "Hello from SNS!" 
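`envelopes.SnsEnvelope`, used by the JSON-body tests above, effectively performs a double decode: `Sns.Message` is itself a JSON string that gets fed into the user model. The inner step in isolation, with the model redefined inline to stay self-contained:

```python
import json

from pydantic import BaseModel


class MySnsBusinessSketch(BaseModel):
    message: str
    username: str


sns_message = '{"message": "hello world", "username": "lessa"}'  # an Sns.Message payload
model = MySnsBusinessSketch(**json.loads(sns_message))
assert model.message == "hello world"
assert model.username == "lessa"
```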
- attrib_dict = record.Sns.MessageAttributes - assert len(attrib_dict) == 2 - assert attrib_dict["Test"].Type == "String" - assert attrib_dict["Test"].Value == "TestString" - assert attrib_dict["TestBinary"].Type == "Binary" - assert attrib_dict["TestBinary"].Value == "TestBinary" - - -def test_handle_sns_trigger_event_no_envelope(): - event_dict = load_event("snsEvent.json") - handle_sns_no_envelope(event_dict, LambdaContext()) - - -@event_parser(model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) -def handle_sns_sqs_json_body(event: List[MySnsBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].message == "hello world" - assert event[0].username == "lessa" - - -def test_handle_sns_sqs_trigger_event_json_body(): # noqa: F811 - event_dict = load_event("snsSqsEvent.json") - handle_sns_sqs_json_body(event_dict, LambdaContext()) - - -def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url(): - # GIVEN an event is tampered with a missing UnsubscribeURL - event_dict = load_event("snsSqsEvent.json") - payload = json.loads(event_dict["Records"][0]["body"]) - payload.pop("UnsubscribeURL") - event_dict["Records"][0]["body"] = json.dumps(payload) - - # WHEN parsing the payload - # THEN raise a ValidationError error - with pytest.raises(ValidationError): - handle_sns_sqs_json_body(event_dict, LambdaContext()) - - -def test_handle_sns_sqs_fifo_trigger_event_json_body(): - event_dict = load_event("snsSqsFifoEvent.json") - handle_sns_sqs_json_body(event_dict, LambdaContext()) diff --git a/tests/functional/parser/schemas.py b/tests/unit/parser/schemas.py similarity index 100% rename from tests/functional/parser/schemas.py rename to tests/unit/parser/schemas.py diff --git a/tests/unit/parser/test_alb.py b/tests/unit/parser/test_alb.py new file mode 100644 index 00000000000..6cd109941e9 --- /dev/null +++ b/tests/unit/parser/test_alb.py @@ -0,0 +1,24 @@ +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError +from aws_lambda_powertools.utilities.parser.models import AlbModel +from tests.functional.utils import load_event + + +def test_alb_trigger_event(): + raw_event = load_event("albEvent.json") + parsed_event: AlbModel = AlbModel(**raw_event) + + assert parsed_event.requestContext.elb.targetGroupArn == raw_event["requestContext"]["elb"]["targetGroupArn"] + assert parsed_event.httpMethod == raw_event["httpMethod"] + assert parsed_event.path == raw_event["path"] + assert parsed_event.queryStringParameters == raw_event["queryStringParameters"] + assert parsed_event.headers == raw_event["headers"] + assert parsed_event.body == raw_event["body"] + assert not parsed_event.isBase64Encoded + + +def test_validate_event_does_not_conform_with_model(): + event = {"invalid": "event"} + with pytest.raises(ValidationError): + AlbModel(**event) diff --git a/tests/unit/parser/test_apigw.py b/tests/unit/parser/test_apigw.py new file mode 100644 index 00000000000..a65d181cc54 --- /dev/null +++ b/tests/unit/parser/test_apigw.py @@ -0,0 +1,148 @@ +import pytest +from pydantic import ValidationError + +from aws_lambda_powertools.utilities.parser import envelopes, parse +from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventModel +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyApiGatewayBusiness + + +def test_apigw_event_with_envelope(): + raw_event = load_event("apiGatewayProxyEvent.json") + raw_event["body"] = '{"message": "Hello", "username": "Ran"}' + parsed_event: MyApiGatewayBusiness = 
parse( + event=raw_event, + model=MyApiGatewayBusiness, + envelope=envelopes.ApiGatewayEnvelope, + ) + + assert parsed_event.message == "Hello" + assert parsed_event.username == "Ran" + + +def test_apigw_event(): + raw_event = load_event("apiGatewayProxyEvent.json") + parsed_event: APIGatewayProxyEventModel = APIGatewayProxyEventModel(**raw_event) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.resource == raw_event["resource"] + assert parsed_event.path == raw_event["path"] + assert parsed_event.headers == raw_event["headers"] + assert parsed_event.multiValueHeaders == raw_event["multiValueHeaders"] + assert parsed_event.queryStringParameters == raw_event["queryStringParameters"] + assert parsed_event.multiValueQueryStringParameters == raw_event["multiValueQueryStringParameters"] + + request_context = parsed_event.requestContext + assert request_context.accountId == raw_event["requestContext"]["accountId"] + assert request_context.apiId == raw_event["requestContext"]["apiId"] + + authorizer = request_context.authorizer + assert authorizer.claims is None + assert authorizer.scopes is None + + assert request_context.domainName == raw_event["requestContext"]["domainName"] + assert request_context.domainPrefix == raw_event["requestContext"]["domainPrefix"] + assert request_context.extendedRequestId == raw_event["requestContext"]["extendedRequestId"] + assert request_context.httpMethod == raw_event["requestContext"]["httpMethod"] + + identity = request_context.identity + assert identity.accessKey == raw_event["requestContext"]["identity"]["accessKey"] + assert identity.accountId == raw_event["requestContext"]["identity"]["accountId"] + assert identity.caller == raw_event["requestContext"]["identity"]["caller"] + assert ( + identity.cognitoAuthenticationProvider + == raw_event["requestContext"]["identity"]["cognitoAuthenticationProvider"] + ) + assert identity.cognitoAuthenticationType == raw_event["requestContext"]["identity"]["cognitoAuthenticationType"] + assert identity.cognitoIdentityId == raw_event["requestContext"]["identity"]["cognitoIdentityId"] + assert identity.cognitoIdentityPoolId == raw_event["requestContext"]["identity"]["cognitoIdentityPoolId"] + assert identity.principalOrgId == raw_event["requestContext"]["identity"]["principalOrgId"] + assert str(identity.sourceIp) == raw_event["requestContext"]["identity"]["sourceIp"] + assert identity.user == raw_event["requestContext"]["identity"]["user"] + assert identity.userAgent == raw_event["requestContext"]["identity"]["userAgent"] + assert identity.userArn == raw_event["requestContext"]["identity"]["userArn"] + assert identity.clientCert is not None + assert identity.clientCert.clientCertPem == raw_event["requestContext"]["identity"]["clientCert"]["clientCertPem"] + assert identity.clientCert.subjectDN == raw_event["requestContext"]["identity"]["clientCert"]["subjectDN"] + assert identity.clientCert.issuerDN == raw_event["requestContext"]["identity"]["clientCert"]["issuerDN"] + assert identity.clientCert.serialNumber == raw_event["requestContext"]["identity"]["clientCert"]["serialNumber"] + assert ( + identity.clientCert.validity.notBefore + == raw_event["requestContext"]["identity"]["clientCert"]["validity"]["notBefore"] + ) + assert ( + identity.clientCert.validity.notAfter + == raw_event["requestContext"]["identity"]["clientCert"]["validity"]["notAfter"] + ) + + assert request_context.path == raw_event["requestContext"]["path"] + assert request_context.protocol == 
raw_event["requestContext"]["protocol"] + assert request_context.requestId == raw_event["requestContext"]["requestId"] + assert request_context.requestTime == raw_event["requestContext"]["requestTime"] + convert_time = int(round(request_context.requestTimeEpoch.timestamp() * 1000)) + assert convert_time == 1583349317135 + assert request_context.resourceId == raw_event["requestContext"]["resourceId"] + assert request_context.resourcePath == raw_event["requestContext"]["resourcePath"] + assert request_context.stage == raw_event["requestContext"]["stage"] + + assert parsed_event.pathParameters == raw_event["pathParameters"] + assert parsed_event.stageVariables == raw_event["stageVariables"] + assert parsed_event.body == raw_event["body"] + assert parsed_event.isBase64Encoded == raw_event["isBase64Encoded"] + + assert request_context.connectedAt is None + assert request_context.connectionId is None + assert request_context.eventType is None + assert request_context.messageDirection is None + assert request_context.messageId is None + assert request_context.routeKey is None + assert request_context.operationName is None + assert identity.apiKey is None + assert identity.apiKeyId is None + + +def test_apigw_event_with_invalid_websocket_request(): + # GIVEN an event with an eventType != MESSAGE and has a messageId + event = { + "resource": "/", + "path": "/", + "httpMethod": "GET", + "headers": {}, + "multiValueHeaders": {}, + "isBase64Encoded": False, + "body": "Foo!", + "requestContext": { + "accountId": "1234", + "apiId": "myApi", + "httpMethod": "GET", + "identity": { + "sourceIp": "127.0.0.1", + }, + "path": "/", + "protocol": "Https", + "requestId": "1234", + "requestTime": "2018-09-07T16:20:46Z", + "requestTimeEpoch": 1536992496000, + "resourcePath": "/", + "stage": "test", + "eventType": "DISCONNECT", + "messageId": "messageId", + }, + } + + # WHEN calling event_parser with APIGatewayProxyEventModel + with pytest.raises(ValidationError) as err: + APIGatewayProxyEventModel(**event) + + # THEN raise TypeError for invalid event + errors = err.value.errors() + assert len(errors) == 1 + expected_msg = "messageId is available only when the `eventType` is `MESSAGE`" + assert errors[0]["msg"] == expected_msg + assert expected_msg in str(err.value) + + +def test_apigw_event_empty_body(): + event = load_event("apiGatewayProxyEvent.json") + event["body"] = None + parse(event=event, model=APIGatewayProxyEventModel) diff --git a/tests/unit/parser/test_apigwv2.py b/tests/unit/parser/test_apigwv2.py new file mode 100644 index 00000000000..9ffc7f525bc --- /dev/null +++ b/tests/unit/parser/test_apigwv2.py @@ -0,0 +1,106 @@ +from aws_lambda_powertools.utilities.parser import envelopes, parse +from aws_lambda_powertools.utilities.parser.models import ( + APIGatewayProxyEventV2Model, + RequestContextV2, + RequestContextV2Authorizer, +) +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyApiGatewayBusiness + + +def test_apigw_v2_event_with_envelope(): + raw_event = load_event("apiGatewayProxyV2Event.json") + raw_event["body"] = '{"message": "Hello", "username": "Ran"}' + parsed_event: MyApiGatewayBusiness = parse( + event=raw_event, + model=MyApiGatewayBusiness, + envelope=envelopes.ApiGatewayV2Envelope, + ) + + assert parsed_event.message == "Hello" + assert parsed_event.username == "Ran" + + +def test_apigw_v2_event_jwt_authorizer(): + raw_event = load_event("apiGatewayProxyV2Event.json") + parsed_event: APIGatewayProxyEventV2Model = 
APIGatewayProxyEventV2Model(**raw_event) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.routeKey == raw_event["routeKey"] + assert parsed_event.rawPath == raw_event["rawPath"] + assert parsed_event.rawQueryString == raw_event["rawQueryString"] + assert parsed_event.cookies == raw_event["cookies"] + assert parsed_event.cookies[0] == "cookie1" + assert parsed_event.headers == raw_event["headers"] + assert parsed_event.queryStringParameters == raw_event["queryStringParameters"] + assert parsed_event.queryStringParameters.get("parameter2") == raw_event["queryStringParameters"]["parameter2"] + + request_context = parsed_event.requestContext + assert request_context.accountId == raw_event["requestContext"]["accountId"] + assert request_context.apiId == raw_event["requestContext"]["apiId"] + assert request_context.authorizer.jwt.claims == raw_event["requestContext"]["authorizer"]["jwt"]["claims"] + assert request_context.authorizer.jwt.scopes == raw_event["requestContext"]["authorizer"]["jwt"]["scopes"] + assert request_context.domainName == raw_event["requestContext"]["domainName"] + assert request_context.domainPrefix == raw_event["requestContext"]["domainPrefix"] + + http = request_context.http + raw_http = raw_event["requestContext"]["http"] + assert http.method == raw_http["method"] + assert http.path == raw_http["path"] + assert http.protocol == raw_http["protocol"] + assert str(http.sourceIp) == raw_http["sourceIp"] + assert http.userAgent == raw_http["userAgent"] + + assert request_context.requestId == raw_event["requestContext"]["requestId"] + assert request_context.routeKey == raw_event["requestContext"]["routeKey"] + assert request_context.stage == raw_event["requestContext"]["stage"] + assert request_context.time == raw_event["requestContext"]["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == raw_event["requestContext"]["timeEpoch"] + assert parsed_event.body == raw_event["body"] + assert parsed_event.pathParameters == raw_event["pathParameters"] + assert parsed_event.isBase64Encoded == raw_event["isBase64Encoded"] + assert parsed_event.stageVariables == raw_event["stageVariables"] + + +def test_api_gateway_proxy_v2_event_lambda_authorizer(): + raw_event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") + parsed_event: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model(**raw_event) + + request_context: RequestContextV2 = parsed_event.requestContext + assert request_context is not None + + lambda_props: RequestContextV2Authorizer = request_context.authorizer.lambda_value + assert lambda_props is not None + assert lambda_props["key"] == raw_event["requestContext"]["authorizer"]["lambda"]["key"] + + +def test_api_gateway_proxy_v2_event_iam_authorizer(): + raw_event = load_event("apiGatewayProxyV2IamEvent.json") + parsed_event: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model(**raw_event) + + iam = parsed_event.requestContext.authorizer.iam + raw_iam = raw_event["requestContext"]["authorizer"]["iam"] + assert iam is not None + assert iam.accessKey == raw_iam["accessKey"] + assert iam.accountId == raw_iam["accountId"] + assert iam.callerId == raw_iam["callerId"] + assert iam.cognitoIdentity.amr == raw_iam["cognitoIdentity"]["amr"] + assert iam.cognitoIdentity.identityId == raw_iam["cognitoIdentity"]["identityId"] + assert iam.cognitoIdentity.identityPoolId == raw_iam["cognitoIdentity"]["identityPoolId"] + assert iam.principalOrgId == raw_iam["principalOrgId"] + assert 
iam.userArn == raw_iam["userArn"]
+    assert iam.userId == raw_iam["userId"]
+
+
+def test_apigw_event_empty_body():
+    raw_event = load_event("apiGatewayProxyV2Event.json")
+    raw_event.pop("body")  # API GW v2 removes certain keys when no data is passed
+    parse(event=raw_event, model=APIGatewayProxyEventV2Model)
+
+
+def test_apigw_event_empty_query_strings():
+    raw_event = load_event("apiGatewayProxyV2Event.json")
+    raw_event["rawQueryString"] = ""
+    raw_event.pop("queryStringParameters")  # API GW v2 removes certain keys when no data is passed
+    parse(event=raw_event, model=APIGatewayProxyEventV2Model)
diff --git a/tests/unit/parser/test_cloudwatch.py b/tests/unit/parser/test_cloudwatch.py
new file mode 100644
index 00000000000..bc8bf0776f9
--- /dev/null
+++ b/tests/unit/parser/test_cloudwatch.py
@@ -0,0 +1,95 @@
+import base64
+import json
+import zlib
+from typing import Any
+
+import pytest
+
+from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse
+from aws_lambda_powertools.utilities.parser.models import (
+    CloudWatchLogsLogEvent,
+    CloudWatchLogsModel,
+)
+from tests.functional.utils import load_event
+from tests.unit.parser.schemas import MyCloudWatchBusiness
+
+
+def decode_cloudwatch_raw_event(event: str):
+    payload = base64.b64decode(event)
+    uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32)
+    return json.loads(uncompressed.decode("utf-8"))
+
+
+def test_validate_event_user_model_with_envelope():
+    my_log_message = {"my_message": "hello", "user": "test"}
+    inner_event_dict = {
+        "messageType": "DATA_MESSAGE",
+        "owner": "123456789123",
+        "logGroup": "testLogGroup",
+        "logStream": "testLogStream",
+        "subscriptionFilters": ["testFilter"],
+        "logEvents": [{"id": "eventId1", "timestamp": 1440442987000, "message": json.dumps(my_log_message)}],
+    }
+    dict_str = json.dumps(inner_event_dict)
+    compressed_str = zlib.compress(str.encode(dict_str), -1)
+    raw_event = {"awslogs": {"data": base64.b64encode(compressed_str)}}
+
+    parsed_event: MyCloudWatchBusiness = parse(
+        event=raw_event,
+        model=MyCloudWatchBusiness,
+        envelope=envelopes.CloudWatchLogsEnvelope,
+    )
+
+    assert len(parsed_event) == 1
+    log: MyCloudWatchBusiness = parsed_event[0]
+    assert log.my_message == "hello"
+    assert log.user == "test"
+
+
+def test_validate_event_does_not_conform_with_user_dict_model():
+    event_dict = load_event("cloudWatchLogEvent.json")
+    with pytest.raises(ValidationError):
+        MyCloudWatchBusiness(**event_dict)
+
+
+def test_handle_cloudwatch_trigger_event_no_envelope():
+    raw_event = load_event("cloudWatchLogEvent.json")
+    parsed_event: CloudWatchLogsModel = CloudWatchLogsModel(**raw_event)
+
+    raw_event_decoded = decode_cloudwatch_raw_event(raw_event["awslogs"]["data"])
+
+    assert parsed_event.awslogs.decoded_data.owner == raw_event_decoded["owner"]
+    assert parsed_event.awslogs.decoded_data.logGroup == raw_event_decoded["logGroup"]
+    assert parsed_event.awslogs.decoded_data.logStream == raw_event_decoded["logStream"]
+    assert parsed_event.awslogs.decoded_data.subscriptionFilters == raw_event_decoded["subscriptionFilters"]
+    assert parsed_event.awslogs.decoded_data.messageType == raw_event_decoded["messageType"]
+
+    assert len(parsed_event.awslogs.decoded_data.logEvents) == 2
+
+    log_record: CloudWatchLogsLogEvent = parsed_event.awslogs.decoded_data.logEvents[0]
+    raw_log_record = raw_event_decoded["logEvents"][0]
+    assert log_record.id == raw_log_record["id"]
+    convert_time = int(round(log_record.timestamp.timestamp() * 1000))
+    assert convert_time == raw_log_record["timestamp"]
+    assert log_record.message == raw_log_record["message"]
+
+    log_record: CloudWatchLogsLogEvent = parsed_event.awslogs.decoded_data.logEvents[1]
+    raw_log_record = raw_event_decoded["logEvents"][1]
+    assert log_record.id == raw_log_record["id"]
+    convert_time = int(round(log_record.timestamp.timestamp() * 1000))
+    assert convert_time == raw_log_record["timestamp"]
+    assert log_record.message == raw_log_record["message"]
+
+
+def test_handle_invalid_cloudwatch_trigger_event_no_envelope():
+    raw_event: Any = {"awslogs": {"data": "invalid_data"}}
+    with pytest.raises(ValidationError) as context:
+        CloudWatchLogsModel(**raw_event)
+
+    assert context.value.errors()[0]["msg"] == "unable to decompress data"
+
+
+def test_handle_invalid_event_with_envelope():
+    empty_dict = {}
+    with pytest.raises(ValidationError):
+        CloudWatchLogsModel(**empty_dict)
diff --git a/tests/unit/parser/test_dynamodb.py b/tests/unit/parser/test_dynamodb.py
new file mode 100644
index 00000000000..57bd2be5f0a
--- /dev/null
+++ b/tests/unit/parser/test_dynamodb.py
@@ -0,0 +1,84 @@
+import pytest
+
+from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse
+from tests.functional.utils import load_event
+from tests.unit.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness
+
+
+def test_dynamo_db_stream_trigger_event():
+    raw_event = load_event("dynamoStreamEvent.json")
+    parsed_event: MyDynamoBusiness = parse(
+        event=raw_event,
+        model=MyDynamoBusiness,
+        envelope=envelopes.DynamoDBStreamEnvelope,
+    )
+
+    assert len(parsed_event) == 2
+
+    # record index 0
+    old_image = parsed_event[0]["OldImage"]
+    assert old_image is None
+
+    new_image = parsed_event[0]["NewImage"]
+    new_image_raw = raw_event["Records"][0]["dynamodb"]["NewImage"]
+    assert new_image.Message["S"] == new_image_raw["Message"]["S"]
+    assert new_image.Id["N"] == float(new_image_raw["Id"]["N"])
+
+    # record index 1
+    old_image = parsed_event[1]["OldImage"]
+    old_image_raw = raw_event["Records"][1]["dynamodb"]["OldImage"]
+    assert old_image.Message["S"] == old_image_raw["Message"]["S"]
+    assert old_image.Id["N"] == float(old_image_raw["Id"]["N"])
+
+    new_image = parsed_event[1]["NewImage"]
+    new_image_raw = raw_event["Records"][1]["dynamodb"]["NewImage"]
+    assert new_image.Message["S"] == new_image_raw["Message"]["S"]
+    assert new_image.Id["N"] == float(new_image_raw["Id"]["N"])
+
+
+def test_dynamo_db_stream_trigger_event_no_envelope():
+    raw_event = load_event("dynamoStreamEvent.json")
+    parsed_event: MyAdvancedDynamoBusiness = MyAdvancedDynamoBusiness(**raw_event)
+
+    records = parsed_event.Records
+    record = records[0]
+    raw_record = raw_event["Records"][0]
+
+    assert record.awsRegion == raw_record["awsRegion"]
+    assert record.eventID == raw_record["eventID"]
+    assert record.eventName == raw_record["eventName"]
+    assert record.eventSource == raw_record["eventSource"]
+    assert record.eventSourceARN == raw_record["eventSourceARN"]
+    assert record.eventVersion == float(raw_record["eventVersion"])
+    assert record.userIdentity is None
+
+    dynamodb = record.dynamodb
+    raw_dynamodb = raw_record["dynamodb"]
+    assert dynamodb is not None
+    assert dynamodb.ApproximateCreationDateTime is None
+    assert dynamodb.OldImage is None
+    assert dynamodb.SequenceNumber == raw_dynamodb["SequenceNumber"]
+    assert dynamodb.SizeBytes == raw_dynamodb["SizeBytes"]
+    assert dynamodb.StreamViewType == raw_dynamodb["StreamViewType"]
+
+    keys = dynamodb.Keys
+    raw_keys = raw_dynamodb["Keys"]
+    assert keys is not None
+ id_key = keys["Id"] + assert id_key["N"] == raw_keys["Id"]["N"] + + message_key = dynamodb.NewImage.Message + assert message_key is not None + assert message_key["S"] == "New item!" + + +def test_validate_event_does_not_conform_with_model_no_envelope(): + raw_event: dict = {"hello": "s"} + with pytest.raises(ValidationError): + MyAdvancedDynamoBusiness(**raw_event) + + +def test_validate_event_does_not_conform_with_model(): + raw_event: dict = {"hello": "s"} + with pytest.raises(ValidationError): + parse(event=raw_event, model=MyDynamoBusiness, envelope=envelopes.DynamoDBStreamEnvelope) diff --git a/tests/unit/parser/test_eventbridge.py b/tests/unit/parser/test_eventbridge.py new file mode 100644 index 00000000000..7f250ecdb83 --- /dev/null +++ b/tests/unit/parser/test_eventbridge.py @@ -0,0 +1,53 @@ +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse +from tests.functional.utils import load_event +from tests.unit.parser.schemas import ( + MyAdvancedEventbridgeBusiness, + MyEventbridgeBusiness, +) + + +def test_handle_eventbridge_trigger_event(): + raw_event = load_event("eventBridgeEvent.json") + parsed_event: MyEventbridgeBusiness = parse( + event=raw_event, + model=MyEventbridgeBusiness, + envelope=envelopes.EventBridgeEnvelope, + ) + + assert parsed_event.instance_id == raw_event["detail"]["instance_id"] + assert parsed_event.state == raw_event["detail"]["state"] + + +def test_validate_event_does_not_conform_with_user_dict_model(): + raw_event = load_event("eventBridgeEvent.json") + + raw_event.pop("version") + + with pytest.raises(ValidationError): + parse(event=raw_event, model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) + + +def test_handle_eventbridge_trigger_event_no_envelope(): + raw_event = load_event("eventBridgeEvent.json") + parsed_event: MyAdvancedEventbridgeBusiness = MyAdvancedEventbridgeBusiness(**raw_event) + + assert parsed_event.detail.instance_id == raw_event["detail"]["instance_id"] + assert parsed_event.detail.state == raw_event["detail"]["state"] + assert parsed_event.id == raw_event["id"] + assert parsed_event.version == raw_event["version"] + assert parsed_event.account == raw_event["account"] + time_str = parsed_event.time.strftime("%Y-%m-%dT%H:%M:%SZ") + assert time_str == raw_event["time"] + assert parsed_event.region == raw_event["region"] + assert parsed_event.resources == raw_event["resources"] + assert parsed_event.source == raw_event["source"] + assert parsed_event.detail_type == raw_event["detail-type"] + assert parsed_event.replay_name == raw_event["replay-name"] + + +def test_handle_invalid_event_with_eventbridge_envelope(): + empty_event = {} + with pytest.raises(ValidationError): + parse(event=empty_event, model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) diff --git a/tests/unit/parser/test_kafka.py b/tests/unit/parser/test_kafka.py new file mode 100644 index 00000000000..1f229c1db6e --- /dev/null +++ b/tests/unit/parser/test_kafka.py @@ -0,0 +1,82 @@ +from aws_lambda_powertools.utilities.parser import envelopes, parse +from aws_lambda_powertools.utilities.parser.models import ( + KafkaMskEventModel, + KafkaRecordModel, + KafkaSelfManagedEventModel, +) +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyLambdaKafkaBusiness + + +def test_kafka_msk_event_with_envelope(): + raw_event = load_event("kafkaEventMsk.json") + parsed_event: MyLambdaKafkaBusiness = parse( + event=raw_event, + model=MyLambdaKafkaBusiness, + 
envelope=envelopes.KafkaEnvelope, + ) + + assert parsed_event[0].key == "value" + assert len(parsed_event) == 1 + + +def test_kafka_self_managed_event_with_envelope(): + raw_event = load_event("kafkaEventSelfManaged.json") + parsed_event: MyLambdaKafkaBusiness = parse( + event=raw_event, + model=MyLambdaKafkaBusiness, + envelope=envelopes.KafkaEnvelope, + ) + + assert parsed_event[0].key == "value" + assert len(parsed_event) == 1 + + +def test_self_managed_kafka_event(): + raw_event = load_event("kafkaEventSelfManaged.json") + parsed_event: KafkaSelfManagedEventModel = KafkaSelfManagedEventModel(**raw_event) + + assert parsed_event.eventSource == raw_event["eventSource"] + + assert parsed_event.bootstrapServers == raw_event["bootstrapServers"].split(",") + + records = list(parsed_event.records["mytopic-0"]) + assert len(records) == 1 + record: KafkaRecordModel = records[0] + raw_record = raw_event["records"]["mytopic-0"][0] + assert record.topic == raw_record["topic"] + assert record.partition == raw_record["partition"] + assert record.offset == raw_record["offset"] + assert record.timestamp is not None + convert_time = int(round(record.timestamp.timestamp() * 1000)) + assert convert_time == raw_record["timestamp"] + assert record.timestampType == raw_record["timestampType"] + assert record.key == b"recordKey" + assert record.value == '{"key":"value"}' + assert len(record.headers) == 1 + assert record.headers[0]["headerKey"] == b"headerValue" + + +def test_kafka_msk_event(): + raw_event = load_event("kafkaEventMsk.json") + parsed_event: KafkaMskEventModel = KafkaMskEventModel(**raw_event) + + assert parsed_event.eventSource == raw_event["eventSource"] + assert parsed_event.bootstrapServers == raw_event["bootstrapServers"].split(",") + assert parsed_event.eventSourceArn == raw_event["eventSourceArn"] + + records = list(parsed_event.records["mytopic-0"]) + assert len(records) == 1 + record: KafkaRecordModel = records[0] + raw_record = raw_event["records"]["mytopic-0"][0] + assert record.topic == raw_record["topic"] + assert record.partition == raw_record["partition"] + assert record.offset == raw_record["offset"] + assert record.timestamp is not None + convert_time = int(round(record.timestamp.timestamp() * 1000)) + assert convert_time == raw_record["timestamp"] + assert record.timestampType == raw_record["timestampType"] + assert record.key == b"recordKey" + assert record.value == '{"key":"value"}' + assert len(record.headers) == 1 + assert record.headers[0]["headerKey"] == b"headerValue" diff --git a/tests/unit/parser/test_kinesis.py b/tests/unit/parser/test_kinesis.py new file mode 100644 index 00000000000..e8b1ae87378 --- /dev/null +++ b/tests/unit/parser/test_kinesis.py @@ -0,0 +1,108 @@ +import pytest + +from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError, envelopes, parse +from aws_lambda_powertools.utilities.parser.models import ( + KinesisDataStreamModel, + KinesisDataStreamRecordPayload, +) +from aws_lambda_powertools.utilities.parser.models.cloudwatch import ( + CloudWatchLogsDecode, +) +from aws_lambda_powertools.utilities.parser.models.kinesis import ( + extract_cloudwatch_logs_from_event, + extract_cloudwatch_logs_from_record, +) +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyKinesisBusiness + + +def test_kinesis_trigger_bad_base64_event(): + raw_event = load_event("kinesisStreamEvent.json") + + raw_event["Records"][0]["kinesis"]["data"] = "bad" + + with pytest.raises(ValidationError): + 
KinesisDataStreamModel(**raw_event)
+
+
+def test_kinesis_trigger_event():
+    raw_event = load_event("kinesisStreamEventOneRecord.json")
+    parsed_event: MyKinesisBusiness = parse(
+        event=raw_event,
+        model=MyKinesisBusiness,
+        envelope=envelopes.KinesisDataStreamEnvelope,
+    )
+
+    assert len(parsed_event) == 1
+    record: MyKinesisBusiness = parsed_event[0]
+    assert record.message == "test message"
+    assert record.username == "test"
+
+
+def test_kinesis_trigger_event_no_envelope():
+    raw_event = load_event("kinesisStreamEvent.json")
+    parsed_event: KinesisDataStreamModel = KinesisDataStreamModel(**raw_event)
+
+    records = parsed_event.Records
+    assert len(records) == 2
+    record = records[0]
+    raw_record = raw_event["Records"][0]
+
+    assert record.awsRegion == raw_record["awsRegion"]
+    assert record.eventID == raw_record["eventID"]
+    assert record.eventName == raw_record["eventName"]
+    assert record.eventSource == raw_record["eventSource"]
+    assert record.eventSourceARN == raw_record["eventSourceARN"]
+    assert record.eventVersion == raw_record["eventVersion"]
+    assert record.invokeIdentityArn == raw_record["invokeIdentityArn"]
+
+    kinesis: KinesisDataStreamRecordPayload = record.kinesis
+    assert kinesis.approximateArrivalTimestamp == raw_record["kinesis"]["approximateArrivalTimestamp"]
+    assert kinesis.kinesisSchemaVersion == raw_record["kinesis"]["kinesisSchemaVersion"]
+    assert kinesis.partitionKey == raw_record["kinesis"]["partitionKey"]
+    assert kinesis.sequenceNumber == raw_record["kinesis"]["sequenceNumber"]
+    assert kinesis.data == b"Hello, this is a test."
+
+
+def test_validate_event_does_not_conform_with_model_no_envelope():
+    raw_event: dict = {"hello": "s"}
+    with pytest.raises(ValidationError):
+        KinesisDataStreamModel(**raw_event)
+
+
+def test_validate_event_does_not_conform_with_model():
+    raw_event: dict = {"hello": "s"}
+    with pytest.raises(ValidationError):
+        parse(event=raw_event, model=MyKinesisBusiness, envelope=envelopes.KinesisDataStreamEnvelope)
+
+
+def test_kinesis_stream_event_cloudwatch_logs_data_extraction():
+    # GIVEN a KinesisDataStreamModel is instantiated with CloudWatch Logs compressed data
+    raw_event = load_event("kinesisStreamCloudWatchLogsEvent.json")
+    stream_data = KinesisDataStreamModel(**raw_event)
+    single_record = stream_data.Records[0]
+
+    # WHEN we try to extract CloudWatch Logs from KinesisDataStreamRecordPayload model
+    extracted_logs = extract_cloudwatch_logs_from_event(stream_data)
+    individual_logs = [extract_cloudwatch_logs_from_record(record) for record in stream_data.Records]
+    single_log = extract_cloudwatch_logs_from_record(single_record)
+
+    # THEN we should have extracted any potential logs as CloudWatchLogsDecode models
+    assert len(extracted_logs) == len(individual_logs)
+    assert isinstance(single_log, CloudWatchLogsDecode)
+
+
+def test_kinesis_stream_event_cloudwatch_logs_data_extraction_fails_with_custom_model():
+    # GIVEN a custom model replaces Kinesis Record Data bytes
+    class DummyModel(BaseModel):
+        ...
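+    # DummyModel is a stand-in for any non-bytes payload; assigning it to
+    # record.kinesis.data below should make the zlib decompression helper reject it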
+ + raw_event = load_event("kinesisStreamCloudWatchLogsEvent.json") + stream_data = KinesisDataStreamModel(**raw_event) + + # WHEN decompress_zlib_record_data_as_json is used + # THEN ValueError should be raised + with pytest.raises(ValueError, match="We can only decompress bytes data"): + for record in stream_data.Records: + record.kinesis.data = DummyModel() + record.decompress_zlib_record_data_as_json() diff --git a/tests/unit/parser/test_kinesis_firehose.py b/tests/unit/parser/test_kinesis_firehose.py new file mode 100644 index 00000000000..87eaef7ca9d --- /dev/null +++ b/tests/unit/parser/test_kinesis_firehose.py @@ -0,0 +1,123 @@ +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse +from aws_lambda_powertools.utilities.parser.models import ( + KinesisFirehoseModel, + KinesisFirehoseRecord, + KinesisFirehoseRecordMetadata, + KinesisFirehoseSqsModel, + KinesisFirehoseSqsRecord, +) +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyKinesisFirehoseBusiness + + +def test_firehose_sqs_wrapped_message_event(): + raw_event = load_event("kinesisFirehoseSQSEvent.json") + parsed_event: KinesisFirehoseSqsModel = KinesisFirehoseSqsModel(**raw_event) + + assert parsed_event.region == raw_event["region"] + assert parsed_event.invocationId == raw_event["invocationId"] + assert parsed_event.deliveryStreamArn == raw_event["deliveryStreamArn"] + + records = list(parsed_event.records) + assert len(records) == 1 + + record_01: KinesisFirehoseSqsRecord = records[0] + assert record_01.data.messageId == "5ab807d4-5644-4c55-97a3-47396635ac74" + assert record_01.data.receiptHandle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." + assert record_01.data.body == "Test message." + assert record_01.data.attributes.ApproximateReceiveCount == "1" + assert record_01.data.attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" + + +def test_firehose_trigger_event(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"].pop(0) # remove first item since the payload is bytes and we want to test payload json class + parsed_event: MyKinesisFirehoseBusiness = parse( + event=raw_event, + model=MyKinesisFirehoseBusiness, + envelope=envelopes.KinesisFirehoseEnvelope, + ) + + assert len(parsed_event) == 1 + assert parsed_event[0].Hello == "World" + + +def test_firehose_trigger_event_kinesis_no_envelope(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + parsed_event: KinesisFirehoseModel = KinesisFirehoseModel(**raw_event) + + assert parsed_event.region == raw_event["region"] + assert parsed_event.invocationId == raw_event["invocationId"] + assert parsed_event.deliveryStreamArn == raw_event["deliveryStreamArn"] + assert parsed_event.sourceKinesisStreamArn == raw_event["sourceKinesisStreamArn"] + + records = list(parsed_event.records) + assert len(records) == 2 + record_01: KinesisFirehoseRecord = records[0] + assert record_01.approximateArrivalTimestamp == 1664028820148 + assert record_01.recordId == "record1" + assert record_01.data == b"Hello World" + + metadata_01: KinesisFirehoseRecordMetadata = record_01.kinesisRecordMetadata + assert metadata_01.partitionKey == "4d1ad2b9-24f8-4b9d-a088-76e9947c317a" + assert metadata_01.subsequenceNumber == "" + assert metadata_01.shardId == "shardId-000000000000" + assert metadata_01.approximateArrivalTimestamp == 1664028820148 + assert metadata_01.sequenceNumber == "49546986683135544286507457936321625675700192471156785154" + + record_02: KinesisFirehoseRecord = records[1] 
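+    # the model base64-decodes each record's "data" field, so the second
+    # record's JSON payload is asserted below as raw bytes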
+ assert record_02.approximateArrivalTimestamp == 1664028793294 + assert record_02.recordId == "record2" + assert record_02.data == b'{"Hello": "World"}' + + metadata_02: KinesisFirehoseRecordMetadata = record_02.kinesisRecordMetadata + assert metadata_02.partitionKey == "4d1ad2b9-24f8-4b9d-a088-76e9947c318a" + assert metadata_02.subsequenceNumber == "" + assert metadata_02.shardId == "shardId-000000000001" + assert metadata_02.approximateArrivalTimestamp == 1664028793294 + assert metadata_02.sequenceNumber == "49546986683135544286507457936321625675700192471156785155" + + +def test_firehose_trigger_event_put_no_envelope(): + raw_event = load_event("kinesisFirehosePutEvent.json") + parsed_event: KinesisFirehoseModel = KinesisFirehoseModel(**raw_event) + + assert parsed_event.region == raw_event["region"] + assert parsed_event.invocationId == raw_event["invocationId"] + assert parsed_event.deliveryStreamArn == raw_event["deliveryStreamArn"] + + records = list(parsed_event.records) + assert len(records) == 2 + + record_01: KinesisFirehoseRecord = records[0] + assert record_01.approximateArrivalTimestamp == 1664029185290 + assert record_01.recordId == "record1" + assert record_01.data == b"Hello World" + + record_02: KinesisFirehoseRecord = records[1] + assert record_02.approximateArrivalTimestamp == 1664029186945 + assert record_02.recordId == "record2" + assert record_02.data == b'{"Hello": "World"}' + + +def test_kinesis_trigger_bad_base64_event(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"][0]["data"] = {"bad base64"} + with pytest.raises(ValidationError): + KinesisFirehoseModel(**raw_event) + + +def test_kinesis_trigger_bad_timestamp_event(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"][0]["approximateArrivalTimestamp"] = -1 + with pytest.raises(ValidationError): + KinesisFirehoseModel(**raw_event) + + +def test_kinesis_trigger_bad_metadata_timestamp_event(): + raw_event = load_event("kinesisFirehoseKinesisEvent.json") + raw_event["records"][0]["kinesisRecordMetadata"]["approximateArrivalTimestamp"] = "-1" + with pytest.raises(ValidationError): + KinesisFirehoseModel(**raw_event) diff --git a/tests/unit/parser/test_lambda_function_url.py b/tests/unit/parser/test_lambda_function_url.py new file mode 100644 index 00000000000..3b1a7f259ec --- /dev/null +++ b/tests/unit/parser/test_lambda_function_url.py @@ -0,0 +1,127 @@ +from aws_lambda_powertools.utilities.parser import envelopes, parse +from aws_lambda_powertools.utilities.parser.models import LambdaFunctionUrlModel +from tests.functional.utils import load_event +from tests.unit.parser.schemas import MyALambdaFuncUrlBusiness + + +def test_lambda_func_url_event_with_envelope(): + raw_event = load_event("lambdaFunctionUrlEvent.json") + raw_event["body"] = '{"message": "Hello", "username": "Ran"}' + + parsed_event: MyALambdaFuncUrlBusiness = parse( + event=raw_event, + model=MyALambdaFuncUrlBusiness, + envelope=envelopes.LambdaFunctionUrlEnvelope, + ) + + assert parsed_event.message == "Hello" + assert parsed_event.username == "Ran" + + +def test_lambda_function_url_event(): + raw_event = load_event("lambdaFunctionUrlEvent.json") + parsed_event: LambdaFunctionUrlModel = LambdaFunctionUrlModel(**raw_event) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.routeKey == raw_event["routeKey"] + + assert parsed_event.rawQueryString == raw_event["rawQueryString"] + + assert parsed_event.cookies is None + + headers = parsed_event.headers + 
assert len(headers) == 20 + + assert parsed_event.queryStringParameters is None + + assert parsed_event.isBase64Encoded is False + assert parsed_event.body is None + assert parsed_event.pathParameters is None + assert parsed_event.stageVariables is None + + request_context = parsed_event.requestContext + raw_request_context = raw_event["requestContext"] + + assert request_context.accountId == raw_request_context["accountId"] + assert request_context.apiId == raw_request_context["apiId"] + assert request_context.domainName == raw_request_context["domainName"] + assert request_context.domainPrefix == raw_request_context["domainPrefix"] + assert request_context.requestId == raw_request_context["requestId"] + assert request_context.routeKey == raw_request_context["routeKey"] + assert request_context.stage == raw_request_context["stage"] + assert request_context.time == raw_request_context["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == raw_request_context["timeEpoch"] + assert request_context.authorizer is None + + http = request_context.http + assert http.method == raw_request_context["http"]["method"] + assert http.path == raw_request_context["http"]["path"] + assert http.protocol == raw_request_context["http"]["protocol"] + assert str(http.sourceIp) == "123.123.123.123/32" + assert http.userAgent == raw_request_context["http"]["userAgent"] + + assert request_context.authorizer is None + + +def test_lambda_function_url_event_iam(): + raw_event = load_event("lambdaFunctionUrlIAMEvent.json") + parsed_event: LambdaFunctionUrlModel = LambdaFunctionUrlModel(**raw_event) + + assert parsed_event.version == raw_event["version"] + assert parsed_event.routeKey == raw_event["routeKey"] + + assert parsed_event.rawQueryString == raw_event["rawQueryString"] + + cookies = parsed_event.cookies + assert len(cookies) == 2 + assert cookies[0] == raw_event["cookies"][0] + + headers = parsed_event.headers + assert len(headers) == 2 + + query_string_parameters = parsed_event.queryStringParameters + assert len(query_string_parameters) == 2 + assert query_string_parameters.get("parameter2") == raw_event["queryStringParameters"]["parameter2"] + + assert parsed_event.isBase64Encoded is False + assert parsed_event.body == raw_event["body"] + assert parsed_event.pathParameters is None + assert parsed_event.stageVariables is None + + request_context = parsed_event.requestContext + raw_request_context = raw_event["requestContext"] + assert request_context.accountId == raw_request_context["accountId"] + assert request_context.apiId == raw_request_context["apiId"] + assert request_context.domainName == raw_request_context["domainName"] + assert request_context.domainPrefix == raw_request_context["domainPrefix"] + assert request_context.requestId == raw_request_context["requestId"] + assert request_context.routeKey == raw_request_context["routeKey"] + assert request_context.stage == raw_request_context["stage"] + assert request_context.time == raw_request_context["time"] + convert_time = int(round(request_context.timeEpoch.timestamp() * 1000)) + assert convert_time == raw_request_context["timeEpoch"] + assert request_context.authorizer is not None + + http = request_context.http + assert http.method == raw_request_context["http"]["method"] + assert http.path == raw_request_context["http"]["path"] + assert http.protocol == raw_request_context["http"]["protocol"] + assert str(http.sourceIp) == "123.123.123.123/32" + assert http.userAgent == 
raw_request_context["http"]["userAgent"]
+
+    authorizer = request_context.authorizer
+    assert authorizer is not None
+    assert authorizer.jwt is None
+    assert authorizer.lambda_value is None
+
+    iam = authorizer.iam
+    iam_raw = raw_event["requestContext"]["authorizer"]["iam"]
+    assert iam is not None
+    assert iam.accessKey == iam_raw["accessKey"]
+    assert iam.accountId == iam_raw["accountId"]
+    assert iam.callerId == iam_raw["callerId"]
+    assert iam.cognitoIdentity is None
+    assert iam.principalOrgId is None
+    assert iam.userId == iam_raw["userId"]
+    assert iam.userArn == iam_raw["userArn"]
diff --git a/tests/functional/parser/test_s3object_event.py b/tests/unit/parser/test_s3object_event.py
similarity index 62%
rename from tests/functional/parser/test_s3object_event.py
rename to tests/unit/parser/test_s3object_event.py
index 90c2555360d..c106a66c2a9 100644
--- a/tests/functional/parser/test_s3object_event.py
+++ b/tests/unit/parser/test_s3object_event.py
@@ -1,17 +1,10 @@
-from aws_lambda_powertools.utilities.parser import event_parser
 from aws_lambda_powertools.utilities.parser.models import S3ObjectLambdaEvent
-from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.utils import load_event
 
 
-@event_parser(model=S3ObjectLambdaEvent)
-def handle_s3_object_event_iam(event: S3ObjectLambdaEvent, _: LambdaContext):
-    return event
-
-
 def test_s3_object_event():
     event = load_event("s3ObjectEventIAMUser.json")
-    parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_iam(event, LambdaContext())
+    parsed_event: S3ObjectLambdaEvent = S3ObjectLambdaEvent(**event)
     assert parsed_event.xAmzRequestId == event["xAmzRequestId"]
     assert parsed_event.getObjectContext is not None
     object_context = parsed_event.getObjectContext
@@ -40,28 +33,22 @@ def test_s3_object_event():
     assert parsed_event.protocolVersion == event["protocolVersion"]
 
 
-@event_parser(model=S3ObjectLambdaEvent)
-def handle_s3_object_event_temp_creds(event: S3ObjectLambdaEvent, _: LambdaContext):
-    return event
-
-
 def test_s3_object_event_temp_credentials():
     event = load_event("s3ObjectEventTempCredentials.json")
-    parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_temp_creds(event, LambdaContext())
+    parsed_event: S3ObjectLambdaEvent = S3ObjectLambdaEvent(**event)
     assert parsed_event.xAmzRequestId == event["xAmzRequestId"]
     session_context = parsed_event.userIdentity.sessionContext
     assert session_context is not None
     session_issuer = session_context.sessionIssuer
+    session_issuer_raw = event["userIdentity"]["sessionContext"]["sessionIssuer"]
     assert session_issuer is not None
-    assert session_issuer.type == event["userIdentity"]["sessionContext"]["sessionIssuer"]["type"]
-    assert session_issuer.userName == event["userIdentity"]["sessionContext"]["sessionIssuer"]["userName"]
-    assert session_issuer.principalId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["principalId"]
-    assert session_issuer.arn == event["userIdentity"]["sessionContext"]["sessionIssuer"]["arn"]
-    assert session_issuer.accountId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["accountId"]
+    assert session_issuer.type == session_issuer_raw["type"]
+    assert session_issuer.userName == session_issuer_raw["userName"]
+    assert session_issuer.principalId == session_issuer_raw["principalId"]
+    assert session_issuer.arn == session_issuer_raw["arn"]
+    assert session_issuer.accountId == session_issuer_raw["accountId"]
     session_attributes = session_context.attributes
+    session_attributes_raw =
event["userIdentity"]["sessionContext"]["attributes"] assert session_attributes is not None - assert ( - str(session_attributes.mfaAuthenticated).lower() - == event["userIdentity"]["sessionContext"]["attributes"]["mfaAuthenticated"] - ) - assert session_attributes.creationDate == event["userIdentity"]["sessionContext"]["attributes"]["creationDate"] + assert str(session_attributes.mfaAuthenticated).lower() == session_attributes_raw["mfaAuthenticated"] + assert session_attributes.creationDate == session_attributes_raw["creationDate"] diff --git a/tests/unit/parser/test_s3.py b/tests/unit/parser/test_s3.py index c77c70095a3..1586f32d28e 100644 --- a/tests/unit/parser/test_s3.py +++ b/tests/unit/parser/test_s3.py @@ -1,145 +1,159 @@ -import json -from datetime import datetime - import pytest from aws_lambda_powertools.utilities.parser import ValidationError -from aws_lambda_powertools.utilities.parser.models import ( - S3EventNotificationEventBridgeModel, - S3SqsEventNotificationModel, -) +from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel from tests.functional.utils import load_event -def test_s3_eventbridge_notification_object_created_event(): - raw_event = load_event("s3EventBridgeNotificationObjectCreatedEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] - assert model.detail.reason == raw_event["detail"]["reason"] - - -def test_s3_eventbridge_notification_object_deleted_event(): - raw_event = load_event("s3EventBridgeNotificationObjectDeletedEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert 
model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] - assert model.detail.reason == raw_event["detail"]["reason"] - assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] - - -def test_s3_eventbridge_notification_object_expired_event(): - raw_event = load_event("s3EventBridgeNotificationObjectExpiredEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.reason == raw_event["detail"]["reason"] - assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] - - -def test_s3_eventbridge_notification_object_restore_completed_event(): - raw_event = load_event("s3EventBridgeNotificationObjectRestoreCompletedEvent.json") - model = S3EventNotificationEventBridgeModel(**raw_event) - - assert model.version == raw_event["version"] - assert model.id == raw_event["id"] - assert model.detail_type == raw_event["detail-type"] - assert model.source == raw_event["source"] - assert model.account == raw_event["account"] - assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) - assert model.region == raw_event["region"] - assert model.resources == raw_event["resources"] - - assert model.detail.version == raw_event["detail"]["version"] - assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] - assert model.detail.object.key == raw_event["detail"]["object"]["key"] - assert model.detail.object.size == raw_event["detail"]["object"]["size"] - assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] - assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] - assert model.detail.request_id == raw_event["detail"]["request-id"] - assert model.detail.requester == raw_event["detail"]["requester"] - assert model.detail.restore_expiry_time == raw_event["detail"]["restore-expiry-time"] - assert model.detail.source_storage_class == raw_event["detail"]["source-storage-class"] - - -def test_s3_sqs_event_notification(): - raw_event = load_event("s3SqsEvent.json") - model = S3SqsEventNotificationModel(**raw_event) - - body = json.loads(raw_event["Records"][0]["body"]) - - assert model.Records[0].body.Records[0].eventVersion == body["Records"][0]["eventVersion"] - assert model.Records[0].body.Records[0].eventSource == body["Records"][0]["eventSource"] - assert model.Records[0].body.Records[0].eventTime == 
datetime.fromisoformat( - body["Records"][0]["eventTime"].replace("Z", "+00:00"), +def test_s3_trigger_event(): + raw_event = load_event("s3Event.json") + parsed_event: S3Model = S3Model(**raw_event) + + records = list(parsed_event.Records) + assert len(records) == 1 + + record: S3RecordModel = records[0] + raw_record = raw_event["Records"][0] + assert record.eventVersion == raw_record["eventVersion"] + assert record.eventSource == raw_record["eventSource"] + assert record.awsRegion == raw_record["awsRegion"] + convert_time = int(round(record.eventTime.timestamp() * 1000)) + assert convert_time == 1567539447192 + assert record.eventName == raw_record["eventName"] + assert record.glacierEventData is None + + user_identity = record.userIdentity + assert user_identity.principalId == raw_record["userIdentity"]["principalId"] + + request_parameters = record.requestParameters + assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" + assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"] + assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"] + + s3 = record.s3 + raw_s3 = raw_event["Records"][0]["s3"] + assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"] + assert s3.configurationId == raw_record["s3"]["configurationId"] + assert s3.object.key == raw_s3["object"]["key"] + assert s3.object.size == raw_s3["object"]["size"] + assert s3.object.eTag == raw_s3["object"]["eTag"] + assert s3.object.versionId is None + assert s3.object.sequencer == raw_s3["object"]["sequencer"] + + bucket = s3.bucket + raw_bucket = raw_record["s3"]["bucket"] + assert bucket.name == raw_bucket["name"] + assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"] + assert bucket.arn == raw_bucket["arn"] + + +def test_s3_glacier_trigger_event(): + raw_event = load_event("s3EventGlacier.json") + parsed_event: S3Model = S3Model(**raw_event) + + records = list(parsed_event.Records) + assert len(records) == 1 + + record: S3RecordModel = records[0] + raw_record = raw_event["Records"][0] + assert record.eventVersion == raw_record["eventVersion"] + assert record.eventSource == raw_record["eventSource"] + assert record.awsRegion == raw_record["awsRegion"] + convert_time = int(round(record.eventTime.timestamp() * 1000)) + assert convert_time == 1567539447192 + assert record.eventName == raw_record["eventName"] + assert record.glacierEventData is not None + convert_time = int( + round(record.glacierEventData.restoreEventData.lifecycleRestorationExpiryTime.timestamp() * 1000), + ) + assert convert_time == 60000 + assert ( + record.glacierEventData.restoreEventData.lifecycleRestoreStorageClass + == raw_record["glacierEventData"]["restoreEventData"]["lifecycleRestoreStorageClass"] ) - assert model.Records[0].body.Records[0].eventName == body["Records"][0]["eventName"] - - -def test_s3_sqs_event_notification_body_invalid_json(): - raw_event = load_event("s3SqsEvent.json") - - for record in raw_event["Records"]: - record["body"] = "invalid body" + user_identity = record.userIdentity + assert user_identity.principalId == raw_record["userIdentity"]["principalId"] + + request_parameters = record.requestParameters + assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" + assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"] + assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"] + + s3 = record.s3 + raw_s3 = 
raw_event["Records"][0]["s3"] + assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"] + assert s3.configurationId == raw_record["s3"]["configurationId"] + assert s3.object.key == raw_s3["object"]["key"] + assert s3.object.size == raw_s3["object"]["size"] + assert s3.object.eTag == raw_s3["object"]["eTag"] + assert s3.object.versionId is None + assert s3.object.sequencer == raw_s3["object"]["sequencer"] + + bucket = s3.bucket + raw_bucket = raw_record["s3"]["bucket"] + assert bucket.name == raw_bucket["name"] + assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"] + assert bucket.arn == raw_bucket["arn"] + + +def test_s3_trigger_event_delete_object(): + raw_event = load_event("s3EventDeleteObject.json") + parsed_event: S3Model = S3Model(**raw_event) + + records = list(parsed_event.Records) + assert len(records) == 1 + + record: S3RecordModel = records[0] + raw_record = raw_event["Records"][0] + assert record.eventVersion == raw_record["eventVersion"] + assert record.eventSource == raw_record["eventSource"] + assert record.awsRegion == raw_record["awsRegion"] + convert_time = int(round(record.eventTime.timestamp() * 1000)) + assert convert_time == 1567539447192 + assert record.eventName == raw_record["eventName"] + assert record.glacierEventData is None + + user_identity = record.userIdentity + assert user_identity.principalId == raw_record["userIdentity"]["principalId"] + + request_parameters = record.requestParameters + assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32" + assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"] + assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"] + + s3 = record.s3 + raw_s3 = raw_event["Records"][0]["s3"] + assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"] + assert s3.configurationId == raw_record["s3"]["configurationId"] + assert s3.object.key == raw_s3["object"]["key"] + assert s3.object.size is None + assert s3.object.eTag is None + assert s3.object.versionId is None + assert s3.object.sequencer == raw_s3["object"]["sequencer"] + + bucket = s3.bucket + raw_bucket = raw_record["s3"]["bucket"] + assert bucket.name == raw_bucket["name"] + assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"] + assert bucket.arn == raw_bucket["arn"] + + +def test_s3_empty_object(): + raw_event = load_event("s3Event.json") + raw_event["Records"][0]["s3"]["object"]["size"] = 0 + S3Model(**raw_event) + + +def test_s3_none_object_size_failed_validation(): + raw_event = load_event("s3Event.json") + raw_event["Records"][0]["s3"]["object"]["size"] = None with pytest.raises(ValidationError): - S3SqsEventNotificationModel(**raw_event) - + S3Model(**raw_event) -def test_s3_sqs_event_notification_body_containing_arbitrary_json(): - raw_event = load_event("s3SqsEvent.json") - for record in raw_event["Records"]: - record["body"] = {"foo": "bar"} +def test_s3_none_etag_value_failed_validation(): + raw_event = load_event("s3Event.json") + raw_event["Records"][0]["s3"]["object"]["eTag"] = None with pytest.raises(ValidationError): - S3SqsEventNotificationModel(**raw_event) + S3Model(**raw_event) diff --git a/tests/unit/parser/test_s3_notification.py b/tests/unit/parser/test_s3_notification.py new file mode 100644 index 00000000000..c77c70095a3 --- /dev/null +++ b/tests/unit/parser/test_s3_notification.py @@ -0,0 +1,145 @@ +import json +from datetime import datetime + +import pytest + +from 
aws_lambda_powertools.utilities.parser import ValidationError +from aws_lambda_powertools.utilities.parser.models import ( + S3EventNotificationEventBridgeModel, + S3SqsEventNotificationModel, +) +from tests.functional.utils import load_event + + +def test_s3_eventbridge_notification_object_created_event(): + raw_event = load_event("s3EventBridgeNotificationObjectCreatedEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] + assert model.detail.reason == raw_event["detail"]["reason"] + + +def test_s3_eventbridge_notification_object_deleted_event(): + raw_event = load_event("s3EventBridgeNotificationObjectDeletedEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.source_ip_address == raw_event["detail"]["source-ip-address"] + assert model.detail.reason == raw_event["detail"]["reason"] + assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] + + +def test_s3_eventbridge_notification_object_expired_event(): + raw_event = load_event("s3EventBridgeNotificationObjectExpiredEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert 
model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.reason == raw_event["detail"]["reason"] + assert model.detail.deletion_type == raw_event["detail"]["deletion-type"] + + +def test_s3_eventbridge_notification_object_restore_completed_event(): + raw_event = load_event("s3EventBridgeNotificationObjectRestoreCompletedEvent.json") + model = S3EventNotificationEventBridgeModel(**raw_event) + + assert model.version == raw_event["version"] + assert model.id == raw_event["id"] + assert model.detail_type == raw_event["detail-type"] + assert model.source == raw_event["source"] + assert model.account == raw_event["account"] + assert model.time == datetime.fromisoformat(raw_event["time"].replace("Z", "+00:00")) + assert model.region == raw_event["region"] + assert model.resources == raw_event["resources"] + + assert model.detail.version == raw_event["detail"]["version"] + assert model.detail.bucket.name == raw_event["detail"]["bucket"]["name"] + assert model.detail.object.key == raw_event["detail"]["object"]["key"] + assert model.detail.object.size == raw_event["detail"]["object"]["size"] + assert model.detail.object.etag == raw_event["detail"]["object"]["etag"] + assert model.detail.object.sequencer == raw_event["detail"]["object"]["sequencer"] + assert model.detail.request_id == raw_event["detail"]["request-id"] + assert model.detail.requester == raw_event["detail"]["requester"] + assert model.detail.restore_expiry_time == raw_event["detail"]["restore-expiry-time"] + assert model.detail.source_storage_class == raw_event["detail"]["source-storage-class"] + + +def test_s3_sqs_event_notification(): + raw_event = load_event("s3SqsEvent.json") + model = S3SqsEventNotificationModel(**raw_event) + + body = json.loads(raw_event["Records"][0]["body"]) + + assert model.Records[0].body.Records[0].eventVersion == body["Records"][0]["eventVersion"] + assert model.Records[0].body.Records[0].eventSource == body["Records"][0]["eventSource"] + assert model.Records[0].body.Records[0].eventTime == datetime.fromisoformat( + body["Records"][0]["eventTime"].replace("Z", "+00:00"), + ) + assert model.Records[0].body.Records[0].eventName == body["Records"][0]["eventName"] + + +def test_s3_sqs_event_notification_body_invalid_json(): + raw_event = load_event("s3SqsEvent.json") + + for record in raw_event["Records"]: + record["body"] = "invalid body" + + with pytest.raises(ValidationError): + S3SqsEventNotificationModel(**raw_event) + + +def test_s3_sqs_event_notification_body_containing_arbitrary_json(): + raw_event = load_event("s3SqsEvent.json") + for record in raw_event["Records"]: + record["body"] = {"foo": "bar"} + + with pytest.raises(ValidationError): + S3SqsEventNotificationModel(**raw_event) diff --git a/tests/unit/parser/test_ses.py b/tests/unit/parser/test_ses.py new file mode 100644 index 00000000000..34eb43bf5eb --- /dev/null +++ b/tests/unit/parser/test_ses.py @@ -0,0 +1,52 @@ 
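+# Unit tests for SesModel and SesRecordModel, exercising the sesEvent.json fixture end to end.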
+from aws_lambda_powertools.utilities.parser.models import SesModel, SesRecordModel +from tests.functional.utils import load_event + + +def test_ses_trigger_event(): + raw_event = load_event("sesEvent.json") + parsed_event: SesModel = SesModel(**raw_event) + + records = parsed_event.Records + record: SesRecordModel = records[0] + raw_record = raw_event["Records"][0] + + assert record.eventSource == raw_record["eventSource"] + assert record.eventVersion == raw_record["eventVersion"] + + mail = record.ses.mail + raw_mail = raw_record["ses"]["mail"] + assert mail.source == raw_mail["source"] + assert mail.messageId == raw_mail["messageId"] + assert mail.destination == raw_mail["destination"] + assert mail.headersTruncated is False + convert_time = int(round(mail.timestamp.timestamp() * 1000)) + assert convert_time == 0 + + headers = list(mail.headers) + assert len(headers) == 10 + assert headers[0].name == raw_mail["headers"][0]["name"] + assert headers[0].value == raw_mail["headers"][0]["value"] + + common_headers = mail.commonHeaders + assert common_headers.returnPath == raw_mail["commonHeaders"]["returnPath"] + assert common_headers.header_from == raw_mail["commonHeaders"]["from"] + assert common_headers.date == raw_mail["commonHeaders"]["date"] + assert common_headers.to == raw_mail["commonHeaders"]["to"] + assert common_headers.messageId == raw_mail["commonHeaders"]["messageId"] + assert common_headers.subject == raw_mail["commonHeaders"]["subject"] + + receipt = record.ses.receipt + raw_receipt = raw_record["ses"]["receipt"] + convert_time = int(round(receipt.timestamp.timestamp() * 1000)) + assert convert_time == 0 + assert receipt.processingTimeMillis == raw_receipt["processingTimeMillis"] + assert receipt.recipients == raw_receipt["recipients"] + assert receipt.spamVerdict.status == raw_receipt["spamVerdict"]["status"] + assert receipt.virusVerdict.status == raw_receipt["virusVerdict"]["status"] + assert receipt.spfVerdict.status == raw_receipt["spfVerdict"]["status"] + assert receipt.dmarcVerdict.status == raw_receipt["dmarcVerdict"]["status"] + + action = receipt.action + assert action.type == raw_receipt["action"]["type"] + assert action.functionArn == raw_receipt["action"]["functionArn"] + assert action.invocationType == raw_receipt["action"]["invocationType"] diff --git a/tests/unit/parser/test_sns.py b/tests/unit/parser/test_sns.py new file mode 100644 index 00000000000..9b925d5fa76 --- /dev/null +++ b/tests/unit/parser/test_sns.py @@ -0,0 +1,118 @@ +import json + +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse +from tests.functional.utils import load_event +from tests.functional.validator.conftest import sns_event # noqa: F401 +from tests.unit.parser.schemas import MyAdvancedSnsBusiness, MySnsBusiness + + +def test_handle_sns_trigger_event_json_body(sns_event): # noqa: F811 + parse(event=sns_event, model=MySnsBusiness, envelope=envelopes.SnsEnvelope) + + +def test_validate_event_does_not_conform_with_model(): + raw_event: dict = {"invalid": "event"} + + with pytest.raises(ValidationError): + parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsEnvelope) + + +def test_validate_event_does_not_conform_user_json_string_with_model(): + raw_event: dict = { + "Records": [ + { + "EventVersion": "1.0", + "EventSubscriptionArn": "arn:aws:sns:us-east-2:123456789012:sns-la ...", + "EventSource": "aws:sns", + "Sns": { + "SignatureVersion": "1", + "Timestamp": "2019-01-02T12:45:07.000Z", + "Signature": 
"tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==", + "SigningCertUrl": "https://sns.us-east-2.amazonaws.com/SimpleNotificat ...", + "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "Message": "not a valid JSON!", + "MessageAttributes": {"Test": {"Type": "String", "Value": "TestString"}}, + "Type": "Notification", + "UnsubscribeUrl": "https://sns.us-east-2.amazonaws.com/?Action=Unsubscri ...", + "TopicArn": "arn:aws:sns:us-east-2:123456789012:sns-lambda", + "Subject": "TestInvoke", + }, + }, + ], + } + + with pytest.raises(ValidationError): + parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsEnvelope) + + +def test_handle_sns_trigger_event_no_envelope(): + raw_event = load_event("snsEvent.json") + parsed_event: MyAdvancedSnsBusiness = MyAdvancedSnsBusiness(**raw_event) + + records = parsed_event.Records + record = records[0] + raw_record = raw_event["Records"][0] + + assert len(records) == 1 + assert record.EventVersion == raw_record["EventVersion"] + assert record.EventSubscriptionArn == raw_record["EventSubscriptionArn"] + assert record.EventSource == raw_record["EventSource"] + + sns = record.Sns + raw_sns = raw_record["Sns"] + assert sns.Type == raw_sns["Type"] + assert sns.UnsubscribeUrl.scheme == "https" + assert sns.UnsubscribeUrl.host == "sns.us-east-2.amazonaws.com" + assert sns.UnsubscribeUrl.query == "Action=Unsubscribe" + assert sns.TopicArn == raw_sns["TopicArn"] + assert sns.Subject == raw_sns["Subject"] + assert sns.SignatureVersion == raw_sns["SignatureVersion"] + convert_time = int(round(sns.Timestamp.timestamp() * 1000)) + assert convert_time == 1546433107000 + assert sns.Signature == raw_sns["Signature"] + assert sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" + assert sns.SigningCertUrl.scheme == "https" + assert sns.SigningCertUrl.host == "sns.us-east-2.amazonaws.com" + assert sns.SigningCertUrl.path == "/SimpleNotification" + assert sns.MessageId == raw_sns["MessageId"] + assert sns.Message == raw_sns["Message"] + + attrib_dict = sns.MessageAttributes + assert len(attrib_dict) == 2 + assert attrib_dict["Test"].Type == raw_sns["MessageAttributes"]["Test"]["Type"] + assert attrib_dict["Test"].Value == raw_sns["MessageAttributes"]["Test"]["Value"] + assert attrib_dict["TestBinary"].Type == raw_sns["MessageAttributes"]["TestBinary"]["Type"] + assert attrib_dict["TestBinary"].Value == raw_sns["MessageAttributes"]["TestBinary"]["Value"] + + +def test_handle_sns_sqs_trigger_event_json_body(): # noqa: F811 + raw_event = load_event("snsSqsEvent.json") + parsed_event: MySnsBusiness = parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) + + assert len(parsed_event) == 1 + assert parsed_event[0].message == "hello world" + assert parsed_event[0].username == "lessa" + + +def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url(): + # GIVEN an event is tampered with a missing UnsubscribeURL + raw_event = load_event("snsSqsEvent.json") + payload = json.loads(raw_event["Records"][0]["body"]) + payload.pop("UnsubscribeURL") + raw_event["Records"][0]["body"] = json.dumps(payload) + + # WHEN parsing the payload + # THEN raise a ValidationError error + with pytest.raises(ValidationError): + parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) + + +def test_handle_sns_sqs_fifo_trigger_event_json_body(): + raw_event = load_event("snsSqsFifoEvent.json") + parsed_event: MySnsBusiness = parse(event=raw_event, model=MySnsBusiness, envelope=envelopes.SnsSqsEnvelope) + + assert 
len(parsed_event) == 1 + assert parsed_event[0].message == "hello world" + assert parsed_event[0].username == "lessa" diff --git a/tests/functional/parser/test_sqs.py b/tests/unit/parser/test_sqs.py similarity index 51% rename from tests/functional/parser/test_sqs.py rename to tests/unit/parser/test_sqs.py index 4f547ad4bcc..0d948acb39d 100644 --- a/tests/functional/parser/test_sqs.py +++ b/tests/unit/parser/test_sqs.py @@ -1,38 +1,28 @@ -from typing import Any, List - import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from tests.functional.utils import load_event from tests.functional.validator.conftest import sqs_event # noqa: F401 - - -@event_parser(model=MySqsBusiness, envelope=envelopes.SqsEnvelope) -def handle_sqs_json_body(event: List[MySqsBusiness], _: LambdaContext): - assert len(event) == 1 - assert event[0].message == "hello world" - assert event[0].username == "lessa" +from tests.unit.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness def test_handle_sqs_trigger_event_json_body(sqs_event): # noqa: F811 - handle_sqs_json_body(sqs_event, LambdaContext()) + parsed_event = parse(event=sqs_event, model=MySqsBusiness, envelope=envelopes.SqsEnvelope) + + assert len(parsed_event) == 1 + assert parsed_event[0].message == "hello world" + assert parsed_event[0].username == "lessa" def test_validate_event_does_not_conform_with_model(): - event: Any = {"invalid": "event"} + raw_event: dict = {"invalid": "event"} with pytest.raises(ValidationError): - handle_sqs_json_body(event, LambdaContext()) + parse(event=raw_event, model=MySqsBusiness, envelope=envelopes.SqsEnvelope) def test_validate_event_does_not_conform_user_json_string_with_model(): - event: Any = { + raw_event: dict = { "Records": [ { "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", @@ -56,41 +46,42 @@ def test_validate_event_does_not_conform_user_json_string_with_model(): } with pytest.raises(ValidationError): - handle_sqs_json_body(event, LambdaContext()) + parse(event=raw_event, model=MySqsBusiness, envelope=envelopes.SqsEnvelope) -@event_parser(model=MyAdvancedSqsBusiness) -def handle_sqs_no_envelope(event: MyAdvancedSqsBusiness, _: LambdaContext): - records = event.Records - record = records[0] - attributes = record.attributes - message_attributes = record.messageAttributes - test_attr = message_attributes["testAttr"] +def test_handle_sqs_trigger_event_no_envelope(): + raw_event = load_event("sqsEvent.json") + parsed_event: MyAdvancedSqsBusiness = MyAdvancedSqsBusiness(**raw_event) + records = parsed_event.Records + record = records[0] + raw_record = raw_event["Records"][0] assert len(records) == 2 - assert record.messageId == "059f36b4-87a3-44ab-83d2-661975830a7d" - assert record.receiptHandle == "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a..." - assert record.body == "Test message." 
+ + assert record.messageId == raw_record["messageId"] + assert record.receiptHandle == raw_record["receiptHandle"] + assert record.body == raw_record["body"] + assert record.eventSource == raw_record["eventSource"] + assert record.eventSourceARN == raw_record["eventSourceARN"] + assert record.awsRegion == raw_record["awsRegion"] + assert record.md5OfBody == raw_record["md5OfBody"] + + attributes = record.attributes assert attributes.AWSTraceHeader is None - assert attributes.ApproximateReceiveCount == "1" - convert_time = int(round(attributes.SentTimestamp.timestamp() * 1000)) - assert convert_time == 1545082649183 - assert attributes.SenderId == "AIDAIENQZJOLO23YVJ4VO" - convert_time = int(round(attributes.ApproximateFirstReceiveTimestamp.timestamp() * 1000)) - assert convert_time == 1545082649185 + assert attributes.ApproximateReceiveCount == raw_record["attributes"]["ApproximateReceiveCount"] assert attributes.SequenceNumber is None assert attributes.MessageGroupId is None assert attributes.MessageDeduplicationId is None - assert message_attributes.get("NotFound") is None - assert test_attr.stringValue == "100" - assert test_attr.binaryValue == "base64Str" - assert test_attr.dataType == "Number" - assert record.md5OfBody == "e4e68fb7bd0e697a0ae8f1bb342846b3" - assert record.eventSource == "aws:sqs" - assert record.eventSourceARN == "arn:aws:sqs:us-east-2:123456789012:my-queue" - assert record.awsRegion == "us-east-2" - + assert attributes.SenderId == raw_record["attributes"]["SenderId"] + convert_time = int(round(attributes.ApproximateFirstReceiveTimestamp.timestamp() * 1000)) + assert convert_time == int(raw_record["attributes"]["ApproximateFirstReceiveTimestamp"]) + convert_time = int(round(attributes.SentTimestamp.timestamp() * 1000)) + assert convert_time == int(raw_record["attributes"]["SentTimestamp"]) -def test_handle_sqs_trigger_event_no_envelope(): - event_dict = load_event("sqsEvent.json") - handle_sqs_no_envelope(event_dict, LambdaContext()) + message_attributes = record.messageAttributes + raw_test_attr = raw_record["messageAttributes"]["testAttr"] + test_attr = message_attributes["testAttr"] + assert message_attributes.get("NotFound") is None + assert test_attr.stringValue == raw_test_attr["stringValue"] + assert test_attr.binaryValue == raw_test_attr["binaryValue"] + assert test_attr.dataType == raw_test_attr["dataType"] diff --git a/tests/unit/parser/test_vpc_lattice.py b/tests/unit/parser/test_vpc_lattice.py index f0476509cea..e5dfedfb445 100644 --- a/tests/unit/parser/test_vpc_lattice.py +++ b/tests/unit/parser/test_vpc_lattice.py @@ -1,26 +1,22 @@ import pytest -from aws_lambda_powertools.utilities.parser import ( - ValidationError, - envelopes, - event_parser, -) +from aws_lambda_powertools.utilities.parser import ValidationError, envelopes, parse from aws_lambda_powertools.utilities.parser.models import VpcLatticeModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from tests.functional.parser.schemas import MyVpcLatticeBusiness from tests.functional.utils import load_event - - -@event_parser(model=MyVpcLatticeBusiness, envelope=envelopes.VpcLatticeEnvelope) -def handle_lambda_vpclattice_with_envelope(event: MyVpcLatticeBusiness, context: LambdaContext): - assert event.username == "Leandro" - assert event.name == "Damascena" +from tests.unit.parser.schemas import MyVpcLatticeBusiness def test_vpc_lattice_event_with_envelope(): - event = load_event("vpcLatticeEvent.json") - event["body"] = '{"username": 
"Leandro", "name": "Damascena"}' - handle_lambda_vpclattice_with_envelope(event, LambdaContext()) + raw_event = load_event("vpcLatticeEvent.json") + raw_event["body"] = '{"username": "Leandro", "name": "Damascena"}' + parsed_event: MyVpcLatticeBusiness = parse( + event=raw_event, + model=MyVpcLatticeBusiness, + envelope=envelopes.VpcLatticeEnvelope, + ) + + assert parsed_event.username == "Leandro" + assert parsed_event.name == "Damascena" def test_vpc_lattice_event():