fix(parser): S3Model Object Deleted omits size and eTag attr #1638

Merged
merged 2 commits on Oct 24, 2022
15 changes: 12 additions & 3 deletions aws_lambda_powertools/utilities/parser/models/s3.py
@@ -1,7 +1,7 @@
 from datetime import datetime
 from typing import List, Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, root_validator
 from pydantic.fields import Field
 from pydantic.networks import IPvAnyNetwork
 from pydantic.types import NonNegativeFloat
@@ -43,8 +43,8 @@ class S3Bucket(BaseModel):

 class S3Object(BaseModel):
     key: str
-    size: NonNegativeFloat
-    eTag: str
+    size: Optional[NonNegativeFloat]
+    eTag: Optional[str]
     sequencer: str
     versionId: Optional[str]

@@ -68,6 +68,15 @@ class S3RecordModel(BaseModel):
     s3: S3Message
     glacierEventData: Optional[S3EventRecordGlacierEventData]

+    @root_validator
+    def validate_s3_object(cls, values):
+        event_name = values.get("eventName")
+        s3_object = values.get("s3").object
+        if "ObjectRemoved" not in event_name:
+            if s3_object.size is None or s3_object.eTag is None:
+                raise ValueError("S3Object.size and S3Object.eTag are required for non-ObjectRemoved events")
+        return values
+

 class S3Model(BaseModel):
     Records: List[S3RecordModel]
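For context on the hunk above: making size and eTag Optional lets pydantic accept ObjectRemoved notifications at all, while the root_validator reinstates the requirement for every other event type. A minimal sketch of that pydantic v1 pattern, using a hypothetical _Example model rather than the real S3 classes:

from typing import Optional

from pydantic import BaseModel, ValidationError, root_validator


class _Example(BaseModel):
    # Hypothetical model, for illustration only; mirrors the S3RecordModel pattern above.
    eventName: str
    size: Optional[int]

    @root_validator
    def require_size_unless_removed(cls, values):
        # A plain (non-pre) root_validator runs after field validation and
        # receives all successfully parsed values in one dict.
        if "ObjectRemoved" not in values.get("eventName", "") and values.get("size") is None:
            raise ValueError("size is required for non-ObjectRemoved events")
        return values


print(_Example(eventName="ObjectRemoved:Delete"))  # accepted, size defaults to None
try:
    _Example(eventName="ObjectCreated:Put")        # rejected by the root_validator
except ValidationError as exc:
    print(exc)

Because the root_validator only runs after field validation, checking the parsed values for None is enough to enforce the conditional requirement.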
@@ -1,7 +1,10 @@
 import base64
 import json

-from aws_lambda_powertools.utilities.data_classes import KinesisFirehoseEvent, event_source
+from aws_lambda_powertools.utilities.data_classes import (
+    KinesisFirehoseEvent,
+    event_source,
+)
 from aws_lambda_powertools.utilities.typing import LambdaContext

36 changes: 36 additions & 0 deletions tests/events/s3EventDeleteObject.json
@@ -0,0 +1,36 @@
+{
+  "Records": [
+    {
+      "eventVersion": "2.1",
+      "eventSource": "aws:s3",
+      "awsRegion": "us-east-2",
+      "eventTime": "2019-09-03T19:37:27.192Z",
+      "eventName": "ObjectRemoved:Delete",
+      "userIdentity": {
+        "principalId": "AWS:AIDAINPONIXQXHT3IKHL2"
+      },
+      "requestParameters": {
+        "sourceIPAddress": "205.255.255.255"
+      },
+      "responseElements": {
+        "x-amz-request-id": "D82B88E5F771F645",
+        "x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
+      },
+      "s3": {
+        "s3SchemaVersion": "1.0",
+        "configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
+        "bucket": {
+          "name": "lambda-artifacts-deafc19498e3f2df",
+          "ownerIdentity": {
+            "principalId": "A3I5XTEXAMAI3E"
+          },
+          "arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
+        },
+        "object": {
+          "key": "b21b84d653bb07b05b1e6b33684dc11b",
+          "sequencer": "0C0F6F405D6ED209E1"
+        }
+      }
+    }
+  ]
+}
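This fixture exercises the new behaviour end to end. A rough sketch of how it should behave against the updated model (the open() path below is an assumption relative to the repository root; the test suite itself loads fixtures through tests.functional.utils.load_event):

import json

from aws_lambda_powertools.utilities.parser import ValidationError, parse
from aws_lambda_powertools.utilities.parser.models import S3Model

# Assumed path, relative to the repository root
with open("tests/events/s3EventDeleteObject.json") as f:
    event = json.load(f)

# ObjectRemoved:Delete notifications legitimately omit size and eTag,
# so parsing succeeds and both attributes come back as None
model = parse(event=event, model=S3Model)
assert model.Records[0].s3.object.size is None
assert model.Records[0].s3.object.eTag is None

# Any non-ObjectRemoved event is still required to carry both attributes
event["Records"][0]["eventName"] = "ObjectCreated:Put"
try:
    parse(event=event, model=S3Model)
except ValidationError:
    print("rejected: size and eTag are required for non-ObjectRemoved events")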
58 changes: 57 additions & 1 deletion tests/functional/parser/test_s3.py
@@ -1,4 +1,6 @@
-from aws_lambda_powertools.utilities.parser import event_parser, parse
+import pytest
+
+from aws_lambda_powertools.utilities.parser import ValidationError, event_parser, parse
 from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
 from aws_lambda_powertools.utilities.typing import LambdaContext
 from tests.functional.utils import load_event
@@ -93,3 +93,57 @@ def test_s3_empty_object():
     event_dict = load_event("s3Event.json")
     event_dict["Records"][0]["s3"]["object"]["size"] = 0
     parse(event=event_dict, model=S3Model)
+
+
+def test_s3_none_object_size_failed_validation():
+    event_dict = load_event("s3Event.json")
+    event_dict["Records"][0]["s3"]["object"]["size"] = None
+    with pytest.raises(ValidationError):
+        parse(event=event_dict, model=S3Model)
+
+
+def test_s3_none_etag_value_failed_validation():
+    event_dict = load_event("s3Event.json")
+    event_dict["Records"][0]["s3"]["object"]["eTag"] = None
+    with pytest.raises(ValidationError):
+        parse(event=event_dict, model=S3Model)
+
+
+@event_parser(model=S3Model)
+def handle_s3_delete_object(event: S3Model, _: LambdaContext):
+    records = list(event.Records)
+    assert len(records) == 1
+    record: S3RecordModel = records[0]
+    assert record.eventVersion == "2.1"
+    assert record.eventSource == "aws:s3"
+    assert record.awsRegion == "us-east-2"
+    convert_time = int(round(record.eventTime.timestamp() * 1000))
+    assert convert_time == 1567539447192
+    assert record.eventName == "ObjectRemoved:Delete"
+    user_identity = record.userIdentity
+    assert user_identity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
+    request_parameters = record.requestParameters
+    assert str(request_parameters.sourceIPAddress) == "205.255.255.255/32"
+    assert record.responseElements.x_amz_request_id == "D82B88E5F771F645"
+    assert (
+        record.responseElements.x_amz_id_2
+        == "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
+    )
+    s3 = record.s3
+    assert s3.s3SchemaVersion == "1.0"
+    assert s3.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
+    bucket = s3.bucket
+    assert bucket.name == "lambda-artifacts-deafc19498e3f2df"
+    assert bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
+    assert bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
+    assert s3.object.key == "b21b84d653bb07b05b1e6b33684dc11b"
+    assert s3.object.size is None
+    assert s3.object.eTag is None
+    assert s3.object.versionId is None
+    assert s3.object.sequencer == "0C0F6F405D6ED209E1"
+    assert record.glacierEventData is None
+
+
+def test_s3_trigger_event_delete_object():
+    event_dict = load_event("s3EventDeleteObject.json")
+    handle_s3_delete_object(event_dict, LambdaContext())
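One practical consequence for downstream code: because size and eTag are now Optional, handlers that parse mixed create/delete notifications should guard against None. A sketch of such a handler (the function name and logic are illustrative, not part of this PR):

from aws_lambda_powertools.utilities.parser import event_parser
from aws_lambda_powertools.utilities.parser.models import S3Model
from aws_lambda_powertools.utilities.typing import LambdaContext


@event_parser(model=S3Model)
def lambda_handler(event: S3Model, context: LambdaContext):
    # Illustrative handler: size and eTag are only guaranteed for non-ObjectRemoved events
    for record in event.Records:
        obj = record.s3.object
        if record.eventName.startswith("ObjectRemoved"):
            print(f"deleted {obj.key}")
        elif obj.size is not None:
            print(f"stored {obj.key} ({obj.size} bytes, eTag={obj.eTag})")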