
Commit 4688495

refactor(parser): add from __future__ import annotations (#4983)
* refactor(parser): add `from __future__ import annotations` and update code according to ruff rules TCH, UP006, UP007, UP037 and FA100.

* Fix type alias with Python 3.8

  See https://bugs.python.org/issue45117

* Fix pydantic not working with Python 3.8

  TypeError: You have a type annotation 'str | None' which makes use of newer typing features than are supported in your version of Python. To handle this error, you should either remove the use of new syntax or install the `eval_type_backport` package.

* Update ruff.toml

  Configure lint.per-file-ignores in ruff.toml instead of adding a `# ruff: noqa: FA100` line to each file.

---------

Co-authored-by: Leandro Damascena <[email protected]>
1 parent a0463d1 commit 4688495

14 files changed: +35 −44 lines changed
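The gist of the change, as a minimal sketch (assuming pydantic v2 on Python 3.8; `lookup` and `Order` are illustrative names, not from this commit): ordinary modules gain `from __future__ import annotations` and can use `X | None` and builtin generics freely, while the pydantic model files keep `typing.Optional`/`typing.Literal`, because pydantic evaluates field annotations at runtime and `str | None` raises the TypeError quoted above on 3.8.

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel


def lookup(name: str | None = None) -> dict[str, str]:
    # Fine on Python 3.8: with postponed evaluation this annotation stays a
    # string and is never evaluated at runtime.
    return {"name": name or "unknown"}


class Order(BaseModel):
    # pydantic resolves field annotations at class-creation time, so `str | None`
    # would raise on 3.8 without eval_type_backport; model files keep Optional.
    description: Optional[str] = None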

aws_lambda_powertools/utilities/parser/functions.py

+5 −1

@@ -1,9 +1,13 @@
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from pydantic import TypeAdapter
 
 from aws_lambda_powertools.shared.cache_dict import LRUDict
-from aws_lambda_powertools.utilities.parser.types import T
+
+if TYPE_CHECKING:
+    from aws_lambda_powertools.utilities.parser.types import T
 
 CACHE_TYPE_ADAPTER = LRUDict(max_items=1024)
 
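Why moving the `T` import under `TYPE_CHECKING` is safe here, shown as a hedged sketch with illustrative names (not from the codebase): the guarded import only runs for type checkers, and postponed evaluation means the annotation that references it is never resolved at runtime.

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # TYPE_CHECKING is False at runtime, so this import has no runtime cost
    # and cannot create an import cycle.
    from decimal import Decimal


def to_cents(amount: Decimal) -> int:
    # At runtime the annotation is just the string "Decimal"; only a type
    # checker ever resolves it.
    return int(amount * 100)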
aws_lambda_powertools/utilities/parser/models/apigw.py

+1 −3

@@ -1,11 +1,9 @@
 from datetime import datetime
-from typing import Any, Dict, List, Optional, Type, Union
+from typing import Any, Dict, List, Literal, Optional, Type, Union
 
 from pydantic import BaseModel, model_validator
 from pydantic.networks import IPvAnyNetwork
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 
 class ApiGatewayUserCertValidity(BaseModel):
     notBefore: str
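The same one-line substitution repeats in the model files below: `Literal` now comes directly from `typing` (part of the standard library since Python 3.8) instead of the re-export in `aws_lambda_powertools.utilities.parser.types`, which was presumably only needed for older interpreters. A small usage sketch with an illustrative model (not taken from this diff):

from typing import Literal

from pydantic import BaseModel


class Certificate(BaseModel):
    # Literal restricts the field to a fixed set of accepted values.
    status: Literal["VALID", "EXPIRED"]


print(Certificate(status="VALID"))
# Certificate(status="REVOKED") would raise a pydantic ValidationError.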

aws_lambda_powertools/utilities/parser/models/apigwv2.py

+1 −3

@@ -1,11 +1,9 @@
 from datetime import datetime
-from typing import Any, Dict, List, Optional, Type, Union
+from typing import Any, Dict, List, Literal, Optional, Type, Union
 
 from pydantic import BaseModel, Field
 from pydantic.networks import IPvAnyNetwork
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 
 class RequestContextV2AuthorizerIamCognito(BaseModel):
     amr: List[str]

aws_lambda_powertools/utilities/parser/models/cloudformation_custom_resource.py

+1 −3

@@ -1,9 +1,7 @@
-from typing import Any, Dict, Union
+from typing import Any, Dict, Literal, Union
 
 from pydantic import BaseModel, Field, HttpUrl
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 
 class CloudFormationCustomResourceBaseModel(BaseModel):
     request_type: str = Field(..., alias="RequestType")

aws_lambda_powertools/utilities/parser/models/dynamodb.py

+2 −2

@@ -1,10 +1,10 @@
+# ruff: noqa: FA100
 from datetime import datetime
-from typing import Any, Dict, List, Optional, Type, Union
+from typing import Any, Dict, List, Literal, Optional, Type, Union
 
 from pydantic import BaseModel, field_validator
 
 from aws_lambda_powertools.shared.dynamodb_deserializer import TypeDeserializer
-from aws_lambda_powertools.utilities.parser.types import Literal
 
 _DESERIALIZER = TypeDeserializer()
 

aws_lambda_powertools/utilities/parser/models/kafka.py

+1 −2

@@ -1,10 +1,9 @@
 from datetime import datetime
-from typing import Dict, List, Type, Union
+from typing import Dict, List, Literal, Type, Union
 
 from pydantic import BaseModel, field_validator
 
 from aws_lambda_powertools.shared.functions import base64_decode, bytes_to_string
-from aws_lambda_powertools.utilities.parser.types import Literal
 
 SERVERS_DELIMITER = ","
 

aws_lambda_powertools/utilities/parser/models/kinesis.py

+1 −2

@@ -1,14 +1,13 @@
 import json
 import zlib
-from typing import Dict, List, Type, Union
+from typing import Dict, List, Literal, Type, Union
 
 from pydantic import BaseModel, field_validator
 
 from aws_lambda_powertools.shared.functions import base64_decode
 from aws_lambda_powertools.utilities.parser.models.cloudwatch import (
     CloudWatchLogsDecode,
 )
-from aws_lambda_powertools.utilities.parser.types import Literal
 
 
 class KinesisDataStreamRecordPayload(BaseModel):

aws_lambda_powertools/utilities/parser/models/s3.py

+1 −3

@@ -1,13 +1,11 @@
 from datetime import datetime
-from typing import List, Optional
+from typing import List, Literal, Optional
 
 from pydantic import BaseModel, model_validator
 from pydantic.fields import Field
 from pydantic.networks import IPvAnyNetwork
 from pydantic.types import NonNegativeFloat
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 from .event_bridge import EventBridgeModel
 
 
aws_lambda_powertools/utilities/parser/models/s3_batch_operation.py

+1 −3

@@ -1,9 +1,7 @@
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Literal, Optional
 
 from pydantic import BaseModel, model_validator
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 
 class S3BatchOperationTaskModel(BaseModel):
     taskId: str

aws_lambda_powertools/utilities/parser/models/ses.py

+1 −3

@@ -1,11 +1,9 @@
 from datetime import datetime
-from typing import List, Optional
+from typing import List, Literal, Optional
 
 from pydantic import BaseModel, Field
 from pydantic.types import PositiveInt
 
-from ..types import Literal
-
 
 class SesReceiptVerdict(BaseModel):
     status: Literal["PASS", "FAIL", "GRAY", "PROCESSING_FAILED"]

aws_lambda_powertools/utilities/parser/models/sns.py

+1 −3

@@ -1,12 +1,10 @@
 from datetime import datetime
-from typing import Dict, List, Optional, Union
+from typing import Dict, List, Literal, Optional, Union
 from typing import Type as TypingType
 
 from pydantic import BaseModel, model_validator
 from pydantic.networks import HttpUrl
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 
 class SnsMsgAttributeModel(BaseModel):
     Type: str

aws_lambda_powertools/utilities/parser/models/sqs.py

+1 −3

@@ -1,10 +1,8 @@
 from datetime import datetime
-from typing import Dict, List, Optional, Sequence, Type, Union
+from typing import Dict, List, Literal, Optional, Sequence, Type, Union
 
 from pydantic import BaseModel
 
-from aws_lambda_powertools.utilities.parser.types import Literal
-
 
 class SqsAttributesModel(BaseModel):
     ApproximateReceiveCount: str

aws_lambda_powertools/utilities/parser/parser.py

+15 −13

@@ -2,27 +2,29 @@
 
 import logging
 import typing
-from typing import Any, Callable, Dict, Optional, Type, overload
+from typing import TYPE_CHECKING, Any, Callable, overload
 
 from pydantic import PydanticSchemaGenerationError, ValidationError
 
 from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
-from aws_lambda_powertools.utilities.parser.envelopes.base import Envelope
 from aws_lambda_powertools.utilities.parser.exceptions import InvalidEnvelopeError, InvalidModelTypeError
 from aws_lambda_powertools.utilities.parser.functions import _retrieve_or_set_model_from_cache
-from aws_lambda_powertools.utilities.parser.types import EventParserReturnType, T
-from aws_lambda_powertools.utilities.typing import LambdaContext
+
+if TYPE_CHECKING:
+    from aws_lambda_powertools.utilities.parser.envelopes.base import Envelope
+    from aws_lambda_powertools.utilities.parser.types import EventParserReturnType, T
+    from aws_lambda_powertools.utilities.typing import LambdaContext
 
 logger = logging.getLogger(__name__)
 
 
 @lambda_handler_decorator
 def event_parser(
     handler: Callable[..., EventParserReturnType],
-    event: Dict[str, Any],
+    event: dict[str, Any],
     context: LambdaContext,
-    model: Optional[type[T]] = None,
-    envelope: Optional[Type[Envelope]] = None,
+    model: type[T] | None = None,
+    envelope: type[Envelope] | None = None,
     **kwargs: Any,
 ) -> EventParserReturnType:
     """Lambda handler decorator to parse & validate events using Pydantic models

@@ -67,11 +69,11 @@ def handler(event: Order, context: LambdaContext):
     ----------
     handler: Callable
         Method to annotate on
-    event: Dict
+    event: dict
         Lambda event to be parsed & validated
     context: LambdaContext
         Lambda context object
-    model: Optional[type[T]]
+    model: type[T] | None
         Your data model that will replace the event.
     envelope: Envelope
         Optional envelope to extract the model from

@@ -111,14 +113,14 @@ def handler(event: Order, context: LambdaContext):
 
 
 @overload
-def parse(event: Dict[str, Any], model: type[T]) -> T: ... # pragma: no cover
+def parse(event: dict[str, Any], model: type[T]) -> T: ... # pragma: no cover
 
 
 @overload
-def parse(event: Dict[str, Any], model: type[T], envelope: Type[Envelope]) -> T: ... # pragma: no cover
+def parse(event: dict[str, Any], model: type[T], envelope: type[Envelope]) -> T: ... # pragma: no cover
 
 
-def parse(event: Dict[str, Any], model: type[T], envelope: Optional[Type[Envelope]] = None):
+def parse(event: dict[str, Any], model: type[T], envelope: type[Envelope] | None = None):
     """Standalone function to parse & validate events using Pydantic models
 
     Typically used when you need fine-grained control over error handling compared to event_parser decorator.

@@ -156,7 +158,7 @@ def handler(event: Order, context: LambdaContext):
 
     Parameters
     ----------
-    event: Dict
+    event: dict
         Lambda event to be parsed & validated
     model: Model
         Your data model that will replace the event
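Because parser.py now starts with `from __future__ import annotations`, signatures such as `event: dict[str, Any]` and `envelope: type[Envelope] | None` stay plain strings on Python 3.8, so this module can adopt the newer syntax even though the pydantic model files cannot. A hedged usage sketch of the standalone parse() shown above (the `Order` model is illustrative, not from the commit):

from typing import Optional

from pydantic import BaseModel

from aws_lambda_powertools.utilities.parser import parse


class Order(BaseModel):
    id: int
    # typing.Optional rather than `str | None`: pydantic evaluates this on 3.8.
    description: Optional[str] = None


order = parse(event={"id": 1, "description": "well made"}, model=Order)
print(order.id)  # 1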

ruff.toml

+3 −0

@@ -90,3 +90,6 @@ split-on-trailing-comma = true
 "aws_lambda_powertools/event_handler/openapi/compat.py" = ["F401"]
 # Maintenance: we're keeping EphemeralMetrics code in case of Hyrum's law so we can quickly revert it
 "aws_lambda_powertools/metrics/metrics.py" = ["ERA001"]
+"examples/*" = ["FA100"]
+"tests/*" = ["FA100"]
+"aws_lambda_powertools/utilities/parser/models/*" = ["FA100"]
