From 3d5062c3892fd0fc135e729d7f47116b8f404513 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Sun, 29 Jan 2023 17:29:55 +0000 Subject: [PATCH 01/81] (redis): initial commit --- .../utilities/idempotency/__init__.py | 12 +- .../utilities/idempotency/exceptions.py | 6 + .../idempotency/persistence/redis.py | 171 ++++++++++++++++++ poetry.lock | 43 ++++- pyproject.toml | 2 + 5 files changed, 229 insertions(+), 5 deletions(-) create mode 100644 aws_lambda_powertools/utilities/idempotency/persistence/redis.py diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py index 148b291ea6d..30447acb28c 100644 --- a/aws_lambda_powertools/utilities/idempotency/__init__.py +++ b/aws_lambda_powertools/utilities/idempotency/__init__.py @@ -8,7 +8,17 @@ from aws_lambda_powertools.utilities.idempotency.persistence.dynamodb import ( DynamoDBPersistenceLayer, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) from .idempotency import IdempotencyConfig, idempotent, idempotent_function -__all__ = ("DynamoDBPersistenceLayer", "BasePersistenceLayer", "idempotent", "idempotent_function", "IdempotencyConfig") +__all__ = ( + "DynamoDBPersistenceLayer", + "BasePersistenceLayer", + "idempotent", + "idempotent_function", + "IdempotencyConfig", + "RedisCachePersistenceLayer", +) diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index 69ab420850a..f27d5044da0 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -71,3 +71,9 @@ class IdempotencyKeyError(BaseError): """ Payload does not contain an idempotent key """ + + +class IdempotencyRedisConnectionError(BaseError): + """ + Payload does not contain an idempotent key + """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py 
b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py new file mode 100644 index 00000000000..407fdacba44 --- /dev/null +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -0,0 +1,171 @@ +import logging +import os +from typing import Any, Dict, Optional + +import redis + +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyItemNotFoundError, + IdempotencyPersistenceLayerError, + IdempotencyRedisConnectionError, +) +from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord + +logger = logging.getLogger(__name__) + + +class RedisCachePersistenceLayer(BasePersistenceLayer): + def __init__( + self, + host: str, + port: int = "6379", + username: Optional[str] = None, + password: Optional[str] = None, + db_index: int = "0", + static_pk_value: Optional[str] = None, + expiry_attr: str = "expiration", + in_progress_expiry_attr: str = "in_progress_expiration", + status_attr: str = "status", + data_attr: str = "data", + validation_key_attr: str = "validation", + ): + """ + Initialize the Redis client + Parameters + ---------- + host: str + Name of the host to connect to Redis instance/cluster + port: int + Number of the port to connect to Redis instance/cluster + username: str + Name of the username to connect to Redis instance/cluster in case of using ACL + See: https://redis.io/docs/management/security/acl/ + password: str + Password to connect to Redis instance/cluster + db_index: int + Index of Redis database + See: https://redis.io/commands/select/ + static_pk_value: str, optional + Redis attribute value for cache key, by default "idempotency#". 
+ expiry_attr: str, optional + Redis hash attribute name for expiry timestamp, by default "expiration" + in_progress_expiry_attr: str, optional + Redis hash attribute name for in-progress expiry timestamp, by default "in_progress_expiration" + status_attr: str, optional + Redis hash attribute name for status, by default "status" + data_attr: str, optional + Redis hash attribute name for response data, by default "data" + """ + + self._connection = None + self.host = host + self.port = port + self.username = username + self.password = password + self.db_index = db_index + + if static_pk_value is None: + static_pk_value = f"idempotency#{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, '')}" + + self.static_pk_value = static_pk_value + self.in_progress_expiry_attr = in_progress_expiry_attr + self.expiry_attr = expiry_attr + self.status_attr = status_attr + self.data_attr = data_attr + self.validation_key_attr = validation_key_attr + super(RedisCachePersistenceLayer, self).__init__() + + @property + def connection(self): + """ + Caching property to store redis connection + """ + if self._connection: + return self._connection + + logger.info(f"Trying to connect to Redis Host/Cluster: {self.host}") + + try: + self._connection = redis.Redis( + host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index + ) + except redis.exceptions.ConnectionError as exc: + logger.debug(f"Cannot connect in Redis Host: {self.host}") + raise IdempotencyRedisConnectionError("Could not to connect to Redis", exc) from exc + return self._connection + + @connection.setter + def connection(self, connection): + """ + Allow redis connection variable to be set directly, primarily for use in tests + """ + self._connection = connection + + def _get_key(self, idempotency_key: str) -> dict: + # Need to review this after adding GETKEY logic + if self.sort_key_attr: + return {self.key_attr: self.static_pk_value, self.sort_key_attr: idempotency_key} + return 
{self.key_attr: idempotency_key} + + def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: + # Need to review this after adding GETKEY logic + return DataRecord( + idempotency_key=item[self.key_attr], + status=item[self.status_attr], + expiry_timestamp=item[self.expiry_attr], + in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), + response_data=item.get(self.data_attr), + payload_hash=item.get(self.validation_key_attr), + ) + + def _get_record(self, idempotency_key) -> DataRecord: + # See: https://redis.io/commands/hgetall/ + response = self.connection.hgetall(idempotency_key) + + try: + item = response + except KeyError: + raise IdempotencyItemNotFoundError + return self._item_to_data_record(item) + + def _put_record(self, data_record: DataRecord) -> None: + + # Redis works with hset to support hashing keys with multiple attributes + # See: https://redis.io/commands/hset/ + item = { + "name": data_record.idempotency_key, + "mapping": { + self.in_progress_expiry_attr: data_record.in_progress_expiry_timestamp, + self.status_attr: data_record.status, + }, + } + + try: + logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") + self.connection.hset(**item) + # hset type must set expiration after adding the record + # Need to review this to get ttl in seconds + self.connection.expire(name=data_record.idempotency_key, time=60) + except Exception as exc: + logger.debug(f"Failed to add record idempotency key: {data_record.idempotency_key}") + raise IdempotencyPersistenceLayerError( + f"Failed to add record idempotency key: {data_record.idempotency_key}", exc + ) from exc + + def _update_record(self, data_record: DataRecord) -> None: + item = { + "name": data_record.idempotency_key, + "mapping": { + self.data_attr: data_record.response_data, + self.status_attr: data_record.status, + }, + } + logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") + self.connection.hset(**item) + 
+ def _delete_record(self, data_record: DataRecord) -> None: + logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") + # See: https://redis.io/commands/del/ + self.connection.delete(data_record.idempotency_key) diff --git a/poetry.lock b/poetry.lock index 83c36e27073..76a72f1a8a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,20 @@ # This file is automatically @generated by Poetry and should not be changed by hand. +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + [[package]] name = "attrs" version = "22.1.0" @@ -870,7 +885,7 @@ files = [ name = "importlib-metadata" version = "6.0.0" description = "Read metadata from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1062,7 +1077,6 @@ category = "dev" optional = false python-versions = "*" files = [ - {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, ] @@ -2139,6 +2153,27 @@ colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} future = "*" mando = ">=0.6,<0.7" +[[package]] +name = "redis" +version = "4.4.2" +description = "Python client for Redis database and key-value store" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ + {file = "redis-4.4.2-py3-none-any.whl", hash = 
"sha256:e6206448e2f8a432871d07d432c13ed6c2abcf6b74edb436c99752b1371be387"}, + {file = "redis-4.4.2.tar.gz", hash = "sha256:a010f6cb7378065040a02839c3f75c7e0fb37a87116fb4a95be82a95552776c7"}, +] + +[package.dependencies] +async-timeout = ">=4.0.2" +importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "regex" version = "2022.10.31" @@ -2644,7 +2679,7 @@ requests = ">=2.0,<3.0" name = "zipp" version = "3.11.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2666,4 +2701,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "6593de2a17ba398072a78ad12e6d3ec19bb3f0ec70ee572502e5e8b3c1866fb9" +content-hash = "471c33ef48bd747ea58e5e131a924e16ca843fa8dce74bfd1ed21104df52a9e9" diff --git a/pyproject.toml b/pyproject.toml index 71b9ad96319..7e3451cd825 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } typing-extensions = "^4.4.0" +redis = {version = "^4.4.2", optional = true} [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^7.1"} @@ -88,6 +89,7 @@ tracer = ["aws-xray-sdk"] all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
aws-sdk = ["boto3"] +redis = ["redis"] [tool.poetry.group.dev.dependencies] cfn-lint = "0.67.0" From aed821a6f1583e272d34732c02832df2dd78cf46 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 6 Feb 2023 21:02:17 +0000 Subject: [PATCH 02/81] feat(redis/idempotency): creating redis connections --- .../utilities/database/__init__.py | 3 + .../utilities/database/exceptions.py | 4 ++ .../utilities/database/redis.py | 56 +++++++++++++++ .../idempotency/persistence/redis.py | 68 +++---------------- 4 files changed, 72 insertions(+), 59 deletions(-) create mode 100644 aws_lambda_powertools/utilities/database/__init__.py create mode 100644 aws_lambda_powertools/utilities/database/exceptions.py create mode 100644 aws_lambda_powertools/utilities/database/redis.py diff --git a/aws_lambda_powertools/utilities/database/__init__.py b/aws_lambda_powertools/utilities/database/__init__.py new file mode 100644 index 00000000000..3939531172d --- /dev/null +++ b/aws_lambda_powertools/utilities/database/__init__.py @@ -0,0 +1,3 @@ +from aws_lambda_powertools.utilities.database.redis import RedisStandalone + +__all__ = RedisStandalone diff --git a/aws_lambda_powertools/utilities/database/exceptions.py b/aws_lambda_powertools/utilities/database/exceptions.py new file mode 100644 index 00000000000..5107cc0f485 --- /dev/null +++ b/aws_lambda_powertools/utilities/database/exceptions.py @@ -0,0 +1,4 @@ +class RedisConnectionError(Exception): + """ + Payload does not contain an idempotent key + """ diff --git a/aws_lambda_powertools/utilities/database/redis.py b/aws_lambda_powertools/utilities/database/redis.py new file mode 100644 index 00000000000..33edb6f02cb --- /dev/null +++ b/aws_lambda_powertools/utilities/database/redis.py @@ -0,0 +1,56 @@ +import logging +from typing import Optional + +import redis + +from aws_lambda_powertools.utilities.database.exceptions import RedisConnectionError + +logger = logging.getLogger(__name__) + + +class RedisStandalone: + def __init__( + self, 
host: str, port: int, username: Optional[str] = None, password: Optional[str] = None, db_index: int = "0" + ) -> None: + """ + Initialize the Redis standalone client + Parameters + ---------- + host: str + Name of the host to connect to Redis instance/cluster + port: int + Number of the port to connect to Redis instance/cluster + username: str + Name of the username to connect to Redis instance/cluster in case of using ACL + See: https://redis.io/docs/management/security/acl/ + password: str + Passwod to connect to Redis instance/cluster + db_index: int + Index of Redis database + See: https://redis.io/commands/select/ + """ + + self.host = host + self.port = port + self.username = username + self.password = password + self.db_index = db_index + + def _init_connection(self): + """ + Connection is cached, so returning this + """ + if self._connection: + return self._connection + + logger.info(f"Trying to connect to Redis Host/Cluster: {self.host}") + + try: + self._connection = redis.Redis( + host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index + ) + except redis.exceptions.ConnectionError as exc: + logger.debug(f"Cannot connect in Redis Host: {self.host}") + raise RedisConnectionError("Could not to connect to Redis Standalone", exc) from exc + + return self._connection diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 407fdacba44..9ba41c1c600 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -2,14 +2,11 @@ import os from typing import Any, Dict, Optional -import redis - from aws_lambda_powertools.shared import constants from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyItemNotFoundError, 
IdempotencyPersistenceLayerError, - IdempotencyRedisConnectionError, ) from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord @@ -19,11 +16,7 @@ class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - host: str, - port: int = "6379", - username: Optional[str] = None, - password: Optional[str] = None, - db_index: int = "0", + connection, static_pk_value: Optional[str] = None, expiry_attr: str = "expiration", in_progress_expiry_attr: str = "in_progress_expiration", @@ -32,21 +25,9 @@ def __init__( validation_key_attr: str = "validation", ): """ - Initialize the Redis client + Initialize the Redis Persistence Layer Parameters ---------- - host: str - Name of the host to connect to Redis instance/cluster - port: int - Number of the port to connect to Redis instance/cluster - username: str - Name of the username to connect to Redis instance/cluster in case of using ACL - See: https://redis.io/docs/management/security/acl/ - password: str - Password to connect to Redis instance/cluster - db_index: int - Index of Redis database - See: https://redis.io/commands/select/ static_pk_value: str, optional Redis attribute value for cache key, by default "idempotency#". 
expiry_attr: str, optional @@ -59,12 +40,8 @@ def __init__( Redis hash attribute name for response data, by default "data" """ - self._connection = None - self.host = host - self.port = port - self.username = username - self.password = password - self.db_index = db_index + # Initialize connection with Redis + self._connection = connection._init_connection() if static_pk_value is None: static_pk_value = f"idempotency#{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, '')}" @@ -77,32 +54,6 @@ def __init__( self.validation_key_attr = validation_key_attr super(RedisCachePersistenceLayer, self).__init__() - @property - def connection(self): - """ - Caching property to store redis connection - """ - if self._connection: - return self._connection - - logger.info(f"Trying to connect to Redis Host/Cluster: {self.host}") - - try: - self._connection = redis.Redis( - host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index - ) - except redis.exceptions.ConnectionError as exc: - logger.debug(f"Cannot connect in Redis Host: {self.host}") - raise IdempotencyRedisConnectionError("Could not to connect to Redis", exc) from exc - return self._connection - - @connection.setter - def connection(self, connection): - """ - Allow redis connection variable to be set directly, primarily for use in tests - """ - self._connection = connection - def _get_key(self, idempotency_key: str) -> dict: # Need to review this after adding GETKEY logic if self.sort_key_attr: @@ -122,7 +73,7 @@ def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: def _get_record(self, idempotency_key) -> DataRecord: # See: https://redis.io/commands/hgetall/ - response = self.connection.hgetall(idempotency_key) + response = self._connection.hgetall(idempotency_key) try: item = response @@ -131,7 +82,6 @@ def _get_record(self, idempotency_key) -> DataRecord: return self._item_to_data_record(item) def _put_record(self, data_record: DataRecord) -> None: - # Redis works with 
hset to support hashing keys with multiple attributes # See: https://redis.io/commands/hset/ item = { @@ -144,10 +94,10 @@ def _put_record(self, data_record: DataRecord) -> None: try: logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") - self.connection.hset(**item) + self._connection.hset(**item) # hset type must set expiration after adding the record # Need to review this to get ttl in seconds - self.connection.expire(name=data_record.idempotency_key, time=60) + self._connection.expire(name=data_record.idempotency_key, time=60) except Exception as exc: logger.debug(f"Failed to add record idempotency key: {data_record.idempotency_key}") raise IdempotencyPersistenceLayerError( @@ -163,9 +113,9 @@ def _update_record(self, data_record: DataRecord) -> None: }, } logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") - self.connection.hset(**item) + self._connection.hset(**item) def _delete_record(self, data_record: DataRecord) -> None: logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") # See: https://redis.io/commands/del/ - self.connection.delete(data_record.idempotency_key) + self._connection.delete(data_record.idempotency_key) From ffcf16d41fe024cdf62a9a770b187a28b68dbb94 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 6 Feb 2023 21:57:56 +0000 Subject: [PATCH 03/81] feat(redis/idempotency): creating redis connections --- .../utilities/database/__init__.py | 4 +- .../utilities/database/redis.py | 91 +++++++++++++++++-- 2 files changed, 87 insertions(+), 8 deletions(-) diff --git a/aws_lambda_powertools/utilities/database/__init__.py b/aws_lambda_powertools/utilities/database/__init__.py index 3939531172d..214a04d3329 100644 --- a/aws_lambda_powertools/utilities/database/__init__.py +++ b/aws_lambda_powertools/utilities/database/__init__.py @@ -1,3 +1,3 @@ -from aws_lambda_powertools.utilities.database.redis import RedisStandalone +from 
aws_lambda_powertools.utilities.database.redis import RedisCluster, RedisStandalone -__all__ = RedisStandalone +__all__ = (RedisStandalone, RedisCluster) diff --git a/aws_lambda_powertools/utilities/database/redis.py b/aws_lambda_powertools/utilities/database/redis.py index 33edb6f02cb..e78ec92a0d9 100644 --- a/aws_lambda_powertools/utilities/database/redis.py +++ b/aws_lambda_powertools/utilities/database/redis.py @@ -10,7 +10,13 @@ class RedisStandalone: def __init__( - self, host: str, port: int, username: Optional[str] = None, password: Optional[str] = None, db_index: int = "0" + self, + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + db_index: Optional[int] = None, + url: Optional[str] = None, ) -> None: """ Initialize the Redis standalone client @@ -24,19 +30,24 @@ def __init__( Name of the username to connect to Redis instance/cluster in case of using ACL See: https://redis.io/docs/management/security/acl/ password: str - Passwod to connect to Redis instance/cluster + Password to connect to Redis instance/cluster db_index: int Index of Redis database See: https://redis.io/commands/select/ + url: str + Redis client object configured from the given URL + See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url """ + self.url = url self.host = host self.port = port self.username = username self.password = password self.db_index = db_index + self._connection = None - def _init_connection(self): + def get_redis_connection(self): """ Connection is cached, so returning this """ @@ -46,11 +57,79 @@ def _init_connection(self): logger.info(f"Trying to connect to Redis Host/Cluster: {self.host}") try: - self._connection = redis.Redis( - host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index - ) + if self.url: + logger.debug(f"Using URL format to connect to Redis: {self.host}") + self._connection = 
redis.Redis.from_url(url=self.url) + else: + logger.debug(f"Using other parameters to connect to Redis: {self.host}") + self._connection = redis.Redis( + host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index + ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis Host: {self.host}") raise RedisConnectionError("Could not to connect to Redis Standalone", exc) from exc return self._connection + + +class RedisCluster: + def __init__( + self, + host: Optional[str] = None, + port: Optional[int] = None, + read_from_replicas: Optional[bool] = False, + url: Optional[str] = None, + ) -> None: + """ + Initialize the Redis standalone client + Parameters + ---------- + host: str + Name of the host to connect to Redis instance/cluster + port: int + Number of the port to connect to Redis instance/cluster + username: str + Name of the username to connect to Redis instance/cluster in case of using ACL + See: https://redis.io/docs/management/security/acl/ + password: str + Passwod to connect to Redis instance/cluster + db_index: int + Index of Redis database + See: https://redis.io/commands/select/ + url: str + Redis client object configured from the given URL + See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url + """ + + self.url = url + self.host = host + self.port = port + self.read_from_replicas = read_from_replicas + self._connection = None + + def get_redis_connection(self): + """ + Connection is cached, so returning this + """ + if self._connection: + return self._connection + + logger.info(f"Trying to connect to Redis Cluster: {self.host}") + + try: + if self.url: + logger.debug(f"Using URL format to connect to Redis Cluster: {self.host}") + self._connection = redis.Redis.from_url(url=self.url) + else: + logger.debug(f"Using other parameters to connect to Redis Cluster: {self.host}") + self._connection = redis.cluster.RedisCluster( + host=self.host, + port=self.port, 
+ server_type=None, + read_from_replicas=self.read_from_replicas, + ) + except redis.exceptions.ConnectionError as exc: + logger.debug(f"Cannot connect in Redis Cluster: {self.host}") + raise RedisConnectionError("Could not to connect to Redis Cluster", exc) from exc + + return self._connection From b5c579189c57737f8a04cde2447aebfff159c701 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 6 Feb 2023 22:17:48 +0000 Subject: [PATCH 04/81] feat(redis/idempotency): fixing import --- aws_lambda_powertools/utilities/database/exceptions.py | 2 +- aws_lambda_powertools/utilities/idempotency/exceptions.py | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/aws_lambda_powertools/utilities/database/exceptions.py b/aws_lambda_powertools/utilities/database/exceptions.py index 5107cc0f485..b4426c2b142 100644 --- a/aws_lambda_powertools/utilities/database/exceptions.py +++ b/aws_lambda_powertools/utilities/database/exceptions.py @@ -1,4 +1,4 @@ class RedisConnectionError(Exception): """ - Payload does not contain an idempotent key + Redis connection error """ diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index f27d5044da0..69ab420850a 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -71,9 +71,3 @@ class IdempotencyKeyError(BaseError): """ Payload does not contain an idempotent key """ - - -class IdempotencyRedisConnectionError(BaseError): - """ - Payload does not contain an idempotent key - """ From 6902e7334399d9656f183df8335fb6fc9562eca6 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 7 Feb 2023 16:13:47 +0000 Subject: [PATCH 05/81] feat(redis/idempotency): adding base class --- .../utilities/connections/__init__.py | 6 ++++++ .../utilities/connections/base_sync.py | 7 +++++++ .../{database => connections}/exceptions.py | 0 .../utilities/{database => 
connections}/redis.py | 13 +++++++------ .../utilities/database/__init__.py | 3 --- .../utilities/idempotency/persistence/redis.py | 2 +- 6 files changed, 21 insertions(+), 10 deletions(-) create mode 100644 aws_lambda_powertools/utilities/connections/__init__.py create mode 100644 aws_lambda_powertools/utilities/connections/base_sync.py rename aws_lambda_powertools/utilities/{database => connections}/exceptions.py (100%) rename aws_lambda_powertools/utilities/{database => connections}/redis.py (93%) delete mode 100644 aws_lambda_powertools/utilities/database/__init__.py diff --git a/aws_lambda_powertools/utilities/connections/__init__.py b/aws_lambda_powertools/utilities/connections/__init__.py new file mode 100644 index 00000000000..b9c93b96b68 --- /dev/null +++ b/aws_lambda_powertools/utilities/connections/__init__.py @@ -0,0 +1,6 @@ +from aws_lambda_powertools.utilities.connections.redis import ( + RedisCluster, + RedisStandalone, +) + +__all__ = (RedisStandalone, RedisCluster) diff --git a/aws_lambda_powertools/utilities/connections/base_sync.py b/aws_lambda_powertools/utilities/connections/base_sync.py new file mode 100644 index 00000000000..f67149c3277 --- /dev/null +++ b/aws_lambda_powertools/utilities/connections/base_sync.py @@ -0,0 +1,7 @@ +from abc import ABC, abstractmethod + + +class BaseConnectionSync(ABC): + @abstractmethod + def init_connection(self): + raise NotImplementedError() # pragma: no cover diff --git a/aws_lambda_powertools/utilities/database/exceptions.py b/aws_lambda_powertools/utilities/connections/exceptions.py similarity index 100% rename from aws_lambda_powertools/utilities/database/exceptions.py rename to aws_lambda_powertools/utilities/connections/exceptions.py diff --git a/aws_lambda_powertools/utilities/database/redis.py b/aws_lambda_powertools/utilities/connections/redis.py similarity index 93% rename from aws_lambda_powertools/utilities/database/redis.py rename to aws_lambda_powertools/utilities/connections/redis.py index 
e78ec92a0d9..6dfd1e839b9 100644 --- a/aws_lambda_powertools/utilities/database/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -3,12 +3,13 @@ import redis -from aws_lambda_powertools.utilities.database.exceptions import RedisConnectionError +from .base_sync import BaseConnectionSync +from .exceptions import RedisConnectionError logger = logging.getLogger(__name__) -class RedisStandalone: +class RedisStandalone(BaseConnectionSync): def __init__( self, host: Optional[str] = None, @@ -30,7 +31,7 @@ def __init__( Name of the username to connect to Redis instance/cluster in case of using ACL See: https://redis.io/docs/management/security/acl/ password: str - Password to connect to Redis instance/cluster + Passwod to connect to Redis instance/cluster db_index: int Index of Redis database See: https://redis.io/commands/select/ @@ -47,7 +48,7 @@ def __init__( self.db_index = db_index self._connection = None - def get_redis_connection(self): + def init_connection(self): """ Connection is cached, so returning this """ @@ -72,7 +73,7 @@ def get_redis_connection(self): return self._connection -class RedisCluster: +class RedisCluster(BaseConnectionSync): def __init__( self, host: Optional[str] = None, @@ -107,7 +108,7 @@ def __init__( self.read_from_replicas = read_from_replicas self._connection = None - def get_redis_connection(self): + def init_connection(self): """ Connection is cached, so returning this """ diff --git a/aws_lambda_powertools/utilities/database/__init__.py b/aws_lambda_powertools/utilities/database/__init__.py deleted file mode 100644 index 214a04d3329..00000000000 --- a/aws_lambda_powertools/utilities/database/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from aws_lambda_powertools.utilities.database.redis import RedisCluster, RedisStandalone - -__all__ = (RedisStandalone, RedisCluster) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 
9ba41c1c600..ee88e5854fb 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -41,7 +41,7 @@ def __init__( """ # Initialize connection with Redis - self._connection = connection._init_connection() + self._connection = connection.init_connection() if static_pk_value is None: static_pk_value = f"idempotency#{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, '')}" From 504d2589d32e30fbb956bc373899928508137a0b Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 7 Feb 2023 22:51:30 +0000 Subject: [PATCH 06/81] feat(redis/idempotency): adding logic to get record --- .../utilities/connections/redis.py | 7 +++- .../utilities/idempotency/persistence/base.py | 2 +- .../idempotency/persistence/redis.py | 37 +++++++++++++++---- 3 files changed, 37 insertions(+), 9 deletions(-) diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index 6dfd1e839b9..aa29d8ba39a 100644 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -64,7 +64,12 @@ def init_connection(self): else: logger.debug(f"Using other parameters to connect to Redis: {self.host}") self._connection = redis.Redis( - host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index + host=self.host, + port=self.port, + username=self.username, + password=self.password, + db=self.db_index, + decode_responses=True, ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis Host: {self.host}") diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index a87980d7fe0..aaf550e4785 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -37,7 +37,7 @@ class 
DataRecord: def __init__( self, - idempotency_key, + idempotency_key: Optional[str] = "", status: str = "", expiry_timestamp: Optional[int] = None, in_progress_expiry_timestamp: Optional[int] = None, diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index ee88e5854fb..6976a65642a 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -5,8 +5,8 @@ from aws_lambda_powertools.shared import constants from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, - IdempotencyPersistenceLayerError, ) from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord @@ -63,7 +63,6 @@ def _get_key(self, idempotency_key: str) -> dict: def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: # Need to review this after adding GETKEY logic return DataRecord( - idempotency_key=item[self.key_attr], status=item[self.status_attr], expiry_timestamp=item[self.expiry_attr], in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), @@ -89,20 +88,44 @@ def _put_record(self, data_record: DataRecord) -> None: "mapping": { self.in_progress_expiry_attr: data_record.in_progress_expiry_timestamp, self.status_attr: data_record.status, + self.expiry_attr: data_record.expiry_timestamp, }, } + if data_record.in_progress_expiry_timestamp is not None: + item["mapping"][self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp + + if self.payload_validation_enabled: + item["mapping"][self.validation_key_attr] = data_record.payload_hash + try: + # | LOCKED | RETRY if status = "INPROGRESS" | RETRY + # |----------------|-------------------------------------------------------|-------------> .... 
(time) + # | Lambda Idempotency Record + # | Timeout Timeout + # | (in_progress_expiry) (expiry) + + # Conditions to successfully save a record: + + # The idempotency key does not exist: + # - first time that this invocation key is used + # - previous invocation with the same key was deleted due to TTL + idempotency_key_not_exist = self._connection.exists(data_record.idempotency_key) + + # key exists + if idempotency_key_not_exist == 1: + raise + + # missing logic to compare expiration + logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") self._connection.hset(**item) # hset type must set expiration after adding the record # Need to review this to get ttl in seconds self._connection.expire(name=data_record.idempotency_key, time=60) - except Exception as exc: - logger.debug(f"Failed to add record idempotency key: {data_record.idempotency_key}") - raise IdempotencyPersistenceLayerError( - f"Failed to add record idempotency key: {data_record.idempotency_key}", exc - ) from exc + except Exception: + logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") + raise IdempotencyItemAlreadyExistsError def _update_record(self, data_record: DataRecord) -> None: item = { From 0d1e3e991be70a7b37f9d2e690333112f6cd7aa5 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 7 Feb 2023 23:01:52 +0000 Subject: [PATCH 07/81] feat(redis/idempotency): adding expiry timeout --- .../utilities/idempotency/persistence/base.py | 10 +++++++--- .../utilities/idempotency/persistence/redis.py | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index aaf550e4785..c14232ca9bf 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -116,6 +116,7 @@ class 
BasePersistenceLayer(ABC): def __init__(self): """Initialize the defaults""" self.function_name = "" + self.backend = "" self.configured = False self.event_key_jmespath: Optional[str] = None self.event_key_compiled_jmespath = None @@ -262,9 +263,12 @@ def _get_expiry_timestamp(self) -> int: unix timestamp of expiry date for idempotency record """ - now = datetime.datetime.now() - period = datetime.timedelta(seconds=self.expires_after_seconds) - return int((now + period).timestamp()) + if self.backend == "redis": + return self.expires_after_seconds + else: + now = datetime.datetime.now() + period = datetime.timedelta(seconds=self.expires_after_seconds) + return int((now + period).timestamp()) def _save_to_cache(self, data_record: DataRecord): """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 6976a65642a..4603c98e944 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -122,7 +122,7 @@ def _put_record(self, data_record: DataRecord) -> None: self._connection.hset(**item) # hset type must set expiration after adding the record # Need to review this to get ttl in seconds - self._connection.expire(name=data_record.idempotency_key, time=60) + self._connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) except Exception: logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") raise IdempotencyItemAlreadyExistsError From 3126885a4ced64e052e8296803f7bd2ca4cae029 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 17 Feb 2023 00:04:13 +0000 Subject: [PATCH 08/81] feat(redis) - refactoring connection and fixing mypy errors --- .../utilities/connections/__init__.py | 2 +- .../utilities/connections/redis.py | 107 ++++++----- .../utilities/idempotency/persistence/base.py | 2 +- 
.../idempotency/persistence/dynamodb.py | 2 +- .../idempotency/persistence/redis.py | 14 +- poetry.lock | 168 +++++++++++++++++- pyproject.toml | 1 + 7 files changed, 225 insertions(+), 71 deletions(-) diff --git a/aws_lambda_powertools/utilities/connections/__init__.py b/aws_lambda_powertools/utilities/connections/__init__.py index b9c93b96b68..660c559447f 100644 --- a/aws_lambda_powertools/utilities/connections/__init__.py +++ b/aws_lambda_powertools/utilities/connections/__init__.py @@ -3,4 +3,4 @@ RedisStandalone, ) -__all__ = (RedisStandalone, RedisCluster) +__all__ = ["RedisStandalone", "RedisCluster"] diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index aa29d8ba39a..10a314db5d8 100644 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -1,5 +1,5 @@ import logging -from typing import Optional +from typing import Optional, Type, Union import redis @@ -9,36 +9,19 @@ logger = logging.getLogger(__name__) -class RedisStandalone(BaseConnectionSync): +class RedisConnection(BaseConnectionSync): def __init__( self, + client: Type[Union[redis.Redis, redis.RedisCluster]], host: Optional[str] = None, port: Optional[int] = None, username: Optional[str] = None, password: Optional[str] = None, db_index: Optional[int] = None, url: Optional[str] = None, + **extra_options, ) -> None: - """ - Initialize the Redis standalone client - Parameters - ---------- - host: str - Name of the host to connect to Redis instance/cluster - port: int - Number of the port to connect to Redis instance/cluster - username: str - Name of the username to connect to Redis instance/cluster in case of using ACL - See: https://redis.io/docs/management/security/acl/ - password: str - Passwod to connect to Redis instance/cluster - db_index: int - Index of Redis database - See: https://redis.io/commands/select/ - url: str - Redis client object configured from the 
given URL - See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url - """ + self.extra_options: dict = {} self.url = url self.host = host @@ -46,7 +29,9 @@ def __init__( self.username = username self.password = password self.db_index = db_index + self.extra_options.update(**extra_options) self._connection = None + self._client = client def init_connection(self): """ @@ -55,36 +40,40 @@ def init_connection(self): if self._connection: return self._connection - logger.info(f"Trying to connect to Redis Host/Cluster: {self.host}") + logger.info(f"Trying to connect to Redis: {self.host}") try: if self.url: logger.debug(f"Using URL format to connect to Redis: {self.host}") - self._connection = redis.Redis.from_url(url=self.url) + self._connection = self._client.from_url(url=self.url) else: logger.debug(f"Using other parameters to connect to Redis: {self.host}") - self._connection = redis.Redis( + self._connection = self._client( host=self.host, port=self.port, username=self.username, password=self.password, db=self.db_index, decode_responses=True, + **self.extra_options, ) except redis.exceptions.ConnectionError as exc: - logger.debug(f"Cannot connect in Redis Host: {self.host}") - raise RedisConnectionError("Could not to connect to Redis Standalone", exc) from exc + logger.debug(f"Cannot connect in Redis: {self.host}") + raise RedisConnectionError("Could not to connect to Redis", exc) from exc return self._connection -class RedisCluster(BaseConnectionSync): +class RedisStandalone(RedisConnection): def __init__( self, host: Optional[str] = None, port: Optional[int] = None, - read_from_replicas: Optional[bool] = False, + username: Optional[str] = None, + password: Optional[str] = None, + db_index: Optional[int] = None, url: Optional[str] = None, + **extra_options, ) -> None: """ Initialize the Redis standalone client @@ -106,36 +95,40 @@ def __init__( Redis client object configured from the given URL See: 
https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url """ + print(extra_options) + super().__init__(redis.Redis, host, port, username, password, db_index, url, **extra_options) - self.url = url - self.host = host - self.port = port - self.read_from_replicas = read_from_replicas - self._connection = None - def init_connection(self): +class RedisCluster(RedisConnection): + def __init__( + self, + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + db_index: Optional[int] = None, + url: Optional[str] = None, + **extra_options, + ) -> None: """ - Connection is cached, so returning this + Initialize the Redis standalone client + Parameters + ---------- + host: str + Name of the host to connect to Redis instance/cluster + port: int + Number of the port to connect to Redis instance/cluster + username: str + Name of the username to connect to Redis instance/cluster in case of using ACL + See: https://redis.io/docs/management/security/acl/ + password: str + Passwod to connect to Redis instance/cluster + db_index: int + Index of Redis database + See: https://redis.io/commands/select/ + url: str + Redis client object configured from the given URL + See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url """ - if self._connection: - return self._connection - logger.info(f"Trying to connect to Redis Cluster: {self.host}") - - try: - if self.url: - logger.debug(f"Using URL format to connect to Redis Cluster: {self.host}") - self._connection = redis.Redis.from_url(url=self.url) - else: - logger.debug(f"Using other parameters to connect to Redis Cluster: {self.host}") - self._connection = redis.cluster.RedisCluster( - host=self.host, - port=self.port, - server_type=None, - read_from_replicas=self.read_from_replicas, - ) - except redis.exceptions.ConnectionError as exc: - logger.debug(f"Cannot connect in Redis Cluster: {self.host}") - raise 
RedisConnectionError("Could not to connect to Redis Cluster", exc) from exc - - return self._connection + super().__init__(redis.cluster.RedisCluster, host, port, username, password, db_index, url, **extra_options) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index c14232ca9bf..a19dd073d7a 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -37,7 +37,7 @@ class DataRecord: def __init__( self, - idempotency_key: Optional[str] = "", + idempotency_key: str = "", status: str = "", expiry_timestamp: Optional[int] = None, in_progress_expiry_timestamp: Optional[int] = None, diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index b05d8216b50..fe47d12845c 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -26,7 +26,7 @@ class DynamoDBPersistenceLayer(BasePersistenceLayer): def __init__( self, table_name: str, - key_attr: str = "id", + key_attr: Optional[str] = "id", static_pk_value: Optional[str] = None, sort_key_attr: Optional[str] = None, expiry_attr: str = "expiration", diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 4603c98e944..b291a5e2ef1 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -19,10 +19,10 @@ def __init__( connection, static_pk_value: Optional[str] = None, expiry_attr: str = "expiration", - in_progress_expiry_attr: str = "in_progress_expiration", + in_progress_expiry_attr="in_progress_expiration", status_attr: str = "status", data_attr: str = "data", - 
validation_key_attr: str = "validation", + validation_key_attr="validation", ): """ Initialize the Redis Persistence Layer @@ -54,12 +54,6 @@ def __init__( self.validation_key_attr = validation_key_attr super(RedisCachePersistenceLayer, self).__init__() - def _get_key(self, idempotency_key: str) -> dict: - # Need to review this after adding GETKEY logic - if self.sort_key_attr: - return {self.key_attr: self.static_pk_value, self.sort_key_attr: idempotency_key} - return {self.key_attr: idempotency_key} - def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: # Need to review this after adding GETKEY logic return DataRecord( @@ -93,10 +87,10 @@ def _put_record(self, data_record: DataRecord) -> None: } if data_record.in_progress_expiry_timestamp is not None: - item["mapping"][self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp + item.update({"mapping": {self.in_progress_expiry_attr: data_record.in_progress_expiry_timestamp}}) if self.payload_validation_enabled: - item["mapping"][self.validation_key_attr] = data_record.payload_hash + item.update({"mapping": {self.validation_key_attr: data_record.payload_hash}}) try: # | LOCKED | RETRY if status = "INPROGRESS" | RETRY diff --git a/poetry.lock b/poetry.lock index d9d1ee6fb0f..040311acba0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -360,6 +360,83 @@ files = [ {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = 
"cfn-lint" version = "0.67.0" @@ -604,6 +681,52 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "cryptography" +version = "39.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, + {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, + {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, + {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, + {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, + {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, + {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, + {file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, + {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, + {file = 
"cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +sdist = ["setuptools-rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test-randomorder = ["pytest-randomly"] +tox = ["tox"] + [[package]] name = "decorator" version = "5.1.1" @@ -1912,6 +2035,18 @@ files = [ {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + [[package]] name = "pydantic" version = "1.10.4" @@ -2639,6 +2774,21 @@ files = [ doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] test = ["mypy", "pytest", "typing-extensions"] +[[package]] +name = "types-pyopenssl" +version = "23.0.0.3" +description = "Typing stubs for pyOpenSSL" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-pyOpenSSL-23.0.0.3.tar.gz", hash = "sha256:6ca54d593f8b946f9570f9ed7457c41da3b518feff5e344851941a6209bea62b"}, + {file = "types_pyOpenSSL-23.0.0.3-py3-none-any.whl", hash = 
"sha256:847ab17a16475a882dc29898648a6a35ad0d3e11a5bba5aa8ab2f3435a8647cb"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" + [[package]] name = "types-python-dateutil" version = "2.8.19.6" @@ -2651,6 +2801,22 @@ files = [ {file = "types_python_dateutil-2.8.19.6-py3-none-any.whl", hash = "sha256:cfb7d31021c6bce6f3362c69af6e3abb48fe3e08854f02487e844ff910deec2a"}, ] +[[package]] +name = "types-redis" +version = "4.5.1.1" +description = "Typing stubs for redis" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-redis-4.5.1.1.tar.gz", hash = "sha256:c072e4824855f46d0a968509c3e0fa4789fc13b62d472064527bad3d1815aeed"}, + {file = "types_redis-4.5.1.1-py3-none-any.whl", hash = "sha256:081dfeec730df6e3f32ccbdafe3198873b7c02516c22d79cc2a40efdd69a3963"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + [[package]] name = "types-requests" version = "2.28.11.12" @@ -2881,4 +3047,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "7b24ff99d31f27beb822b0917cfb5cdf1be6a3b49aec62b5a35ef0ca21ec49e4" +content-hash = "8018f0b880e13d656975c8a26b840469485e97cf539c2624ae9e5a27cb6f0959" diff --git a/pyproject.toml b/pyproject.toml index 70ae7356f80..50cad7b5ad5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,6 +105,7 @@ redis = ["redis"] cfn-lint = "0.67.0" mypy = "^0.982" types-python-dateutil = "^2.8.19.6" +types-redis = "^4.5.1.1" [tool.coverage.run] source = ["aws_lambda_powertools"] From 81c141eb0b337c3da5decc81c065a05507cbda2e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 17 Feb 2023 00:06:36 +0000 Subject: [PATCH 09/81] feat(redis) - removing wrong print --- aws_lambda_powertools/utilities/connections/redis.py | 1 - 1 file changed, 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index 10a314db5d8..d394cba4777 100644 --- 
a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -95,7 +95,6 @@ def __init__( Redis client object configured from the given URL See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url """ - print(extra_options) super().__init__(redis.Redis, host, port, username, password, db_index, url, **extra_options) From 5d6b0d0a24f54cfde90f9d456e058192fe325dd1 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 17 Feb 2023 23:32:38 +0000 Subject: [PATCH 10/81] feat(redis) - removing fields and adding additional logic to validate the idempotency key --- .../idempotency/persistence/dynamodb.py | 2 + .../idempotency/persistence/redis.py | 61 ++++++++++--------- 2 files changed, 34 insertions(+), 29 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index fe47d12845c..26f4e4c841d 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -59,6 +59,8 @@ def __init__( DynamoDB attribute name for status, by default "status" data_attr: str, optional DynamoDB attribute name for response data, by default "data" + validation_key_attr: str, optional + DynamoDB attribute name for hashed representation of the parts of the event used for validation boto_config: botocore.config.Config, optional Botocore configuration to pass during client initialization boto3_session : boto3.session.Session, optional diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index b291a5e2ef1..56913a70d20 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,14 +1,18 @@ +import datetime import logging -import os -from 
typing import Any, Dict, Optional +from typing import Any, Dict, Union + +import redis -from aws_lambda_powertools.shared import constants from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, ) -from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord +from aws_lambda_powertools.utilities.idempotency.persistence.base import ( + STATUS_CONSTANTS, + DataRecord, +) logger = logging.getLogger(__name__) @@ -17,48 +21,37 @@ class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, connection, - static_pk_value: Optional[str] = None, - expiry_attr: str = "expiration", - in_progress_expiry_attr="in_progress_expiration", + in_progress_expiry_attr: str = "in_progress_expiration", status_attr: str = "status", data_attr: str = "data", - validation_key_attr="validation", + validation_key_attr: str = "validation", ): """ Initialize the Redis Persistence Layer Parameters ---------- - static_pk_value: str, optional - Redis attribute value for cache key, by default "idempotency#". 
- expiry_attr: str, optional - Redis hash attribute name for expiry timestamp, by default "expiration" in_progress_expiry_attr: str, optional Redis hash attribute name for in-progress expiry timestamp, by default "in_progress_expiration" status_attr: str, optional Redis hash attribute name for status, by default "status" data_attr: str, optional Redis hash attribute name for response data, by default "data" + validation_key_attr: str, optional + Redis hash attribute name for hashed representation of the parts of the event used for validation """ # Initialize connection with Redis - self._connection = connection.init_connection() - - if static_pk_value is None: - static_pk_value = f"idempotency#{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, '')}" + self._connection: Union[redis.Redis, redis.RedisCluster] = connection.init_connection() - self.static_pk_value = static_pk_value self.in_progress_expiry_attr = in_progress_expiry_attr - self.expiry_attr = expiry_attr self.status_attr = status_attr self.data_attr = data_attr self.validation_key_attr = validation_key_attr super(RedisCachePersistenceLayer, self).__init__() def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: - # Need to review this after adding GETKEY logic return DataRecord( status=item[self.status_attr], - expiry_timestamp=item[self.expiry_attr], in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), response_data=item.get(self.data_attr), payload_hash=item.get(self.validation_key_attr), @@ -75,23 +68,24 @@ def _get_record(self, idempotency_key) -> DataRecord: return self._item_to_data_record(item) def _put_record(self, data_record: DataRecord) -> None: + item: Dict[str, Any] = {} + # Redis works with hset to support hashing keys with multiple attributes # See: https://redis.io/commands/hset/ item = { "name": data_record.idempotency_key, "mapping": { - self.in_progress_expiry_attr: data_record.in_progress_expiry_timestamp, self.status_attr: data_record.status, - 
self.expiry_attr: data_record.expiry_timestamp, }, } if data_record.in_progress_expiry_timestamp is not None: - item.update({"mapping": {self.in_progress_expiry_attr: data_record.in_progress_expiry_timestamp}}) + item["mapping"][self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp if self.payload_validation_enabled: - item.update({"mapping": {self.validation_key_attr: data_record.payload_hash}}) + item["mapping"][self.validation_key_attr] = data_record.payload_hash + now = datetime.datetime.now() try: # | LOCKED | RETRY if status = "INPROGRESS" | RETRY # |----------------|-------------------------------------------------------|-------------> .... (time) @@ -104,13 +98,20 @@ def _put_record(self, data_record: DataRecord) -> None: # The idempotency key does not exist: # - first time that this invocation key is used # - previous invocation with the same key was deleted due to TTL - idempotency_key_not_exist = self._connection.exists(data_record.idempotency_key) + idempotency_record = self._connection.hgetall(data_record.idempotency_key) + if len(idempotency_record) > 0: + # record already exists. 
- # key exists - if idempotency_key_not_exist == 1: - raise + # status is completed, so raise exception because it exists and still valid + if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"]: + raise - # missing logic to compare expiration + # checking if in_progress_expiry_attr exists + # if in_progress_expiry_attr exist, must be lower than now + if self.in_progress_expiry_attr in idempotency_record and int( + idempotency_record[self.in_progress_expiry_attr] + ) > int(now.timestamp() * 1000): + raise logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") self._connection.hset(**item) @@ -122,6 +123,8 @@ def _put_record(self, data_record: DataRecord) -> None: raise IdempotencyItemAlreadyExistsError def _update_record(self, data_record: DataRecord) -> None: + item: Dict[str, Any] = {} + item = { "name": data_record.idempotency_key, "mapping": { From 038926cd3418278e10f23c9785bda92c86d01d6e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Fri, 17 Feb 2023 23:43:20 +0000 Subject: [PATCH 11/81] feat(redis) - adding redis as dev dependency --- poetry.lock | 6 +++--- pyproject.toml | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 00226bc6eec..871a2472a9b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,7 +27,7 @@ name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" category = "main" -optional = true +optional = false python-versions = ">=3.6" files = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, @@ -2536,7 +2536,7 @@ name = "redis" version = "4.5.1" description = "Python client for Redis database and key-value store" category = "main" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "redis-4.5.1-py3-none-any.whl", hash = "sha256:5deb072d26e67d2be1712603bfb7947ec3431fb0eec9c578994052e33035af6d"}, @@ 
-3144,4 +3144,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "ddcef7738045cc2c082cb5a58200d7a2305e82376415a382d56d03227ae83ba1" +content-hash = "bdbccdd716558b962a801c46c062aeb7892153a1f821c3cfee836294a102cb0f" diff --git a/pyproject.toml b/pyproject.toml index e82119375e8..54f06b6a849 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,6 +107,7 @@ mypy = "^0.982" types-python-dateutil = "^2.8.19.6" types-redis = "^4.5.1.1" httpx = "^0.23.3" +redis = "^4.5.1" [tool.coverage.run] source = ["aws_lambda_powertools"] From ca9b16db14f945f6f2696258e035f5247817d597 Mon Sep 17 00:00:00 2001 From: Vandita Patidar Date: Thu, 23 Mar 2023 14:11:04 -0700 Subject: [PATCH 12/81] Update idempotency.md Signed-off-by: Vandita Patidar --- docs/utilities/idempotency.md | 254 ++++++++++++++++++++++++++++++++++ 1 file changed, 254 insertions(+) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 49a028168b3..081ad3a509e 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -28,6 +28,7 @@ times with the same parameters**. This makes idempotent operations safe to retry ### IAM Permissions +#### DynamoDB Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, `dynamodb:UpdateItem` and `dynamodb:DeleteItem` IAM permissions before using this feature. ???+ note @@ -35,10 +36,14 @@ Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, ### Required resources +_**DynamoDB**_ Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. 
+_**Redis**_ +Before getting started you need to setup your [EC2 Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EC2_GetStarted.html) and [ElastiCache for Redis cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/GettingStarted.html). + **Default table configuration** If you're not [changing the default configuration for the DynamoDB persistence layer](#dynamodbpersistencelayer), this is the expected default configuration: @@ -90,6 +95,8 @@ Resources: ### Idempotent decorator +_**DynamoDB**_ + You can quickly start by initializing the `DynamoDBPersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. === "app.py" @@ -123,6 +130,31 @@ You can quickly start by initializing the `DynamoDBPersistenceLayer` class and u "product_id": "123456789" } ``` + +_**Redis**_ + +You can initialize `RedisCachePersistenceLayer` class and use it with `idempotent` decorator on your lambda handler. + +=== "app.py" + +``` + from aws_lambda_powertools.utilities.connections import RedisStandalone, RedisCluster + from aws_lambda_powertools.utilities.idempotency import ( + idempotent, + RedisCachePersistenceLayer, + IdempotencyConfig + ) + # For connection using Redis Standalone architecture + redis_connection = RedisStandalone(host="192.168.68.112", port=6379, password="pass", db_index=0) + + persistence_layer = RedisCachePersistenceLayer(connection=redis_connection) + config = IdempotencyConfig( + expires_after_seconds=1*60, # 1 minutes + ) + @idempotent(config=config, persistence_store=persistence_layer) + def lambda_handler(event, context): + return {"message":"Hello"} +``` ### Idempotent_function decorator @@ -565,6 +597,57 @@ When using DynamoDB as a persistence layer, you can alter the attribute names by | **sort_key_attr** | | | Sort key of the table (if table is configured with a sort key). | | **static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. 
Only used when **sort_key_attr** is set. | +#### RedisCachePersistenceLayer + +This persistence layer is built-in and you can use ElastiCache to store and see the keys. + +``` + from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer + persistence_layer = RedisCachePersistenceLayer( + static_pk_value: Optional[str] = None, + expiry_attr: str = "expiration", + in_progress_expiry_attr: str = "in_progress_expiration", + status_attr: str = "status", + data_attr: str = "data", + validation_key_attr: str = "validation", + ) +``` + +When using ElastiCache for Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: + +| Parameter | Required | Default | Description | +| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | +| **static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. Only used when **sort_key_attr** is set. 
| +| **expiry_attr** | | `expiration` | Unix timestamp of when record expires | +| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | +| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | +| **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | +| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | + +#### RedisStandalone/RedisCluster: + +``` +from aws_lambda_powertools.utilities.connections import RedisStandalone,RedisCluster + +redis_connection = RedisStandalone( + host="192.168.68.112", + port=6379, + username = "abc" + password="pass", + db_index=0, + url = None +) +``` + +| Parameter | Required | Default | Description | +| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | +| **host** | | `localhost` | Name of the host to connect to Redis instance/cluster | +| **port** | | 6379 | Number of the port to connect to Redis instance/cluster | +| **username** | | `None` | Name of the username to connect to Redis instance/cluster in case of using ACL | +| **password** | | `None` | Passwod to connect to Redis instance/cluster | +| **db_index** | | 0. | Index of Redis database | +| **url** | | `None` | Redis client object configured from the given URL. | + ## Advanced ### Customizing the default behavior @@ -626,6 +709,8 @@ In most cases, it is not desirable to store the idempotency records forever. 
Rat You can change this window with the **`expires_after_seconds`** parameter: +_**DynamoDB**_ + ```python hl_lines="8 11" title="Adjusting cache TTL" from aws_lambda_powertools.utilities.idempotency import ( IdempotencyConfig, DynamoDBPersistenceLayer, idempotent @@ -642,6 +727,24 @@ def handler(event, context): ... ``` +_**Redis**_ + +``` +from aws_lambda_powertools.utilities.connections import RedisStandalone, RedisCluster +from aws_lambda_powertools.utilities.idempotency import ( + idempotent, + RedisCachePersistenceLayer, + IdempotencyConfig +) +# For connection using Redis Standalone architecture +redis_connection = RedisStandalone(host="192.168.68.112", port=6379, password="pass", db_index=0) + +persistence_layer = RedisCachePersistenceLayer(connection=redis_connection) +config = IdempotencyConfig( + expires_after_seconds=5*60, # 5 minutes +) +``` + This will mark any records older than 5 minutes as expired, and the lambda handler will be executed as normal if it is invoked with a matching payload. ???+ note "Note: DynamoDB time-to-live field" @@ -856,6 +959,8 @@ This utility provides an abstract base class (ABC), so that you can implement yo You can inherit from the `BasePersistenceLayer` class and implement the abstract methods `_get_record`, `_put_record`, `_update_record` and `_delete_record`. 
+_**DynamoDB**_ + ```python hl_lines="8-13 57 65 74 96 124" title="Excerpt DynamoDB Persistence Layer implementation for reference" import datetime import logging @@ -985,6 +1090,155 @@ class DynamoDBPersistenceLayer(BasePersistenceLayer): self.table.delete_item(Key={self.key_attr: data_record.idempotency_key},) ``` +_**Redis**_ + +``` +import logging +import os +from typing import Any, Dict, Optional + +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyItemAlreadyExistsError, + IdempotencyItemNotFoundError, +) +from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord + +logger = logging.getLogger(__name__) + + +class RedisCachePersistenceLayer(BasePersistenceLayer): + def __init__( + self, + connection, + static_pk_value: Optional[str] = None, + expiry_attr: str = "expiration", + in_progress_expiry_attr: str = "in_progress_expiration", + status_attr: str = "status", + data_attr: str = "data", + validation_key_attr: str = "validation", + ): + """ + Initialize the Redis Persistence Layer + Parameters + ---------- + static_pk_value: str, optional + Redis attribute value for cache key, by default "idempotency#". 
+ expiry_attr: str, optional + Redis hash attribute name for expiry timestamp, by default "expiration" + in_progress_expiry_attr: str, optional + Redis hash attribute name for in-progress expiry timestamp, by default "in_progress_expiration" + status_attr: str, optional + Redis hash attribute name for status, by default "status" + data_attr: str, optional + Redis hash attribute name for response data, by default "data" + """ + + # Initialize connection with Redis + self._connection = connection.init_connection() + + if static_pk_value is None: + static_pk_value = f"idempotency#{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, '')}" + + self.static_pk_value = static_pk_value + self.in_progress_expiry_attr = in_progress_expiry_attr + self.expiry_attr = expiry_attr + self.status_attr = status_attr + self.data_attr = data_attr + self.validation_key_attr = validation_key_attr + super(RedisCachePersistenceLayer, self).__init__() + + def _get_key(self, idempotency_key: str) -> dict: + # Need to review this after adding GETKEY logic + if self.sort_key_attr: + return {self.key_attr: self.static_pk_value, self.sort_key_attr: idempotency_key} + return {self.key_attr: idempotency_key} + + def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: + # Need to review this after adding GETKEY logic + return DataRecord( + status=item[self.status_attr], + expiry_timestamp=item[self.expiry_attr], + in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), + response_data=item.get(self.data_attr), + payload_hash=item.get(self.validation_key_attr), + ) + + def _get_record(self, idempotency_key) -> DataRecord: + # See: https://redis.io/commands/hgetall/ + response = self._connection.hgetall(idempotency_key) + + try: + item = response + except KeyError: + raise IdempotencyItemNotFoundError + return self._item_to_data_record(item) + + def _put_record(self, data_record: DataRecord) -> None: + # Redis works with hset to support hashing keys with multiple attributes + # 
See: https://redis.io/commands/hset/ + item = { + "name": data_record.idempotency_key, + "mapping": { + self.in_progress_expiry_attr: data_record.in_progress_expiry_timestamp, + self.status_attr: data_record.status, + self.expiry_attr: data_record.expiry_timestamp, + }, + } + + if data_record.in_progress_expiry_timestamp is not None: + item["mapping"][self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp + + if self.payload_validation_enabled: + item["mapping"][self.validation_key_attr] = data_record.payload_hash + + try: + # | LOCKED | RETRY if status = "INPROGRESS" | RETRY + # |----------------|-------------------------------------------------------|-------------> .... (time) + # | Lambda Idempotency Record + # | Timeout Timeout + # | (in_progress_expiry) (expiry) + + # Conditions to successfully save a record: + + # The idempotency key does not exist: + # - first time that this invocation key is used + # - previous invocation with the same key was deleted due to TTL + idempotency_key_not_exist = self._connection.exists(data_record.idempotency_key) + + # key exists + if idempotency_key_not_exist == 1: + raise + + # missing logic to compare expiration + + logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") + self._connection.hset(**item) + # hset type must set expiration after adding the record + # Need to review this to get ttl in seconds + self._connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) + except Exception: + logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") + raise IdempotencyItemAlreadyExistsError + + def _update_record(self, data_record: DataRecord) -> None: + item = { + "name": data_record.idempotency_key, + "mapping": { + self.data_attr: data_record.response_data, + self.status_attr: data_record.status, + }, + } + logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") + 
self._connection.hset(**item) + + def _delete_record(self, data_record: DataRecord) -> None: + logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") + # See: https://redis.io/commands/del/ + self._connection.delete(data_record.idempotency_key) +``` + ???+ danger Pay attention to the documentation for each - you may need to perform additional checks inside these methods to ensure the idempotency guarantees remain intact. From b8bcfe4b90503c13c7bbdce1a486a3d24ffcc806 Mon Sep 17 00:00:00 2001 From: Vandita Patidar Date: Sun, 26 Mar 2023 21:35:44 -0700 Subject: [PATCH 13/81] Update idempotency.md Signed-off-by: Vandita Patidar --- docs/utilities/idempotency.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 081ad3a509e..8b0c1800c94 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -37,11 +37,13 @@ Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, ### Required resources _**DynamoDB**_ + Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. _**Redis**_ + Before getting started you need to setup your [EC2 Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EC2_GetStarted.html) and [ElastiCache for Redis cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/GettingStarted.html). 
**Default table configuration** From cbfd890ddd76c04e182f0d9f805b38b5485a767c Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Fri, 23 Jun 2023 15:58:32 -0700 Subject: [PATCH 14/81] resolve regarding to ruben's comment --- .../utilities/connections/__init__.py | 7 +- .../utilities/connections/redis.py | 91 +++---------------- .../utilities/idempotency/__init__.py | 2 + .../utilities/idempotency/persistence/base.py | 13 ++- .../idempotency/persistence/redis.py | 4 +- docs/utilities/idempotency.md | 34 +++---- 6 files changed, 45 insertions(+), 106 deletions(-) diff --git a/aws_lambda_powertools/utilities/connections/__init__.py b/aws_lambda_powertools/utilities/connections/__init__.py index 660c559447f..f517e30c09d 100644 --- a/aws_lambda_powertools/utilities/connections/__init__.py +++ b/aws_lambda_powertools/utilities/connections/__init__.py @@ -1,6 +1,3 @@ -from aws_lambda_powertools.utilities.connections.redis import ( - RedisCluster, - RedisStandalone, -) +from aws_lambda_powertools.utilities.connections.redis import RedisConnection -__all__ = ["RedisStandalone", "RedisCluster"] +__all__ = ["RedisConnection"] diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index d394cba4777..c383ea80655 100644 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -12,7 +12,6 @@ class RedisConnection(BaseConnectionSync): def __init__( self, - client: Type[Union[redis.Redis, redis.RedisCluster]], host: Optional[str] = None, port: Optional[int] = None, username: Optional[str] = None, @@ -30,25 +29,23 @@ def __init__( self.password = password self.db_index = db_index self.extra_options.update(**extra_options) - self._connection = None - self._client = client + self._cluster_connection = None + self._stdalone_connection = None - def init_connection(self): + def _init_connection(self, client: Type[Union[redis.Redis, redis.RedisCluster]]): """ 
Connection is cached, so returning this """ - if self._connection: - return self._connection logger.info(f"Trying to connect to Redis: {self.host}") try: if self.url: logger.debug(f"Using URL format to connect to Redis: {self.host}") - self._connection = self._client.from_url(url=self.url) + return client.from_url(url=self.url) else: logger.debug(f"Using other parameters to connect to Redis: {self.host}") - self._connection = self._client( + return client( host=self.host, port=self.port, username=self.username, @@ -61,73 +58,13 @@ def init_connection(self): logger.debug(f"Cannot connect in Redis: {self.host}") raise RedisConnectionError("Could not to connect to Redis", exc) from exc - return self._connection + # simplified to use different func to get each connection. + def get_standalone_connection(self): + if self._stdalone_connection: + return self._stdalone_connection + return self._init_connection(client=redis.Redis) - -class RedisStandalone(RedisConnection): - def __init__( - self, - host: Optional[str] = None, - port: Optional[int] = None, - username: Optional[str] = None, - password: Optional[str] = None, - db_index: Optional[int] = None, - url: Optional[str] = None, - **extra_options, - ) -> None: - """ - Initialize the Redis standalone client - Parameters - ---------- - host: str - Name of the host to connect to Redis instance/cluster - port: int - Number of the port to connect to Redis instance/cluster - username: str - Name of the username to connect to Redis instance/cluster in case of using ACL - See: https://redis.io/docs/management/security/acl/ - password: str - Passwod to connect to Redis instance/cluster - db_index: int - Index of Redis database - See: https://redis.io/commands/select/ - url: str - Redis client object configured from the given URL - See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url - """ - super().__init__(redis.Redis, host, port, username, password, db_index, url, **extra_options) - - -class 
RedisCluster(RedisConnection): - def __init__( - self, - host: Optional[str] = None, - port: Optional[int] = None, - username: Optional[str] = None, - password: Optional[str] = None, - db_index: Optional[int] = None, - url: Optional[str] = None, - **extra_options, - ) -> None: - """ - Initialize the Redis standalone client - Parameters - ---------- - host: str - Name of the host to connect to Redis instance/cluster - port: int - Number of the port to connect to Redis instance/cluster - username: str - Name of the username to connect to Redis instance/cluster in case of using ACL - See: https://redis.io/docs/management/security/acl/ - password: str - Passwod to connect to Redis instance/cluster - db_index: int - Index of Redis database - See: https://redis.io/commands/select/ - url: str - Redis client object configured from the given URL - See: https://redis.readthedocs.io/en/latest/connections.html#redis.Redis.from_url - """ - - super().__init__(redis.cluster.RedisCluster, host, port, username, password, db_index, url, **extra_options) + def get_cluster_connection(self): + if self._cluster_connection: + return self._cluster_connection + return self._init_connection(client=redis.cluster.RedisCluster) diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py index 30447acb28c..296b641df2b 100644 --- a/aws_lambda_powertools/utilities/idempotency/__init__.py +++ b/aws_lambda_powertools/utilities/idempotency/__init__.py @@ -8,6 +8,8 @@ from aws_lambda_powertools.utilities.idempotency.persistence.dynamodb import ( DynamoDBPersistenceLayer, ) + +# import RedisCachePersistenceLayer here mean we will need redis as a required lib? Do we want to make it optional? 
from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( RedisCachePersistenceLayer, ) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 45f6e091c2f..d5bada71d66 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -268,12 +268,14 @@ def _get_expiry_timestamp(self) -> int: unix timestamp of expiry date for idempotency record """ - if self.backend == "redis": + # removed for now, seems not being used in redis + """ if self.backend == "redis": return self.expires_after_seconds - else: - now = datetime.datetime.now() - period = datetime.timedelta(seconds=self.expires_after_seconds) - return int((now + period).timestamp()) + else: """ + + now = datetime.datetime.now() + period = datetime.timedelta(seconds=self.expires_after_seconds) + return int((now + period).timestamp()) def _save_to_cache(self, data_record: DataRecord): """ @@ -368,6 +370,7 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option data_record = DataRecord( idempotency_key=idempotency_key, status=STATUS_CONSTANTS["INPROGRESS"], + # This expiry_timestamp is never used in redis, remove specific _get_expiry_timestamp for now expiry_timestamp=self._get_expiry_timestamp(), payload_hash=self._get_hashed_payload(data=data), ) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 56913a70d20..781e4dfb4b9 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -20,7 +20,7 @@ class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - connection, + connection: Union[redis.Redis, redis.RedisCluster], in_progress_expiry_attr: str = "in_progress_expiration", 
status_attr: str = "status", data_attr: str = "data", @@ -41,7 +41,7 @@ def __init__( """ # Initialize connection with Redis - self._connection: Union[redis.Redis, redis.RedisCluster] = connection.init_connection() + self._connection = connection self.in_progress_expiry_attr = in_progress_expiry_attr self.status_attr = status_attr diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index d2ad2f1fb0e..4cdd1ef01c8 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -163,14 +163,14 @@ You can initialize `RedisCachePersistenceLayer` class and use it with `idempoten === "app.py" ```python - from aws_lambda_powertools.utilities.connections import RedisStandalone, RedisCluster + from aws_lambda_powertools.utilities.connections import RedisConnection from aws_lambda_powertools.utilities.idempotency import ( idempotent, RedisCachePersistenceLayer, IdempotencyConfig ) # For connection using Redis Standalone architecture - redis_connection = RedisStandalone(host="192.168.68.112", port=6379, password="pass", db_index=0) + redis_connection = RedisConnection(host="192.168.68.112", port=6379, password="pass", db_index=0).get_standalone_connection() persistence_layer = RedisCachePersistenceLayer(connection=redis_connection) config = IdempotencyConfig( @@ -755,15 +755,15 @@ When using DynamoDB as a persistence layer, you can alter the attribute names by This persistence layer is built-in and you can use ElastiCache to store and see the keys. 
```python - from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer - persistence_layer = RedisCachePersistenceLayer( - static_pk_value: Optional[str] = None, - expiry_attr: str = "expiration", - in_progress_expiry_attr: str = "in_progress_expiration", - status_attr: str = "status", - data_attr: str = "data", - validation_key_attr: str = "validation", - ) +from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer +persistence_layer = RedisCachePersistenceLayer( + static_pk_value: Optional[str] = None, + expiry_attr: str = "expiration", + in_progress_expiry_attr: str = "in_progress_expiration", + status_attr: str = "status", + data_attr: str = "data", + validation_key_attr: str = "validation", +) ``` When using ElastiCache for Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: @@ -780,16 +780,16 @@ When using ElastiCache for Redis as a persistence layer, you can alter the attri #### RedisStandalone/RedisCluster ```python -from aws_lambda_powertools.utilities.connections import RedisStandalone,RedisCluster +from aws_lambda_powertools.utilities.connections import RedisConnection -redis_connection = RedisStandalone( +redis_connection = RedisConnection( host="192.168.68.112", port=6379, - username = "abc" + username = "abc", password="pass", db_index=0, url = None -) +).get_standalone_connection() ``` | Parameter | Required | Default | Description | @@ -880,14 +880,14 @@ def handler(event, context): _**Redis**_ ```python -from aws_lambda_powertools.utilities.connections import RedisStandalone, RedisCluster +from aws_lambda_powertools.utilities.connections import RedisConnection from aws_lambda_powertools.utilities.idempotency import ( idempotent, RedisCachePersistenceLayer, IdempotencyConfig ) # For connection using Redis Standalone architecture -redis_connection = RedisStandalone(host="192.168.68.112", port=6379, password="pass", 
db_index=0) +redis_connection = RedisConnection(host="192.168.68.112", port=6379, password="pass", db_index=0).get_standalone_connection() persistence_layer = RedisCachePersistenceLayer(connection=redis_connection) config = IdempotencyConfig( From ecabaffde284ba50dd696bbce68581148c85c7a1 Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Fri, 23 Jun 2023 16:11:36 -0700 Subject: [PATCH 15/81] resolve regarding to ruben's comment --- .../utilities/connections/redis.py | 5 ++++- .../utilities/idempotency/persistence/redis.py | 13 ++++++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index c383ea80655..322ef29284c 100644 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -1,7 +1,10 @@ import logging from typing import Optional, Type, Union -import redis +try: + import redis # type:ignore +except ImportError: + redis = None from .base_sync import BaseConnectionSync from .exceptions import RedisConnectionError diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 781e4dfb4b9..7662d8a25ac 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,8 +1,11 @@ import datetime import logging -from typing import Any, Dict, Union +from typing import Any, Dict -import redis +try: + import redis # type:ignore +except ImportError: + redis = None from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( @@ -20,7 +23,7 @@ class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - connection: Union[redis.Redis, redis.RedisCluster], + connection, in_progress_expiry_attr: str = 
"in_progress_expiration", status_attr: str = "status", data_attr: str = "data", @@ -53,8 +56,8 @@ def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: return DataRecord( status=item[self.status_attr], in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), - response_data=item.get(self.data_attr), - payload_hash=item.get(self.validation_key_attr), + response_data=str(item.get(self.data_attr)), + payload_hash=str(item.get(self.validation_key_attr)), ) def _get_record(self, idempotency_key) -> DataRecord: From 2352161796ce7831056bf0ceebddd32267f3ec7c Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Tue, 27 Jun 2023 16:19:12 -0700 Subject: [PATCH 16/81] local test, minor fixes --- aws_lambda_powertools/utilities/connections/base_sync.py | 2 +- .../utilities/idempotency/persistence/dynamodb.py | 2 +- docs/utilities/idempotency.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/aws_lambda_powertools/utilities/connections/base_sync.py b/aws_lambda_powertools/utilities/connections/base_sync.py index f67149c3277..c326345bb19 100644 --- a/aws_lambda_powertools/utilities/connections/base_sync.py +++ b/aws_lambda_powertools/utilities/connections/base_sync.py @@ -3,5 +3,5 @@ class BaseConnectionSync(ABC): @abstractmethod - def init_connection(self): + def _init_connection(self): raise NotImplementedError() # pragma: no cover diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index 3a6f2bcc9b0..4dc121c5ff3 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -32,7 +32,7 @@ class DynamoDBPersistenceLayer(BasePersistenceLayer): def __init__( self, table_name: str, - key_attr: Optional[str] = "id", + key_attr: str = "id", # remove optional for type checking. 
static_pk_value: Optional[str] = None, sort_key_attr: Optional[str] = None, expiry_attr: str = "expiration", diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 4cdd1ef01c8..c1c1f122beb 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -147,7 +147,7 @@ You can quickly start by initializing the `DynamoDBPersistenceLayer` class and u ```json { - "username": "xyz", + "user": "xyz", "product_id": "123456789" } ``` From b81383673cf984062e017b21af3e5a74b2b110c8 Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Thu, 29 Jun 2023 13:37:38 -0700 Subject: [PATCH 17/81] add redis to extra dep --- poetry.lock | 672 ++++++++++++++++++++++++++----------------------- pyproject.toml | 5 +- 2 files changed, 364 insertions(+), 313 deletions(-) diff --git a/poetry.lock b/poetry.lock index 20ea8d812d5..a2146621ea1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,68 +2,86 @@ [[package]] name = "anyio" -version = "3.6.2" +version = "3.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" files = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, + {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, + {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, ] [package.dependencies] +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["contextlib2", "coverage[toml] 
(>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] -trio = ["trio (>=0.16,<0.22)"] +doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} [[package]] name = "attrs" -version = "22.2.0" +version = "23.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", 
"sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "aws-cdk-asset-awscli-v1" -version = "2.2.145" +version = "2.2.199" description = "A library that contains the AWS CLI for use in Lambda Layers" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.asset-awscli-v1-2.2.145.tar.gz", hash = "sha256:74098ca36a0e3d0655c98638174c77f1c6599f1b16fb4e4387721a955b431aac"}, - {file = "aws_cdk.asset_awscli_v1-2.2.145-py3-none-any.whl", hash = "sha256:bf736e03cff78b6bad51777cfa204985368975d5b0139434ef6d7e069b757bb7"}, + {file = "aws-cdk.asset-awscli-v1-2.2.199.tar.gz", hash = "sha256:d2da639d36c739452734ef00858e56cdf6bc4638087ee58e208e3c7fce73fcd3"}, + {file = "aws_cdk.asset_awscli_v1-2.2.199-py3-none-any.whl", hash = "sha256:dc1799314b36dddbe5800a5a9db08e7741544936632718511303d2eb21d117a4"}, ] [package.dependencies] -jsii = ">=1.80.0,<2.0.0" +jsii = ">=1.84.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-asset-kubectl-v20" -version = "2.1.1" +version = "2.1.2" description = "A library that contains kubectl for use in Lambda Layers" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.asset-kubectl-v20-2.1.1.tar.gz", 
hash = "sha256:9834cdb150c5590aea4e5eba6de2a89b4c60617451181c524810c5a75154565c"}, - {file = "aws_cdk.asset_kubectl_v20-2.1.1-py3-none-any.whl", hash = "sha256:a2fad1a5a35a94a465efe60859f91e45dacc33261fb9bbf1cf9bbc6e2f70e9d6"}, + {file = "aws-cdk.asset-kubectl-v20-2.1.2.tar.gz", hash = "sha256:346283e43018a43e3b3ca571de3f44e85d49c038dc20851894cb8f9b2052b164"}, + {file = "aws_cdk.asset_kubectl_v20-2.1.2-py3-none-any.whl", hash = "sha256:7f0617ab6cb942b066bd7174bf3e1f377e57878c3e1cddc21d6b2d13c92d0cc1"}, ] [package.dependencies] @@ -73,93 +91,93 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-asset-node-proxy-agent-v5" -version = "2.0.120" +version = "2.0.165" description = "@aws-cdk/asset-node-proxy-agent-v5" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.asset-node-proxy-agent-v5-2.0.120.tar.gz", hash = "sha256:5c885003685fe86aafe1611a4f1310f1cda62b27036e3528e1fe4e16ba3a3ac4"}, - {file = "aws_cdk.asset_node_proxy_agent_v5-2.0.120-py3-none-any.whl", hash = "sha256:6264375b03fa62a3a2e67e360517276874ec814dc4367d7c9eff55a4e8dff155"}, + {file = "aws-cdk.asset-node-proxy-agent-v5-2.0.165.tar.gz", hash = "sha256:e1afb5773d185cf5f335e4c2dd8dd09967a5e700b45eddf5559b24d7e665628d"}, + {file = "aws_cdk.asset_node_proxy_agent_v5-2.0.165-py3-none-any.whl", hash = "sha256:96afc5747276d21fc25a4aacdb361e3b1cb9a53b2a87a2affa20bbfbe87a0c65"}, ] [package.dependencies] -jsii = ">=1.80.0,<2.0.0" +jsii = ">=1.84.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.75.1a0" +version = "2.85.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.75.1a0.tar.gz", hash = "sha256:ddfa9b4ad8c9a4968fd637081ed732b0e099ccedb4d7ccd7c1526bff7f036b93"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.75.1a0-py3-none-any.whl", hash = 
"sha256:cf1920e37a60265286c0ab50ea78e1e065278f3758c0dde9e79d0811bd7c90d9"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.85.0a0.tar.gz", hash = "sha256:bac434ff40df5913cf9f92530c29f11a42c8d0edf0e48320c0453fd9e4bbe6b2"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.85.0a0-py3-none-any.whl", hash = "sha256:e0f71cfcb6343902827e287ba70f2780ebb0a7cdea5f8991b1cc43b82c8ea6a3"}, ] [package.dependencies] -aws-cdk-lib = "2.75.1" +aws-cdk-lib = "2.85.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.78.1,<2.0.0" +jsii = ">=1.82.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.75.1a0" +version = "2.85.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.75.1a0.tar.gz", hash = "sha256:524918a1aebf29c72f345162079bbb34cca87213480dc106931444a37496c8fe"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.75.1a0-py3-none-any.whl", hash = "sha256:493ea5f7e981c08dd772e50c95158aa7223b395a0668b5bba05b8c0ccb694f5a"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.85.0a0.tar.gz", hash = "sha256:699e752702643034e699da3c01ace2fe276c90b4851c84deb6fea46ac6927e63"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.85.0a0-py3-none-any.whl", hash = "sha256:7d23afe56d8d804e3f6306bfedcc1d0fb4bea45cabf26d7c34e54ca0740998f3"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.75.1.a0" -aws-cdk-lib = "2.75.1" +"aws-cdk.aws-apigatewayv2-alpha" = "2.85.0.a0" +aws-cdk-lib = "2.85.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.78.1,<2.0.0" +jsii = ">=1.82.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.75.1a0" +version = "2.85.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = 
"aws-cdk.aws-apigatewayv2-integrations-alpha-2.75.1a0.tar.gz", hash = "sha256:bfc11f80ad38b092c2ce861579b1bfa3b841773c85fb79c3ed548170f80cc115"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.75.1a0-py3-none-any.whl", hash = "sha256:f536d663a1fffb55e0785151d2def5a94f1b914a59463824e69ca185b6018f4c"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.85.0a0.tar.gz", hash = "sha256:a280f9a5bfd5520e6356c733b385a642b27aced83597c5b6bacf55b2a95223ed"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.85.0a0-py3-none-any.whl", hash = "sha256:dd0fcfcae9a2fb4588a0e04b16a864773b7f7aaf22c17414a9661c11ba876d9a"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.75.1.a0" -aws-cdk-lib = "2.75.1" +"aws-cdk.aws-apigatewayv2-alpha" = "2.85.0.a0" +aws-cdk-lib = "2.85.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.78.1,<2.0.0" +jsii = ">=1.82.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.75.1" +version = "2.85.0" description = "Version 2 of the AWS Cloud Development Kit library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.75.1.tar.gz", hash = "sha256:7b9c285ea9681e59c215e2ad3917b046ef47c48d83bc9c555b425a37df30d34d"}, - {file = "aws_cdk_lib-2.75.1-py3-none-any.whl", hash = "sha256:1a4a61ce69280f522b7cee7b910284ab01ef091af220e7e0fad8433fd58e4325"}, + {file = "aws-cdk-lib-2.85.0.tar.gz", hash = "sha256:cbd13ec361a841077db0a0acb1d84c44c030ff198ad0a8e6d47760b3f8362ed4"}, + {file = "aws_cdk_lib-2.85.0-py3-none-any.whl", hash = "sha256:b5b388c0478b39820d8facee6ed7b07a1e1e1e7c01dded3d17f86797fb8dddb3"}, ] [package.dependencies] -"aws-cdk.asset-awscli-v1" = ">=2.2.97,<3.0.0" +"aws-cdk.asset-awscli-v1" = ">=2.2.177,<3.0.0" "aws-cdk.asset-kubectl-v20" = ">=2.1.1,<3.0.0" -"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.77,<3.0.0" +"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.148,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.78.1,<2.0.0" +jsii = 
">=1.82.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -179,13 +197,13 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.68.0" +version = "1.70.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.68.0.tar.gz", hash = "sha256:d12a7bb3909142d32458f76818cb96a5ebc5f50fbd5943301d552679a893afcc"}, - {file = "aws_sam_translator-1.68.0-py3-none-any.whl", hash = "sha256:557d8080c9e6c1c609bfe806ea9545f7ea34144e2466c0ddc801806c2c05afdc"}, + {file = "aws-sam-translator-1.70.0.tar.gz", hash = "sha256:753288eda07b057e5350773b7617076962b59404d49cd05e2259ac96a7694436"}, + {file = "aws_sam_translator-1.70.0-py3-none-any.whl", hash = "sha256:a2df321607d29791893707ef2ded9e79be00dbb71ac430696f6e6d7d0b0301a5"}, ] [package.dependencies] @@ -287,17 +305,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.26.115" +version = "1.26.163" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.26.115-py3-none-any.whl", hash = "sha256:deb53ad15ff0e75ae0be6d7115a2d34e4bafb0541484485f0feb61dabdfb5513"}, - {file = "boto3-1.26.115.tar.gz", hash = "sha256:2272a060005bf8299f7342cbf1344304eb44b7060cddba6784f676e3bc737bb8"}, + {file = "boto3-1.26.163-py3-none-any.whl", hash = "sha256:61b66b9ab03bf59c26f546c9dca053a888dd3e7e85d49a5de6112232b5e5f6c5"}, + {file = "boto3-1.26.163.tar.gz", hash = "sha256:341ad62c53f9717cfe5fb2ae33e34f2dd3ee930abaa0fc864a10c018c0c78783"}, ] [package.dependencies] -botocore = ">=1.29.115,<1.30.0" +botocore = ">=1.29.163,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -306,13 +324,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.115" +version = "1.29.163" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.29.115-py3-none-any.whl", hash = "sha256:dff327977d7c9f98f2dc54b51b8f70326952dd50ae23b885fdfa8bfeec014b76"}, - {file = "botocore-1.29.115.tar.gz", hash = "sha256:58eee8cf8f4f3e515df29f6dc535dd86ed3f4cea40999c5bc74640ff40bdc71f"}, + {file = "botocore-1.29.163-py3-none-any.whl", hash = "sha256:dd0af0de58c12df39e043be3ad864a47d8b8ef10eedde15a73504ff75dcc261b"}, + {file = "botocore-1.29.163.tar.gz", hash = "sha256:f374bea656bf9025ad685f47e7b8ff9e20b1a2584823855ba1c4c58957768612"}, ] [package.dependencies] @@ -325,29 +343,38 @@ crt = ["awscrt (==0.16.9)"] [[package]] name = "cattrs" -version = "22.2.0" +version = "23.1.2" description = "Composable complex class support for attrs and dataclasses." optional = false python-versions = ">=3.7" files = [ - {file = "cattrs-22.2.0-py3-none-any.whl", hash = "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21"}, - {file = "cattrs-22.2.0.tar.gz", hash = "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d"}, + {file = "cattrs-23.1.2-py3-none-any.whl", hash = "sha256:b2bb14311ac17bed0d58785e5a60f022e5431aca3932e3fc5cc8ed8639de50a4"}, + {file = "cattrs-23.1.2.tar.gz", hash = "sha256:db1c821b8c537382b2c7c66678c3790091ca0275ac486c76f3c8f3920e83c657"}, ] [package.dependencies] attrs = ">=20" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -typing_extensions = {version = "*", markers = "python_version < \"3.8\""} +typing_extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.2.0,<5.0.0)"] +cbor2 = ["cbor2 (>=5.4.6,<6.0.0)"] +msgpack = ["msgpack (>=1.0.2,<2.0.0)"] +orjson = ["orjson (>=3.5.2,<4.0.0)"] +pyyaml = ["PyYAML (>=6.0,<7.0)"] +tomlkit = ["tomlkit (>=0.11.4,<0.12.0)"] +ujson = ["ujson (>=5.4.0,<6.0.0)"] [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's 
CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -496,17 +523,17 @@ files = [ [[package]] name = "constructs" -version = "10.2.1" +version = "10.2.62" description = "A programming model for software-defined state" optional = false python-versions = "~=3.7" files = [ - {file = "constructs-10.2.1-py3-none-any.whl", hash = "sha256:398d32c65d2945949195dcc642bc7bd7b7552758d274fa0e1616ca0069d01a25"}, - {file = "constructs-10.2.1.tar.gz", hash = "sha256:36e86d1092edd1f2ee8895d21c49bdabf81e62deb5f18329b275ee8ec1f67aa3"}, + {file = "constructs-10.2.62-py3-none-any.whl", hash = "sha256:47db05c49ca227d91fa6bb5992d4b16663efea3070627dae4de0ff46e0066248"}, + {file = "constructs-10.2.62.tar.gz", hash = "sha256:20cccdb9e6c3b5410fe64c017a9b72b054ba0ebb54453cc7e750f49671d02c05"}, ] [package.dependencies] -jsii = ">=1.80.0,<2.0.0" +jsii = ">=1.84.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -598,13 +625,13 @@ files = [ [[package]] name = "eradicate" -version = "2.2.0" +version = "2.3.0" description = "Removes commented-out code." 
optional = false python-versions = "*" files = [ - {file = "eradicate-2.2.0-py3-none-any.whl", hash = "sha256:751813c315a48ce7e3d0483410991015342d380a956e86e0265c61bfb875bcbc"}, - {file = "eradicate-2.2.0.tar.gz", hash = "sha256:c329a05def6a4b558dab58bb1b694f5209706b7c99ba174d226dfdb69a5ba0da"}, + {file = "eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e"}, + {file = "eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37"}, ] [[package]] @@ -892,13 +919,13 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "httpcore" -version = "0.17.0" +version = "0.17.2" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"}, - {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"}, + {file = "httpcore-0.17.2-py3-none-any.whl", hash = "sha256:5581b9c12379c4288fe70f43c710d16060c10080617001e6b22a3b6dbcbefd36"}, + {file = "httpcore-0.17.2.tar.gz", hash = "sha256:125f8375ab60036db632f34f4b627a9ad085048eef7cb7d2616fea0f739f98af"}, ] [package.dependencies] @@ -1049,13 +1076,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = 
"importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -1065,7 +1092,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" @@ -1159,18 +1186,18 @@ pbr = "*" [[package]] name = "jsii" -version = "1.80.0" +version = "1.84.0" description = "Python client for jsii runtime" optional = false python-versions = "~=3.7" files = [ - {file = "jsii-1.80.0-py3-none-any.whl", hash = "sha256:ea3cace063f6a47cdf0a74c929618d779efab426fedb7692a8ac1b9b29797f8c"}, - {file = "jsii-1.80.0.tar.gz", hash = "sha256:4da63ab99f2696cd063574460c94221f0a7de9d345e71dfb19dfbcecf8ca8355"}, + {file = "jsii-1.84.0-py3-none-any.whl", hash = "sha256:7748205665d079112721d6eb23a37fa084722ab675f65e8a6514ec908660bd57"}, + {file = "jsii-1.84.0.tar.gz", hash = "sha256:29f8352525e980e126d5d0fd3510fff9a81a8dbc36249f99d922907f2e6bf4c3"}, ] [package.dependencies] -attrs = ">=21.2,<23.0" -cattrs = ">=1.8,<22.3" +attrs = ">=21.2,<24.0" +cattrs = ">=1.8,<23.2" importlib-resources = ">=5.2.0" publication = ">=0.0.3" python-dateutil = "*" @@ -1179,13 +1206,13 @@ typing-extensions = ">=3.7,<5.0" [[package]] name = "jsonpatch" -version = "1.32" +version = "1.33" description = "Apply JSON-Patches (RFC 6902)" optional = false -python-versions 
= ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ - {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, - {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, ] [package.dependencies] @@ -1212,13 +1239,13 @@ testing-libs = ["simplejson", "ujson"] [[package]] name = "jsonpointer" -version = "2.3" +version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] @@ -1339,61 +1366,61 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = 
"MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = 
"MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = 
"sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] @@ -1463,13 +1490,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" 
-version = "1.4.2" +version = "1.4.3" description = "Project documentation with Markdown." optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.4.2-py3-none-any.whl", hash = "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"}, - {file = "mkdocs-1.4.2.tar.gz", hash = "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5"}, + {file = "mkdocs-1.4.3-py3-none-any.whl", hash = "sha256:6ee46d309bda331aac915cd24aab882c179a933bd9e77b80ce7d2eaaa3f689dd"}, + {file = "mkdocs-1.4.3.tar.gz", hash = "sha256:5955093bbd4dd2e9403c5afaf57324ad8b04f16886512a3ee6ef828956481c57"}, ] [package.dependencies] @@ -1507,13 +1534,13 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.16" +version = "9.1.17" description = "Documentation that simply works" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.16-py3-none-any.whl", hash = "sha256:f9e62558a6b01ffac314423cbc223d970c25fbc78999860226245b64e64d6751"}, - {file = "mkdocs_material-9.1.16.tar.gz", hash = "sha256:1021bfea20f00a9423530c8c2ae9be3c78b80f5a527b3f822e6de3d872e5ab79"}, + {file = "mkdocs_material-9.1.17-py3-none-any.whl", hash = "sha256:809ed68427fbab0330b0b07bc93175824c3b98f4187060a5c7b46aa8ae398a75"}, + {file = "mkdocs_material-9.1.17.tar.gz", hash = "sha256:5a076524625047bf4ee4da1509ec90626f8fce915839dc07bdae6b59ff4f36f9"}, ] [package.dependencies] @@ -1557,44 +1584,44 @@ tests = ["pytest (>=4.6)"] [[package]] name = "mypy" -version = "1.4.0" +version = "1.4.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3af348e0925a59213244f28c7c0c3a2c2088b4ba2fe9d6c8d4fbb0aba0b7d05"}, - {file = "mypy-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0b2e0da7ff9dd8d2066d093d35a169305fc4e38db378281fce096768a3dbdbf"}, - {file = 
"mypy-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210fe0f39ec5be45dd9d0de253cb79245f0a6f27631d62e0c9c7988be7152965"}, - {file = "mypy-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f7a5971490fd4a5a436e143105a1f78fa8b3fe95b30fff2a77542b4f3227a01f"}, - {file = "mypy-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:50f65f0e9985f1e50040e603baebab83efed9eb37e15a22a4246fa7cd660f981"}, - {file = "mypy-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1b5c875fcf3e7217a3de7f708166f641ca154b589664c44a6fd6d9f17d9e7e"}, - {file = "mypy-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4c734d947e761c7ceb1f09a98359dd5666460acbc39f7d0a6b6beec373c5840"}, - {file = "mypy-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5984a8d13d35624e3b235a793c814433d810acba9eeefe665cdfed3d08bc3af"}, - {file = "mypy-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0f98973e39e4a98709546a9afd82e1ffcc50c6ec9ce6f7870f33ebbf0bd4f26d"}, - {file = "mypy-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:19d42b08c7532d736a7e0fb29525855e355fa51fd6aef4f9bbc80749ff64b1a2"}, - {file = "mypy-1.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ba9a69172abaa73910643744d3848877d6aac4a20c41742027dcfd8d78f05d9"}, - {file = "mypy-1.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a34eed094c16cad0f6b0d889811592c7a9b7acf10d10a7356349e325d8704b4f"}, - {file = "mypy-1.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:53c2a1fed81e05ded10a4557fe12bae05b9ecf9153f162c662a71d924d504135"}, - {file = "mypy-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bba57b4d2328740749f676807fcf3036e9de723530781405cc5a5e41fc6e20de"}, - {file = "mypy-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:653863c75f0dbb687d92eb0d4bd9fe7047d096987ecac93bb7b1bc336de48ebd"}, - {file = "mypy-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:7461469e163f87a087a5e7aa224102a30f037c11a096a0ceeb721cb0dce274c8"}, - {file = "mypy-1.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf0ca95e4b8adeaf07815a78b4096b65adf64ea7871b39a2116c19497fcd0dd"}, - {file = "mypy-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94a81b9354545123feb1a99b960faeff9e1fa204fce47e0042335b473d71530d"}, - {file = "mypy-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:67242d5b28ed0fa88edd8f880aed24da481929467fdbca6487167cb5e3fd31ff"}, - {file = "mypy-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f2b353eebef669529d9bd5ae3566905a685ae98b3af3aad7476d0d519714758"}, - {file = "mypy-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62bf18d97c6b089f77f0067b4e321db089d8520cdeefc6ae3ec0f873621c22e5"}, - {file = "mypy-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca33ab70a4aaa75bb01086a0b04f0ba8441e51e06fc57e28585176b08cad533b"}, - {file = "mypy-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5a0ee54c2cb0f957f8a6f41794d68f1a7e32b9968675ade5846f538504856d42"}, - {file = "mypy-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c34d43e3d54ad05024576aef28081d9d0580f6fa7f131255f54020eb12f5352"}, - {file = "mypy-1.4.0-py3-none-any.whl", hash = "sha256:f051ca656be0c179c735a4c3193f307d34c92fdc4908d44fd4516fbe8b10567d"}, - {file = "mypy-1.4.0.tar.gz", hash = "sha256:de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = 
"mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -1674,13 +1701,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-lambda" -version = "1.26.157" -description = "Type annotations for boto3.Lambda 1.26.157 service generated with mypy-boto3-builder 7.14.5" +version = "1.26.163" +description = "Type annotations for boto3.Lambda 1.26.163 service generated with mypy-boto3-builder 7.14.5" optional = false python-versions = ">=3.7" files = [ - {file = 
"mypy-boto3-lambda-1.26.157.tar.gz", hash = "sha256:ef5e0c3c7a8467e85d57b2caaee4ff7db9ba1e4703d3c28213df1269180f1731"}, - {file = "mypy_boto3_lambda-1.26.157-py3-none-any.whl", hash = "sha256:2e0c5f92e2c22cadd0093bd800ab24393cbb247a5971c119a7d9d6a18bd9ee43"}, + {file = "mypy-boto3-lambda-1.26.163.tar.gz", hash = "sha256:fd36cf8a3f5e733f6e80c755af19538ce0a50c3eafb1be7cf7de0c29c40788a0"}, + {file = "mypy_boto3_lambda-1.26.163-py3-none-any.whl", hash = "sha256:f6ca7fd11c8e9c81218fdbeae163c343c6dc3b7df88a70c30c0c7b30c90163d9"}, ] [package.dependencies] @@ -1702,13 +1729,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-s3" -version = "1.26.155" -description = "Type annotations for boto3.S3 1.26.155 service generated with mypy-boto3-builder 7.14.5" +version = "1.26.163" +description = "Type annotations for boto3.S3 1.26.163 service generated with mypy-boto3-builder 7.14.5" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.26.155.tar.gz", hash = "sha256:2a399abba8c45b7bb3f3623baf86858625d7b0e5a902edfe7d9fa18fcbeb2f7b"}, - {file = "mypy_boto3_s3-1.26.155-py3-none-any.whl", hash = "sha256:0dd0f5747451cbffb3582d416b3bc32fa2573c9e1c9dd6a6073463be0621d2c2"}, + {file = "mypy-boto3-s3-1.26.163.tar.gz", hash = "sha256:47d3639cd097aa142dcaca43b431fa7a7eefb56e1596ebff625f571f16bd7623"}, + {file = "mypy_boto3_s3-1.26.163-py3-none-any.whl", hash = "sha256:39417a3e4128c61e7b9353d080b93293f9ca9a4acf059a255d8357b9a4e12db2"}, ] [package.dependencies] @@ -1730,17 +1757,17 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-ssm" -version = "1.26.97" -description = "Type annotations for boto3.SSM 1.26.97 service generated with mypy-boto3-builder 7.13.0" +version = "1.26.162" +description = "Type annotations for boto3.SSM 1.26.162 service generated with mypy-boto3-builder 7.14.5" optional = false python-versions = ">=3.7" files = [ - 
{file = "mypy-boto3-ssm-1.26.97.tar.gz", hash = "sha256:58f780b1fad4a441cea2322e3a28b107cdc96f6fd72fd33f344f588c96c82039"}, - {file = "mypy_boto3_ssm-1.26.97-py3-none-any.whl", hash = "sha256:ddf288d0267a1d9885aa7eccaf89aeee2f73dad76ce0c0132bc1b7e4e2aebc12"}, + {file = "mypy-boto3-ssm-1.26.162.tar.gz", hash = "sha256:a5d9d7e4c7dbf4788dddcc02410afa349d7356a24fbffe5b7a36cd0c04211f06"}, + {file = "mypy_boto3_ssm-1.26.162-py3-none-any.whl", hash = "sha256:42c52e55a94b5d5ce14d628b4bc168b2b4230e130050172bf54747edb7b8da14"}, ] [package.dependencies] -typing-extensions = ">=4.1.0" +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-xray" @@ -1846,31 +1873,31 @@ files = [ [[package]] name = "platformdirs" -version = "3.2.0" +version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, - {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, + {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, + {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, ] [package.dependencies] -typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4.6.3", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", 
"sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.dependencies] @@ -2035,13 +2062,13 @@ files = [ [[package]] name = "pymdown-extensions" -version = "10.0" +version = "10.0.1" description = "Extension pack for Python Markdown." 
optional = false python-versions = ">=3.7" files = [ - {file = "pymdown_extensions-10.0-py3-none-any.whl", hash = "sha256:e6cbe8ace7d8feda30bc4fd6a21a073893a9a0e90c373e92d69ce5b653051f55"}, - {file = "pymdown_extensions-10.0.tar.gz", hash = "sha256:9a77955e63528c2ee98073a1fb3207c1a45607bc74a34ef21acd098f46c3aa8a"}, + {file = "pymdown_extensions-10.0.1-py3-none-any.whl", hash = "sha256:ae66d84013c5d027ce055693e09a4628b67e9dec5bce05727e45b0918e36f274"}, + {file = "pymdown_extensions-10.0.1.tar.gz", hash = "sha256:b44e1093a43b8a975eae17b03c3a77aad4681b3b56fce60ce746dbef1944c8cb"}, ] [package.dependencies] @@ -2351,101 +2378,121 @@ colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} future = "*" mando = ">=0.6,<0.7" +[[package]] +name = "redis" +version = "4.6.0" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, + {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} +importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "regex" -version = "2022.10.31" +version = "2023.6.3" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.6" files = [ - {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, - {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"}, - {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"}, - {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"}, - {file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"}, - {file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"}, - {file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"}, - {file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"}, - {file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"}, - {file = 
"regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"}, - {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"}, - {file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"}, - {file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"}, - {file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"}, - {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"}, - {file = 
"regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"}, - {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"}, - {file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"}, - {file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"}, - {file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"}, - {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"}, - {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"}, - {file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = "sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"}, - {file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"}, - {file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"}, - {file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"}, - {file = 
"regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"}, - {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"}, - {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"}, - {file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"}, - {file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"}, - {file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"}, - {file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"}, - {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"}, - {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"}, - {file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = 
"sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"}, - {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, - {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = 
"regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = 
"regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = 
"regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = 
"regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, ] [[package]] @@ -2486,17 +2533,17 @@ py = ">=1.4.26,<2.0.0" [[package]] name = "rich" -version = "13.3.4" +version = "13.4.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.3.4-py3-none-any.whl", hash = "sha256:22b74cae0278fd5086ff44144d3813be1cedc9115bdfabbfefd86400cb88b20a"}, - {file = "rich-13.3.4.tar.gz", hash = "sha256:b5d573e13605423ec80bdd0cd5f8541f7844a0e71a13f74cf454ccb2f490708b"}, + {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, + {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, ] [package.dependencies] -markdown-it-py = ">=2.2.0,<3.0.0" +markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} @@ -2505,13 +2552,13 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "s3transfer" -version = "0.6.0" +version = "0.6.1" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, - {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, + {file = 
"s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"}, + {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"}, ] [package.dependencies] @@ -2725,35 +2772,35 @@ types-urllib3 = "*" [[package]] name = "types-urllib3" -version = "1.26.25.10" +version = "1.26.25.13" description = "Typing stubs for urllib3" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.25.10.tar.gz", hash = "sha256:c44881cde9fc8256d05ad6b21f50c4681eb20092552351570ab0a8a0653286d6"}, - {file = "types_urllib3-1.26.25.10-py3-none-any.whl", hash = "sha256:12c744609d588340a07e45d333bf870069fc8793bcf96bae7a96d4712a42591d"}, + {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"}, + {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"}, ] [[package]] name = "typing-extensions" -version = "4.6.2" +version = "4.7.0" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, - {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, + {file = "typing_extensions-4.7.0-py3-none-any.whl", hash = "sha256:5d8c9dac95c27d20df12fb1d97b9793ab8b2af8a3a525e68c80e21060c161771"}, + {file = "typing_extensions-4.7.0.tar.gz", hash = "sha256:935ccf31549830cda708b42289d44b6f74084d616a00be651601a4f968e77c82"}, ] [[package]] name = "urllib3" -version = "1.26.15" +version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] [package.extras] @@ -2930,13 +2977,14 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] +all = ["aws-xray-sdk", "fastjsonschema", "pydantic", "redis"] aws-sdk = ["boto3"] parser = ["pydantic"] +redis = ["redis"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "c1d0ad9a9d554e10d464a004d2f9b0bfdc1e6e297e0504046907fd4cf3a63599" +content-hash = "5d44439ec2e925b96e8a0ed7af24afcf48240a46073f53a44ec061045bf892df" diff --git a/pyproject.toml b/pyproject.toml index 249ebd176ed..719c600cbf6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ aws-xray-sdk = { version = "^2.8.0", optional = true } fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } +redis = {version = "^4.4.0", optional = true} typing-extensions = "^4.6.2" [tool.poetry.dev-dependencies] @@ -90,12 +91,14 @@ ijson = "^3.2.2" typed-ast = { version = "^1.5.4", python = "< 
3.8"} hvac = "^1.1.1" aws-requests-auth = "^0.4.3" +redis = "^4.4.0" [tool.poetry.extras] parser = ["pydantic"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] +redis = ["redis"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema","redis"] # allow customers to run code locally without emulators (SAM CLI, etc.) aws-sdk = ["boto3"] From 1a9d8d945eaaa2326bde09a68c262d0de1a9c154 Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Thu, 6 Jul 2023 19:32:12 -0700 Subject: [PATCH 18/81] fix git issue --- Makefile | 7 +- .../idempotency/persistence/redis.py | 12 +- tests/integration/__init__.py | 0 tests/integration/idempotency/__init__.py | 0 .../idempotency/test_idempotency_redis.py | 159 ++++++++++++++++++ 5 files changed, 173 insertions(+), 5 deletions(-) create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/idempotency/__init__.py create mode 100644 tests/integration/idempotency/test_idempotency_redis.py diff --git a/Makefile b/Makefile index 39cab23945a..2ae0e3f918f 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,6 @@ .PHONY: target dev format lint test coverage-html pr build build-docs build-docs-api build-docs-website .PHONY: docs-local docs-api-local security-baseline complexity-baseline release-prod release-test release +.PHONY: int-test-idem target: @$(MAKE) pr @@ -30,12 +31,16 @@ lint-docs-fix: docker run -v ${PWD}:/markdown 06kellyjac/markdownlint-cli --fix "docs" test: - poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=xml + poetry run pytest -m "not perf" --ignore tests/e2e --ignore tests/integration --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance unit-test: poetry run pytest tests/unit +int-test-idem: + docker run --name int-test-idem -d -p 63005:6379 redis + poetry run pytest tests/integration/idempotency;docker stop int-test-idem;docker rm int-test-idem + e2e-test: python 
parallel_run_e2e.py diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 7662d8a25ac..2790fcf7123 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -52,10 +52,14 @@ def __init__( self.validation_key_attr = validation_key_attr super(RedisCachePersistenceLayer, self).__init__() - def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: + def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> DataRecord: + in_progress_expiry_timestamp = item.get(self.in_progress_expiry_attr) + if isinstance(in_progress_expiry_timestamp, str): + in_progress_expiry_timestamp = int(in_progress_expiry_timestamp) return DataRecord( + idempotency_key=idempotency_key, status=item[self.status_attr], - in_progress_expiry_timestamp=item.get(self.in_progress_expiry_attr), + in_progress_expiry_timestamp=in_progress_expiry_timestamp, response_data=str(item.get(self.data_attr)), payload_hash=str(item.get(self.validation_key_attr)), ) @@ -68,7 +72,7 @@ def _get_record(self, idempotency_key) -> DataRecord: item = response except KeyError: raise IdempotencyItemNotFoundError - return self._item_to_data_record(item) + return self._item_to_data_record(idempotency_key, item) def _put_record(self, data_record: DataRecord) -> None: item: Dict[str, Any] = {} @@ -112,7 +116,7 @@ def _put_record(self, data_record: DataRecord) -> None: # checking if in_progress_expiry_attr exists # if in_progress_expiry_attr exist, must be lower than now if self.in_progress_expiry_attr in idempotency_record and int( - idempotency_record[self.in_progress_expiry_attr] + idempotency_record[self.in_progress_expiry_attr], ) > int(now.timestamp() * 1000): raise diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git 
a/tests/integration/idempotency/__init__.py b/tests/integration/idempotency/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py new file mode 100644 index 00000000000..aca9e4ed20c --- /dev/null +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -0,0 +1,159 @@ +import copy + +import pytest + +from aws_lambda_powertools.utilities.connections import RedisConnection +from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyAlreadyInProgressError, + IdempotencyItemAlreadyExistsError, + IdempotencyItemNotFoundError, +) +from aws_lambda_powertools.utilities.idempotency.idempotency import ( + idempotent, + idempotent_function, +) + +redis_stdalone_config = {"host": "127.0.0.1", "port": 63005} + + +@pytest.fixture +def lambda_context(): + class LambdaContext: + def __init__(self): + self.function_name = "test-func" + self.memory_limit_in_mb = 128 + self.invoked_function_arn = "arn:aws:lambda:eu-west-1:809313241234:function:test-func" + self.aws_request_id = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 1000 + + return LambdaContext() + + +@pytest.fixture +def persistence_store_standalone_redis(): + return RedisCachePersistenceLayer(connection=RedisConnection(**redis_stdalone_config).get_standalone_connection()) + + +# test basic +def test_idempotent_function_and_lambda_handler_redis_basic( + # idempotency_config: IdempotencyConfig, + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"data": "value"} + persistence_layer = persistence_store_standalone_redis + expected_result = {"message": "Foo"} + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def 
record_handler(record): + return expected_result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, _): + return expected_result + + # WHEN calling the function + fn_result = record_handler(record=mock_event) + # WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result + + +def test_idempotent_function_and_lambda_handler_redis_cache( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"data": "value2"} + persistence_layer = persistence_store_standalone_redis + result = {"message": "Foo"} + expected_result = copy.deepcopy(result) + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, _): + return result + + # WHEN calling the function + fn_result = record_handler(record=mock_event) + # WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result + + # modify the return to check if idem cache works + result = {"message": "Bar"} + fn_result2 = record_handler(record=mock_event) + # Second time calling lambda handler, test if same result + handler_result2 = lambda_handler(mock_event, lambda_context) + assert fn_result2 == expected_result + assert handler_result2 == expected_result + + # modify the mock event to check if we got updated result + mock_event = {"data": "value3"} + fn_result3 = record_handler(record=mock_event) + # third time calling lambda handler, test if result updated + handler_result3 = lambda_handler(mock_event, lambda_context) + assert 
fn_result3 == result + assert handler_result3 == result + + +# test idem-inprogress +def test_idempotent_lambda_redis_in_progress( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + """ + Test idempotent decorator where lambda_handler is already processing an event with matching event key + """ + + mock_event = {"data": "value4"} + persistence_store = persistence_store_standalone_redis + lambda_response = {"foo": "bar"} + + @idempotent(persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + # register the context first + lambda_handler(mock_event, lambda_context) + # save additional to in_progress + mock_event = {"data": "value7"} + try: + persistence_store.save_inprogress(mock_event, 1000) + except IdempotencyItemAlreadyExistsError: + pass + + with pytest.raises(IdempotencyAlreadyInProgressError): + lambda_handler(mock_event, lambda_context) + + +# -remove +def test_idempotent_lambda_redis_delete( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"data": "test_delete"} + persistence_layer = persistence_store_standalone_redis + result = {"message": "Foo"} + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, _): + return result + + handler_result = lambda_handler(mock_event, lambda_context) + assert handler_result == result + + # delete the idem and handler should output new result + persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) + result = {"message": "Foo2"} + handler_result2 = lambda_handler(mock_event, lambda_context) + assert handler_result2 == result From 9859896dbd3b135a9013f3110d9c3d7a2975ef0b Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Mon, 10 Jul 2023 15:11:57 -0700 Subject: [PATCH 19/81] fix docstring --- .../utilities/connections/base_sync.py | 2 +- .../utilities/connections/redis.py | 59 +++++++++++++++---- .../idempotency/persistence/redis.py | 1 + 3 
files changed, 49 insertions(+), 13 deletions(-) diff --git a/aws_lambda_powertools/utilities/connections/base_sync.py b/aws_lambda_powertools/utilities/connections/base_sync.py index c326345bb19..29b0b162eae 100644 --- a/aws_lambda_powertools/utilities/connections/base_sync.py +++ b/aws_lambda_powertools/utilities/connections/base_sync.py @@ -3,5 +3,5 @@ class BaseConnectionSync(ABC): @abstractmethod - def _init_connection(self): + def _init_connection(self, **kwargs): raise NotImplementedError() # pragma: no cover diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index 322ef29284c..0274f87759e 100644 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -23,6 +23,26 @@ def __init__( url: Optional[str] = None, **extra_options, ) -> None: + """ + Initialize Redis connection which will be used in redis persistence_store to support idempotency + + Parameters + ---------- + host: str, optional + redis host + port: int, optional + redis port + username: str, optional + redis username + password: str, optional + redis password + db_index: int, optional + redis db index + url: str, optional + redis connection string, using url will override the host/port in the previous parameters + extra_options: **kwargs, optional + extra kwargs to pass directly into redis client + """ self.extra_options: dict = {} self.url = url @@ -33,13 +53,9 @@ def __init__( self.db_index = db_index self.extra_options.update(**extra_options) self._cluster_connection = None - self._stdalone_connection = None - - def _init_connection(self, client: Type[Union[redis.Redis, redis.RedisCluster]]): - """ - Connection is cached, so returning this - """ + self._standalone_connection = None + def _init_connection(self, client: Type[Union[redis.Redis, redis.cluster.RedisCluster]]): logger.info(f"Trying to connect to Redis: {self.host}") try: @@ -62,12 +78,31 @@ def 
_init_connection(self, client: Type[Union[redis.Redis, redis.RedisCluster]]) raise RedisConnectionError("Could not to connect to Redis", exc) from exc # simplified to use different func to get each connection. - def get_standalone_connection(self): - if self._stdalone_connection: - return self._stdalone_connection - return self._init_connection(client=redis.Redis) + def get_standalone_connection(self) -> redis.Redis: + """ + return a standalone redis client based on class's init parameter + + Returns + ------- + Client: + redis.Redis + """ + if self._standalone_connection: + return self._standalone_connection + self._standalone_connection = self._init_connection(client=redis.Redis) + return self._standalone_connection - def get_cluster_connection(self): + def get_cluster_connection(self) -> redis.cluster.RedisCluster: + """ + return a cluster redis client based on class's init parameter + if there are cached connection then return directly + + Returns + ------- + Client: + redis.cluster.RedisCluster + """ if self._cluster_connection: return self._cluster_connection - return self._init_connection(client=redis.cluster.RedisCluster) + self._cluster_connection = self._init_connection(client=redis.cluster.RedisCluster) + return self._cluster_connection diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 2790fcf7123..566f02d0be2 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -124,6 +124,7 @@ def _put_record(self, data_record: DataRecord) -> None: self._connection.hset(**item) # hset type must set expiration after adding the record # Need to review this to get ttl in seconds + # Q: should we replace self.expires_after_seconds with _get_expiry_timestamp? 
more consistent self._connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) except Exception: logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") From 8c38af1ba4780172fc2065193d5a5a293f7618bb Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Tue, 15 Aug 2023 01:15:12 +0000 Subject: [PATCH 20/81] fix poetry, address some Leandro's suggestion --- Makefile | 8 +-- .../idempotency/persistence/dynamodb.py | 2 +- poetry.lock | 50 ++++++++++++------- 3 files changed, 37 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index 85976bd528f..d02f65b554b 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ .PHONY: target dev format lint test coverage-html pr build build-docs build-docs-api build-docs-website .PHONY: docs-local docs-api-local security-baseline complexity-baseline release-prod release-test release -.PHONY: int-test-idem +.PHONY: test-idempotency-redis target: @$(MAKE) pr @@ -36,9 +36,9 @@ test: unit-test: poetry run pytest tests/unit -int-test-idem: - docker run --name int-test-idem -d -p 63005:6379 redis - poetry run pytest tests/integration/idempotency;docker stop int-test-idem;docker rm int-test-idem +test-idempotency-redis: + docker run --name test-idempotency-redis -d -p 63005:6379 redis + poetry run pytest tests/integration/idempotency;docker stop test-idempotency-redis;docker rm test-idempotency-redis e2e-test: python parallel_run_e2e.py diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index b5cf96f2465..913e88524e2 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -32,7 +32,7 @@ class DynamoDBPersistenceLayer(BasePersistenceLayer): def __init__( self, table_name: str, - key_attr: str = "id", # remove optional for type checking. 
+ key_attr: str = "id", static_pk_value: Optional[str] = None, sort_key_attr: Optional[str] = None, expiry_attr: str = "expiration", diff --git a/poetry.lock b/poetry.lock index 535060363d1..6ea5243f82f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -22,6 +22,20 @@ doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd- test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = true +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + [[package]] name = "attrs" version = "23.1.0" @@ -291,17 +305,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.24" +version = "1.28.26" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.24-py3-none-any.whl", hash = "sha256:0300ca6ec8bc136eb316b32cc1e30c66b85bc497f5a5fe42e095ae4280569708"}, - {file = "boto3-1.28.24.tar.gz", hash = "sha256:9d1b4713c888e53a218648ad71522bee9bec9d83f2999fff2494675af810b632"}, + {file = "boto3-1.28.26-py3-none-any.whl", hash = "sha256:63619ffa44bc7f799b525c86d73bdb7f7a70994942bbff78253585bf64084e6e"}, + {file = "boto3-1.28.26.tar.gz", hash = "sha256:a15841c7d04f87c63c9f2587b2b48198bec04d307d7b9950cbe4a021f845a5ba"}, ] [package.dependencies] -botocore = ">=1.31.24,<1.32.0" +botocore = ">=1.31.26,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -310,13 
+324,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.24" +version = "1.31.26" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.24-py3-none-any.whl", hash = "sha256:8c7ba9b09e9104e2d473214e1ffcf84b77e04cf6f5f2344942c1eed9e299f947"}, - {file = "botocore-1.31.24.tar.gz", hash = "sha256:2d8f412c67f9285219f52d5dbbb6ef0dfa9f606da29cbdd41b6d6474bcc4bbd4"}, + {file = "botocore-1.31.26-py3-none-any.whl", hash = "sha256:74d1c26144915312004a9f0232cdbe08946dfec9fc7dcd854456d2b73be9bfd9"}, + {file = "botocore-1.31.26.tar.gz", hash = "sha256:e68a50ba76425ede8693fdf1f95b8411e283bc7619c03d7eb666db9f1de48153"}, ] [package.dependencies] @@ -815,13 +829,13 @@ mypy = ["mypy"] [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -1209,13 +1223,13 @@ pbr = "*" [[package]] name = "jsii" -version = "1.86.1" +version = "1.87.0" description = "Python client for jsii runtime" optional = false python-versions = "~=3.7" files = [ - {file = "jsii-1.86.1-py3-none-any.whl", hash = "sha256:32eb46ed4c9a35bc92b892ef049ed1996f13be38ffef964d607e8fe930471b3e"}, - {file = "jsii-1.86.1.tar.gz", hash = "sha256:44f9a820eea92c9508693f72d3129b5a080421c949c32303f4f7b2cc98a81f59"}, + {file = "jsii-1.87.0-py3-none-any.whl", 
hash = "sha256:4b7c1331e950af10ba7b71d1c9d2e634b17716b3d1b2bf61e0382ff524d8aafb"}, + {file = "jsii-1.87.0.tar.gz", hash = "sha256:ba6860c827551901a8c43c7b09a9bb901d5f1231fc4693f2f4bf87b3db200064"}, ] [package.dependencies] @@ -1243,13 +1257,13 @@ jsonpointer = ">=1.9" [[package]] name = "jsonpickle" -version = "3.0.1" +version = "3.0.2" description = "Python library for serializing any arbitrary object graph into JSON" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.1-py2.py3-none-any.whl", hash = "sha256:130d8b293ea0add3845de311aaba55e6d706d0bb17bc123bd2c8baf8a39ac77c"}, - {file = "jsonpickle-3.0.1.tar.gz", hash = "sha256:032538804795e73b94ead410800ac387fdb6de98f8882ac957fcd247e3a85200"}, + {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, + {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, ] [package.dependencies] @@ -2312,7 +2326,7 @@ mando = ">=0.6,<0.7" name = "redis" version = "4.6.0" description = "Python client for Redis database and key-value store" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, @@ -2970,4 +2984,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "0d10a97be8e144737d6e0b2b9fafedafde51e709304e3cc21ce5be31afb0b317" +content-hash = "26287678601db7965142f3448f0be18905409cfda177621da5e64c35bcd293db" From cceba130359c9ed8e1b802546f488b72a7394404 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Thu, 17 Aug 2023 23:51:42 +0000 Subject: [PATCH 21/81] change redis connection --- .../utilities/connections/redis.py | 36 +++--- poetry.lock | 108 +++++++++--------- .../idempotency/test_idempotency_redis.py | 4 +- 3 files changed, 72 insertions(+), 76 deletions(-) diff --git 
a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py index 0274f87759e..1381fa3afda 100644 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ b/aws_lambda_powertools/utilities/connections/redis.py @@ -1,5 +1,5 @@ import logging -from typing import Optional, Type, Union +from typing import Literal, Optional, Type, Union try: import redis # type:ignore @@ -21,6 +21,7 @@ def __init__( password: Optional[str] = None, db_index: Optional[int] = None, url: Optional[str] = None, + mode: Optional[Literal["standalone", "cluster"]] = "standalone", **extra_options, ) -> None: """ @@ -38,6 +39,8 @@ def __init__( redis password db_index: str, optional redis db index + mode: str, Literal["standalone","cluster"] + set redis client mode, choose from standalone/cluster url: str, optional redis connection string, using url will override the host/port in the previous parameters extra_options: **kwargs, optional @@ -51,6 +54,7 @@ def __init__( self.username = username self.password = password self.db_index = db_index + self.mode = mode self.extra_options.update(**extra_options) self._cluster_connection = None self._standalone_connection = None @@ -78,31 +82,23 @@ def _init_connection(self, client: Type[Union[redis.Redis, redis.cluster.RedisCl raise RedisConnectionError("Could not to connect to Redis", exc) from exc # simplified to use different func to get each connection. 
- def get_standalone_connection(self) -> redis.Redis: + def get_connection(self) -> Type[Union[redis.Redis, redis.cluster.RedisCluster]]: """ return a standalone redis client based on class's init parameter Returns ------- Client: - redis.Redis + Union[redis.Redis, redis.cluster.RedisCluster] """ - if self._standalone_connection: + if self.mode == "standalone": + if self._standalone_connection: + return self._standalone_connection + self._standalone_connection = self._init_connection(client=redis.Redis) return self._standalone_connection - self._standalone_connection = self._init_connection(client=redis.Redis) - return self._standalone_connection - - def get_cluster_connection(self) -> redis.cluster.RedisCluster: - """ - return a cluster redis client based on class's init parameter - if there are cached connection then return directly - - Returns - ------- - Client: - redis.cluster.RedisCluster - """ - if self._cluster_connection: + if self.mode == "cluster": + if self._cluster_connection: + return self._cluster_connection + self._cluster_connection = self._init_connection(client=redis.cluster.RedisCluster) return self._cluster_connection - self._cluster_connection = self._init_connection(client=redis.cluster.RedisCluster) - return self._cluster_connection + raise RedisConnectionError("Redis connection mode not supported yet:", self.mode) diff --git a/poetry.lock b/poetry.lock index 6ea5243f82f..4de9ae8b4f4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -90,94 +90,94 @@ publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] -name = "aws-cdk-asset-node-proxy-agent-v5" -version = "2.0.166" -description = "@aws-cdk/asset-node-proxy-agent-v5" +name = "aws-cdk-asset-node-proxy-agent-v6" +version = "2.0.1" +description = "@aws-cdk/asset-node-proxy-agent-v6" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.asset-node-proxy-agent-v5-2.0.166.tar.gz", hash = "sha256:73d4a4f4bdeb3019779137e4b18d7c5efc8353dff673508bd8c584569ea18db6"}, - {file 
= "aws_cdk.asset_node_proxy_agent_v5-2.0.166-py3-none-any.whl", hash = "sha256:d71d80710734c47f7114e3e51843c4a8ebae3bbe5032a5af40ca56ca611c0cfe"}, + {file = "aws-cdk.asset-node-proxy-agent-v6-2.0.1.tar.gz", hash = "sha256:42cdbc1de2ed3f845e3eb883a72f58fc7e5554c2e0b6fcdb366c159778dce74d"}, + {file = "aws_cdk.asset_node_proxy_agent_v6-2.0.1-py3-none-any.whl", hash = "sha256:e442673d4f93137ab165b75386761b1d46eea25fc5015e5145ae3afa9da06b6e"}, ] [package.dependencies] -jsii = ">=1.85.0,<2.0.0" +jsii = ">=1.86.1,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.91.0a0" +version = "2.92.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.91.0a0.tar.gz", hash = "sha256:a7b0e78862f3dd81cf13740df2ecda1c877545500872dc476f2dbf3807632a32"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:e3d606055c2fe268d80f96052b583060a25fadcdee79d89a75f2eac4354f2e69"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.92.0a0.tar.gz", hash = "sha256:9e0779e0b53b04eb3d6f7a218c57b5a9f5a0880d041d708fb51077961af2451f"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.92.0a0-py3-none-any.whl", hash = "sha256:fd93be00d8215931c12fb269397e7e6414372d7372aa5036cb70e997defc2a4b"}, ] [package.dependencies] -aws-cdk-lib = "2.91.0" +aws-cdk-lib = "2.92.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.85.0,<2.0.0" +jsii = ">=1.87.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.91.0a0" +version = "2.92.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.91.0a0.tar.gz", hash = "sha256:cafd747af66f92755f188172f0e892503bc73c26f0d6d95e5f733c67b0307fa8"}, - {file = 
"aws_cdk.aws_apigatewayv2_authorizers_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:972393ad1c220708616322946ba3f8936cbe143a69e543762295c1ea02d69849"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.92.0a0.tar.gz", hash = "sha256:5c58846756810d5136fc1adaf3f3f6387427d4fba1160d475cfb0a8dd3476fd1"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.92.0a0-py3-none-any.whl", hash = "sha256:62b67f68d7c71ad5286f98ef78910f98a1640f60a3b158cbd2da2347c1878984"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" -aws-cdk-lib = "2.91.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.92.0.a0" +aws-cdk-lib = "2.92.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.85.0,<2.0.0" +jsii = ">=1.87.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.91.0a0" +version = "2.92.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.91.0a0.tar.gz", hash = "sha256:db607df2563f0b839795a41218a59e3ebc29e906dd08aed7b0b59aceba0bde02"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:34d0f103846613a72cfae8419be2e4302863a1e8f6e81951b0a51c2f62ab80b3"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.92.0a0.tar.gz", hash = "sha256:93a3b07aa84a097c448f15a52b114feb15977133078f52dff90897e2966e51a1"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.92.0a0-py3-none-any.whl", hash = "sha256:73885920d969e1c1f4b0d415c123f9bc711b6bd66b9dbbd3523c0a3beb3d564a"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" -aws-cdk-lib = "2.91.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.92.0.a0" +aws-cdk-lib = "2.92.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.85.0,<2.0.0" +jsii = ">=1.87.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = 
"2.91.0" +version = "2.92.0" description = "Version 2 of the AWS Cloud Development Kit library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.91.0.tar.gz", hash = "sha256:1163926527a8b7da931cddea77a4824b929b3f775447c3b7427ecdef7701ce74"}, - {file = "aws_cdk_lib-2.91.0-py3-none-any.whl", hash = "sha256:ec2cadeb5727ea8259ad8a54ac9ff40502032cd2572c81f4594df93365da39da"}, + {file = "aws-cdk-lib-2.92.0.tar.gz", hash = "sha256:08fa326cb1bb9e460e96d2c4da838445d625e827e9d0b5511a3c730743e7694e"}, + {file = "aws_cdk_lib-2.92.0-py3-none-any.whl", hash = "sha256:b627d30ec19cf4aacb7b1a4857254c4acd52c346d2e078a3873b97cf773ad962"}, ] [package.dependencies] "aws-cdk.asset-awscli-v1" = ">=2.2.200,<3.0.0" "aws-cdk.asset-kubectl-v20" = ">=2.1.2,<3.0.0" -"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.166,<3.0.0" +"aws-cdk.asset-node-proxy-agent-v6" = ">=2.0.1,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.85.0,<2.0.0" +jsii = ">=1.87.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -305,17 +305,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.26" +version = "1.28.29" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.26-py3-none-any.whl", hash = "sha256:63619ffa44bc7f799b525c86d73bdb7f7a70994942bbff78253585bf64084e6e"}, - {file = "boto3-1.28.26.tar.gz", hash = "sha256:a15841c7d04f87c63c9f2587b2b48198bec04d307d7b9950cbe4a021f845a5ba"}, + {file = "boto3-1.28.29-py3-none-any.whl", hash = "sha256:7b8e7deee9f665612b3cd7412989aaab0337d8006a0490a188c814af137bd32d"}, + {file = "boto3-1.28.29.tar.gz", hash = "sha256:1ab375c231547db4c9ce760e29cbe90d0341fe44910571b1bc4967a72fd8276f"}, ] [package.dependencies] -botocore = ">=1.31.26,<1.32.0" +botocore = ">=1.31.29,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -324,13 +324,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.26" 
+version = "1.31.29" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.26-py3-none-any.whl", hash = "sha256:74d1c26144915312004a9f0232cdbe08946dfec9fc7dcd854456d2b73be9bfd9"}, - {file = "botocore-1.31.26.tar.gz", hash = "sha256:e68a50ba76425ede8693fdf1f95b8411e283bc7619c03d7eb666db9f1de48153"}, + {file = "botocore-1.31.29-py3-none-any.whl", hash = "sha256:d3dc422491b3a30667f188f3434541a1dd86d6f8ed7f98ca26e056ae7d912c85"}, + {file = "botocore-1.31.29.tar.gz", hash = "sha256:71b335a47ee061994ac12f15ffe63c5c783cb055dc48079b7d755a46b9c1918c"}, ] [package.dependencies] @@ -522,13 +522,13 @@ files = [ [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -1692,13 +1692,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-cloudwatch" -version = "1.28.16" -description = "Type annotations for boto3.CloudWatch 1.28.16 service generated with mypy-boto3-builder 7.17.1" +version = "1.28.28" +description = "Type annotations for boto3.CloudWatch 1.28.28 service generated with mypy-boto3-builder 7.17.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudwatch-1.28.16.tar.gz", hash = "sha256:15ccb83964bae8c15479852a9526898e6105defd6d09cc326034e98e4b00a896"}, - {file = 
"mypy_boto3_cloudwatch-1.28.16-py3-none-any.whl", hash = "sha256:fc3c5df0e67ac4b4bf246bcb06ab2ecb9ccff6a5fa1ced51dd2fd8e59a2863df"}, + {file = "mypy-boto3-cloudwatch-1.28.28.tar.gz", hash = "sha256:6dfad8f0f5fffbe1350c6e0f2fab4a0e184d1714f3de644937ad0bc458e7229c"}, + {file = "mypy_boto3_cloudwatch-1.28.28-py3-none-any.whl", hash = "sha256:102a5f5c63ec2654f6446e6ae12705de4bebc1a599fe35608e953ce151cc29a9"}, ] [package.dependencies] @@ -1706,13 +1706,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-dynamodb" -version = "1.28.19" -description = "Type annotations for boto3.DynamoDB 1.28.19 service generated with mypy-boto3-builder 7.17.2" +version = "1.28.27" +description = "Type annotations for boto3.DynamoDB 1.28.27 service generated with mypy-boto3-builder 7.17.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-dynamodb-1.28.19.tar.gz", hash = "sha256:6b9b407c9e2fc73ac5d475a8c98af4e1bea3899e5e74ce00d11cb9c878af761b"}, - {file = "mypy_boto3_dynamodb-1.28.19-py3-none-any.whl", hash = "sha256:a0859c469f46f7e354ae707f1f97bea36746d5538ee851b18d5b61cfb9cf91b9"}, + {file = "mypy-boto3-dynamodb-1.28.27.tar.gz", hash = "sha256:b6786cf953e65293ec25c791e7efcd8ededceb6bda2e04910785b0f62584417d"}, + {file = "mypy_boto3_dynamodb-1.28.27-py3-none-any.whl", hash = "sha256:218f7bcb04010058aea5a735d52b87c4f70e8c5feb44e64ab6baf377ebb4e22a"}, ] [package.dependencies] @@ -1748,13 +1748,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-s3" -version = "1.28.19" -description = "Type annotations for boto3.S3 1.28.19 service generated with mypy-boto3-builder 7.17.2" +version = "1.28.27" +description = "Type annotations for boto3.S3 1.28.27 service generated with mypy-boto3-builder 7.17.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.28.19.tar.gz", hash = 
"sha256:b8104b191924d8672068d21d748c0f8ae0b0e1950324cb315ec8a1ceed9d23ac"}, - {file = "mypy_boto3_s3-1.28.19-py3-none-any.whl", hash = "sha256:d3759a8fb004f482565904d788d844eeccf3e03b8fa1b96a0f82ba1527ef4022"}, + {file = "mypy-boto3-s3-1.28.27.tar.gz", hash = "sha256:f1094344f68d1ffe2b998404e2e4ff9aa4239438692187fa83ad7b734739991c"}, + {file = "mypy_boto3_s3-1.28.27-py3-none-any.whl", hash = "sha256:f4fdefbfe084c92a6b3d000689e61ab12a985a72b07c5ff157f8a66bcbdb83ba"}, ] [package.dependencies] @@ -2520,13 +2520,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.6.1" +version = "0.6.2" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"}, - {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"}, + {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, + {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, ] [package.dependencies] @@ -2984,4 +2984,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "26287678601db7965142f3448f0be18905409cfda177621da5e64c35bcd293db" +content-hash = "3f6652f08a626029a097a2712a6752dc69d95f6407cf3645531cb87f4fb6041c" diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index aca9e4ed20c..3797a2d8cc0 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -14,7 +14,7 @@ idempotent_function, ) -redis_stdalone_config = {"host": "127.0.0.1", "port": 63005} +redis_stdalone_config = {"host": "127.0.0.1", "port": 63005, "mode": "standalone"} @pytest.fixture @@ 
-34,7 +34,7 @@ def get_remaining_time_in_millis(self) -> int: @pytest.fixture def persistence_store_standalone_redis(): - return RedisCachePersistenceLayer(connection=RedisConnection(**redis_stdalone_config).get_standalone_connection()) + return RedisCachePersistenceLayer(connection=RedisConnection(**redis_stdalone_config).get_connection()) # test basic From ad26e31ce84b29e7e9f371b8bb3b7cc93f1c7673 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Thu, 24 Aug 2023 00:03:37 +0000 Subject: [PATCH 22/81] add mock redis, redis validation, sentinel support --- Makefile | 2 + .../utilities/idempotency/exceptions.py | 6 + .../idempotency/persistence/redis.py | 29 ++- .../persistence/test_redis_layer.py | 226 ++++++++++++++++++ .../integration/idempotency/setup_sentinel.sh | 7 + .../idempotency/test_idempotency_redis.py | 69 +++++- 6 files changed, 325 insertions(+), 14 deletions(-) create mode 100644 tests/functional/idempotency/persistence/test_redis_layer.py create mode 100644 tests/integration/idempotency/setup_sentinel.sh diff --git a/Makefile b/Makefile index d02f65b554b..f557eb927dd 100644 --- a/Makefile +++ b/Makefile @@ -40,6 +40,8 @@ test-idempotency-redis: docker run --name test-idempotency-redis -d -p 63005:6379 redis poetry run pytest tests/integration/idempotency;docker stop test-idempotency-redis;docker rm test-idempotency-redis + + e2e-test: python parallel_run_e2e.py diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index 67a8d6721b1..26133297a57 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -71,3 +71,9 @@ class IdempotencyKeyError(BaseError): """ Payload does not contain an idempotent key """ + + +class IdempotencyRedisClientConfigError(BaseError): + """ + Redis Client passed into persistant layer is not valid + """ diff --git 
a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 566f02d0be2..f07d5665614 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,16 +1,20 @@ import datetime import logging -from typing import Any, Dict +from typing import Any, Dict, Union try: import redis # type:ignore except ImportError: redis = None + +import redis + from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, + IdempotencyRedisClientConfigError, ) from aws_lambda_powertools.utilities.idempotency.persistence.base import ( STATUS_CONSTANTS, @@ -23,7 +27,7 @@ class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - connection, + connection: Union[redis.Redis, redis.cluster.RedisCluster], in_progress_expiry_attr: str = "in_progress_expiration", status_attr: str = "status", data_attr: str = "data", @@ -44,6 +48,12 @@ def __init__( """ # Initialize connection with Redis + + if not hasattr(connection, "get_connection_kwargs"): + raise IdempotencyRedisClientConfigError + if not connection.get_connection_kwargs().get("decode_responses", False): + # Requires decode_responses to be true + raise IdempotencyRedisClientConfigError self._connection = connection self.in_progress_expiry_attr = in_progress_expiry_attr @@ -106,19 +116,20 @@ def _put_record(self, data_record: DataRecord) -> None: # - first time that this invocation key is used # - previous invocation with the same key was deleted due to TTL idempotency_record = self._connection.hgetall(data_record.idempotency_key) + print(idempotency_record) if len(idempotency_record) > 0: # record already exists. 
# status is completed, so raise exception because it exists and still valid if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"]: - raise + raise IdempotencyItemAlreadyExistsError # checking if in_progress_expiry_attr exists # if in_progress_expiry_attr exist, must be lower than now if self.in_progress_expiry_attr in idempotency_record and int( idempotency_record[self.in_progress_expiry_attr], ) > int(now.timestamp() * 1000): - raise + raise IdempotencyItemAlreadyExistsError logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") self._connection.hset(**item) @@ -126,9 +137,13 @@ def _put_record(self, data_record: DataRecord) -> None: # Need to review this to get ttl in seconds # Q: should we replace self.expires_after_seconds with _get_expiry_timestamp? more consistent self._connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) - except Exception: - logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") - raise IdempotencyItemAlreadyExistsError + except redis.exceptions.RedisError: + raise redis.exceptions.RedisError + except redis.exceptions.RedisClusterException: + raise redis.exceptions.RedisClusterException + except Exception as e: + logger.debug(f"encountered non-redis exception:{e}") + raise e def _update_record(self, data_record: DataRecord) -> None: item: Dict[str, Any] = {} diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py new file mode 100644 index 00000000000..6bb85b49f61 --- /dev/null +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -0,0 +1,226 @@ +import copy +import time as t + +import pytest + +from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyAlreadyInProgressError, + 
IdempotencyItemAlreadyExistsError, + IdempotencyItemNotFoundError, + IdempotencyRedisClientConfigError, +) +from aws_lambda_powertools.utilities.idempotency.idempotency import ( + idempotent, + idempotent_function, +) + + +@pytest.fixture +def lambda_context(): + class LambdaContext: + def __init__(self): + self.function_name = "test-func" + self.memory_limit_in_mb = 128 + self.invoked_function_arn = "arn:aws:lambda:eu-west-1:809313241234:function:test-func" + self.aws_request_id = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 1000 + + return LambdaContext() + + +class MockRedis: + def __init__(self, decode_responses, cache, **kwargs): + self.cache = cache or {} + self.expire_dict = {} + self.decode_responses = decode_responses + self.acl = {} + self.username = "" + + def hset(self, name, mapping): + self.expire_dict.pop(name, {}) + self.cache[name] = mapping + + # not covered by test yet. + def expire(self, name, time): + self.expire_dict[name] = t.time() + time + + # return {} if no match + def hgetall(self, name): + if self.expire_dict.get(name, t.time() + 1) < t.time(): + self.cache.pop(name, {}) + return self.cache.get(name, {}) + + def get_connection_kwargs(self): + return {"decode_responses": self.decode_responses} + + def auth(self, username, **kwargs): + self.username = username + + def delete(self, name): + self.cache.pop(name, {}) + + +@pytest.fixture +def persistence_store_standalone_redis(): + # you will need to handle yourself the connection to pass again the password + # and avoid AuthenticationError at redis queries + redis_client = MockRedis( + host="localhost", + port="63005", + decode_responses=True, + ) + return RedisCachePersistenceLayer(connection=redis_client) + + +# test basic +def test_idempotent_function_and_lambda_handler_redis_basic( + # idempotency_config: IdempotencyConfig, + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"data": 
"value"} + persistence_layer = persistence_store_standalone_redis + expected_result = {"message": "Foo"} + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return expected_result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return expected_result + + # WHEN calling the function + fn_result = record_handler(record=mock_event) + # WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result + + +def test_idempotent_lambda_redis_no_decode(): + redis_client = MockRedis( + host="localhost", + port="63005", + decode_responses=False, + ) + # decode_responses=False will not be accepted + with pytest.raises(IdempotencyRedisClientConfigError): + RedisCachePersistenceLayer(connection=redis_client) + + +def test_idempotent_function_and_lambda_handler_redis_cache( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"data": "value2"} + persistence_layer = persistence_store_standalone_redis + result = {"message": "Foo"} + expected_result = copy.deepcopy(result) + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return result + + # WHEN calling the function + fn_result = record_handler(record=mock_event) + # WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result + + # modify the return to check if idem cache works + result = {"message": "Bar"} + fn_result2 = 
record_handler(record=mock_event) + # Second time calling lambda handler, test if same result + handler_result2 = lambda_handler(mock_event, lambda_context) + assert fn_result2 == expected_result + assert handler_result2 == expected_result + + # modify the mock event to check if we got updated result + mock_event = {"data": "value3"} + fn_result3 = record_handler(record=mock_event) + # thrid time calling lambda handler, test if result updated + handler_result3 = lambda_handler(mock_event, lambda_context) + assert fn_result3 == result + assert handler_result3 == result + + +# test idem-inprogress +def test_idempotent_lambda_redis_in_progress( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + """ + Test idempotent decorator where lambda_handler is already processing an event with matching event key + """ + + mock_event = {"data": "value4"} + persistence_store = persistence_store_standalone_redis + lambda_response = {"foo": "bar"} + + @idempotent(persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response + + # register the context first + lambda_handler(mock_event, lambda_context) + # save additional to in_progress + mock_event = {"data": "value7"} + try: + persistence_store.save_inprogress(mock_event, 1000) + except IdempotencyItemAlreadyExistsError: + pass + + with pytest.raises(IdempotencyAlreadyInProgressError): + lambda_handler(mock_event, lambda_context) + + +# test -remove +def test_idempotent_lambda_redis_delete( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"data": "test_delete"} + persistence_layer = persistence_store_standalone_redis + result = {"message": "Foo"} + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, _): + return result + + handler_result = lambda_handler(mock_event, lambda_context) + assert handler_result == result + + # delete the idem and handler should output new result + 
persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) + result = {"message": "Foo2"} + handler_result2 = lambda_handler(mock_event, lambda_context) + assert handler_result2 == result + + +"""def test_idempotent_lambda_redis_credential(lambda_context): + redis_client = MockRedis( + host='localhost', + port='63005', + decode_responses=True, + ) + pwd = "terriblePassword" + usr = "test_acl_denial" + redis_client.acl_setuser(username=usr, enabled=True, passwords="+"+pwd,keys='*',commands=['+hgetall','-set']) + redis_client.auth(password=pwd,username=usr) + + @idempotent(persistence_store=RedisCachePersistenceLayer(connection=redis_client)) + def lambda_handler(event, _): + return True + with pytest.raises(IdempotencyPersistenceLayerError): + handler_result = lambda_handler("test_Acl", lambda_context)""" diff --git a/tests/integration/idempotency/setup_sentinel.sh b/tests/integration/idempotency/setup_sentinel.sh new file mode 100644 index 00000000000..90e8b0f725d --- /dev/null +++ b/tests/integration/idempotency/setup_sentinel.sh @@ -0,0 +1,7 @@ +docker run --name redis_master -p 6379:6379 -d redis +docker run --name redis_slave_1 -p 6380:6380 --link redis_master:redis_master -d redis redis-server --slaveof redis_master 6379 +docker run --name redis_slave_2 -p 6381:6381 --link redis_master:redis_master -d redis redis-server --slaveof redis_master 6379 +docker run --name redis_slave_3 -p 6382:6382 --link redis_master:redis_master -d redis redis-server --slaveof redis_master 6379 +docker run --name redis_sentinel_1 -d -e REDIS_MASTER_HOST=redis_master -e REDIS_SENTINEL_PORT_NUMBER=26379 -e REDIS_SENTINEL_QUORUM=2 -p 26379:26379 --link redis_master:redis_master bitnami/redis-sentinel:latest +docker run --name redis_sentinel_2 -d -e REDIS_MASTER_HOST=redis_master -e REDIS_SENTINEL_PORT_NUMBER=26380 -e REDIS_SENTINEL_QUORUM=2 -p 26380:26380 --link redis_master:redis_master bitnami/redis-sentinel:latest +docker run --name redis_sentinel_3 -d -e 
REDIS_MASTER_HOST=redis_master -e REDIS_SENTINEL_PORT_NUMBER=26381 -e REDIS_SENTINEL_QUORUM=2 -p 26381:26381 --link redis_master:redis_master bitnami/redis-sentinel:latest diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 3797a2d8cc0..9714665b8b8 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -1,21 +1,21 @@ import copy import pytest +import redis -from aws_lambda_powertools.utilities.connections import RedisConnection from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, + IdempotencyPersistenceLayerError, + IdempotencyRedisClientConfigError, ) from aws_lambda_powertools.utilities.idempotency.idempotency import ( idempotent, idempotent_function, ) -redis_stdalone_config = {"host": "127.0.0.1", "port": 63005, "mode": "standalone"} - @pytest.fixture def lambda_context(): @@ -32,9 +32,34 @@ def get_remaining_time_in_millis(self) -> int: return LambdaContext() +@pytest.fixture +def persistence_store_sentinel_redis(): + sentinel = redis.Sentinel( + [("localhost", 26379), ("localhost", 26380), ("localhost", 26381)], + ) + # you will need to handle yourself the connection to pass again the password + # and avoid AuthenticationError at redis queries + host, port = sentinel.discover_master("mymaster") + redis_client = redis.Redis( + host=host, + port=port, + decode_responses=True, + ) + redis_client.expire() + + return RedisCachePersistenceLayer(connection=redis_client) + + @pytest.fixture def persistence_store_standalone_redis(): - return RedisCachePersistenceLayer(connection=RedisConnection(**redis_stdalone_config).get_connection()) + # you will need to handle yourself the connection to pass again the password 
+ # and avoid AuthenticationError at redis queries + redis_client = redis.Redis( + host="localhost", + port="63005", + decode_responses=True, + ) + return RedisCachePersistenceLayer(connection=redis_client) # test basic @@ -52,7 +77,7 @@ def record_handler(record): return expected_result @idempotent(persistence_store=persistence_layer) - def lambda_handler(event, _): + def lambda_handler(event, context): return expected_result # WHEN calling the function @@ -64,6 +89,17 @@ def lambda_handler(event, _): assert handler_result == expected_result +def test_idempotent_lambda_redis_no_decode(): + redis_client = redis.Redis( + host="localhost", + port="63005", + decode_responses=False, + ) + # decode_responses=False will not be accepted + with pytest.raises(IdempotencyRedisClientConfigError): + RedisCachePersistenceLayer(connection=redis_client) + + def test_idempotent_function_and_lambda_handler_redis_cache( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -78,7 +114,7 @@ def record_handler(record): return result @idempotent(persistence_store=persistence_layer) - def lambda_handler(event, _): + def lambda_handler(event, context): return result # WHEN calling the function @@ -136,7 +172,7 @@ def lambda_handler(event, context): lambda_handler(mock_event, lambda_context) -# -remove +# test -remove def test_idempotent_lambda_redis_delete( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -157,3 +193,22 @@ def lambda_handler(event, _): result = {"message": "Foo2"} handler_result2 = lambda_handler(mock_event, lambda_context) assert handler_result2 == result + + +def test_idempotent_lambda_redis_credential(lambda_context): + redis_client = redis.Redis( + host="localhost", + port="63005", + decode_responses=True, + ) + pwd = "terriblePassword" + usr = "test_acl_denial" + redis_client.acl_setuser(username=usr, enabled=True, passwords="+" + pwd, keys="*", commands=["+hgetall", "-set"]) + 
redis_client.auth(password=pwd, username=usr) + + @idempotent(persistence_store=RedisCachePersistenceLayer(connection=redis_client)) + def lambda_handler(event, _): + return True + + with pytest.raises(IdempotencyPersistenceLayerError): + lambda_handler("test_Acl", lambda_context) From 9c36fc80ad47fdaf76690eb49e49aca21030defe Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Thu, 24 Aug 2023 23:54:41 +0000 Subject: [PATCH 23/81] fix test --- tests/functional/idempotency/persistence/test_redis_layer.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index 6bb85b49f61..366fded97f1 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -32,7 +32,7 @@ def get_remaining_time_in_millis(self) -> int: class MockRedis: - def __init__(self, decode_responses, cache, **kwargs): + def __init__(self, decode_responses, cache: dict = None, **kwargs): self.cache = cache or {} self.expire_dict = {} self.decode_responses = decode_responses @@ -43,6 +43,9 @@ def hset(self, name, mapping): self.expire_dict.pop(name, {}) self.cache[name] = mapping + def from_url(self, url: str): + pass + # not covered by test yet. 
def expire(self, name, time): self.expire_dict[name] = t.time() + time From 46fb65e86f04dfc280f26c7473a914975315a354 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Mon, 25 Sep 2023 19:51:34 +0000 Subject: [PATCH 24/81] add redis --- poetry.lock | 226 ++++++++++++++++++++++++++-------------------------- 1 file changed, 115 insertions(+), 111 deletions(-) diff --git a/poetry.lock b/poetry.lock index 17cc8c178dd..6688e6f855c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -107,69 +107,69 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.93.0a0" +version = "2.97.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.93.0a0.tar.gz", hash = "sha256:67b5c1cb5a3405f321a25da185ef949460793d9b33313f13544106bed2ce2180"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:962d52fdfbc922f104381943d2edb0d535f1d793fd73f4518fb25fb7d63041f4"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.97.0a0.tar.gz", hash = "sha256:6ac4701a3cee8c490ffa5e608d5b178d02ca255322be3bc57c695f6cc83a408b"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.97.0a0-py3-none-any.whl", hash = "sha256:a03ad8a66505eeabe1de15eb69ded67797642a04741a0a2de9e676001417acc9"}, ] [package.dependencies] -aws-cdk-lib = "2.93.0" +aws-cdk-lib = "2.97.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.93.0a0" +version = "2.97.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.93.0a0.tar.gz", hash = "sha256:495969d05ca85942bc3da6fac7d0a6df5893265b644921d9e891441ee845fdfd"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.93.0a0-py3-none-any.whl", hash = 
"sha256:6b22e4d94afa481c94fcafdc62c2cf22ea08ea0d985e738569b39da4ba4ffbb0"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.97.0a0.tar.gz", hash = "sha256:92864e155e368beed9f4167fcb62a626c88bc6cca1c70820d8d824974e1fef5a"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.97.0a0-py3-none-any.whl", hash = "sha256:55de13c6aa29eab320666da08ca431156d894334d1717fea7cce3fe1bc50ccce"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.93.0.a0" -aws-cdk-lib = "2.93.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.97.0.a0" +aws-cdk-lib = "2.97.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.93.0a0" +version = "2.97.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.93.0a0.tar.gz", hash = "sha256:4c581f67634fab19b11025751e3ee825f055ee9d1bc77d9cbc5009f261456e62"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:48479656dca9e446ae625e5936ddd940863bd478eb86cdd62889c6b5fee9f751"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.97.0a0.tar.gz", hash = "sha256:3279b780adb436a3d7e5c0b5a09ee412de3b7d9ac1fa07644e709286c4b2b5ee"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.97.0a0-py3-none-any.whl", hash = "sha256:d533aa124942ffb48aa0adef0fbcfe889883d9f6b834d64f7a02a259a6020329"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.93.0.a0" -aws-cdk-lib = "2.93.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.97.0.a0" +aws-cdk-lib = "2.97.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.93.0" +version = "2.97.0" description = "Version 2 of the AWS Cloud Development Kit 
library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.93.0.tar.gz", hash = "sha256:54252c8df547d2bd83584278529f47506fa2c27adcbfa623f00322b685f24c18"}, - {file = "aws_cdk_lib-2.93.0-py3-none-any.whl", hash = "sha256:063e7c1f2588a254766229130347fb60e0bd7dd2a6d222d3ae2aa145a6059554"}, + {file = "aws-cdk-lib-2.97.0.tar.gz", hash = "sha256:63fa0b59e7e9f5cd14565d6ed055cf992ef81bf5741bfbf914e90f0a87437404"}, + {file = "aws_cdk_lib-2.97.0-py3-none-any.whl", hash = "sha256:f66c0020760a6f0c5d498ad8e7fd470fd128c1d8db311b6e9f7e38db0bd83c95"}, ] [package.dependencies] @@ -177,7 +177,7 @@ files = [ "aws-cdk.asset-kubectl-v20" = ">=2.1.2,<3.0.0" "aws-cdk.asset-node-proxy-agent-v6" = ">=2.0.1,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -197,23 +197,23 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.73.0" +version = "1.75.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.73.0.tar.gz", hash = "sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"}, - {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"}, + {file = "aws-sam-translator-1.75.0.tar.gz", hash = "sha256:18c83abcae594de084947befb9c80f689f8b99ece2d38729d27a9cea634da15c"}, + {file = "aws_sam_translator-1.75.0-py3-none-any.whl", hash = "sha256:02bad7636356438b439c8e0ef0195618e3b7b67b6dfbf675b1627d6fd84b2910"}, ] [package.dependencies] boto3 = ">=1.19.5,<2.dev0" jsonschema = ">=3.2,<5" -pydantic = ">=1.8,<2.0" +pydantic = ">=1.8,<3" typing-extensions = ">=4.4,<5" [package.extras] -dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage 
(>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.263)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" @@ -319,17 +319,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.35" +version = "1.28.54" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.35-py3-none-any.whl", hash = "sha256:d77415f22bbc14f3d72eaed2fc9f96d161f3ba7686922ad26d6bbc9d4985f3df"}, - {file = "boto3-1.28.35.tar.gz", hash = "sha256:580b584e36967155abed7cc9b088b3bd784e8242ae4d8841f58cb50ab05520dc"}, + {file = "boto3-1.28.54-py3-none-any.whl", hash = "sha256:3cb2aee317a1b8686e3b23674e4099b8ff7451bd8acc61b9719acff86fa024d1"}, + {file = "boto3-1.28.54.tar.gz", hash = "sha256:22e37d8c4f2d97b5e5c6ccc1d9edc7760717990b0ba8b8ea17a58cc87e57c5c9"}, ] [package.dependencies] -botocore = ">=1.31.35,<1.32.0" +botocore = ">=1.31.54,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -338,13 +338,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.35" +version = 
"1.31.54" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.35-py3-none-any.whl", hash = "sha256:943e1465aad66db4933b06809134bd08c5b05e8eb18c19742ffec82f54769457"}, - {file = "botocore-1.31.35.tar.gz", hash = "sha256:7e4534325262f43293a9cc9937cb3f1711365244ffde8b925a6ee862bcf30a83"}, + {file = "botocore-1.31.54-py3-none-any.whl", hash = "sha256:71fdb337ddcdb6bf378e1211cba9ce754c35f12b1524c7d0c0c147b2310356c7"}, + {file = "botocore-1.31.54.tar.gz", hash = "sha256:c98e78a9490c4166b205f87912b46770e156bfe7d53bae54ccbd49c68a336ec6"}, ] [package.dependencies] @@ -368,13 +368,13 @@ files = [ [[package]] name = "bytecode" -version = "0.14.2" +version = "0.15.0" description = "Python module to generate and modify bytecode" optional = false python-versions = ">=3.8" files = [ - {file = "bytecode-0.14.2-py3-none-any.whl", hash = "sha256:e368a2b9bbd7c986133c951250db94fb32f774cfc49752a9db9073bcf9899762"}, - {file = "bytecode-0.14.2.tar.gz", hash = "sha256:386378d9025d68ddb144870ae74330a492717b11b8c9164c4034e88add808f0c"}, + {file = "bytecode-0.15.0-py3-none-any.whl", hash = "sha256:a66718dc1d246b4fec52b5850c15592344a56c8bdb28fd243c895ccf00f8371f"}, + {file = "bytecode-0.15.0.tar.gz", hash = "sha256:0908a8348cabf366b5c1865daabcdc0d650cb0cbdeb1750cc90564852f81945c"}, ] [package.dependencies] @@ -562,17 +562,17 @@ files = [ [[package]] name = "constructs" -version = "10.2.69" +version = "10.2.70" description = "A programming model for software-defined state" optional = false python-versions = "~=3.7" files = [ - {file = "constructs-10.2.69-py3-none-any.whl", hash = "sha256:27a60f5ce4faa4d43c91c73f24e1a245c0a1ef67ea1c8a3df9ca6af9adf618df"}, - {file = "constructs-10.2.69.tar.gz", hash = "sha256:520ddd665cc336df90be06bb1bd49f3a9a7400d886cad8aef7b0155593b4ffa4"}, + {file = "constructs-10.2.70-py3-none-any.whl", hash = "sha256:ade1b5224830e78724ed50ce91ec2e6ce437c9983713c2b8ca541272283c5d37"}, + {file = 
"constructs-10.2.70.tar.gz", hash = "sha256:f4ae2e0705baff188519e0233ad2129537c8eca40d68242873ca444a659549f8"}, ] [package.dependencies] -jsii = ">=1.84.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -653,13 +653,13 @@ toml = ["tomli"] [[package]] name = "datadog" -version = "0.46.0" +version = "0.47.0" description = "The Datadog Python library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "datadog-0.46.0-py2.py3-none-any.whl", hash = "sha256:3d7bcda6177b43be4cdb52e16b4bdd4f9005716c0dd7cfea009e018c36bb7a3d"}, - {file = "datadog-0.46.0.tar.gz", hash = "sha256:e4fbc92a85e2b0919a226896ae45fc5e4b356c0c57f1c2659659dfbe0789c674"}, + {file = "datadog-0.47.0-py2.py3-none-any.whl", hash = "sha256:a45ec997ab554208837e8c44d81d0e1456539dc14da5743687250e028bc809b7"}, + {file = "datadog-0.47.0.tar.gz", hash = "sha256:47be3b2c3d709a7f5b709eb126ed4fe6cc7977d618fe5c158dd89c2a9f7d9916"}, ] [package.dependencies] @@ -667,13 +667,13 @@ requests = ">=2.6.0" [[package]] name = "datadog-lambda" -version = "4.78.0" +version = "4.80.0" description = "The Datadog AWS Lambda Library" optional = false python-versions = ">=3.7.0,<4" files = [ - {file = "datadog_lambda-4.78.0-py3-none-any.whl", hash = "sha256:660bae6057f3b2033b0c035e9d542af491e40f9ce57b97b4891c491262b9148c"}, - {file = "datadog_lambda-4.78.0.tar.gz", hash = "sha256:3e57faa8f80ddd43b595355b92045fde8f9ed87efe8619133e82cebb87cbe434"}, + {file = "datadog_lambda-4.80.0-py3-none-any.whl", hash = "sha256:506b8964567230d87e2bfd323420854d37b4d7c7a9bfab7e192389f9b4c8150c"}, + {file = "datadog_lambda-4.80.0.tar.gz", hash = "sha256:ddd3ed20592df97523ae26ba552b69de239520c37e31804ca9949b010f90b461"}, ] [package.dependencies] @@ -931,19 +931,22 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.35" +version = "3.1.37" description = "GitPython is a Python library used to interact with Git repositories" 
optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.35-py3-none-any.whl", hash = "sha256:c19b4292d7a1d3c0f653858db273ff8a6614100d1eb1528b014ec97286193c09"}, - {file = "GitPython-3.1.35.tar.gz", hash = "sha256:9cbefbd1789a5fe9bcf621bb34d3f441f3a90c8461d377f84eda73e721d9b06b"}, + {file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"}, + {file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"] + [[package]] name = "h11" version = "0.14.0" @@ -1227,13 +1230,13 @@ pbr = "*" [[package]] name = "jsii" -version = "1.88.0" +version = "1.89.0" description = "Python client for jsii runtime" optional = false python-versions = "~=3.7" files = [ - {file = "jsii-1.88.0-py3-none-any.whl", hash = "sha256:b3888141c30b83a30bfbe03a877bbf8ae42f957b6ccca02bae448853debffaf8"}, - {file = "jsii-1.88.0.tar.gz", hash = "sha256:a59e0f962589dcc741d2bcf2a7b4c4a927a29d3f9a2804a192c734e2e3275018"}, + {file = "jsii-1.89.0-py3-none-any.whl", hash = "sha256:20a463e8533eded656b285f532e5468a414c48ab083cf0cf93a86d593f0c36b8"}, + {file = "jsii-1.89.0.tar.gz", hash = "sha256:6edbb79afc0b7407cb64e9dd0f27b512279201307c16dd9ae72462b3cbd09970"}, ] [package.dependencies] @@ -1433,16 +1436,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1520,13 +1513,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.5.2" +version = "1.5.3" description = "Project documentation with Markdown." optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.5.2-py3-none-any.whl", hash = "sha256:60a62538519c2e96fe8426654a67ee177350451616118a41596ae7c876bb7eac"}, - {file = "mkdocs-1.5.2.tar.gz", hash = "sha256:70d0da09c26cff288852471be03c23f0f521fc15cf16ac89c7a3bfb9ae8d24f9"}, + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, ] [package.dependencies] @@ -1591,13 +1584,13 @@ requests = ">=2.26,<3.0" [[package]] name = "mkdocs-material-extensions" -version = "1.1.1" +version = "1.2" description = "Extension pack for Python Markdown and MkDocs Material." 
optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, - {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, + {file = "mkdocs_material_extensions-1.2-py3-none-any.whl", hash = "sha256:c767bd6d6305f6420a50f0b541b0c9966d52068839af97029be14443849fb8a1"}, + {file = "mkdocs_material_extensions-1.2.tar.gz", hash = "sha256:27e2d1ed2d031426a6e10d5ea06989d67e90bb02acd588bc5673106b5ee5eedf"}, ] [[package]] @@ -1849,13 +1842,13 @@ test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] [[package]] name = "opentelemetry-api" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"}, - {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"}, + {file = "opentelemetry_api-1.20.0-py3-none-any.whl", hash = "sha256:982b76036fec0fdaf490ae3dfd9f28c81442a33414f737abc687a32758cdcba5"}, + {file = "opentelemetry_api-1.20.0.tar.gz", hash = "sha256:06abe351db7572f8afdd0fb889ce53f3c992dbf6f6262507b385cc1963e06983"}, ] [package.dependencies] @@ -1969,24 +1962,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.24.2" +version = "4.24.3" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.2-cp310-abi3-win32.whl", hash = "sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924"}, - {file = "protobuf-4.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3"}, - {file = "protobuf-4.24.2-cp37-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880"}, - {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c"}, - {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74"}, - {file = "protobuf-4.24.2-cp37-cp37m-win32.whl", hash = "sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1"}, - {file = "protobuf-4.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099"}, - {file = "protobuf-4.24.2-cp38-cp38-win32.whl", hash = "sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e"}, - {file = "protobuf-4.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16"}, - {file = "protobuf-4.24.2-cp39-cp39-win32.whl", hash = "sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b"}, - {file = "protobuf-4.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd"}, - {file = "protobuf-4.24.2-py3-none-any.whl", hash = "sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e"}, - {file = "protobuf-4.24.2.tar.gz", hash = "sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e"}, + {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, + {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, + {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, + {file 
= "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, + {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, + {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, + {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, + {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, + {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, + {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, + {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, + {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, ] [[package]] @@ -2079,12 +2072,13 @@ plugins = ["importlib-metadata"] [[package]] name = "pyhcl" -version = "0.4.4" +version = "0.4.5" description = "HCL configuration parser for python" optional = false python-versions = "*" files = [ - {file = "pyhcl-0.4.4.tar.gz", hash = "sha256:2d9b9dcdf1023d812bfed561ba72c99104c5b3f52e558d595130a44ce081b003"}, + {file = "pyhcl-0.4.5-py3-none-any.whl", hash = "sha256:30ee337d330d1f90c9f5ed8f49c468f66c8e6e43192bdc7c6ece1420beb3070c"}, + {file = "pyhcl-0.4.5.tar.gz", hash = "sha256:c47293a51ccdd25e18bb5c8c0ab0ffe355b37c87f8d6f9d3280dc41efd4740bc"}, ] [[package]] @@ -2274,13 +2268,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false 
python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -2295,7 +2289,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2303,15 +2296,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2328,7 +2314,6 @@ files = [ {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2336,7 +2321,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2374,6 +2358,26 @@ mando = ">=0.6,<0.8" [package.extras] toml = ["tomli (>=2.0.1)"] +[[package]] +name = "redis" +version = "4.6.0" +description = "Python client for Redis database and key-value store" +optional = true +python-versions = ">=3.7" +files = [ + {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, + {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} +importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "regex" version = "2022.10.31" @@ -2507,13 +2511,13 @@ decorator = ">=3.4.2" [[package]] name = "rich" -version = "13.5.2" +version = "13.5.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, - {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, + {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, + {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, ] [package.dependencies] @@ -2640,13 +2644,13 @@ files = [ [[package]] name = "smmap" -version = "5.0.0" +version = "5.0.1" description = "A pure Python implementation of a sliding window memory map 
manager" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] [[package]] @@ -2778,13 +2782,13 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.3" +version = "2.31.0.5" description = "Typing stubs for requests" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.3.tar.gz", hash = "sha256:d5d7a08965fca12bedf716eaf5430c6e3d0da9f3164a1dba2a7f3885f9ebe3c0"}, - {file = "types_requests-2.31.0.3-py3-none-any.whl", hash = "sha256:938f51653c757716aeca5d72c405c5e2befad8b0d330e3b385ce7f148e1b10dc"}, + {file = "types-requests-2.31.0.5.tar.gz", hash = "sha256:e4153c2a4e48dcc661600fa5f199b483cdcbd21965de0b5e2df26e93343c0f57"}, + {file = "types_requests-2.31.0.5-py3-none-any.whl", hash = "sha256:e2523825754b2832e04cdc1e731423390e731457890113a201ebca8ad9b40427"}, ] [package.dependencies] @@ -3019,4 +3023,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "07f7e3364f2353eaa9305805d1182d46924314b53e19b49c0e713f461d5ee8cb" +content-hash = "43ec13a1aee90b6497dcdf5b1e73b83c82decb9c0d6ee56f0a6a156b2b4e08cd" From 0861f2a9f90f4d8de9678fd86ab6d968ab48f445 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 27 Sep 2023 00:00:11 +0000 Subject: [PATCH 25/81] add doc, few todos still need to address --- .../utilities/idempotency/exceptions.py | 6 + .../idempotency/persistence/redis.py | 14 +- docs/utilities/idempotency.md | 151 ++++++++++-------- 
...g_started_with_idempotency_redis_client.py | 38 +++++ ...g_started_with_idempotency_redis_config.py | 38 +++++ .../idempotency/templates/sam_redis_vpc.yaml | 13 ++ .../idempotency/tests/test_with_mock_redis.py | 81 ++++++++++ .../idempotency/tests/test_with_real_redis.py | 56 +++++++ 8 files changed, 327 insertions(+), 70 deletions(-) create mode 100644 examples/idempotency/src/getting_started_with_idempotency_redis_client.py create mode 100644 examples/idempotency/src/getting_started_with_idempotency_redis_config.py create mode 100644 examples/idempotency/templates/sam_redis_vpc.yaml create mode 100644 examples/idempotency/tests/test_with_mock_redis.py create mode 100644 examples/idempotency/tests/test_with_real_redis.py diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index 6e5930549c4..43f2d1cd1f3 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -83,3 +83,9 @@ class IdempotencyNoSerializationModelError(BaseError): """ No model was supplied to the serializer """ + + +class IdempotencyRedisClientConfigError(BaseError): + """ + The Redis connection passed in has unsupported config + """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index f07d5665614..1e301c007d5 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -54,7 +54,7 @@ def __init__( if not connection.get_connection_kwargs().get("decode_responses", False): # Requires decode_responses to be true raise IdempotencyRedisClientConfigError - self._connection = connection + self.connection = connection self.in_progress_expiry_attr = in_progress_expiry_attr self.status_attr = status_attr @@ -76,7 +76,7 @@ def _item_to_data_record(self, 
idempotency_key: str, item: Dict[str, Any]) -> Da def _get_record(self, idempotency_key) -> DataRecord: # See: https://redis.io/commands/hgetall/ - response = self._connection.hgetall(idempotency_key) + response = self.connection.hgetall(idempotency_key) try: item = response @@ -115,7 +115,7 @@ def _put_record(self, data_record: DataRecord) -> None: # The idempotency key does not exist: # - first time that this invocation key is used # - previous invocation with the same key was deleted due to TTL - idempotency_record = self._connection.hgetall(data_record.idempotency_key) + idempotency_record = self.connection.hgetall(data_record.idempotency_key) print(idempotency_record) if len(idempotency_record) > 0: # record already exists. @@ -132,11 +132,11 @@ def _put_record(self, data_record: DataRecord) -> None: raise IdempotencyItemAlreadyExistsError logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") - self._connection.hset(**item) + self.connection.hset(**item) # hset type must set expiration after adding the record # Need to review this to get ttl in seconds # Q: should we replace self.expires_after_seconds with _get_expiry_timestamp? 
more consistent - self._connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) + self.connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) except redis.exceptions.RedisError: raise redis.exceptions.RedisError except redis.exceptions.RedisClusterException: @@ -156,9 +156,9 @@ def _update_record(self, data_record: DataRecord) -> None: }, } logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") - self._connection.hset(**item) + self.connection.hset(**item) def _delete_record(self, data_record: DataRecord) -> None: logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") # See: https://redis.io/commands/del/ - self._connection.delete(data_record.idempotency_key) + self.connection.delete(data_record.idempotency_key) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 128b8d53b6c..fc49d208a1e 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -51,9 +51,10 @@ classDiagram ## Getting started -### IAM Permissions +???+ note + This section uses DynamoDB as default idempotent persistence storage layer. If you are interested in using Redis as persistence storage layer, Check out the [Redis as persistence storage layer](#redis-as-persistent-storage-layer-provider) Section. -#### DynamoDB +### IAM Permissions Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, `dynamodb:UpdateItem` and `dynamodb:DeleteItem` IAM permissions before using this feature. @@ -62,16 +63,10 @@ Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, ### Required resources -_**DynamoDB**_ - Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. 
As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. -_**Redis**_ - -Before getting started you need to setup your [EC2 Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EC2_GetStarted.html) and [ElastiCache for Redis cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/GettingStarted.html). - **Default table configuration** If you're not [changing the default configuration for the DynamoDB persistence layer](#dynamodbpersistencelayer), this is the expected default configuration: @@ -114,8 +109,6 @@ If you're not [changing the default configuration for the DynamoDB persistence l ### Idempotent decorator -_**DynamoDB**_ - You can quickly start by initializing the `DynamoDBPersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. ???+ note @@ -135,15 +128,13 @@ You can quickly start by initializing the `DynamoDBPersistenceLayer` class and u --8<-- "examples/idempotency/src/getting_started_with_idempotency_payload.json" ``` -_**Redis**_ - After processing this request successfully, a second request containing the exact same payload above will now return the same response, ensuring our customer isn't charged twice. !!! question "New to idempotency concept? Please review our [Terminology](#terminology) section if you haven't yet." -You can initialize `RedisCachePersistenceLayer` class and use it with `idempotent` decorator on your lambda handler. +### Idempotent_function decorator -=== "app.py" +Similar to [idempotent decorator](#idempotent-decorator), you can use `idempotent_function` decorator for any synchronous Python function. When using `idempotent_function`, you must tell us which keyword parameter in your function signature has the data we should use via **`data_keyword_argument`**. 
@@ -539,6 +530,59 @@ sequenceDiagram Optional idempotency key +## Redis as persistent storage layer provider + +### Redis resources + +You need an existing Redis service before setting up Redis as persistent storage layer provider. You can also use Redis compatible services like [Amazon ElastiCache for Redis](https://aws.amazon.com/elasticache/redis/) or [Amazon MemoryDB for Redis](https://aws.amazon.com/memorydb/) as persistent storage layer provider. +???+ tip "No existing Redis service?" + If you don't have an existing Redis service, we recommend using [DynamoDB](#dynamodbpersistencelayer) as persistent storage layer provider. + +### VPC Access + +Your Lambda Function must be able to reach the Redis endpoint before using it for idempotency persistent storage layer. In most cases you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html) for your Lambda Function. Using a publicly accessible Redis is not recommended. + +???+ tip "Amazon ElastiCache/MemoryDB for Redis as persistent storage layer provider" + If you intend to use Amazon ElastiCache for Redis for idempotency persistent storage layer, you can also reference [This AWS Tutorial](https://docs.aws.amazon.com/lambda/latest/dg/services-elasticache-tutorial.html). + If you are using Amazon MemoryDB for Redis, reference [This AWS Tutorial](https://aws.amazon.com/blogs/database/access-amazon-memorydb-for-redis-from-aws-lambda/) for only VPC setup part. + +After VPC setup, you can follow the templates down below to setup Lambda functions with VPC internal subnet access. + +=== "AWS Serverless Application Model (SAM) example" + + ```yaml hl_lines="8-13" + --8<-- "examples/idempotency/templates/sam_redis_vpc.yaml" + ``` + + 1. Replace the Security Group ID and Subnet ID to match your Redis' VPC setting.
+ +### Idempotent decorator for Redis + +You can quickly start by initializing the `RedisCachePersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. Check out detailed example of `RedisCachePersistenceLayer` in [Persistence layers section](#redispersistencelayer) + +???+ warning "Passing in Redis Client" + We support passing in established Redis clients when initilizing `RedisPersistenceLayer`. However, this rely on Redis parameter `decode_responses=True` to decode all Redis response. Please make sure this parameter is set when establishing Redis client or `RedisPersistenceLayer` will raise a `IdempotencyRedisClientConfigError`. See example below + +=== "Use established Redis Client" + TODO + ```python hl_lines="4-7 10 24" + --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_client.py" + ``` + +=== "Use Redis Config Class" + TODO + ```python hl_lines="4-7 10 24" + --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_config.py" + ``` + +=== "Sample event" + + ```json + --8<-- "examples/idempotency/src/getting_started_with_idempotency_payload.json" + ``` + +For other use cases like `Idempotent function decorator` please reference the [DynamoDB section](#idempotent_function-decorator). You only need to substitute the `persistence_store` from `DynamoDBPersistenceLayer` to `RedisPersistenceLayer` and no other code changes are required. + ## Advanced ### Persistence layers @@ -567,56 +611,9 @@ When using DynamoDB as a persistence layer, you can alter the attribute names by | **sort_key_attr** | | | Sort key of the table (if table is configured with a sort key). | | **static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. Only used when **sort_key_attr** is set. | -#### RedisCachePersistenceLayer - -This persistence layer is built-in and you can use ElastiCache to store and see the keys. 
- -```python -from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer -persistence_layer = RedisCachePersistenceLayer( - static_pk_value: Optional[str] = None, - expiry_attr: str = "expiration", - in_progress_expiry_attr: str = "in_progress_expiration", - status_attr: str = "status", - data_attr: str = "data", - validation_key_attr: str = "validation", -) -``` - -When using ElastiCache for Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: - -| Parameter | Required | Default | Description | -| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | -| **static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. Only used when **sort_key_attr** is set. | -| **expiry_attr** | | `expiration` | Unix timestamp of when record expires | -| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | -| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | -| **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | -| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | - -#### RedisStandalone/RedisCluster - -```python -from aws_lambda_powertools.utilities.connections import RedisConnection +#### RedisPersistenceLayer -redis_connection = RedisConnection( - host="192.168.68.112", - port=6379, - username = "abc", - password="pass", - db_index=0, - url = None -).get_standalone_connection() -``` - -| Parameter | Required | Default | Description | -| --------------------------- | ------------------ | ------------------------------------ | 
-------------------------------------------------------------------------------------------------------- | -| **host** | | `localhost` | Name of the host to connect to Redis instance/cluster | -| **port** | | 6379 | Number of the port to connect to Redis instance/cluster | -| **username** | | `None` | Name of the username to connect to Redis instance/cluster in case of using ACL | -| **password** | | `None` | Passwod to connect to Redis instance/cluster | -| **db_index** | | 0. | Index of Redis database | -| **url** | | `None` | Redis client object configured from the given URL. | +TODO, check github ### Customizing the default behavior @@ -917,6 +914,34 @@ This means it is possible to pass a mocked Table resource, or stub various metho --8<-- "examples/idempotency/tests/app_test_io_operations.py" ``` +### Testing with Redis + +To test locally, You can either utilize [fakeredis-py](https://github.com/cunla/fakeredis-py) or check out the [MockRedis](https://github.com/aws-powertools/powertools-lambda-python/blob/ba6532a1c73e20fdaee88c5795fd40e978553e14/tests/functional/idempotency/persistence/test_redis_layer.py#L34-L66) Class we used in our test. + +=== "test_with_mock_redis.py" + + ```python hl_lines="4 5 24 25 27" + --8<-- "examples/idempotency/tests/test_with_mock_redis.py" + ``` + +If you want to actually setup a Real Redis client for integration test, reference the code below + +=== "test_with_real_redis.py" + + ```python hl_lines="4 5 24 25 27" + --8<-- "examples/idempotency/tests/test_with_real_redis.py" + ``` + +=== "Makefile" + + ```bash + test-idempotency-redis: # (1)! + docker run --name test-idempotency-redis -d -p 63005:6379 redis + pytest test_with_real_redis.py;docker stop test-idempotency-redis;docker rm test-idempotency-redis + ``` + + 1. 
Use this script to setup a temp Redis docker and auto remove it upon completion + ## Extra resources If you're interested in a deep dive on how Amazon uses idempotency when building our APIs, check out diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py new file mode 100644 index 00000000000..0754f42c6b3 --- /dev/null +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py @@ -0,0 +1,38 @@ +from dataclasses import dataclass, field +from uuid import uuid4 + +from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, + idempotent, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + + +@dataclass +class Payment: + user_id: str + product_id: str + payment_id: str = field(default_factory=lambda: f"{uuid4()}") + + +class PaymentError(Exception): + ... 
+ + +@idempotent(persistence_store=persistence_layer) +def lambda_handler(event: dict, context: LambdaContext): + try: + payment: Payment = create_subscription_payment(event) + return { + "payment_id": payment.payment_id, + "message": "success", + "statusCode": 200, + } + except Exception as exc: + raise PaymentError(f"Error creating payment {str(exc)}") + + +def create_subscription_payment(event: dict) -> Payment: + return Payment(**event) diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py new file mode 100644 index 00000000000..0754f42c6b3 --- /dev/null +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py @@ -0,0 +1,38 @@ +from dataclasses import dataclass, field +from uuid import uuid4 + +from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, + idempotent, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + + +@dataclass +class Payment: + user_id: str + product_id: str + payment_id: str = field(default_factory=lambda: f"{uuid4()}") + + +class PaymentError(Exception): + ... 
+ + +@idempotent(persistence_store=persistence_layer) +def lambda_handler(event: dict, context: LambdaContext): + try: + payment: Payment = create_subscription_payment(event) + return { + "payment_id": payment.payment_id, + "message": "success", + "statusCode": 200, + } + except Exception as exc: + raise PaymentError(f"Error creating payment {str(exc)}") + + +def create_subscription_payment(event: dict) -> Payment: + return Payment(**event) diff --git a/examples/idempotency/templates/sam_redis_vpc.yaml b/examples/idempotency/templates/sam_redis_vpc.yaml new file mode 100644 index 00000000000..517ca3eeeb8 --- /dev/null +++ b/examples/idempotency/templates/sam_redis_vpc.yaml @@ -0,0 +1,13 @@ +Transform: AWS::Serverless-2016-10-31 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Runtime: python3.11 + Handler: app.py + VpcConfig: # (1)! + SecurityGroupIds: + - sg-{your_sg_id} + SubnetIds: + - subnet-{your_subnet_id_1} + - subnet-{your_subnet_id_2} diff --git a/examples/idempotency/tests/test_with_mock_redis.py b/examples/idempotency/tests/test_with_mock_redis.py new file mode 100644 index 00000000000..3b02b6eb94c --- /dev/null +++ b/examples/idempotency/tests/test_with_mock_redis.py @@ -0,0 +1,81 @@ +import time as t +from dataclasses import dataclass + +import pytest + +from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + idempotent, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 1000 + + return LambdaContext() + + +# Mock redis class that includes all operations we used in Idempotency +class MockRedis: + def __init__(self, 
decode_responses, cache: dict = None, **kwargs): + self.cache = cache or {} + self.expire_dict = {} + self.decode_responses = decode_responses + self.acl = {} + self.username = "" + + def hset(self, name, mapping): + self.expire_dict.pop(name, {}) + self.cache[name] = mapping + + def from_url(self, url: str): + pass + + def expire(self, name, time): + self.expire_dict[name] = t.time() + time + + # return {} if no match + def hgetall(self, name): + if self.expire_dict.get(name, t.time() + 1) < t.time(): + self.cache.pop(name, {}) + return self.cache.get(name, {}) + + def get_connection_kwargs(self): + return {"decode_responses": self.decode_responses} + + def auth(self, username, **kwargs): + self.username = username + + def delete(self, name): + self.cache.pop(name, {}) + + +def test_idempotent_lambda(lambda_context): + # Init the Mock redis client + redis_client = MockRedis(decode_responses=True) + # Establish persistence layer using the mock redis client + persistence_layer = RedisCachePersistenceLayer(connection=redis_client) + + # setup idempotent with redis persistence layer + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event: dict, context: LambdaContext): + print("expensive operation") + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } + + # Inovke the sim lambda handler + result = lambda_handler({"testkey": "testvalue"}, lambda_context) + assert result["payment_id"] == 12345 diff --git a/examples/idempotency/tests/test_with_real_redis.py b/examples/idempotency/tests/test_with_real_redis.py new file mode 100644 index 00000000000..c4b707cda0a --- /dev/null +++ b/examples/idempotency/tests/test_with_real_redis.py @@ -0,0 +1,56 @@ +from dataclasses import dataclass + +import pytest +import redis + +from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + idempotent, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@pytest.fixture +def 
lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 1000 + + return LambdaContext() + + +@pytest.fixture +def persistence_store_standalone_redis(): + # you will need to handle yourself the connection to pass again the password + # and avoid AuthenticationError at redis queries + redis_client = redis.Redis( + host="localhost", + port="63005", + decode_responses=True, + ) + return RedisCachePersistenceLayer(connection=redis_client) + + +def test_idempotent_lambda(lambda_context, persistence_store_standalone_redis): + # Establish persistence layer using the mock redis client + persistence_layer = persistence_store_standalone_redis + + # setup idempotent with redis persistence layer + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event: dict, context: LambdaContext): + print("expensive operation") + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } + + # Inovke the sim lambda handler + result = lambda_handler({"testkey": "testvalue"}, lambda_context) + assert result["payment_id"] == 12345 From 9775eaa18423f90ef02b98a729e149d03f930b46 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 27 Sep 2023 21:55:33 +0000 Subject: [PATCH 26/81] add docs, test. 
Removed Connection --- .../utilities/connections/__init__.py | 3 - .../utilities/connections/base_sync.py | 7 - .../utilities/connections/exceptions.py | 4 - .../utilities/connections/redis.py | 104 --------- .../utilities/idempotency/__init__.py | 2 + .../utilities/idempotency/exceptions.py | 6 + .../idempotency/persistence/redis.py | 205 ++++++++++++++++-- docs/utilities/idempotency.md | 36 ++- .../src/customize_persistence_layer_redis.py | 21 ++ ...g_started_with_idempotency_redis_client.py | 12 +- ...g_started_with_idempotency_redis_config.py | 7 +- examples/idempotency/tests/mock_redis.py | 37 ++++ .../idempotency/tests/test_with_mock_redis.py | 39 +--- .../idempotency/tests/test_with_real_redis.py | 9 +- .../persistence/test_redis_layer.py | 18 +- .../idempotency/test_idempotency_redis.py | 19 +- 16 files changed, 337 insertions(+), 192 deletions(-) delete mode 100644 aws_lambda_powertools/utilities/connections/__init__.py delete mode 100644 aws_lambda_powertools/utilities/connections/base_sync.py delete mode 100644 aws_lambda_powertools/utilities/connections/exceptions.py delete mode 100644 aws_lambda_powertools/utilities/connections/redis.py create mode 100644 examples/idempotency/src/customize_persistence_layer_redis.py create mode 100644 examples/idempotency/tests/mock_redis.py diff --git a/aws_lambda_powertools/utilities/connections/__init__.py b/aws_lambda_powertools/utilities/connections/__init__.py deleted file mode 100644 index f517e30c09d..00000000000 --- a/aws_lambda_powertools/utilities/connections/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from aws_lambda_powertools.utilities.connections.redis import RedisConnection - -__all__ = ["RedisConnection"] diff --git a/aws_lambda_powertools/utilities/connections/base_sync.py b/aws_lambda_powertools/utilities/connections/base_sync.py deleted file mode 100644 index 29b0b162eae..00000000000 --- a/aws_lambda_powertools/utilities/connections/base_sync.py +++ /dev/null @@ -1,7 +0,0 @@ -from abc import ABC, 
abstractmethod - - -class BaseConnectionSync(ABC): - @abstractmethod - def _init_connection(self, **kwargs): - raise NotImplementedError() # pragma: no cover diff --git a/aws_lambda_powertools/utilities/connections/exceptions.py b/aws_lambda_powertools/utilities/connections/exceptions.py deleted file mode 100644 index b4426c2b142..00000000000 --- a/aws_lambda_powertools/utilities/connections/exceptions.py +++ /dev/null @@ -1,4 +0,0 @@ -class RedisConnectionError(Exception): - """ - Redis connection error - """ diff --git a/aws_lambda_powertools/utilities/connections/redis.py b/aws_lambda_powertools/utilities/connections/redis.py deleted file mode 100644 index 1381fa3afda..00000000000 --- a/aws_lambda_powertools/utilities/connections/redis.py +++ /dev/null @@ -1,104 +0,0 @@ -import logging -from typing import Literal, Optional, Type, Union - -try: - import redis # type:ignore -except ImportError: - redis = None - -from .base_sync import BaseConnectionSync -from .exceptions import RedisConnectionError - -logger = logging.getLogger(__name__) - - -class RedisConnection(BaseConnectionSync): - def __init__( - self, - host: Optional[str] = None, - port: Optional[int] = None, - username: Optional[str] = None, - password: Optional[str] = None, - db_index: Optional[int] = None, - url: Optional[str] = None, - mode: Optional[Literal["standalone", "cluster"]] = "standalone", - **extra_options, - ) -> None: - """ - Initialize Redis connection which will be used in redis persistence_store to support idempotency - - Parameters - ---------- - host: str, optional - redis host - port: int, optional - redis port - username: str, optional - redis username - password: str, optional - redis password - db_index: str, optional - redis db index - mode: str, Literal["standalone","cluster"] - set redis client mode, choose from standalone/cluster - url: str, optional - redis connection string, using url will override the host/port in the previous parameters - extra_options: **kwargs, optional 
- extra kwargs to pass directly into redis client - """ - self.extra_options: dict = {} - - self.url = url - self.host = host - self.port = port - self.username = username - self.password = password - self.db_index = db_index - self.mode = mode - self.extra_options.update(**extra_options) - self._cluster_connection = None - self._standalone_connection = None - - def _init_connection(self, client: Type[Union[redis.Redis, redis.cluster.RedisCluster]]): - logger.info(f"Trying to connect to Redis: {self.host}") - - try: - if self.url: - logger.debug(f"Using URL format to connect to Redis: {self.host}") - return client.from_url(url=self.url) - else: - logger.debug(f"Using other parameters to connect to Redis: {self.host}") - return client( - host=self.host, - port=self.port, - username=self.username, - password=self.password, - db=self.db_index, - decode_responses=True, - **self.extra_options, - ) - except redis.exceptions.ConnectionError as exc: - logger.debug(f"Cannot connect in Redis: {self.host}") - raise RedisConnectionError("Could not to connect to Redis", exc) from exc - - # simplified to use different func to get each connection. 
- def get_connection(self) -> Type[Union[redis.Redis, redis.cluster.RedisCluster]]: - """ - return a standalone redis client based on class's init parameter - - Returns - ------- - Client: - Union[redis.Redis, redis.cluster.RedisCluster] - """ - if self.mode == "standalone": - if self._standalone_connection: - return self._standalone_connection - self._standalone_connection = self._init_connection(client=redis.Redis) - return self._standalone_connection - if self.mode == "cluster": - if self._cluster_connection: - return self._cluster_connection - self._cluster_connection = self._init_connection(client=redis.cluster.RedisCluster) - return self._cluster_connection - raise RedisConnectionError("Redis connection mode not supported yet:", self.mode) diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py index 296b641df2b..329c1e621d5 100644 --- a/aws_lambda_powertools/utilities/idempotency/__init__.py +++ b/aws_lambda_powertools/utilities/idempotency/__init__.py @@ -12,6 +12,7 @@ # import RedisCachePersistenceLayer here mean we will need redis as a required lib? Do we want to make it optional? 
from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( RedisCachePersistenceLayer, + RedisConfig, ) from .idempotency import IdempotencyConfig, idempotent, idempotent_function @@ -23,4 +24,5 @@ "idempotent_function", "IdempotencyConfig", "RedisCachePersistenceLayer", + "RedisConfig", ) diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index 43f2d1cd1f3..a111df687ea 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -89,3 +89,9 @@ class IdempotencyRedisClientConfigError(BaseError): """ The Redis connection passed in has unsupported config """ + + +class IdempotencyRedisConnectionError(BaseError): + """ + Redis connection error + """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 1e301c007d5..9dba8b3ebe8 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,11 +1,12 @@ import datetime import logging -from typing import Any, Dict, Union +from dataclasses import dataclass +from typing import Any, Dict, Literal, Optional, Union try: - import redis # type:ignore + import redis except ImportError: - redis = None + redis = None # type:ignore import redis @@ -15,6 +16,7 @@ IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, IdempotencyRedisClientConfigError, + IdempotencyRedisConnectionError, ) from aws_lambda_powertools.utilities.idempotency.persistence.base import ( STATUS_CONSTANTS, @@ -24,10 +26,114 @@ logger = logging.getLogger(__name__) +@dataclass(repr=False, order=False) +class RedisConfig: + host: Optional[str] + port: Optional[int] + username: Optional[str] + password: Optional[str] + db_index: Optional[int] + url: Optional[str] + mode: 
Optional[Literal["standalone", "cluster"]] = "standalone" + + def __init__( + self, + host: Optional[str] = None, + port: Optional[int] = None, + username: Optional[str] = None, + password: Optional[str] = None, + db_index: Optional[int] = None, + url: Optional[str] = None, + mode: Optional[Literal["standalone", "cluster"]] = "standalone", + **extra_options, + ) -> None: + """ + Initialize Redis connection which will be used in redis persistence_store to support idempotency + + Parameters + ---------- + host: str, optional + redis host + port: int, optional + redis port + username: str, optional + redis username + password: str, optional + redis password + db_index: str, optional + redis db index + mode: str, Literal["standalone","cluster"] + set redis client mode, choose from standalone/cluster + url: str, optional + redis connection string, using url will override the host/port in the previous parameters + extra_options: **kwargs, optional + extra kwargs to pass directly into redis client + + Examples + -------- + + ```python + from dataclasses import dataclass, field + from uuid import uuid4 + + from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + RedisConfig, + idempotent, + ) + from aws_lambda_powertools.utilities.typing import LambdaContext + + config = RedisConfig(host="localhost", port=6379, mode="standalone") + + persistence_layer = RedisCachePersistenceLayer(config=config) + + + @dataclass + class Payment: + user_id: str + product_id: str + payment_id: str = field(default_factory=lambda: f"{uuid4()}") + + + class PaymentError(Exception): + ... 
+ + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event: dict, context: LambdaContext): + try: + payment: Payment = create_subscription_payment(event) + return { + "payment_id": payment.payment_id, + "message": "success", + "statusCode": 200, + } + except Exception as exc: + raise PaymentError(f"Error creating payment {str(exc)}") + + + def create_subscription_payment(event: dict) -> Payment: + return Payment(**event) + + ``` + """ + self.extra_options: dict = {} + + self.url = url + self.host = host + self.port = port + self.username = username + self.password = password + self.db_index = db_index + self.mode = mode + self.extra_options.update(**extra_options) + + class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - connection: Union[redis.Redis, redis.cluster.RedisCluster], + client: Optional[Union[redis.Redis, redis.cluster.RedisCluster]] = None, + config: Optional[RedisConfig] = None, in_progress_expiry_attr: str = "in_progress_expiration", status_attr: str = "status", data_attr: str = "data", @@ -37,6 +143,12 @@ def __init__( Initialize the Redis Persistence Layer Parameters ---------- + client: Union[redis.Redis, redis.cluster.RedisCluster], optional + You can bring your established Redis client. + If client is provided, config will be ignored + config: RedisConfig, optional + If client is not provided, config will be parsed and a corresponding + Redis client will be created. 
in_progress_expiry_attr: str, optional Redis hash attribute name for in-progress expiry timestamp, by default "in_progress_expiration" status_attr: str, optional @@ -45,16 +157,52 @@ def __init__( Redis hash attribute name for response data, by default "data" validation_key_attr: str, optional Redis hash attribute name for hashed representation of the parts of the event used for validation + + Examples + -------- + + ```python + from redis import Redis + from aws_lambda_powertools.utilities.data_class import( + RedisCachePersistenceLayer, + ) + from aws_lambda_powertools.utilities.idempotency.idempotency import ( + idempotent, + ) + + client = redis.Redis( + host="localhost", + port="6379", + decode_responses=True, + ) + persistence_layer = RedisCachePersistenceLayer(client=client) + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event: dict, context: LambdaContext): + print("expensive operation") + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } + ``` """ - # Initialize connection with Redis + # Initialize Redis client with Redis config if no client is passed in + if client is None: + if config is None: + raise IdempotencyRedisClientConfigError("Both client and config param are empty") - if not hasattr(connection, "get_connection_kwargs"): + self.config = config + self.client = self._init_client() + else: + self.client = client + + if not hasattr(self.client, "get_connection_kwargs"): raise IdempotencyRedisClientConfigError - if not connection.get_connection_kwargs().get("decode_responses", False): + if not self.client.get_connection_kwargs().get("decode_responses", False): # Requires decode_responses to be true raise IdempotencyRedisClientConfigError - self.connection = connection self.in_progress_expiry_attr = in_progress_expiry_attr self.status_attr = status_attr @@ -62,6 +210,35 @@ def __init__( self.validation_key_attr = validation_key_attr super(RedisCachePersistenceLayer, self).__init__() + def 
_init_client(self) -> Union[redis.Redis, redis.cluster.RedisCluster]: + client: Union[redis.Redis, redis.cluster.RedisCluster] + logger.info(f"Trying to connect to Redis: {self.config.host}") + if self.config.mode == "standalone": + client = redis.Redis # type: ignore + elif self.config.mode == "cluster": + client = redis.cluster.RedisCluster # type: ignore + else: + raise IdempotencyRedisClientConfigError(f"Mode {self.config.mode} not supported") + + try: + if self.config.url: + logger.debug(f"Using URL format to connect to Redis: {self.config.host}") + return client.from_url(url=self.config.url) + else: + logger.debug(f"Using other parameters to connect to Redis: {self.config.host}") + return client( # type: ignore + host=self.config.host, + port=self.config.port, + username=self.config.username, + password=self.config.password, + db=self.config.db_index, + decode_responses=True, + **self.config.extra_options, + ) + except redis.exceptions.ConnectionError as exc: + logger.debug(f"Cannot connect in Redis: {self.config.host}") + raise IdempotencyRedisConnectionError("Could not to connect to Redis", exc) from exc + def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> DataRecord: in_progress_expiry_timestamp = item.get(self.in_progress_expiry_attr) if isinstance(in_progress_expiry_timestamp, str): @@ -76,7 +253,7 @@ def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> Da def _get_record(self, idempotency_key) -> DataRecord: # See: https://redis.io/commands/hgetall/ - response = self.connection.hgetall(idempotency_key) + response = self.client.hgetall(idempotency_key) try: item = response @@ -115,7 +292,7 @@ def _put_record(self, data_record: DataRecord) -> None: # The idempotency key does not exist: # - first time that this invocation key is used # - previous invocation with the same key was deleted due to TTL - idempotency_record = self.connection.hgetall(data_record.idempotency_key) + idempotency_record = 
self.client.hgetall(data_record.idempotency_key) print(idempotency_record) if len(idempotency_record) > 0: # record already exists. @@ -132,11 +309,11 @@ def _put_record(self, data_record: DataRecord) -> None: raise IdempotencyItemAlreadyExistsError logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") - self.connection.hset(**item) + self.client.hset(**item) # hset type must set expiration after adding the record # Need to review this to get ttl in seconds # Q: should we replace self.expires_after_seconds with _get_expiry_timestamp? more consistent - self.connection.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) + self.client.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) except redis.exceptions.RedisError: raise redis.exceptions.RedisError except redis.exceptions.RedisClusterException: @@ -156,9 +333,9 @@ def _update_record(self, data_record: DataRecord) -> None: }, } logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") - self.connection.hset(**item) + self.client.hset(**item) def _delete_record(self, data_record: DataRecord) -> None: logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") # See: https://redis.io/commands/del/ - self.connection.delete(data_record.idempotency_key) + self.client.delete(data_record.idempotency_key) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index fc49d208a1e..9c534bdaddb 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -564,14 +564,14 @@ You can quickly start by initializing the `RedisCachePersistenceLayer` class and We support passing in established Redis clients when initilizing `RedisPersistenceLayer`. However, this rely on Redis parameter `decode_responses=True` to decode all Redis response. 
Please make sure this parameter is set when establishing the Redis client or `RedisPersistenceLayer` will raise an `IdempotencyRedisClientConfigError`. See the example below === "Use established Redis Client" - TODO - ```python hl_lines="4-7 10 24" + ```python hl_lines="4 7 12-16 18 32" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_client.py" ``` + 1. Notice we rely on this field to be true + === "Use Redis Config Class" - TODO - ```python hl_lines="4-7 10 24" + ```python hl_lines="4-8 11 13 27" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_config.py" ``` @@ -613,7 +613,23 @@ When using DynamoDB as a persistence layer, you can alter the attribute names by #### RedisPersistenceLayer -TODO, check github +This persistence layer is built-in, and you can use an existing Redis service. We don't recommend using the Redis Persistence Layer if you don't have an existing Redis service. You can try [DynamoDBPersistenceLayer](#dynamodbpersistencelayer) instead. + +=== "Customizing RedisPersistenceLayer to suit your data structure" + + ```python hl_lines="10-16" + --8<-- "examples/idempotency/src/customize_persistence_layer_redis.py" + ``` + +When using Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: + +| Parameter | Required | Default | Description | +| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | +| **config** | | `None` | You can pass in the configs to establish the corresponding Redis client | +| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case the invocation times out) | +| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | +| **data_attr** | | `data` | Stores results of
successfully executed Lambda handlers | +| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | ### Customizing the default behavior @@ -920,15 +936,21 @@ To test locally, You can either utilize [fakeredis-py](https://github.com/cunla/ === "test_with_mock_redis.py" - ```python hl_lines="4 5 24 25 27" + ```python hl_lines="2 3 29 31" --8<-- "examples/idempotency/tests/test_with_mock_redis.py" ``` +=== "mock_redis.py" + + ```python + --8<-- "examples/idempotency/tests/mock_redis.py" + ``` + If you want to actually setup a Real Redis client for integration test, reference the code below === "test_with_real_redis.py" - ```python hl_lines="4 5 24 25 27" + ```python hl_lines="3 4 29 38" --8<-- "examples/idempotency/tests/test_with_real_redis.py" ``` diff --git a/examples/idempotency/src/customize_persistence_layer_redis.py b/examples/idempotency/src/customize_persistence_layer_redis.py new file mode 100644 index 00000000000..20f46fc9758 --- /dev/null +++ b/examples/idempotency/src/customize_persistence_layer_redis.py @@ -0,0 +1,21 @@ +from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + RedisConfig, + idempotent, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +config = RedisConfig(host="localhost", port=6379, mode="standalone") + +persistence_layer = RedisCachePersistenceLayer( + config=config, + in_progress_expiry_attr="in_progress_expiration", + status_attr="status", + data_attr="data", + validation_key_attr="validation", +) + + +@idempotent(persistence_store=persistence_layer) +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return event diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py index 0754f42c6b3..dd75e1cb9a7 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py +++ 
b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py @@ -1,13 +1,21 @@ from dataclasses import dataclass, field from uuid import uuid4 +from redis import Redis + from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, + RedisCachePersistenceLayer, idempotent, ) from aws_lambda_powertools.utilities.typing import LambdaContext -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") +client = Redis( + host="localhost", + port=6379, + decode_responses=True, # (1)! +) + +persistence_layer = RedisCachePersistenceLayer(client=client) @dataclass diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py index 0754f42c6b3..75e922735ea 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py @@ -2,12 +2,15 @@ from uuid import uuid4 from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, + RedisCachePersistenceLayer, + RedisConfig, idempotent, ) from aws_lambda_powertools.utilities.typing import LambdaContext -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") +config = RedisConfig(host="localhost", port=6379, mode="standalone") + +persistence_layer = RedisCachePersistenceLayer(config=config) @dataclass diff --git a/examples/idempotency/tests/mock_redis.py b/examples/idempotency/tests/mock_redis.py new file mode 100644 index 00000000000..89b68e4873c --- /dev/null +++ b/examples/idempotency/tests/mock_redis.py @@ -0,0 +1,37 @@ +import time as t +from typing import Dict + + +# Mock redis class that includes all operations we used in Idempotency +class MockRedis: + def __init__(self, decode_responses, cache: Dict, **kwargs): + self.cache = cache or {} + self.expire_dict: Dict = {} + self.decode_responses = decode_responses + self.acl: Dict = 
{} + self.username = "" + + def hset(self, name, mapping): + self.expire_dict.pop(name, {}) + self.cache[name] = mapping + + def from_url(self, url: str): + pass + + def expire(self, name, time): + self.expire_dict[name] = t.time() + time + + # return {} if no match + def hgetall(self, name): + if self.expire_dict.get(name, t.time() + 1) < t.time(): + self.cache.pop(name, {}) + return self.cache.get(name, {}) + + def get_connection_kwargs(self): + return {"decode_responses": self.decode_responses} + + def auth(self, username, **kwargs): + self.username = username + + def delete(self, name): + self.cache.pop(name, {}) diff --git a/examples/idempotency/tests/test_with_mock_redis.py b/examples/idempotency/tests/test_with_mock_redis.py index 3b02b6eb94c..e2b2e9a3f6b 100644 --- a/examples/idempotency/tests/test_with_mock_redis.py +++ b/examples/idempotency/tests/test_with_mock_redis.py @@ -1,7 +1,7 @@ -import time as t from dataclasses import dataclass import pytest +from mock_redis import MockRedis from aws_lambda_powertools.utilities.idempotency import ( RedisCachePersistenceLayer, @@ -25,46 +25,11 @@ def get_remaining_time_in_millis(self) -> int: return LambdaContext() -# Mock redis class that includes all operations we used in Idempotency -class MockRedis: - def __init__(self, decode_responses, cache: dict = None, **kwargs): - self.cache = cache or {} - self.expire_dict = {} - self.decode_responses = decode_responses - self.acl = {} - self.username = "" - - def hset(self, name, mapping): - self.expire_dict.pop(name, {}) - self.cache[name] = mapping - - def from_url(self, url: str): - pass - - def expire(self, name, time): - self.expire_dict[name] = t.time() + time - - # return {} if no match - def hgetall(self, name): - if self.expire_dict.get(name, t.time() + 1) < t.time(): - self.cache.pop(name, {}) - return self.cache.get(name, {}) - - def get_connection_kwargs(self): - return {"decode_responses": self.decode_responses} - - def auth(self, username, **kwargs): - 
self.username = username - - def delete(self, name): - self.cache.pop(name, {}) - - def test_idempotent_lambda(lambda_context): # Init the Mock redis client redis_client = MockRedis(decode_responses=True) # Establish persistence layer using the mock redis client - persistence_layer = RedisCachePersistenceLayer(connection=redis_client) + persistence_layer = RedisCachePersistenceLayer(client=redis_client) # setup idempotent with redis persistence layer @idempotent(persistence_store=persistence_layer) diff --git a/examples/idempotency/tests/test_with_real_redis.py b/examples/idempotency/tests/test_with_real_redis.py index c4b707cda0a..13c083aa1c2 100644 --- a/examples/idempotency/tests/test_with_real_redis.py +++ b/examples/idempotency/tests/test_with_real_redis.py @@ -27,18 +27,19 @@ def get_remaining_time_in_millis(self) -> int: @pytest.fixture def persistence_store_standalone_redis(): - # you will need to handle yourself the connection to pass again the password - # and avoid AuthenticationError at redis queries + # init a Real Redis client and connect to the Port set in the Makefile redis_client = redis.Redis( host="localhost", port="63005", decode_responses=True, ) - return RedisCachePersistenceLayer(connection=redis_client) + + # return a persistence layer with real Redis + return RedisCachePersistenceLayer(client=redis_client) def test_idempotent_lambda(lambda_context, persistence_store_standalone_redis): - # Establish persistence layer using the mock redis client + # Establish persistence layer using the real redis client persistence_layer = persistence_store_standalone_redis # setup idempotent with redis persistence layer diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index 366fded97f1..f9d448b9fcc 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -3,7 +3,10 @@ import pytest 
-from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer +from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + RedisConfig, +) from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, @@ -75,7 +78,16 @@ def persistence_store_standalone_redis(): port="63005", decode_responses=True, ) - return RedisCachePersistenceLayer(connection=redis_client) + return RedisCachePersistenceLayer(client=redis_client) + + +@pytest.fixture +def redis_config(): + return RedisConfig(host="localhost", port="63005", mode="standalone", ssl=False) + + +def test_idempotent_create_redis_client_with_config(redis_config): + RedisCachePersistenceLayer(config=redis_config) # test basic @@ -113,7 +125,7 @@ def test_idempotent_lambda_redis_no_decode(): ) # decode_responses=False will not be accepted with pytest.raises(IdempotencyRedisClientConfigError): - RedisCachePersistenceLayer(connection=redis_client) + RedisCachePersistenceLayer(client=redis_client) def test_idempotent_function_and_lambda_handler_redis_cache( diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 9714665b8b8..3d9b6477e4b 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -3,7 +3,7 @@ import pytest import redis -from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer +from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer, RedisConfig from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, @@ -47,7 +47,7 @@ def persistence_store_sentinel_redis(): ) redis_client.expire() - return RedisCachePersistenceLayer(connection=redis_client) + return RedisCachePersistenceLayer(client=redis_client) 
@pytest.fixture @@ -59,7 +59,16 @@ def persistence_store_standalone_redis(): port="63005", decode_responses=True, ) - return RedisCachePersistenceLayer(connection=redis_client) + return RedisCachePersistenceLayer(client=redis_client) + + +@pytest.fixture +def redis_config(): + return RedisConfig(host="localhost", port=63005, mode="standalone", ssl=False) + + +def test_idempotent_create_redis_client_with_config(redis_config): + RedisCachePersistenceLayer(config=redis_config) # test basic @@ -97,7 +106,7 @@ def test_idempotent_lambda_redis_no_decode(): ) # decode_responses=False will not be accepted with pytest.raises(IdempotencyRedisClientConfigError): - RedisCachePersistenceLayer(connection=redis_client) + RedisCachePersistenceLayer(client=redis_client) def test_idempotent_function_and_lambda_handler_redis_cache( @@ -206,7 +215,7 @@ def test_idempotent_lambda_redis_credential(lambda_context): redis_client.acl_setuser(username=usr, enabled=True, passwords="+" + pwd, keys="*", commands=["+hgetall", "-set"]) redis_client.auth(password=pwd, username=usr) - @idempotent(persistence_store=RedisCachePersistenceLayer(connection=redis_client)) + @idempotent(persistence_store=RedisCachePersistenceLayer(client=redis_client)) def lambda_handler(event, _): return True From e6300a88e19fdd4018c0cabff1baface6192c3ef Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 27 Sep 2023 23:04:35 +0000 Subject: [PATCH 27/81] fix test on delete --- .../idempotency/persistence/redis.py | 2 +- .../idempotency/test_idempotency_redis.py | 36 +++++++++++++++---- 2 files changed, 31 insertions(+), 7 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 9dba8b3ebe8..2c2f9e273c5 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -83,7 +83,7 @@ def __init__( ) from 
aws_lambda_powertools.utilities.typing import LambdaContext - config = RedisConfig(host="localhost", port=6379, mode="standalone") + config = RedisConfig(host="localhost", port="6379", mode="standalone") persistence_layer = RedisCachePersistenceLayer(config=config) diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 3d9b6477e4b..d4d574c6eef 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -56,7 +56,7 @@ def persistence_store_standalone_redis(): # and avoid AuthenticationError at redis queries redis_client = redis.Redis( host="localhost", - port="63005", + port=63005, decode_responses=True, ) return RedisCachePersistenceLayer(client=redis_client) @@ -67,8 +67,26 @@ def redis_config(): return RedisConfig(host="localhost", port=63005, mode="standalone", ssl=False) -def test_idempotent_create_redis_client_with_config(redis_config): - RedisCachePersistenceLayer(config=redis_config) +def test_idempotent_create_redis_client_with_config(redis_config, lambda_context): + persistence_layer = RedisCachePersistenceLayer(config=redis_config) + mock_event = {"data": "value"} + expected_result = {"message": "Foo"} + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return expected_result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return expected_result + + # WHEN calling the function + fn_result = record_handler(record=mock_event) + # WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result # test basic @@ -191,16 +209,22 @@ def test_idempotent_lambda_redis_delete( result = {"message": "Foo"}
@idempotent(persistence_store=persistence_layer) - def lambda_handler(event, _): + def lambda_handler(event, context): return result + # first run is just to populate function infos for deletion. + # delete_record won't work if the function was not run yet. bug maybe? + handler_result = lambda_handler(mock_event, lambda_context) + # delete what's might be dirty data + persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) + # run second time to ensure clean result handler_result = lambda_handler(mock_event, lambda_context) assert handler_result == result - - # delete the idem and handler should output new result persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) + # delete the idem and handler should output new result result = {"message": "Foo2"} handler_result2 = lambda_handler(mock_event, lambda_context) + assert handler_result2 == result From fcd200902cf9503dbb2d7b5617999883d67904a0 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Thu, 28 Sep 2023 17:53:36 +0000 Subject: [PATCH 28/81] add types-redis to pyproj --- poetry.lock | 35 ++++++++++++++++++++++++++++++++--- pyproject.toml | 5 +++-- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5b82c1a30a3..66219ecb226 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2932,6 +2932,20 @@ files = [ doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] test = ["mypy", "pytest", "typing-extensions"] +[[package]] +name = "types-pyopenssl" +version = "23.2.0.2" +description = "Typing stubs for pyOpenSSL" +optional = true +python-versions = "*" +files = [ + {file = "types-pyOpenSSL-23.2.0.2.tar.gz", hash = "sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d"}, + {file = "types_pyOpenSSL-23.2.0.2-py3-none-any.whl", hash = "sha256:19536aa3debfbe25a918cf0d898e9f5fbbe6f3594a429da7914bf331deb1b342"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" + [[package]] name = "types-python-dateutil" version = 
"2.8.19.14" @@ -2943,6 +2957,21 @@ files = [ {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, ] +[[package]] +name = "types-redis" +version = "4.6.0.7" +description = "Typing stubs for redis" +optional = true +python-versions = "*" +files = [ + {file = "types-redis-4.6.0.7.tar.gz", hash = "sha256:28c4153ddb5c9d4f10def44a2454673c361d2d5fc3cd867cf3bb1520f3f59a38"}, + {file = "types_redis-4.6.0.7-py3-none-any.whl", hash = "sha256:05b1bf92879b25df20433fa1af07784a0d7928c616dc2ebf9087618db77ccbd0"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + [[package]] name = "types-requests" version = "2.31.0.6" @@ -3175,16 +3204,16 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic", "redis"] +all = ["aws-xray-sdk", "fastjsonschema", "pydantic", "redis", "types-redis"] aws-sdk = ["boto3"] datadog = ["datadog-lambda"] datamasking-aws-sdk = ["aws-encryption-sdk"] parser = ["pydantic"] -redis = ["redis"] +redis = ["redis", "types-redis"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "a6bf1f13542f5ac50a1e96945172e1ea8fe898b59bc0dfbec8a2771f8169f434" +content-hash = "948387121798b46e130e564e62596b1484d4cd409a055c0b80849b5cc92074c9" diff --git a/pyproject.toml b/pyproject.toml index 426b9fb513a..4675dc6b4bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } redis = 
{version = "^4.4.0", optional = true} +types-redis = {version = "^4.4.0", optional = true} typing-extensions = "^4.6.2" datadog-lambda = { version = "^4.77.0", optional = true } aws-encryption-sdk = { version = "^3.1.1", optional = true } @@ -95,8 +96,8 @@ datadog-lambda = "^4.77.0" parser = ["pydantic"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] -redis = ["redis"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema","redis"] +redis = ["redis","types-redis"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema","redis","types-redis"] # allow customers to run code locally without emulators (SAM CLI, etc.) aws-sdk = ["boto3"] datadog = ["datadog-lambda"] From 5414d5e888877d5455afaba6a28f2a7293c0fe01 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Thu, 28 Sep 2023 17:59:38 +0000 Subject: [PATCH 29/81] fix Literal for 3.7 --- .../utilities/idempotency/persistence/redis.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 2c2f9e273c5..741bea44450 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,7 +1,9 @@ import datetime import logging from dataclasses import dataclass -from typing import Any, Dict, Literal, Optional, Union +from typing import Any, Dict, Optional, Union + +from typing_extensions import Literal try: import redis From 9cb5f3f018ace8c3f38b141cf881187c660d859c Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Thu, 28 Sep 2023 18:09:59 +0000 Subject: [PATCH 30/81] add comment for delete --- .../utilities/idempotency/persistence/redis.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 741bea44450..973745f8a8a 100644 --- 
a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -338,6 +338,8 @@ def _update_record(self, data_record: DataRecord) -> None: self.client.hset(**item) def _delete_record(self, data_record: DataRecord) -> None: + # This function only works when Lambda handler has already been invoked once + # maybe we should add some exception when this is called before Lambda handler logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") # See: https://redis.io/commands/del/ self.client.delete(data_record.idempotency_key) From 12b76c4f3c50bdda3105a5b68102128f63135c05 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 29 Sep 2023 17:52:15 +0000 Subject: [PATCH 31/81] add redis as dev dep --- poetry.lock | 12 ++++++------ pyproject.toml | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 66219ecb226..119b4b676ce 100644 --- a/poetry.lock +++ b/poetry.lock @@ -437,7 +437,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -optional = true +optional = false python-versions = "*" files = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, @@ -748,7 +748,7 @@ toml = ["tomli"] name = "cryptography" version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = true +optional = false python-versions = ">=3.7" files = [ {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, @@ -2146,7 +2146,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -optional = true +optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, @@ -2936,7 +2936,7 @@ test = ["mypy", "pytest", "typing-extensions"] name = "types-pyopenssl" version = "23.2.0.2" description = "Typing stubs for pyOpenSSL" -optional = true +optional = false python-versions = "*" files = [ {file = "types-pyOpenSSL-23.2.0.2.tar.gz", hash = "sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d"}, @@ -2961,7 +2961,7 @@ files = [ name = "types-redis" version = "4.6.0.7" description = "Typing stubs for redis" -optional = true +optional = false python-versions = "*" files = [ {file = "types-redis-4.6.0.7.tar.gz", hash = "sha256:28c4153ddb5c9d4f10def44a2454673c361d2d5fc3cd867cf3bb1520f3f59a38"}, @@ -3216,4 +3216,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "948387121798b46e130e564e62596b1484d4cd409a055c0b80849b5cc92074c9" +content-hash = "54b337d0e75ea0b49c3ce3e9d9f16ba0a08fa7c2e1ad5fbfffdd9aaaca86ea6d" diff --git a/pyproject.toml b/pyproject.toml index 4675dc6b4bb..3b8fb35d63f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,6 @@ fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } redis = {version = "^4.4.0", optional = true} -types-redis = {version = "^4.4.0", optional = true} typing-extensions = "^4.6.2" datadog-lambda = { version = "^4.77.0", optional = true } aws-encryption-sdk 
= { version = "^3.1.1", optional = true } @@ -96,8 +95,8 @@ datadog-lambda = "^4.77.0" parser = ["pydantic"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] -redis = ["redis","types-redis"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema","redis","types-redis"] +redis = ["redis"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema","redis"] # allow customers to run code locally without emulators (SAM CLI, etc.) aws-sdk = ["boto3"] datadog = ["datadog-lambda"] @@ -112,6 +111,7 @@ sentry-sdk = "^1.22.2" ruff = ">=0.0.272,<0.0.292" retry2 = "^0.9.5" pytest-socket = "^0.6.0" +types-redis = "^4.6.0.7" [tool.coverage.run] source = ["aws_lambda_powertools"] From 2a4060973dccc6d00bc98a9eff3433924c99ceb2 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 29 Sep 2023 17:52:47 +0000 Subject: [PATCH 32/81] fix poetry --- poetry.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index 119b4b676ce..b7c9f4a07e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -214,13 +214,13 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.75.0" +version = "1.76.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.75.0.tar.gz", hash = "sha256:18c83abcae594de084947befb9c80f689f8b99ece2d38729d27a9cea634da15c"}, - {file = "aws_sam_translator-1.75.0-py3-none-any.whl", hash = "sha256:02bad7636356438b439c8e0ef0195618e3b7b67b6dfbf675b1627d6fd84b2910"}, + {file = "aws-sam-translator-1.76.0.tar.gz", hash = "sha256:6d0d68071071bd54c11ec69346a5a52db22cc316bfa2cb6f827a36a9348bbf00"}, + {file = "aws_sam_translator-1.76.0-py3-none-any.whl", hash = "sha256:782ed1502d1a84a4912827da168edb133da9f8a5a7d5701cff7eede80ead1dca"}, ] [package.dependencies] @@ -336,17 +336,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = 
"1.28.56" +version = "1.28.57" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.56-py3-none-any.whl", hash = "sha256:f5fcb27cdbd08ca38d699f2d2e32d96d1d9fab3368c15c6bc326256612d2cfd7"}, - {file = "boto3-1.28.56.tar.gz", hash = "sha256:b927a7ed335d543c33c15fa63f1076f3fa8422959771c2187da74bc4395ab6e3"}, + {file = "boto3-1.28.57-py3-none-any.whl", hash = "sha256:5ddf24cf52c7fb6aaa332eaa08ae8c2afc8f2d1e8860680728533dd573904e32"}, + {file = "boto3-1.28.57.tar.gz", hash = "sha256:e2d2824ba6459b330d097e94039a9c4f96ae3f4bcdc731d620589ad79dcd16d3"}, ] [package.dependencies] -botocore = ">=1.31.56,<1.32.0" +botocore = ">=1.31.57,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.7.0,<0.8.0" @@ -355,13 +355,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.56" +version = "1.31.57" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.56-py3-none-any.whl", hash = "sha256:66c686e4eda7051ffcc9357d9075390c8ab2f95a2977669039618ee186fb533b"}, - {file = "botocore-1.31.56.tar.gz", hash = "sha256:70252cd8abc2fe9b791328e187620f5a3911545e2520486b01ecfad31f41b9cb"}, + {file = "botocore-1.31.57-py3-none-any.whl", hash = "sha256:af006248276ff8e19e3ec7214478f6257035eb40aed865e405486500471ae71b"}, + {file = "botocore-1.31.57.tar.gz", hash = "sha256:301436174635bec739b225b840fc365ca00e5c1a63e5b2a19ee679d204e01b78"}, ] [package.dependencies] @@ -3204,16 +3204,16 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic", "redis", "types-redis"] +all = ["aws-xray-sdk", 
"fastjsonschema", "pydantic", "redis"] aws-sdk = ["boto3"] datadog = ["datadog-lambda"] datamasking-aws-sdk = ["aws-encryption-sdk"] parser = ["pydantic"] -redis = ["redis", "types-redis"] +redis = ["redis"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "54b337d0e75ea0b49c3ce3e9d9f16ba0a08fa7c2e1ad5fbfffdd9aaaca86ea6d" +content-hash = "56b0ac2a1d61fe1b85c6ada47ca657477768bc9c393b3cf30ef20899b1e1febf" From 205c2014d02559670e7eeecdfc17fe3d7e9e4547 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 16 Oct 2023 20:56:01 +0100 Subject: [PATCH 33/81] Simplifying DX --- .../utilities/idempotency/__init__.py | 2 - .../idempotency/persistence/redis.py | 123 ++++++++---------- .../persistence/test_redis_layer.py | 6 - 3 files changed, 56 insertions(+), 75 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py index 329c1e621d5..296b641df2b 100644 --- a/aws_lambda_powertools/utilities/idempotency/__init__.py +++ b/aws_lambda_powertools/utilities/idempotency/__init__.py @@ -12,7 +12,6 @@ # import RedisCachePersistenceLayer here mean we will need redis as a required lib? Do we want to make it optional? 
from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( RedisCachePersistenceLayer, - RedisConfig, ) from .idempotency import IdempotencyConfig, idempotent, idempotent_function @@ -24,5 +23,4 @@ "idempotent_function", "IdempotencyConfig", "RedisCachePersistenceLayer", - "RedisConfig", ) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 973745f8a8a..61e6ebea38f 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,17 +1,11 @@ +from __future__ import annotations + import datetime import logging -from dataclasses import dataclass -from typing import Any, Dict, Optional, Union - -from typing_extensions import Literal - -try: - import redis -except ImportError: - redis = None # type:ignore - +from typing import Any, Dict import redis +from typing_extensions import Literal from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( @@ -28,26 +22,16 @@ logger = logging.getLogger(__name__) -@dataclass(repr=False, order=False) -class RedisConfig: - host: Optional[str] - port: Optional[int] - username: Optional[str] - password: Optional[str] - db_index: Optional[int] - url: Optional[str] - mode: Optional[Literal["standalone", "cluster"]] = "standalone" - +class RedisConnection: def __init__( self, - host: Optional[str] = None, - port: Optional[int] = None, - username: Optional[str] = None, - password: Optional[str] = None, - db_index: Optional[int] = None, - url: Optional[str] = None, - mode: Optional[Literal["standalone", "cluster"]] = "standalone", - **extra_options, + host: str | None, + username: str | None, + password: str | None, + url: str | None, + db_index: int = 0, + port: int = 6379, + mode: Literal["standalone", "cluster"] = "standalone", ) -> None: """ 
Initialize Redis connection which will be used in redis persistence_store to support idempotency @@ -119,8 +103,6 @@ def create_subscription_payment(event: dict) -> Payment: ``` """ - self.extra_options: dict = {} - self.url = url self.host = host self.port = port @@ -128,14 +110,46 @@ def create_subscription_payment(event: dict) -> Payment: self.password = password self.db_index = db_index self.mode = mode - self.extra_options.update(**extra_options) + + def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: + logger.info(f"Trying to connect to Redis: {self.host}") + if self.mode == "standalone": + client = redis.Redis + elif self.mode == "cluster": + client = redis.cluster.RedisCluster + else: + raise IdempotencyRedisClientConfigError(f"Mode {self.mode} not supported") + + try: + if self.url: + logger.debug(f"Using URL format to connect to Redis: {self.host}") + return client.from_url(url=self.url) + else: + logger.debug(f"Using other parameters to connect to Redis: {self.host}") + return client( # type: ignore + host=self.host, + port=self.port, + username=self.username, + password=self.password, + db=self.db_index, + decode_responses=True, + ) + except redis.exceptions.ConnectionError as exc: + logger.debug(f"Cannot connect in Redis: {self.host}") + raise IdempotencyRedisConnectionError("Could not to connect to Redis", exc) from exc class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - client: Optional[Union[redis.Redis, redis.cluster.RedisCluster]] = None, - config: Optional[RedisConfig] = None, + host: str | None = None, + username: str | None = None, + password: str | None = None, + url: str | None = None, + db_index: int = 0, + port: int = 6379, + mode: Literal["standalone", "cluster"] = "standalone", + client: redis.Redis | redis.cluster.RedisCluster | None = None, in_progress_expiry_attr: str = "in_progress_expiration", status_attr: str = "status", data_attr: str = "data", @@ -192,11 +206,15 @@ def 
lambda_handler(event: dict, context: LambdaContext): # Initialize Redis client with Redis config if no client is passed in if client is None: - if config is None: - raise IdempotencyRedisClientConfigError("Both client and config param are empty") - - self.config = config - self.client = self._init_client() + self.client = RedisConnection( + host=host, + port=port, + username=username, + password=password, + db_index=db_index, + url=url, + mode=mode, + )._init_client() else: self.client = client @@ -212,35 +230,6 @@ def lambda_handler(event: dict, context: LambdaContext): self.validation_key_attr = validation_key_attr super(RedisCachePersistenceLayer, self).__init__() - def _init_client(self) -> Union[redis.Redis, redis.cluster.RedisCluster]: - client: Union[redis.Redis, redis.cluster.RedisCluster] - logger.info(f"Trying to connect to Redis: {self.config.host}") - if self.config.mode == "standalone": - client = redis.Redis # type: ignore - elif self.config.mode == "cluster": - client = redis.cluster.RedisCluster # type: ignore - else: - raise IdempotencyRedisClientConfigError(f"Mode {self.config.mode} not supported") - - try: - if self.config.url: - logger.debug(f"Using URL format to connect to Redis: {self.config.host}") - return client.from_url(url=self.config.url) - else: - logger.debug(f"Using other parameters to connect to Redis: {self.config.host}") - return client( # type: ignore - host=self.config.host, - port=self.config.port, - username=self.config.username, - password=self.config.password, - db=self.config.db_index, - decode_responses=True, - **self.config.extra_options, - ) - except redis.exceptions.ConnectionError as exc: - logger.debug(f"Cannot connect in Redis: {self.config.host}") - raise IdempotencyRedisConnectionError("Could not to connect to Redis", exc) from exc - def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> DataRecord: in_progress_expiry_timestamp = item.get(self.in_progress_expiry_attr) if 
isinstance(in_progress_expiry_timestamp, str): diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index f9d448b9fcc..b26d4ea54e7 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -5,7 +5,6 @@ from aws_lambda_powertools.utilities.idempotency import ( RedisCachePersistenceLayer, - RedisConfig, ) from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, @@ -81,11 +80,6 @@ def persistence_store_standalone_redis(): return RedisCachePersistenceLayer(client=redis_client) -@pytest.fixture -def redis_config(): - return RedisConfig(host="localhost", port="63005", mode="standalone", ssl=False) - - def test_idempotent_create_redis_client_with_config(redis_config): RedisCachePersistenceLayer(config=redis_config) From 9ff5e48da13e269f21f5ebc9a687dd7f05ee7af5 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Mon, 16 Oct 2023 23:43:25 +0000 Subject: [PATCH 34/81] remove redis-config in doc,test --- .../utilities/idempotency/persistence/redis.py | 5 +---- docs/utilities/idempotency.md | 7 +++---- .../src/customize_persistence_layer_redis.py | 11 ++++++++--- .../getting_started_with_idempotency_redis_config.py | 5 +---- .../idempotency/persistence/test_redis_layer.py | 4 ---- 5 files changed, 13 insertions(+), 19 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 61e6ebea38f..7af4a77e1ad 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -64,14 +64,11 @@ def __init__( from aws_lambda_powertools.utilities.idempotency import ( RedisCachePersistenceLayer, - RedisConfig, idempotent, ) from aws_lambda_powertools.utilities.typing import LambdaContext - config 
= RedisConfig(host="localhost", port="6379, mode="standalone") - - persistence_layer = RedisCachePersistenceLayer(config=config) + persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379, mode="standalone") @dataclass diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 9c534bdaddb..eeec0797eb1 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -570,8 +570,8 @@ You can quickly start by initializing the `RedisCachePersistenceLayer` class and 1. Notice we rely on this field to be true -=== "Use Redis Config Class" - ```python hl_lines="4-8 11 13 27" +=== "Use Persistence Layer with Redis config variables" + ```python hl_lines="4-8 10 24" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_config.py" ``` @@ -617,7 +617,7 @@ This persistence layer is built-in, and you can use an existing Redis service. W === "Customizing RedisPersistenceLayer to suit your data structure" - ```python hl_lines="10-16" + ```python hl_lines="14-20" --8<-- "examples/idempotency/src/customize_persistence_layer_redis.py" ``` @@ -625,7 +625,6 @@ When using Redis as a persistence layer, you can alter the attribute names by pa | Parameter | Required | Default | Description | | --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | -| **config** | | `None` | You can pass in the configs to establish the corresponding Redis client | | **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | | **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | | **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | diff --git a/examples/idempotency/src/customize_persistence_layer_redis.py 
b/examples/idempotency/src/customize_persistence_layer_redis.py index 20f46fc9758..ccb4dbbbc5d 100644 --- a/examples/idempotency/src/customize_persistence_layer_redis.py +++ b/examples/idempotency/src/customize_persistence_layer_redis.py @@ -1,14 +1,19 @@ +from redis import Redis + from aws_lambda_powertools.utilities.idempotency import ( RedisCachePersistenceLayer, - RedisConfig, idempotent, ) from aws_lambda_powertools.utilities.typing import LambdaContext -config = RedisConfig(host="localhost", port=6379, mode="standalone") +redis_client = Redis( + host="localhost", + port=6379, + decode_responses=True, +) persistence_layer = RedisCachePersistenceLayer( - config=config, + client=redis_client, in_progress_expiry_attr="in_progress_expiration", status_attr="status", data_attr="data", diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py index 75e922735ea..fdedfab4471 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py @@ -3,14 +3,11 @@ from aws_lambda_powertools.utilities.idempotency import ( RedisCachePersistenceLayer, - RedisConfig, idempotent, ) from aws_lambda_powertools.utilities.typing import LambdaContext -config = RedisConfig(host="localhost", port=6379, mode="standalone") - -persistence_layer = RedisCachePersistenceLayer(config=config) +persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379, mode="standalone") @dataclass diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index b26d4ea54e7..ff3347980d6 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -80,10 +80,6 @@ def persistence_store_standalone_redis(): return 
RedisCachePersistenceLayer(client=redis_client) -def test_idempotent_create_redis_client_with_config(redis_config): - RedisCachePersistenceLayer(config=redis_config) - - # test basic def test_idempotent_function_and_lambda_handler_redis_basic( # idempotency_config: IdempotencyConfig, From 7ceb45e203412941e2c87e17d14152d8cc7626ad Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 18 Oct 2023 00:53:02 +0000 Subject: [PATCH 35/81] handle race condition --- .../utilities/idempotency/persistence/base.py | 3 +- .../idempotency/persistence/redis.py | 86 ++++++++++++++----- .../idempotency/test_idempotency_redis.py | 31 +------ 3 files changed, 70 insertions(+), 50 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 2b491b45108..58e0981cc14 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -381,7 +381,7 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option now = datetime.datetime.now() period = datetime.timedelta(milliseconds=remaining_time_in_millis) timestamp = (now + period).timestamp() - + # change this to fit in redis data_record.in_progress_expiry_timestamp = int(timestamp * 1000) else: warnings.warn( @@ -389,6 +389,7 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option "Did you call register_lambda_context on IdempotencyConfig?", stacklevel=2, ) + # set the default value to 15 minute logger.debug(f"Saving in progress record for idempotency key: {data_record.idempotency_key}") diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 7af4a77e1ad..b6cea6c0882 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ 
-20,6 +20,8 @@ ) logger = logging.getLogger(__name__) +orphan_lock = "OrphanLock" +orphan_lock_timeout = 10 class RedisConnection: @@ -148,6 +150,7 @@ def __init__( mode: Literal["standalone", "cluster"] = "standalone", client: redis.Redis | redis.cluster.RedisCluster | None = None, in_progress_expiry_attr: str = "in_progress_expiration", + expiry_attr: str = "expiration", status_attr: str = "status", data_attr: str = "data", validation_key_attr: str = "validation", @@ -222,6 +225,7 @@ def lambda_handler(event: dict, context: LambdaContext): raise IdempotencyRedisClientConfigError self.in_progress_expiry_attr = in_progress_expiry_attr + self.expiry_attr = expiry_attr self.status_attr = status_attr self.data_attr = data_attr self.validation_key_attr = validation_key_attr @@ -249,18 +253,17 @@ def _get_record(self, idempotency_key) -> DataRecord: raise IdempotencyItemNotFoundError return self._item_to_data_record(idempotency_key, item) - def _put_record(self, data_record: DataRecord) -> None: + def _put_in_progress_record(self, data_record: DataRecord) -> None: item: Dict[str, Any] = {} - - # Redis works with hset to support hashing keys with multiple attributes - # See: https://redis.io/commands/hset/ item = { "name": data_record.idempotency_key, "mapping": { self.status_attr: data_record.status, + self.expiry_attr: data_record.expiry_timestamp, }, } + # means we are saving in_progress if data_record.in_progress_expiry_timestamp is not None: item["mapping"][self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp @@ -280,28 +283,58 @@ def _put_record(self, data_record: DataRecord) -> None: # The idempotency key does not exist: # - first time that this invocation key is used # - previous invocation with the same key was deleted due to TTL - idempotency_record = self.client.hgetall(data_record.idempotency_key) - print(idempotency_record) - if len(idempotency_record) > 0: - # record already exists. 
- # status is completed, so raise exception because it exists and still valid - if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"]: - raise IdempotencyItemAlreadyExistsError + logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") - # checking if in_progress_expiry_attr exists - # if in_progress_expiry_attr exist, must be lower than now - if self.in_progress_expiry_attr in idempotency_record and int( - idempotency_record[self.in_progress_expiry_attr], - ) > int(now.timestamp() * 1000): - raise IdempotencyItemAlreadyExistsError + pipe = self.client.pipeline(transaction=True) + for key, value in item["mapping"].items(): + pipe.hsetnx(name=item["name"], key=key, value=value) + pipe.expireat(name=data_record.idempotency_key, when=data_record.expiry_timestamp) + result_list = pipe.execute() - logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") - self.client.hset(**item) + # a zero in list means one of the operation is failed + if 0 not in result_list: + return + + # handle key already exist error + idempotency_record = self.client.hgetall(data_record.idempotency_key) + if len(idempotency_record) == 0: + # somthing wired happend, hsetnx failed however there's no record. we raise an error + raise IdempotencyItemNotFoundError + + # record already exists. 
+ # status is completed, so raise exception because it exists and still valid + if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"] and int( + idempotency_record[self.expiry_attr], + ) < int(now.timestamp() * 1000): + raise IdempotencyItemAlreadyExistsError + + # checking if in_progress_expiry_attr exists + # if in_progress_expiry_attr exist, must be lower than now + if self.in_progress_expiry_attr in idempotency_record and int( + idempotency_record[self.in_progress_expiry_attr], + ) > int(now.timestamp() * 1000): + raise IdempotencyItemAlreadyExistsError # hset type must set expiration after adding the record # Need to review this to get ttl in seconds + # Q: What if Lambda function timed out here? This record will be here forever # Q: should we replace self.expires_after_seconds with _get_expiry_timestamp? more consistent - self.client.expire(name=data_record.idempotency_key, time=self.expires_after_seconds) + + # If the code reaches here means we found an Orphan record. + # It could be a case where Redis expire not working properly or a bug in our code + # we need to add a lock(in case another istance is doing same thing) and overwrite key with current payload. 
+ pipe.hsetnx(name=item["name"] + "lock", key=orphan_lock, value="True") + pipe.expire(name=item["name"] + "lock", time=orphan_lock_timeout) + result_list = pipe.execute() + if 0 in result_list: + # lock failed to aquire + raise IdempotencyItemAlreadyExistsError + + # overwrite orpahn record and set timeout + pipe.hset(**item) + pipe.expireat(name=data_record.idempotency_key, when=data_record.expiry_timestamp) + pipe.execute() + except redis.exceptions.RedisError: raise redis.exceptions.RedisError except redis.exceptions.RedisClusterException: @@ -310,6 +343,18 @@ def _put_record(self, data_record: DataRecord) -> None: logger.debug(f"encountered non-redis exception:{e}") raise e + def _put_record(self, data_record: DataRecord) -> None: + # Redis works with hset to support hashing keys with multiple attributes + # See: https://redis.io/commands/hset/ + + # current this function only support set in_progress. set complete should use update_record + if data_record.status != STATUS_CONSTANTS["INPROGRESS"]: + raise NotImplementedError + + # seperate in_progress logic in case we use _put_record to save other record in the future + self._put_in_progress_record(data_record=data_record) + + # Q:Will here accidentally create race? def _update_record(self, data_record: DataRecord) -> None: item: Dict[str, Any] = {} @@ -321,6 +366,7 @@ def _update_record(self, data_record: DataRecord) -> None: }, } logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") + # should we check if this key has expriation already set? 
self.client.hset(**item) def _delete_record(self, data_record: DataRecord) -> None: diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index d4d574c6eef..7ac06953891 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -3,7 +3,7 @@ import pytest import redis -from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer, RedisConfig +from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, @@ -62,33 +62,6 @@ def persistence_store_standalone_redis(): return RedisCachePersistenceLayer(client=redis_client) -@pytest.fixture -def redis_config(): - return RedisConfig(host="localhost", port=63005, mode="standalone", ssl=False) - - -def test_idempotent_create_redis_client_with_config(redis_config, lambda_context): - persistence_layer = RedisCachePersistenceLayer(config=redis_config) - mock_event = {"data": "value"} - expected_result = {"message": "Foo"} - - @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") - def record_handler(record): - return expected_result - - @idempotent(persistence_store=persistence_layer) - def lambda_handler(event, context): - return expected_result - - # WHEN calling the function - fn_result = record_handler(record=mock_event) - # WHEN calling lambda handler - handler_result = lambda_handler(mock_event, lambda_context) - # THEN we expect the function and lambda handler to execute successfully - assert fn_result == expected_result - assert handler_result == expected_result - - # test basic def test_idempotent_function_and_lambda_handler_redis_basic( # idempotency_config: IdempotencyConfig, @@ -191,7 +164,7 @@ def lambda_handler(event, context): # save additional to 
in_progress mock_event = {"data": "value7"} try: - persistence_store.save_inprogress(mock_event, 1000) + persistence_store.save_inprogress(mock_event, 10000) except IdempotencyItemAlreadyExistsError: pass From 602c393eea210f9f501b0f7d8933db874fc19732 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 18 Oct 2023 00:56:41 +0000 Subject: [PATCH 36/81] remove todo --- .../utilities/idempotency/persistence/base.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 58e0981cc14..4bbf68accbe 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -372,7 +372,6 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option data_record = DataRecord( idempotency_key=idempotency_key, status=STATUS_CONSTANTS["INPROGRESS"], - # This expiry_timestamp is never used in redis, remove specific _get_expiry_timestamp for now expiry_timestamp=self._get_expiry_timestamp(), payload_hash=self._get_hashed_payload(data=data), ) @@ -381,7 +380,6 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option now = datetime.datetime.now() period = datetime.timedelta(milliseconds=remaining_time_in_millis) timestamp = (now + period).timestamp() - # change this to fit in redis data_record.in_progress_expiry_timestamp = int(timestamp * 1000) else: warnings.warn( @@ -389,7 +387,6 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option "Did you call register_lambda_context on IdempotencyConfig?", stacklevel=2, ) - # set the default value to 15 minute logger.debug(f"Saving in progress record for idempotency key: {data_record.idempotency_key}") From 28af43998ad43702a257f0cde9e67c7b84f35636 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 18 Oct 2023 23:58:28 +0000 Subject: [PATCH 37/81] change to use 
redis.set --- .../utilities/idempotency/exceptions.py | 6 + .../idempotency/persistence/redis.py | 106 +++++++++++------- 2 files changed, 74 insertions(+), 38 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index a111df687ea..19e609bf045 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -95,3 +95,9 @@ class IdempotencyRedisConnectionError(BaseError): """ Redis connection error """ + + +class IdempotencyOrphanRecordError(BaseError): + """ + Redis Orphan Record found, need to be removed + """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index b6cea6c0882..401b4787fc4 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,8 +1,9 @@ from __future__ import annotations import datetime +import json import logging -from typing import Any, Dict +from typing import Any, Callable, Dict, Optional import redis from typing_extensions import Literal @@ -11,6 +12,7 @@ from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, + IdempotencyOrphanRecordError, IdempotencyRedisClientConfigError, IdempotencyRedisConnectionError, ) @@ -20,8 +22,6 @@ ) logger = logging.getLogger(__name__) -orphan_lock = "OrphanLock" -orphan_lock_timeout = 10 class RedisConnection: @@ -112,6 +112,7 @@ def create_subscription_payment(event: dict) -> Payment: def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: logger.info(f"Trying to connect to Redis: {self.host}") + client: type[redis.Redis | redis.cluster.RedisCluster] if self.mode == "standalone": client = redis.Redis elif self.mode == "cluster": @@ -154,6 +155,8 @@ def __init__( 
status_attr: str = "status", data_attr: str = "data", validation_key_attr: str = "validation", + json_serializer: Optional[Callable] = None, + json_deserializer: Optional[Callable] = None, ): """ Initialize the Redis Persistence Layer @@ -173,7 +176,12 @@ def __init__( Redis hash attribute name for response data, by default "data" validation_key_attr: str, optional Redis hash attribute name for hashed representation of the parts of the event used for validation - + json_serializer : Callable, optional + function to serialize Python `obj` to a JSON document in `str`, `bytes`, `bytearray` format, + by default json.dumps + json_deserializer : Callable, optional + function to deserialize `str`, `bytes`, `bytearray` containing a JSON document to a Python `obj`, + by default json.loads Examples -------- @@ -229,7 +237,18 @@ def lambda_handler(event: dict, context: LambdaContext): self.status_attr = status_attr self.data_attr = data_attr self.validation_key_attr = validation_key_attr + self._json_serializer = json_serializer or json.dumps + self._json_deserializer = json_deserializer or json.loads super(RedisCachePersistenceLayer, self).__init__() + self._orphan_lock_timeout = min(10, self.expires_after_seconds) + + def _get_expiry_second(self, expery_timestamp: int | None) -> int: + """ + return seconds of timedelta from now to the given unix timestamp + """ + if expery_timestamp: + return expery_timestamp - int(datetime.datetime.now().timestamp()) + return self.expires_after_seconds def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> DataRecord: in_progress_expiry_timestamp = item.get(self.in_progress_expiry_attr) @@ -245,12 +264,14 @@ def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> Da def _get_record(self, idempotency_key) -> DataRecord: # See: https://redis.io/commands/hgetall/ - response = self.client.hgetall(idempotency_key) + response = self.client.get(idempotency_key) try: - item = response + item = 
self._json_deserializer(response) except KeyError: raise IdempotencyItemNotFoundError + except json.JSONDecodeError: + raise IdempotencyOrphanRecordError return self._item_to_data_record(idempotency_key, item) def _put_in_progress_record(self, data_record: DataRecord) -> None: @@ -283,30 +304,37 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: # The idempotency key does not exist: # - first time that this invocation key is used # - previous invocation with the same key was deleted due to TTL + # - SET see https://redis.io/commands/set/ - logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") + logger.debug(f"Putting record on Redis for idempotency key: {item['name']}") - pipe = self.client.pipeline(transaction=True) - for key, value in item["mapping"].items(): - pipe.hsetnx(name=item["name"], key=key, value=value) - pipe.expireat(name=data_record.idempotency_key, when=data_record.expiry_timestamp) - result_list = pipe.execute() + encoded_item = self._json_serializer(item["mapping"]) - # a zero in list means one of the operation is failed - if 0 not in result_list: + ttl = self._get_expiry_second(item["mapping"][self.expiry_attr]) + redis_response = self.client.set(name=item["name"], value=encoded_item, ex=ttl, nx=True) + + # redis_response=True means redis set succeed, we return on success.(99% request should end up here) + # redis_response=None means this is the case where idempotency record is hit. continue checking + if redis_response: return - # handle key already exist error - idempotency_record = self.client.hgetall(data_record.idempotency_key) + # fetch from redis and check if it's still valid + encoded_idempotency_record = self.client.get(item["name"]) + + try: + idempotency_record = self._json_deserializer(encoded_idempotency_record) + except json.JSONDecodeError: + # found a currupted record, treat as Orphan Record. 
+ raise IdempotencyOrphanRecordError + if len(idempotency_record) == 0: - # somthing wired happend, hsetnx failed however there's no record. we raise an error - raise IdempotencyItemNotFoundError + # somthing wired happend, hsetnx failed however there's no record. treat as Orphan Record. + raise IdempotencyOrphanRecordError - # record already exists. # status is completed, so raise exception because it exists and still valid if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"] and int( idempotency_record[self.expiry_attr], - ) < int(now.timestamp() * 1000): + ) > int(now.timestamp()): raise IdempotencyItemAlreadyExistsError # checking if in_progress_expiry_attr exists @@ -315,30 +343,29 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: idempotency_record[self.in_progress_expiry_attr], ) > int(now.timestamp() * 1000): raise IdempotencyItemAlreadyExistsError - # hset type must set expiration after adding the record - # Need to review this to get ttl in seconds - # Q: What if Lambda function timed out here? This record will be here forever - # Q: should we replace self.expires_after_seconds with _get_expiry_timestamp? more consistent # If the code reaches here means we found an Orphan record. - # It could be a case where Redis expire not working properly or a bug in our code - # we need to add a lock(in case another istance is doing same thing) and overwrite key with current payload. - pipe.hsetnx(name=item["name"] + "lock", key=orphan_lock, value="True") - pipe.expire(name=item["name"] + "lock", time=orphan_lock_timeout) - result_list = pipe.execute() - if 0 in result_list: - # lock failed to aquire - raise IdempotencyItemAlreadyExistsError - - # overwrite orpahn record and set timeout - pipe.hset(**item) - pipe.expireat(name=data_record.idempotency_key, when=data_record.expiry_timestamp) - pipe.execute() + # It could be a case where Previous hander timed out, Redis expire not working properly, + # or a bug in our code. 
we need to add a lock(in case another istance is doing same thing) + # then overwrite key with current payload. + raise IdempotencyOrphanRecordError except redis.exceptions.RedisError: raise redis.exceptions.RedisError except redis.exceptions.RedisClusterException: raise redis.exceptions.RedisClusterException + except IdempotencyOrphanRecordError: + # deal with orphan record here + # aquire a lock for default 10 seconds + lock = self.client.set(name=item["name"] + ":lock", value="True", ex=self._orphan_lock_timeout, nx=True) + logger.debug("acquiring lock to overwrite orphan record") + if not lock: + # lock failed to aquire, means encountered a race condition. just return + raise IdempotencyItemAlreadyExistsError + + # overwrite orphan record and set timeout, no nx here for we need to overwrite + self.client.set(name=item["name"], value=encoded_item, ex=ttl) + # lock was not removed here intentionally. Prevent another orphan fix in race condition. except Exception as e: logger.debug(f"encountered non-redis exception:{e}") raise e @@ -363,11 +390,14 @@ def _update_record(self, data_record: DataRecord) -> None: "mapping": { self.data_attr: data_record.response_data, self.status_attr: data_record.status, + self.expiry_attr: data_record.expiry_timestamp, }, } logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") # should we check if this key has expriation already set? 
- self.client.hset(**item) + encoded_item = self._json_serializer(item["mapping"]) + ttl = self._get_expiry_second(data_record.expiry_timestamp) + self.client.set(name=item["name"], value=encoded_item, ex=ttl) def _delete_record(self, data_record: DataRecord) -> None: # This function only works when Lambda handler has already been invoked once From 6644217a9f0791056c7a6bf7af3dbbce3c53f33e Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 20 Oct 2023 18:15:58 +0000 Subject: [PATCH 38/81] support decode_response=False --- .../idempotency/persistence/redis.py | 86 ++++++++----------- .../idempotency/test_idempotency_redis.py | 26 +++--- 2 files changed, 51 insertions(+), 61 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 401b4787fc4..92fe74c63c5 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -3,7 +3,8 @@ import datetime import json import logging -from typing import Any, Callable, Dict, Optional +import warnings +from typing import Any, Dict import redis from typing_extensions import Literal @@ -155,8 +156,6 @@ def __init__( status_attr: str = "status", data_attr: str = "data", validation_key_attr: str = "validation", - json_serializer: Optional[Callable] = None, - json_deserializer: Optional[Callable] = None, ): """ Initialize the Redis Persistence Layer @@ -176,12 +175,7 @@ def __init__( Redis hash attribute name for response data, by default "data" validation_key_attr: str, optional Redis hash attribute name for hashed representation of the parts of the event used for validation - json_serializer : Callable, optional - function to serialize Python `obj` to a JSON document in `str`, `bytes`, `bytearray` format, - by default json.dumps - json_deserializer : Callable, optional - function to deserialize `str`, `bytes`, `bytearray` containing a JSON 
document to a Python `obj`, - by default json.loads + Examples -------- @@ -229,16 +223,18 @@ def lambda_handler(event: dict, context: LambdaContext): if not hasattr(self.client, "get_connection_kwargs"): raise IdempotencyRedisClientConfigError if not self.client.get_connection_kwargs().get("decode_responses", False): - # Requires decode_responses to be true - raise IdempotencyRedisClientConfigError + warnings.warn( + "Redis connection with `decode_responses=False` might casue lower performance", + stacklevel=2, + ) self.in_progress_expiry_attr = in_progress_expiry_attr self.expiry_attr = expiry_attr self.status_attr = status_attr self.data_attr = data_attr self.validation_key_attr = validation_key_attr - self._json_serializer = json_serializer or json.dumps - self._json_deserializer = json_deserializer or json.loads + self._json_serializer = json.dumps + self._json_deserializer = json.loads super(RedisCachePersistenceLayer, self).__init__() self._orphan_lock_timeout = min(10, self.expires_after_seconds) @@ -267,7 +263,7 @@ def _get_record(self, idempotency_key) -> DataRecord: response = self.client.get(idempotency_key) try: - item = self._json_deserializer(response) + item = self._json_deserializer(response) # type: ignore except KeyError: raise IdempotencyItemNotFoundError except json.JSONDecodeError: @@ -284,7 +280,6 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: }, } - # means we are saving in_progress if data_record.in_progress_expiry_timestamp is not None: item["mapping"][self.in_progress_expiry_attr] = data_record.in_progress_expiry_timestamp @@ -307,81 +302,76 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: # - SET see https://redis.io/commands/set/ logger.debug(f"Putting record on Redis for idempotency key: {item['name']}") - encoded_item = self._json_serializer(item["mapping"]) + ttl = self._get_expiry_second(expery_timestamp=item["mapping"][self.expiry_attr]) - ttl = 
self._get_expiry_second(item["mapping"][self.expiry_attr]) redis_response = self.client.set(name=item["name"], value=encoded_item, ex=ttl, nx=True) - # redis_response=True means redis set succeed, we return on success.(99% request should end up here) - # redis_response=None means this is the case where idempotency record is hit. continue checking + # redis_response:True -> Redis set succeed, idempotency key does not exist before + # return to idempotency and proceed to handler excution phase. Most cases should return here if redis_response: return - # fetch from redis and check if it's still valid + # redis_response:None -> Existing record on Redis, continue to checking phase + # The idempotency key exist: + # - previous invocation with the same key and not expired(active idempotency) + # - previous invocation timed out (Orphan Record) + # - previous invocation record expired but not deleted by Redis (Orphan Record) + encoded_idempotency_record = self.client.get(item["name"]) try: - idempotency_record = self._json_deserializer(encoded_idempotency_record) + idempotency_record = self._json_deserializer(encoded_idempotency_record) # type: ignore except json.JSONDecodeError: - # found a currupted record, treat as Orphan Record. + # found a corrupted record, treat as Orphan Record. raise IdempotencyOrphanRecordError if len(idempotency_record) == 0: - # somthing wired happend, hsetnx failed however there's no record. treat as Orphan Record. - raise IdempotencyOrphanRecordError + # Set in Redis with nx failed however there's no record. 
return to idempotency to retry + raise IdempotencyItemNotFoundError - # status is completed, so raise exception because it exists and still valid + # status is completed and expiry_attr timestamp still larger than current timestamp + # found a valid completed record if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"] and int( idempotency_record[self.expiry_attr], ) > int(now.timestamp()): raise IdempotencyItemAlreadyExistsError - # checking if in_progress_expiry_attr exists - # if in_progress_expiry_attr exist, must be lower than now + # in_progress_expiry_attr exist means status is in_progress, and still larger than current timestamp, + # found a vaild in_progress record if self.in_progress_expiry_attr in idempotency_record and int( idempotency_record[self.in_progress_expiry_attr], ) > int(now.timestamp() * 1000): raise IdempotencyItemAlreadyExistsError # If the code reaches here means we found an Orphan record. - # It could be a case where Previous hander timed out, Redis expire not working properly, - # or a bug in our code. we need to add a lock(in case another istance is doing same thing) - # then overwrite key with current payload. raise IdempotencyOrphanRecordError - except redis.exceptions.RedisError: - raise redis.exceptions.RedisError - except redis.exceptions.RedisClusterException: - raise redis.exceptions.RedisClusterException except IdempotencyOrphanRecordError: # deal with orphan record here # aquire a lock for default 10 seconds lock = self.client.set(name=item["name"] + ":lock", value="True", ex=self._orphan_lock_timeout, nx=True) logger.debug("acquiring lock to overwrite orphan record") if not lock: - # lock failed to aquire, means encountered a race condition. just return + # lock failed to aquire, means encountered a race condition. 
Just return raise IdempotencyItemAlreadyExistsError - # overwrite orphan record and set timeout, no nx here for we need to overwrite + # Overwrite orphan record and set timeout, we don't use nx here for we need to overwrite self.client.set(name=item["name"], value=encoded_item, ex=ttl) # lock was not removed here intentionally. Prevent another orphan fix in race condition. + except (redis.exceptions.RedisError, redis.exceptions.RedisClusterException) as e: + raise e except Exception as e: logger.debug(f"encountered non-redis exception:{e}") raise e def _put_record(self, data_record: DataRecord) -> None: - # Redis works with hset to support hashing keys with multiple attributes - # See: https://redis.io/commands/hset/ - - # current this function only support set in_progress. set complete should use update_record - if data_record.status != STATUS_CONSTANTS["INPROGRESS"]: + if data_record.status == STATUS_CONSTANTS["INPROGRESS"]: + self._put_in_progress_record(data_record=data_record) + else: + # current this function only support set in_progress. set complete should use update_record raise NotImplementedError - # seperate in_progress logic in case we use _put_record to save other record in the future - self._put_in_progress_record(data_record=data_record) - - # Q:Will here accidentally create race? def _update_record(self, data_record: DataRecord) -> None: item: Dict[str, Any] = {} @@ -394,14 +384,14 @@ def _update_record(self, data_record: DataRecord) -> None: }, } logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") - # should we check if this key has expriation already set? 
encoded_item = self._json_serializer(item["mapping"]) ttl = self._get_expiry_second(data_record.expiry_timestamp) + # need to set ttl again, if we don't set ex here the record will not have a ttl self.client.set(name=item["name"], value=encoded_item, ex=ttl) def _delete_record(self, data_record: DataRecord) -> None: - # This function only works when Lambda handler has already been invoked once - # maybe we should add some exception when this is called before Lambda handler + # This function only works when Lambda handler has already been invoked + # Or you'll get empty idempotency_key logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") # See: https://redis.io/commands/del/ self.client.delete(data_record.idempotency_key) diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 7ac06953891..0dc8446ef1f 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -1,4 +1,5 @@ import copy +import json import pytest import redis @@ -9,7 +10,6 @@ IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, IdempotencyPersistenceLayerError, - IdempotencyRedisClientConfigError, ) from aws_lambda_powertools.utilities.idempotency.idempotency import ( idempotent, @@ -57,7 +57,7 @@ def persistence_store_standalone_redis(): redis_client = redis.Redis( host="localhost", port=63005, - decode_responses=True, + decode_responses=False, ) return RedisCachePersistenceLayer(client=redis_client) @@ -89,17 +89,6 @@ def lambda_handler(event, context): assert handler_result == expected_result -def test_idempotent_lambda_redis_no_decode(): - redis_client = redis.Redis( - host="localhost", - port="63005", - decode_responses=False, - ) - # decode_responses=False will not be accepted - with pytest.raises(IdempotencyRedisClientConfigError): - RedisCachePersistenceLayer(client=redis_client) - - def 
test_idempotent_function_and_lambda_handler_redis_cache( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -218,3 +207,14 @@ def lambda_handler(event, _): with pytest.raises(IdempotencyPersistenceLayerError): lambda_handler("test_Acl", lambda_context) + + +def test_redis_decode(): + redis_client = redis.Redis( + host="localhost", + port="63005", + decode_responses=True, + ) + + redis_client.set(name="test", value=json.dumps({"1": 2, "3": 4})) + print(json.loads(redis_client.get(name="test"))) From ab12c298843aa1437ad0145731de8d293b67f3f2 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 20 Oct 2023 19:48:36 +0000 Subject: [PATCH 39/81] fix mock redis --- .../idempotency/persistence/redis.py | 2 +- .../persistence/test_redis_layer.py | 61 +++++++++---------- .../idempotency/test_idempotency_redis.py | 12 ---- 3 files changed, 31 insertions(+), 44 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 92fe74c63c5..c8ab42f974e 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -224,7 +224,7 @@ def lambda_handler(event: dict, context: LambdaContext): raise IdempotencyRedisClientConfigError if not self.client.get_connection_kwargs().get("decode_responses", False): warnings.warn( - "Redis connection with `decode_responses=False` might casue lower performance", + "Redis connection with `decode_responses=False` may casue lower performance", stacklevel=2, ) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index ff3347980d6..e78cffbf25a 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -1,3 +1,4 @@ +# ruff: noqa import copy import time as t @@ -10,7 +11,6 
@@ IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, - IdempotencyRedisClientConfigError, ) from aws_lambda_powertools.utilities.idempotency.idempotency import ( idempotent, @@ -50,7 +50,8 @@ def from_url(self, url: str): # not covered by test yet. def expire(self, name, time): - self.expire_dict[name] = t.time() + time + if time != 0: + self.expire_dict[name] = t.time() + time # return {} if no match def hgetall(self, name): @@ -67,6 +68,33 @@ def auth(self, username, **kwargs): def delete(self, name): self.cache.pop(name, {}) + def set(self, name, value, ex: int = 0, nx: bool = False): + # expire existing + if self.expire_dict.get(name, t.time() + 1) < t.time(): + self.cache.pop(name, {}) + + if isinstance(value, str): + value = value.encode() + + # nx logic + if name in self.cache and nx: + return None + + self.cache[name] = value + self.expire(name, ex) + return True + + def get(self, name: str): + if self.expire_dict.get(name, t.time() + 1) < t.time(): + self.cache.pop(name, {}) + + resp = self.cache.get(name, None) + + if resp and self.decode_responses: + resp = resp.decode("utf-8") + + return resp + @pytest.fixture def persistence_store_standalone_redis(): @@ -107,17 +135,6 @@ def lambda_handler(event, context): assert handler_result == expected_result -def test_idempotent_lambda_redis_no_decode(): - redis_client = MockRedis( - host="localhost", - port="63005", - decode_responses=False, - ) - # decode_responses=False will not be accepted - with pytest.raises(IdempotencyRedisClientConfigError): - RedisCachePersistenceLayer(client=redis_client) - - def test_idempotent_function_and_lambda_handler_redis_cache( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -211,21 +228,3 @@ def lambda_handler(event, _): result = {"message": "Foo2"} handler_result2 = lambda_handler(mock_event, lambda_context) assert handler_result2 == result - - -"""def 
test_idempotent_lambda_redis_credential(lambda_context): - redis_client = MockRedis( - host='localhost', - port='63005', - decode_responses=True, - ) - pwd = "terriblePassword" - usr = "test_acl_denial" - redis_client.acl_setuser(username=usr, enabled=True, passwords="+"+pwd,keys='*',commands=['+hgetall','-set']) - redis_client.auth(password=pwd,username=usr) - - @idempotent(persistence_store=RedisCachePersistenceLayer(connection=redis_client)) - def lambda_handler(event, _): - return True - with pytest.raises(IdempotencyPersistenceLayerError): - handler_result = lambda_handler("test_Acl", lambda_context)""" diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 0dc8446ef1f..3acd31baa12 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -1,5 +1,4 @@ import copy -import json import pytest import redis @@ -207,14 +206,3 @@ def lambda_handler(event, _): with pytest.raises(IdempotencyPersistenceLayerError): lambda_handler("test_Acl", lambda_context) - - -def test_redis_decode(): - redis_client = redis.Redis( - host="localhost", - port="63005", - decode_responses=True, - ) - - redis_client.set(name="test", value=json.dumps({"1": 2, "3": 4})) - print(json.loads(redis_client.get(name="test"))) From 1ea6a2182ec6afac74b31b026575120a408c2a7c Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 20 Oct 2023 20:33:54 +0000 Subject: [PATCH 40/81] add test for no decode --- .../persistence/test_redis_layer.py | 87 +++++++++++++++---- 1 file changed, 70 insertions(+), 17 deletions(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index e78cffbf25a..fb2b75d29b3 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -68,6 +68,7 @@ def auth(self, 
username, **kwargs): def delete(self, name): self.cache.pop(name, {}) + # return None if nx failed, return True if done def set(self, name, value, ex: int = 0, nx: bool = False): # expire existing if self.expire_dict.get(name, t.time() + 1) < t.time(): @@ -84,6 +85,7 @@ def set(self, name, value, ex: int = 0, nx: bool = False): self.expire(name, ex) return True + # return None if not found def get(self, name: str): if self.expire_dict.get(name, t.time() + 1) < t.time(): self.cache.pop(name, {}) @@ -96,10 +98,18 @@ def get(self, name: str): return resp +@pytest.fixture +def persistence_store_standalone_redis_no_decode(): + redis_client = MockRedis( + host="localhost", + port="63005", + decode_responses=False, + ) + return RedisCachePersistenceLayer(client=redis_client) + + @pytest.fixture def persistence_store_standalone_redis(): - # you will need to handle yourself the connection to pass again the password - # and avoid AuthenticationError at redis queries redis_client = MockRedis( host="localhost", port="63005", @@ -108,9 +118,7 @@ def persistence_store_standalone_redis(): return RedisCachePersistenceLayer(client=redis_client) -# test basic def test_idempotent_function_and_lambda_handler_redis_basic( - # idempotency_config: IdempotencyConfig, persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, ): @@ -152,32 +160,76 @@ def record_handler(record): def lambda_handler(event, context): return result - # WHEN calling the function + # WHEN calling the function and handler with idempotency fn_result = record_handler(record=mock_event) - # WHEN calling lambda handler handler_result = lambda_handler(mock_event, lambda_context) # THEN we expect the function and lambda handler to execute successfully assert fn_result == expected_result assert handler_result == expected_result - # modify the return to check if idem cache works result = {"message": "Bar"} + # Given idempotency record already in Redis + # When we modified the actual function output 
and run the second time fn_result2 = record_handler(record=mock_event) - # Second time calling lambda handler, test if same result handler_result2 = lambda_handler(mock_event, lambda_context) + # Then the result should be the same as first time assert fn_result2 == expected_result assert handler_result2 == expected_result - # modify the mock event to check if we got updated result + # Given idempotency record already in Redis + # When we modified the actual function output and use a different payload mock_event = {"data": "value3"} fn_result3 = record_handler(record=mock_event) - # thrid time calling lambda handler, test if result updated handler_result3 = lambda_handler(mock_event, lambda_context) + # Then the result should be the actual function output + assert fn_result3 == result + assert handler_result3 == result + + +def test_idempotent_function_and_lambda_handler_redis_basic_no_decode( + persistence_store_standalone_redis_no_decode: RedisCachePersistenceLayer, + lambda_context, +): + # GIVEN redis client passed in has decode_responses=False + mock_event = {"data": "value-nodecode"} + persistence_layer = persistence_store_standalone_redis_no_decode + result = {"message": "Foo"} + expected_result = copy.deepcopy(result) + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return result + + # WHEN calling the function and handler with idempotency + fn_result = record_handler(record=mock_event) + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result + + result = {"message": "Bar"} + # Given idempotency record already in Redis + # When we modified the actual function output and run the second time + fn_result2 = 
record_handler(record=mock_event) + handler_result2 = lambda_handler(mock_event, lambda_context) + # Then the result should be the same as first time + assert fn_result2 == expected_result + assert handler_result2 == expected_result + + # Given idempotency record already in Redis + # When we modified the actual function output and use a different payload + mock_event = {"data": "value3"} + fn_result3 = record_handler(record=mock_event) + handler_result3 = lambda_handler(mock_event, lambda_context) + # Then the result should be the actual function output assert fn_result3 == result assert handler_result3 == result -# test idem-inprogress def test_idempotent_lambda_redis_in_progress( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -194,20 +246,19 @@ def test_idempotent_lambda_redis_in_progress( def lambda_handler(event, context): return lambda_response - # register the context first + # Given in_progress idempotency record already in Redis lambda_handler(mock_event, lambda_context) - # save additional to in_progress mock_event = {"data": "value7"} try: persistence_store.save_inprogress(mock_event, 1000) except IdempotencyItemAlreadyExistsError: pass - + # when invoking with same payload + # then should raise IdempotencyAlreadyInProgressError with pytest.raises(IdempotencyAlreadyInProgressError): lambda_handler(mock_event, lambda_context) -# test -remove def test_idempotent_lambda_redis_delete( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -222,9 +273,11 @@ def lambda_handler(event, _): handler_result = lambda_handler(mock_event, lambda_context) assert handler_result == result - - # delete the idem and handler should output new result + # Given the idempotency record from the first run deleted persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) result = {"message": "Foo2"} + # When lambda hander run for the second time handler_result2 = lambda_handler(mock_event, 
lambda_context) + + # Then lambda handler should return a actual function output assert handler_result2 == result From adee7aff280588154ca6b079cf71b2f9984dd2c6 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Mon, 23 Oct 2023 17:56:02 +0000 Subject: [PATCH 41/81] fix docstring --- .../utilities/idempotency/persistence/redis.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index c8ab42f974e..0457cd71274 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -238,7 +238,7 @@ def lambda_handler(event: dict, context: LambdaContext): super(RedisCachePersistenceLayer, self).__init__() self._orphan_lock_timeout = min(10, self.expires_after_seconds) - def _get_expiry_second(self, expery_timestamp: int | None) -> int: + def _get_expiry_second(self, expery_timestamp: int | None = None) -> int: """ return seconds of timedelta from now to the given unix timestamp """ @@ -259,10 +259,9 @@ def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> Da ) def _get_record(self, idempotency_key) -> DataRecord: - # See: https://redis.io/commands/hgetall/ - response = self.client.get(idempotency_key) - + # See: https://redis.io/commands/get/ try: + response = self.client.get(idempotency_key) item = self._json_deserializer(response) # type: ignore except KeyError: raise IdempotencyItemNotFoundError From 05b7ff05624da493cac8cc3956cfba36cf84a491 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 27 Oct 2023 00:38:22 +0000 Subject: [PATCH 42/81] fix coverage --- .../idempotency/persistence/redis.py | 33 +-- .../persistence/test_redis_layer.py | 276 +++++++++++++++++- tests/functional/test_utilities_parameters.py | 8 +- 3 files changed, 292 insertions(+), 25 deletions(-) diff --git 
a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 0457cd71274..452e1a5283e 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -260,11 +260,12 @@ def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> Da def _get_record(self, idempotency_key) -> DataRecord: # See: https://redis.io/commands/get/ - try: - response = self.client.get(idempotency_key) - item = self._json_deserializer(response) # type: ignore - except KeyError: + response = self.client.get(idempotency_key) + # key not found + if not response: raise IdempotencyItemNotFoundError + try: + item = self._json_deserializer(response) except json.JSONDecodeError: raise IdempotencyOrphanRecordError return self._item_to_data_record(idempotency_key, item) @@ -317,30 +318,20 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: # - previous invocation timed out (Orphan Record) # - previous invocation record expired but not deleted by Redis (Orphan Record) - encoded_idempotency_record = self.client.get(item["name"]) - - try: - idempotency_record = self._json_deserializer(encoded_idempotency_record) # type: ignore - except json.JSONDecodeError: - # found a corrupted record, treat as Orphan Record. - raise IdempotencyOrphanRecordError - - if len(idempotency_record) == 0: - # Set in Redis with nx failed however there's no record. 
return to idempotency to retry - raise IdempotencyItemNotFoundError + idempotency_record = self._get_record(item["name"]) # status is completed and expiry_attr timestamp still larger than current timestamp # found a valid completed record - if idempotency_record[self.status_attr] == STATUS_CONSTANTS["COMPLETED"] and int( - idempotency_record[self.expiry_attr], - ) > int(now.timestamp()): + if idempotency_record.status == STATUS_CONSTANTS["COMPLETED"] and not idempotency_record.is_expired: raise IdempotencyItemAlreadyExistsError # in_progress_expiry_attr exist means status is in_progress, and still larger than current timestamp, # found a vaild in_progress record - if self.in_progress_expiry_attr in idempotency_record and int( - idempotency_record[self.in_progress_expiry_attr], - ) > int(now.timestamp() * 1000): + if ( + idempotency_record.status == STATUS_CONSTANTS["INPROGRESS"] + and idempotency_record.in_progress_expiry_timestamp + and idempotency_record.in_progress_expiry_timestamp > int(now.timestamp() * 1000) + ): raise IdempotencyItemAlreadyExistsError # If the code reaches here means we found an Orphan record. 
diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index fb2b75d29b3..075f736fdc2 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -3,20 +3,37 @@ import time as t import pytest +from unittest.mock import patch from aws_lambda_powertools.utilities.idempotency import ( RedisCachePersistenceLayer, ) +import datetime + +from aws_lambda_powertools.utilities.idempotency.persistence.base import ( + STATUS_CONSTANTS, + DataRecord, +) + +from unittest import mock + from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, + IdempotencyRedisConnectionError, + IdempotencyRedisClientConfigError, + IdempotencyOrphanRecordError, + IdempotencyValidationError, ) from aws_lambda_powertools.utilities.idempotency.idempotency import ( idempotent, idempotent_function, + IdempotencyConfig, ) +redis_badhost = "badhost" + @pytest.fixture def lambda_context(): @@ -33,23 +50,78 @@ def get_remaining_time_in_millis(self) -> int: return LambdaContext() -class MockRedis: - def __init__(self, decode_responses, cache: dict = None, **kwargs): +class RedisExceptions: + class RedisClusterException(Exception): + "mock cluster exception" + + class RedisError(Exception): + "mock redis exception" + + class ConnectionError(Exception): + "mock connection exception" + + +class MockRedisBase: + # use this class to test no get_connection_kwargs error + exceptions = RedisExceptions + + def __call__(self, *args, **kwargs): + if kwargs.get("host") == redis_badhost: + raise self.exceptions.ConnectionError + self.__dict__.update(kwargs) + return self + + @property + def Redis(self): + self.mode = "standalone" + return self + + @property + def cluster(self): + return self + + @property + def RedisCluster(self): + self.mode = 
"cluster" + return self + + # use this to mimic Redis error + def close(self): + self.closed = True + + +class MockRedis(MockRedisBase): + def __init__(self, decode_responses=False, cache: dict = None, **kwargs): self.cache = cache or {} self.expire_dict = {} self.decode_responses = decode_responses self.acl = {} self.username = "" + self.mode = "" + self.url = "" + self.__dict__.update(kwargs) + self.closed = False + super(MockRedis, self).__init__() + + def check_closed(self): + if self.closed == False: + return + if self.mode == "cluster": + raise self.exceptions.RedisClusterException + raise self.exceptions.RedisError def hset(self, name, mapping): + self.check_closed() self.expire_dict.pop(name, {}) self.cache[name] = mapping def from_url(self, url: str): - pass + self.url = url + return self # not covered by test yet. def expire(self, name, time): + self.check_closed() if time != 0: self.expire_dict[name] = t.time() + time @@ -66,11 +138,13 @@ def auth(self, username, **kwargs): self.username = username def delete(self, name): + self.check_closed() self.cache.pop(name, {}) # return None if nx failed, return True if done def set(self, name, value, ex: int = 0, nx: bool = False): # expire existing + self.check_closed() if self.expire_dict.get(name, t.time() + 1) < t.time(): self.cache.pop(name, {}) @@ -87,6 +161,7 @@ def set(self, name, value, ex: int = 0, nx: bool = False): # return None if not found def get(self, name: str): + self.check_closed() if self.expire_dict.get(name, t.time() + 1) < t.time(): self.cache.pop(name, {}) @@ -118,6 +193,136 @@ def persistence_store_standalone_redis(): return RedisCachePersistenceLayer(client=redis_client) +@pytest.fixture +def orphan_record(): + return DataRecord( + idempotency_key="test_orphan_key", + status=STATUS_CONSTANTS["INPROGRESS"], + in_progress_expiry_timestamp=int(datetime.datetime.now().timestamp() * 1000 - 1), + ) + + +@pytest.fixture +def valid_record(): + return DataRecord( + 
idempotency_key="test_orphan_key", + status=STATUS_CONSTANTS["INPROGRESS"], + in_progress_expiry_timestamp=int(datetime.datetime.now().timestamp() * 1000 + 1000), + ) + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_connection(): + # when RedisCachePersistenceLayer is init with the following params + redis_conf = { + "host": "host", + "port": "port", + "mode": "cluster", + "username": "redis_user", + "password": "redis_pass", + "db_index": "db_index", + } + layer = RedisCachePersistenceLayer(**redis_conf) + redis_conf["db"] = redis_conf["db_index"] + redis_conf.pop("db_index") + # then these paramas should be passed down to mock Redis identically + for k, v in redis_conf.items(): + assert layer.client.__dict__.get(k) == v + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_connection_conn_error(): + # when RedisCachePersistenceLayer is init with a bad host + # then should raise IdempotencyRedisConnectionError + with pytest.raises(IdempotencyRedisConnectionError): + layer = RedisCachePersistenceLayer(host=redis_badhost) + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_connection_conf_error(): + # when RedisCachePersistenceLayer is init with a not_supported_mode in mode param + # then should raise IdempotencyRedisClientConfigError + with pytest.raises(IdempotencyRedisClientConfigError): + layer = RedisCachePersistenceLayer(mode="not_supported_mode") + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_key_error(): + # when RedisCachePersistenceLayer is trying to get a non-exist key + # then should raise IdempotencyItemNotFoundError + with pytest.raises(IdempotencyItemNotFoundError): + layer = RedisCachePersistenceLayer(host="host") + layer._get_record(idempotency_key="not_exist") + + 
+@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_key_corrupted(): + # when RedisCachePersistenceLayer got a non-json formatted record + # then should raise IdempotencyOrphanRecordError + with pytest.raises(IdempotencyOrphanRecordError): + layer = RedisCachePersistenceLayer(url="sample_url") + layer.client.set("corrupted_json", "not_json_string") + layer._get_record(idempotency_key="corrupted_json") + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_orphan_record(orphan_record, valid_record): + layer = RedisCachePersistenceLayer(host="host") + # Given orphan record exist + layer._put_in_progress_record(orphan_record) + # When we are tyring to update the record + layer._put_in_progress_record(valid_record) + # Then orphan record will be overwritten + assert ( + layer._get_record(valid_record.idempotency_key).in_progress_expiry_timestamp + == valid_record.in_progress_expiry_timestamp + ) + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_orphan_record_lock(orphan_record, valid_record): + layer = RedisCachePersistenceLayer(host="host") + # Given orphan record exist, lock also exist + layer._put_in_progress_record(orphan_record) + layer.client.set("test_orphan_key:lock", "True") + # when trying to overwrite the record + # Then we should raise IdempotencyItemAlreadyExistsError + with pytest.raises(IdempotencyItemAlreadyExistsError): + layer._put_in_progress_record(valid_record) + # And the record should not be overwritten + assert ( + layer._get_record(valid_record.idempotency_key).in_progress_expiry_timestamp + == orphan_record.in_progress_expiry_timestamp + ) + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_error_in_progress(valid_record): + layer = RedisCachePersistenceLayer(host="host", mode="standalone") + 
layer.client.close() + # given a Redis is returning RedisError + # when trying to save inprogress + # then layer should raise RedisExceptions.RedisError + with pytest.raises(RedisExceptions.RedisError): + layer._put_in_progress_record(valid_record) + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_item_to_datarecord_conversion(valid_record): + layer = RedisCachePersistenceLayer(host="host", mode="standalone") + item = { + "status": STATUS_CONSTANTS["INPROGRESS"], + layer.in_progress_expiry_attr: str(int(datetime.datetime.now().timestamp() * 1000)), + } + # given we have a dict of datarecord + # when calling _item_to_data_record + record = layer._item_to_data_record(idempotency_key="abc", item=item) + # then all valid fields in dict should be copied into data_record + assert record.idempotency_key == "abc" + assert record.status == STATUS_CONSTANTS["INPROGRESS"] + assert record.in_progress_expiry_timestamp == int(item[layer.in_progress_expiry_attr]) + + def test_idempotent_function_and_lambda_handler_redis_basic( persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, @@ -186,6 +391,61 @@ def lambda_handler(event, context): assert handler_result3 == result +def test_idempotent_function_and_lambda_handler_redis_event_key( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"body": '{"user_id":"xyz","time":"1234"}'} + persistence_layer = persistence_store_standalone_redis + result = {"message": "Foo"} + expected_result = copy.deepcopy(result) + config = IdempotencyConfig(event_key_jmespath='powertools_json(body).["user_id"]') + + @idempotent(persistence_store=persistence_layer, config=config) + def lambda_handler(event, context): + return result + + # WHEN calling the function and handler with idempotency and event_key_jmespath config to only verify user_id + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we 
expect the function and lambda handler to execute successfully + assert handler_result == expected_result + + result = {"message": "Bar"} + mock_event = {"body": '{"user_id":"xyz","time":"2345"}'} + # Given idempotency record already in Redis + # When we modified the actual function output, time in mock event and run the second time + handler_result2 = lambda_handler(mock_event, lambda_context) + # Then the result should be the same as first time + assert handler_result2 == expected_result + + +def test_idempotent_function_and_lambda_handler_redis_validation( + persistence_store_standalone_redis: RedisCachePersistenceLayer, + lambda_context, +): + mock_event = {"user_id": "xyz", "time": "1234"} + persistence_layer = persistence_store_standalone_redis + result = {"message": "Foo"} + expected_result = copy.deepcopy(result) + config = IdempotencyConfig(event_key_jmespath="user_id", payload_validation_jmespath="time") + + @idempotent(persistence_store=persistence_layer, config=config) + def lambda_handler(event, context): + return result + + # WHEN calling the function and handler with idempotency and event_key_jmespath,payload_validation_jmespath + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + + result = {"message": "Bar"} + mock_event = {"user_id": "xyz", "time": "2345"} + # Given idempotency record already in Redis + # When we modified the payload where validation is on and invoke again. 
+ # Then should raise IdempotencyValidationError + with pytest.raises(IdempotencyValidationError): + handler_result2 = lambda_handler(mock_event, lambda_context) + + def test_idempotent_function_and_lambda_handler_redis_basic_no_decode( persistence_store_standalone_redis_no_decode: RedisCachePersistenceLayer, lambda_context, @@ -281,3 +541,13 @@ def lambda_handler(event, _): # Then lambda handler should return a actual function output assert handler_result2 == result + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedisBase()) +def test_redis_connection_get_kwargs_error(): + # when Layer is init with a redis client that doesn't have get_connection_kwargs method + + # then should raise IdempotencyRedisClientConfigError + + with pytest.raises(IdempotencyRedisClientConfigError): + layer = RedisCachePersistenceLayer(host="testhost") diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py index d0f51b2b338..2ee544f87c8 100644 --- a/tests/functional/test_utilities_parameters.py +++ b/tests/functional/test_utilities_parameters.py @@ -960,7 +960,13 @@ def test_ssm_provider_get_sdk_options_overwrite(mock_name, mock_value, mock_vers stubber.deactivate() -def test_ssm_provider_get_multiple_with_decrypt_environment_variable(monkeypatch, mock_name, mock_value, mock_version, config): +def test_ssm_provider_get_multiple_with_decrypt_environment_variable( + monkeypatch, + mock_name, + mock_value, + mock_version, + config, +): """ Test SSMProvider.get_multiple() with decrypt value replaced by environment variable """ From b8f938cc6b20fb1d57248ed71b3bf7d714c41173 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 8 Nov 2023 01:09:13 +0000 Subject: [PATCH 43/81] add a test case to demostrate race condition --- .../idempotency/persistence/redis.py | 1 + .../persistence/test_redis_layer.py | 56 ++++++++++++++++++- 2 files changed, 55 insertions(+), 2 deletions(-) diff --git 
a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 452e1a5283e..b9bb3046402 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -256,6 +256,7 @@ def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> Da in_progress_expiry_timestamp=in_progress_expiry_timestamp, response_data=str(item.get(self.data_attr)), payload_hash=str(item.get(self.validation_key_attr)), + expiry_timestamp=item.get("expiration", None), ) def _get_record(self, idempotency_key) -> DataRecord: diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index 075f736fdc2..c9b37f39aea 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -1,5 +1,6 @@ # ruff: noqa import copy +import json import time as t import pytest @@ -16,7 +17,7 @@ ) from unittest import mock - +from multiprocessing import Process, Manager from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, @@ -91,7 +92,7 @@ def close(self): class MockRedis(MockRedisBase): - def __init__(self, decode_responses=False, cache: dict = None, **kwargs): + def __init__(self, decode_responses=False, cache: dict = None, mock_latency_ms: int = 0, **kwargs): self.cache = cache or {} self.expire_dict = {} self.decode_responses = decode_responses @@ -101,9 +102,13 @@ def __init__(self, decode_responses=False, cache: dict = None, **kwargs): self.url = "" self.__dict__.update(kwargs) self.closed = False + self.mock_latency_ms = mock_latency_ms super(MockRedis, self).__init__() + # check_closed is called before every mock redis operation def check_closed(self): + if self.mock_latency_ms != 0: + 
t.sleep(self.mock_latency_ms / 1000) if self.closed == False: return if self.mode == "cluster": @@ -127,6 +132,7 @@ def expire(self, name, time): # return {} if no match def hgetall(self, name): + self.check_closed() if self.expire_dict.get(name, t.time() + 1) < t.time(): self.cache.pop(name, {}) return self.cache.get(name, {}) @@ -551,3 +557,49 @@ def test_redis_connection_get_kwargs_error(): with pytest.raises(IdempotencyRedisClientConfigError): layer = RedisCachePersistenceLayer(host="testhost") + + +def test_redis_orphan_record_race_condition(lambda_context, capsys): + redis_client = MockRedis( + host="localhost", + port="63005", + decode_responses=True, + mock_latency_ms=200, + ) + manager = Manager() + # use a thread safe dict + redis_client.expire_dict = manager.dict() + redis_client.cache = manager.dict() + # given a mock redis client with latency, orphan record exists + layer = RedisCachePersistenceLayer(client=redis_client) + + mock_event = {"data": "value4"} + lambda_response = {"foo": "bar"} + + @idempotent(persistence_store=layer) + def lambda_handler(event, context): + print("lambda executed") + if redis_client.cache.get("exec_count", None) != None: + redis_client.cache["exec_count"] += 1 + return lambda_response + + # run handler for the first time to create a valid record in cache + lambda_handler(mock_event, lambda_context) + # modify the cache to create the orphan record + for key, item in redis_client.cache.items(): + json_dict = json.loads(item) + json_dict["expiration"] = int(t.time()) - 4000 + redis_client.cache[key] = json.dumps(json_dict).encode() + # Given orpahn idempotency record with same payload already in Redis + # When running two lambda handler at the same time + redis_client.cache["exec_count"] = 0 + p1 = Process(target=lambda_handler, args=(mock_event, lambda_context)) + p2 = Process(target=lambda_handler, args=(mock_event, lambda_context)) + p1.start() + t.sleep(0.01) + p2.start() + p1.join() + p2.join() + # 
print(redis_client.cache) + # Then only one handler will actually run + assert redis_client.cache["exec_count"] == 1 From 23de1533aa7132aa9659d58d1710eb05b81e8745 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 10 Nov 2023 23:37:28 +0000 Subject: [PATCH 44/81] add a race condition test for empty record --- .../persistence/test_redis_layer.py | 63 +++++++++++++++---- 1 file changed, 51 insertions(+), 12 deletions(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index c9b37f39aea..34f039ef878 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -17,7 +17,7 @@ ) from unittest import mock -from multiprocessing import Process, Manager +from multiprocessing import Process, Manager, Lock from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, @@ -103,6 +103,7 @@ def __init__(self, decode_responses=False, cache: dict = None, mock_latency_ms: self.__dict__.update(kwargs) self.closed = False self.mock_latency_ms = mock_latency_ms + self.nx_lock = Lock() super(MockRedis, self).__init__() # check_closed is called before every mock redis operation @@ -157,12 +158,15 @@ def set(self, name, value, ex: int = 0, nx: bool = False): if isinstance(value, str): value = value.encode() - # nx logic - if name in self.cache and nx: - return None + # nx logic, acquire a lock for multiprocessing safety + with self.nx_lock: + # key exist, nx mode will just return None + if name in self.cache and nx: + return None - self.cache[name] = value - self.expire(name, ex) + # key doesn't exist, set the key + self.cache[name] = value + self.expire(name, ex) return True # return None if not found @@ -559,12 +563,12 @@ def test_redis_connection_get_kwargs_error(): layer = RedisCachePersistenceLayer(host="testhost") -def 
test_redis_orphan_record_race_condition(lambda_context, capsys): +def test_redis_orphan_record_race_condition(lambda_context): redis_client = MockRedis( host="localhost", port="63005", decode_responses=True, - mock_latency_ms=200, + mock_latency_ms=50, ) manager = Manager() # use a thread safe dict @@ -585,21 +589,56 @@ def lambda_handler(event, context): # run handler for the first time to create a valid record in cache lambda_handler(mock_event, lambda_context) - # modify the cache to create the orphan record + # modify the cache expiration to create the orphan record for key, item in redis_client.cache.items(): json_dict = json.loads(item) json_dict["expiration"] = int(t.time()) - 4000 redis_client.cache[key] = json.dumps(json_dict).encode() - # Given orpahn idempotency record with same payload already in Redis + # Given orphan idempotency record with same payload already in Redis + # When running two lambda handler at the same time + redis_client.cache["exec_count"] = 0 + p1 = Process(target=lambda_handler, args=(mock_event, lambda_context)) + p2 = Process(target=lambda_handler, args=(mock_event, lambda_context)) + p1.start() + p2.start() + p1.join() + p2.join() + # Then only one handler will actually run + assert redis_client.cache["exec_count"] == 1 + + +# race condition on empty record +def test_redis_race_condition(lambda_context): + redis_client = MockRedis( + host="localhost", + port="63005", + decode_responses=True, + mock_latency_ms=50, + ) + manager = Manager() + # use a thread safe dict + redis_client.expire_dict = manager.dict() + redis_client.cache = manager.dict() + # given a mock redis client with latency, orphan record exists + layer = RedisCachePersistenceLayer(client=redis_client) + + mock_event = {"data": "value4"} + lambda_response = {"foo": "bar"} + + @idempotent(persistence_store=layer) + def lambda_handler(event, context): + print("lambda executed") + if redis_client.cache.get("exec_count", None) != None: + redis_client.cache["exec_count"] 
+= 1 + return lambda_response + # When running two lambda handler at the same time redis_client.cache["exec_count"] = 0 p1 = Process(target=lambda_handler, args=(mock_event, lambda_context)) p2 = Process(target=lambda_handler, args=(mock_event, lambda_context)) p1.start() - t.sleep(0.01) p2.start() p1.join() p2.join() - # print(redis_client.cache) # Then only one handler will actually run assert redis_client.cache["exec_count"] == 1 From 7cb4e1f876184f1d2640f7ecf1567955fe3f8ba3 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 8 Dec 2023 02:28:54 +0000 Subject: [PATCH 45/81] add abs_lambda_path, protocol for redis client --- aws_lambda_powertools/shared/functions.py | 24 +++++ .../idempotency/persistence/redis.py | 92 +++++++++++++------ tests/unit/test_shared_functions.py | 32 +++++++ 3 files changed, 121 insertions(+), 27 deletions(-) diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index 82ea7dad8d8..951346dc6a9 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -7,6 +7,7 @@ import os import warnings from binascii import Error as BinAsciiError +from pathlib import Path from typing import Any, Dict, Generator, Optional, Union, overload from aws_lambda_powertools.shared import constants @@ -173,3 +174,26 @@ def extract_event_from_common_models(data: Any) -> Dict | Any: # Is it a Dataclass? If not return as is return dataclasses.asdict(data) if dataclasses.is_dataclass(data) else data + + +def abs_lambda_path(relatvie_path="") -> str: + """Return the absolute path from the given relative path to lambda handler + + Parameters + ---------- + path : string + the relative path to lambda handler, by default "" + + Returns + ------- + string + the absolute path generated from the given relative path. + If the environment variable LAMBDA_TASK_ROOT is set, it will use that value. + Otherwise, it will use the current working directory. 
+ If the path is empty, it will return the current working directory. + """ + current_working_directory = os.environ.get("LAMBDA_TASK_ROOT", "") + if not current_working_directory: + current_working_directory = Path.cwd() + Path(current_working_directory, relatvie_path) + return Path(current_working_directory, relatvie_path) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index b9bb3046402..c836d288583 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,13 +1,16 @@ +# ruff: noqa from __future__ import annotations import datetime import json import logging import warnings -from typing import Any, Dict +from contextlib import contextmanager +from datetime import timedelta +from typing import Any, Awaitable, Dict, Union import redis -from typing_extensions import Literal +from typing_extensions import Literal, Protocol from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( @@ -25,15 +28,35 @@ logger = logging.getLogger(__name__) +class RedisClientProtocol(Protocol): + def get(name: Union[bytes, str, memoryview]) -> Union[Awaitable, Any]: + ... + + def set( + name: Union[bytes, str, memoryview], + value: Union[bytes, memoryview, str, int, float], + ex: Union[int, timedelta, None], + nx: bool, + ) -> Union[Awaitable, Any]: + ... + + def delete(keys: Union[bytes, str, memoryview]) -> Union[Awaitable, Any]: + ... + + def get_connection_kwargs() -> Dict: + ... 
+ + class RedisConnection: def __init__( self, - host: str | None, - username: str | None, - password: str | None, - url: str | None, + host: str = "", + username: str = "", + password: str = "", + url: str = "", db_index: int = 0, port: int = 6379, + ssl: bool = False, mode: Literal["standalone", "cluster"] = "standalone", ) -> None: """ @@ -43,21 +66,20 @@ def __init__( ---------- host: str, optional redis host - port: int, optional - redis port username: str, optional redis username password: str, optional redis password - db_index: str, optional + url: str, optional + redis connection string, using url will override the host/port in the previous parameters + db_index: str, optional: default 0 redis db index + port: int, optional: default 6379 + redis port mode: str, Literal["standalone","cluster"] set redis client mode, choose from standalone/cluster - url: str, optional - redis connection string, using url will override the host/port in the previous parameters - extra_options: **kwargs, optional - extra kwargs to pass directly into redis client - + ssl: bool, optional: default False + set whether to use ssl for Redis connection Examples -------- @@ -109,6 +131,7 @@ def create_subscription_payment(event: dict) -> Payment: self.username = username self.password = password self.db_index = db_index + self.ssl = ssl self.mode = mode def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: @@ -134,6 +157,7 @@ def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: password=self.password, db=self.db_index, decode_responses=True, + ssl=self.ssl, ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis: {self.host}") @@ -143,14 +167,14 @@ def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, - host: str | None = None, - username: str | None = None, - password: str | None = None, - url: str | None = None, + host: str = "", + 
username: str = "", + password: str = "", + url: str = "", db_index: int = 0, port: int = 6379, mode: Literal["standalone", "cluster"] = "standalone", - client: redis.Redis | redis.cluster.RedisCluster | None = None, + client: RedisClientProtocol = None, in_progress_expiry_attr: str = "in_progress_expiration", expiry_attr: str = "expiration", status_attr: str = "status", @@ -341,21 +365,35 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: except IdempotencyOrphanRecordError: # deal with orphan record here # aquire a lock for default 10 seconds - lock = self.client.set(name=item["name"] + ":lock", value="True", ex=self._orphan_lock_timeout, nx=True) - logger.debug("acquiring lock to overwrite orphan record") - if not lock: - # lock failed to aquire, means encountered a race condition. Just return - raise IdempotencyItemAlreadyExistsError + with self._acquire_lock(name=item["name"]): + self.client.set(name=item["name"], value=encoded_item, ex=ttl) - # Overwrite orphan record and set timeout, we don't use nx here for we need to overwrite - self.client.set(name=item["name"], value=encoded_item, ex=ttl) - # lock was not removed here intentionally. Prevent another orphan fix in race condition. + # lock was not removed here intentionally. Prevent another orphan operation in race condition. except (redis.exceptions.RedisError, redis.exceptions.RedisClusterException) as e: raise e except Exception as e: logger.debug(f"encountered non-redis exception:{e}") raise e + @contextmanager + def _acquire_lock(self, name: str): + """ + aquire a lock for default 10 seconds + """ + try: + acquired = self.client.set(name=f"{name}:lock", value="True", ex=self._orphan_lock_timeout, nx=True) + logger.debug("acquiring lock to overwrite orphan record") + if acquired: + logger.debug("lock acquired") + yield + else: + # lock failed to aquire, means encountered a race condition. 
Just return + logger.debug("lock failed to acquire, raise to retry") + raise IdempotencyItemAlreadyExistsError + + finally: + ... + def _put_record(self, data_record: DataRecord) -> None: if data_record.status == STATUS_CONSTANTS["INPROGRESS"]: self._put_in_progress_record(data_record=data_record) diff --git a/tests/unit/test_shared_functions.py b/tests/unit/test_shared_functions.py index 9232b72527b..9cf951714d3 100644 --- a/tests/unit/test_shared_functions.py +++ b/tests/unit/test_shared_functions.py @@ -1,12 +1,14 @@ import os import warnings from dataclasses import dataclass +from pathlib import Path import pytest from pydantic import BaseModel from aws_lambda_powertools.shared import constants from aws_lambda_powertools.shared.functions import ( + abs_lambda_path, extract_event_from_common_models, powertools_debug_is_set, powertools_dev_is_set, @@ -138,3 +140,33 @@ def test_resolve_max_age_env_var_wins_over_default_value(monkeypatch: pytest.Mon # THEN the result must be the environment variable value assert max_age == 20 + + +def test_abs_lambda_path_empty(): + # Given Env is not set + os.environ["LAMBDA_TASK_ROOT"] = "" + # Then path = os.getcwd + assert abs_lambda_path() == f"{Path.cwd()}/" + + +def test_abs_lambda_path_empty_envvar(): + # Given Env is set + os.environ["LAMBDA_TASK_ROOT"] = "/var/task" + # Then path = Env/ + assert abs_lambda_path() == "/var/task/" + + +def test_abs_lambda_path_w_filename(): + # Given Env is not set and relative_path provided + relatvie_path = "cert/pub.cert" + os.environ["LAMBDA_TASK_ROOT"] = "" + # Then path = os.getcwd + relative_path + assert abs_lambda_path(relatvie_path) == str(Path(Path.cwd(), relatvie_path)) + + +def test_abs_lambda_path_w_filename_envvar(): + # Given Env is set and relative_path provided + relatvie_path = "cert/pub.cert" + os.environ["LAMBDA_TASK_ROOT"] = "/var/task" + # Then path = env + relative_path + assert abs_lambda_path(relatvie_path="cert/pub.cert") == 
str(Path(os.environ["LAMBDA_TASK_ROOT"], relatvie_path)) From 514ed7d52554bf81a1f60662387304c8a5309835 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 8 Dec 2023 02:58:59 +0000 Subject: [PATCH 46/81] fix typing --- aws_lambda_powertools/shared/functions.py | 2 +- .../idempotency/persistence/redis.py | 29 ++++++++++++------- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index d2cbd4c095b..e393d0abfed 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -271,6 +271,6 @@ def abs_lambda_path(relatvie_path="") -> str: """ current_working_directory = os.environ.get("LAMBDA_TASK_ROOT", "") if not current_working_directory: - current_working_directory = Path.cwd() + current_working_directory = str(Path.cwd()) Path(current_working_directory, relatvie_path) return str(Path(current_working_directory, relatvie_path)) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index c836d288583..b101750c67c 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -29,21 +29,28 @@ class RedisClientProtocol(Protocol): - def get(name: Union[bytes, str, memoryview]) -> Union[Awaitable, Any]: + def get(self, name: Union[bytes, str, memoryview]) -> bytes | str | None: ... def set( - name: Union[bytes, str, memoryview], - value: Union[bytes, memoryview, str, int, float], - ex: Union[int, timedelta, None], - nx: bool, - ) -> Union[Awaitable, Any]: + self, + name: str | bytes, + value: bytes | float | int | str, + ex: float | timedelta | None = ..., + px: float | timedelta | None = ..., + nx: bool = ..., + xx: bool = ..., + keepttl: bool = ..., + get: bool = ..., + exat: Any | None = ..., + pxat: Any | None = ..., + ) -> bool | None: ... 
- def delete(keys: Union[bytes, str, memoryview]) -> Union[Awaitable, Any]: + def delete(self, keys: Union[bytes, str, memoryview]) -> Awaitable | Any: ... - def get_connection_kwargs() -> Dict: + def get_connection_kwargs(self) -> Dict: ... @@ -134,7 +141,7 @@ def create_subscription_payment(event: dict) -> Payment: self.ssl = ssl self.mode = mode - def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: + def _init_client(self) -> RedisClientProtocol: logger.info(f"Trying to connect to Redis: {self.host}") client: type[redis.Redis | redis.cluster.RedisCluster] if self.mode == "standalone": @@ -150,7 +157,7 @@ def _init_client(self) -> redis.Redis | redis.cluster.RedisCluster: return client.from_url(url=self.url) else: logger.debug(f"Using other parameters to connect to Redis: {self.host}") - return client( # type: ignore + return client( host=self.host, port=self.port, username=self.username, @@ -174,7 +181,7 @@ def __init__( db_index: int = 0, port: int = 6379, mode: Literal["standalone", "cluster"] = "standalone", - client: RedisClientProtocol = None, + client: RedisClientProtocol | None = None, in_progress_expiry_attr: str = "in_progress_expiration", expiry_attr: str = "expiration", status_attr: str = "status", From bebd9c60245d1bdb72359cfe9b2c2490d437ef08 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Fri, 8 Dec 2023 02:59:43 +0000 Subject: [PATCH 47/81] remove awaitable --- .../utilities/idempotency/persistence/redis.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index b101750c67c..372a2496a17 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -7,7 +7,7 @@ import warnings from contextlib import contextmanager from datetime import timedelta -from typing import Any, Awaitable, Dict, Union 
+from typing import Any, Dict, Union import redis from typing_extensions import Literal, Protocol @@ -47,7 +47,7 @@ def set( ) -> bool | None: ... - def delete(self, keys: Union[bytes, str, memoryview]) -> Awaitable | Any: + def delete(self, keys: Union[bytes, str, memoryview]) -> Any: ... def get_connection_kwargs(self) -> Dict: From f00cd3cd00f6c5705afd07b18aa1982618cc2627 Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Sat, 9 Dec 2023 01:39:52 +0000 Subject: [PATCH 48/81] optimize protocol --- .../idempotency/persistence/redis.py | 68 ++++++++++++------- 1 file changed, 43 insertions(+), 25 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 372a2496a17..6265ac29e14 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -1,4 +1,3 @@ -# ruff: noqa from __future__ import annotations import datetime @@ -7,7 +6,7 @@ import warnings from contextlib import contextmanager from datetime import timedelta -from typing import Any, Dict, Union +from typing import Any, Dict import redis from typing_extensions import Literal, Protocol @@ -29,28 +28,20 @@ class RedisClientProtocol(Protocol): - def get(self, name: Union[bytes, str, memoryview]) -> bytes | str | None: + def get(self, name: bytes | str | memoryview) -> bytes | str | None: ... - def set( + def set( # noqa self, name: str | bytes, - value: bytes | float | int | str, + value: bytes | float | str, ex: float | timedelta | None = ..., px: float | timedelta | None = ..., nx: bool = ..., - xx: bool = ..., - keepttl: bool = ..., - get: bool = ..., - exat: Any | None = ..., - pxat: Any | None = ..., ) -> bool | None: ... - def delete(self, keys: Union[bytes, str, memoryview]) -> Any: - ... - - def get_connection_kwargs(self) -> Dict: + def delete(self, keys: bytes | str | memoryview) -> Any: ... 
@@ -63,8 +54,9 @@ def __init__( url: str = "", db_index: int = 0, port: int = 6379, - ssl: bool = False, mode: Literal["standalone", "cluster"] = "standalone", + ssl: bool = False, + ssl_cert_reqs: Literal["required", "optional", "none"] = "none", ) -> None: """ Initialize Redis connection which will be used in redis persistence_store to support idempotency @@ -87,6 +79,9 @@ def __init__( set redis client mode, choose from standalone/cluster ssl: bool, optional: default False set whether to use ssl for Redis connection + ssl_cert_reqs: str, optional: default "none" + set whether to use ssl cert for Redis connection, choose from required/optional/none + Examples -------- @@ -139,6 +134,7 @@ def create_subscription_payment(event: dict) -> Payment: self.password = password self.db_index = db_index self.ssl = ssl + self.ssl_cert_reqs = ssl_cert_reqs self.mode = mode def _init_client(self) -> RedisClientProtocol: @@ -165,6 +161,7 @@ def _init_client(self) -> RedisClientProtocol: db=self.db_index, decode_responses=True, ssl=self.ssl, + ssl_cert_reqs=self.ssl_cert_reqs, ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis: {self.host}") @@ -181,6 +178,8 @@ def __init__( db_index: int = 0, port: int = 6379, mode: Literal["standalone", "cluster"] = "standalone", + ssl: bool = False, + ssl_cert_reqs: Literal["required", "optional", "none"] = "none", client: RedisClientProtocol | None = None, in_progress_expiry_attr: str = "in_progress_expiration", expiry_attr: str = "expiration", @@ -192,20 +191,37 @@ def __init__( Initialize the Redis Persistence Layer Parameters ---------- - client: Union[redis.Redis, redis.cluster.RedisCluster], optional - You can bring your established Redis client. - If client is provided, config will be ignored - config: RedisConfig, optional - If client is not provided, config will be parsed and a corresponding - Redis client will be created. 
+ host: str, optional + redis host + username: str, optional + redis username + password: str, optional + redis password + url: str, optional + redis connection string, using url will override the host/port in the previous parameters + db_index: str, optional: default 0 + redis db index + port: int, optional: default 6379 + redis port + mode: str, Literal["standalone","cluster"] + set redis client mode, choose from standalone/cluster + ssl: bool, optional: default False + set whether to use ssl for Redis connection + ssl_cert_reqs: str, optional: default "none" + set whether to use ssl cert for Redis connection, choose from required/optional/none + client: RedisClientProtocol, optional + You can bring your established Redis client that follows RedisClientProtocol. + If client is provided, all connection config above will be ignored + expiry_attr: str, optional + Redis json attribute name for expiry timestamp, by default "expiration" in_progress_expiry_attr: str, optional - Redis hash attribute name for in-progress expiry timestamp, by default "in_progress_expiration" + Redis json attribute name for in-progress expiry timestamp, by default "in_progress_expiration" status_attr: str, optional - Redis hash attribute name for status, by default "status" + Redis json attribute name for status, by default "status" data_attr: str, optional - Redis hash attribute name for response data, by default "data" + Redis json attribute name for response data, by default "data" validation_key_attr: str, optional - Redis hash attribute name for hashed representation of the parts of the event used for validation + Redis json attribute name for hashed representation of the parts of the event used for validation Examples -------- @@ -247,6 +263,8 @@ def lambda_handler(event: dict, context: LambdaContext): db_index=db_index, url=url, mode=mode, + ssl=ssl, + ssl_cert_reqs=ssl_cert_reqs, )._init_client() else: self.client = client From b23bc5c9a719c1130b1b03235d8c20468028704d Mon Sep 17 
00:00:00 2001 From: Leandro Damascena Date: Mon, 11 Dec 2023 22:50:26 +0000 Subject: [PATCH 49/81] Fix Bandit issue --- .../idempotency/persistence/redis.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 6265ac29e14..038e0b3d84b 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -50,7 +50,7 @@ def __init__( self, host: str = "", username: str = "", - password: str = "", + password: str = "", # nosec - password for Redis connection url: str = "", db_index: int = 0, port: int = 6379, @@ -173,7 +173,7 @@ def __init__( self, host: str = "", username: str = "", - password: str = "", + password: str = "", # nosec - password for Redis connection url: str = "", db_index: int = 0, port: int = 6379, @@ -270,10 +270,12 @@ def lambda_handler(event: dict, context: LambdaContext): self.client = client if not hasattr(self.client, "get_connection_kwargs"): - raise IdempotencyRedisClientConfigError + raise IdempotencyRedisClientConfigError( + "Error configuring the Redis Client. 
The client must implement get_connection_kwargs function.", + ) if not self.client.get_connection_kwargs().get("decode_responses", False): warnings.warn( - "Redis connection with `decode_responses=False` may casue lower performance", + "Redis connection with `decode_responses=False` may cause lower performance", stacklevel=2, ) @@ -287,12 +289,12 @@ def lambda_handler(event: dict, context: LambdaContext): super(RedisCachePersistenceLayer, self).__init__() self._orphan_lock_timeout = min(10, self.expires_after_seconds) - def _get_expiry_second(self, expery_timestamp: int | None = None) -> int: + def _get_expiry_second(self, expiry_timestamp: int | None = None) -> int: """ return seconds of timedelta from now to the given unix timestamp """ - if expery_timestamp: - return expery_timestamp - int(datetime.datetime.now().timestamp()) + if expiry_timestamp: + return expiry_timestamp - int(datetime.datetime.now().timestamp()) return self.expires_after_seconds def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> DataRecord: @@ -353,12 +355,12 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: logger.debug(f"Putting record on Redis for idempotency key: {item['name']}") encoded_item = self._json_serializer(item["mapping"]) - ttl = self._get_expiry_second(expery_timestamp=item["mapping"][self.expiry_attr]) + ttl = self._get_expiry_second(expiry_timestamp=item["mapping"][self.expiry_attr]) redis_response = self.client.set(name=item["name"], value=encoded_item, ex=ttl, nx=True) # redis_response:True -> Redis set succeed, idempotency key does not exist before - # return to idempotency and proceed to handler excution phase. Most cases should return here + # return to idempotency and proceed to handler execution phase. 
Most cases should return here if redis_response: return From 5ae98cee4a71ac720119ed54ed9f61d982a5e3cc Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 11 Dec 2023 23:51:19 +0000 Subject: [PATCH 50/81] Refactoring integration tests to use testcontainers --- poetry.lock | 145 +++++++--- pyproject.toml | 1 + .../idempotency/test_idempotency_redis.py | 262 +++++++++--------- 3 files changed, 236 insertions(+), 172 deletions(-) diff --git a/poetry.lock b/poetry.lock index 20d50bcc7ed..5b1c7ec7a7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -26,7 +26,7 @@ trio = ["trio (<0.22)"] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, @@ -981,6 +981,41 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + [[package]] name = "envier" version = "0.4.0" @@ -1214,17 +1249,6 @@ files = [ {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, - {file = 
"ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, - {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, - {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, @@ -1601,16 +1625,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2477,6 +2491,29 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = 
"pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -2489,7 +2526,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2497,15 +2533,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2522,7 +2551,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = 
"sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2530,7 +2558,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2572,7 +2599,7 @@ toml = ["tomli (>=2.0.1)"] name = "redis" version = "4.6.0" description = "Python client for Redis database and key-value store" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, @@ -2919,6 +2946,40 @@ files = [ [package.dependencies] mpmath = ">=0.19" +[[package]] +name = "testcontainers" +version = "3.7.1" +description = "Library provides lightweight, throwaway instances of common databases, Selenium web browsers, or anything else that can 
run in a Docker container" +optional = false +python-versions = ">=3.7" +files = [ + {file = "testcontainers-3.7.1-py2.py3-none-any.whl", hash = "sha256:7f48cef4bf0ccd78f1a4534d4b701a003a3bace851f24eae58a32f9e3f0aeba0"}, +] + +[package.dependencies] +deprecation = "*" +docker = ">=4.0.0" +redis = {version = "*", optional = true, markers = "extra == \"redis\""} +wrapt = "*" + +[package.extras] +arangodb = ["python-arango"] +azurite = ["azure-storage-blob"] +clickhouse = ["clickhouse-driver"] +docker-compose = ["docker-compose"] +google-cloud-pubsub = ["google-cloud-pubsub (<2)"] +kafka = ["kafka-python"] +keycloak = ["python-keycloak"] +mongo = ["pymongo"] +mssqlserver = ["pymssql"] +mysql = ["pymysql", "sqlalchemy"] +neo4j = ["neo4j"] +oracle = ["cx-Oracle", "sqlalchemy"] +postgresql = ["psycopg2-binary", "sqlalchemy"] +rabbitmq = ["pika"] +redis = ["redis"] +selenium = ["selenium"] + [[package]] name = "tomli" version = "2.0.1" @@ -3157,6 +3218,22 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] +[[package]] +name = "websocket-client" +version = "1.6.1" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.7" +files = [ + {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, + {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, +] + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "wrapt" version = "1.15.0" @@ -3296,4 +3373,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "109b837ba0c504264fc186038afd29cd2b94c898755a0576b71a25b749b8cde8" +content-hash = "0f4820ac3e0bfb49ec79f4f05e2477621c2de50ff1d3022aa4a9528e53490b5a" diff --git a/pyproject.toml b/pyproject.toml index 
64f13781c56..8c33776bc05 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,6 +112,7 @@ ruff = ">=0.0.272,<0.1.8" retry2 = "^0.9.5" pytest-socket = "^0.6.0" types-redis = "^4.6.0.7" +testcontainers = {extras = ["redis"], version = "^3.7.1"} [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 3acd31baa12..b0524fba4fe 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -1,7 +1,7 @@ import copy import pytest -import redis +from testcontainers.redis import RedisContainer from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( @@ -16,6 +16,11 @@ ) +@pytest.fixture +def redis_container_image(): + return "public.ecr.aws/docker/library/redis:7.2-alpine" + + @pytest.fixture def lambda_context(): class LambdaContext: @@ -31,174 +36,155 @@ def get_remaining_time_in_millis(self) -> int: return LambdaContext() -@pytest.fixture -def persistence_store_sentinel_redis(): - sentinel = redis.Sentinel( - [("localhost", 26379), ("localhost", 26380), ("localhost", 26381)], - ) - # you will need to handle yourself the connection to pass again the password - # and avoid AuthenticationError at redis queries - host, port = sentinel.discover_master("mymaster") - redis_client = redis.Redis( - host=host, - port=port, - decode_responses=True, - ) - redis_client.expire() - - return RedisCachePersistenceLayer(client=redis_client) - - -@pytest.fixture -def persistence_store_standalone_redis(): - # you will need to handle yourself the connection to pass again the password - # and avoid AuthenticationError at redis queries - redis_client = redis.Redis( - host="localhost", - port=63005, - decode_responses=False, - ) - return RedisCachePersistenceLayer(client=redis_client) - - # test 
basic def test_idempotent_function_and_lambda_handler_redis_basic( - # idempotency_config: IdempotencyConfig, - persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, + redis_container_image, ): - mock_event = {"data": "value"} - persistence_layer = persistence_store_standalone_redis - expected_result = {"message": "Foo"} + with RedisContainer(image=redis_container_image) as redis_container: + redis_client = redis_container.get_client() + mock_event = {"data": "value"} + persistence_layer = RedisCachePersistenceLayer(client=redis_client) + expected_result = {"message": "Foo"} - @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") - def record_handler(record): - return expected_result + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return expected_result - @idempotent(persistence_store=persistence_layer) - def lambda_handler(event, context): - return expected_result + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return expected_result - # WHEN calling the function - fn_result = record_handler(record=mock_event) - # WHEN calling lambda handler - handler_result = lambda_handler(mock_event, lambda_context) - # THEN we expect the function and lambda handler to execute successfully - assert fn_result == expected_result - assert handler_result == expected_result + # WHEN calling the function + fn_result = record_handler(record=mock_event) + # WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result def test_idempotent_function_and_lambda_handler_redis_cache( - persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, + redis_container_image, ): - mock_event = {"data": "value2"} - 
persistence_layer = persistence_store_standalone_redis - result = {"message": "Foo"} - expected_result = copy.deepcopy(result) - - @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") - def record_handler(record): - return result - - @idempotent(persistence_store=persistence_layer) - def lambda_handler(event, context): - return result - - # WHEN calling the function - fn_result = record_handler(record=mock_event) - # WHEN calling lambda handler - handler_result = lambda_handler(mock_event, lambda_context) - # THEN we expect the function and lambda handler to execute successfully - assert fn_result == expected_result - assert handler_result == expected_result - - # modify the return to check if idem cache works - result = {"message": "Bar"} - fn_result2 = record_handler(record=mock_event) - # Second time calling lambda handler, test if same result - handler_result2 = lambda_handler(mock_event, lambda_context) - assert fn_result2 == expected_result - assert handler_result2 == expected_result - - # modify the mock event to check if we got updated result - mock_event = {"data": "value3"} - fn_result3 = record_handler(record=mock_event) - # thrid time calling lambda handler, test if result updated - handler_result3 = lambda_handler(mock_event, lambda_context) - assert fn_result3 == result - assert handler_result3 == result + with RedisContainer(image=redis_container_image) as redis_container: + redis_client = redis_container.get_client() + mock_event = {"data": "value2"} + persistence_layer = RedisCachePersistenceLayer(client=redis_client) + result = {"message": "Foo"} + expected_result = copy.deepcopy(result) + + @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") + def record_handler(record): + return result + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return result + + # WHEN calling the function + fn_result = record_handler(record=mock_event) + 
# WHEN calling lambda handler + handler_result = lambda_handler(mock_event, lambda_context) + # THEN we expect the function and lambda handler to execute successfully + assert fn_result == expected_result + assert handler_result == expected_result + + # modify the return to check if idem cache works + result = {"message": "Bar"} + fn_result2 = record_handler(record=mock_event) + # Second time calling lambda handler, test if same result + handler_result2 = lambda_handler(mock_event, lambda_context) + assert fn_result2 == expected_result + assert handler_result2 == expected_result + + # modify the mock event to check if we got updated result + mock_event = {"data": "value3"} + fn_result3 = record_handler(record=mock_event) + # thrid time calling lambda handler, test if result updated + handler_result3 = lambda_handler(mock_event, lambda_context) + assert fn_result3 == result + assert handler_result3 == result # test idem-inprogress def test_idempotent_lambda_redis_in_progress( - persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, + redis_container_image, ): """ Test idempotent decorator where lambda_handler is already processing an event with matching event key """ + with RedisContainer(image=redis_container_image) as redis_container: + redis_client = redis_container.get_client() - mock_event = {"data": "value4"} - persistence_store = persistence_store_standalone_redis - lambda_response = {"foo": "bar"} + mock_event = {"data": "value4"} + persistence_store = RedisCachePersistenceLayer(client=redis_client) + lambda_response = {"foo": "bar"} - @idempotent(persistence_store=persistence_store) - def lambda_handler(event, context): - return lambda_response + @idempotent(persistence_store=persistence_store) + def lambda_handler(event, context): + return lambda_response - # register the context first - lambda_handler(mock_event, lambda_context) - # save additional to in_progress - mock_event = {"data": "value7"} - try: - 
persistence_store.save_inprogress(mock_event, 10000) - except IdempotencyItemAlreadyExistsError: - pass - - with pytest.raises(IdempotencyAlreadyInProgressError): + # register the context first lambda_handler(mock_event, lambda_context) + # save additional to in_progress + mock_event = {"data": "value7"} + try: + persistence_store.save_inprogress(mock_event, 10000) + except IdempotencyItemAlreadyExistsError: + pass + + with pytest.raises(IdempotencyAlreadyInProgressError): + lambda_handler(mock_event, lambda_context) # test -remove def test_idempotent_lambda_redis_delete( - persistence_store_standalone_redis: RedisCachePersistenceLayer, lambda_context, + redis_container_image, ): - mock_event = {"data": "test_delete"} - persistence_layer = persistence_store_standalone_redis - result = {"message": "Foo"} - - @idempotent(persistence_store=persistence_layer) - def lambda_handler(event, context): - return result - - # first run is just to populate function infos for deletion. - # delete_record won't work if the function was not run yet. bug maybe? 
- handler_result = lambda_handler(mock_event, lambda_context) - # delete what's might be dirty data - persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) - # run second time to ensure clean result - handler_result = lambda_handler(mock_event, lambda_context) - assert handler_result == result - persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) - # delete the idem and handler should output new result - result = {"message": "Foo2"} - handler_result2 = lambda_handler(mock_event, lambda_context) - - assert handler_result2 == result - - -def test_idempotent_lambda_redis_credential(lambda_context): - redis_client = redis.Redis( - host="localhost", - port="63005", - decode_responses=True, - ) - pwd = "terriblePassword" - usr = "test_acl_denial" - redis_client.acl_setuser(username=usr, enabled=True, passwords="+" + pwd, keys="*", commands=["+hgetall", "-set"]) - redis_client.auth(password=pwd, username=usr) + with RedisContainer(image=redis_container_image) as redis_container: + redis_client = redis_container.get_client() + mock_event = {"data": "test_delete"} + persistence_layer = RedisCachePersistenceLayer(client=redis_client) + result = {"message": "Foo"} + + @idempotent(persistence_store=persistence_layer) + def lambda_handler(event, context): + return result + + # first run is just to populate function infos for deletion. + # delete_record won't work if the function was not run yet. bug maybe? 
+ handler_result = lambda_handler(mock_event, lambda_context) + # delete what's might be dirty data + persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) + # run second time to ensure clean result + handler_result = lambda_handler(mock_event, lambda_context) + assert handler_result == result + persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) + # delete the idem and handler should output new result + result = {"message": "Foo2"} + handler_result2 = lambda_handler(mock_event, lambda_context) + + assert handler_result2 == result + + +def test_idempotent_lambda_redis_credential(lambda_context, redis_container_image): + with RedisContainer(image=redis_container_image) as redis_container: + redis_client = redis_container.get_client() + + pwd = "terriblePassword" + usr = "test_acl_denial" + redis_client.acl_setuser( + username=usr, + enabled=True, + passwords="+" + pwd, + keys="*", + commands=["+hgetall", "-set"], + ) + redis_client.auth(password=pwd, username=usr) @idempotent(persistence_store=RedisCachePersistenceLayer(client=redis_client)) def lambda_handler(event, _): From f8490d105ee35e421c82eaadbb293534de608e1c Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 11 Dec 2023 23:59:50 +0000 Subject: [PATCH 51/81] Removing code smell --- .../functional/idempotency/persistence/test_redis_layer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index 34f039ef878..bf5ed1ce194 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -245,7 +245,7 @@ def test_redis_connection_conn_error(): # when RedisCachePersistenceLayer is init with a bad host # then should raise IdempotencyRedisConnectionError with pytest.raises(IdempotencyRedisConnectionError): - layer = 
RedisCachePersistenceLayer(host=redis_badhost) + RedisCachePersistenceLayer(host=redis_badhost) @mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) @@ -253,7 +253,7 @@ def test_redis_connection_conf_error(): # when RedisCachePersistenceLayer is init with a not_supported_mode in mode param # then should raise IdempotencyRedisClientConfigError with pytest.raises(IdempotencyRedisClientConfigError): - layer = RedisCachePersistenceLayer(mode="not_supported_mode") + RedisCachePersistenceLayer(mode="not_supported_mode") @mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) @@ -560,7 +560,7 @@ def test_redis_connection_get_kwargs_error(): # then should raise IdempotencyRedisClientConfigError with pytest.raises(IdempotencyRedisClientConfigError): - layer = RedisCachePersistenceLayer(host="testhost") + RedisCachePersistenceLayer(host="testhost") def test_redis_orphan_record_race_condition(lambda_context): From 8d6aec5ecd1c967ff1609a434030f0f092c9316b Mon Sep 17 00:00:00 2001 From: RogerZhang Date: Wed, 20 Dec 2023 00:39:13 +0000 Subject: [PATCH 52/81] fix makefile, remove sentinel setup --- Makefile | 8 +------- tests/integration/idempotency/setup_sentinel.sh | 7 ------- 2 files changed, 1 insertion(+), 14 deletions(-) delete mode 100644 tests/integration/idempotency/setup_sentinel.sh diff --git a/Makefile b/Makefile index 010c25caf6c..f721ea9830b 100644 --- a/Makefile +++ b/Makefile @@ -31,7 +31,7 @@ lint-docs-fix: docker run -v ${PWD}:/markdown 06kellyjac/markdownlint-cli --fix "docs" test: - poetry run pytest -m "not perf" --ignore tests/e2e --ignore tests/integration --cov=aws_lambda_powertools --cov-report=xml + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance test-pydanticv2: @@ -40,12 +40,6 @@ test-pydanticv2: unit-test: poetry run pytest tests/unit -test-idempotency-redis: - docker run 
--name test-idempotency-redis -d -p 63005:6379 redis - poetry run pytest tests/integration/idempotency;docker stop test-idempotency-redis;docker rm test-idempotency-redis - - - e2e-test: poetry run pytest tests/e2e diff --git a/tests/integration/idempotency/setup_sentinel.sh b/tests/integration/idempotency/setup_sentinel.sh deleted file mode 100644 index 90e8b0f725d..00000000000 --- a/tests/integration/idempotency/setup_sentinel.sh +++ /dev/null @@ -1,7 +0,0 @@ -docker run --name redis_master -p 6379:6379 -d redis -docker run --name redis_slave_1 -p 6380:6380 --link redis_master:redis_master -d redis redis-server --slaveof redis_master 6379 -docker run --name redis_slave_2 -p 6381:6381 --link redis_master:redis_master -d redis redis-server --slaveof redis_master 6379 -docker run --name redis_slave_3 -p 6382:6382 --link redis_master:redis_master -d redis redis-server --slaveof redis_master 6379 -docker run --name redis_sentinel_1 -d -e REDIS_MASTER_HOST=redis_master -e REDIS_SENTINEL_PORT_NUMBER=26379 -e REDIS_SENTINEL_QUORUM=2 -p 26379:26379 --link redis_master:redis_master bitnami/redis-sentinel:latest -docker run --name redis_sentinel_2 -d -e REDIS_MASTER_HOST=redis_master -e REDIS_SENTINEL_PORT_NUMBER=26380 -e REDIS_SENTINEL_QUORUM=2 -p 26380:26380 --link redis_master:redis_master bitnami/redis-sentinel:latest -docker run --name redis_sentinel_3 -d -e REDIS_MASTER_HOST=redis_master -e REDIS_SENTINEL_PORT_NUMBER=26381 -e REDIS_SENTINEL_QUORUM=2 -p 26381:26381 --link redis_master:redis_master bitnami/redis-sentinel:latest From 2d13a3cafca494bb5219aa6d8e5b7c3d58aa21df Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Thu, 28 Dec 2023 19:40:52 +0000 Subject: [PATCH 53/81] Adding e2e tests --- tests/e2e/idempotency_redis/__init__.py | 0 tests/e2e/idempotency_redis/conftest.py | 19 ++ .../function_thread_safety_handler.py | 29 +++ .../optional_idempotency_key_handler.py | 17 ++ .../handlers/parallel_execution_handler.py | 17 ++ 
.../handlers/ttl_cache_expiration_handler.py | 19 ++ .../handlers/ttl_cache_timeout_handler.py | 20 ++ tests/e2e/idempotency_redis/infrastructure.py | 91 +++++++++ .../test_idempotency_redis.py | 183 ++++++++++++++++++ tests/e2e/utils/infrastructure.py | 4 +- 10 files changed, 397 insertions(+), 2 deletions(-) create mode 100644 tests/e2e/idempotency_redis/__init__.py create mode 100644 tests/e2e/idempotency_redis/conftest.py create mode 100644 tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py create mode 100644 tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py create mode 100644 tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py create mode 100644 tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py create mode 100644 tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py create mode 100644 tests/e2e/idempotency_redis/infrastructure.py create mode 100644 tests/e2e/idempotency_redis/test_idempotency_redis.py diff --git a/tests/e2e/idempotency_redis/__init__.py b/tests/e2e/idempotency_redis/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/idempotency_redis/conftest.py b/tests/e2e/idempotency_redis/conftest.py new file mode 100644 index 00000000000..65cffcd1948 --- /dev/null +++ b/tests/e2e/idempotency_redis/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from tests.e2e.idempotency_redis.infrastructure import IdempotencyRedisServerlessStack + + +@pytest.fixture(autouse=True, scope="package") +def infrastructure(): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = IdempotencyRedisServerlessStack() + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py b/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py new file mode 100644 
index 00000000000..ac2dedc7fab --- /dev/null +++ b/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py @@ -0,0 +1,29 @@ +import os +import time +from concurrent.futures import ThreadPoolExecutor, as_completed +from threading import current_thread + +from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + idempotent_function, +) + +REDIS_HOST = os.getenv("RedisEndpoint", "") +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +threads_count = 2 + + +@idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") +def record_handler(record): + time_now = time.time() + return {"thread_name": current_thread().name, "time": str(time_now)} + + +def lambda_handler(event, context): + with ThreadPoolExecutor(max_workers=threads_count) as executor: + futures = [executor.submit(record_handler, **{"record": i}) for i in range(threads_count)] + + return [ + {"state": future._state, "exception": future.exception(), "output": future.result()} + for future in as_completed(futures) + ] diff --git a/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py b/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py new file mode 100644 index 00000000000..cbdc6d60c11 --- /dev/null +++ b/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py @@ -0,0 +1,17 @@ +import os +import uuid + +from aws_lambda_powertools.utilities.idempotency import ( + IdempotencyConfig, + RedisCachePersistenceLayer, + idempotent, +) + +REDIS_HOST = os.getenv("RedisEndpoint", "") +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +config = IdempotencyConfig(event_key_jmespath='headers."X-Idempotency-Key"', use_local_cache=True) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + return {"request": str(uuid.uuid4())} diff --git 
a/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py b/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py new file mode 100644 index 00000000000..8a3b4f66982 --- /dev/null +++ b/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py @@ -0,0 +1,17 @@ +import os +import time + +from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, + idempotent, +) + +REDIS_HOST = os.getenv("RedisEndpoint", "") +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) + + +@idempotent(persistence_store=persistence_layer) +def lambda_handler(event, context): + time.sleep(5) + + return event diff --git a/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py b/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py new file mode 100644 index 00000000000..e5770e722e1 --- /dev/null +++ b/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py @@ -0,0 +1,19 @@ +import os +import time + +from aws_lambda_powertools.utilities.idempotency import ( + IdempotencyConfig, + RedisCachePersistenceLayer, + idempotent, +) + +REDIS_HOST = os.getenv("RedisEndpoint", "") +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +config = IdempotencyConfig(expires_after_seconds=5) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + time_now = time.time() + + return {"time": str(time_now)} diff --git a/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py b/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py new file mode 100644 index 00000000000..40c479d6696 --- /dev/null +++ b/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py @@ -0,0 +1,20 @@ +import os +import time + +from aws_lambda_powertools.utilities.idempotency import ( + IdempotencyConfig, + RedisCachePersistenceLayer, + idempotent, +) + +REDIS_HOST = os.getenv("RedisEndpoint", 
"") +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +config = IdempotencyConfig(expires_after_seconds=1) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + sleep_time: int = event.get("sleep") or 0 + time.sleep(sleep_time) + + return event diff --git a/tests/e2e/idempotency_redis/infrastructure.py b/tests/e2e/idempotency_redis/infrastructure.py new file mode 100644 index 00000000000..8034731a355 --- /dev/null +++ b/tests/e2e/idempotency_redis/infrastructure.py @@ -0,0 +1,91 @@ +import time +from typing import Tuple + +from aws_cdk import aws_ec2 as ec2 +from aws_cdk.aws_ec2 import ( + SecurityGroup, + SubnetType, + Vpc, +) +from aws_cdk.aws_elasticache import ( + CfnServerlessCache, +) + +from tests.e2e.utils.data_builder import build_random_value +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class IdempotencyRedisServerlessStack(BaseInfrastructure): + def create_resources(self) -> None: + service_name = build_random_value(10) + + vpc_stack: Vpc = self._create_vpc(service_name, "172.150.0.0/16") + security_groups: Tuple = self._create_security_groups(vpc_stack) + redis_cluster: CfnServerlessCache = self._create_redis_cache(service_name, vpc_stack, security_groups[0]) + + env_vars = {"RedisEndpoint": f"{str(redis_cluster.attr_endpoint_address)}"} + + self.create_lambda_functions( + function_props={ + "environment": env_vars, + "vpc": vpc_stack, + "security_groups": [security_groups[1]], + }, + ) + + def _create_vpc(self, service_name: str, cidr: str) -> Vpc: + vpc_stack: Vpc = Vpc( + self.stack, + "VPC-ServerlessCache", + nat_gateways=1, + vpc_name=f"VPC-ServerlessCache-{service_name}", + ip_addresses=ec2.IpAddresses.cidr(cidr), + subnet_configuration=[ + ec2.SubnetConfiguration(name="public", subnet_type=SubnetType.PUBLIC, cidr_mask=24), + ec2.SubnetConfiguration(name="private", subnet_type=SubnetType.PRIVATE_WITH_EGRESS, cidr_mask=24), + ], + 
max_azs=2, + ) + + return vpc_stack + + def _create_security_groups(self, vpc_stack: Vpc) -> Tuple[SecurityGroup, SecurityGroup]: + # Create a security group for the ElastiCache cluster + cache_security_group: SecurityGroup = SecurityGroup(self.stack, "ElastiCacheSecurityGroup", vpc=vpc_stack) + cache_security_group.add_ingress_rule( + peer=ec2.Peer.ipv4(vpc_stack.vpc_cidr_block), + connection=ec2.Port.tcp(6379), + description="Allow inbound traffic from VPC", + ) + + lambda_security_group = SecurityGroup( + self.stack, + "LambdaSecurityGroup", + vpc=vpc_stack, + allow_all_ipv6_outbound=True, + allow_all_outbound=True, + ) + + return cache_security_group, lambda_security_group + + def _create_redis_cache( + self, + service_name: str, + vpc_stack: Vpc, + cache_security_group: SecurityGroup, + ) -> CfnServerlessCache: + cache_cluster = CfnServerlessCache( + self.stack, + "ElastiCacheCluster", + engine="redis", + security_group_ids=[cache_security_group.security_group_id], + subnet_ids=[subnet.subnet_id for subnet in vpc_stack.private_subnets], + serverless_cache_name=f"Cache-{service_name}", + ) + + # Just to make sure the Cluster will be ready before the Stack is complete + while cache_cluster.attr_status == "CREATING": + print("Waiting for ElastiCache serverless to be created...") + time.sleep(5) + + return cache_cluster diff --git a/tests/e2e/idempotency_redis/test_idempotency_redis.py b/tests/e2e/idempotency_redis/test_idempotency_redis.py new file mode 100644 index 00000000000..4b5840ac477 --- /dev/null +++ b/tests/e2e/idempotency_redis/test_idempotency_redis.py @@ -0,0 +1,183 @@ +import json +from time import sleep + +import pytest + +from tests.e2e.utils import data_fetcher +from tests.e2e.utils.data_fetcher.common import GetLambdaResponseOptions, get_lambda_response_in_parallel + + +@pytest.fixture +def ttl_cache_expiration_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("TtlCacheExpirationHandlerArn", "") + + +@pytest.fixture +def 
ttl_cache_timeout_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("TtlCacheTimeoutHandlerArn", "") + + +@pytest.fixture +def parallel_execution_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("ParallelExecutionHandlerArn", "") + + +@pytest.fixture +def function_thread_safety_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("FunctionThreadSafetyHandlerArn", "") + + +@pytest.fixture +def optional_idempotency_key_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("OptionalIdempotencyKeyHandlerArn", "") + + +@pytest.mark.xdist_group(name="idempotency-redis") +def test_ttl_caching_expiration_idempotency(ttl_cache_expiration_handler_fn_arn: str): + # GIVEN + payload = json.dumps({"message": "Powertools for AWS Lambda (Python) - TTL 5s"}) + + # WHEN + # first execution + first_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_expiration_handler_fn_arn, + payload=payload, + ) + first_execution_response = first_execution["Payload"].read().decode("utf-8") + + # the second execution should return the same response as the first execution + second_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_expiration_handler_fn_arn, + payload=payload, + ) + second_execution_response = second_execution["Payload"].read().decode("utf-8") + + # wait 8s to expire ttl and execute again, this should return a new response value + sleep(8) + third_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_expiration_handler_fn_arn, + payload=payload, + ) + third_execution_response = third_execution["Payload"].read().decode("utf-8") + + # THEN + assert first_execution_response == second_execution_response + assert third_execution_response != second_execution_response + + +@pytest.mark.xdist_group(name="idempotency-redis") +def test_ttl_caching_timeout_idempotency(ttl_cache_timeout_handler_fn_arn: str): + # GIVEN + payload_timeout_execution = json.dumps( + 
{"sleep": 5, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}, + sort_keys=True, + ) + payload_working_execution = json.dumps( + {"sleep": 0, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}, + sort_keys=True, + ) + + # WHEN + # first call should fail due to timeout + execution_with_timeout, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_timeout_handler_fn_arn, + payload=payload_timeout_execution, + raise_on_error=False, + ) + execution_with_timeout_response = execution_with_timeout["Payload"].read().decode("utf-8") + + # the second call should work and return the payload + execution_working, _ = data_fetcher.get_lambda_response( + lambda_arn=ttl_cache_timeout_handler_fn_arn, + payload=payload_working_execution, + ) + execution_working_response = execution_working["Payload"].read().decode("utf-8") + + # THEN + assert "Task timed out after" in execution_with_timeout_response + assert payload_working_execution == execution_working_response + + +@pytest.mark.xdist_group(name="idempotency-redis") +def test_parallel_execution_idempotency(parallel_execution_handler_fn_arn: str): + # GIVEN + payload = json.dumps({"message": "Powertools for AWS Lambda (Python) - Parallel execution"}) + + invocation_options = [ + GetLambdaResponseOptions(lambda_arn=parallel_execution_handler_fn_arn, payload=payload, raise_on_error=False), + GetLambdaResponseOptions(lambda_arn=parallel_execution_handler_fn_arn, payload=payload, raise_on_error=False), + ] + + # WHEN executing Lambdas in parallel + execution_result_list = get_lambda_response_in_parallel(invocation_options) + + timeout_execution_response = execution_result_list[0][0]["Payload"].read().decode("utf-8") + error_idempotency_execution_response = execution_result_list[1][0]["Payload"].read().decode("utf-8") + + # THEN + assert "Execution already in progress with idempotency key" in error_idempotency_execution_response + assert "Task timed out after" in timeout_execution_response + + 
+@pytest.mark.xdist_group(name="idempotency-redis") +def test_idempotent_function_thread_safety(function_thread_safety_handler_fn_arn: str): + # GIVEN + payload = json.dumps({"message": "Powertools for AWS Lambda (Python) - Idempotent function thread safety check"}) + + # WHEN + # first execution + first_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=function_thread_safety_handler_fn_arn, + payload=payload, + ) + first_execution_response = first_execution["Payload"].read().decode("utf-8") + + # the second execution should return the same response as the first execution + second_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=function_thread_safety_handler_fn_arn, + payload=payload, + ) + second_execution_response = second_execution["Payload"].read().decode("utf-8") + + # THEN + # Function threads finished without exception AND + # first and second execution is the same + for function_thread in json.loads(first_execution_response): + assert function_thread["state"] == "FINISHED" + assert function_thread["exception"] is None + assert function_thread["output"] is not None + + # we use set() here because we want to compare the elements regardless of their order in the array + assert set(first_execution_response) == set(second_execution_response) + + +@pytest.mark.xdist_group(name="idempotency-redis") +def test_optional_idempotency_key(optional_idempotency_key_fn_arn: str): + # GIVEN two payloads where only one has the expected idempotency key + payload = json.dumps({"headers": {"X-Idempotency-Key": "here"}}) + payload_without = json.dumps({"headers": {}}) + + # WHEN + # we make one request with an idempotency key + first_execution, _ = data_fetcher.get_lambda_response(lambda_arn=optional_idempotency_key_fn_arn, payload=payload) + first_execution_response = first_execution["Payload"].read().decode("utf-8") + + # and two others without the idempotency key + second_execution, _ = data_fetcher.get_lambda_response( + 
lambda_arn=optional_idempotency_key_fn_arn, + payload=payload_without, + ) + second_execution_response = second_execution["Payload"].read().decode("utf-8") + + third_execution, _ = data_fetcher.get_lambda_response( + lambda_arn=optional_idempotency_key_fn_arn, + payload=payload_without, + ) + third_execution_response = third_execution["Payload"].read().decode("utf-8") + + # THEN + # we should treat 2nd and 3rd requests with NULL idempotency key as non-idempotent transactions + # that is, no cache, no calls to persistent store, etc. + assert first_execution_response != second_execution_response + assert first_execution_response != third_execution_response + assert second_execution_response != third_execution_response diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 42157ad0dfa..fec0fc8a4e3 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -176,9 +176,9 @@ def deploy(self) -> Dict[str, str]: CloudFormation Stack Outputs with output key and value """ stack_file = self._create_temp_cdk_app() - synth_command = f"npx cdk synth --app 'python {stack_file}' -o {self._cdk_out_dir}" + synth_command = f"npx cdk@latest synth --app 'python {stack_file}' -o {self._cdk_out_dir}" deploy_command = ( - f"npx cdk deploy --app '{self._cdk_out_dir}' -O {self._stack_outputs_file} " + f"npx cdk@latest deploy --app '{self._cdk_out_dir}' -O {self._stack_outputs_file} " "--require-approval=never --method=direct" ) From 693d731c585877a306a6da2156f08217048679da Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 10:15:45 +0000 Subject: [PATCH 54/81] Testing pipeline --- .github/workflows/quality_check_pydanticv2.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/quality_check_pydanticv2.yml b/.github/workflows/quality_check_pydanticv2.yml index 8855a90b3f6..d0af2934986 100644 --- a/.github/workflows/quality_check_pydanticv2.yml +++ 
b/.github/workflows/quality_check_pydanticv2.yml @@ -58,7 +58,9 @@ jobs: python-version: ${{ matrix.python-version }} cache: "poetry" - name: Replacing Pydantic v1 with v2 > 2.0.3 - run: poetry add "pydantic=^2.0.3" + run: | + rm -rf poetry.lock + poetry add "pydantic=^2.0.3" - name: Install dependencies run: make dev - name: Test with pytest From 12af90e3e55dec8ef9dd0fcaed0b7eb2c2d9d5bf Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 10:26:02 +0000 Subject: [PATCH 55/81] Removing things --- Makefile | 1 - tests/integration/idempotency/test_idempotency_redis.py | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f721ea9830b..0de65f2d48c 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,5 @@ .PHONY: target dev format lint test coverage-html pr build build-docs build-docs-api build-docs-website .PHONY: docs-local docs-api-local security-baseline complexity-baseline release-prod release-test release -.PHONY: test-idempotency-redis target: @$(MAKE) pr diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index b0524fba4fe..c82096b42af 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -15,6 +15,8 @@ idempotent_function, ) +pytest.skip(reason="Integration tests disabled for Redis Idempotency.", allow_module_level=True) + @pytest.fixture def redis_container_image(): From 0899f68841488c010987cee167f9f1736694157e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 10:33:06 +0000 Subject: [PATCH 56/81] Removing things --- .../utilities/idempotency/persistence/base.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 4bbf68accbe..eaf7c455f10 100644 --- 
a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -270,11 +270,6 @@ def _get_expiry_timestamp(self) -> int: unix timestamp of expiry date for idempotency record """ - # removed for now, seems not being used in redis - """ if self.backend == "redis": - return self.expires_after_seconds - else: """ - now = datetime.datetime.now() period = datetime.timedelta(seconds=self.expires_after_seconds) return int((now + period).timestamp()) From c3674c04b7e77480d18fb23da115c9cff7004c53 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 11:03:15 +0000 Subject: [PATCH 57/81] Fixing docstring and removing old code --- .../utilities/idempotency/persistence/base.py | 1 - .../idempotency/persistence/redis.py | 26 +++++++++---------- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index eaf7c455f10..e2a84597094 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -116,7 +116,6 @@ class BasePersistenceLayer(ABC): def __init__(self): """Initialize the defaults""" self.function_name = "" - self.backend = "" self.configured = False self.event_key_jmespath: str = "" self.event_key_compiled_jmespath = None diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 038e0b3d84b..3a99875067b 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -48,23 +48,25 @@ def delete(self, keys: bytes | str | memoryview) -> Any: class RedisConnection: def __init__( self, + url: str = "", host: str = "", + port: int = 6379, username: str = "", password: str = "", # nosec - 
password for Redis connection - url: str = "", db_index: int = 0, - port: int = 6379, mode: Literal["standalone", "cluster"] = "standalone", ssl: bool = False, ssl_cert_reqs: Literal["required", "optional", "none"] = "none", ) -> None: """ - Initialize Redis connection which will be used in redis persistence_store to support idempotency + Initialize Redis connection which will be used in redis persistence_store to support Idempotency Parameters ---------- host: str, optional redis host + port: int, optional: default 6379 + redis port username: str, optional redis username password: str, optional @@ -73,10 +75,8 @@ def __init__( redis connection string, using url will override the host/port in the previous parameters db_index: str, optional: default 0 redis db index - port: int, optional: default 6379 - redis port mode: str, Literal["standalone","cluster"] - set redis client mode, choose from standalone/cluster + set redis client mode, choose from standalone/cluster. The default is standalone ssl: bool, optional: default False set whether to use ssl for Redis connection ssl_cert_reqs: str, optional: default "none" @@ -138,7 +138,7 @@ def create_subscription_payment(event: dict) -> Payment: self.mode = mode def _init_client(self) -> RedisClientProtocol: - logger.info(f"Trying to connect to Redis: {self.host}") + logger.debug(f"Trying to connect to Redis: {self.host}") client: type[redis.Redis | redis.cluster.RedisCluster] if self.mode == "standalone": client = redis.Redis @@ -171,12 +171,12 @@ def _init_client(self) -> RedisClientProtocol: class RedisCachePersistenceLayer(BasePersistenceLayer): def __init__( self, + url: str = "", host: str = "", + port: int = 6379, username: str = "", password: str = "", # nosec - password for Redis connection - url: str = "", db_index: int = 0, - port: int = 6379, mode: Literal["standalone", "cluster"] = "standalone", ssl: bool = False, ssl_cert_reqs: Literal["required", "optional", "none"] = "none", @@ -193,6 +193,8 @@ def 
__init__( ---------- host: str, optional redis host + port: int, optional: default 6379 + redis port username: str, optional redis username password: str, optional @@ -201,8 +203,6 @@ def __init__( redis connection string, using url will override the host/port in the previous parameters db_index: str, optional: default 0 redis db index - port: int, optional: default 6379 - redis port mode: str, Literal["standalone","cluster"] set redis client mode, choose from standalone/cluster ssl: bool, optional: default False @@ -231,7 +231,7 @@ def __init__( from aws_lambda_powertools.utilities.data_class import( RedisCachePersistenceLayer, ) - from aws_lambda_powertools.utilities.idempotency.idempotency import ( + from aws_lambda_powertools.utilities.idempotency import ( idempotent, ) @@ -271,7 +271,7 @@ def lambda_handler(event: dict, context: LambdaContext): if not hasattr(self.client, "get_connection_kwargs"): raise IdempotencyRedisClientConfigError( - "Error configuring the Redis Client. The client must implement get_connection_kwargs function.", + "Error configuring the Redis Client. 
The Redis library must implement get_connection_kwargs function.", ) if not self.client.get_connection_kwargs().get("decode_responses", False): warnings.warn( From 68fa0c4cc61fad2b12e1c7a981ffbeae170678ef Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 14:03:43 +0000 Subject: [PATCH 58/81] Improving the documentation --- aws_lambda_powertools/shared/functions.py | 22 +-- docs/utilities/idempotency.md | 131 ++++++++++++------ .../using_redis_client_with_aws_secrets.py | 28 ++++ .../using_redis_client_with_local_certs.py | 35 +++++ .../templates/cfn_redis_serverless.yaml | 13 ++ .../idempotency/templates/sam_redis_vpc.yaml | 3 +- tests/unit/test_shared_functions.py | 4 +- 7 files changed, 179 insertions(+), 57 deletions(-) create mode 100644 examples/idempotency/src/using_redis_client_with_aws_secrets.py create mode 100644 examples/idempotency/src/using_redis_client_with_local_certs.py create mode 100644 examples/idempotency/templates/cfn_redis_serverless.yaml diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index e393d0abfed..c427f0d720f 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -253,24 +253,30 @@ def dataclass_to_dict(data) -> dict: return dataclasses.asdict(data) -def abs_lambda_path(relatvie_path="") -> str: - """Return the absolute path from the given relative path to lambda handler +def abs_lambda_path(relative_path: str = "") -> str: + """Return the absolute path from the given relative path to lambda handler. Parameters ---------- - path : string - the relative path to lambda handler, by default "" + relative_path : str, optional + The relative path to the lambda handler, by default an empty string. Returns ------- - string - the absolute path generated from the given relative path. + str + The absolute path generated from the given relative path. If the environment variable LAMBDA_TASK_ROOT is set, it will use that value. 
Otherwise, it will use the current working directory. If the path is empty, it will return the current working directory. """ + # Retrieve the LAMBDA_TASK_ROOT environment variable or default to an empty string current_working_directory = os.environ.get("LAMBDA_TASK_ROOT", "") + + # If LAMBDA_TASK_ROOT is not set, use the current working directory if not current_working_directory: current_working_directory = str(Path.cwd()) - Path(current_working_directory, relatvie_path) - return str(Path(current_working_directory, relatvie_path)) + + # Combine the current working directory and the relative path to get the absolute path + absolute_path = str(Path(current_working_directory, relative_path)) + + return absolute_path diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index eeec0797eb1..647dc1388b2 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -14,6 +14,7 @@ The idempotency utility provides a simple solution to convert your Lambda functi * Select a subset of the event as the idempotency key using JMESPath expressions * Set a time window in which records with the same payload should be considered duplicates * Expires in-progress executions if the Lambda function times out halfway through +* Support Amazon DynamoDB and Redis as persistence layer ## Terminology @@ -65,7 +66,7 @@ Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. -As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. +We currently support Amazon DynamoDB and Redis as a storage layer. This example demonstrates how to create a table in DynamoDB. 
If you want to use Redis, please go to the section [RedisPersistenceLayer](#redispersistencelayer) **Default table configuration** @@ -336,6 +337,51 @@ If an Exception is raised _outside_ the scope of the decorated function and afte As this happens outside the scope of your decorated function, you are not able to catch it if you're using the `idempotent` decorator on your Lambda handler. +### Persistence layers + +#### DynamoDBPersistenceLayer + +This persistence layer is built-in, and you can either use an existing DynamoDB table or create a new one dedicated for idempotency state (recommended). + +=== "Customizing DynamoDBPersistenceLayer to suit your table structure" + + ```python hl_lines="7-15" + --8<-- "examples/idempotency/src/customize_persistence_layer.py" + ``` + +When using DynamoDB as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: + +| Parameter | Required | Default | Description | +| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | +| **table_name** | :heavy_check_mark: | | Table name to store state | +| **key_attr** | | `id` | Partition key of the table. 
Hashed representation of the payload (unless **sort_key_attr** is specified) | +| **expiry_attr** | | `expiration` | Unix timestamp of when record expires | +| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | +| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | +| **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | +| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | +| **sort_key_attr** | | | Sort key of the table (if table is configured with a sort key). | +| **static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. Only used when **sort_key_attr** is set. | + +#### RedisPersistenceLayer + +This persistence layer is built-in, and you can use an existing Redis service. For optimal performance and compatibility, we strongly advise using a Redis service version 7 or higher. 
+ +=== "Customizing RedisPersistenceLayer to suit your data structure" + + ```python hl_lines="14-20" + --8<-- "examples/idempotency/src/customize_persistence_layer_redis.py" + ``` + +When using Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: + +| Parameter | Required | Default | Description | +| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | +| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | +| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | +| **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | +| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | + ### Idempotency request flow The following sequence diagrams explain how the Idempotency feature behaves under different scenarios. @@ -538,15 +584,23 @@ You need an existing Redis service before setting up Redis as persistent storage ???+ tip "No existing Redis service?" If you don't have an existing Redis service, we recommend using [DynamoDB](#dynamodbpersistencelayer) as persistent storage layer provider. +=== "AWS CloudFormation example" + + ```yaml hl_lines="5" + --8<-- "examples/idempotency/templates/cfn_redis_serverless.yaml" + ``` + + 1. Replace the Security Group ID and Subnet ID to match your VPC settings. + ### VPC Access -Your Lambda Function must be able to reach the Redis endpoint before using it for idempotency persistent storage layer. In most cases you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html) for your Lambda Fucntion. 
Using a public accessable Redis is not recommended. +Your Lambda Function must be able to reach the Redis endpoint before using it for idempotency persistent storage layer. In most cases you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html) for your Lambda Function. Using a public accessible Redis is not recommended. ???+ tip "Amazon ElastiCache/MemoryDB for Redis as persistent storage layer provider" If you intend to use Amazon ElastiCache for Redis for idempotency persistent storage layer, you can also reference [This AWS Tutorial](https://docs.aws.amazon.com/lambda/latest/dg/services-elasticache-tutorial.html). If you are using Amazon MemoryDB for Redis, reference [This AWS Tutorial](https://aws.amazon.com/blogs/database/access-amazon-memorydb-for-redis-from-aws-lambda/) for only VPC setup part. -After VPC setup, you can follow the templates down below to setup Lambda fucntions with VPC internal subnet access. +After VPC setup, you can follow the templates down below to setup Lambda functions with VPC internal subnet access. === "AWS Serverless Application Model (SAM) example" @@ -556,12 +610,12 @@ After VPC setup, you can follow the templates down below to setup Lambda fucntio 1. Replace the Security Group ID and Subnet ID to match your Redis' VPC setting. -### Idempotent decorator for Redis +### Configuring Redis persistence layer You can quickly start by initializing the `RedisCachePersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. Check out detailed example of `RedisCachePersistenceLayer` in [Persistence layers section](#redispersistencelayer) ???+ warning "Passing in Redis Client" - We support passing in established Redis clients when initilizing `RedisPersistenceLayer`. However, this rely on Redis parameter `decode_responses=True` to decode all Redis response. 
Please make sure this parameter is set when establishing Redis client or `RedisPersistenceLayer` will raise a `IdempotencyRedisClientConfigError`. See example below + We support passing in established Redis clients when initializing `RedisPersistenceLayer`. However, this rely on Redis parameter `decode_responses=True` to decode all Redis response. Please make sure this parameter is set when establishing Redis client or `RedisPersistenceLayer` will raise a `IdempotencyRedisClientConfigError`. See example below === "Use established Redis Client" ```python hl_lines="4 7 12-16 18 32" @@ -581,54 +635,39 @@ You can quickly start by initializing the `RedisCachePersistenceLayer` class and --8<-- "examples/idempotency/src/getting_started_with_idempotency_payload.json" ``` -For other use cases like `Idempotent function decorator` please reference the [DynamoDB section](#idempotent_function-decorator). You only need to substitute the `persistence_store` from `DynamoDBPersistenceLayer` to `RedisPersistenceLayer` and no other code changes are required. - -## Advanced - -### Persistence layers - -#### DynamoDBPersistenceLayer - -This persistence layer is built-in, and you can either use an existing DynamoDB table or create a new one dedicated for idempotency state (recommended). +### Custom advanced settings -=== "Customizing DynamoDBPersistenceLayer to suit your table structure" +For advanced settings, including SSL certificates and the ability to customize parameters such as a custom timeout, you can use the Redis client to accommodate these specific settings. 
- ```python hl_lines="7-15" - --8<-- "examples/idempotency/src/customize_persistence_layer.py" +=== "Advanced configuration using AWS Secrets" + ```python hl_lines="8 10 20" + --8<-- "examples/idempotency/src/using_redis_client_with_aws_secrets.py" ``` -When using DynamoDB as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: - -| Parameter | Required | Default | Description | -| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | -| **table_name** | :heavy_check_mark: | | Table name to store state | -| **key_attr** | | `id` | Partition key of the table. Hashed representation of the payload (unless **sort_key_attr** is specified) | -| **expiry_attr** | | `expiration` | Unix timestamp of when record expires | -| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | -| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | -| **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | -| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | -| **sort_key_attr** | | | Sort key of the table (if table is configured with a sort key). | -| **static_pk_value** | | `idempotency#{LAMBDA_FUNCTION_NAME}` | Static value to use as the partition key. Only used when **sort_key_attr** is set. | - -#### RedisPersistenceLayer - -This persistence layer is built-in, and you can use an existing Redis service. We don't recomend using Redis Persistence Layer if you don't have a exsiting Redis service. You can try [DynamoDBPersistenceLayer](#dynamodbpersistencelayer) instead. - -=== "Customizing RedisPersistenceLayer to suit your data structure" + 1. 
JSON stored: + { + "REDIS_ENDPOINT": "127.0.0.1", + "REDIS_PORT": "6379", + "REDIS_PASSWORD": "redis-secret" + } - ```python hl_lines="14-20" - --8<-- "examples/idempotency/src/customize_persistence_layer_redis.py" +=== "Advanced configuration with local certificates" + ```python hl_lines="11 22-24" + --8<-- "examples/idempotency/src/using_redis_client_with_local_certs.py" ``` -When using Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: + 1. JSON stored: + { + "REDIS_ENDPOINT": "127.0.0.1", + "REDIS_PORT": "6379", + "REDIS_PASSWORD": "redis-secret" + } + 2. Return the absolute path from the given relative path to lambda handler + 3. redis_user.crt file stored in the root directory of your Lambda function + 4. redis_user_private.key file stored in the root directory of your Lambda function + 5. redis_ca.pem file stored in the root directory of your Lambda function -| Parameter | Required | Default | Description | -| --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | -| **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | -| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | -| **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | -| **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | +## Advanced ### Customizing the default behavior @@ -858,7 +897,7 @@ The idempotency utility can be used with the `validator` decorator. Ensure that If you use an envelope with the validator, the event received by the idempotency utility will be the unwrapped event - not the "raw" event Lambda was invoked with. 
- Make sure to account for this behaviour, if you set the `event_key_jmespath`. + Make sure to account for this behavior, if you set the `event_key_jmespath`. === "Using Idempotency with JSONSchema Validation utility" diff --git a/examples/idempotency/src/using_redis_client_with_aws_secrets.py b/examples/idempotency/src/using_redis_client_with_aws_secrets.py new file mode 100644 index 00000000000..92dd46687d4 --- /dev/null +++ b/examples/idempotency/src/using_redis_client_with_aws_secrets.py @@ -0,0 +1,28 @@ +from typing import Any + +from redis import Redis + +from aws_lambda_powertools.utilities import parameters +from aws_lambda_powertools.utilities.idempotency import IdempotencyConfig, RedisCachePersistenceLayer, idempotent + +redis_values: Any = parameters.get_secret("redis_info", transform="json") # (1)! + +redis_client = Redis( + host=redis_values.get("REDIS_HOST"), + port=redis_values.get("REDIS_PORT"), + password=redis_values.get("REDIS_PASSWORD"), + decode_responses=True, + socket_timeout=10.0, + ssl=True, + retry_on_timeout=True, +) + +persistence_layer = RedisCachePersistenceLayer(client=redis_client) +config = IdempotencyConfig( + expires_after_seconds=2 * 60, # 2 minutes +) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + return {"message": "Hello"} diff --git a/examples/idempotency/src/using_redis_client_with_local_certs.py b/examples/idempotency/src/using_redis_client_with_local_certs.py new file mode 100644 index 00000000000..e94589f00a0 --- /dev/null +++ b/examples/idempotency/src/using_redis_client_with_local_certs.py @@ -0,0 +1,35 @@ +from typing import Any + +from redis import Redis + +from aws_lambda_powertools.shared.functions import abs_lambda_path +from aws_lambda_powertools.utilities import parameters +from aws_lambda_powertools.utilities.idempotency import IdempotencyConfig, RedisCachePersistenceLayer, idempotent + +redis_values: Any = parameters.get_secret("redis_info", 
transform="json") # (1)! + +default_lambda_path = abs_lambda_path() # (2)! + + +redis_client = Redis( + host=redis_values.get("REDIS_HOST"), + port=redis_values.get("REDIS_PORT"), + password=redis_values.get("REDIS_PASSWORD"), + decode_responses=True, + socket_timeout=10.0, + ssl=True, + retry_on_timeout=True, + ssl_certfile=f"{default_lambda_path}/redis_user.crt", # (3)! + ssl_keyfile=f"{default_lambda_path}/redis_user_private.key", # (4)! + ssl_ca_certs=f"{default_lambda_path}/redis_ca.pem", # (5)! +) + +persistence_layer = RedisCachePersistenceLayer(client=redis_client) +config = IdempotencyConfig( + expires_after_seconds=2 * 60, # 2 minutes +) + + +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + return {"message": "Hello"} diff --git a/examples/idempotency/templates/cfn_redis_serverless.yaml b/examples/idempotency/templates/cfn_redis_serverless.yaml new file mode 100644 index 00000000000..9087efce6f9 --- /dev/null +++ b/examples/idempotency/templates/cfn_redis_serverless.yaml @@ -0,0 +1,13 @@ +AWSTemplateFormatVersion: '2010-09-09' + +Resources: + RedisServerlessIdempotency: + Type: AWS::ElastiCache::ServerlessCache + Properties: + Engine: redis + ServerlessCacheName: redis-cache + SecurityGroupIds: # (1)! + - security-{your_sg_id} + SubnetIds: + - subnet-{your_subnet_id_1} + - subnet-{your_subnet_id_2} diff --git a/examples/idempotency/templates/sam_redis_vpc.yaml b/examples/idempotency/templates/sam_redis_vpc.yaml index 517ca3eeeb8..921b1e75b84 100644 --- a/examples/idempotency/templates/sam_redis_vpc.yaml +++ b/examples/idempotency/templates/sam_redis_vpc.yaml @@ -1,3 +1,4 @@ +AWSTemplateFormatVersion: '2010-09-09' Transform: AWS::Serverless-2016-10-31 Resources: HelloWorldFunction: @@ -7,7 +8,7 @@ Resources: Handler: app.py VpcConfig: # (1)! 
SecurityGroupIds: - - sg-{your_sg_id} + - security-{your_sg_id} SubnetIds: - subnet-{your_subnet_id_1} - subnet-{your_subnet_id_2} diff --git a/tests/unit/test_shared_functions.py b/tests/unit/test_shared_functions.py index 4bac527e439..5300c967e61 100644 --- a/tests/unit/test_shared_functions.py +++ b/tests/unit/test_shared_functions.py @@ -166,7 +166,7 @@ def test_abs_lambda_path_w_filename(): def test_abs_lambda_path_w_filename_envvar(): # Given Env is set and relative_path provided - relatvie_path = "cert/pub.cert" + relative_path = "cert/pub.cert" os.environ["LAMBDA_TASK_ROOT"] = "/var/task" # Then path = env + relative_path - assert abs_lambda_path(relatvie_path="cert/pub.cert") == str(Path(os.environ["LAMBDA_TASK_ROOT"], relatvie_path)) + assert abs_lambda_path(relative_path="cert/pub.cert") == str(Path(os.environ["LAMBDA_TASK_ROOT"], relative_path)) From 09544c1dbf195229ad5aecff7ed5f287099910f6 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 22:34:10 +0000 Subject: [PATCH 59/81] Highlights + code removal --- .../idempotency/persistence/redis.py | 17 ++------------- docs/utilities/idempotency.md | 21 +++++++------------ ...g_started_with_idempotency_redis_client.py | 1 - ...g_started_with_idempotency_redis_config.py | 2 +- 4 files changed, 11 insertions(+), 30 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 3a99875067b..f00528925de 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -3,7 +3,6 @@ import datetime import json import logging -import warnings from contextlib import contextmanager from datetime import timedelta from typing import Any, Dict @@ -95,7 +94,7 @@ def __init__( ) from aws_lambda_powertools.utilities.typing import LambdaContext - persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379, 
mode="standalone") + persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379) @@ -228,10 +227,8 @@ def __init__( ```python from redis import Redis - from aws_lambda_powertools.utilities.data_class import( - RedisCachePersistenceLayer, - ) from aws_lambda_powertools.utilities.idempotency import ( + RedisCachePersistenceLayer, idempotent, ) @@ -269,16 +266,6 @@ def lambda_handler(event: dict, context: LambdaContext): else: self.client = client - if not hasattr(self.client, "get_connection_kwargs"): - raise IdempotencyRedisClientConfigError( - "Error configuring the Redis Client. The Redis library must implement get_connection_kwargs function.", - ) - if not self.client.get_connection_kwargs().get("decode_responses", False): - warnings.warn( - "Redis connection with `decode_responses=False` may cause lower performance", - stacklevel=2, - ) - self.in_progress_expiry_attr = in_progress_expiry_attr self.expiry_attr = expiry_attr self.status_attr = status_attr diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 647dc1388b2..113e1aa0c87 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -580,7 +580,7 @@ sequenceDiagram ### Redis resources -You need an existing Redis service before setting up Redis as persistent storage layer provider. You can also use Redis compatible services like [Amazon ElastiCache for Redis](https://aws.amazon.com/elasticache/redis/) or [Amazon MemoryDB for Redis](https://aws.amazon.com/memorydb/) as persistent storage layer provider. +You need an existing Redis service before setting up Redis as persistent storage layer provider. You can also use Redis compatible services like [Amazon ElastiCache for Redis](https://aws.amazon.com/elasticache/redis/){target="_blank"} or [Amazon MemoryDB for Redis](https://aws.amazon.com/memorydb/){target="_blank"} as persistent storage layer provider. ???+ tip "No existing Redis service?" 
If you don't have an existing Redis service, we recommend using [DynamoDB](#dynamodbpersistencelayer) as persistent storage layer provider. @@ -594,36 +594,31 @@ You need an existing Redis service before setting up Redis as persistent storage ### VPC Access -Your Lambda Function must be able to reach the Redis endpoint before using it for idempotency persistent storage layer. In most cases you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html) for your Lambda Function. Using a public accessible Redis is not recommended. +Your Lambda Function must be able to reach the Redis endpoint before using it for idempotency persistent storage layer. In most cases you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html){target="_blank"} for your Lambda Function. ???+ tip "Amazon ElastiCache/MemoryDB for Redis as persistent storage layer provider" - If you intend to use Amazon ElastiCache for Redis for idempotency persistent storage layer, you can also reference [This AWS Tutorial](https://docs.aws.amazon.com/lambda/latest/dg/services-elasticache-tutorial.html). - If you are using Amazon MemoryDB for Redis, reference [This AWS Tutorial](https://aws.amazon.com/blogs/database/access-amazon-memorydb-for-redis-from-aws-lambda/) for only VPC setup part. + If you intend to use Amazon ElastiCache for Redis for idempotency persistent storage layer, you can also reference [This AWS Tutorial](https://docs.aws.amazon.com/lambda/latest/dg/services-elasticache-tutorial.html){target="_blank"}. + If you are using Amazon MemoryDB for Redis, reference [This AWS Tutorial](https://aws.amazon.com/blogs/database/access-amazon-memorydb-for-redis-from-aws-lambda/){target="_blank"} for only VPC setup part. After VPC setup, you can follow the templates down below to setup Lambda functions with VPC internal subnet access. 
=== "AWS Serverless Application Model (SAM) example" - ```yaml hl_lines="8-13" + ```yaml hl_lines="9" --8<-- "examples/idempotency/templates/sam_redis_vpc.yaml" ``` - 1. Replace the Security Group ID and Subnet ID to match your Redis' VPC setting. + 1. Replace the Security Group ID and Subnet ID to match your VPC settings. ### Configuring Redis persistence layer -You can quickly start by initializing the `RedisCachePersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. Check out detailed example of `RedisCachePersistenceLayer` in [Persistence layers section](#redispersistencelayer) - -???+ warning "Passing in Redis Client" - We support passing in established Redis clients when initializing `RedisPersistenceLayer`. However, this rely on Redis parameter `decode_responses=True` to decode all Redis response. Please make sure this parameter is set when establishing Redis client or `RedisPersistenceLayer` will raise a `IdempotencyRedisClientConfigError`. See example below +You can quickly start by initializing the `RedisCachePersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. Check out detailed example of `RedisCachePersistenceLayer` in [Persistence layers section](#redispersistencelayer). === "Use established Redis Client" - ```python hl_lines="4 7 12-16 18 32" + ```python hl_lines="4 7 12-15 17 31" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_client.py" ``` - 1. 
Notice we rely on this field to be true - === "Use Persistence Layer with Redis config variables" ```python hl_lines="4-8 10 24" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_config.py" diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py index dd75e1cb9a7..ce702893fc2 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py @@ -12,7 +12,6 @@ client = Redis( host="localhost", port=6379, - decode_responses=True, # (1)! ) persistence_layer = RedisCachePersistenceLayer(client=client) diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py index fdedfab4471..30b621a84f8 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py @@ -7,7 +7,7 @@ ) from aws_lambda_powertools.utilities.typing import LambdaContext -persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379, mode="standalone") +persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379) @dataclass From 2bc79871641916dad176393edfa8644fb7b4dd21 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 22:42:22 +0000 Subject: [PATCH 60/81] Removing unnecessary tests --- .../idempotency/persistence/test_redis_layer.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index bf5ed1ce194..1a19f2a8d9c 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -553,16 +553,6 @@ def lambda_handler(event, 
_): assert handler_result2 == result -@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedisBase()) -def test_redis_connection_get_kwargs_error(): - # when Layer is init with a redis client that doesn't have get_connection_kwargs method - - # then should raise IdempotencyRedisClientConfigError - - with pytest.raises(IdempotencyRedisClientConfigError): - RedisCachePersistenceLayer(host="testhost") - - def test_redis_orphan_record_race_condition(lambda_context): redis_client = MockRedis( host="localhost", From 41f353e56cb86b4653fa3248deef220f565f6488 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 22:49:10 +0000 Subject: [PATCH 61/81] Removing unnecessary tests --- .../idempotency/persistence/redis.py | 6 ++--- .../persistence/test_redis_layer.py | 25 +------------------ 2 files changed, 4 insertions(+), 27 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index f00528925de..45df75f41ec 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -28,7 +28,7 @@ class RedisClientProtocol(Protocol): def get(self, name: bytes | str | memoryview) -> bytes | str | None: - ... + raise NotImplementedError def set( # noqa self, @@ -38,10 +38,10 @@ def set( # noqa px: float | timedelta | None = ..., nx: bool = ..., ) -> bool | None: - ... + raise NotImplementedError def delete(self, keys: bytes | str | memoryview) -> Any: - ... 
+ raise NotImplementedError class RedisConnection: diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index 1a19f2a8d9c..d00aa52078f 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -92,10 +92,9 @@ def close(self): class MockRedis(MockRedisBase): - def __init__(self, decode_responses=False, cache: dict = None, mock_latency_ms: int = 0, **kwargs): + def __init__(self, cache: dict = None, mock_latency_ms: int = 0, **kwargs): self.cache = cache or {} self.expire_dict = {} - self.decode_responses = decode_responses self.acl = {} self.username = "" self.mode = "" @@ -116,11 +115,6 @@ def check_closed(self): raise self.exceptions.RedisClusterException raise self.exceptions.RedisError - def hset(self, name, mapping): - self.check_closed() - self.expire_dict.pop(name, {}) - self.cache[name] = mapping - def from_url(self, url: str): self.url = url return self @@ -131,16 +125,6 @@ def expire(self, name, time): if time != 0: self.expire_dict[name] = t.time() + time - # return {} if no match - def hgetall(self, name): - self.check_closed() - if self.expire_dict.get(name, t.time() + 1) < t.time(): - self.cache.pop(name, {}) - return self.cache.get(name, {}) - - def get_connection_kwargs(self): - return {"decode_responses": self.decode_responses} - def auth(self, username, **kwargs): self.username = username @@ -177,9 +161,6 @@ def get(self, name: str): resp = self.cache.get(name, None) - if resp and self.decode_responses: - resp = resp.decode("utf-8") - return resp @@ -188,7 +169,6 @@ def persistence_store_standalone_redis_no_decode(): redis_client = MockRedis( host="localhost", port="63005", - decode_responses=False, ) return RedisCachePersistenceLayer(client=redis_client) @@ -198,7 +178,6 @@ def persistence_store_standalone_redis(): redis_client = MockRedis( host="localhost", port="63005", - 
decode_responses=True, ) return RedisCachePersistenceLayer(client=redis_client) @@ -557,7 +536,6 @@ def test_redis_orphan_record_race_condition(lambda_context): redis_client = MockRedis( host="localhost", port="63005", - decode_responses=True, mock_latency_ms=50, ) manager = Manager() @@ -602,7 +580,6 @@ def test_redis_race_condition(lambda_context): redis_client = MockRedis( host="localhost", port="63005", - decode_responses=True, mock_latency_ms=50, ) manager = Manager() From 9418aaa03c5022da9e44839cedf007260a1d235a Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 3 Jan 2024 23:49:45 +0000 Subject: [PATCH 62/81] Documentation --- docs/utilities/idempotency.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 113e1aa0c87..4ffdbf2a138 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -100,7 +100,7 @@ If you're not [changing the default configuration for the DynamoDB persistence l ???+ warning "Warning: Large responses with DynamoDB persistence layer" When using this utility with DynamoDB, your function's responses must be [smaller than 400KB](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html#limits-items){target="_blank"}. - Larger items cannot be written to DynamoDB and will cause exceptions. + Larger items cannot be written to DynamoDB and will cause exceptions. If your response exceeds 400KB, consider using Redis as your persistence layer. ???+ info "Info: DynamoDB" Each function invocation will generally make 2 requests to DynamoDB.
If the From 2ae69f804ffb9443e0085f7108eff83a3728dfe1 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 20:56:10 +0000 Subject: [PATCH 63/81] Addressing initial Ruben's feedback --- .../utilities/idempotency/persistence/base.py | 2 +- .../utilities/idempotency/persistence/redis.py | 9 +++------ tests/e2e/utils/infrastructure.py | 4 ++-- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index e2a84597094..f3b12da0310 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -37,7 +37,7 @@ class DataRecord: def __init__( self, - idempotency_key: str = "", + idempotency_key: str, status: str = "", expiry_timestamp: Optional[int] = None, in_progress_expiry_timestamp: Optional[int] = None, diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 45df75f41ec..a18f8e64abb 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -8,8 +8,8 @@ from typing import Any, Dict import redis -from typing_extensions import Literal, Protocol +from aws_lambda_powertools.shared.types import Literal, Protocol from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyItemAlreadyExistsError, @@ -310,8 +310,7 @@ def _get_record(self, idempotency_key) -> DataRecord: return self._item_to_data_record(idempotency_key, item) def _put_in_progress_record(self, data_record: DataRecord) -> None: - item: Dict[str, Any] = {} - item = { + item: Dict[str, Any] = { "name": data_record.idempotency_key, "mapping": { self.status_attr: data_record.status, @@ -416,9 +415,7 @@ 
def _put_record(self, data_record: DataRecord) -> None: raise NotImplementedError def _update_record(self, data_record: DataRecord) -> None: - item: Dict[str, Any] = {} - - item = { + item: Dict[str, Any] = { "name": data_record.idempotency_key, "mapping": { self.data_attr: data_record.response_data, diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 1e72f641195..5adef6133f8 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -177,9 +177,9 @@ def deploy(self) -> Dict[str, str]: CloudFormation Stack Outputs with output key and value """ stack_file = self._create_temp_cdk_app() - synth_command = f"npx cdk@latest synth --app 'python {stack_file}' -o {self._cdk_out_dir}" + synth_command = f"npx cdk synth --app 'python {stack_file}' -o {self._cdk_out_dir}" deploy_command = ( - f"npx cdk@latest deploy --app '{self._cdk_out_dir}' -O {self._stack_outputs_file} " + f"npx cdk deploy --app '{self._cdk_out_dir}' -O {self._stack_outputs_file} " "--require-approval=never --method=direct" ) From d87c8692984a10343cd36c9143adaf4d1c543d5e Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 22:12:06 +0000 Subject: [PATCH 64/81] Addressing Ruben's feedback - documentation --- docs/utilities/idempotency.md | 37 ++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 4ffdbf2a138..5766ba1f350 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -14,7 +14,7 @@ The idempotency utility provides a simple solution to convert your Lambda functi * Select a subset of the event as the idempotency key using JMESPath expressions * Set a time window in which records with the same payload should be considered duplicates * Expires in-progress executions if the Lambda function times out halfway through -* Support Amazon DynamoDB and Redis as persistence layer +* Support 
Amazon DynamoDB and Redis as persistence layers ## Terminology @@ -53,7 +53,7 @@ classDiagram ## Getting started ???+ note - This section uses DynamoDB as default idempotent persistence storage layer. If you are interested in using Redis as persistence storage layer, Check out the [Redis as persistence storage layer](#redis-as-persistent-storage-layer-provider) Section. + This section uses DynamoDB as the default idempotent persistence storage layer. If you are interested in using Redis as the persistence storage layer, check out the [Redis as persistence storage layer](#redis-as-persistent-storage-layer-provider) section. ### IAM Permissions @@ -66,7 +66,7 @@ Your Lambda function IAM Role must have `dynamodb:GetItem`, `dynamodb:PutItem`, Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. -We currently support Amazon DynamoDB and Redis as a storage layer. This example demonstrates how to create a table in DynamoDB. If you want to use Redis, please go to the section [RedisPersistenceLayer](#redispersistencelayer) +We currently support Amazon DynamoDB and Redis as a storage layer. The following example demonstrates how to create a table in DynamoDB. If you prefer to use Redis, refer to the [RedisPersistenceLayer](#redispersistencelayer) section. **Default table configuration** @@ -341,7 +341,7 @@ If an Exception is raised _outside_ the scope of the decorated function and afte #### DynamoDBPersistenceLayer -This persistence layer is built-in, and you can either use an existing DynamoDB table or create a new one dedicated for idempotency state (recommended). +This persistence layer is built-in, allowing you to use an existing DynamoDB table or create a new one dedicated to idempotency state (recommended).
=== "Customizing DynamoDBPersistenceLayer to suit your table structure" @@ -349,7 +349,7 @@ This persistence layer is built-in, and you can either use an existing DynamoDB --8<-- "examples/idempotency/src/customize_persistence_layer.py" ``` -When using DynamoDB as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: +When using DynamoDB as the persistence layer, you can customize the attribute names by passing the following parameters during the initialization of the persistence layer: | Parameter | Required | Default | Description | | --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | @@ -365,7 +365,7 @@ When using DynamoDB as a persistence layer, you can alter the attribute names by #### RedisPersistenceLayer -This persistence layer is built-in, and you can use an existing Redis service. For optimal performance and compatibility, we strongly advise using a Redis service version 7 or higher. +This persistence layer is built-in, allowing you to use an existing Redis service. For optimal performance and compatibility, it is strongly recommended to use a Redis service version 7 or higher. === "Customizing RedisPersistenceLayer to suit your data structure" @@ -373,12 +373,12 @@ This persistence layer is built-in, and you can use an existing Redis service. 
F --8<-- "examples/idempotency/src/customize_persistence_layer_redis.py" ``` -When using Redis as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: +When using Redis as the persistence layer, you can customize the attribute names by providing the following parameters upon initialization of the persistence layer: | Parameter | Required | Default | Description | | --------------------------- | ------------------ | ------------------------------------ | -------------------------------------------------------------------------------------------------------- | | **in_progress_expiry_attr** | | `in_progress_expiration` | Unix timestamp of when record expires while in progress (in case of the invocation times out) | -| **status_attr** | | `status` | Stores status of the lambda execution during and after invocation | +| **status_attr** | | `status` | Stores status of the Lambda execution during and after invocation | | **data_attr** | | `data` | Stores results of successfully executed Lambda handlers | | **validation_key_attr** | | `validation` | Hashed representation of the parts of the event used for validation | @@ -580,9 +580,10 @@ sequenceDiagram ### Redis resources -You need an existing Redis service before setting up Redis as persistent storage layer provider. You can also use Redis compatible services like [Amazon ElastiCache for Redis](https://aws.amazon.com/elasticache/redis/){target="_blank"} or [Amazon MemoryDB for Redis](https://aws.amazon.com/memorydb/){target="_blank"} as persistent storage layer provider. +Before setting up Redis as the persistent storage layer provider, you must have an existing Redis service. We recommend you to use Redis compatible services such as [Amazon ElastiCache for Redis](https://aws.amazon.com/elasticache/redis/){target="_blank"} or [Amazon MemoryDB for Redis](https://aws.amazon.com/memorydb/){target="_blank"} as your persistent storage layer provider. 
+ ???+ tip "No existing Redis service?" - If you don't have an existing Redis service, we recommend using [DynamoDB](#dynamodbpersistencelayer) as persistent storage layer provider. + If you don't have an existing Redis service, we recommend using [DynamoDB](#dynamodbpersistencelayer) as the persistent storage layer provider. === "AWS CloudFormation example" @@ -594,13 +595,13 @@ You need an existing Redis service before setting up Redis as persistent storage ### VPC Access -Your Lambda Function must be able to reach the Redis endpoint before using it for idempotency persistent storage layer. In most cases you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html){target="_blank"} for your Lambda Function. +Your Lambda Function must have network access to the Redis endpoint before using it as the idempotency persistent storage layer. In most cases, you will need to [configure VPC access](https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html){target="_blank"} for your Lambda Function. ???+ tip "Amazon ElastiCache/MemoryDB for Redis as persistent storage layer provider" - If you intend to use Amazon ElastiCache for Redis for idempotency persistent storage layer, you can also reference [This AWS Tutorial](https://docs.aws.amazon.com/lambda/latest/dg/services-elasticache-tutorial.html){target="_blank"}. - If you are using Amazon MemoryDB for Redis, reference [This AWS Tutorial](https://aws.amazon.com/blogs/database/access-amazon-memorydb-for-redis-from-aws-lambda/){target="_blank"} for only VPC setup part. + If you plan to use Amazon ElastiCache for Redis as the idempotency persistent storage layer, you may find [this AWS tutorial](https://docs.aws.amazon.com/lambda/latest/dg/services-elasticache-tutorial.html){target="_blank"} helpful. 
+ For those using Amazon MemoryDB for Redis, refer to [this AWS tutorial](https://aws.amazon.com/blogs/database/access-amazon-memorydb-for-redis-from-aws-lambda/){target="_blank"} specifically for the VPC setup guidance. -After VPC setup, you can follow the templates down below to setup Lambda functions with VPC internal subnet access. +After completing the VPC setup, you can use the templates provided below to set up Lambda functions with access to VPC internal subnets. === "AWS Serverless Application Model (SAM) example" @@ -612,7 +613,7 @@ After VPC setup, you can follow the templates down below to setup Lambda functio ### Configuring Redis persistence layer -You can quickly start by initializing the `RedisCachePersistenceLayer` class and using it with the `idempotent` decorator on your lambda handler. Check out detailed example of `RedisCachePersistenceLayer` in [Persistence layers section](#redispersistencelayer). +You can quickly get started by initializing the `RedisCachePersistenceLayer` class and applying the `idempotent` decorator to your Lambda handler. For a detailed example of using the `RedisCachePersistenceLayer`, refer to the [Persistence layers section](#redispersistencelayer). === "Use established Redis Client" ```python hl_lines="4 7 12-15 17 31" @@ -632,7 +633,7 @@ You can quickly start by initializing the `RedisCachePersistenceLayer` class and ### Custom advanced settings -For advanced settings, including SSL certificates and the ability to customize parameters such as a custom timeout, you can use the Redis client to accommodate these specific settings. +For advanced configurations, such as setting up SSL certificates or customizing parameters like a custom timeout, you can utilize the Redis client to tailor these specific settings to your needs. 
=== "Advanced configuration using AWS Secrets" ```python hl_lines="8 10 20" @@ -965,7 +966,7 @@ This means it is possible to pass a mocked Table resource, or stub various metho ### Testing with Redis -To test locally, You can either utilize [fakeredis-py](https://github.com/cunla/fakeredis-py) or check out the [MockRedis](https://github.com/aws-powertools/powertools-lambda-python/blob/ba6532a1c73e20fdaee88c5795fd40e978553e14/tests/functional/idempotency/persistence/test_redis_layer.py#L34-L66) Class we used in our test. +To test locally, you can either utilize [fakeredis-py](https://github.com/cunla/fakeredis-py) for a simulated Redis environment or refer to the [MockRedis](https://github.com/aws-powertools/powertools-lambda-python/blob/ba6532a1c73e20fdaee88c5795fd40e978553e14/tests/functional/idempotency/persistence/test_redis_layer.py#L34-L66) class used in our tests to mock Redis operations. === "test_with_mock_redis.py" @@ -979,7 +980,7 @@ To test locally, You can either utilize [fakeredis-py](https://github.com/cunla/ --8<-- "examples/idempotency/tests/mock_redis.py" ``` -If you want to actually setup a Real Redis client for integration test, reference the code below +If you want to set up a real Redis client for integration testing, you can reference the code provided below. 
=== "test_with_real_redis.py" From 2d19e2421e76ae80fa00297ff2f3f885c2a47d34 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 22:20:51 +0000 Subject: [PATCH 65/81] Addressing Ruben's feedback - docstring --- .../idempotency/persistence/redis.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index a18f8e64abb..b093bdbce4d 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -27,6 +27,41 @@ class RedisClientProtocol(Protocol): + """ + Protocol class defining the interface for a Redis client. + + This protocol outlines the expected behavior of a Redis client, allowing for + standardization among different implementations and allowing customers to extend it + in their own implementation. + + Methods + ------- + - get(name: bytes | str | memoryview) -> bytes | str | None: + Retrieves the value associated with the given key. + + - set( + name: str | bytes, + value: bytes | float | str, + ex: float | timedelta | None = ..., + px: float | timedelta | None = ..., + nx: bool = ..., + ) -> bool | None: + Sets the value for the specified key with optional parameters. + + - delete(keys: bytes | str | memoryview) -> Any: + Deletes one or more keys. + + Note + ---- + - The `ex` parameter represents the expiration time in seconds. + - The `px` parameter represents the expiration time in milliseconds. + - The `nx` parameter, if True, sets the value only if the key does not exist. + + Raises + ------ + - NotImplementedError: If any of the methods are not implemented by the concrete class. 
+ """ + def get(self, name: bytes | str | memoryview) -> bytes | str | None: raise NotImplementedError From a031ced832b3cfae1a28783a8e657679c5d5fae4 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 22:59:50 +0000 Subject: [PATCH 66/81] Addressing Ruben's feedback - SSL --- .../utilities/idempotency/persistence/redis.py | 17 ++++------------- .../handlers/function_thread_safety_handler.py | 2 +- .../optional_idempotency_key_handler.py | 2 +- .../handlers/parallel_execution_handler.py | 2 +- .../handlers/ttl_cache_expiration_handler.py | 2 +- .../handlers/ttl_cache_timeout_handler.py | 2 +- 6 files changed, 9 insertions(+), 18 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index b093bdbce4d..acb1d3d0a46 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -89,8 +89,7 @@ def __init__( password: str = "", # nosec - password for Redis connection db_index: int = 0, mode: Literal["standalone", "cluster"] = "standalone", - ssl: bool = False, - ssl_cert_reqs: Literal["required", "optional", "none"] = "none", + ssl: bool = True, ) -> None: """ Initialize Redis connection which will be used in redis persistence_store to support Idempotency @@ -111,10 +110,8 @@ def __init__( redis db index mode: str, Literal["standalone","cluster"] set redis client mode, choose from standalone/cluster. 
The default is standalone - ssl: bool, optional: default False + ssl: bool, optional: default True set whether to use ssl for Redis connection - ssl_cert_reqs: str, optional: default "none" - set whether to use ssl cert for Redis connection, choose from required/optional/none Examples -------- @@ -168,7 +165,6 @@ def create_subscription_payment(event: dict) -> Payment: self.password = password self.db_index = db_index self.ssl = ssl - self.ssl_cert_reqs = ssl_cert_reqs self.mode = mode def _init_client(self) -> RedisClientProtocol: @@ -195,7 +191,6 @@ def _init_client(self) -> RedisClientProtocol: db=self.db_index, decode_responses=True, ssl=self.ssl, - ssl_cert_reqs=self.ssl_cert_reqs, ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis: {self.host}") @@ -212,8 +207,7 @@ def __init__( password: str = "", # nosec - password for Redis connection db_index: int = 0, mode: Literal["standalone", "cluster"] = "standalone", - ssl: bool = False, - ssl_cert_reqs: Literal["required", "optional", "none"] = "none", + ssl: bool = True, client: RedisClientProtocol | None = None, in_progress_expiry_attr: str = "in_progress_expiration", expiry_attr: str = "expiration", @@ -239,10 +233,8 @@ def __init__( redis db index mode: str, Literal["standalone","cluster"] set redis client mode, choose from standalone/cluster - ssl: bool, optional: default False + ssl: bool, optional: default True set whether to use ssl for Redis connection - ssl_cert_reqs: str, optional: default "none" - set whether to use ssl cert for Redis connection, choose from required/optional/none client: RedisClientProtocol, optional You can bring your established Redis client that follows RedisClientProtocol. 
If client is provided, all connection config above will be ignored @@ -296,7 +288,6 @@ def lambda_handler(event: dict, context: LambdaContext): url=url, mode=mode, ssl=ssl, - ssl_cert_reqs=ssl_cert_reqs, )._init_client() else: self.client = client diff --git a/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py b/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py index ac2dedc7fab..d64ff5df10f 100644 --- a/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py +++ b/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py @@ -9,7 +9,7 @@ ) REDIS_HOST = os.getenv("RedisEndpoint", "") -persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) threads_count = 2 diff --git a/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py b/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py index cbdc6d60c11..4182812fc40 100644 --- a/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py +++ b/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py @@ -8,7 +8,7 @@ ) REDIS_HOST = os.getenv("RedisEndpoint", "") -persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) config = IdempotencyConfig(event_key_jmespath='headers."X-Idempotency-Key"', use_local_cache=True) diff --git a/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py b/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py index 8a3b4f66982..5dd4fbdcbdb 100644 --- a/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py +++ b/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py @@ -7,7 +7,7 @@ ) REDIS_HOST = os.getenv("RedisEndpoint", "") -persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) 
+persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) @idempotent(persistence_store=persistence_layer) diff --git a/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py b/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py index e5770e722e1..2f938d3c464 100644 --- a/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py +++ b/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py @@ -8,7 +8,7 @@ ) REDIS_HOST = os.getenv("RedisEndpoint", "") -persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) config = IdempotencyConfig(expires_after_seconds=5) diff --git a/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py b/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py index 40c479d6696..3d8ab335c8b 100644 --- a/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py +++ b/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py @@ -8,7 +8,7 @@ ) REDIS_HOST = os.getenv("RedisEndpoint", "") -persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379, ssl=True) +persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) config = IdempotencyConfig(expires_after_seconds=1) From 317480036c9bb106c8dcacf6d33a3f5046bdc44f Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 23:16:40 +0000 Subject: [PATCH 67/81] Addressing Ruben's feedback - db_index --- .../utilities/idempotency/persistence/redis.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index acb1d3d0a46..818cb69f5ec 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -182,15 +182,20 @@ 
def _init_client(self) -> RedisClientProtocol: logger.debug(f"Using URL format to connect to Redis: {self.host}") return client.from_url(url=self.url) else: + # Redis in cluster mode doesn't support db parameter + extra_param_connection: Dict[str, str] = {} + if self.mode != "cluster": + extra_param_connection = {"db": self.db_index} + logger.debug(f"Using other parameters to connect to Redis: {self.host}") return client( host=self.host, port=self.port, username=self.username, password=self.password, - db=self.db_index, decode_responses=True, ssl=self.ssl, + **extra_param_connection, ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis: {self.host}") From 8d31fa58a76b5a491b1bd6e355713eb2740c1bb3 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 23:26:21 +0000 Subject: [PATCH 68/81] Addressing Ruben's feedback - db_index --- .../persistence/test_redis_layer.py | 26 ++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index d00aa52078f..f7aa53760af 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -201,12 +201,12 @@ def valid_record(): @mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) -def test_redis_connection(): +def test_redis_connection_standalone(): # when RedisCachePersistenceLayer is init with the following params redis_conf = { "host": "host", "port": "port", - "mode": "cluster", + "mode": "standalone", "username": "redis_user", "password": "redis_pass", "db_index": "db_index", @@ -214,7 +214,27 @@ def test_redis_connection(): layer = RedisCachePersistenceLayer(**redis_conf) redis_conf["db"] = redis_conf["db_index"] redis_conf.pop("db_index") - # then these paramas should be passed down to mock Redis identically + # 
then these params should be passed down to mock Redis identically + for k, v in redis_conf.items(): + assert layer.client.__dict__.get(k) == v + + +@mock.patch("aws_lambda_powertools.utilities.idempotency.persistence.redis.redis", MockRedis()) +def test_redis_connection_cluster(): + # when RedisCachePersistenceLayer is init with the following params + redis_conf = { + "host": "host", + "port": "port", + "mode": "cluster", + "username": "redis_user", + "password": "redis_pass", + "db_index": "db_index", + } + layer = RedisCachePersistenceLayer(**redis_conf) + redis_conf["db"] = None + redis_conf.pop("db_index") + + # then these params should be passed down to mock Redis identically for k, v in redis_conf.items(): assert layer.client.__dict__.get(k) == v From 33025d4864e2a2e0c378f78a2bce630407de8654 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Mon, 8 Jan 2024 23:31:07 +0000 Subject: [PATCH 69/81] Addressing Ruben's feedback - db_index --- .../utilities/idempotency/persistence/redis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 818cb69f5ec..7978d2e82f3 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -183,7 +183,7 @@ def _init_client(self) -> RedisClientProtocol: return client.from_url(url=self.url) else: # Redis in cluster mode doesn't support db parameter - extra_param_connection: Dict[str, str] = {} + extra_param_connection: Dict[str, Any] = {} if self.mode != "cluster": extra_param_connection = {"db": self.db_index} From 122947ad8dc8fb1618978b184f5bf77958a6ffde Mon Sep 17 00:00:00 2001 From: roger zhang Date: Tue, 9 Jan 2024 00:07:51 +0000 Subject: [PATCH 70/81] redis comment improvements, minor refactor. 
--- .../utilities/idempotency/__init__.py | 6 - .../utilities/idempotency/exceptions.py | 6 +- .../idempotency/persistence/redis.py | 121 ++++++++++++------ .../persistence/test_redis_layer.py | 18 +-- 4 files changed, 91 insertions(+), 60 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/__init__.py b/aws_lambda_powertools/utilities/idempotency/__init__.py index 296b641df2b..ae27330cc1f 100644 --- a/aws_lambda_powertools/utilities/idempotency/__init__.py +++ b/aws_lambda_powertools/utilities/idempotency/__init__.py @@ -9,11 +9,6 @@ DynamoDBPersistenceLayer, ) -# import RedisCachePersistenceLayer here mean we will need redis as a required lib? Do we want to make it optional? -from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( - RedisCachePersistenceLayer, -) - from .idempotency import IdempotencyConfig, idempotent, idempotent_function __all__ = ( @@ -22,5 +17,4 @@ "idempotent", "idempotent_function", "IdempotencyConfig", - "RedisCachePersistenceLayer", ) diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index 19e609bf045..8e0d3666370 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -85,19 +85,19 @@ class IdempotencyNoSerializationModelError(BaseError): """ -class IdempotencyRedisClientConfigError(BaseError): +class IdempotencyPersistenceConfigError(BaseError): """ The Redis connection passed in has unsupported config """ -class IdempotencyRedisConnectionError(BaseError): +class IdempotencyPersistenceConnectionError(BaseError): """ Redis connection error """ -class IdempotencyOrphanRecordError(BaseError): +class IdempotencyPersistenceConsistencyError(BaseError): """ Redis Orphan Record found, need to be removed """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py 
b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index 7978d2e82f3..f4fb685ca66 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -14,9 +14,9 @@ from aws_lambda_powertools.utilities.idempotency.exceptions import ( IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, - IdempotencyOrphanRecordError, - IdempotencyRedisClientConfigError, - IdempotencyRedisConnectionError, + IdempotencyPersistenceConfigError, + IdempotencyPersistenceConnectionError, + IdempotencyPersistenceConsistencyError, ) from aws_lambda_powertools.utilities.idempotency.persistence.base import ( STATUS_CONSTANTS, @@ -106,7 +106,7 @@ def __init__( redis password url: str, optional redis connection string, using url will override the host/port in the previous parameters - db_index: str, optional: default 0 + db_index: int, optional: default 0 redis db index mode: str, Literal["standalone","cluster"] set redis client mode, choose from standalone/cluster. 
The default is standalone @@ -175,7 +175,7 @@ def _init_client(self) -> RedisClientProtocol: elif self.mode == "cluster": client = redis.cluster.RedisCluster else: - raise IdempotencyRedisClientConfigError(f"Mode {self.mode} not supported") + raise IdempotencyPersistenceConfigError(f"Mode {self.mode} not supported") try: if self.url: @@ -187,7 +187,7 @@ def _init_client(self) -> RedisClientProtocol: if self.mode != "cluster": extra_param_connection = {"db": self.db_index} - logger.debug(f"Using other parameters to connect to Redis: {self.host}") + logger.debug(f"Using arguments to connect to Redis: {self.host}") return client( host=self.host, port=self.port, @@ -199,7 +199,7 @@ def _init_client(self) -> RedisClientProtocol: ) except redis.exceptions.ConnectionError as exc: logger.debug(f"Cannot connect in Redis: {self.host}") - raise IdempotencyRedisConnectionError("Could not to connect to Redis", exc) from exc + raise IdempotencyPersistenceConnectionError("Could not connect to Redis", exc) from exc class RedisCachePersistenceLayer(BasePersistenceLayer): @@ -222,6 +222,7 @@ def __init__( ): """ Initialize the Redis Persistence Layer + Parameters ---------- host: str, optional @@ -234,15 +235,15 @@ def __init__( redis password url: str, optional redis connection string, using url will override the host/port in the previous parameters - db_index: str, optional: default 0 + db_index: int, optional: default 0 redis db index mode: str, Literal["standalone","cluster"] set redis client mode, choose from standalone/cluster ssl: bool, optional: default True set whether to use ssl for Redis connection client: RedisClientProtocol, optional - You can bring your established Redis client that follows RedisClientProtocol. + Bring your own Redis client that follows RedisClientProtocol.
+ If provided, all other connection configuration options will be ignored expiry_attr: str, optional Redis json attribute name for expiry timestamp, by default "expiration" in_progress_expiry_attr: str, optional @@ -309,7 +310,7 @@ def lambda_handler(event: dict, context: LambdaContext): def _get_expiry_second(self, expiry_timestamp: int | None = None) -> int: """ - return seconds of timedelta from now to the given unix timestamp + Calculates the number of seconds remaining until a specified expiry time """ if expiry_timestamp: return expiry_timestamp - int(datetime.datetime.now().timestamp()) @@ -317,8 +318,7 @@ def _get_expiry_second(self, expiry_timestamp: int | None = None) -> int: def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> DataRecord: in_progress_expiry_timestamp = item.get(self.in_progress_expiry_attr) - if isinstance(in_progress_expiry_timestamp, str): - in_progress_expiry_timestamp = int(in_progress_expiry_timestamp) + return DataRecord( idempotency_key=idempotency_key, status=item[self.status_attr], @@ -331,13 +331,24 @@ def _item_to_data_record(self, idempotency_key: str, item: Dict[str, Any]) -> Da def _get_record(self, idempotency_key) -> DataRecord: # See: https://redis.io/commands/get/ response = self.client.get(idempotency_key) + # key not found if not response: raise IdempotencyItemNotFoundError + try: item = self._json_deserializer(response) except json.JSONDecodeError: - raise IdempotencyOrphanRecordError + # Json decoding error is considered an Consistency error. This scenario will also introduce possible + # race condition just like Orphan record does. As two lambda handlers is possible to reach this line + # of code almost simultaneously. If we simply regard this record as non-valid record. The two lambda + # handlers will both start the overwrite process without knowing each other. 
Causing this value being + # overwritten twice (ultimately two Lambda Handlers will both be executed, which is against idempotency). + # So this case should also be handled by the error handling in IdempotencyPersistenceConsistencyError + # part to avoid the possible race condition. + + raise IdempotencyPersistenceConsistencyError + return self._item_to_data_record(idempotency_key, item) def _put_in_progress_record(self, data_record: DataRecord) -> None: @@ -370,32 +381,38 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: # - previous invocation with the same key was deleted due to TTL # - SET see https://redis.io/commands/set/ - logger.debug(f"Putting record on Redis for idempotency key: {item['name']}") + logger.debug(f"Putting record on Redis for idempotency key: {data_record.idempotency_key}") encoded_item = self._json_serializer(item["mapping"]) - ttl = self._get_expiry_second(expiry_timestamp=item["mapping"][self.expiry_attr]) + ttl = self._get_expiry_second(expiry_timestamp=data_record.expiry_timestamp) - redis_response = self.client.set(name=item["name"], value=encoded_item, ex=ttl, nx=True) + redis_response = self.client.set(name=data_record.idempotency_key, value=encoded_item, ex=ttl, nx=True) - # redis_response:True -> Redis set succeed, idempotency key does not exist before - # return to idempotency and proceed to handler execution phase. Most cases should return here + # If redis_response is True, the Redis SET operation was successful and the idempotency key was not + # previously set. This indicates that we can safely proceed to the handler execution phase. + # Most invocations should successfully proceed past this point. 
if redis_response: return - # redis_response:None -> Existing record on Redis, continue to checking phase - # The idempotency key exist: - # - previous invocation with the same key and not expired(active idempotency) - # - previous invocation timed out (Orphan Record) - # - previous invocation record expired but not deleted by Redis (Orphan Record) + # If redis_response is None, it indicates an existing record in Redis for the given idempotency key. + # This could be due to: + # - An active idempotency record from a previous invocation that has not yet expired. + # - An orphan record where a previous invocation has timed out. + # - An expired idempotency record that has not been deleted by Redis. + # In any case, we proceed to retrieve the record for further inspection. - idempotency_record = self._get_record(item["name"]) + idempotency_record = self._get_record(data_record.idempotency_key) - # status is completed and expiry_attr timestamp still larger than current timestamp - # found a valid completed record + # If the status of the idempotency record is 'COMPLETED' and the record has not expired + # (i.e., the expiry timestamp is greater than the current timestamp), then a valid completed + # record exists. We raise an error to prevent duplicate processing of a request that has already + # been completed successfully. if idempotency_record.status == STATUS_CONSTANTS["COMPLETED"] and not idempotency_record.is_expired: raise IdempotencyItemAlreadyExistsError - # in_progress_expiry_attr exist means status is in_progress, and still larger than current timestamp, - # found a vaild in_progress record + # If the idempotency record has a status of 'INPROGRESS' and has a valid in_progress_expiry_timestamp + # (meaning the timestamp is greater than the current timestamp in milliseconds), then we have encountered + # a valid in-progress record. 
This indicates that another process is currently handling the request, and + # to maintain idempotency, we raise an error to prevent concurrent processing of the same request. if ( idempotency_record.status == STATUS_CONSTANTS["INPROGRESS"] and idempotency_record.in_progress_expiry_timestamp @@ -403,26 +420,36 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: ): raise IdempotencyItemAlreadyExistsError - # If the code reaches here means we found an Orphan record. - raise IdempotencyOrphanRecordError - - except IdempotencyOrphanRecordError: - # deal with orphan record here - # aquire a lock for default 10 seconds + # Reaching this point indicates that the idempotency record found is an orphan record. An orphan record is + # one that is neither completed nor in-progress within its expected time frame. It may result from a + # previous invocation that has timed out or an expired record that has yet to be cleaned up by Redis. + # We raise an error to handle this exceptional scenario appropriately. + raise IdempotencyPersistenceConsistencyError + + except IdempotencyPersistenceConsistencyError: + # Handle an orphan record by attempting to acquire a lock, which by default lasts for 10 seconds. + # The purpose of acquiring the lock is to prevent race conditions with other processes that might + # also be trying to handle the same orphan record. Once the lock is acquired, we set a new value + # for the idempotency record in Redis with the appropriate time-to-live (TTL). with self._acquire_lock(name=item["name"]): self.client.set(name=item["name"], value=encoded_item, ex=ttl) - # lock was not removed here intentionally. Prevent another orphan operation in race condition. + # Not removing the lock here serves as a safeguard against race conditions, + # preventing another operation from mistakenly treating this record as an orphan while the + # current operation is still in progress. 
except (redis.exceptions.RedisError, redis.exceptions.RedisClusterException) as e: raise e except Exception as e: - logger.debug(f"encountered non-redis exception:{e}") + logger.debug(f"encountered non-redis exception: {e}") raise e @contextmanager def _acquire_lock(self, name: str): """ - aquire a lock for default 10 seconds + Attempt to acquire a lock for a specified resource name, with a default timeout. + This context manager attempts to set a lock using Redis to prevent concurrent + access to a resource identified by 'name'. It uses the 'nx' flag to ensure that + the lock is only set if it does not already exist, thereby enforcing mutual exclusion. """ try: acquired = self.client.set(name=f"{name}:lock", value="True", ex=self._orphan_lock_timeout, nx=True) @@ -431,8 +458,10 @@ def _acquire_lock(self, name: str): logger.debug("lock acquired") yield else: - # lock failed to aquire, means encountered a race condition. Just return - logger.debug("lock failed to acquire, raise to retry") + # If the lock acquisition fails, it suggests a race condition has occurred. In this case, instead of + # proceeding, we log the event and raise an error to indicate that the current operation should be + # retried after the lock is released by the process that currently holds it. + logger.debug("lock acquisition failed, raise to retry") raise IdempotencyItemAlreadyExistsError finally: @@ -461,8 +490,16 @@ def _update_record(self, data_record: DataRecord) -> None: self.client.set(name=item["name"], value=encoded_item, ex=ttl) def _delete_record(self, data_record: DataRecord) -> None: - # This function only works when Lambda handler has already been invoked - # Or you'll get empty idempotency_key + """ + Deletes the idempotency record associated with a given DataRecord from Redis. + This function is designed to be called after a Lambda handler invocation has completed processing. 
+ It ensures that the idempotency key associated with the DataRecord is removed from Redis to + prevent future conflicts and to maintain the idempotency integrity. + + Note: it is essential that the idempotency key is not empty, as that would indicate the Lambda + handler has not been invoked or the key was not properly set. + """ logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") + # See: https://redis.io/commands/del/ self.client.delete(data_record.idempotency_key) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index f7aa53760af..aad706ff811 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -6,7 +6,7 @@ import pytest from unittest.mock import patch -from aws_lambda_powertools.utilities.idempotency import ( +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( RedisCachePersistenceLayer, ) import datetime @@ -22,9 +22,9 @@ IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, - IdempotencyRedisConnectionError, - IdempotencyRedisClientConfigError, - IdempotencyOrphanRecordError, + IdempotencyPersistenceConnectionError, + IdempotencyPersistenceConfigError, + IdempotencyPersistenceConsistencyError, IdempotencyValidationError, ) from aws_lambda_powertools.utilities.idempotency.idempotency import ( @@ -243,7 +243,7 @@ def test_redis_connection_cluster(): def test_redis_connection_conn_error(): # when RedisCachePersistenceLayer is init with a bad host # then should raise IdempotencyRedisConnectionError - with pytest.raises(IdempotencyRedisConnectionError): + with pytest.raises(IdempotencyPersistenceConnectionError): RedisCachePersistenceLayer(host=redis_badhost) @@ -251,7 +251,7 @@ def test_redis_connection_conn_error(): def test_redis_connection_conf_error(): # when 
RedisCachePersistenceLayer is init with a not_supported_mode in mode param # then should raise IdempotencyRedisClientConfigError - with pytest.raises(IdempotencyRedisClientConfigError): + with pytest.raises(IdempotencyPersistenceConfigError): RedisCachePersistenceLayer(mode="not_supported_mode") @@ -268,7 +268,7 @@ def test_redis_key_error(): def test_redis_key_corrupted(): # when RedisCachePersistenceLayer got a non-json formatted record # then should raise IdempotencyOrphanRecordError - with pytest.raises(IdempotencyOrphanRecordError): + with pytest.raises(IdempotencyPersistenceConsistencyError): layer = RedisCachePersistenceLayer(url="sample_url") layer.client.set("corrupted_json", "not_json_string") layer._get_record(idempotency_key="corrupted_json") @@ -321,7 +321,7 @@ def test_item_to_datarecord_conversion(valid_record): layer = RedisCachePersistenceLayer(host="host", mode="standalone") item = { "status": STATUS_CONSTANTS["INPROGRESS"], - layer.in_progress_expiry_attr: str(int(datetime.datetime.now().timestamp() * 1000)), + layer.in_progress_expiry_attr: int(datetime.datetime.now().timestamp() * 1000), } # given we have a dict of datarecord # when calling _item_to_data_record @@ -329,7 +329,7 @@ def test_item_to_datarecord_conversion(valid_record): # then all valid fields in dict should be copied into data_record assert record.idempotency_key == "abc" assert record.status == STATUS_CONSTANTS["INPROGRESS"] - assert record.in_progress_expiry_timestamp == int(item[layer.in_progress_expiry_attr]) + assert record.in_progress_expiry_timestamp == item[layer.in_progress_expiry_attr] def test_idempotent_function_and_lambda_handler_redis_basic( From 1369cc1e74d4a96ce61a2259f54b5a7c73aea104 Mon Sep 17 00:00:00 2001 From: roger zhang Date: Tue, 9 Jan 2024 00:16:08 +0000 Subject: [PATCH 71/81] fix example and docstring import --- .../utilities/idempotency/persistence/redis.py | 10 ++++++++-- .../src/customize_persistence_layer_redis.py | 4 +++- 
.../getting_started_with_idempotency_redis_client.py | 4 +++- .../getting_started_with_idempotency_redis_config.py | 4 +++- .../src/using_redis_client_with_aws_secrets.py | 5 ++++- .../src/using_redis_client_with_local_certs.py | 5 ++++- examples/idempotency/tests/test_with_mock_redis.py | 4 +++- examples/idempotency/tests/test_with_real_redis.py | 4 +++- 8 files changed, 31 insertions(+), 9 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index f4fb685ca66..e48760c8654 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -121,9 +121,12 @@ def __init__( from uuid import uuid4 from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) + from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, + ) + from aws_lambda_powertools.utilities.typing import LambdaContext persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379) @@ -261,10 +264,13 @@ def __init__( ```python from redis import Redis from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer idempotent, ) + from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, + ) + client = redis.Redis( host="localhost", port="6379", diff --git a/examples/idempotency/src/customize_persistence_layer_redis.py b/examples/idempotency/src/customize_persistence_layer_redis.py index ccb4dbbbc5d..0d095b8881c 100644 --- a/examples/idempotency/src/customize_persistence_layer_redis.py +++ b/examples/idempotency/src/customize_persistence_layer_redis.py @@ -1,9 +1,11 @@ from redis import Redis from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) +from 
aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) from aws_lambda_powertools.utilities.typing import LambdaContext redis_client = Redis( diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py index ce702893fc2..c3983ee64e6 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py @@ -4,9 +4,11 @@ from redis import Redis from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) from aws_lambda_powertools.utilities.typing import LambdaContext client = Redis( diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py index 30b621a84f8..de9c6526059 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_config.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_config.py @@ -2,9 +2,11 @@ from uuid import uuid4 from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) from aws_lambda_powertools.utilities.typing import LambdaContext persistence_layer = RedisCachePersistenceLayer(host="localhost", port=6379) diff --git a/examples/idempotency/src/using_redis_client_with_aws_secrets.py b/examples/idempotency/src/using_redis_client_with_aws_secrets.py index 92dd46687d4..f30751c8808 100644 --- a/examples/idempotency/src/using_redis_client_with_aws_secrets.py +++ b/examples/idempotency/src/using_redis_client_with_aws_secrets.py @@ -3,7 +3,10 @@ from redis import 
Redis from aws_lambda_powertools.utilities import parameters -from aws_lambda_powertools.utilities.idempotency import IdempotencyConfig, RedisCachePersistenceLayer, idempotent +from aws_lambda_powertools.utilities.idempotency import IdempotencyConfig, idempotent +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) redis_values: Any = parameters.get_secret("redis_info", transform="json") # (1)! diff --git a/examples/idempotency/src/using_redis_client_with_local_certs.py b/examples/idempotency/src/using_redis_client_with_local_certs.py index e94589f00a0..b959d8f6027 100644 --- a/examples/idempotency/src/using_redis_client_with_local_certs.py +++ b/examples/idempotency/src/using_redis_client_with_local_certs.py @@ -4,7 +4,10 @@ from aws_lambda_powertools.shared.functions import abs_lambda_path from aws_lambda_powertools.utilities import parameters -from aws_lambda_powertools.utilities.idempotency import IdempotencyConfig, RedisCachePersistenceLayer, idempotent +from aws_lambda_powertools.utilities.idempotency import IdempotencyConfig, idempotent +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) redis_values: Any = parameters.get_secret("redis_info", transform="json") # (1)! 
diff --git a/examples/idempotency/tests/test_with_mock_redis.py b/examples/idempotency/tests/test_with_mock_redis.py index e2b2e9a3f6b..6842e818d27 100644 --- a/examples/idempotency/tests/test_with_mock_redis.py +++ b/examples/idempotency/tests/test_with_mock_redis.py @@ -4,9 +4,11 @@ from mock_redis import MockRedis from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) from aws_lambda_powertools.utilities.typing import LambdaContext diff --git a/examples/idempotency/tests/test_with_real_redis.py b/examples/idempotency/tests/test_with_real_redis.py index 13c083aa1c2..a465592eb05 100644 --- a/examples/idempotency/tests/test_with_real_redis.py +++ b/examples/idempotency/tests/test_with_real_redis.py @@ -4,9 +4,11 @@ import redis from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) from aws_lambda_powertools.utilities.typing import LambdaContext From aa63492c2fd204b43f6b992c3d75601b1aacd1f6 Mon Sep 17 00:00:00 2001 From: roger zhang Date: Tue, 9 Jan 2024 00:26:32 +0000 Subject: [PATCH 72/81] fix import in inegration test --- tests/integration/idempotency/test_idempotency_redis.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index c82096b42af..6b5a28961e1 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -3,7 +3,6 @@ import pytest from testcontainers.redis import RedisContainer -from aws_lambda_powertools.utilities.idempotency import RedisCachePersistenceLayer from aws_lambda_powertools.utilities.idempotency.exceptions import ( 
IdempotencyAlreadyInProgressError, IdempotencyItemAlreadyExistsError, @@ -14,6 +13,9 @@ idempotent, idempotent_function, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import ( + RedisCachePersistenceLayer, +) pytest.skip(reason="Integration tests disabled for Redis Idempotency.", allow_module_level=True) From ea48dfdb303578531794450be6fcf76ea9e14290 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 9 Jan 2024 01:17:01 +0000 Subject: [PATCH 73/81] Addressing Ruben's feedback - Redis name --- .../idempotency/persistence/redis.py | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py index e48760c8654..6dda3b7fbcd 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/redis.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/redis.py @@ -92,24 +92,24 @@ def __init__( ssl: bool = True, ) -> None: """ - Initialize Redis connection which will be used in redis persistence_store to support Idempotency + Initialize Redis connection which will be used in Redis persistence_store to support Idempotency Parameters ---------- host: str, optional - redis host + Redis host port: int, optional: default 6379 - redis port + Redis port username: str, optional - redis username + Redis username password: str, optional - redis password + Redis password url: str, optional - redis connection string, using url will override the host/port in the previous parameters + Redis connection string, using url will override the host/port in the previous parameters db_index: int, optional: default 0 - redis db index + Redis db index mode: str, Literal["standalone","cluster"] - set redis client mode, choose from standalone/cluster. The default is standalone + set Redis client mode, choose from standalone/cluster. 
The default is standalone ssl: bool, optional: default True set whether to use ssl for Redis connection @@ -229,19 +229,19 @@ def __init__( Parameters ---------- host: str, optional - redis host + Redis host port: int, optional: default 6379 - redis port + Redis port username: str, optional - redis username + Redis username password: str, optional - redis password + Redis password url: str, optional - redis connection string, using url will override the host/port in the previous parameters + Redis connection string, using url will override the host/port in the previous parameters db_index: int, optional: default 0 - redis db index + Redis db index mode: str, Literal["standalone","cluster"] - set redis client mode, choose from standalone/cluster + set Redis client mode, choose from standalone/cluster ssl: bool, optional: default True set whether to use ssl for Redis connection client: RedisClientProtocol, optional @@ -446,7 +446,7 @@ def _put_in_progress_record(self, data_record: DataRecord) -> None: except (redis.exceptions.RedisError, redis.exceptions.RedisClusterException) as e: raise e except Exception as e: - logger.debug(f"encountered non-redis exception: {e}") + logger.debug(f"encountered non-Redis exception: {e}") raise e @contextmanager From 442f36f5e660d11aaae81b349a6879e5984ffcf8 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Tue, 9 Jan 2024 11:21:21 +0000 Subject: [PATCH 74/81] Addressing Ruben's feedback - Comments --- aws_lambda_powertools/utilities/idempotency/exceptions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/exceptions.py b/aws_lambda_powertools/utilities/idempotency/exceptions.py index 8e0d3666370..e4c57a8f2b6 100644 --- a/aws_lambda_powertools/utilities/idempotency/exceptions.py +++ b/aws_lambda_powertools/utilities/idempotency/exceptions.py @@ -87,17 +87,17 @@ class IdempotencyNoSerializationModelError(BaseError): class 
IdempotencyPersistenceConfigError(BaseError): """ - The Redis connection passed in has unsupported config + The idempotency persistency configuration was unsupported """ class IdempotencyPersistenceConnectionError(BaseError): """ - Redis connection error + Idempotency persistence connection error """ class IdempotencyPersistenceConsistencyError(BaseError): """ - Redis Orphan Record found, need to be removed + Idempotency persistency consistency error, needs to be removed """ From 30ec3602df184e90d5572edf116ce684b5248b0a Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 10 Jan 2024 10:43:33 +0000 Subject: [PATCH 75/81] Minor changes in the documentation --- docs/utilities/idempotency.md | 39 +++++++++++++++---- .../using_redis_client_with_local_certs.py | 8 ++-- 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 5766ba1f350..2d8b4d7b798 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -576,6 +576,30 @@ sequenceDiagram Optional idempotency key +#### Race condition with Redis + +
+```mermaid +graph TD; + A(Existing orphan record in redis)-->A1; + A1[Two Lambda invoke at same time]-->B1[Lambda handler1]; + B1-->B2[Fetch from Redis]; + B2-->B3[Handler1 got orphan record]; + B3-->B4[Handler1 acquired lock]; + B4-->B5[Handler1 overwrite orphan record] + B5-->B6[Handler1 continue to execution]; + A1-->C1[Lambda handler2]; + C1-->C2[Fetch from Redis]; + C2-->C3[Handler2 got orphan record]; + C3-->C4[Handler2 failed to acquire lock]; + C4-->C5[Handler2 wait and fetch from Redis]; + C5-->C6[Handler2 return without executing]; + B6-->D(Lambda handler executed only once); + C6-->D; +``` +Race condition with Redis +
+ ## Redis as persistent storage layer provider ### Redis resources @@ -616,12 +640,12 @@ After completing the VPC setup, you can use the templates provided below to set You can quickly get started by initializing the `RedisCachePersistenceLayer` class and applying the `idempotent` decorator to your Lambda handler. For a detailed example of using the `RedisCachePersistenceLayer`, refer to the [Persistence layers section](#redispersistencelayer). === "Use established Redis Client" - ```python hl_lines="4 7 12-15 17 31" + ```python hl_lines="4 9-11 14 19 33" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_client.py" ``` === "Use Persistence Layer with Redis config variables" - ```python hl_lines="4-8 10 24" + ```python hl_lines="7-9 12 26" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_config.py" ``` @@ -636,7 +660,7 @@ You can quickly get started by initializing the `RedisCachePersistenceLayer` cla For advanced configurations, such as setting up SSL certificates or customizing parameters like a custom timeout, you can utilize the Redis client to tailor these specific settings to your needs. === "Advanced configuration using AWS Secrets" - ```python hl_lines="8 10 20" + ```python hl_lines="7-9 11 13 23" --8<-- "examples/idempotency/src/using_redis_client_with_aws_secrets.py" ``` @@ -648,7 +672,7 @@ For advanced configurations, such as setting up SSL certificates or customizing } === "Advanced configuration with local certificates" - ```python hl_lines="11 22-24" + ```python hl_lines="12 23-25" --8<-- "examples/idempotency/src/using_redis_client_with_local_certs.py" ``` @@ -658,10 +682,9 @@ For advanced configurations, such as setting up SSL certificates or customizing "REDIS_PORT": "6379", "REDIS_PASSWORD": "redis-secret" } - 2. Return the absolute path from the given relative path to lambda handler - 3. redis_user.crt file stored in the root directory of your Lambda function - 4. 
redis_user_private.key file stored in the root directory of your Lambda function - 5. redis_ca.pem file stored in the root directory of your Lambda function + 2. redis_user.crt file stored in the "certs" directory of your Lambda function + 3. redis_user_private.key file stored in the "certs" directory of your Lambda function + 4. redis_ca.pem file stored in the "certs" directory of your Lambda function ## Advanced diff --git a/examples/idempotency/src/using_redis_client_with_local_certs.py b/examples/idempotency/src/using_redis_client_with_local_certs.py index b959d8f6027..cbad1cc92f4 100644 --- a/examples/idempotency/src/using_redis_client_with_local_certs.py +++ b/examples/idempotency/src/using_redis_client_with_local_certs.py @@ -11,8 +11,6 @@ redis_values: Any = parameters.get_secret("redis_info", transform="json") # (1)! -default_lambda_path = abs_lambda_path() # (2)! - redis_client = Redis( host=redis_values.get("REDIS_HOST"), @@ -22,9 +20,9 @@ socket_timeout=10.0, ssl=True, retry_on_timeout=True, - ssl_certfile=f"{default_lambda_path}/redis_user.crt", # (3)! - ssl_keyfile=f"{default_lambda_path}/redis_user_private.key", # (4)! - ssl_ca_certs=f"{default_lambda_path}/redis_ca.pem", # (5)! + ssl_certfile=f"{abs_lambda_path()}/certs/redis_user.crt", # (2)! + ssl_keyfile=f"{abs_lambda_path()}/certs/redis_user_private.key", # (3)! + ssl_ca_certs=f"{abs_lambda_path()}/certs/redis_ca.pem", # (4)! 
) persistence_layer = RedisCachePersistenceLayer(client=redis_client) From 5f836b942e5034eb2b88835f009de611f479e766 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 10 Jan 2024 11:27:48 +0000 Subject: [PATCH 76/81] Minor changes in the documentation --- docs/utilities/idempotency.md | 13 ++++++++----- .../src/customize_persistence_layer_redis.py | 9 +-------- ...getting_started_with_idempotency_redis_client.py | 3 +++ 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 2d8b4d7b798..3b7fe344b1c 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -369,7 +369,7 @@ This persistence layer is built-in, allowing you to use an existing Redis servic === "Customizing RedisPersistenceLayer to suit your data structure" - ```python hl_lines="14-20" + ```python hl_lines="9-16" --8<-- "examples/idempotency/src/customize_persistence_layer_redis.py" ``` @@ -639,16 +639,19 @@ After completing the VPC setup, you can use the templates provided below to set You can quickly get started by initializing the `RedisCachePersistenceLayer` class and applying the `idempotent` decorator to your Lambda handler. For a detailed example of using the `RedisCachePersistenceLayer`, refer to the [Persistence layers section](#redispersistencelayer). 
-=== "Use established Redis Client" - ```python hl_lines="4 9-11 14 19 33" - --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_client.py" - ``` +???+ info + We enforce security best practices by using SSL connections in the `RedisCachePersistenceLayer`; to disable it, set `ssl=False` === "Use Persistence Layer with Redis config variables" ```python hl_lines="7-9 12 26" --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_config.py" ``` +=== "Use established Redis Client" + ```python hl_lines="4 9-11 14 22 36" + --8<-- "examples/idempotency/src/getting_started_with_idempotency_redis_client.py" + ``` + === "Sample event" ```json diff --git a/examples/idempotency/src/customize_persistence_layer_redis.py b/examples/idempotency/src/customize_persistence_layer_redis.py index 0d095b8881c..7db3d1b53ea 100644 --- a/examples/idempotency/src/customize_persistence_layer_redis.py +++ b/examples/idempotency/src/customize_persistence_layer_redis.py @@ -1,5 +1,3 @@ -from redis import Redis - from aws_lambda_powertools.utilities.idempotency import ( idempotent, ) @@ -8,14 +6,9 @@ ) from aws_lambda_powertools.utilities.typing import LambdaContext -redis_client = Redis( +persistence_layer = RedisCachePersistenceLayer( host="localhost", port=6379, - decode_responses=True, -) - -persistence_layer = RedisCachePersistenceLayer( - client=redis_client, in_progress_expiry_attr="in_progress_expiration", status_attr="status", data_attr="data", diff --git a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py index c3983ee64e6..f06d059fad4 100644 --- a/examples/idempotency/src/getting_started_with_idempotency_redis_client.py +++ b/examples/idempotency/src/getting_started_with_idempotency_redis_client.py @@ -14,6 +14,9 @@ client = Redis( host="localhost", port=6379, + socket_connect_timeout=5, + socket_timeout=5, + max_connections=1000, ) persistence_layer = 
RedisCachePersistenceLayer(client=client) From dc07eaf112540722a05f940b5f3731b03ddb6cfd Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 10 Jan 2024 11:45:47 +0000 Subject: [PATCH 77/81] Removing Redis as built-in dependency in our layer --- pyproject.toml | 2 +- tests/e2e/utils/lambda_layer/powertools_layer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 21927dd578b..cdb7ab83ea2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ parser = ["pydantic"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] redis = ["redis"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema","redis"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] # allow customers to run code locally without emulators (SAM CLI, etc.) aws-sdk = ["boto3"] datadog = ["datadog-lambda"] diff --git a/tests/e2e/utils/lambda_layer/powertools_layer.py b/tests/e2e/utils/lambda_layer/powertools_layer.py index 05147048676..0bc1dbe97c7 100644 --- a/tests/e2e/utils/lambda_layer/powertools_layer.py +++ b/tests/e2e/utils/lambda_layer/powertools_layer.py @@ -19,7 +19,7 @@ class LocalLambdaPowertoolsLayer(BaseLocalLambdaLayer): def __init__(self, output_dir: Path = CDK_OUT_PATH, architecture: Architecture = Architecture.X86_64): super().__init__(output_dir) - self.package = f"{SOURCE_CODE_ROOT_PATH}[all]" + self.package = f"{SOURCE_CODE_ROOT_PATH}[all,redis]" self.platform_args = self._resolve_platform(architecture) self.build_args = f"{self.platform_args} --only-binary=:all: --upgrade" From 1e9337cc453589fab536f7c91612fbb92085a250 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 10 Jan 2024 11:48:57 +0000 Subject: [PATCH 78/81] Adding Redis to install when creating a new env --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 0de65f2d48c..80c89f72961 100644 --- a/Makefile +++ b/Makefile @@ -8,13 +8,13 @@ dev: pip install --upgrade pip pre-commit
poetry poetry config --local virtualenvs.in-project true @$(MAKE) dev-version-plugin - poetry install --extras "all datamasking-aws-sdk" + poetry install --extras "all datamasking-aws-sdk redis" pre-commit install dev-gitpod: pip install --upgrade pip poetry @$(MAKE) dev-version-plugin - poetry install --extras "all datamasking-aws-sdk" + poetry install --extras "all datamasking-aws-sdk redis" pre-commit install format: From f570723bbca71ad3397695a282abf505d9002947 Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 10 Jan 2024 15:10:15 +0000 Subject: [PATCH 79/81] Addressing Ruben's feedback --- tests/functional/idempotency/persistence/test_redis_layer.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/functional/idempotency/persistence/test_redis_layer.py b/tests/functional/idempotency/persistence/test_redis_layer.py index aad706ff811..75db55dba55 100644 --- a/tests/functional/idempotency/persistence/test_redis_layer.py +++ b/tests/functional/idempotency/persistence/test_redis_layer.py @@ -435,7 +435,6 @@ def test_idempotent_function_and_lambda_handler_redis_validation( mock_event = {"user_id": "xyz", "time": "1234"} persistence_layer = persistence_store_standalone_redis result = {"message": "Foo"} - expected_result = copy.deepcopy(result) config = IdempotencyConfig(event_key_jmespath="user_id", payload_validation_jmespath="time") @idempotent(persistence_store=persistence_layer, config=config) def lambda_handler(event, context): return result # WHEN calling the function and handler with idempotency and event_key_jmespath,payload_validation_jmespath - handler_result = lambda_handler(mock_event, lambda_context) + lambda_handler(mock_event, lambda_context) # THEN we expect the function and lambda handler to execute successfully result = {"message": "Bar"} @@ -452,7 +451,7 @@ def lambda_handler(event, context): # When we modified the payload where validation is on and invoke again.
# Then should raise IdempotencyValidationError with pytest.raises(IdempotencyValidationError): - handler_result2 = lambda_handler(mock_event, lambda_context) + lambda_handler(mock_event, lambda_context) def test_idempotent_function_and_lambda_handler_redis_basic_no_decode( From 255a720c14b370702694b75c8525b1bfaad2205d Mon Sep 17 00:00:00 2001 From: Leandro Damascena Date: Wed, 10 Jan 2024 15:14:23 +0000 Subject: [PATCH 80/81] Making sonar happy --- .../idempotency/test_idempotency_redis.py | 2 +- tests/unit/test_shared_functions.py | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/integration/idempotency/test_idempotency_redis.py b/tests/integration/idempotency/test_idempotency_redis.py index 6b5a28961e1..bfced379dbf 100644 --- a/tests/integration/idempotency/test_idempotency_redis.py +++ b/tests/integration/idempotency/test_idempotency_redis.py @@ -161,7 +161,7 @@ def lambda_handler(event, context): # first run is just to populate function infos for deletion. # delete_record won't work if the function was not run yet. bug maybe? 
- handler_result = lambda_handler(mock_event, lambda_context) + lambda_handler(mock_event, lambda_context) # delete what's might be dirty data persistence_layer.delete_record(mock_event, IdempotencyItemNotFoundError) # run second time to ensure clean result diff --git a/tests/unit/test_shared_functions.py b/tests/unit/test_shared_functions.py index 5300c967e61..c8c4bb2afb2 100644 --- a/tests/unit/test_shared_functions.py +++ b/tests/unit/test_shared_functions.py @@ -21,6 +21,11 @@ from aws_lambda_powertools.utilities.parameters.base import DEFAULT_MAX_AGE_SECS +@pytest.fixture +def default_lambda_path(): + return "/var/task" + + def test_resolve_env_var_choice_explicit_wins_over_env_var(): assert resolve_truthy_env_var_choice(env="true", choice=False) is False assert resolve_env_var_choice(env="something", choice=False) is False @@ -149,11 +154,11 @@ def test_abs_lambda_path_empty(): assert abs_lambda_path() == f"{Path.cwd()}" -def test_abs_lambda_path_empty_envvar(): +def test_abs_lambda_path_empty_envvar(default_lambda_path): # Given Env is set - os.environ["LAMBDA_TASK_ROOT"] = "/var/task" + os.environ["LAMBDA_TASK_ROOT"] = default_lambda_path # Then path = Env/ - assert abs_lambda_path() == "/var/task" + assert abs_lambda_path() == default_lambda_path def test_abs_lambda_path_w_filename(): @@ -164,9 +169,9 @@ def test_abs_lambda_path_w_filename(): assert abs_lambda_path(relatvie_path) == str(Path(Path.cwd(), relatvie_path)) -def test_abs_lambda_path_w_filename_envvar(): +def test_abs_lambda_path_w_filename_envvar(default_lambda_path): # Given Env is set and relative_path provided relative_path = "cert/pub.cert" - os.environ["LAMBDA_TASK_ROOT"] = "/var/task" + os.environ["LAMBDA_TASK_ROOT"] = default_lambda_path # Then path = env + relative_path assert abs_lambda_path(relative_path="cert/pub.cert") == str(Path(os.environ["LAMBDA_TASK_ROOT"], relative_path)) From 698d147fa6231c17737304bcea9d15af41756b7e Mon Sep 17 00:00:00 2001 From: Cavalcante Damascena Date: 
Wed, 10 Jan 2024 17:49:39 +0000 Subject: [PATCH 81/81] e2e failing due to wrong import --- .../handlers/function_thread_safety_handler.py | 2 +- .../handlers/optional_idempotency_key_handler.py | 2 +- .../idempotency_redis/handlers/parallel_execution_handler.py | 2 +- .../idempotency_redis/handlers/ttl_cache_expiration_handler.py | 2 +- .../e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py b/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py index d64ff5df10f..6d326c17e40 100644 --- a/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py +++ b/tests/e2e/idempotency_redis/handlers/function_thread_safety_handler.py @@ -4,9 +4,9 @@ from threading import current_thread from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent_function, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import RedisCachePersistenceLayer REDIS_HOST = os.getenv("RedisEndpoint", "") persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) diff --git a/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py b/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py index 4182812fc40..75be8b31299 100644 --- a/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py +++ b/tests/e2e/idempotency_redis/handlers/optional_idempotency_key_handler.py @@ -3,9 +3,9 @@ from aws_lambda_powertools.utilities.idempotency import ( IdempotencyConfig, - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import RedisCachePersistenceLayer REDIS_HOST = os.getenv("RedisEndpoint", "") persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) diff --git a/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py 
b/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py index 5dd4fbdcbdb..c28f84f746e 100644 --- a/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py +++ b/tests/e2e/idempotency_redis/handlers/parallel_execution_handler.py @@ -2,9 +2,9 @@ import time from aws_lambda_powertools.utilities.idempotency import ( - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import RedisCachePersistenceLayer REDIS_HOST = os.getenv("RedisEndpoint", "") persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) diff --git a/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py b/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py index 2f938d3c464..a93413e157e 100644 --- a/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py +++ b/tests/e2e/idempotency_redis/handlers/ttl_cache_expiration_handler.py @@ -3,9 +3,9 @@ from aws_lambda_powertools.utilities.idempotency import ( IdempotencyConfig, - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import RedisCachePersistenceLayer REDIS_HOST = os.getenv("RedisEndpoint", "") persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379) diff --git a/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py b/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py index 3d8ab335c8b..56c2372b2c0 100644 --- a/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py +++ b/tests/e2e/idempotency_redis/handlers/ttl_cache_timeout_handler.py @@ -3,9 +3,9 @@ from aws_lambda_powertools.utilities.idempotency import ( IdempotencyConfig, - RedisCachePersistenceLayer, idempotent, ) +from aws_lambda_powertools.utilities.idempotency.persistence.redis import RedisCachePersistenceLayer REDIS_HOST = os.getenv("RedisEndpoint", "") persistence_layer = RedisCachePersistenceLayer(host=REDIS_HOST, port=6379)