diff --git a/Makefile b/Makefile
index 5bfbf031949..584ed361ed9 100644
--- a/Makefile
+++ b/Makefile
@@ -7,13 +7,13 @@ target:
 dev:
 	pip install --upgrade pip pre-commit poetry
 	@$(MAKE) dev-version-plugin
-	poetry install --extras "all"
+	poetry install --extras "all datamasking-aws-sdk"
 	pre-commit install
 
 dev-gitpod:
 	pip install --upgrade pip poetry
 	@$(MAKE) dev-version-plugin
-	poetry install --extras "all"
+	poetry install --extras "all datamasking-aws-sdk"
 	pre-commit install
 
 format:
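The same extra is declared for end users in `pyproject.toml` and `poetry.lock` further down (`datamasking-aws-sdk`, backed by the optional `aws-encryption-sdk` dependency), so the KMS-backed provider would presumably be installed with something like `pip install "aws-lambda-powertools[datamasking-aws-sdk]"`, while the base masking functionality keeps working without it.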
+ """ + + if fields is not None: + return self._apply_action_to_fields(data, fields, action, **provider_options) + else: + return action(data, **provider_options) + + def _apply_action_to_fields( + self, + data: Union[dict, str], + fields: list, + action, + **provider_options, + ) -> Union[dict, str]: + """ + This method takes the input data, which can be either a dictionary or a JSON string, + and applies a mask, an encryption, or a decryption to the specified fields. + + Parameters + ---------- + data : Union[dict, str]) + The input data to process. It can be either a dictionary or a JSON string. + fields : List + A list of fields to apply the action to. Each field can be specified as a string or + a list of strings representing nested keys in the dictionary. + action : Callable + The action to apply to the fields. It should be a callable that takes the current + value of the field as the first argument and any additional arguments that might be required + for the action. It performs an operation on the current value using the provided arguments and + returns the modified value. + **provider_options: + Additional keyword arguments to pass to the 'action' function. + + Returns + ------- + dict + The modified dictionary after applying the action to the + specified fields. + + Raises + ------- + ValueError + If 'fields' parameter is None. + TypeError + If the 'data' parameter is not a traversable type + + Example + ------- + ```python + >>> data = {'a': {'b': {'c': 1}}, 'x': {'y': 2}} + >>> fields = ['a.b.c', 'a.x.y'] + # The function will transform the value at 'a.b.c' (1) and 'a.x.y' (2) + # and store the result as: + new_dict = {'a': {'b': {'c': 'transformed_value'}}, 'x': {'y': 'transformed_value'}} + ``` + """ + + if fields is None: + raise ValueError("No fields specified.") + + if isinstance(data, str): + # Parse JSON string as dictionary + my_dict_parsed = json.loads(data) + elif isinstance(data, dict): + # In case their data has keys that are not strings (i.e. ints), convert it all into a JSON string + my_dict_parsed = json.dumps(data) + # Turn back into dict so can parse it + my_dict_parsed = json.loads(my_dict_parsed) + else: + raise TypeError( + f"Unsupported data type for 'data' parameter. 
diff --git a/aws_lambda_powertools/utilities/data_masking/constants.py b/aws_lambda_powertools/utilities/data_masking/constants.py
new file mode 100644
index 00000000000..47e74f472cf
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_masking/constants.py
@@ -0,0 +1,5 @@
+DATA_MASKING_STRING: str = "*****"
+CACHE_CAPACITY: int = 100
+MAX_CACHE_AGE_SECONDS: float = 300.0
+MAX_MESSAGES_ENCRYPTED: int = 200
+# NOTE: You can also set max messages/bytes per data key
diff --git a/aws_lambda_powertools/utilities/data_masking/provider/__init__.py b/aws_lambda_powertools/utilities/data_masking/provider/__init__.py
new file mode 100644
index 00000000000..5a0180eb82b
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_masking/provider/__init__.py
@@ -0,0 +1,5 @@
+from aws_lambda_powertools.utilities.data_masking.provider.base import BaseProvider
+
+__all__ = [
+    "BaseProvider",
+]
diff --git a/aws_lambda_powertools/utilities/data_masking/provider/base.py b/aws_lambda_powertools/utilities/data_masking/provider/base.py
new file mode 100644
index 00000000000..ceb222aa7f8
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_masking/provider/base.py
@@ -0,0 +1,34 @@
+import json
+from typing import Any
+
+from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING
+
+
+class BaseProvider:
+    """
+    Base class for data masking providers. Subclasses are expected to implement encrypt() and decrypt();
+    calling either method without overriding it raises NotImplementedError.
+    The default mask() implementation replaces values with DATA_MASKING_STRING.
+    """
+
+    def __init__(self, json_serializer=None, json_deserializer=None) -> None:
+        self.json_serializer = json_serializer or self.default_json_serializer
+        self.json_deserializer = json_deserializer or self.default_json_deserializer
+
+    def default_json_serializer(self, data):
+        return json.dumps(data).encode("utf-8")
+
+    def default_json_deserializer(self, data):
+        return json.loads(data.decode("utf-8"))
+
+    def encrypt(self, data) -> str:
+        raise NotImplementedError("Subclasses must implement encrypt()")
+
+    def decrypt(self, data) -> Any:
+        raise NotImplementedError("Subclasses must implement decrypt()")
+
+    def mask(self, data) -> Any:
+        if isinstance(data, (str, dict, bytes)):
+            return DATA_MASKING_STRING
+        elif isinstance(data, (list, tuple, set)):
+            return type(data)([DATA_MASKING_STRING] * len(data))
+        return DATA_MASKING_STRING
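As a rough sketch of how a custom provider could plug into the classes above (the `Base64Provider` name is hypothetical, and base64 is reversible encoding rather than encryption, so this is for illustration only), a subclass only needs to override `encrypt()` and `decrypt()` and can reuse the serializers inherited from `BaseProvider`:

```python
import base64
from typing import Any

from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider


class Base64Provider(BaseProvider):
    """Toy provider for illustration only; not a substitute for real encryption."""

    def encrypt(self, data) -> str:
        # default_json_serializer returns UTF-8 bytes, which b64encode accepts directly
        return base64.b64encode(self.json_serializer(data)).decode()

    def decrypt(self, data: str) -> Any:
        return self.json_deserializer(base64.b64decode(data))


masker = DataMasking(provider=Base64Provider())
encoded = masker.encrypt({"email": "foo@example.com"}, fields=["email"])
restored = masker.decrypt(encoded, fields=["email"])  # round-trips back to the original value
```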
diff --git a/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py b/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py
new file mode 100644
index 00000000000..8cc33b5e075
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py
@@ -0,0 +1,5 @@
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+
+__all__ = [
+    "AwsEncryptionSdkProvider",
+]
diff --git a/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py
new file mode 100644
index 00000000000..9e4c7ef19cb
--- /dev/null
+++ b/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py
@@ -0,0 +1,177 @@
+from __future__ import annotations
+
+import base64
+from typing import Any, Callable, Dict, List
+
+import botocore
+from aws_encryption_sdk import (
+    CachingCryptoMaterialsManager,
+    EncryptionSDKClient,
+    LocalCryptoMaterialsCache,
+    StrictAwsKmsMasterKeyProvider,
+)
+
+from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session
+from aws_lambda_powertools.utilities.data_masking.constants import (
+    CACHE_CAPACITY,
+    MAX_CACHE_AGE_SECONDS,
+    MAX_MESSAGES_ENCRYPTED,
+)
+from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider
+
+
+class ContextMismatchError(Exception):
+    def __init__(self, key):
+        super().__init__(f"Encryption Context does not match expected value for key: {key}")
+        self.key = key
+
+
+class AwsEncryptionSdkProvider(BaseProvider):
+    """
+    The AwsEncryptionSdkProvider is used as a provider for the DataMasking class.
+
+    This provider allows you to perform data masking using the AWS Encryption SDK
+    for encryption and decryption. It integrates with the DataMasking class to
+    securely encrypt and decrypt sensitive data.
+
+    Usage Example:
+    ```
+    from aws_lambda_powertools.utilities.data_masking import DataMasking
+    from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import (
+        AwsEncryptionSdkProvider,
+    )
+
+
+    def lambda_handler(event, context):
+        provider = AwsEncryptionSdkProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"])
+        masker = DataMasking(provider=provider)
+
+        data = {
+            "project": "powertools",
+            "sensitive": "xxxxxxxxxx"
+        }
+
+        masked = masker.encrypt(data, fields=["sensitive"])
+
+        return masked
+    ```
+    """
+
+    def __init__(
+        self,
+        keys: List[str],
+        key_provider=None,
+        local_cache_capacity: int = CACHE_CAPACITY,
+        max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS,
+        max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED,
+        json_serializer: Callable | None = None,
+        json_deserializer: Callable | None = None,
+    ):
+        super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer)
+
+        self._key_provider = key_provider or KMSKeyProvider(
+            keys=keys,
+            local_cache_capacity=local_cache_capacity,
+            max_cache_age_seconds=max_cache_age_seconds,
+            max_messages_encrypted=max_messages_encrypted,
+            json_serializer=self.json_serializer,
+            json_deserializer=self.json_deserializer,
+        )
+
+    def encrypt(self, data: bytes | str | Dict | int, **provider_options) -> str:
+        return self._key_provider.encrypt(data=data, **provider_options)
+
+    def decrypt(self, data: str, **provider_options) -> Any:
+        return self._key_provider.decrypt(data=data, **provider_options)
+
+
+class KMSKeyProvider:
+
+    """
+    The KMSKeyProvider is responsible for assembling an AWS Key Management Service (KMS)
+    client, a caching mechanism, and a keyring for secure key management and data encryption.
+ """ + + def __init__( + self, + keys: List[str], + json_serializer: Callable, + json_deserializer: Callable, + local_cache_capacity: int = CACHE_CAPACITY, + max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, + max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, + ): + session = botocore.session.Session() + register_feature_to_botocore_session(session, "data-masking") + + self.json_serializer = json_serializer + self.json_deserializer = json_deserializer + self.client = EncryptionSDKClient() + self.keys = keys + self.cache = LocalCryptoMaterialsCache(local_cache_capacity) + self.key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session) + self.cache_cmm = CachingCryptoMaterialsManager( + master_key_provider=self.key_provider, + cache=self.cache, + max_age=max_cache_age_seconds, + max_messages_encrypted=max_messages_encrypted, + ) + + def encrypt(self, data: bytes | str | Dict | float, **provider_options) -> str: + """ + Encrypt data using the AwsEncryptionSdkProvider. + + Parameters + ------- + data : Union[bytes, str] + The data to be encrypted. + provider_options + Additional options for the aws_encryption_sdk.EncryptionSDKClient + + Returns + ------- + ciphertext : str + The encrypted data, as a base64-encoded string. + """ + data_encoded = self.json_serializer(data) + ciphertext, _ = self.client.encrypt( + source=data_encoded, + materials_manager=self.cache_cmm, + **provider_options, + ) + ciphertext = base64.b64encode(ciphertext).decode() + return ciphertext + + def decrypt(self, data: str, **provider_options) -> Any: + """ + Decrypt data using AwsEncryptionSdkProvider. + + Parameters + ------- + data : Union[bytes, str] + The encrypted data, as a base64-encoded string + provider_options + Additional options for the aws_encryption_sdk.EncryptionSDKClient + + Returns + ------- + ciphertext : bytes + The decrypted data in bytes + """ + ciphertext_decoded = base64.b64decode(data) + + expected_context = provider_options.pop("encryption_context", {}) + + ciphertext, decryptor_header = self.client.decrypt( + source=ciphertext_decoded, + key_provider=self.key_provider, + **provider_options, + ) + + for key, value in expected_context.items(): + if decryptor_header.encryption_context.get(key) != value: + raise ContextMismatchError(key) + + ciphertext = self.json_deserializer(ciphertext) + return ciphertext diff --git a/mypy.ini b/mypy.ini index 2b50293b561..cb2d3ce2443 100644 --- a/mypy.ini +++ b/mypy.ini @@ -12,6 +12,12 @@ disable_error_code = annotation-unchecked [mypy-jmespath] ignore_missing_imports=True +[mypy-aws_encryption_sdk] +ignore_missing_imports=True + +[mypy-sentry_sdk] +ignore_missing_imports=True + [mypy-jmespath.exceptions] ignore_missing_imports=True diff --git a/poetry.lock b/poetry.lock index a3bf0bef531..46d0d7bef43 100644 --- a/poetry.lock +++ b/poetry.lock @@ -93,69 +93,69 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.93.0a0" +version = "2.98.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.93.0a0.tar.gz", hash = "sha256:67b5c1cb5a3405f321a25da185ef949460793d9b33313f13544106bed2ce2180"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:962d52fdfbc922f104381943d2edb0d535f1d793fd73f4518fb25fb7d63041f4"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.98.0a0.tar.gz", hash = 
"sha256:c2786ad0c2f409a7215ad4d923f2a36977aad9d109910352dacbf2082857ca51"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.98.0a0-py3-none-any.whl", hash = "sha256:c460b7928c82997e666eefb7344f2c536d9b985a7892d5d191725c606c2f0e28"}, ] [package.dependencies] -aws-cdk-lib = "2.93.0" +aws-cdk-lib = "2.98.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.93.0a0" +version = "2.98.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.93.0a0.tar.gz", hash = "sha256:495969d05ca85942bc3da6fac7d0a6df5893265b644921d9e891441ee845fdfd"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:6b22e4d94afa481c94fcafdc62c2cf22ea08ea0d985e738569b39da4ba4ffbb0"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.98.0a0.tar.gz", hash = "sha256:06a2013641bb053acd803f57e6efbf22ccb43ff71a9f8b93bb3a3b5b065007eb"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.98.0a0-py3-none-any.whl", hash = "sha256:d2c3186f023d564f5081f894d75eaffbe2181098c32412e38b75821ea817ca1a"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.93.0.a0" -aws-cdk-lib = "2.93.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.98.0.a0" +aws-cdk-lib = "2.98.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.93.0a0" +version = "2.98.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.93.0a0.tar.gz", hash = "sha256:4c581f67634fab19b11025751e3ee825f055ee9d1bc77d9cbc5009f261456e62"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.93.0a0-py3-none-any.whl", hash = "sha256:48479656dca9e446ae625e5936ddd940863bd478eb86cdd62889c6b5fee9f751"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.98.0a0.tar.gz", hash = "sha256:e7f70c663fbf2140737f6f97cf7611a30494ad011c6988020b126349a88b3185"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.98.0a0-py3-none-any.whl", hash = "sha256:4175b0fba4436e52e5e1b08527ef5097a41dd1c59960c3dcf3514752bbc214eb"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.93.0.a0" -aws-cdk-lib = "2.93.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.98.0.a0" +aws-cdk-lib = "2.98.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.93.0" +version = "2.98.0" description = "Version 2 of the AWS Cloud Development Kit library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.93.0.tar.gz", hash = "sha256:54252c8df547d2bd83584278529f47506fa2c27adcbfa623f00322b685f24c18"}, - {file = "aws_cdk_lib-2.93.0-py3-none-any.whl", hash = "sha256:063e7c1f2588a254766229130347fb60e0bd7dd2a6d222d3ae2aa145a6059554"}, + {file = "aws-cdk-lib-2.98.0.tar.gz", hash = "sha256:4a1d13a9bd2b7ec0efa41d3d70291ff21cac02e9d3028253c08c53261d49bfa2"}, + {file = "aws_cdk_lib-2.98.0-py3-none-any.whl", hash = "sha256:8e5ae2ecd74e249e6cfdc0e53a1a71be516d2408d2bcd96c49c1718f51cdb16b"}, ] [package.dependencies] @@ -163,10 +163,27 @@ files = [ "aws-cdk.asset-kubectl-v20" = 
">=2.1.2,<3.0.0" "aws-cdk.asset-node-proxy-agent-v6" = ">=2.0.1,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.87.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" +[[package]] +name = "aws-encryption-sdk" +version = "3.1.1" +description = "AWS Encryption SDK implementation for Python" +optional = true +python-versions = "*" +files = [ + {file = "aws-encryption-sdk-3.1.1.tar.gz", hash = "sha256:8d5fbf018fc68d6b1cacbe4dd037fd805296c7736a9fe457eb684d053f7f9563"}, + {file = "aws_encryption_sdk-3.1.1-py2.py3-none-any.whl", hash = "sha256:a3cbbf04e0b9038b9180af8b03da896af19083e00ca011dcfcb403421458ad02"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +boto3 = ">=1.10.0" +cryptography = ">=2.5.0" +wrapt = ">=1.10.11" + [[package]] name = "aws-requests-auth" version = "0.4.3" @@ -183,23 +200,23 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.73.0" +version = "1.75.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.73.0.tar.gz", hash = "sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"}, - {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"}, + {file = "aws-sam-translator-1.75.0.tar.gz", hash = "sha256:18c83abcae594de084947befb9c80f689f8b99ece2d38729d27a9cea634da15c"}, + {file = "aws_sam_translator-1.75.0-py3-none-any.whl", hash = "sha256:02bad7636356438b439c8e0ef0195618e3b7b67b6dfbf675b1627d6fd84b2910"}, ] [package.dependencies] boto3 = ">=1.19.5,<2.dev0" jsonschema = ">=3.2,<5" -pydantic = ">=1.8,<2.0" +pydantic = ">=1.8,<3" typing-extensions = ">=4.4,<5" [package.extras] -dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.263)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" @@ -305,32 +322,32 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.35" +version = "1.28.55" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.35-py3-none-any.whl", hash = "sha256:d77415f22bbc14f3d72eaed2fc9f96d161f3ba7686922ad26d6bbc9d4985f3df"}, - {file = "boto3-1.28.35.tar.gz", hash = "sha256:580b584e36967155abed7cc9b088b3bd784e8242ae4d8841f58cb50ab05520dc"}, + {file = "boto3-1.28.55-py3-none-any.whl", hash = 
"sha256:2680c0e36167e672777110ccef5303d59fa4a6a4f10086f9c14158c5cb008d5c"}, + {file = "boto3-1.28.55.tar.gz", hash = "sha256:2ceb644b1df7c3c8907913ab367a9900af79e271b4cfca37b542ec1fa142faf8"}, ] [package.dependencies] -botocore = ">=1.31.35,<1.32.0" +botocore = ">=1.31.55,<1.32.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.6.0,<0.7.0" +s3transfer = ">=0.7.0,<0.8.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.35" +version = "1.31.55" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.35-py3-none-any.whl", hash = "sha256:943e1465aad66db4933b06809134bd08c5b05e8eb18c19742ffec82f54769457"}, - {file = "botocore-1.31.35.tar.gz", hash = "sha256:7e4534325262f43293a9cc9937cb3f1711365244ffde8b925a6ee862bcf30a83"}, + {file = "botocore-1.31.55-py3-none-any.whl", hash = "sha256:5ec27caa440257619712af0a71524cc2e56110fc502853c3e4046f87b65e42e9"}, + {file = "botocore-1.31.55.tar.gz", hash = "sha256:21ba89c4df083338ec463d9c2a8cffca42a99f9ad5f24bcac1870393b216c5a7"}, ] [package.dependencies] @@ -354,13 +371,13 @@ files = [ [[package]] name = "bytecode" -version = "0.14.2" +version = "0.15.0" description = "Python module to generate and modify bytecode" optional = false python-versions = ">=3.8" files = [ - {file = "bytecode-0.14.2-py3-none-any.whl", hash = "sha256:e368a2b9bbd7c986133c951250db94fb32f774cfc49752a9db9073bcf9899762"}, - {file = "bytecode-0.14.2.tar.gz", hash = "sha256:386378d9025d68ddb144870ae74330a492717b11b8c9164c4034e88add808f0c"}, + {file = "bytecode-0.15.0-py3-none-any.whl", hash = "sha256:a66718dc1d246b4fec52b5850c15592344a56c8bdb28fd243c895ccf00f8371f"}, + {file = "bytecode-0.15.0.tar.gz", hash = "sha256:0908a8348cabf366b5c1865daabcdc0d650cb0cbdeb1750cc90564852f81945c"}, ] [package.dependencies] @@ -402,6 +419,82 @@ files = [ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = true +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + 
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + 
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfn-lint" version = "0.80.3" @@ -548,17 +641,17 @@ files = [ [[package]] name = "constructs" -version = "10.2.69" +version = "10.2.70" description = "A programming model for software-defined state" optional = false python-versions = "~=3.7" files = [ - {file = "constructs-10.2.69-py3-none-any.whl", hash = "sha256:27a60f5ce4faa4d43c91c73f24e1a245c0a1ef67ea1c8a3df9ca6af9adf618df"}, - {file = "constructs-10.2.69.tar.gz", hash = "sha256:520ddd665cc336df90be06bb1bd49f3a9a7400d886cad8aef7b0155593b4ffa4"}, + {file = "constructs-10.2.70-py3-none-any.whl", hash = "sha256:ade1b5224830e78724ed50ce91ec2e6ce437c9983713c2b8ca541272283c5d37"}, + {file = "constructs-10.2.70.tar.gz", hash = "sha256:f4ae2e0705baff188519e0233ad2129537c8eca40d68242873ca444a659549f8"}, ] 
[package.dependencies] -jsii = ">=1.84.0,<2.0.0" +jsii = ">=1.88.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -637,15 +730,60 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "cryptography" +version = "41.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = true +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "datadog" -version = "0.46.0" +version = "0.47.0" description = "The Datadog Python library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "datadog-0.46.0-py2.py3-none-any.whl", hash = "sha256:3d7bcda6177b43be4cdb52e16b4bdd4f9005716c0dd7cfea009e018c36bb7a3d"}, - {file = "datadog-0.46.0.tar.gz", hash = "sha256:e4fbc92a85e2b0919a226896ae45fc5e4b356c0c57f1c2659659dfbe0789c674"}, + {file = "datadog-0.47.0-py2.py3-none-any.whl", hash = "sha256:a45ec997ab554208837e8c44d81d0e1456539dc14da5743687250e028bc809b7"}, + {file = "datadog-0.47.0.tar.gz", hash = "sha256:47be3b2c3d709a7f5b709eb126ed4fe6cc7977d618fe5c158dd89c2a9f7d9916"}, ] [package.dependencies] @@ -653,13 +791,13 @@ requests = ">=2.6.0" [[package]] name = "datadog-lambda" -version = "4.78.0" +version = "4.80.0" description = "The Datadog AWS Lambda Library" optional = false python-versions = ">=3.7.0,<4" files = [ - {file = "datadog_lambda-4.78.0-py3-none-any.whl", hash = "sha256:660bae6057f3b2033b0c035e9d542af491e40f9ce57b97b4891c491262b9148c"}, - {file = "datadog_lambda-4.78.0.tar.gz", hash = "sha256:3e57faa8f80ddd43b595355b92045fde8f9ed87efe8619133e82cebb87cbe434"}, + {file = "datadog_lambda-4.80.0-py3-none-any.whl", hash = "sha256:506b8964567230d87e2bfd323420854d37b4d7c7a9bfab7e192389f9b4c8150c"}, + {file = "datadog_lambda-4.80.0.tar.gz", hash = "sha256:ddd3ed20592df97523ae26ba552b69de239520c37e31804ca9949b010f90b461"}, ] [package.dependencies] @@ -917,19 +1055,22 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.35" +version = "3.1.37" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.35-py3-none-any.whl", hash = "sha256:c19b4292d7a1d3c0f653858db273ff8a6614100d1eb1528b014ec97286193c09"}, - {file = "GitPython-3.1.35.tar.gz", hash = "sha256:9cbefbd1789a5fe9bcf621bb34d3f441f3a90c8461d377f84eda73e721d9b06b"}, + {file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"}, + {file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"] + [[package]] name = "h11" version = "0.14.0" 
@@ -1213,13 +1354,13 @@ pbr = "*" [[package]] name = "jsii" -version = "1.88.0" +version = "1.89.0" description = "Python client for jsii runtime" optional = false python-versions = "~=3.7" files = [ - {file = "jsii-1.88.0-py3-none-any.whl", hash = "sha256:b3888141c30b83a30bfbe03a877bbf8ae42f957b6ccca02bae448853debffaf8"}, - {file = "jsii-1.88.0.tar.gz", hash = "sha256:a59e0f962589dcc741d2bcf2a7b4c4a927a29d3f9a2804a192c734e2e3275018"}, + {file = "jsii-1.89.0-py3-none-any.whl", hash = "sha256:20a463e8533eded656b285f532e5468a414c48ab083cf0cf93a86d593f0c36b8"}, + {file = "jsii-1.89.0.tar.gz", hash = "sha256:6edbb79afc0b7407cb64e9dd0f27b512279201307c16dd9ae72462b3cbd09970"}, ] [package.dependencies] @@ -1419,16 +1560,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1506,13 +1637,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.5.2" +version = "1.5.3" description = "Project documentation with Markdown." 
optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.5.2-py3-none-any.whl", hash = "sha256:60a62538519c2e96fe8426654a67ee177350451616118a41596ae7c876bb7eac"}, - {file = "mkdocs-1.5.2.tar.gz", hash = "sha256:70d0da09c26cff288852471be03c23f0f521fc15cf16ac89c7a3bfb9ae8d24f9"}, + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, ] [package.dependencies] @@ -1577,13 +1708,13 @@ requests = ">=2.26,<3.0" [[package]] name = "mkdocs-material-extensions" -version = "1.1.1" +version = "1.2" description = "Extension pack for Python Markdown and MkDocs Material." optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, - {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, + {file = "mkdocs_material_extensions-1.2-py3-none-any.whl", hash = "sha256:c767bd6d6305f6420a50f0b541b0c9966d52068839af97029be14443849fb8a1"}, + {file = "mkdocs_material_extensions-1.2.tar.gz", hash = "sha256:27e2d1ed2d031426a6e10d5ea06989d67e90bb02acd588bc5673106b5ee5eedf"}, ] [[package]] @@ -1835,13 +1966,13 @@ test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] [[package]] name = "opentelemetry-api" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"}, - {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"}, + {file = "opentelemetry_api-1.20.0-py3-none-any.whl", hash = "sha256:982b76036fec0fdaf490ae3dfd9f28c81442a33414f737abc687a32758cdcba5"}, + {file = "opentelemetry_api-1.20.0.tar.gz", hash = "sha256:06abe351db7572f8afdd0fb889ce53f3c992dbf6f6262507b385cc1963e06983"}, ] [package.dependencies] @@ -1955,24 +2086,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.24.2" +version = "4.24.3" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.2-cp310-abi3-win32.whl", hash = "sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924"}, - {file = "protobuf-4.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3"}, - {file = "protobuf-4.24.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880"}, - {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c"}, - {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74"}, - {file = "protobuf-4.24.2-cp37-cp37m-win32.whl", hash = "sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1"}, - {file = "protobuf-4.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099"}, - {file = "protobuf-4.24.2-cp38-cp38-win32.whl", hash = "sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e"}, - {file = 
"protobuf-4.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16"}, - {file = "protobuf-4.24.2-cp39-cp39-win32.whl", hash = "sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b"}, - {file = "protobuf-4.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd"}, - {file = "protobuf-4.24.2-py3-none-any.whl", hash = "sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e"}, - {file = "protobuf-4.24.2.tar.gz", hash = "sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e"}, + {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, + {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, + {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, + {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, + {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, + {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, + {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, + {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, + {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, + {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, + {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, ] [[package]] @@ -1997,6 +2128,17 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + [[package]] name = "pydantic" version = "1.10.12" @@ -2065,12 +2207,13 @@ plugins = ["importlib-metadata"] [[package]] name = "pyhcl" -version = "0.4.4" +version = "0.4.5" description = "HCL configuration parser for python" optional = false python-versions = "*" files = [ - {file = "pyhcl-0.4.4.tar.gz", hash = "sha256:2d9b9dcdf1023d812bfed561ba72c99104c5b3f52e558d595130a44ce081b003"}, + {file = "pyhcl-0.4.5-py3-none-any.whl", hash = "sha256:30ee337d330d1f90c9f5ed8f49c468f66c8e6e43192bdc7c6ece1420beb3070c"}, + {file = 
"pyhcl-0.4.5.tar.gz", hash = "sha256:c47293a51ccdd25e18bb5c8c0ab0ffe355b37c87f8d6f9d3280dc41efd4740bc"}, ] [[package]] @@ -2224,6 +2367,20 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] +[[package]] +name = "pytest-socket" +version = "0.6.0" +description = "Pytest Plugin to disable socket calls during tests" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "pytest_socket-0.6.0-py3-none-any.whl", hash = "sha256:cca72f134ff01e0023c402e78d31b32e68da3efdf3493bf7788f8eba86a6824c"}, + {file = "pytest_socket-0.6.0.tar.gz", hash = "sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138"}, +] + +[package.dependencies] +pytest = ">=3.6.3" + [[package]] name = "pytest-xdist" version = "3.3.1" @@ -2260,13 +2417,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -2281,7 +2438,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2289,15 +2445,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2314,7 +2463,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2322,7 +2470,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2493,13 +2640,13 @@ decorator = ">=3.4.2" [[package]] name = "rich" -version = "13.5.2" +version = "13.5.3" 
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, - {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, + {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, + {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, ] [package.dependencies] @@ -2538,13 +2685,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.6.2" +version = "0.7.0" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, - {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, + {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, + {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, ] [package.dependencies] @@ -2626,13 +2773,13 @@ files = [ [[package]] name = "smmap" -version = "5.0.0" +version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] [[package]] @@ -2994,9 +3141,11 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] +all = ["aws-encryption-sdk", "aws-xray-sdk", "fastjsonschema", "pydantic"] aws-sdk = ["boto3"] datadog = ["datadog-lambda"] +datamasking-all = ["aws-encryption-sdk"] +datamasking-aws-sdk = ["aws-encryption-sdk"] parser = ["pydantic"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] @@ -3004,4 +3153,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "4e756acb30cdd11d63e3116d54cf19027f0fd3f1dd39e85f32efdec908c95660" +content-hash = "6b80bba30034ca42bb4d53689cd73b9802333f6de3e194d01086243d0085c817" diff --git a/pyproject.toml b/pyproject.toml index bb98e8fae4f..1cad7a87b1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,21 +4,31 @@ version = "2.25.1" description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." 
authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] -classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT No Attribution License (MIT-0)", - "Natural Language :: English", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT No Attribution License (MIT-0)", + "Natural Language :: English", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ] repository = "https://github.com/aws-powertools/powertools-lambda-python" documentation = "https://docs.powertools.aws.dev/lambda/python/" readme = "README.md" -keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "powertools", "feature_flags", "idempotency", "middleware"] +keywords = [ + "aws_lambda_powertools", + "aws", + "tracing", + "logging", + "lambda", + "powertools", + "feature_flags", + "idempotency", + "middleware", +] # MIT-0 is not recognized as an existing license from poetry. # By using `MIT` as a license value, a `License :: OSI Approved :: MIT License` classifier is added to the classifiers list. license = "MIT" @@ -35,6 +45,7 @@ pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } typing-extensions = "^4.6.2" datadog-lambda = { version = "^4.77.0", optional = true } +aws-encryption-sdk = { version = "^3.1.1", optional = true } [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^7.2"} @@ -86,7 +97,8 @@ tracer = ["aws-xray-sdk"] all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
aws-sdk = ["boto3"] -datadog=["datadog-lambda"] +datadog = ["datadog-lambda"] +datamasking-aws-sdk = ["aws-encryption-sdk"] [tool.poetry.group.dev.dependencies] cfn-lint = "0.80.3" @@ -96,10 +108,16 @@ httpx = ">=0.23.3,<0.25.0" sentry-sdk = "^1.22.2" ruff = ">=0.0.272,<0.0.292" retry2 = "^0.9.5" +pytest-socket = "^0.6.0" [tool.coverage.run] source = ["aws_lambda_powertools"] -omit = ["tests/*", "aws_lambda_powertools/exceptions/*", "aws_lambda_powertools/utilities/parser/types.py", "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py"] +omit = [ + "tests/*", + "aws_lambda_powertools/exceptions/*", + "aws_lambda_powertools/utilities/parser/types.py", + "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py", +] branch = true [tool.coverage.html] @@ -109,26 +127,26 @@ title = "Powertools for AWS Lambda (Python) Test Coverage" [tool.coverage.report] fail_under = 90 exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", + # Have to re-enable the standard pragma + "pragma: no cover", - # Don't complain about missing debug-only code: - "def __repr__", - "if self.debug", + # Don't complain about missing debug-only code: + "def __repr__", + "if self.debug", - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", - # Don't complain if non-runnable code isn't run: - "if 0:", - "if __name__ == .__main__.:", + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", - # Ignore runtime type checking - "if TYPE_CHECKING:", + # Ignore runtime type checking + "if TYPE_CHECKING:", - # Ignore type function overload - "@overload", + # Ignore type function overload + "@overload", ] [tool.isort] @@ -161,16 +179,16 @@ minversion = "6.0" addopts = "-ra -vv" testpaths = "./tests" markers = [ - "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", ] # MAINTENANCE: Remove these lines when drop support to Pydantic v1 -filterwarnings=[ +filterwarnings = [ "ignore:.*The `parse_obj` method is deprecated*:DeprecationWarning", "ignore:.*The `parse_raw` method is deprecated*:DeprecationWarning", "ignore:.*load_str_bytes is deprecated*:DeprecationWarning", "ignore:.*The `dict` method is deprecated; use `model_dump` instead*:DeprecationWarning", - "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning" + "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning", ] [build-system] diff --git a/tests/e2e/data_masking/__init__.py b/tests/e2e/data_masking/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/data_masking/conftest.py b/tests/e2e/data_masking/conftest.py new file mode 100644 index 00000000000..f1892d7c0c9 --- /dev/null +++ b/tests/e2e/data_masking/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from tests.e2e.data_masking.infrastructure import DataMaskingStack + + +@pytest.fixture(autouse=True, scope="package") +def infrastructure(): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = DataMaskingStack() + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/data_masking/handlers/basic_handler.py 
b/tests/e2e/data_masking/handlers/basic_handler.py new file mode 100644 index 00000000000..7a5d965fa38 --- /dev/null +++ b/tests/e2e/data_masking/handlers/basic_handler.py @@ -0,0 +1,23 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event, context): + # Generating logs for test_encryption_in_logs test + message, append_keys = event.get("message", ""), event.get("append_keys", {}) + logger.append_keys(**append_keys) + logger.info(message) + + # Encrypting data for test_encryption_in_handler test + kms_key = event.get("kms_key", "") + data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key])) + value = [1, 2, "string", 4.5] + encrypted_data = data_masker.encrypt(value) + response = {} + response["encrypted_data"] = encrypted_data + + return response diff --git a/tests/e2e/data_masking/infrastructure.py b/tests/e2e/data_masking/infrastructure.py new file mode 100644 index 00000000000..ee18b272450 --- /dev/null +++ b/tests/e2e/data_masking/infrastructure.py @@ -0,0 +1,20 @@ +import aws_cdk.aws_kms as kms +from aws_cdk import CfnOutput, Duration +from aws_cdk import aws_iam as iam + +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class DataMaskingStack(BaseInfrastructure): + def create_resources(self): + functions = self.create_lambda_functions(function_props={"timeout": Duration.seconds(10)}) + + key1 = kms.Key(self.stack, "MyKMSKey1", description="My KMS Key1") + CfnOutput(self.stack, "KMSKey1Arn", value=key1.key_arn, description="ARN of the created KMS Key1") + + key2 = kms.Key(self.stack, "MyKMSKey2", description="My KMS Key2") + CfnOutput(self.stack, "KMSKey2Arn", value=key2.key_arn, description="ARN of the created KMS Key2") + + functions["BasicHandler"].add_to_role_policy( + iam.PolicyStatement(effect=iam.Effect.ALLOW, actions=["kms:*"], resources=[key1.key_arn, key2.key_arn]), + ) diff --git a/tests/e2e/data_masking/test_e2e_data_masking.py b/tests/e2e/data_masking/test_e2e_data_masking.py new file mode 100644 index 00000000000..c15b4fb0d38 --- /dev/null +++ b/tests/e2e/data_masking/test_e2e_data_masking.py @@ -0,0 +1,151 @@ +import json +from uuid import uuid4 + +import pytest +from aws_encryption_sdk.exceptions import DecryptKeyError + +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AwsEncryptionSdkProvider, + ContextMismatchError, +) +from tests.e2e.utils import data_fetcher + + +@pytest.fixture +def basic_handler_fn(infrastructure: dict) -> str: + return infrastructure.get("BasicHandler", "") + + +@pytest.fixture +def basic_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("BasicHandlerArn", "") + + +@pytest.fixture +def kms_key1_arn(infrastructure: dict) -> str: + return infrastructure.get("KMSKey1Arn", "") + + +@pytest.fixture +def kms_key2_arn(infrastructure: dict) -> str: + return infrastructure.get("KMSKey2Arn", "") + + +@pytest.fixture +def data_masker(kms_key1_arn) -> DataMasking: + return DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key1_arn])) + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption(data_masker): + # GIVEN an instantiation of DataMasking with the AWS encryption provider + + # AWS Encryption SDK encrypt 
method only takes in bytes or strings + value = [1, 2, "string", 4.5] + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(value) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data == value + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption_context(data_masker): + # GIVEN an instantiation of DataMasking with the AWS encryption provider + + value = [1, 2, "string", 4.5] + context = {"this": "is_secure"} + + # WHEN encrypting and then decrypting the encrypted data with an encryption_context + encrypted_data = data_masker.encrypt(value, encryption_context=context) + decrypted_data = data_masker.decrypt(encrypted_data, encryption_context=context) + + # THEN the result is the original input data + assert decrypted_data == value + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption_context_mismatch(data_masker): + # GIVEN an instantiation of DataMasking with the AWS encryption provider + + value = [1, 2, "string", 4.5] + + # WHEN encrypting with a encryption_context + encrypted_data = data_masker.encrypt(value, encryption_context={"this": "is_secure"}) + + # THEN decrypting with a different encryption_context should raise a ContextMismatchError + with pytest.raises(ContextMismatchError): + data_masker.decrypt(encrypted_data, encryption_context={"not": "same_context"}) + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption_no_context_fail(data_masker): + # GIVEN an instantiation of DataMasking with the AWS encryption provider + + value = [1, 2, "string", 4.5] + + # WHEN encrypting with no encryption_context + encrypted_data = data_masker.encrypt(value) + + # THEN decrypting with an encryption_context should raise a ContextMismatchError + with pytest.raises(ContextMismatchError): + data_masker.decrypt(encrypted_data, encryption_context={"this": "is_secure"}) + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption_decryption_key_mismatch(data_masker, kms_key2_arn): + # GIVEN an instantiation of DataMasking with the AWS encryption provider with a certain key + + # WHEN encrypting and then decrypting the encrypted data + value = [1, 2, "string", 4.5] + encrypted_data = data_masker.encrypt(value) + + # THEN when decrypting with a different key it should fail + data_masker_key2 = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key2_arn])) + + with pytest.raises(DecryptKeyError): + data_masker_key2.decrypt(encrypted_data) + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption_in_logs(data_masker, basic_handler_fn, basic_handler_fn_arn, kms_key1_arn): + # GIVEN an instantiation of DataMasking with the AWS encryption provider + + # WHEN encrypting a value and logging it + value = [1, 2, "string", 4.5] + encrypted_data = data_masker.encrypt(value) + message = encrypted_data + custom_key = "order_id" + additional_keys = {custom_key: f"{uuid4()}"} + payload = json.dumps({"message": message, "kms_key": kms_key1_arn, "append_keys": additional_keys}) + + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) + data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) + + logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time, minimum_log_entries=2) + + # THEN decrypting it from the logs should show the original value + for log in logs.get_log(key=custom_key): + encrypted_data = log.message + 
decrypted_data = data_masker.decrypt(encrypted_data) + assert decrypted_data == value + + +@pytest.mark.xdist_group(name="data_masking") +def test_encryption_in_handler(data_masker, basic_handler_fn_arn, kms_key1_arn): + # GIVEN a lambda_handler with an instantiation the AWS encryption provider data masker + + payload = {"kms_key": kms_key1_arn} + + # WHEN the handler is invoked to encrypt data + handler_result, _ = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=json.dumps(payload)) + + response = json.loads(handler_result["Payload"].read()) + encrypted_data = response["encrypted_data"] + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN decrypting the encrypted data from the response should result in the original value + assert decrypted_data == [1, 2, "string", 4.5] diff --git a/tests/e2e/utils/lambda_layer/powertools_layer.py b/tests/e2e/utils/lambda_layer/powertools_layer.py index 70870af200e..05147048676 100644 --- a/tests/e2e/utils/lambda_layer/powertools_layer.py +++ b/tests/e2e/utils/lambda_layer/powertools_layer.py @@ -1,6 +1,6 @@ -import logging import subprocess from pathlib import Path +from typing import List from aws_cdk.aws_lambda import Architecture from checksumdir import dirhash @@ -9,18 +9,20 @@ from tests.e2e.utils.constants import CDK_OUT_PATH, SOURCE_CODE_ROOT_PATH from tests.e2e.utils.lambda_layer.base import BaseLocalLambdaLayer -logger = logging.getLogger(__name__) - class LocalLambdaPowertoolsLayer(BaseLocalLambdaLayer): IGNORE_EXTENSIONS = ["pyc"] + ARCHITECTURE_PLATFORM_MAPPING = { + Architecture.X86_64.name: ("manylinux_2_17_x86_64", "manylinux_2_28_x86_64"), + Architecture.ARM_64.name: ("manylinux_2_17_aarch64", "manylinux_2_28_aarch64"), + } def __init__(self, output_dir: Path = CDK_OUT_PATH, architecture: Architecture = Architecture.X86_64): super().__init__(output_dir) self.package = f"{SOURCE_CODE_ROOT_PATH}[all]" - platform_name = self._resolve_platform(architecture) - self.build_args = f"--platform {platform_name} --only-binary=:all: --upgrade" + self.platform_args = self._resolve_platform(architecture) + self.build_args = f"{self.platform_args} --only-binary=:all: --upgrade" self.build_command = f"python -m pip install {self.package} {self.build_args} --target {self.target_dir}" self.cleanup_command = ( f"rm -rf {self.target_dir}/boto* {self.target_dir}/s3transfer* && " @@ -62,16 +64,20 @@ def _has_source_changed(self) -> bool: return False def _resolve_platform(self, architecture: Architecture) -> str: - """Returns the correct plaform name for the manylinux project (see PEP 599) + """Returns the correct pip platform tag argument for the manylinux project (see PEP 599) Returns ------- - platform_name : str - The platform tag + str + pip's platform argument, e.g., --platform manylinux_2_17_x86_64 --platform manylinux_2_28_x86_64 """ - if architecture.name == Architecture.X86_64.name: - return "manylinux1_x86_64" - elif architecture.name == Architecture.ARM_64.name: - return "manylinux2014_aarch64" - else: - raise ValueError(f"unknown architecture {architecture.name}") + platforms = self.ARCHITECTURE_PLATFORM_MAPPING.get(architecture.name) + if not platforms: + raise ValueError( + f"unknown architecture {architecture.name}. 
Supported: {self.ARCHITECTURE_PLATFORM_MAPPING.keys()}", + ) + + return self._build_platform_args(platforms) + + def _build_platform_args(self, platforms: List[str]): + return " ".join([f"--platform {platform}" for platform in platforms]) diff --git a/tests/functional/data_masking/__init__.py b/tests/functional/data_masking/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/data_masking/conftest.py b/tests/functional/data_masking/conftest.py new file mode 100644 index 00000000000..f73ccca4113 --- /dev/null +++ b/tests/functional/data_masking/conftest.py @@ -0,0 +1,6 @@ +from pytest_socket import disable_socket + + +def pytest_runtest_setup(): + """Disable Unix and TCP sockets for Data masking tests""" + disable_socket() diff --git a/tests/functional/data_masking/test_aws_encryption_sdk.py b/tests/functional/data_masking/test_aws_encryption_sdk.py new file mode 100644 index 00000000000..49ed775c10d --- /dev/null +++ b/tests/functional/data_masking/test_aws_encryption_sdk.py @@ -0,0 +1,283 @@ +from __future__ import annotations + +import base64 +import json +from typing import Any, Callable, Dict, Union + +import pytest + +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING +from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider +from aws_lambda_powertools.utilities.data_masking.provider.kms import ( + AwsEncryptionSdkProvider, +) + + +class FakeEncryptionKeyProvider(BaseProvider): + def __init__( + self, + json_serializer: Callable[[Dict], str] | None = None, + json_deserializer: Callable[[Union[Dict, str, bool, int, float]], str] | None = None, + ): + super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer) + + def encrypt(self, data: bytes | str, **kwargs) -> str: + data = self.json_serializer(data) + ciphertext = base64.b64encode(data).decode() + return ciphertext + + def decrypt(self, data: bytes, **kwargs) -> Any: + ciphertext_decoded = base64.b64decode(data) + ciphertext = self.json_deserializer(ciphertext_decoded) + return ciphertext + + +@pytest.fixture +def data_masker(monkeypatch) -> DataMasking: + """DataMasking using AWS Encryption SDK Provider with a fake client""" + fake_key_provider = FakeEncryptionKeyProvider() + provider = AwsEncryptionSdkProvider( + keys=["dummy"], + key_provider=fake_key_provider, + ) + return DataMasking(provider=provider) + + +def test_mask_int(data_masker): + # GIVEN an int data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(42) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_float(data_masker): + # GIVEN a float data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(4.2) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_bool(data_masker): + # GIVEN a bool data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(True) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_none(data_masker): + # GIVEN a None data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(None) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_str(data_masker): + # GIVEN a str data type + + # WHEN mask is 
called with no fields argument + masked_string = data_masker.mask("this is a string") + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_list(data_masker): + # GIVEN a list data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask([1, 2, "string", 3]) + + # THEN the result is the data masked, while maintaining type list + assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] + + +def test_mask_dict(data_masker): + # GIVEN a dict data type + data = { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + } + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(data) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_dict_with_fields(data_masker): + # GIVEN a dict data type + data = { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + } + + # WHEN mask is called with a list of fields specified + masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + + # THEN the result is only the specified fields are masked + assert masked_string == { + "a": { + "1": {"None": DATA_MASKING_STRING, "four": "world"}, + "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, + }, + } + + +def test_mask_json_dict_with_fields(data_masker): + # GIVEN the data type is a json representation of a dictionary + data = json.dumps( + { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + }, + ) + + # WHEN mask is called with a list of fields specified + masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + + # THEN the result is only the specified fields are masked + assert masked_json_string == { + "a": { + "1": {"None": DATA_MASKING_STRING, "four": "world"}, + "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, + }, + } + + +def test_encrypt_int(data_masker): + # GIVEN an int data type + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(-1) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data == -1 + + +def test_encrypt_float(data_masker): + # GIVEN an float data type + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(-1.11) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data == -1.11 + + +def test_encrypt_bool(data_masker): + # GIVEN an bool data type + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(True) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data is True + + +def test_encrypt_none(data_masker): + # GIVEN an none data type + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(None) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data is None + + +def test_encrypt_str(data_masker): + # GIVEN an str data type + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt("this is a string") + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result 
is the original input data + assert decrypted_data == "this is a string" + + +def test_encrypt_list(data_masker): + # GIVEN an list data type + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt([1, 2, "a string", 3.4]) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data == [1, 2, "a string", 3.4] + + +def test_encrypt_dict(data_masker): + # GIVEN an dict data type + data = { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + } + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is the original input data + assert decrypted_data == data + + +def test_encrypt_dict_with_fields(data_masker): + # GIVEN the data type is a dictionary + data = { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + } + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"]) + decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"]) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_json_dict_with_fields(data_masker): + # GIVEN the data type is a json representation of a dictionary + data = json.dumps( + { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"]) + decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"]) + + # THEN the result is only the specified fields are masked + assert decrypted_data == json.loads(data) diff --git a/tests/performance/data_masking/data_masking_load_test.yaml b/tests/performance/data_masking/data_masking_load_test.yaml new file mode 100644 index 00000000000..5f696d57114 --- /dev/null +++ b/tests/performance/data_masking/data_masking_load_test.yaml @@ -0,0 +1,32 @@ +config: + target: https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function128 + phases: + - duration: 60 + arrivalRate: 1 + rampTo: 5 + name: Warm up phase + - duration: 60 + arrivalRate: 5 + rampTo: 10 + name: Ramp up load + - duration: 30 + arrivalRate: 10 + rampTo: 30 + name: Spike phase + # Load & configure a couple of useful plugins + # https://docs.art/reference/extensions + plugins: + apdex: {} + metrics-by-endpoint: {} + apdex: + threshold: 500 +scenarios: + - flow: + - loop: + - get: + url: "https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function128" + - get: + url: "https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function1024" + - get: + url: "https://sebwc2y7gh.execute-api.us-west-2.amazonaws.com/Prod/function1769" + count: 100 \ No newline at end of file diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/.gitignore b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/.gitignore new file mode 100644 index 00000000000..4c7a643c028 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/.gitignore @@ -0,0 +1,243 @@ +# Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode + +### Linux ### +*~ + +# temporary 
files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### OSX ### +*.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff: +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/dictionaries + +# Sensitive or high-churn files: +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.xml +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml + +# Gradle: +.idea/**/gradle.xml +.idea/**/libraries + +# CMake +cmake-build-debug/ + +# Mongo Explorer plugin: +.idea/**/mongoSettings.xml + +## File-based project format: +*.iws + +## Plugin-specific files: + +# IntelliJ +/out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Ruby plugin and RubyMine +/.rakeTasks + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +.idea/sonarlint + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +.pytest_cache/ +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule.* + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +.history + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Folder config file +Desktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# Build folder + +*/build/* + +# End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/README.md b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/README.md new file mode 100644 index 00000000000..aed9d43976d --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/README.md @@ -0,0 +1,163 @@ +# pt-load-test-stack + +Congratulations, you have just created a Serverless "Hello World" application using the AWS Serverless Application Model (AWS SAM) for the `python3.10` runtime, and options to bootstrap it with [**Powertools for AWS Lambda (Python)**](https://awslabs.github.io/aws-lambda-powertools-python/latest/) (Powertools for AWS Lambda (Python)) utilities for Logging, Tracing and Metrics. + +Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity. + +## Powertools for AWS Lambda (Python) features + +Powertools for AWS Lambda (Python) provides three core utilities: + +* **[Tracing](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/tracer/)** - Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions +* **[Logging](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/logger/)** - Structured logging made easier, and decorator to enrich structured logging with key Lambda context details +* **[Metrics](https://awslabs.github.io/aws-lambda-powertools-python/latest/core/metrics/)** - Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) + +Find the complete project's [documentation here](https://awslabs.github.io/aws-lambda-powertools-python). 
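In this particular stack, the Lambda functions (`function_128`, `function_1024`, `function_1769`) exercise the new data masking utility against a KMS key. A minimal sketch of the round trip they load-test, assuming `KMS_KEY_ARN` is supplied through the function environment and that the execution role is allowed to use the key:

```python
import os

from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider

# Assumption: KMS_KEY_ARN is set in the function environment (e.g., via template.yaml).
KMS_KEY_ARN = os.environ["KMS_KEY_ARN"]

record = {
    "name": "John Doe",
    "address": {"street": "123 Main St", "city": "Anytown"},
    "job_history": {"company": {"company_name": "Acme Inc."}},
}

data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN]))

# Only the listed fields are encrypted; decrypting with the same fields restores the original dict.
fields = ["address.street", "job_history.company.company_name"]
encrypted = data_masker.encrypt(record, fields=fields)
decrypted = data_masker.decrypt(encrypted, fields=fields)
assert decrypted == record
```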
+ +### Installing Powertools for AWS Lambda (Python) + +With [pip](https://pip.pypa.io/en/latest/index.html) installed, run: + +```bash +pip install aws-lambda-powertools +``` + +### Powertools for AWS Lambda (Python) Examples + +* [Tutorial](https://awslabs.github.io/aws-lambda-powertools-python/latest/tutorial) +* [Serverless Shopping cart](https://github.com/aws-samples/aws-serverless-shopping-cart) +* [Serverless Airline](https://github.com/aws-samples/aws-serverless-airline-booking) +* [Serverless E-commerce platform](https://github.com/aws-samples/aws-serverless-ecommerce-platform) +* [Serverless GraphQL Nanny Booking Api](https://github.com/trey-rosius/babysitter_api) + +## Working with this project + +This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders. + +* hello_world - Code for the application's Lambda function. +* events - Invocation events that you can use to invoke the function. +* tests - Unit tests for the application code. +* template.yaml - A template that defines the application's AWS resources. + +The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code. + +If you prefer to use an integrated development environment (IDE) to build and test your application, you can use the AWS Toolkit. +The AWS Toolkit is an open source plug-in for popular IDEs that uses the SAM CLI to build and deploy serverless applications on AWS. The AWS Toolkit also adds a simplified step-through debugging experience for Lambda function code. See the following links to get started. + +* [CLion](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [GoLand](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [IntelliJ](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [WebStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [Rider](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PhpStorm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [PyCharm](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [RubyMine](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [DataGrip](https://docs.aws.amazon.com/toolkit-for-jetbrains/latest/userguide/welcome.html) +* [VS Code](https://docs.aws.amazon.com/toolkit-for-vscode/latest/userguide/welcome.html) +* [Visual Studio](https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/welcome.html) + +### Deploy the sample application + +The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API. + +To use the SAM CLI, you need the following tools.
+ +* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) +* [Python 3 installed](https://www.python.org/downloads/) +* Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) + +To build and deploy your application for the first time, run the following in your shell: + +```bash +sam build --use-container +sam deploy --guided +``` + +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: + +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. + +You can find your API Gateway Endpoint URL in the output values displayed after deployment. + +### Use the SAM CLI to build and test locally + +Build your application with the `sam build --use-container` command. + +```bash +pt-load-test-stack$ sam build --use-container +``` + +The SAM CLI installs dependencies defined in `hello_world/requirements.txt`, creates a deployment package, and saves it in the `.aws-sam/build` folder. + +Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project. + +Run functions locally and invoke them with the `sam local invoke` command. + +```bash +pt-load-test-stack$ sam local invoke HelloWorldFunction --event events/event.json +``` + +The SAM CLI can also emulate your application's API. Use the `sam local start-api` to run the API locally on port 3000. + +```bash +pt-load-test-stack$ sam local start-api +pt-load-test-stack$ curl http://localhost:3000/ +``` + +The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path. + +```yaml + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get +``` + +### Add a resource to your application + +The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. 
AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types. + +### Fetch, tail, and filter Lambda function logs + +To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug. + +`NOTE`: This command works for all AWS Lambda functions; not just the ones you deploy using SAM. + +```bash +pt-load-test-stack$ sam logs -n HelloWorldFunction --stack-name pt-load-test-stack --tail +``` + +You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html). + +### Tests + +Tests are defined in the `tests` folder in this project. Use PIP to install the test dependencies and run tests. + +```bash +pt-load-test-stack$ pip install -r tests/requirements.txt --user +# unit test +pt-load-test-stack$ python -m pytest tests/unit -v +# integration test, requiring deploying the stack first. +# Create the env variable AWS_SAM_STACK_NAME with the name of the stack we are testing +pt-load-test-stack$ AWS_SAM_STACK_NAME="pt-load-test-stack" python -m pytest tests/integration -v +``` + +### Cleanup + +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: + +```bash +sam delete --stack-name "pt-load-test-stack" +``` + +## Resources + +See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts. 
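A quick way to verify the deployed stack before running the load phases in `data_masking_load_test.yaml` is to call the three endpoints directly; each handler returns the decrypted JSON blob, so a successful response confirms the encrypt/decrypt round trip. A sketch using `requests` (the base URL is a placeholder; substitute the API Gateway endpoint from your stack outputs):

```python
# Smoke test for the load-test endpoints; BASE_URL is illustrative only.
import requests

BASE_URL = "https://<api-id>.execute-api.<region>.amazonaws.com/Prod"

for path in ("function128", "function1024", "function1769"):
    response = requests.get(f"{BASE_URL}/{path}", timeout=30)
    response.raise_for_status()
    # Handlers return keys such as "Decrypted_json_blob_function_128".
    print(path, list(response.json().keys()))
```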
+ +Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) \ No newline at end of file diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/events/hello.json b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/events/hello.json new file mode 100644 index 00000000000..fdb5180fe0a --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/events/hello.json @@ -0,0 +1,111 @@ +{ + "body":"", + "headers":{ + "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", + "Accept-Encoding":"gzip, deflate, br", + "Accept-Language":"pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7", + "Cache-Control":"max-age=0", + "Connection":"keep-alive", + "Host":"127.0.0.1:3000", + "Sec-Ch-Ua":"\"Google Chrome\";v=\"105\", \"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"105\"", + "Sec-Ch-Ua-Mobile":"?0", + "Sec-Ch-Ua-Platform":"\"Linux\"", + "Sec-Fetch-Dest":"document", + "Sec-Fetch-Mode":"navigate", + "Sec-Fetch-Site":"none", + "Sec-Fetch-User":"?1", + "Upgrade-Insecure-Requests":"1", + "User-Agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36", + "X-Forwarded-Port":"3000", + "X-Forwarded-Proto":"http" + }, + "httpMethod":"GET", + "isBase64Encoded": false, + "multiValueHeaders":{ + "Accept":[ + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9" + ], + "Accept-Encoding":[ + "gzip, deflate, br" + ], + "Accept-Language":[ + "pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7" + ], + "Cache-Control":[ + "max-age=0" + ], + "Connection":[ + "keep-alive" + ], + "Host":[ + "127.0.0.1:3000" + ], + "Sec-Ch-Ua":[ + "\"Google Chrome\";v=\"105\", \"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"105\"" + ], + "Sec-Ch-Ua-Mobile":[ + "?0" + ], + "Sec-Ch-Ua-Platform":[ + "\"Linux\"" + ], + "Sec-Fetch-Dest":[ + "document" + ], + "Sec-Fetch-Mode":[ + "navigate" + ], + "Sec-Fetch-Site":[ + "none" + ], + "Sec-Fetch-User":[ + "?1" + ], + "Upgrade-Insecure-Requests":[ + "1" + ], + "User-Agent":[ + "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36" + ], + "X-Forwarded-Port":[ + "3000" + ], + "X-Forwarded-Proto":[ + "http" + ] + }, + "multiValueQueryStringParameters":"", + "path":"/hello", + "pathParameters":"", + "queryStringParameters":"", + "requestContext":{ + "accountId":"123456789012", + "apiId":"1234567890", + "domainName":"127.0.0.1:3000", + "extendedRequestId":"", + "httpMethod":"GET", + "identity":{ + "accountId":"", + "apiKey":"", + "caller":"", + "cognitoAuthenticationProvider":"", + "cognitoAuthenticationType":"", + "cognitoIdentityPoolId":"", + "sourceIp":"127.0.0.1", + "user":"", + "userAgent":"Custom User Agent String", + "userArn":"" + }, + "path":"/hello", + "protocol":"HTTP/1.1", + "requestId":"a3590457-cac2-4f10-8fc9-e47114bf7c62", + "requestTime":"02/Feb/2023:11:45:26 +0000", + "requestTimeEpoch":1675338326, + "resourceId":"123456", + 
"resourcePath":"/hello", + "stage":"Prod" + }, + "resource":"/hello", + "stageVariables":"", + "version":"1.0" + } diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py new file mode 100644 index 00000000000..f6988ea66ac --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py @@ -0,0 +1,60 @@ +import os + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] + +json_blob = { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"}, + "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"], + "interests": ["Hiking", "Traveling", "Photography", "Reading"], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr.", + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31", + }, + "about_me": """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis + sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus, + ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim. + Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue, + risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin + interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat + volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat. + Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus + malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc. 
+ """, +} + +app = APIGatewayRestResolver() +tracer = Tracer() +logger = Logger() + + +@app.get("/function1024") +@tracer.capture_method +def function1024(): + logger.info("Hello world function1024 - HTTP 200") + data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) + decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) + return {"Decrypted_json_blob_function_1024": decrypted} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/requirements.txt b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/requirements.txt new file mode 100644 index 00000000000..b74b60fc263 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/requirements.txt @@ -0,0 +1,3 @@ +requests +aws-lambda-powertools[tracer] +aws-encryption-sdk diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py new file mode 100644 index 00000000000..463ed7ca1a9 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py @@ -0,0 +1,60 @@ +import os + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] + +json_blob = { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"}, + "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"], + "interests": ["Hiking", "Traveling", "Photography", "Reading"], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr.", + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31", + }, + "about_me": """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis + sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus, + ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim. + Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue, + risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin + interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat + volutpat. Donec nec luctus sem, nec ornare lorem. 
Vivamus vitae orci quis enim faucibus placerat. + Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus + malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc. + """, +} + +app = APIGatewayRestResolver() +tracer = Tracer() +logger = Logger() + + +@app.get("/function128") +@tracer.capture_method +def function128(): + logger.info("Hello world function128 - HTTP 200") + data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) + decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) + return {"Decrypted_json_blob_function_128": decrypted} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/requirements.txt b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/requirements.txt new file mode 100644 index 00000000000..b74b60fc263 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/requirements.txt @@ -0,0 +1,3 @@ +requests +aws-lambda-powertools[tracer] +aws-encryption-sdk diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/__init__.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py new file mode 100644 index 00000000000..44ddeab189b --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py @@ -0,0 +1,60 @@ +import os + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] + +json_blob = { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"}, + "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"], + "interests": ["Hiking", "Traveling", "Photography", "Reading"], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr.", + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31", + }, + "about_me": """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis + sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus, + ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim. + Praesent fringilla sem eu dui convallis luctus. 
Donec ullamcorper, sapien ut convallis congue, + risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin + interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat + volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat. + Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus + malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc. + """, +} + +app = APIGatewayRestResolver() +tracer = Tracer() +logger = Logger() + + +@app.get("/function1769") +@tracer.capture_method +def function1769(): + logger.info("Hello world function1769 - HTTP 200") + data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) + decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) + return {"Decrypted_json_blob_function_1769": decrypted} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/requirements.txt b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/requirements.txt new file mode 100644 index 00000000000..b74b60fc263 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/requirements.txt @@ -0,0 +1,3 @@ +requests +aws-lambda-powertools[tracer] +aws-encryption-sdk diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/samconfig.toml b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/samconfig.toml new file mode 100644 index 00000000000..82f9cdc06d9 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/samconfig.toml @@ -0,0 +1,34 @@ +# More information about the configuration file can be found here: +# https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html +version = 0.1 + +[default] +[default.global.parameters] +stack_name = "pt-load-test-stack" + +[default.build.parameters] +cached = true +parallel = true + +[default.validate.parameters] +lint = true + +[default.deploy.parameters] +capabilities = "CAPABILITY_IAM" +confirm_changeset = true +resolve_s3 = true +s3_prefix = "pt-load-test-stack" +region = "us-west-2" +image_repositories = [] + +[default.package.parameters] +resolve_s3 = true + +[default.sync.parameters] +watch = true + +[default.local_start_api.parameters] +warm_containers = "EAGER" + +[default.local_start_lambda.parameters] +warm_containers = "EAGER" diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml new file mode 100644 index 00000000000..f2a6540c267 --- /dev/null +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml @@ -0,0 +1,147 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + pt-load-test-stack + + Powertools for AWS Lambda (Python) example + +Globals: # 
https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-template-anatomy-globals.html + Function: + Timeout: 5 + Runtime: python3.10 + + Tracing: Active + Api: + TracingEnabled: true +Resources: + MyKMSKey: + Type: AWS::KMS::Key + Properties: + Enabled: true + KeyPolicy: + Version: 2012-10-17 + Statement: + - Effect: Allow + Action: kms:* + Resource: "*" + Principal: + AWS: !Join [ "", [ "arn:aws:iam::", !Ref "AWS::AccountId", ":root" ] ] + Function128: + Type: AWS::Serverless::Function # More info about Function Resource: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html + Properties: + Handler: app.lambda_handler + CodeUri: function_128 + Description: function 128 MB + MemorySize: 128 + Architectures: + - x86_64 + Policies: + Statement: + - Effect: Allow + Action: kms:* + Resource: !GetAtt MyKMSKey.Arn + Tracing: Active + Events: + HelloPath: + Type: Api # More info about API Event Source: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-api.html + Properties: + Path: /function128 + Method: GET + # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_METRICS_NAMESPACE: Powertools + LOG_LEVEL: INFO + KMS_KEY_ARN: !GetAtt MyKMSKey.Arn + Tags: + LambdaPowertools: python + Function1024: + Type: AWS::Serverless::Function # More info about Function Resource: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html + Properties: + Handler: app.lambda_handler + CodeUri: function_1024 + Description: function 1024 MB + MemorySize: 1024 + Architectures: + - x86_64 + Policies: + Statement: + - Effect: Allow + Action: kms:* + Resource: !GetAtt MyKMSKey.Arn + Tracing: Active + Events: + HelloPath: + Type: Api # More info about API Event Source: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-api.html + Properties: + Path: /function1024 + Method: GET + # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_METRICS_NAMESPACE: Powertools + LOG_LEVEL: INFO + KMS_KEY_ARN: !GetAtt MyKMSKey.Arn + Tags: + LambdaPowertools: python + Function1769: + Type: AWS::Serverless::Function # More info about Function Resource: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-resource-function.html + Properties: + Handler: app.lambda_handler + CodeUri: function_1769 + Description: function 1769 MB + MemorySize: 1769 + Architectures: + - x86_64 + Policies: + Statement: + - Effect: Allow + Action: kms:* + Resource: !GetAtt MyKMSKey.Arn + Tracing: Active + Events: + HelloPath: + Type: Api # More info about API Event Source: https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-api.html + Properties: + Path: /function1769 + Method: GET + # Powertools for AWS Lambda (Python) env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_METRICS_NAMESPACE: Powertools + LOG_LEVEL: INFO + KMS_KEY_ARN: !GetAtt MyKMSKey.Arn + Tags: + LambdaPowertools: python + +Outputs: + KMSKeyArn: 
+ Description: ARN of the KMS Key + Value: !GetAtt MyKMSKey.Arn + + 128FunctionApi: + Description: API Gateway endpoint URL for Prod environment for Function 128 MB + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/function128" + + 1024FunctionApi: + Description: API Gateway endpoint URL for Prod environment for Function 1024 MB + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/function1024" + + 1769FunctionApi: + Description: API Gateway endpoint URL for Prod environment for Function 1769 MB + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/function1769" + + Function128: + Description: Lambda Function 128 MB ARN + Value: !GetAtt Function128.Arn + + Function1024: + Description: Lambda Function 1024 MB ARN + Value: !GetAtt Function1024.Arn + + Function1769: + Description: Lambda Function 1769 MB ARN + Value: !GetAtt Function1769.Arn diff --git a/tests/performance/data_masking/test_perf_data_masking.py b/tests/performance/data_masking/test_perf_data_masking.py new file mode 100644 index 00000000000..9dcf041b81f --- /dev/null +++ b/tests/performance/data_masking/test_perf_data_masking.py @@ -0,0 +1,69 @@ +import importlib +from types import ModuleType + +import pytest + +from aws_lambda_powertools.utilities.data_masking.base import DataMasking + +DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities.data_masking" +DATA_MASKING_INIT_SLA: float = 0.002 +DATA_MASKING_NESTED_ENCRYPT_SLA: float = 0.001 + +json_blob = { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"}, + "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"], + "interests": ["Hiking", "Traveling", "Photography", "Reading"], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr.", + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31", + }, + "about_me": """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis + sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus, + ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim. + Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue, + risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin + interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat + volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat. + Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus + malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc. 
+ """, +} +json_blob_fields = ["address.street", "job_history.company.company_name"] + + +def import_data_masking_utility() -> ModuleType: + """Dynamically imports and return DataMasking module""" + return importlib.import_module(DATA_MASKING_PACKAGE) + + +@pytest.mark.perf +@pytest.mark.benchmark(group="core", disable_gc=True, warmup=False) +def test_data_masking_init(benchmark): + benchmark.pedantic(import_data_masking_utility) + stat = benchmark.stats.stats.max + if stat > DATA_MASKING_INIT_SLA: + pytest.fail(f"High level imports should be below {DATA_MASKING_INIT_SLA}s: {stat}") + + +def mask_json_blob(): + data_masker = DataMasking() + data_masker.mask(json_blob, json_blob_fields) + + +@pytest.mark.perf +@pytest.mark.benchmark(group="core", disable_gc=True, warmup=False) +def test_data_masking_encrypt_with_json_blob(benchmark): + benchmark.pedantic(mask_json_blob) + stat = benchmark.stats.stats.max + if stat > DATA_MASKING_NESTED_ENCRYPT_SLA: + pytest.fail(f"High level imports should be below {DATA_MASKING_NESTED_ENCRYPT_SLA}s: {stat}") diff --git a/tests/unit/data_masking/test_unit_data_masking.py b/tests/unit/data_masking/test_unit_data_masking.py new file mode 100644 index 00000000000..096eaf0bb6b --- /dev/null +++ b/tests/unit/data_masking/test_unit_data_masking.py @@ -0,0 +1,205 @@ +import json + +import pytest + +from aws_lambda_powertools.utilities.data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING + + +@pytest.fixture +def data_masker() -> DataMasking: + return DataMasking() + + +def test_mask_int(data_masker): + # GIVEN an int data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(42) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_float(data_masker): + # GIVEN a float data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(4.2) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_bool(data_masker): + # GIVEN a bool data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(True) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_none(data_masker): + # GIVEN a None data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(None) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_str(data_masker): + # GIVEN a str data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask("this is a string") + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def test_mask_list(data_masker): + # GIVEN a list data type + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask([1, 2, "string", 3]) + + # THEN the result is the data masked, while maintaining type list + assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] + + +def test_mask_dict(data_masker): + # GIVEN a dict data type + data = { + "a": { + "1": {"None": "hello", "four": "world"}, + "b": {"3": {"4": "goodbye", "e": "world"}}, + }, + } + + # WHEN mask is called with no fields argument + masked_string = data_masker.mask(data) + + # THEN the result is the data masked + assert masked_string == DATA_MASKING_STRING + + +def 
+    # GIVEN a dict data type
+    data = {
+        "a": {
+            "1": {"None": "hello", "four": "world"},
+            "b": {"3": {"4": "goodbye", "e": "world"}},
+        },
+    }
+
+    # WHEN mask is called with a list of fields specified
+    masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+
+    # THEN only the specified fields are masked
+    assert masked_string == {
+        "a": {
+            "1": {"None": DATA_MASKING_STRING, "four": "world"},
+            "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
+        },
+    }
+
+
+def test_mask_json_dict_with_fields(data_masker):
+    # GIVEN a JSON string representation of a dictionary
+    data = json.dumps(
+        {
+            "a": {
+                "1": {"None": "hello", "four": "world"},
+                "b": {"3": {"4": "goodbye", "e": "world"}},
+            },
+        },
+    )
+
+    # WHEN mask is called with a list of fields specified
+    masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+
+    # THEN only the specified fields are masked
+    assert masked_json_string == {
+        "a": {
+            "1": {"None": DATA_MASKING_STRING, "four": "world"},
+            "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
+        },
+    }
+
+
+def test_encrypt_not_implemented(data_masker):
+    # GIVEN DataMasking is not initialized with a Provider
+
+    # WHEN attempting to call the encrypt method on the data
+    with pytest.raises(NotImplementedError):
+        # THEN NotImplementedError is raised
+        data_masker.encrypt("hello world")
+
+
+def test_decrypt_not_implemented(data_masker):
+    # GIVEN DataMasking is not initialized with a Provider
+
+    # WHEN attempting to call the decrypt method on the data
+    with pytest.raises(NotImplementedError):
+        # THEN NotImplementedError is raised
+        data_masker.decrypt("hello world")
+
+
+def test_parsing_unsupported_data_type(data_masker):
+    # GIVEN an initialization of the DataMasking class
+
+    # WHEN attempting to pass in a list of fields with input data that is not a dict
+    with pytest.raises(TypeError):
+        # THEN TypeError is raised
+        data_masker.mask(42, ["this.field"])
+
+
+def test_parsing_nonexistent_fields(data_masker):
+    # GIVEN a dict data type
+    data = {
+        "3": {
+            "1": {"None": "hello", "four": "world"},
+            "4": {"33": {"5": "goodbye", "e": "world"}},
+        },
+    }
+
+    # WHEN attempting to pass in fields that do not exist in the input data
+    with pytest.raises(KeyError):
+        # THEN KeyError is raised
+        data_masker.mask(data, ["3.1.True"])
+
+
+def test_parsing_nonstring_fields(data_masker):
+    # GIVEN a dict data type
+    data = {
+        "3": {
+            "1": {"None": "hello", "four": "world"},
+            "4": {"33": {"5": "goodbye", "e": "world"}},
+        },
+    }
+
+    # WHEN attempting to pass in a list of fields that are not strings
+    masked = data_masker.mask(data, fields=[3.4])
+
+    # THEN the value of the nested field is masked as normal
+    assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}}
+
+
+def test_parsing_nonstring_keys_and_fields(data_masker):
+    # GIVEN a dict data type with integer keys
+    data = {
+        3: {
+            "1": {"None": "hello", "four": "world"},
+            4: {"33": {"5": "goodbye", "e": "world"}},
+        },
+    }
+
+    # WHEN mask is called with a non-string field path
+    masked = data_masker.mask(data, fields=[3.4])
+
+    # THEN the value of the nested field is masked and the integer keys are converted to strings
+    assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}}