diff --git a/Makefile b/Makefile index 80c89f72961..7fa170b28c6 100644 --- a/Makefile +++ b/Makefile @@ -8,13 +8,13 @@ dev: pip install --upgrade pip pre-commit poetry poetry config --local virtualenvs.in-project true @$(MAKE) dev-version-plugin - poetry install --extras "all datamasking-aws-sdk redis" + poetry install --extras "all redis" pre-commit install dev-gitpod: pip install --upgrade pip poetry @$(MAKE) dev-version-plugin - poetry install --extras "all datamasking-aws-sdk redis" + poetry install --extras "all redis" pre-commit install format: diff --git a/README.md b/README.md index d230c31906e..d3f0ec30603 100644 --- a/README.md +++ b/README.md @@ -30,6 +30,7 @@ Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverles * **[Event source data classes](https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/)** - Data classes describing the schema of common Lambda event triggers * **[Parser](https://docs.powertools.aws.dev/lambda/python/latest/utilities/parser/)** - Data parsing and deep validation using Pydantic * **[Idempotency](https://docs.powertools.aws.dev/lambda/python/latest/utilities/idempotency/)** - Convert your Lambda functions into idempotent operations which are safe to retry +* **[Data Masking](https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_masking/)** - Protect confidential data with easy removal or encryption * **[Feature Flags](https://docs.powertools.aws.dev/lambda/python/latest/utilities/feature_flags/)** - A simple rule engine to evaluate when one or multiple features should be enabled depending on the input * **[Streaming](https://docs.powertools.aws.dev/lambda/python/latest/utilities/streaming/)** - Streams datasets larger than the available memory as streaming data. diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index c427f0d720f..9765f55c025 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -96,10 +96,18 @@ def resolve_env_var_choice( def base64_decode(value: str) -> bytes: try: - logger.debug("Decoding base64 record item before parsing") + logger.debug("Decoding base64 item to bytes") return base64.b64decode(value) except (BinAsciiError, TypeError): - raise ValueError("base64 decode failed") + raise ValueError("base64 decode failed - is this base64 encoded string?") + + +def bytes_to_base64_string(value: bytes) -> str: + try: + logger.debug("Encoding bytes to base64 string") + return base64.b64encode(value).decode() + except TypeError: + raise ValueError(f"base64 encoding failed - is this bytes data? type: {type(value)}") def bytes_to_string(value: bytes) -> str: diff --git a/aws_lambda_powertools/utilities/_data_masking/base.py b/aws_lambda_powertools/utilities/_data_masking/base.py deleted file mode 100644 index 211e44c3759..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/base.py +++ /dev/null @@ -1,174 +0,0 @@ -import json -from typing import Optional, Union - -from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider - - -class DataMasking: - """ - Note: This utility is currently in a Non-General Availability (Non-GA) phase and may have limitations. - Please DON'T USE THIS utility in production environments. - Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced. - - A utility class for masking sensitive data within various data types. 
- - This class provides methods for masking sensitive information, such as personal - identifiers or confidential data, within different data types such as strings, - dictionaries, lists, and more. It helps protect sensitive information while - preserving the structure of the original data. - - Usage: - Instantiate an object of this class and use its methods to mask sensitive data - based on the data type. Supported data types include strings, dictionaries, - and more. - - Example: - ``` - from aws_lambda_powertools.utilities.data_masking.base import DataMasking - - def lambda_handler(event, context): - masker = DataMasking() - - data = { - "project": "powertools", - "sensitive": "xxxxxxxxxx" - } - - masked = masker.mask(data,fields=["sensitive"]) - - return masked - - ``` - """ - - def __init__(self, provider: Optional[BaseProvider] = None): - self.provider = provider or BaseProvider() - - def encrypt(self, data, fields=None, **provider_options): - return self._apply_action(data, fields, self.provider.encrypt, **provider_options) - - def decrypt(self, data, fields=None, **provider_options): - return self._apply_action(data, fields, self.provider.decrypt, **provider_options) - - def mask(self, data, fields=None, **provider_options): - return self._apply_action(data, fields, self.provider.mask, **provider_options) - - def _apply_action(self, data, fields, action, **provider_options): - """ - Helper method to determine whether to apply a given action to the entire input data - or to specific fields if the 'fields' argument is specified. - - Parameters - ---------- - data : any - The input data to process. - fields : Optional[List[any]] = None - A list of fields to apply the action to. If 'None', the action is applied to the entire 'data'. - action : Callable - The action to apply to the data. It should be a callable that performs an operation on the data - and returns the modified value. - - Returns - ------- - any - The modified data after applying the action. - """ - - if fields is not None: - return self._apply_action_to_fields(data, fields, action, **provider_options) - else: - return action(data, **provider_options) - - def _apply_action_to_fields( - self, - data: Union[dict, str], - fields: list, - action, - **provider_options, - ) -> Union[dict, str]: - """ - This method takes the input data, which can be either a dictionary or a JSON string, - and applies a mask, an encryption, or a decryption to the specified fields. - - Parameters - ---------- - data : Union[dict, str]) - The input data to process. It can be either a dictionary or a JSON string. - fields : List - A list of fields to apply the action to. Each field can be specified as a string or - a list of strings representing nested keys in the dictionary. - action : Callable - The action to apply to the fields. It should be a callable that takes the current - value of the field as the first argument and any additional arguments that might be required - for the action. It performs an operation on the current value using the provided arguments and - returns the modified value. - **provider_options: - Additional keyword arguments to pass to the 'action' function. - - Returns - ------- - dict - The modified dictionary after applying the action to the - specified fields. - - Raises - ------- - ValueError - If 'fields' parameter is None. 
- TypeError - If the 'data' parameter is not a traversable type - - Example - ------- - ```python - >>> data = {'a': {'b': {'c': 1}}, 'x': {'y': 2}} - >>> fields = ['a.b.c', 'a.x.y'] - # The function will transform the value at 'a.b.c' (1) and 'a.x.y' (2) - # and store the result as: - new_dict = {'a': {'b': {'c': 'transformed_value'}}, 'x': {'y': 'transformed_value'}} - ``` - """ - - if fields is None: - raise ValueError("No fields specified.") - - if isinstance(data, str): - # Parse JSON string as dictionary - my_dict_parsed = json.loads(data) - elif isinstance(data, dict): - # In case their data has keys that are not strings (i.e. ints), convert it all into a JSON string - my_dict_parsed = json.dumps(data) - # Turn back into dict so can parse it - my_dict_parsed = json.loads(my_dict_parsed) - else: - raise TypeError( - f"Unsupported data type for 'data' parameter. Expected a traversable type, but got {type(data)}.", - ) - - # For example: ['a.b.c'] in ['a.b.c', 'a.x.y'] - for nested_key in fields: - # Prevent overriding loop variable - curr_nested_key = nested_key - - # If the nested_key is not a string, convert it to a string representation - if not isinstance(curr_nested_key, str): - curr_nested_key = json.dumps(curr_nested_key) - - # Split the nested key string into a list of nested keys - # ['a.b.c'] -> ['a', 'b', 'c'] - keys = curr_nested_key.split(".") - - # Initialize a current dictionary to the root dictionary - curr_dict = my_dict_parsed - - # Traverse the dictionary hierarchy by iterating through the list of nested keys - for key in keys[:-1]: - curr_dict = curr_dict[key] - - # Retrieve the final value of the nested field - valtochange = curr_dict[(keys[-1])] - - # Apply the specified 'action' to the target value - curr_dict[keys[-1]] = action(valtochange, **provider_options) - - return my_dict_parsed diff --git a/aws_lambda_powertools/utilities/_data_masking/constants.py b/aws_lambda_powertools/utilities/_data_masking/constants.py deleted file mode 100644 index 47e74f472cf..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/constants.py +++ /dev/null @@ -1,5 +0,0 @@ -DATA_MASKING_STRING: str = "*****" -CACHE_CAPACITY: int = 100 -MAX_CACHE_AGE_SECONDS: float = 300.0 -MAX_MESSAGES_ENCRYPTED: int = 200 -# NOTE: You can also set max messages/bytes per data key diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py b/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py deleted file mode 100644 index 7ee07f964b1..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from aws_lambda_powertools.utilities._data_masking.provider.base import BaseProvider - -__all__ = [ - "BaseProvider", -] diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/base.py b/aws_lambda_powertools/utilities/_data_masking/provider/base.py deleted file mode 100644 index a293c6aff9a..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/base.py +++ /dev/null @@ -1,34 +0,0 @@ -import json -from typing import Any - -from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING - - -class BaseProvider: - """ - When you try to create an instance of a subclass that does not implement the encrypt method, - you will get a NotImplementedError with a message that says the method is not implemented: - """ - - def __init__(self, json_serializer=None, json_deserializer=None) -> None: - self.json_serializer = json_serializer or self.default_json_serializer 
- self.json_deserializer = json_deserializer or self.default_json_deserializer - - def default_json_serializer(self, data): - return json.dumps(data).encode("utf-8") - - def default_json_deserializer(self, data): - return json.loads(data.decode("utf-8")) - - def encrypt(self, data) -> str: - raise NotImplementedError("Subclasses must implement encrypt()") - - def decrypt(self, data) -> Any: - raise NotImplementedError("Subclasses must implement decrypt()") - - def mask(self, data) -> Any: - if isinstance(data, (str, dict, bytes)): - return DATA_MASKING_STRING - elif isinstance(data, (list, tuple, set)): - return type(data)([DATA_MASKING_STRING] * len(data)) - return DATA_MASKING_STRING diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py b/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py deleted file mode 100644 index f257339d634..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/kms/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider - -__all__ = [ - "AwsEncryptionSdkProvider", -] diff --git a/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py deleted file mode 100644 index a895f8de0ac..00000000000 --- a/aws_lambda_powertools/utilities/_data_masking/provider/kms/aws_encryption_sdk.py +++ /dev/null @@ -1,177 +0,0 @@ -from __future__ import annotations - -import base64 -from typing import Any, Callable, Dict, List - -import botocore -from aws_encryption_sdk import ( - CachingCryptoMaterialsManager, - EncryptionSDKClient, - LocalCryptoMaterialsCache, - StrictAwsKmsMasterKeyProvider, -) - -from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session -from aws_lambda_powertools.utilities._data_masking.constants import ( - CACHE_CAPACITY, - MAX_CACHE_AGE_SECONDS, - MAX_MESSAGES_ENCRYPTED, -) -from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider - - -class ContextMismatchError(Exception): - def __init__(self, key): - super().__init__(f"Encryption Context does not match expected value for key: {key}") - self.key = key - - -class AwsEncryptionSdkProvider(BaseProvider): - """ - The AwsEncryptionSdkProvider is used as a provider for the DataMasking class. - - This provider allows you to perform data masking using the AWS Encryption SDK - for encryption and decryption. It integrates with the DataMasking class to - securely encrypt and decrypt sensitive data. 
- - Usage Example: - ``` - from aws_lambda_powertools.utilities.data_masking import DataMasking - from aws_lambda_powertools.utilities.data_masking.providers.kms.aws_encryption_sdk import ( - AwsEncryptionSdkProvider, - ) - - - def lambda_handler(event, context): - provider = AwsEncryptionSdkProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"]) - masker = DataMasking(provider=provider) - - data = { - "project": "powertools", - "sensitive": "xxxxxxxxxx" - } - - masked = masker.encrypt(data,fields=["sensitive"]) - - return masked - - ``` - """ - - def __init__( - self, - keys: List[str], - key_provider=None, - local_cache_capacity: int = CACHE_CAPACITY, - max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, - max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, - json_serializer: Callable | None = None, - json_deserializer: Callable | None = None, - ): - super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer) - - self._key_provider = key_provider or KMSKeyProvider( - keys=keys, - local_cache_capacity=local_cache_capacity, - max_cache_age_seconds=max_cache_age_seconds, - max_messages_encrypted=max_messages_encrypted, - json_serializer=self.json_serializer, - json_deserializer=self.json_deserializer, - ) - - def encrypt(self, data: bytes | str | Dict | int, **provider_options) -> str: - return self._key_provider.encrypt(data=data, **provider_options) - - def decrypt(self, data: str, **provider_options) -> Any: - return self._key_provider.decrypt(data=data, **provider_options) - - -class KMSKeyProvider: - - """ - The KMSKeyProvider is responsible for assembling an AWS Key Management Service (KMS) - client, a caching mechanism, and a keyring for secure key management and data encryption. - """ - - def __init__( - self, - keys: List[str], - json_serializer: Callable, - json_deserializer: Callable, - local_cache_capacity: int = CACHE_CAPACITY, - max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, - max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, - ): - session = botocore.session.Session() - register_feature_to_botocore_session(session, "data-masking") - - self.json_serializer = json_serializer - self.json_deserializer = json_deserializer - self.client = EncryptionSDKClient() - self.keys = keys - self.cache = LocalCryptoMaterialsCache(local_cache_capacity) - self.key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session) - self.cache_cmm = CachingCryptoMaterialsManager( - master_key_provider=self.key_provider, - cache=self.cache, - max_age=max_cache_age_seconds, - max_messages_encrypted=max_messages_encrypted, - ) - - def encrypt(self, data: bytes | str | Dict | float, **provider_options) -> str: - """ - Encrypt data using the AwsEncryptionSdkProvider. - - Parameters - ------- - data : Union[bytes, str] - The data to be encrypted. - provider_options - Additional options for the aws_encryption_sdk.EncryptionSDKClient - - Returns - ------- - ciphertext : str - The encrypted data, as a base64-encoded string. - """ - data_encoded = self.json_serializer(data) - ciphertext, _ = self.client.encrypt( - source=data_encoded, - materials_manager=self.cache_cmm, - **provider_options, - ) - ciphertext = base64.b64encode(ciphertext).decode() - return ciphertext - - def decrypt(self, data: str, **provider_options) -> Any: - """ - Decrypt data using AwsEncryptionSdkProvider. 
- - Parameters - ------- - data : Union[bytes, str] - The encrypted data, as a base64-encoded string - provider_options - Additional options for the aws_encryption_sdk.EncryptionSDKClient - - Returns - ------- - ciphertext : bytes - The decrypted data in bytes - """ - ciphertext_decoded = base64.b64decode(data) - - expected_context = provider_options.pop("encryption_context", {}) - - ciphertext, decryptor_header = self.client.decrypt( - source=ciphertext_decoded, - key_provider=self.key_provider, - **provider_options, - ) - - for key, value in expected_context.items(): - if decryptor_header.encryption_context.get(key) != value: - raise ContextMismatchError(key) - - ciphertext = self.json_deserializer(ciphertext) - return ciphertext diff --git a/aws_lambda_powertools/utilities/_data_masking/__init__.py b/aws_lambda_powertools/utilities/data_masking/__init__.py similarity index 81% rename from aws_lambda_powertools/utilities/_data_masking/__init__.py rename to aws_lambda_powertools/utilities/data_masking/__init__.py index 806c856ba75..4d767e83ce1 100644 --- a/aws_lambda_powertools/utilities/_data_masking/__init__.py +++ b/aws_lambda_powertools/utilities/data_masking/__init__.py @@ -4,7 +4,7 @@ Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced. """ -from aws_lambda_powertools.utilities._data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.base import DataMasking __all__ = [ "DataMasking", diff --git a/aws_lambda_powertools/utilities/data_masking/base.py b/aws_lambda_powertools/utilities/data_masking/base.py new file mode 100644 index 00000000000..c2557dcef24 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/base.py @@ -0,0 +1,291 @@ +from __future__ import annotations + +import functools +import logging +import warnings +from numbers import Number +from typing import Any, Callable, Mapping, Optional, Sequence, Union, overload + +from jsonpath_ng.ext import parse + +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingFieldNotFoundError, + DataMaskingUnsupportedTypeError, +) +from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider + +logger = logging.getLogger(__name__) + + +class DataMasking: + """ + Note: This utility is currently in a Non-General Availability (Non-GA) phase and may have limitations. + Please DON'T USE THIS utility in production environments. + Keep in mind that when we transition to General Availability (GA), there might be breaking changes introduced. + + The DataMasking class orchestrates erasing, encrypting, and decrypting + for the base provider. 
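+
+    When no provider is given, it falls back to `BaseProvider`: `erase` works out
+    of the box, while `encrypt` and `decrypt` require an encryption-capable
+    provider such as `AWSEncryptionSDKProvider`.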
+ + Example: + ``` + from aws_lambda_powertools.utilities.data_masking.base import DataMasking + + def lambda_handler(event, context): + masker = DataMasking() + + data = { + "project": "powertools", + "sensitive": "password" + } + + erased = masker.erase(data,fields=["sensitive"]) + + return erased + + ``` + """ + + def __init__( + self, + provider: Optional[BaseProvider] = None, + raise_on_missing_field: bool = True, + ): + self.provider = provider or BaseProvider() + # NOTE: we depend on Provider to not confuse customers in passing the same 2 serializers in 2 places + self.json_serializer = self.provider.json_serializer + self.json_deserializer = self.provider.json_deserializer + self.raise_on_missing_field = raise_on_missing_field + + def encrypt( + self, + data: dict | Mapping | Sequence | Number, + provider_options: dict | None = None, + **encryption_context: str, + ) -> str: + return self._apply_action( + data=data, + fields=None, + action=self.provider.encrypt, + provider_options=provider_options or {}, + **encryption_context, + ) + + def decrypt( + self, + data, + provider_options: dict | None = None, + **encryption_context: str, + ) -> Any: + return self._apply_action( + data=data, + fields=None, + action=self.provider.decrypt, + provider_options=provider_options or {}, + **encryption_context, + ) + + @overload + def erase(self, data, fields: None) -> str: + ... + + @overload + def erase(self, data: list, fields: list[str]) -> list[str]: + ... + + @overload + def erase(self, data: tuple, fields: list[str]) -> tuple[str]: + ... + + @overload + def erase(self, data: dict, fields: list[str]) -> dict: + ... + + def erase(self, data: Sequence | Mapping, fields: list[str] | None = None) -> str | list[str] | tuple[str] | dict: + return self._apply_action(data=data, fields=fields, action=self.provider.erase) + + def _apply_action( + self, + data, + fields: list[str] | None, + action: Callable, + provider_options: dict | None = None, + **encryption_context: str, + ): + """ + Helper method to determine whether to apply a given action to the entire input data + or to specific fields if the 'fields' argument is specified. + + Parameters + ---------- + data : str | dict + The input data to process. + fields : Optional[List[str]] + A list of fields to apply the action to. If 'None', the action is applied to the entire 'data'. + action : Callable + The action to apply to the data. It should be a callable that performs an operation on the data + and returns the modified value. + provider_options : dict + Provider specific keyword arguments to propagate; used as an escape hatch. + encryption_context: str + Encryption context to use in encrypt and decrypt operations. + + Returns + ------- + any + The modified data after applying the action. + """ + + if fields is not None: + logger.debug(f"Running action {action.__name__} with fields {fields}") + return self._apply_action_to_fields( + data=data, + fields=fields, + action=action, + provider_options=provider_options, + **encryption_context, + ) + else: + logger.debug(f"Running action {action.__name__} with the entire data") + return action(data=data, provider_options=provider_options, **encryption_context) + + def _apply_action_to_fields( + self, + data: Union[dict, str], + fields: list, + action: Callable, + provider_options: dict | None = None, + **encryption_context: str, + ) -> Union[dict, str]: + """ + This method takes the input data, which can be either a dictionary or a JSON string, + and erases, encrypts, or decrypts the specified fields. 
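+
+        Field expressions are matched with JSONPath (via `jsonpath_ng.ext`),
+        e.g. `"address.postcode"` or `"address[*].street"`.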
+ + Parameters + ---------- + data : Union[dict, str]) + The input data to process. It can be either a dictionary or a JSON string. + fields : List + A list of fields to apply the action to. Each field can be specified as a string or + a list of strings representing nested keys in the dictionary. + action : Callable + The action to apply to the fields. It should be a callable that takes the current + value of the field as the first argument and any additional arguments that might be required + for the action. It performs an operation on the current value using the provided arguments and + returns the modified value. + provider_options : dict + Optional dictionary representing additional options for the action. + **encryption_context: str + Additional keyword arguments collected into a dictionary. + + Returns + ------- + dict | str + The modified dictionary or string after applying the action to the + specified fields. + + Raises + ------- + ValueError + If 'fields' parameter is None. + TypeError + If the 'data' parameter is not a traversable type + + Example + ------- + ```python + >>> data = {'a': {'b': {'c': 1}}, 'x': {'y': 2}} + >>> fields = ['a.b.c', 'a.x.y'] + # The function will transform the value at 'a.b.c' (1) and 'a.x.y' (2) + # and store the result as: + new_dict = {'a': {'b': {'c': '*****'}}, 'x': {'y': '*****'}} + ``` + """ + + data_parsed: dict = self._normalize_data_to_parse(fields, data) + + # For in-place updates, json_parse accepts a callback function + # this function must receive 3 args: field_value, fields, field_name + # We create a partial callback to pre-populate known options (action, provider opts, enc ctx) + update_callback = functools.partial( + self._call_action, + action=action, + provider_options=provider_options, + **encryption_context, + ) + + # Iterate over each field to be parsed. + for field_parse in fields: + # Parse the field expression using a 'parse' function. + json_parse = parse(field_parse) + # Find the corresponding keys in the normalized data using the parsed expression. + result_parse = json_parse.find(data_parsed) + + if not result_parse: + if self.raise_on_missing_field: + # If the data for the field is not found, raise an exception. + raise DataMaskingFieldNotFoundError(f"Field or expression {field_parse} not found in {data_parsed}") + else: + # If the data for the field is not found, warning. + warnings.warn(f"Field or expression {field_parse} not found in {data_parsed}", stacklevel=2) + + # For in-place updates, json_parse accepts a callback function + # that receives 3 args: field_value, fields, field_name + # We create a partial callback to pre-populate known provider options (action, provider opts, enc ctx) + update_callback = functools.partial( + self._call_action, + action=action, + provider_options=provider_options, + **encryption_context, + ) + + json_parse.update( + data_parsed, + lambda field_value, fields, field_name: update_callback(field_value, fields, field_name), # noqa: B023 + ) + + return data_parsed + + @staticmethod + def _call_action( + field_value: Any, + fields: dict[str, Any], + field_name: str, + action: Callable, + provider_options: dict | None = None, + **encryption_context, + ) -> None: + """ + Apply a specified action to a field value and update the fields dictionary. + + Params: + -------- + - field_value: Current value of the field being processed. + - fields: Dictionary representing the fields being processed (mutable). + - field_name: Name of the field being processed. 
+ - action: Callable (function or method) to be applied to the field_value. + - provider_options: Optional dictionary representing additional options for the action. + - **encryption_context: Additional keyword arguments collected into a dictionary. + + Returns: + - fields[field_name]: Returns the processed field value + """ + fields[field_name] = action(field_value, provider_options=provider_options, **encryption_context) + return fields[field_name] + + def _normalize_data_to_parse(self, fields: list, data: str | dict) -> dict: + if not fields: + raise ValueError("No fields specified.") + + if isinstance(data, str): + # Parse JSON string as dictionary + data_parsed = self.json_deserializer(data) + elif isinstance(data, dict): + # Convert the data to a JSON string in case it contains non-string keys (e.g., ints) + # Parse the JSON string back into a dictionary + data_parsed = self.json_deserializer(self.json_serializer(data)) + else: + raise DataMaskingUnsupportedTypeError( + f"Unsupported data type. Expected a traversable type (dict or str), but got {type(data)}.", + ) + + return data_parsed diff --git a/aws_lambda_powertools/utilities/data_masking/constants.py b/aws_lambda_powertools/utilities/data_masking/constants.py new file mode 100644 index 00000000000..f35f4291e40 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/constants.py @@ -0,0 +1,14 @@ +# The string that replaces values that have been erased +DATA_MASKING_STRING: str = "*****" +# The maximum number of entries that can be retained in the local cryptographic materials cache +CACHE_CAPACITY: int = 100 +# The maximum time (in seconds) that a cache entry may be kept in the cache +MAX_CACHE_AGE_SECONDS: float = 300.0 +# Maximum number of messages which are allowed to be encrypted under a single cached data key +# Values can be [1 - 4294967296] (2 ** 32) +MAX_MESSAGES_ENCRYPTED: int = 4294967296 +# Maximum number of bytes which are allowed to be encrypted under a single cached data key +# Values can be [1 - 9223372036854775807] (2 ** 63 - 1) +MAX_BYTES_ENCRYPTED: int = 9223372036854775807 + +ENCRYPTED_DATA_KEY_CTX_KEY = "aws-crypto-public-key" diff --git a/aws_lambda_powertools/utilities/data_masking/exceptions.py b/aws_lambda_powertools/utilities/data_masking/exceptions.py new file mode 100644 index 00000000000..7c962ddf385 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/exceptions.py @@ -0,0 +1,34 @@ +class DataMaskingUnsupportedTypeError(Exception): + """ + UnsupportedType Error + """ + + +class DataMaskingDecryptKeyError(Exception): + """ + Decrypting with an invalid AWS KMS Key ARN. + """ + + +class DataMaskingEncryptKeyError(Exception): + """ + Encrypting with an invalid AWS KMS Key ARN. + """ + + +class DataMaskingDecryptValueError(Exception): + """ + Decrypting an invalid field. + """ + + +class DataMaskingContextMismatchError(Exception): + """ + Decrypting with the incorrect encryption context. + """ + + +class DataMaskingFieldNotFoundError(Exception): + """ + Field not found. 
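+
+    Raised when a field expression passed via `fields` matches nothing in the
+    input data and `raise_on_missing_field` is True; a warning is emitted
+    instead when it is False.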
+ """ diff --git a/aws_lambda_powertools/utilities/data_masking/provider/__init__.py b/aws_lambda_powertools/utilities/data_masking/provider/__init__.py new file mode 100644 index 00000000000..5a0180eb82b --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/__init__.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools.utilities.data_masking.provider.base import BaseProvider + +__all__ = [ + "BaseProvider", +] diff --git a/aws_lambda_powertools/utilities/data_masking/provider/base.py b/aws_lambda_powertools/utilities/data_masking/provider/base.py new file mode 100644 index 00000000000..3aacba1b7b2 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/base.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import functools +import json +from typing import Any, Callable, Iterable + +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING + + +class BaseProvider: + """ + The BaseProvider class serves as an abstract base class for data masking providers. + + Examples + -------- + ``` + from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider + from aws_lambda_powertools.utilities.data_masking import DataMasking + + class MyCustomProvider(BaseProvider): + def encrypt(self, data) -> str: + # Implementation logic for data encryption + + def decrypt(self, data) -> Any: + # Implementation logic for data decryption + + def erase(self, data) -> Union[str, Iterable]: + # Implementation logic for data masking + pass + + def lambda_handler(event, context): + provider = MyCustomProvider(["secret-key"]) + data_masker = DataMasking(provider=provider) + + data = { + "project": "powertools", + "sensitive": "password" + } + + encrypted = data_masker.encrypt(data) + + return encrypted + ``` + """ + + def __init__( + self, + json_serializer: Callable[..., str] = functools.partial(json.dumps, ensure_ascii=False), + json_deserializer: Callable[[str], Any] = json.loads, + ) -> None: + self.json_serializer = json_serializer + self.json_deserializer = json_deserializer + + def encrypt(self, data, provider_options: dict | None = None, **encryption_context: str) -> str: + """ + Abstract method for encrypting data. Subclasses must implement this method. + """ + raise NotImplementedError("Subclasses must implement encrypt()") + + def decrypt(self, data, provider_options: dict | None = None, **encryption_context: str) -> Any: + """ + Abstract method for decrypting data. Subclasses must implement this method. + """ + raise NotImplementedError("Subclasses must implement decrypt()") + + def erase(self, data, **kwargs) -> Iterable[str]: + """ + This method irreversibly erases data. + + If the data to be erased is of type `str`, `dict`, or `bytes`, + this method will return an erased string, i.e. "*****". + + If the data to be erased is of an iterable type like `list`, `tuple`, + or `set`, this method will return a new object of the same type as the + input data but with each element replaced by the string "*****". 
+ """ + if isinstance(data, (str, dict, bytes)): + return DATA_MASKING_STRING + elif isinstance(data, (list, tuple, set)): + return type(data)([DATA_MASKING_STRING] * len(data)) + return DATA_MASKING_STRING diff --git a/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py b/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py new file mode 100644 index 00000000000..c1353094144 --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/kms/__init__.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider + +__all__ = [ + "AWSEncryptionSDKProvider", +] diff --git a/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py new file mode 100644 index 00000000000..bbdbb0bad6f --- /dev/null +++ b/aws_lambda_powertools/utilities/data_masking/provider/kms/aws_encryption_sdk.py @@ -0,0 +1,247 @@ +from __future__ import annotations + +import functools +import json +import logging +from binascii import Error +from typing import Any, Callable, List + +import botocore +from aws_encryption_sdk import ( + CachingCryptoMaterialsManager, + EncryptionSDKClient, + LocalCryptoMaterialsCache, + StrictAwsKmsMasterKeyProvider, +) +from aws_encryption_sdk.exceptions import ( + DecryptKeyError, + GenerateKeyError, + NotSupportedError, +) +from aws_encryption_sdk.structures import MessageHeader + +from aws_lambda_powertools.shared.functions import ( + base64_decode, + bytes_to_base64_string, + bytes_to_string, +) +from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session +from aws_lambda_powertools.utilities.data_masking.constants import ( + CACHE_CAPACITY, + ENCRYPTED_DATA_KEY_CTX_KEY, + MAX_BYTES_ENCRYPTED, + MAX_CACHE_AGE_SECONDS, + MAX_MESSAGES_ENCRYPTED, +) +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingContextMismatchError, + DataMaskingDecryptKeyError, + DataMaskingDecryptValueError, + DataMaskingEncryptKeyError, + DataMaskingUnsupportedTypeError, +) +from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider + +logger = logging.getLogger(__name__) + + +class AWSEncryptionSDKProvider(BaseProvider): + """ + The AWSEncryptionSDKProvider is used as a provider for the DataMasking class. 
+ + Usage + ------- + ``` + from aws_lambda_powertools.utilities.data_masking import DataMasking + from aws_lambda_powertools.utilities.data_masking.providers.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, + ) + + + def lambda_handler(event, context): + provider = AWSEncryptionSDKProvider(["arn:aws:kms:us-east-1:0123456789012:key/key-id"]) + data_masker = DataMasking(provider=provider) + + data = { + "project": "powertools", + "sensitive": "password" + } + + encrypted = data_masker.encrypt(data) + + return encrypted + + ``` + """ + + def __init__( + self, + keys: List[str], + key_provider=None, + local_cache_capacity: int = CACHE_CAPACITY, + max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, + max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, + max_bytes_encrypted: int = MAX_BYTES_ENCRYPTED, + json_serializer: Callable[..., str] = functools.partial(json.dumps, ensure_ascii=False), + json_deserializer: Callable[[str], Any] = json.loads, + ): + super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer) + + self._key_provider = key_provider or KMSKeyProvider( + keys=keys, + local_cache_capacity=local_cache_capacity, + max_cache_age_seconds=max_cache_age_seconds, + max_messages_encrypted=max_messages_encrypted, + max_bytes_encrypted=max_bytes_encrypted, + json_serializer=json_serializer, + json_deserializer=json_deserializer, + ) + + def encrypt(self, data: Any, provider_options: dict | None = None, **encryption_context: str) -> str: + return self._key_provider.encrypt(data=data, provider_options=provider_options, **encryption_context) + + def decrypt(self, data: str, provider_options: dict | None = None, **encryption_context: str) -> Any: + return self._key_provider.decrypt(data=data, provider_options=provider_options, **encryption_context) + + +class KMSKeyProvider: + + """ + The KMSKeyProvider is responsible for assembling an AWS Key Management Service (KMS) + client, a caching mechanism, and a keyring for secure key management and data encryption. + """ + + def __init__( + self, + keys: List[str], + json_serializer: Callable[..., str], + json_deserializer: Callable[[str], Any], + local_cache_capacity: int = CACHE_CAPACITY, + max_cache_age_seconds: float = MAX_CACHE_AGE_SECONDS, + max_messages_encrypted: int = MAX_MESSAGES_ENCRYPTED, + max_bytes_encrypted: int = MAX_BYTES_ENCRYPTED, + ): + session = botocore.session.Session() + register_feature_to_botocore_session(session, "data-masking") + + self.json_serializer = json_serializer + self.json_deserializer = json_deserializer + self.client = EncryptionSDKClient() + self.keys = keys + self.cache = LocalCryptoMaterialsCache(local_cache_capacity) + self.key_provider = StrictAwsKmsMasterKeyProvider(key_ids=self.keys, botocore_session=session) + self.cache_cmm = CachingCryptoMaterialsManager( + master_key_provider=self.key_provider, + cache=self.cache, + max_age=max_cache_age_seconds, + max_messages_encrypted=max_messages_encrypted, + max_bytes_encrypted=max_bytes_encrypted, + ) + + def encrypt(self, data: Any, provider_options: dict | None = None, **encryption_context: str) -> str: + """ + Encrypt data using the AWSEncryptionSDKProvider. + + Parameters + ------- + data : Union[bytes, str] + The data to be encrypted. + provider_options : dict + Additional options for the aws_encryption_sdk.EncryptionSDKClient + **encryption_context : str + Additional keyword arguments collected into a dictionary. + + Returns + ------- + ciphertext : str + The encrypted data, as a base64-encoded string. 
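+
+        Note
+        ----
+        Values passed via `**encryption_context` must be strings; they are embedded
+        in the encrypted message and verified against the context supplied to `decrypt`.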
+ """ + provider_options = provider_options or {} + self._validate_encryption_context(encryption_context) + + data_encoded = self.json_serializer(data).encode("utf-8") + + try: + ciphertext, _ = self.client.encrypt( + source=data_encoded, + materials_manager=self.cache_cmm, + encryption_context=encryption_context, + **provider_options, + ) + except GenerateKeyError: + raise DataMaskingEncryptKeyError( + "Failed to encrypt data. Please ensure you are using a valid Symmetric AWS KMS Key ARN, not KMS Key ID or alias.", # noqa E501 + ) + + return bytes_to_base64_string(ciphertext) + + def decrypt(self, data: str, provider_options: dict | None = None, **encryption_context: str) -> Any: + """ + Decrypt data using AWSEncryptionSDKProvider. + + Parameters + ------- + data : Union[bytes, str] + The encrypted data, as a base64-encoded string + provider_options + Additional options for the aws_encryption_sdk.EncryptionSDKClient + + Returns + ------- + ciphertext : bytes + The decrypted data in bytes + """ + provider_options = provider_options or {} + self._validate_encryption_context(encryption_context) + + try: + ciphertext_decoded = base64_decode(data) + except Error: + raise DataMaskingDecryptValueError( + "Data decryption failed. Please ensure that you are attempting to decrypt data that was previously encrypted.", # noqa E501 + ) + + try: + decryptor_header: MessageHeader + + ciphertext, decryptor_header = self.client.decrypt( + source=ciphertext_decoded, + key_provider=self.key_provider, + **provider_options, + ) + except DecryptKeyError: + raise DataMaskingDecryptKeyError( + "Failed to decrypt data - Please ensure you are using a valid Symmetric AWS KMS Key ARN, not KMS Key ID or alias.", # noqa E501 + ) + except (TypeError, NotSupportedError): + raise DataMaskingDecryptValueError( + "Data decryption failed. Please ensure that you are attempting to decrypt data that was previously encrypted.", # noqa E501 + ) + + self._compare_encryption_context(decryptor_header.encryption_context, encryption_context) + + decoded_ciphertext = bytes_to_string(ciphertext) + + return self.json_deserializer(decoded_ciphertext) + + @staticmethod + def _validate_encryption_context(context: dict): + if not context: + return + + for key, value in context.items(): + if not isinstance(value, str): + raise DataMaskingUnsupportedTypeError( + f"Encryption context values must be string. Received: {key}={value}", + ) + + @staticmethod + def _compare_encryption_context(actual_context: dict, expected_context: dict): + # We can safely remove encrypted data key after decryption for exact match verification + actual_context.pop(ENCRYPTED_DATA_KEY_CTX_KEY, None) + + # Encryption context could be out of order hence a set + if set(actual_context.items()) != set(expected_context.items()): + raise DataMaskingContextMismatchError( + "Encryption context does not match. 
You must use the exact same context used during encryption", + ) diff --git a/docs/index.md b/docs/index.md index 7f1ca98fb74..b13bbc122d8 100644 --- a/docs/index.md +++ b/docs/index.md @@ -701,6 +701,7 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai | [**Event source data classes**](./utilities/data_classes.md){target="_blank"} | Data classes describing the schema of common Lambda event triggers | | [**Parser**](./utilities/parser.md){target="_blank"} | Data parsing and deep validation using Pydantic | | [**Idempotency**](./utilities/idempotency.md){target="_blank"} | Idempotent Lambda handler | +| [**Data Masking**](./utilities/data_masking.md){target="_blank"} | Protect confidential data with easy removal or encryption | | [**Feature Flags**](./utilities/feature_flags.md){target="_blank"} | A simple rule engine to evaluate when one or multiple features should be enabled depending on the input | | [**Streaming**](./utilities/streaming.md){target="_blank"} | Streams datasets larger than the available memory as streaming data. | diff --git a/docs/utilities/data_masking.md b/docs/utilities/data_masking.md new file mode 100644 index 00000000000..5c30edc6bff --- /dev/null +++ b/docs/utilities/data_masking.md @@ -0,0 +1,638 @@ +--- +title: Data Masking +description: Utility +--- + + + +The data masking utility can encrypt, decrypt, or irreversibly erase sensitive information to protect data confidentiality. + +```mermaid +stateDiagram-v2 + direction LR + LambdaFn: Your Lambda function + DataMasking: DataMasking + Operation: Possible operations + Input: Sensitive value + Erase: Erase + Encrypt: Encrypt + Decrypt: Decrypt + Provider: AWS Encryption SDK provider + Result: Data transformed (erased, encrypted, or decrypted) + + LambdaFn --> DataMasking + DataMasking --> Operation + + state Operation { + [*] --> Input + Input --> Erase: Irreversible + Input --> Encrypt + Input --> Decrypt + Encrypt --> Provider + Decrypt --> Provider + } + + Operation --> Result +``` + +## Key features + +* Encrypt, decrypt, or irreversibly erase data with ease +* Erase sensitive information in one or more fields within nested data +* Seamless integration with [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/introduction.html){target="_blank"} for industry and AWS security best practices + +## Terminology + +**Erasing** replaces sensitive information **irreversibly** with a non-sensitive placeholder _(`*****`)_. This operation replaces data in-memory, making it a one-way action. + +**Encrypting** transforms plaintext into ciphertext using an encryption algorithm and a cryptographic key. It allows you to encrypt any sensitive data, so only allowed personnel to decrypt it. Learn more about encryption [here](https://aws.amazon.com/blogs/security/importance-of-encryption-and-how-aws-can-help/){target="_blank"}. + +**Decrypting** transforms ciphertext back into plaintext using a decryption algorithm and the correct decryption key. + +**Encryption context** is a non-secret `key=value` data used for authentication like `tenant_id:`. This adds extra security and confirms encrypted data relationship with a context. + +**[Encrypted message](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/message-format.html){target="_blank"}** is a portable data structure that includes encrypted data along with copies of the encrypted data key. 
It includes everything Encryption SDK needs to validate authenticity, integrity, and to decrypt with the right master key. + + +**[Envelope encryption](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/concepts.html#envelope-encryption){target="_blank"}** uses two different keys to encrypt data safely: master and data key. The data key encrypts the plaintext, and the master key encrypts the data key. It simplifies key management _(you own the master key)_, isolates compromises to data key, and scales better with large data volumes. + + +
+```mermaid +graph LR + M(Master key) --> |Encrypts| D(Data key) + D(Data key) --> |Encrypts| S(Sensitive data) +``` +Envelope encryption visualized. +
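+
+As a quick mental model before the setup steps, here is a minimal sketch of how the terminology above maps onto the utility's API. The KMS key ARN below is a placeholder, and `encrypt`/`decrypt` require access to that key at runtime; `erase` alone works without any encryption provider.
+
+```python
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.provider.kms import AWSEncryptionSDKProvider
+
+# Placeholder ARN - replace with your own symmetric KMS key
+KMS_KEY_ARN = "arn:aws:kms:us-east-1:0123456789012:key/key-id"
+
+data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]))
+
+data = {"project": "powertools", "sensitive": "password"}
+
+erased = data_masker.erase(data, fields=["sensitive"])  # {"project": "powertools", "sensitive": "*****"}
+encrypted = data_masker.encrypt(data)                   # base64-encoded encrypted message (str)
+decrypted = data_masker.decrypt(encrypted)              # original dictionary restored
+```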
+ +## Getting started + +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank"}. + +### Install + +!!! note "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}" + +Add `aws-lambda-powertools[datamasking]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_. This will install the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/introduction.html){target="_blank"}. + + +AWS Encryption SDK contains non-Python dependencies. This means you should use [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/using-sam-cli-build.html#using-sam-cli-build-options-container){target="_blank"} or [official build container images](https://gallery.ecr.aws/search?searchTerm=sam%2Fbuild-python&popularRegistries=amazon){target="_blank"} when building your application for AWS Lambda. Local development should work as expected. + + +### Required resources + +!!! info "By default, we use Amazon Key Management Service (KMS) for encryption and decryption operations." + +Before you start, you will need a KMS symmetric key to encrypt and decrypt your data. Your Lambda function will need read and write access to it. + +**NOTE**. We recommend setting a minimum of 1024MB of memory _(CPU intensive)_, and separate Lambda functions for encrypt and decrypt. For more information, you can see the full reports of our [load tests](https://github.com/aws-powertools/powertools-lambda-python/pull/2197#issuecomment-1730571597){target="_blank"} and [traces](https://github.com/aws-powertools/powertools-lambda-python/pull/2197#issuecomment-1732060923){target="_blank"}. + +=== "AWS Serverless Application Model (SAM) example" + ```yaml hl_lines="15 29 41 61 66-67" + --8<-- "examples/data_masking/sam/template.yaml" + ``` + + 1. [Key policy examples using IAM Roles](https://docs.aws.amazon.com/kms/latest/developerguide/key-policy-default.html#key-policy-default-allow-administrators){target="_blank"} + 2. [SAM generated CloudFormation Resources](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-generated-resources-function.html#sam-specification-generated-resources-function-not-role){target="_blank"} + 3. Required only when using [multiple keys](#using-multiple-keys) + +### Erasing data + +Erasing will remove the original data and replace it with a `*****`. This means you cannot recover erased data, and the data type will change to `str` for all data unless the data to be erased is of an Iterable type (`list`, `tuple`, `set`), in which case the method will return a new object of the same type as the input data but with each element replaced by the string `*****`. + +=== "getting_started_erase_data.py" + ```python hl_lines="4 8 17" + --8<-- "examples/data_masking/src/getting_started_erase_data.py" + ``` + + 1. See [working with nested data](#working-with-nested-data) to learn more about the `fields` parameter.
If we omit `fields` parameter, the entire dictionary will be erased with `*****`. + +=== "generic_data_input.json" + ```json hl_lines="7 9 14" + --8<-- "examples/data_masking/src/generic_data_input.json" + ``` + +=== "getting_started_erase_data_output.json" + ```json hl_lines="5 7 12" + --8<-- "examples/data_masking/src/getting_started_erase_data_output.json" + ``` + +### Encrypting data + +???+ note "About static typing and encryption" + Encrypting data may lead to a different data type, as it always transforms into a string _(``)_. + +To encrypt, you will need an [encryption provider](#providers). Here, we will use `AWSEncryptionSDKProvider`. + +Under the hood, we delegate a [number of operations](#encrypt-operation-with-encryption-sdk-kms) to AWS Encryption SDK to authenticate, create a portable encryption message, and actual data encryption. + +=== "getting_started_encrypt_data.py" + ```python hl_lines="6-8 14-15 26" + --8<-- "examples/data_masking/src/getting_started_encrypt_data.py" + ``` + + 1. You can use more than one KMS Key for higher availability but increased latency.
Encryption SDK will ensure the data key is encrypted with both keys. + +=== "generic_data_input.json" + ```json + --8<-- "examples/data_masking/src/generic_data_input.json" + ``` + +=== "encrypt_data_output.json" + ```json + --8<-- "examples/data_masking/src/encrypt_data_output.json" + ``` + +### Decrypting data + +???+ note "About static typing and decryption" + Decrypting data may lead to a different data type, as encrypted data is always a string _(``)_. + +To decrypt, you will need an [encryption provider](#providers). Here, we will use `AWSEncryptionSDKProvider`. + +Under the hood, we delegate a [number of operations](#decrypt-operation-with-encryption-sdk-kms) to AWS Encryption SDK to verify authentication, integrity, and actual ciphertext decryption. + +=== "getting_started_decrypt_data.py" + + **NOTE**. Decryption only works with KMS Key ARN. + + ```python hl_lines="6-7 12-13 24" + --8<-- "examples/data_masking/src/getting_started_decrypt_data.py" + ``` + + 1. Note that KMS key alias or key ID won't work. + 2. You can use more than one KMS Key for higher availability but increased latency.
Encryption SDK will call `Decrypt` API with all master keys when trying to decrypt the data key. + +=== "getting_started_decrypt_data_input.json" + + ```json + --8<-- "examples/data_masking/src/getting_started_decrypt_data_input.json" + ``` + +=== "getting_started_decrypt_data_output.json" + + ```json + --8<-- "examples/data_masking/src/getting_started_decrypt_data_output.json" + ``` + +### Encryption context for integrity and authenticity + + +For a stronger security posture, you can add metadata to each encryption operation, and verify them during decryption. This is known as additional authenticated data (AAD). These are non-sensitive data that can help protect authenticity and integrity of your encrypted data, and even help to prevent a [confused deputy](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html){target="_blank"} situation. + + +???+ danger "Important considerations you should know" + 1. **Exact match verification on decrypt**. Be careful using random data like `timestamps` as encryption context if you can't provide them on decrypt. + 2. **Only `string` values are supported**. We will raise `DataMaskingUnsupportedTypeError` for non-string values. + 3. **Use non-sensitive data only**. When using KMS, encryption context is available as plaintext in AWS CloudTrail, unless you [intentionally disabled KMS events](https://docs.aws.amazon.com/kms/latest/developerguide/logging-using-cloudtrail.html#filtering-kms-events){target="_blank"}. + +=== "getting_started_encryption_context.py" + + ```python hl_lines="26-28" + --8<-- "examples/data_masking/src/getting_started_encryption_context.py" + ``` + + 1. They must match on `decrypt()` otherwise the operation will fail with `DataMaskingContextMismatchError`. + +=== "getting_started_decryption_context.py" + + ```python hl_lines="26-28" + --8<-- "examples/data_masking/src/getting_started_decryption_context.py" + ``` + + 1. They must match otherwise the operation will fail with `DataMaskingContextMismatchError`. + +### Choosing parts of your data + +???+ note "Current limitations" + 1. The `fields` parameter is not yet supported in `encrypt` and `decrypt` operations. + 2. We support `JSON` data types only - see [data serialization for more details](#data-serialization). + +You can use the `fields` parameter with the dot notation `.` to choose one or more parts of your data to `erase`. This is useful when you want to keep data structure intact except the confidential fields. + +When `fields` is present, `erase` behaves differently: + +| Operation | Behavior | Example | Result | +| --------- | ----------------------------------------------------------- | ----------------------- | ------------------------------- | +| `erase` | Replace data while keeping collections type intact. | `{"cards": ["a", "b"]}` | `{"cards": ["*****", "*****"]}` | + +Here are common scenarios to best visualize how to use `fields`. + +=== "Top keys only" + + You want to erase data in the `card_number` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["card_number"])` + + ```json hl_lines="4" + --8<-- "examples/data_masking/src/choosing_payload_top_keys.json" + ``` + + === "Result" + + ```json hl_lines="4" + --8<-- "examples/data_masking/src/choosing_payload_top_keys_output.json" + ``` + +=== "Nested key" + + You want to erase data in the `postcode` field. 
+ + === "Data" + + > Expression: `data_masker.erase(data, fields=["address.postcode"])` + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_nested_key.json" + ``` + + === "Result" + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_nested_key_output.json" + ``` + +=== "Multiple keys" + + You want to erase data in both `postcode` and `street` fields. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address.postcode", "address.street"])` + + ```json hl_lines="6-7" + --8<-- "examples/data_masking/src/choosing_payload_multiple_keys.json" + ``` + + === "Result" + + ```json hl_lines="6-7" + --8<-- "examples/data_masking/src/choosing_payload_multiple_keys_output.json" + ``` + +=== "All key items" + + You want to erase data under `address` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address"])` + + ```json hl_lines="6-17" + --8<-- "examples/data_masking/src/choosing_payload_all_nested_keys.json" + ``` + + === "Result" + + ```json hl_lines="6-7" + --8<-- "examples/data_masking/src/choosing_payload_all_nested_keys_output.json" + ``` + +=== "Complex nested key" + + You want to erase data under `name` field. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["category..name"])` + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_complex_nested_keys.json" + ``` + + === "Result" + + ```json hl_lines="6" + --8<-- "examples/data_masking/src/choosing_payload_complex_nested_keys_output.json" + ``` + +=== "All fields in a list" + + You want to erase data under `street` field located at the any index of the address list. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address[*].street"])` + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_list_all_index.json" + ``` + + === "Result" + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_list_all_index_output.json" + ``` + +=== "Slicing a list" + + You want to erase data by slicing a list. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["address[-1].street"])` + + ```json hl_lines="16" + --8<-- "examples/data_masking/src/choosing_payload_list_slice.json" + ``` + + === "Result" + + ```json hl_lines="16" + --8<-- "examples/data_masking/src/choosing_payload_list_slice_output.json" + ``` + +=== "Complex expressions" + + You want to erase data by finding for a field with conditional expression. + + === "Data" + + > Expression: `data_masker.erase(data, fields=["$.address[?(@.postcode > 12000)]"])` + + > `$`: Represents the root of the JSON structure. + + > `.address`: Selects the "address" property within the JSON structure. + + > `(@.postcode > 12000)`: Specifies the condition that elements should meet. It selects elements where the value of the `postcode` property is `greater than 12000`. + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_complex_search.json" + ``` + + === "Result" + + ```json hl_lines="8 12" + --8<-- "examples/data_masking/src/choosing_payload_complex_search_output.json" + ``` + +For comprehensive guidance on using JSONPath syntax, please refer to the official documentation available at [jsonpath-ng](https://github.com/h2non/jsonpath-ng#jsonpath-syntax){target="_blank" rel="nofollow"} + +#### JSON + +We also support data in JSON string format as input. We automatically deserialize it, then handle each field operation as expected. 
+ +Note that the return value will be the deserialized JSON with your chosen fields updated. + +=== "Data" + + Expression: `data_masker.erase(data, fields=["card_number", "address.postcode"])` + + ```json + --8<-- "examples/data_masking/src/choosing_payload_simple_json.json" + ``` + +=== "Result" + + ```json + --8<-- "examples/data_masking/src/choosing_payload_simple_json_output.json" + ``` + +## Advanced + +### Data serialization + +???+ note "Current limitations" + 1. Python classes, `Dataclasses`, and `Pydantic models` are not supported yet. + +Before we traverse the data structure, we perform two important operations on input data: + +1. If `JSON string`, **deserialize** using default or provided deserializer. +2. If `dictionary`, **normalize** into `JSON` to prevent traversing unsupported data types. + +When decrypting, we revert the operation to restore the original data structure. + +For compatibility or performance, you can optionally pass your own JSON serializer and deserializer to replace `json.dumps` and `json.loads` respectively: + +```python hl_lines="17-18" title="advanced_custom_serializer.py" +--8<-- "examples/data_masking/src/advanced_custom_serializer.py" +``` + +### Using multiple keys + +When instantiating `AWSEncryptionSDKProvider`, you can use multiple KMS keys from more than one AWS account for higher availability. + +```python hl_lines="15" title="using_multiple_keys.py" +--8<-- "examples/data_masking/src/using_multiple_keys.py" +``` + +### Providers + +#### AWS Encryption SDK + +You can modify the following values when initializing the `AWSEncryptionSDKProvider` to best accommodate your security and performance thresholds. + +| Parameter | Default | Description | +| -------------------------- | --------------------- | --------------------------------------------------------------------------------------------- | +| **local_cache_capacity** | `100` | The maximum number of entries that can be retained in the local cryptographic materials cache | +| **max_cache_age_seconds** | `300` | The maximum time (in seconds) that a cache entry may be kept in the cache | +| **max_messages_encrypted** | `4294967296` | The maximum number of messages that may be encrypted under a cache entry | +| **max_bytes_encrypted** | `9223372036854775807` | The maximum number of bytes that may be encrypted under a cache entry | + +If required, you can customize the default values when initializing the `AWSEncryptionSDKProvider` class. + +```python hl_lines="14-19" title="aws_encryption_provider_example.py" +--8<-- "examples/data_masking/src/aws_encryption_provider_example.py" +``` + +##### Passing additional SDK arguments + +!!! note "See the [AWS Encryption SDK docs for more details](https://aws-encryption-sdk-python.readthedocs.io/en/latest/generated/aws_encryption_sdk.html#aws_encryption_sdk.EncryptionSDKClient.encrypt){target="_blank"}" + +As an escape hatch mechanism, you can pass additional arguments to the `AWSEncryptionSDKProvider` via the `provider_options` parameter. + +For example, the AWS Encryption SDK defaults to using the `AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384` algorithm for encrypting your Data Key. If needed, you can choose a different encryption algorithm. + +```python hl_lines="5 26 30" title="changing_default_algorithm.py" +--8<-- "examples/data_masking/src/changing_default_algorithm.py" +``` + +### Data masking request flow + +The following sequence diagrams explain how `DataMasking` behaves under different scenarios.
+ +#### Erase operation + +Erasing operations occur in-memory and we cannot recover the original value. + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking (in memory) + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: erase(data) + DataMasking->>DataMasking: replaces data with ***** + Note over Lambda,DataMasking: No encryption providers involved. + DataMasking->>Lambda: data masked + Lambda-->>Client: Return response +``` +Simple masking operation +
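+For quick reference, here is a minimal inline sketch of the flow above. It mirrors the erase examples earlier in this page; the payload is illustrative.
+
+```python
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+
+# No encryption provider is configured: erase only replaces values in memory
+data_masker = DataMasking()
+
+data = {"name": "Carlos", "card_number": "1111 2222 3333 4444"}
+
+# The chosen field is replaced with "*****"; the original value cannot be recovered
+erased = data_masker.erase(data, fields=["card_number"])
+# erased == {"name": "Carlos", "card_number": "*****"}
+```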
+ +#### Encrypt operation with Encryption SDK (KMS) + +We call KMS to generate a unique data key that can be reused for multiple `encrypt` operations in memory. This improves performance, reduces cost, and helps prevent throttling. + +To make this operation simpler to visualize, we keep caching details in a [separate sequence diagram](#caching-encrypt-operations-with-encryption-sdk). Caching is enabled by default. + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KMS_KEY]) + Lambda->>DataMasking: encrypt(data) + DataMasking->>EncryptionProvider: Create unique data key + Note over DataMasking,EncryptionProvider: KMS GenerateDataKey API + DataMasking->>DataMasking: Cache new unique data key + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: MASTER_KEY.encrypt(DATA_KEY) + DataMasking->>DataMasking: Create encrypted message + Note over DataMasking: Encrypted message includes encrypted data, data key encrypted, algorithm, and more. + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda-->>Client: Return response +``` +Encrypting operation using envelope encryption. +
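+The snippet below is a minimal inline sketch of this flow, mirroring `getting_started_encrypt_data.py`; the environment variable and payload are illustrative.
+
+```python
+import os
+
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
+
+KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "")
+
+data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]))
+
+data = {"card_number": "1111 2222 3333 4444"}
+
+# KMS is only called to generate the data key; the payload itself is encrypted locally.
+# The returned ciphertext is a single encrypted message bundling the encrypted data,
+# the encrypted data key, the algorithm used, and related metadata.
+ciphertext = data_masker.encrypt(data)
+```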
+ +#### Encrypt operation with multiple KMS Keys + +When encrypting data with multiple KMS keys, the `aws_encryption_sdk` makes an additional KMS API call to encrypt the data key with each additional key you specify. + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KEY_1, KEY_2]) + Lambda->>DataMasking: encrypt(data) + DataMasking->>EncryptionProvider: Create unique data key + Note over DataMasking,EncryptionProvider: KMS GenerateDataKey API - KEY_1 + DataMasking->>DataMasking: Cache new unique data key + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: KEY_1.encrypt(DATA_KEY) + loop For every additional KMS Key + DataMasking->>EncryptionProvider: Encrypt DATA_KEY + Note over DataMasking,EncryptionProvider: KMS Encrypt API - KEY_2 + end + DataMasking->>DataMasking: Create encrypted message + Note over DataMasking: Encrypted message includes encrypted data, all data keys encrypted, algorithm, and more. + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda-->>Client: Return response +``` +Encrypting operation using envelope encryption. +
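+The snippet below is a minimal inline sketch mirroring `using_multiple_keys.py`; the environment variable names and payload are illustrative.
+
+```python
+import os
+
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
+
+KMS_KEY_ARN_1 = os.getenv("KMS_KEY_ARN_1", "")
+KMS_KEY_ARN_2 = os.getenv("KMS_KEY_ARN_2", "")
+
+# The first key is used to generate the data key (KMS GenerateDataKey);
+# each additional key results in one extra KMS Encrypt call to wrap the same data key
+encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN_1, KMS_KEY_ARN_2])
+data_masker = DataMasking(provider=encryption_provider)
+
+# The encrypted message carries one encrypted copy of the data key per KMS key
+ciphertext = data_masker.encrypt({"card_number": "1111 2222 3333 4444"})
+```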
+ +#### Decrypt operation with Encryption SDK (KMS) + +We call KMS to decrypt the encrypted data key available in the encrypted message. If successful, we run authentication _(encryption context)_ and integrity checks (_algorithm, data key length, etc._) before proceeding. + +Lastly, we decrypt the original encrypted data, discard the decrypted data key for security reasons, and return the original plaintext data. + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KMS_KEY]) + Lambda->>DataMasking: decrypt(data) + DataMasking->>EncryptionProvider: Decrypt encrypted data key + Note over DataMasking,EncryptionProvider: KMS Decrypt API + DataMasking->>DataMasking: Authentication and integrity checks + DataMasking->>DataMasking: DATA_KEY.decrypt(data) + DataMasking->>DataMasking: Discards decrypted data key + DataMasking->>Lambda: Plaintext + Lambda-->>Client: Return response +``` Decrypting operation using envelope encryption.
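+The snippet below is a minimal inline sketch combining this flow with the encryption context examples earlier in this page; the context value and payload are illustrative.
+
+```python
+import os
+
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
+
+KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "")
+
+data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]))
+
+encrypted = data_masker.encrypt(
+    {"card_number": "1111 2222 3333 4444"},
+    data_classification="confidential",  # encryption context (AAD)
+)
+
+# The same context must be provided on decrypt; a mismatch fails the
+# authentication check with DataMaskingContextMismatchError
+decrypted = data_masker.decrypt(encrypted, data_classification="confidential")
+```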
+ +#### Caching encrypt operations with Encryption SDK + +Without caching, every `encrypt()` operation would generate a new data key. This significantly increases latency and cost in ephemeral, short-running environments like Lambda. + +With caching, we reuse data keys within [adjustable thresholds](#aws-encryption-sdk), balancing the performance characteristics of ephemeral Lambda environments with your security needs. + +!!! info "Data key recycling" + We request a new data key when a cached data key exceeds any of the following security thresholds: + + 1. **Max age in seconds** + 2. **Max number of encrypted messages** + 3. **Max bytes encrypted** across all operations + +
+```mermaid +sequenceDiagram + autonumber + participant Client + participant Lambda + participant DataMasking as Data Masking + participant EncryptionProvider as Encryption Provider + Client->>Lambda: Invoke (event) + Lambda->>DataMasking: Init Encryption Provider with master key + Note over Lambda,DataMasking: AWSEncryptionSDKProvider([KMS_KEY]) + Lambda->>DataMasking: encrypt(data) + DataMasking->>EncryptionProvider: Create unique data key + Note over DataMasking,EncryptionProvider: KMS GenerateDataKey API + DataMasking->>DataMasking: Cache new unique data key + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: MASTER_KEY.encrypt(DATA_KEY) + DataMasking->>DataMasking: Create encrypted message + Note over DataMasking: Encrypted message includes encrypted data, data key encrypted, algorithm, and more. + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda->>DataMasking: encrypt(another_data) + DataMasking->>DataMasking: Searches for data key in cache + alt Is Data key in cache? + DataMasking->>DataMasking: Reuses data key + else Is Data key evicted from cache? + DataMasking->>EncryptionProvider: Create unique data key + DataMasking->>DataMasking: MASTER_KEY.encrypt(DATA_KEY) + end + DataMasking->>DataMasking: DATA_KEY.encrypt(data) + DataMasking->>DataMasking: Create encrypted message + DataMasking->>Lambda: Ciphertext from encrypted message + Lambda-->>Client: Return response +``` +Caching data keys during encrypt operation. +
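+The snippet below is a minimal inline sketch of how caching plays out across multiple `encrypt()` calls; the payloads are illustrative and the thresholds shown are the defaults from the table above.
+
+```python
+import os
+
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider
+
+KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "")
+
+# Module scope: the provider and its materials cache survive across warm invocations
+encryption_provider = AWSEncryptionSDKProvider(
+    keys=[KMS_KEY_ARN],
+    max_cache_age_seconds=300,
+    max_messages_encrypted=4294967296,
+)
+data_masker = DataMasking(provider=encryption_provider)
+
+# First call: KMS GenerateDataKey is invoked and the new data key is cached
+first = data_masker.encrypt({"card_number": "1111 2222 3333 4444"})
+
+# Subsequent calls reuse the cached data key until any threshold
+# (max age, max messages, max bytes) is exceeded
+second = data_masker.encrypt({"email": "johndoe@example.com"})
+```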
+ +## Testing your code + +### Testing erase operation + +Testing your code with a simple erase operation + +=== "test_lambda_mask.py" + +```python hl_lines="22" +--8<-- "examples/data_masking/tests/test_lambda_mask.py" +``` + +=== "lambda_mask.py" + +```python hl_lines="3 12" +--8<-- "examples/data_masking/tests/lambda_mask.py" +``` diff --git a/examples/data_masking/sam/template.yaml b/examples/data_masking/sam/template.yaml new file mode 100644 index 00000000000..67d5d923515 --- /dev/null +++ b/examples/data_masking/sam/template.yaml @@ -0,0 +1,67 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: > + Powertools for AWS Lambda (Python) data masking example + +Globals: # https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-specification-template-anatomy-globals.html + Function: + Timeout: 5 + Runtime: python3.11 + Tracing: Active + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: INFO + KMS_KEY_ARN: !GetAtt DataMaskingMasterKey.Arn + +# In production, we recommend you split up the encrypt and decrypt for fine-grained security. +# For example, one function can act as the encryption proxy via HTTP requests, data pipeline, etc., +# while only authorized personnel can call decrypt via a separate function. +Resources: + DataMaskingEncryptFunctionExample: + Type: AWS::Serverless::Function + Properties: + Handler: data_masking_function_example.lambda_handler + CodeUri: ../src + Description: Data Masking encryption function + # Cryptographic operations demand more CPU. CPU is proportionally allocated based on memory size. + # We recommend allocating a minimum of 1024MB of memory. + MemorySize: 1024 + + # DataMaskingDecryptFunctionExample: + # Type: AWS::Serverless::Function + # Properties: + # Handler: data_masking_function_decrypt.lambda_handler + # CodeUri: ../src + # Description: Data Masking decryption function + # MemorySize: 1024 + + # KMS Key + DataMaskingMasterKey: + Type: "AWS::KMS::Key" + Properties: + Description: KMS Key for encryption and decryption using Powertools for AWS Lambda Data masking feature + # KMS Key support both IAM Resource Policies and Key Policies + # For more details: https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html + KeyPolicy: + Version: "2012-10-17" + Id: data-masking-enc-dec + Statement: + # For security reasons, ensure your KMS Key has at least one administrator. + # In this example, the root account is granted administrator permissions. + # However, we recommended configuring specific IAM Roles for enhanced security in production. + - Effect: Allow + Principal: + AWS: !Sub "arn:aws:iam::${AWS::AccountId}:root" # (1)! + Action: "kms:*" + Resource: "*" + # We must grant Lambda's IAM Role access to the KMS Key + - Effect: Allow + Principal: + AWS: !GetAtt DataMaskingEncryptFunctionExampleRole.Arn # (2)! + Action: + - kms:Decrypt # to decrypt encrypted data key + - kms:GenerateDataKey # to create an unique and random data key for encryption + # Encrypt permission is required only when using multiple keys + - kms:Encrypt # (3)! 
+ Resource: "*" diff --git a/examples/data_masking/src/advanced_custom_serializer.py b/examples/data_masking/src/advanced_custom_serializer.py new file mode 100644 index 00000000000..f870624bccb --- /dev/null +++ b/examples/data_masking/src/advanced_custom_serializer.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import os + +import ujson + +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider( + keys=[KMS_KEY_ARN], + json_serializer=ujson.dumps, + json_deserializer=ujson.loads, +) +data_masker = DataMasking(provider=encryption_provider) + + +def lambda_handler(event: dict, context: LambdaContext) -> str: + data: dict = event.get("body", {}) + + return data_masker.encrypt(data) diff --git a/examples/data_masking/src/aws_encryption_provider_example.py b/examples/data_masking/src/aws_encryption_provider_example.py new file mode 100644 index 00000000000..2ef34a82934 --- /dev/null +++ b/examples/data_masking/src/aws_encryption_provider_example.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider( + keys=[KMS_KEY_ARN], + local_cache_capacity=200, + max_cache_age_seconds=400, + max_messages_encrypted=200, + max_bytes_encrypted=2000) + +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Encrypting the whole object") + + encrypted = data_masker.encrypt(data) + + return {"body": encrypted} diff --git a/examples/data_masking/src/changing_default_algorithm.py b/examples/data_masking/src/changing_default_algorithm.py new file mode 100644 index 00000000000..27d52905459 --- /dev/null +++ b/examples/data_masking/src/changing_default_algorithm.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +import os + +from aws_encryption_sdk.identifiers import Algorithm + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> str: + data: dict = event.get("body", {}) + + logger.info("Encrypting whole object with a different algorithm") + + provider_options = {"algorithm": Algorithm.AES_256_GCM_HKDF_SHA512_COMMIT_KEY} + + encrypted = data_masker.encrypt( + data, + provider_options=provider_options, + ) + + return encrypted diff --git a/examples/data_masking/src/choosing_payload_all_nested_keys.json 
b/examples/data_masking/src/choosing_payload_all_nested_keys.json new file mode 100644 index 00000000000..7fad154c03e --- /dev/null +++ b/examples/data_masking/src/choosing_payload_all_nested_keys.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street", + "country": "United States", + "timezone": "America/La_Paz" + }, + { + "postcode": 67890, + "street": "100 Main Street", + "country": "United States", + "timezone": "America/Mazatlan" + } + ] +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_all_nested_keys_output.json b/examples/data_masking/src/choosing_payload_all_nested_keys_output.json new file mode 100644 index 00000000000..a28bfee974e --- /dev/null +++ b/examples/data_masking/src/choosing_payload_all_nested_keys_output.json @@ -0,0 +1,9 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + "*****", + "*****" + ] +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_complex_nested_keys.json b/examples/data_masking/src/choosing_payload_complex_nested_keys.json new file mode 100644 index 00000000000..7096e0074d9 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_nested_keys.json @@ -0,0 +1,11 @@ +{ + "category": { + "subcategory": { + "brand" : { + "product": { + "name": "Car" + } + } + } + } +} diff --git a/examples/data_masking/src/choosing_payload_complex_nested_keys_output.json b/examples/data_masking/src/choosing_payload_complex_nested_keys_output.json new file mode 100644 index 00000000000..843c8c7e1ce --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_nested_keys_output.json @@ -0,0 +1,11 @@ +{ + "category": { + "subcategory": { + "brand" : { + "product": { + "name": "*****" + } + } + } + } +} diff --git a/examples/data_masking/src/choosing_payload_complex_search.json b/examples/data_masking/src/choosing_payload_complex_search.json new file mode 100644 index 00000000000..e8db38a79ad --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_search.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Drive" + }, + { + "postcode": 67890, + "street": "111 Main Street" + }, + { + "postcode": 11111, + "street": "100 Any Street" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_complex_search_output.json b/examples/data_masking/src/choosing_payload_complex_search_output.json new file mode 100644 index 00000000000..6198e27c09a --- /dev/null +++ b/examples/data_masking/src/choosing_payload_complex_search_output.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "*****" + }, + { + "postcode": 67890, + "street": "*****" + }, + { + "postcode": 11111, + "street": "100 Any Street" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_all_index.json b/examples/data_masking/src/choosing_payload_list_all_index.json new file mode 100644 index 00000000000..670e3c420be --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_all_index.json @@ -0,0 +1,15 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 
Any Drive" + }, + { + "postcode": 67890, + "street": "100 Main Street," + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_all_index_output.json b/examples/data_masking/src/choosing_payload_list_all_index_output.json new file mode 100644 index 00000000000..8fb1f1b1c6d --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_all_index_output.json @@ -0,0 +1,16 @@ + +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "*****" + }, + { + "postcode": 67890, + "street": "*****" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_index.json b/examples/data_masking/src/choosing_payload_list_index.json new file mode 100644 index 00000000000..0f543b42f5f --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_index.json @@ -0,0 +1,15 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "100 Main Street" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_index_output.json b/examples/data_masking/src/choosing_payload_list_index_output.json new file mode 100644 index 00000000000..1481d78f4b6 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_index_output.json @@ -0,0 +1,16 @@ + +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "*****" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_slice.json b/examples/data_masking/src/choosing_payload_list_slice.json new file mode 100644 index 00000000000..c8a9f7f58af --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_slice.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "100 Main Street" + }, + { + "postcode": 78495, + "street": "111 Any Drive" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_list_slice_output.json b/examples/data_masking/src/choosing_payload_list_slice_output.json new file mode 100644 index 00000000000..efab8b03400 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_list_slice_output.json @@ -0,0 +1,19 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": [ + { + "postcode": 12345, + "street": "123 Any Street" + }, + { + "postcode": 67890, + "street": "100 Main Street" + }, + { + "postcode": 11111, + "street": "*****" + } + ] +} diff --git a/examples/data_masking/src/choosing_payload_multiple_keys.json b/examples/data_masking/src/choosing_payload_multiple_keys.json new file mode 100644 index 00000000000..640c274868e --- /dev/null +++ b/examples/data_masking/src/choosing_payload_multiple_keys.json @@ -0,0 +1,9 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": 12345, + "street": "123 Any Street" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_multiple_keys_output.json b/examples/data_masking/src/choosing_payload_multiple_keys_output.json new file mode 100644 index 00000000000..fca3391f2f4 --- /dev/null +++ 
b/examples/data_masking/src/choosing_payload_multiple_keys_output.json @@ -0,0 +1,9 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": "*****", + "street": "*****" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_nested_key.json b/examples/data_masking/src/choosing_payload_nested_key.json new file mode 100644 index 00000000000..e3ff995026f --- /dev/null +++ b/examples/data_masking/src/choosing_payload_nested_key.json @@ -0,0 +1,8 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": 12345 + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_nested_key_output.json b/examples/data_masking/src/choosing_payload_nested_key_output.json new file mode 100644 index 00000000000..463f5a943f3 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_nested_key_output.json @@ -0,0 +1,8 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444", + "address": { + "postcode": "*****" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_simple_json.json b/examples/data_masking/src/choosing_payload_simple_json.json new file mode 100644 index 00000000000..057d43087f0 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_simple_json.json @@ -0,0 +1 @@ +'{"name": "Carlos", "operation": "non sensitive", "card_number": "1111 2222 3333 4444", "address": {"postcode": 12345}}' \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_simple_json_output.json b/examples/data_masking/src/choosing_payload_simple_json_output.json new file mode 100644 index 00000000000..b8920dc9696 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_simple_json_output.json @@ -0,0 +1,8 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "*****", + "address": { + "postcode": "*****" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_top_keys.json b/examples/data_masking/src/choosing_payload_top_keys.json new file mode 100644 index 00000000000..dce6ed78780 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_top_keys.json @@ -0,0 +1,5 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "1111 2222 3333 4444" +} \ No newline at end of file diff --git a/examples/data_masking/src/choosing_payload_top_keys_output.json b/examples/data_masking/src/choosing_payload_top_keys_output.json new file mode 100644 index 00000000000..c7d877cb804 --- /dev/null +++ b/examples/data_masking/src/choosing_payload_top_keys_output.json @@ -0,0 +1,5 @@ +{ + "name": "Carlos", + "operation": "non sensitive", + "card_number": "*****" +} \ No newline at end of file diff --git a/examples/data_masking/src/data_masking_function_example.py b/examples/data_masking/src/data_masking_function_example.py new file mode 100644 index 00000000000..e7ed3326890 --- /dev/null +++ b/examples/data_masking/src/data_masking_function_example.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = 
os.getenv("KMS_KEY_ARN", "") + +tracer = Tracer() +logger = Logger() + + +@tracer.capture_lambda_handler +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + logger.info("Hello world function - HTTP 200") + + data = event["body"] + + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) + encrypted = data_masker.encrypt(data) + decrypted = data_masker.decrypt(encrypted) + return {"Decrypted_json": decrypted} diff --git a/examples/data_masking/src/data_masking_function_example_output.json b/examples/data_masking/src/data_masking_function_example_output.json new file mode 100644 index 00000000000..87601e79ee4 --- /dev/null +++ b/examples/data_masking/src/data_masking_function_example_output.json @@ -0,0 +1,34 @@ +{ + "Decrypted_json": { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "phone_numbers": [ + "+1-555-555-1234", + "+1-555-555-5678" + ], + "interests": [ + "Hiking", + "Traveling", + "Photography", + "Reading" + ], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr." + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31" + }, + "about_me": "\n Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis\n sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus,\n ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim.\n Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue,\n risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin\n interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat\n volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat.\n Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus\n malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. 
Integer a commodo nunc.\n " + } + } \ No newline at end of file diff --git a/examples/data_masking/src/encrypt_data_output.json b/examples/data_masking/src/encrypt_data_output.json new file mode 100644 index 00000000000..06e32c83804 --- /dev/null +++ b/examples/data_masking/src/encrypt_data_output.json @@ -0,0 +1,3 @@ +{ + "body": "AgV4uF5K2YMtNhYrtviTwKNrUHhqQr73l/jNfukkh+qLOC8AXwABABVhd3MtY3J5cHRvLXB1YmxpYy1rZXkAREEvcjEyaFZHY1R5cjJuTDNKbTJ3UFA3R3ZjaytIdi9hekZqbXVUb25Ya3J5SzFBOUlJZDZxZXpSR1NTVnZDUUxoZz09AAEAB2F3cy1rbXMAS2Fybjphd3M6a21zOnVzLWVhc3QtMToyMDA5ODQxMTIzODY6a2V5LzZkODJiMzRlLTM2NjAtNDRlMi04YWJiLTdmMzA1OGJlYTIxMgC4AQIBAHjxYXAO7wQGd+7qxoyvXAajwqboF5FL/9lgYUNJTB8VtAHBP2hwVgw+zypp7GoMNTPAAAAAfjB8BgkqhkiG9w0BBwagbzBtAgEAMGgGCSqGSIb3DQEHATAeBglghkgBZQMEAS4wEQQMx/B25MTgWwpL7CmuAgEQgDtan3orAOKFUfyNm3v6rFcglb+BVVVDV71fj4aRljhpg1ixsYFaKsoej8NcwRktIiWE+mw9XmTEVb6xFQIAABAA9DeLzlRaRQgTcXMJG0iBu/YTyyDKiROD+bU1Y09X9RBz5LA1nWIENJKq2seAhNSB/////wAAAAEAAAAAAAAAAAAAAAEAAAEBExLJ9wI4n7t+wyPEEP4kjYFBdkmNuLLsVC2Yt8mv9Y1iH2G+/g9SaIcdK57pkoW0ECpBxZVOxCuhmK2s74AJCUdem9McjS1waUKyzYTi9vv2ySNBsABIDwT990rE7jZJ3tEZAqcWZg/eWlxvnksFR/akBWZKsKzFz6lF57+cTgdISCEJRV0E7fcUeCuaMaQGK1Qw2OCmIeHEG5j5iztBkZG2IB2CVND/AbxmDUFHwgjsrJPTzaDYSufcGMoZW1A9X1sLVfqNVKvnOFP5tNY7kPF5eAI9FhGBw8SjTqODXz4k6zuqzy9no8HtXowP265U8NZ5VbVTd/zuVEbZyK5KBqzP1sExW4RhnlpXMoOs9WSuAGcwZQIxANTeEwb9V7CacV2Urt/oCqysUzhoV2AcT2ZjryFqY79Tsg+FRpIx7cBizL4ieRzbhQIwcRasNncO5OZOcmVr0MqHv+gCVznndMgjXJmWwUa7h6skJKmhhMPlN0CsugxtVWnD" +} diff --git a/examples/data_masking/src/generic_data_input.json b/examples/data_masking/src/generic_data_input.json new file mode 100644 index 00000000000..60ab0aa278e --- /dev/null +++ b/examples/data_masking/src/generic_data_input.json @@ -0,0 +1,21 @@ +{ + "body": + { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "company_address": { + "street": "456 ACME Ave", + "city": "Anytown", + "state": "CA", + "zip": "12345" + } + } +} \ No newline at end of file diff --git a/examples/data_masking/src/getting_started_decrypt_data.py b/examples/data_masking/src/getting_started_decrypt_data.py new file mode 100644 index 00000000000..d8e746a8dfe --- /dev/null +++ b/examples/data_masking/src/getting_started_decrypt_data.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") # (1)! + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) # (2)! 
+data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Decrypting whole object") + + decrypted = data_masker.decrypt(data) + + return decrypted diff --git a/examples/data_masking/src/getting_started_decrypt_data_input.json b/examples/data_masking/src/getting_started_decrypt_data_input.json new file mode 100644 index 00000000000..06e32c83804 --- /dev/null +++ b/examples/data_masking/src/getting_started_decrypt_data_input.json @@ -0,0 +1,3 @@ +{ + "body": "AgV4uF5K2YMtNhYrtviTwKNrUHhqQr73l/jNfukkh+qLOC8AXwABABVhd3MtY3J5cHRvLXB1YmxpYy1rZXkAREEvcjEyaFZHY1R5cjJuTDNKbTJ3UFA3R3ZjaytIdi9hekZqbXVUb25Ya3J5SzFBOUlJZDZxZXpSR1NTVnZDUUxoZz09AAEAB2F3cy1rbXMAS2Fybjphd3M6a21zOnVzLWVhc3QtMToyMDA5ODQxMTIzODY6a2V5LzZkODJiMzRlLTM2NjAtNDRlMi04YWJiLTdmMzA1OGJlYTIxMgC4AQIBAHjxYXAO7wQGd+7qxoyvXAajwqboF5FL/9lgYUNJTB8VtAHBP2hwVgw+zypp7GoMNTPAAAAAfjB8BgkqhkiG9w0BBwagbzBtAgEAMGgGCSqGSIb3DQEHATAeBglghkgBZQMEAS4wEQQMx/B25MTgWwpL7CmuAgEQgDtan3orAOKFUfyNm3v6rFcglb+BVVVDV71fj4aRljhpg1ixsYFaKsoej8NcwRktIiWE+mw9XmTEVb6xFQIAABAA9DeLzlRaRQgTcXMJG0iBu/YTyyDKiROD+bU1Y09X9RBz5LA1nWIENJKq2seAhNSB/////wAAAAEAAAAAAAAAAAAAAAEAAAEBExLJ9wI4n7t+wyPEEP4kjYFBdkmNuLLsVC2Yt8mv9Y1iH2G+/g9SaIcdK57pkoW0ECpBxZVOxCuhmK2s74AJCUdem9McjS1waUKyzYTi9vv2ySNBsABIDwT990rE7jZJ3tEZAqcWZg/eWlxvnksFR/akBWZKsKzFz6lF57+cTgdISCEJRV0E7fcUeCuaMaQGK1Qw2OCmIeHEG5j5iztBkZG2IB2CVND/AbxmDUFHwgjsrJPTzaDYSufcGMoZW1A9X1sLVfqNVKvnOFP5tNY7kPF5eAI9FhGBw8SjTqODXz4k6zuqzy9no8HtXowP265U8NZ5VbVTd/zuVEbZyK5KBqzP1sExW4RhnlpXMoOs9WSuAGcwZQIxANTeEwb9V7CacV2Urt/oCqysUzhoV2AcT2ZjryFqY79Tsg+FRpIx7cBizL4ieRzbhQIwcRasNncO5OZOcmVr0MqHv+gCVznndMgjXJmWwUa7h6skJKmhhMPlN0CsugxtVWnD" +} diff --git a/examples/data_masking/src/getting_started_decrypt_data_output.json b/examples/data_masking/src/getting_started_decrypt_data_output.json new file mode 100644 index 00000000000..7871a0416e7 --- /dev/null +++ b/examples/data_masking/src/getting_started_decrypt_data_output.json @@ -0,0 +1,18 @@ +{ + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "company_address": { + "street": "456 ACME Ave", + "city": "Anytown", + "state": "CA", + "zip": "12345" + } +} \ No newline at end of file diff --git a/examples/data_masking/src/getting_started_decryption_context.py b/examples/data_masking/src/getting_started_decryption_context.py new file mode 100644 index 00000000000..f4b0f6d8ac3 --- /dev/null +++ b/examples/data_masking/src/getting_started_decryption_context.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data = event.get("body", {}) + + logger.info("Decrypting whole object") + + decrypted: dict = data_masker.decrypt( + data, + data_classification="confidential", # (1)! 
+ data_type="customer-data", + tenant_id="a06bf973-0734-4b53-9072-39d7ac5b2cba", + ) + + return decrypted diff --git a/examples/data_masking/src/getting_started_encrypt_data.py b/examples/data_masking/src/getting_started_encrypt_data.py new file mode 100644 index 00000000000..579170113dd --- /dev/null +++ b/examples/data_masking/src/getting_started_encrypt_data.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) # (1)! +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Encrypting the whole object") + + encrypted = data_masker.encrypt(data) + + return {"body": encrypted} diff --git a/examples/data_masking/src/getting_started_encryption_context.py b/examples/data_masking/src/getting_started_encryption_context.py new file mode 100644 index 00000000000..6fea5dc9f65 --- /dev/null +++ b/examples/data_masking/src/getting_started_encryption_context.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN = os.getenv("KMS_KEY_ARN", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> str: + data = event.get("body", {}) + + logger.info("Encrypting whole object") + + encrypted: str = data_masker.encrypt( + data, + data_classification="confidential", # (1)! + data_type="customer-data", + tenant_id="a06bf973-0734-4b53-9072-39d7ac5b2cba", + ) + + return encrypted diff --git a/examples/data_masking/src/getting_started_erase_data.py b/examples/data_masking/src/getting_started_erase_data.py new file mode 100644 index 00000000000..a3e9fc7217e --- /dev/null +++ b/examples/data_masking/src/getting_started_erase_data.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.typing import LambdaContext + +logger = Logger() +data_masker = DataMasking() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Erasing fields email, address.street, and company_address") + + erased = data_masker.erase(data, fields=["email", "address.street", "company_address"]) # (1)! 
+ + return erased diff --git a/examples/data_masking/src/getting_started_erase_data_output.json b/examples/data_masking/src/getting_started_erase_data_output.json new file mode 100644 index 00000000000..76a43cc81e7 --- /dev/null +++ b/examples/data_masking/src/getting_started_erase_data_output.json @@ -0,0 +1,13 @@ +{ + "id": 1, + "name": "John Doe", + "age": 30, + "email": "*****", + "address": { + "street": "*****", + "city": "Anytown", + "state": "CA", + "zip": "12345" + }, + "company_address": "*****" +} \ No newline at end of file diff --git a/examples/data_masking/src/large_data_input.json b/examples/data_masking/src/large_data_input.json new file mode 100644 index 00000000000..34275c3fa73 --- /dev/null +++ b/examples/data_masking/src/large_data_input.json @@ -0,0 +1,32 @@ +{ + "body": + { + "id": 1, + "name": "John Doe", + "age": 30, + "email": "johndoe@example.com", + "address": {"street": "123 Main St", "city": "Anytown", "state": "CA", "zip": "12345"}, + "phone_numbers": ["+1-555-555-1234", "+1-555-555-5678"], + "interests": ["Hiking", "Traveling", "Photography", "Reading"], + "job_history": { + "company": { + "company_name": "Acme Inc.", + "company_address": "5678 Interview Dr." + }, + "position": "Software Engineer", + "start_date": "2015-01-01", + "end_date": "2017-12-31" + }, + "about_me": """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla tincidunt velit quis + sapien mollis, at egestas massa tincidunt. Suspendisse ultrices arcu a dolor dapibus, + ut pretium turpis volutpat. Vestibulum at sapien quis sapien dignissim volutpat ut a enim. + Praesent fringilla sem eu dui convallis luctus. Donec ullamcorper, sapien ut convallis congue, + risus mauris pretium tortor, nec dignissim arcu urna a nisl. Vivamus non fermentum ex. Proin + interdum nisi id sagittis egestas. Nam sit amet nisi nec quam pharetra sagittis. Aliquam erat + volutpat. Donec nec luctus sem, nec ornare lorem. Vivamus vitae orci quis enim faucibus placerat. + Nulla facilisi. Proin in turpis orci. Donec imperdiet velit ac tellus gravida, eget laoreet tellus + malesuada. Praesent venenatis tellus ac urna blandit, at varius felis posuere. Integer a commodo nunc. 
+ """ + } +} diff --git a/examples/data_masking/src/using_multiple_keys.py b/examples/data_masking/src/using_multiple_keys.py new file mode 100644 index 00000000000..45c49f467d3 --- /dev/null +++ b/examples/data_masking/src/using_multiple_keys.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import os + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + AWSEncryptionSDKProvider, +) +from aws_lambda_powertools.utilities.typing import LambdaContext + +KMS_KEY_ARN_1 = os.getenv("KMS_KEY_ARN_1", "") +KMS_KEY_ARN_2 = os.getenv("KMS_KEY_ARN_2", "") + +encryption_provider = AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN_1, KMS_KEY_ARN_2]) +data_masker = DataMasking(provider=encryption_provider) + +logger = Logger() + + +@logger.inject_lambda_context +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data: dict = event.get("body", {}) + + logger.info("Encrypting the whole object") + + encrypted = data_masker.encrypt(data) + + return {"body": encrypted} diff --git a/examples/data_masking/tests/lambda_mask.py b/examples/data_masking/tests/lambda_mask.py new file mode 100644 index 00000000000..6b2f461e663 --- /dev/null +++ b/examples/data_masking/tests/lambda_mask.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.typing import LambdaContext + +data_masker = DataMasking() + + +def lambda_handler(event: dict, context: LambdaContext) -> dict: + data = event + + erased = data_masker.erase(data, fields=["testkey"]) + + return erased diff --git a/examples/data_masking/tests/test_lambda_mask.py b/examples/data_masking/tests/test_lambda_mask.py new file mode 100644 index 00000000000..596f065b380 --- /dev/null +++ b/examples/data_masking/tests/test_lambda_mask.py @@ -0,0 +1,30 @@ +from dataclasses import dataclass + +import pytest +import test_lambda_mask + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:111111111:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 5 + + return LambdaContext() + + +def test_encrypt_lambda(lambda_context): + # GIVEN: A sample event for testing + event = {"testkey": "testvalue"} + + # WHEN: Invoking the lambda_handler function with the sample event and Lambda context + result = test_lambda_mask.lambda_handler(event, lambda_context) + + # THEN: Assert that the result matches the expected output + assert result == {"testkey": "*****"} diff --git a/mkdocs.yml b/mkdocs.yml index a862430a054..50fe632539c 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -29,6 +29,7 @@ nav: - utilities/data_classes.md - utilities/parser.md - utilities/idempotency.md + - utilities/data_masking.md - utilities/feature_flags.md - utilities/streaming.md - utilities/middleware_factory.md diff --git a/mypy.ini b/mypy.ini index cb2d3ce2443..5fcb1533707 100644 --- a/mypy.ini +++ b/mypy.ini @@ -12,12 +12,15 @@ disable_error_code = annotation-unchecked [mypy-jmespath] ignore_missing_imports=True -[mypy-aws_encryption_sdk] +[mypy-aws_encryption_sdk.*] ignore_missing_imports=True [mypy-sentry_sdk] ignore_missing_imports=True +[mypy-jsonpath_ng.*] +ignore_missing_imports=True + 
[mypy-jmespath.exceptions] ignore_missing_imports=True @@ -71,3 +74,6 @@ ignore_missing_imports = True [mypy-importlib.metadata] ignore_missing_imports = True + +[mypy-ujson] +ignore_missing_imports = True diff --git a/poetry.lock b/poetry.lock index b6bf62d37bb..8e7fcad2cd9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -768,58 +768,67 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - 
{file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] - -[package.dependencies] -cffi = ">=1.12" + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "datadog" -version = "0.47.0" +version = "0.48.0" description = "The Datadog Python library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "datadog-0.47.0-py2.py3-none-any.whl", hash = "sha256:a45ec997ab554208837e8c44d81d0e1456539dc14da5743687250e028bc809b7"}, - {file = "datadog-0.47.0.tar.gz", hash = "sha256:47be3b2c3d709a7f5b709eb126ed4fe6cc7977d618fe5c158dd89c2a9f7d9916"}, + {file = "datadog-0.48.0-py2.py3-none-any.whl", hash = "sha256:c3f819e2dc632a546a5b4e8d45409e996d4fa18c60df7814c82eda548e0cca59"}, + {file = "datadog-0.48.0.tar.gz", hash = "sha256:d4d661358c3e7f801fbfe15118f5ccf08b9bd9b1f45b8b910605965283edad64"}, ] [package.dependencies] @@ 
-867,71 +876,71 @@ six = "*" [[package]] name = "ddtrace" -version = "2.4.0" +version = "2.5.2" description = "Datadog APM client library" optional = false python-versions = ">=3.7" files = [ - {file = "ddtrace-2.4.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:90641de597d3424573aa96263509800bb64018727bf74e29e250e6d21200a4be"}, - {file = "ddtrace-2.4.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:75b7d01af5fb8d279a2edb56d48af0dc221ed43f4e5049387e4a9be529217033"}, - {file = "ddtrace-2.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f104933ffbae735887e10e3e0d9a5d28dd7d42d1fd86141c4fa171c07598b561"}, - {file = "ddtrace-2.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d675545d2fd7c5be10fe704a3f151add0ce8b101c976ca0ab452699aac0d8489"}, - {file = "ddtrace-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b380dabf377a318ebd909423293b02beaa43ffda03ad129a5a93c4a1a4b5c6"}, - {file = "ddtrace-2.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2f93337c1546404967525388a45174481daa72ecf7d3a1e4c21349e1a2d572c"}, - {file = "ddtrace-2.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0e345e034e8962d76642ab2763f5bdb1bc4424c2ea17d9ca5f82e093160d6ca1"}, - {file = "ddtrace-2.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa5e1a7121d08d50795e3f6218f3959cfa55363a3896210410ef354a7573de9"}, - {file = "ddtrace-2.4.0-cp310-cp310-win32.whl", hash = "sha256:d9c69a42919a27cff8d42461b301014d79683c40f60d0cb5f3000e4ff7cb907f"}, - {file = "ddtrace-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:962de6a60f42e2cde1823c47a3383bb0d6beaa954d57b12687688935d0ddd3d3"}, - {file = "ddtrace-2.4.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ed91c32353c8288fb95de67faa341c5ab9a089c0161ad51fc739f0db2b46866e"}, - {file = "ddtrace-2.4.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:410c9b9241ed2514dc9413887d852140cc7ff396b40ffc412835a14668b9b1a3"}, - {file = "ddtrace-2.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639b11f780d0ed1a372a2a6b92cc1b9c586a0fea27439557e768d5ebedabbc34"}, - {file = "ddtrace-2.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08861e4acd61198428f0d994db1bc5d2893ec816b9cd78c0c6d1fc963f0dc771"}, - {file = "ddtrace-2.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad627a4611bff8f527e2c0c0fc51be9d74a563328269f53b871901570ee4ff3"}, - {file = "ddtrace-2.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6ae2f75f2edc068d6c104ceb0e882a6dfad8f702b27384b3dac5290aebbc248"}, - {file = "ddtrace-2.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82a0832000fedcb95856477bab95c6f151fa28ede3aceafaabe7c08beffaa577"}, - {file = "ddtrace-2.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8b1baac10f9cc3803854f802062e02ae5de0d5546f19165c3b6e8840e9b09f4"}, - {file = "ddtrace-2.4.0-cp311-cp311-win32.whl", hash = "sha256:c687fe20b17e2d24de222913dc2383e6b1462641d8ff18d27678dcb72ced82a3"}, - {file = "ddtrace-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:47296b116a97e01fe6bf48a4eea4e825212ee23288ee064964ab87ba608fc038"}, - {file = "ddtrace-2.4.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6e2b2b4160ea53dd3e4f8bb35af7124a5e8954c8badffa81468c8a62d12acc51"}, - {file = "ddtrace-2.4.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:49ac0d69f98a4ff2175db39481598300fd94f038a027b537d0a66d9dbeca1ed7"}, - {file = 
"ddtrace-2.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2604e1c91b44d3b6fb15d0337cda1ac2c15aec215f6a44e1bb39d25b47c2633c"}, - {file = "ddtrace-2.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb7d2c846e3d7e8156199855d4db014a71d62daedba84a213416e2a488e834b3"}, - {file = "ddtrace-2.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85774e12d5d92152cd1c64f3a8a2f4dbe7f3d39201f8a8ff5e914b9639fe6e17"}, - {file = "ddtrace-2.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:418c0c990c505accc8665bfc056f4297938a54176157bf1f0765f2fae584efec"}, - {file = "ddtrace-2.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:183f7c3ddd9a2891bd1b6f5ea3af6d16517775268b3940259820ca3c83292d16"}, - {file = "ddtrace-2.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:eb90e71b70e3ea6c24711cfb5c48c711a2175c315daf07f4f28903aa773a48b7"}, - {file = "ddtrace-2.4.0-cp312-cp312-win32.whl", hash = "sha256:5eab75f1d4170c41de1f9c32e7e39714b2dd11a59d9ff7e94a199b88fa813ecd"}, - {file = "ddtrace-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:d892e0b71f3b6bcf31920b5e7fd699c86aea734bc02eec3c1b22acd8f63057e4"}, - {file = "ddtrace-2.4.0-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c07ea7a17a2897d891ee5e95de3b0e4f57184c471e87ffcc7208b3ccd68b9fcc"}, - {file = "ddtrace-2.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05b28815e65d6361cd056c877ab051e132a6929b0d353313a499122e6522ea3"}, - {file = "ddtrace-2.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63719bfc8fe5e8510022a3275145d6b2b1c4f955c395698fb792d99d4cda698d"}, - {file = "ddtrace-2.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:190f96eccdd8107cc93db6e79af4b8fc9403418c823d895af898cf635f5cada6"}, - {file = "ddtrace-2.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b0fdb6a2fe0eadd122df4ea3a11690cb88f4f642bd19b1a21d01e9dcfd6eb20c"}, - {file = "ddtrace-2.4.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1b2bf18ee10ea8fe668096a6c70db4161e228edee161b04719506947d7117937"}, - {file = "ddtrace-2.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ca5fa396b8df0d7b55ad9e8d5b19be09c5dedefa388bf7590340ace5ce392e14"}, - {file = "ddtrace-2.4.0-cp37-cp37m-win32.whl", hash = "sha256:c67a4d8767aa269f8dfab79ae39b8170b95de6813bd1cba17dc951f0a1ee462b"}, - {file = "ddtrace-2.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1db7931541052622a91c8c6594b274d96efe956d5dbbe09c57a50c0f74640b52"}, - {file = "ddtrace-2.4.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8b6ab9f26d2ea50dfa69a282d727c865461f0c1b535f973922072f700cde031"}, - {file = "ddtrace-2.4.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:9ad7aa89988b77b893c3e9898fc48e3cef9471bc2648d6a83cc800b49cad1f1f"}, - {file = "ddtrace-2.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38b95920bcc17289a0e3871830ef19030df763039021a796a1debb7fd4ea347b"}, - {file = "ddtrace-2.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9152dcc4b8a98392ce5853b8e160f8d215ddd148337d42861ab3c12635b32b75"}, - {file = "ddtrace-2.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c335be0ab8f4f376f51111219a9d85bcdbd6d75c18a8d5471817645bed1430c0"}, - {file = "ddtrace-2.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0c95339694034d4fbf9e1b2a0918f99b3936336e8deb4d513e9cf7a6ae1532f3"}, - {file = "ddtrace-2.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:f8bddc5e84e50663b64fbad2e2c61203484dea06de7759a47f096514d99f5c8f"}, - {file = "ddtrace-2.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af7c4c94959481bc4060c7dfb5f7e70b1929b18089c7ea0329fc3f28707fd8a"}, - {file = "ddtrace-2.4.0-cp38-cp38-win32.whl", hash = "sha256:de3fcca4747340c835e7816009dd363d4e02dc5fc25365b2418dc3d986a6550a"}, - {file = "ddtrace-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:2f3dbcff2b305d34ecc63db05d0efeb923846ba07871be6f0a3509a33290fb69"}, - {file = "ddtrace-2.4.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7b43e2e890e868a133afc25f57774bb6bc8ae8841094cba4e8f2b3ee50f9c7ee"}, - {file = "ddtrace-2.4.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:de66ea03ca5b3f02d0f878fc9d486d4d4f654cf66b38d3fdf73bf314fc0e3f5b"}, - {file = "ddtrace-2.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01cba8d20d4754135411e0e3398af02bc29b3c5f3dc85b1ee8cdfb9a0532f793"}, - {file = "ddtrace-2.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb324809582b65baa682f045cb2873d686de3aa93cac75718462d0a23f980836"}, - {file = "ddtrace-2.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f475ea4e2086e6a16a48568688918b21043ba391a6f968cb9bc17ec70d51de75"}, - {file = "ddtrace-2.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1d4a5d9c89db2cc0e4a6eaf10b6d1af449d1ef14060000b23eceee19497705e"}, - {file = "ddtrace-2.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a057db38d52271b6206bac2ab23f2a36cbe547397cba1ce586021df711570559"}, - {file = "ddtrace-2.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:45ee78018276764f7fdaf1cf3b945660cf1ab39e1a03e0c61bf1984a71562204"}, - {file = "ddtrace-2.4.0-cp39-cp39-win32.whl", hash = "sha256:4f63dea207c90bb2c2d52ff9de0ee71b27aedb5d8540745e4e0b38a896737de0"}, - {file = "ddtrace-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3523c71d37fb3135d0817e92b486bcee7829c41e5465ed41b080286d7e2739d"}, - {file = "ddtrace-2.4.0.tar.gz", hash = "sha256:fb1bab23debb3a1fb71e3d6a1ce9818bc5e6ad9b885b901f78f3f28639393ecb"}, + {file = "ddtrace-2.5.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:f918538a6adb33696be653d343ee318b16ea977376d9b7214d14fe97c42e9bd9"}, + {file = "ddtrace-2.5.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f56735eb636d3ab2f7224f261d3a6bd43f884e9901d68407d485ea65f3dc0f46"}, + {file = "ddtrace-2.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72d21fe6842a8d80c8765dd699153a2475ae2d49e82e10f9668eadb08b454040"}, + {file = "ddtrace-2.5.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6e48caf63506d7ac3df7caa955b6258de91c1a1f55149506ab8ac36143770b9"}, + {file = "ddtrace-2.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3f26e04ba7521f6885d871fd6266fedc0a7ccf2637b85579c058927404bad7"}, + {file = "ddtrace-2.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:15d78b0cd5d2090c063031d76e933b8b24e043d524a6091a751cf57b0fab025f"}, + {file = "ddtrace-2.5.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee76beaf87695f2204b0c2c2a3664b39f3483b7a8447b28e5e2bcc899861b3eb"}, + {file = "ddtrace-2.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8840f0e82d6dca3888bd06e7ab0ca6d39009f3cd3475028d8bc03c939127afc2"}, + {file = "ddtrace-2.5.2-cp310-cp310-win32.whl", hash = "sha256:a34ccab0c8991c5fc5252d5cd6e88852cd7f77c8bf838de84e70b4a3bfacaad4"}, + {file = "ddtrace-2.5.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:ffa4f5779c7000fe5960156bd15339184355b30a661b0955799cae50da5d03a7"}, + {file = "ddtrace-2.5.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ea2740a3d61876cb07b271af444e98cdc8b730497cfcddbc3794c7a7441b8d15"}, + {file = "ddtrace-2.5.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:62e775ba9d2a2b5f952a6609029e965057bdd852ccd6e53b55c0f82ae83aa542"}, + {file = "ddtrace-2.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30186112f156a564efda5e2018240b55baee7664897ca5fc35c452d032a77185"}, + {file = "ddtrace-2.5.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9dccdc69de364cffc2b892280724c78cb54db151452a0b6d1b4a89b6f060c44"}, + {file = "ddtrace-2.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa2543c2303ab325af7794f2a8a420133cd9222e70bfbce3869da146fc5e2ba"}, + {file = "ddtrace-2.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aa2e64f79ada9f2fd5307cd0eba726d8585e47b0282fb9463aaa4b267265e94a"}, + {file = "ddtrace-2.5.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:37b4d55a5be59530e6e5761a36d727aee812be69c81b00ee0182eb62be6f3b75"}, + {file = "ddtrace-2.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d97f990d2322a23e82203cc5a2aa694fb0d42541a44bb120390e6598a63e5f5"}, + {file = "ddtrace-2.5.2-cp311-cp311-win32.whl", hash = "sha256:5d3f1bc3ce87fbcf2256197178179ef681df720ebbc39b0559bda00247744533"}, + {file = "ddtrace-2.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:a50057085b0972e695bb1ef3042f6cd6a1a3b12111fac4985942f2dbbcf8ac2f"}, + {file = "ddtrace-2.5.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b923b099b9a1e50f01ce8bcd4d11e3255a48c71f3e6314dd9a482baed0a87ed6"}, + {file = "ddtrace-2.5.2-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:512d3975b1657c706ca9c84373e5fce323f6fc94bfac33c30876ad8d55e0ea71"}, + {file = "ddtrace-2.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c54bc474c70151d5a141061b6c20a1efabdf458e4239c790d45fa12a13b8e7d"}, + {file = "ddtrace-2.5.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5fb2bbd38dc46ba6a7ea1031c4751b1ca888be5fac8a42049ebc2517707c00d"}, + {file = "ddtrace-2.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa6fb6bcfb3810d8f0882e489e7d2ef4dd3a92b452cfdd8d1fd4703dc496b17"}, + {file = "ddtrace-2.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f4eed40d978352c7371804ecb68bbe9e55967bb904bd03b0568554e0b6b92cf"}, + {file = "ddtrace-2.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:57606af5380888e2e7cc67b7c4fa5e1bc51d29c48f004b4be0cbe1b319fddc75"}, + {file = "ddtrace-2.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ee8d0259a004964a8eddb394aa84a5754435d4270cd2041e6559c9e68fa49141"}, + {file = "ddtrace-2.5.2-cp312-cp312-win32.whl", hash = "sha256:4df564e620ec7e657fcdb0d5bf1231aa1357bf49b736f0d9e9f6df17a23fc569"}, + {file = "ddtrace-2.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:637f16af1c84566bde044798312c67bc5676df949632ab02e740440558f2a598"}, + {file = "ddtrace-2.5.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:d24841a9390f3e169edcaf1ca5ac80599062e66dee43a510decb25e779b6f7b4"}, + {file = "ddtrace-2.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49aa4e0210862e829e09569de2e2f34ac17c5e246567c5b6662ec21e2a06d938"}, + {file = "ddtrace-2.5.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:985738fe875b11f05dfa2b1f21a619d499344eb740f63e01d6eae1fb29eb949b"}, + {file = "ddtrace-2.5.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8814321822e4afc95ac86fbc476dc20d78dd4b1d510c02606459df4580093d18"}, + {file = "ddtrace-2.5.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ad6c0ae7baff9d00c689834aec0627274d681ed1d2a8ae627348a6191e8d32ec"}, + {file = "ddtrace-2.5.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa596f2e80c525a2310e605bfa3fa6ba6790b2ae90c02e47ceee0e62ceae17a6"}, + {file = "ddtrace-2.5.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6bdfae9fa03af334820678196a4895450d0b6bd9f1b5119d42ddbd327a55fcce"}, + {file = "ddtrace-2.5.2-cp37-cp37m-win32.whl", hash = "sha256:227bb0391d310e0d5a54505c7ab59f9692a5db91dc492373489bc45726980e1d"}, + {file = "ddtrace-2.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6e55c4738b58b4452933204305243e19000f6f283af93bf51b63382100cb8f21"}, + {file = "ddtrace-2.5.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4d9e7a9e26c38ae1e368f5d820e78459ff2d39689f40d4a3db185ddb3686c383"}, + {file = "ddtrace-2.5.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:c361ea11b442b04d8e011528205ed65b926d71d18f38d372270204eabf49b068"}, + {file = "ddtrace-2.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aafd86eeea622cd0e8cf6b63632efc67a52a32317d2a376382ef6170d383c9f"}, + {file = "ddtrace-2.5.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ff039635470ba483ed448baaf6337d85a731b17af62fef06dfa811f761f374f"}, + {file = "ddtrace-2.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f1cb3bea1170410d603f9d557918c24d4d8783659c03817daea6352d9f37f9"}, + {file = "ddtrace-2.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7351500241eb24c7d789b371a6860ca2b0e2db1ff9d317089153b562a3a461e1"}, + {file = "ddtrace-2.5.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a2cfc6ee800890556e404b94d13680c83952efa5d3dafa72ef8cb08a8782f874"}, + {file = "ddtrace-2.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96a791f03b62ebdb9f3e635a0e93711149123a8fc1c1c152be0d1cdb5d8e6359"}, + {file = "ddtrace-2.5.2-cp38-cp38-win32.whl", hash = "sha256:6c61e72abec3f2f6b46e53712a32a971de1b6a9be657d5ebeff1334f6146babc"}, + {file = "ddtrace-2.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:b93d8b536f5fc45a72bb2785051dc729f4d581ef2d69ed10bccae6a7487477b2"}, + {file = "ddtrace-2.5.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:38cbcb7b4ff1371480b29228d2b8e570e7d7b386a7632b96f9600135ec3eb9db"}, + {file = "ddtrace-2.5.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a270d128c6067f52a76ecbb658fae3f4d3bd4888baa9e6159ff82b6de14c53be"}, + {file = "ddtrace-2.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e59f3958016fcec5eb16abd7979a9ec4d850733e2a03b878b096277fc092784"}, + {file = "ddtrace-2.5.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:066403f0e00a8de09c8187037befe7463d1fab5d8178b62a07c2542792710d14"}, + {file = "ddtrace-2.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbcbf24bca8497f1412ec438fbdc94847aef9e86092ffd4f8626bbe6d278d33"}, + {file = "ddtrace-2.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d34f8da809e2783770a6c88396b3653fb12a4196e9b5f16b8c10f37bbf2b7b31"}, + {file = "ddtrace-2.5.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9eaca41664dd0c2bd7257fe2e91c7e46718b20591bfaa0b5c01c39b599115f88"}, + {file = 
"ddtrace-2.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f4b67e02ba5c316711719dcfc15e94f47684e7af1785289d016a29a2c664827"}, + {file = "ddtrace-2.5.2-cp39-cp39-win32.whl", hash = "sha256:9bbd675d73aae6516e02a86cb830778771dafb0e182d5a122270ccd82ee77eed"}, + {file = "ddtrace-2.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:e93f3f5d3d57beb492b04286c758be65495908bd313df6f56865ad7af222e49e"}, + {file = "ddtrace-2.5.2.tar.gz", hash = "sha256:5addeb19eea5ebdc23c493e5635f4c8737795b48ba637117a1895f31b900985f"}, ] [package.dependencies] @@ -943,7 +952,7 @@ bytecode = [ cattrs = "*" ddsketch = ">=2.0.1" envier = "*" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-metadata = {version = "<=6.5.0", markers = "python_version < \"3.8\""} opentelemetry-api = ">=1" protobuf = ">=3" setuptools = {version = "*", markers = "python_version >= \"3.12\""} @@ -1019,13 +1028,13 @@ ssh = ["paramiko (>=2.4.3)"] [[package]] name = "envier" -version = "0.5.0" +version = "0.5.1" description = "Python application configuration via the environment" optional = false python-versions = ">=3.7" files = [ - {file = "envier-0.5.0-py3-none-any.whl", hash = "sha256:5fed6099ee5d7ad4cf664f8bb99d1281d4ab5fadeec8f40ba9458610938293be"}, - {file = "envier-0.5.0.tar.gz", hash = "sha256:f35ca8605f0c70c2c0367133af9dc1ef16710021dbd0e28c1b0a83070db06768"}, + {file = "envier-0.5.1-py3-none-any.whl", hash = "sha256:b45ef6051fea33d0c32a64e186bff2cfb446e2242d6781216c9bc9ce708c5909"}, + {file = "envier-0.5.1.tar.gz", hash = "sha256:bd5ccf707447973ea0f4125b7df202ba415ad888bcdcb8df80e0b002ee11ffdb"}, ] [package.extras] @@ -1250,17 +1259,6 @@ files = [ {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, - {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, - {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, - {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, - {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = 
"sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, - {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, @@ -1321,13 +1319,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.7.0" +version = "6.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, + {file = "importlib_metadata-6.5.0-py3-none-any.whl", hash = "sha256:03ba783c3a2c69d751b109fc0c94a62c51f581b3d6acf8ed1331b6d5729321ff"}, + {file = "importlib_metadata-6.5.0.tar.gz", hash = "sha256:7a8bdf1bc3a726297f5cfbc999e6e7ff6b4fa41b26bba4afc580448624460045"}, ] [package.dependencies] @@ -1337,7 +1335,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -1463,6 +1461,20 @@ files = [ [package.dependencies] jsonpointer = ">=1.9" +[[package]] +name = "jsonpath-ng" +version = "1.6.1" +description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." +optional = true +python-versions = "*" +files = [ + {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, + {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, +] + +[package.dependencies] +ply = "*" + [[package]] name = "jsonpickle" version = "3.0.2" @@ -1612,71 +1624,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = 
"MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = 
"MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -2171,6 +2183,17 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "ply" +version = "3.11" +description = "Python Lex & Yacc" +optional = true +python-versions = "*" +files = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] + [[package]] name = "protobuf" version = "4.24.4" @@ -2505,13 +2528,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2023.4" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, + {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] [[package]] @@ -2549,7 +2572,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2557,16 +2579,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2583,7 +2597,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2591,7 +2604,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3144,6 +3156,20 @@ files = [ [package.dependencies] types-urllib3 = "*" +[[package]] +name = "types-requests" +version = "2.31.0.20231231" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-requests-2.31.0.20231231.tar.gz", hash = "sha256:0f8c0c9764773384122813548d9eea92a5c4e1f33ed54556b508968ec5065cee"}, + {file = "types_requests-2.31.0.20231231-py3-none-any.whl", hash = "sha256:2e2230c7bc8dd63fa3153c1c0ae335f8a368447f0582fc332f17d54f88e69027"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "types-urllib3" version = "1.26.25.14" @@ -3390,10 +3416,10 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] +all = ["aws-encryption-sdk", "aws-xray-sdk", "fastjsonschema", "jsonpath-ng", "pydantic"] aws-sdk = ["boto3"] datadog = ["datadog-lambda"] -datamasking-aws-sdk = ["aws-encryption-sdk"] +datamasking = ["aws-encryption-sdk", "jsonpath-ng"] parser = ["pydantic"] redis = ["redis"] tracer = ["aws-xray-sdk"] @@ -3402,4 +3428,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "f4c66a8fa656902aba0c04cc8b5dc236d7f0ed6f7c3e22507cc89e711b0b62b2" +content-hash = "28c3a405185f635f8e65ea51adfe1cfc589cb469497d800100521f91037ba26a" diff --git a/pyproject.toml b/pyproject.toml index 0e576d412df..cb1f322e9ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,7 @@ redis = {version = ">=4.4,<6.0", optional = true} typing-extensions = "^4.6.2" datadog-lambda = { version = ">=4.77,<6.0", optional = true } aws-encryption-sdk = { version = "^3.1.1", optional = true } +jsonpath-ng = { version = "^1.6.0", optional = true } [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^7.2"} @@ -97,11 +98,11 @@ parser = ["pydantic"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] redis = ["redis"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema", "aws-encryption-sdk", "jsonpath-ng"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
 aws-sdk = ["boto3"]
 datadog = ["datadog-lambda"]
-datamasking-aws-sdk = ["aws-encryption-sdk"]
+datamasking = ["aws-encryption-sdk", "jsonpath-ng"]

 [tool.poetry.group.dev.dependencies]
 cfn-lint = "0.83.8"
diff --git a/tests/e2e/data_masking/handlers/basic_handler.py b/tests/e2e/data_masking/handlers/basic_handler.py
index f31e822429a..6f696391822 100644
--- a/tests/e2e/data_masking/handlers/basic_handler.py
+++ b/tests/e2e/data_masking/handlers/basic_handler.py
@@ -1,6 +1,6 @@
 from aws_lambda_powertools import Logger
-from aws_lambda_powertools.utilities._data_masking import DataMasking
-from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider

 logger = Logger()

@@ -14,7 +14,7 @@ def lambda_handler(event, context):
     # Encrypting data for test_encryption_in_handler test
     kms_key = event.get("kms_key", "")
-    data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key]))
+    data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key]))
     value = [1, 2, "string", 4.5]
     encrypted_data = data_masker.encrypt(value)
     response = {}
diff --git a/tests/e2e/data_masking/test_e2e_data_masking.py b/tests/e2e/data_masking/test_e2e_data_masking.py
index 80f45564177..a720a265d83 100644
--- a/tests/e2e/data_masking/test_e2e_data_masking.py
+++ b/tests/e2e/data_masking/test_e2e_data_masking.py
@@ -4,15 +4,13 @@
 import pytest
 from aws_encryption_sdk.exceptions import DecryptKeyError

-from aws_lambda_powertools.utilities._data_masking import DataMasking
-from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import (
-    AwsEncryptionSdkProvider,
-    ContextMismatchError,
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.exceptions import DataMaskingContextMismatchError
+from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import (
+    AWSEncryptionSDKProvider,
 )
 from tests.e2e.utils import data_fetcher

-pytest.skip(reason="Data masking tests disabled until we go GA.", allow_module_level=True)
-

 @pytest.fixture
 def basic_handler_fn(infrastructure: dict) -> str:
@@ -36,7 +34,7 @@ def kms_key2_arn(infrastructure: dict) -> str:

 @pytest.fixture
 def data_masker(kms_key1_arn) -> DataMasking:
-    return DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key1_arn]))
+    return DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key1_arn]))


 @pytest.mark.xdist_group(name="data_masking")
@@ -79,7 +77,7 @@ def test_encryption_context_mismatch(data_masker):
     encrypted_data = data_masker.encrypt(value, encryption_context={"this": "is_secure"})

     # THEN decrypting with a different encryption_context should raise a ContextMismatchError
-    with pytest.raises(ContextMismatchError):
+    with pytest.raises(DataMaskingContextMismatchError):
         data_masker.decrypt(encrypted_data, encryption_context={"not": "same_context"})

@@ -93,7 +91,7 @@ def test_encryption_no_context_fail(data_masker):
     encrypted_data = data_masker.encrypt(value)

     # THEN decrypting with an encryption_context should raise a ContextMismatchError
-    with pytest.raises(ContextMismatchError):
+    with pytest.raises(DataMaskingContextMismatchError):
         data_masker.decrypt(encrypted_data, encryption_context={"this": "is_secure"})

@@ -106,7 +104,7 @@ def test_encryption_decryption_key_mismatch(data_masker, kms_key2_arn):
     encrypted_data = data_masker.encrypt(value)

     # THEN when decrypting with a different key it should fail
-    data_masker_key2 = DataMasking(provider=AwsEncryptionSdkProvider(keys=[kms_key2_arn]))
+    data_masker_key2 = DataMasking(provider=AWSEncryptionSDKProvider(keys=[kms_key2_arn]))

     with pytest.raises(DecryptKeyError):
         data_masker_key2.decrypt(encrypted_data)
diff --git a/tests/functional/data_masking/test_aws_encryption_sdk.py b/tests/functional/data_masking/test_aws_encryption_sdk.py
index 978c2e21572..c1dfd22c6b9 100644
--- a/tests/functional/data_masking/test_aws_encryption_sdk.py
+++ b/tests/functional/data_masking/test_aws_encryption_sdk.py
@@ -1,34 +1,36 @@
 from __future__ import annotations

 import base64
+import functools
 import json
-from typing import Any, Callable, Dict, Union
+from typing import Any, Callable

 import pytest
+from aws_encryption_sdk.identifiers import Algorithm

-from aws_lambda_powertools.utilities._data_masking import DataMasking
-from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING
-from aws_lambda_powertools.utilities._data_masking.provider import BaseProvider
-from aws_lambda_powertools.utilities._data_masking.provider.kms import (
-    AwsEncryptionSdkProvider,
+from aws_lambda_powertools.utilities.data_masking import DataMasking
+from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING
+from aws_lambda_powertools.utilities.data_masking.provider import BaseProvider
+from aws_lambda_powertools.utilities.data_masking.provider.kms import (
+    AWSEncryptionSDKProvider,
 )


 class FakeEncryptionKeyProvider(BaseProvider):
     def __init__(
         self,
-        json_serializer: Callable[[Dict], str] | None = None,
-        json_deserializer: Callable[[Union[Dict, str, bool, int, float]], str] | None = None,
-    ):
-        super().__init__(json_serializer=json_serializer, json_deserializer=json_deserializer)
+        json_serializer: Callable = functools.partial(json.dumps, ensure_ascii=False),
+        json_deserializer: Callable = json.loads,
+    ) -> None:
+        super().__init__(json_serializer, json_deserializer)

     def encrypt(self, data: bytes | str, **kwargs) -> str:
-        data = self.json_serializer(data)
-        ciphertext = base64.b64encode(data).decode()
+        encoded_data: str = self.json_serializer(data)
+        ciphertext = base64.b64encode(encoded_data.encode("utf-8")).decode()
         return ciphertext

     def decrypt(self, data: bytes, **kwargs) -> Any:
-        ciphertext_decoded = base64.b64decode(data)
+        ciphertext_decoded = base64.b64decode(data).decode("utf-8")
         ciphertext = self.json_deserializer(ciphertext_decoded)
         return ciphertext

@@ -37,74 +39,74 @@ def decrypt(self, data: bytes, **kwargs) -> Any:
 def data_masker(monkeypatch) -> DataMasking:
     """DataMasking using AWS Encryption SDK Provider with a fake client"""
     fake_key_provider = FakeEncryptionKeyProvider()
-    provider = AwsEncryptionSdkProvider(
+    provider = AWSEncryptionSDKProvider(
         keys=["dummy"],
         key_provider=fake_key_provider,
     )
     return DataMasking(provider=provider)


-def test_mask_int(data_masker):
+def test_erase_int(data_masker):
     # GIVEN an int data type

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask(42)
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase(42)

     # THEN the result is the data masked
-    assert masked_string == DATA_MASKING_STRING
+    assert erased_string == DATA_MASKING_STRING


-def test_mask_float(data_masker):
+def test_erase_float(data_masker):
     # GIVEN a float data type

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask(4.2)
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase(4.2)

     # THEN the result is the data masked
-    assert masked_string == DATA_MASKING_STRING
+    assert erased_string == DATA_MASKING_STRING


-def test_mask_bool(data_masker):
+def test_erase_bool(data_masker):
     # GIVEN a bool data type

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask(True)
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase(True)

     # THEN the result is the data masked
-    assert masked_string == DATA_MASKING_STRING
+    assert erased_string == DATA_MASKING_STRING


-def test_mask_none(data_masker):
+def test_erase_none(data_masker):
     # GIVEN a None data type

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask(None)
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase(None)

     # THEN the result is the data masked
-    assert masked_string == DATA_MASKING_STRING
+    assert erased_string == DATA_MASKING_STRING


-def test_mask_str(data_masker):
+def test_erase_str(data_masker):
     # GIVEN a str data type

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask("this is a string")
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase("this is a string")

     # THEN the result is the data masked
-    assert masked_string == DATA_MASKING_STRING
+    assert erased_string == DATA_MASKING_STRING


-def test_mask_list(data_masker):
+def test_erase_list(data_masker):
     # GIVEN a list data type

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask([1, 2, "string", 3])
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase([1, 2, "string", 3])

     # THEN the result is the data masked, while maintaining type list
-    assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING]
+    assert erased_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING]


-def test_mask_dict(data_masker):
+def test_erase_dict(data_masker):
     # GIVEN a dict data type
     data = {
         "a": {
@@ -113,14 +115,14 @@
         },
     }

-    # WHEN mask is called with no fields argument
-    masked_string = data_masker.mask(data)
+    # WHEN erase is called with no fields argument
+    erased_string = data_masker.erase(data)

     # THEN the result is the data masked
-    assert masked_string == DATA_MASKING_STRING
+    assert erased_string == DATA_MASKING_STRING


-def test_mask_dict_with_fields(data_masker):
+def test_erase_dict_with_fields(data_masker):
     # GIVEN a dict data type
     data = {
         "a": {
@@ -129,11 +131,11 @@
         },
     }

-    # WHEN mask is called with a list of fields specified
-    masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"])
+    # WHEN erase is called with a list of fields specified
+    erased_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"])

     # THEN the result is only the specified fields are masked
-    assert masked_string == {
+    assert erased_string == {
         "a": {
             "1": {"None": DATA_MASKING_STRING, "four": "world"},
             "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}},
@@ -141,7 +143,7 @@
     }


-def test_mask_json_dict_with_fields(data_masker):
+def test_erase_json_dict_with_fields(data_masker):
     # GIVEN the data type is a json representation of a dictionary
     data = json.dumps(
{ @@ -152,8 +154,8 @@ def test_mask_json_dict_with_fields(data_masker): }, ) - # WHEN mask is called with a list of fields specified - masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + masked_json_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) # THEN the result is only the specified fields are masked assert masked_json_string == { @@ -257,8 +259,8 @@ def test_encrypt_dict_with_fields(data_masker): } # WHEN encrypting and then decrypting the encrypted data - encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"]) - decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"]) + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) # THEN the result is only the specified fields are masked assert decrypted_data == data @@ -276,8 +278,199 @@ def test_encrypt_json_dict_with_fields(data_masker): ) # WHEN encrypting and then decrypting the encrypted data - encrypted_data = data_masker.encrypt(data, fields=["a.1.None", "a.b.3.4"]) - decrypted_data = data_masker.decrypt(encrypted_data, fields=["a.1.None", "a.b.3.4"]) + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) # THEN the result is only the specified fields are masked - assert decrypted_data == json.loads(data) + assert decrypted_data == data + + +def test_encrypt_json_with_list_fields(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": [{"key1": [0, 1]}], + }, + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_json_with_tuple_fields(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": (0, 1), + }, + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_encryption_context(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": (0, 1), + }, + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data, data_classification="confidential") + decrypted_data = data_masker.decrypt(encrypted_data, data_classification="confidential") + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_complex_dict(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "name": "Leandro", + "operation": "non sensitive", + "card_number": "1000 4444 333 2222", + "address": [ + { + "postcode": 81847, + "street": "38986 Joanne Stravenue", + "country": "United States", + "timezone": "America/La_Paz", + }, + { + "postcode": 94400, + "street": "623 Kraig Mall", + "country": "United States", + "timezone": 
"America/Mazatlan", + }, + { + "postcode": 94480, + "street": "123 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + ], + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_slice(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "name": "Leandro", + "operation": "non sensitive", + "card_number": "1000 4444 333 2222", + "address": [ + { + "postcode": 81847, + "street": "38986 Joanne Stravenue", + "country": "United States", + "timezone": "America/La_Paz", + }, + { + "postcode": 94400, + "street": "623 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + { + "postcode": 94480, + "street": "123 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + ], + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_complex_search(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "name": "Leandro", + "operation": "non sensitive", + "card_number": "1000 4444 333 2222", + "address": [ + { + "postcode": 81847, + "street": "38986 Joanne Stravenue", + "country": "United States", + "timezone": "America/La_Paz", + }, + { + "postcode": 94400, + "street": "623 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + { + "postcode": 94480, + "street": "123 Kraig Mall", + "country": "United States", + "timezone": "America/Mazatlan", + }, + ], + }, + ) + + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data + + +def test_encrypt_with_provider_options(data_masker): + # GIVEN the data type is a json representation of a dictionary with a list inside + data = json.dumps( + { + "payload": { + "first": ["value1", "value2"], + "second": (0, 1), + }, + }, + ) + + provider_options = {"algorithm": Algorithm.AES_256_GCM_HKDF_SHA512_COMMIT_KEY} + # WHEN encrypting and then decrypting the encrypted data + encrypted_data = data_masker.encrypt(data, provider_options=provider_options) + decrypted_data = data_masker.decrypt(encrypted_data) + + # THEN the result is only the specified fields are masked + assert decrypted_data == data diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py index 9a898ea10cd..76081b20392 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1024/app.py @@ -3,8 +3,8 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths -from 
aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider from aws_lambda_powertools.utilities.typing import LambdaContext KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] @@ -48,7 +48,7 @@ @tracer.capture_method def function1024(): logger.info("Hello world function1024 - HTTP 200") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) return {"Decrypted_json_blob_function_1024": decrypted} diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py index 6b8250579a5..b191ade241a 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_128/app.py @@ -3,8 +3,8 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider from aws_lambda_powertools.utilities.typing import LambdaContext KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] @@ -48,7 +48,7 @@ @tracer.capture_method def function128(): logger.info("Hello world function128 - HTTP 200") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) return {"Decrypted_json_blob_function_128": decrypted} diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py index 623a1f7b232..19d287e6011 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/function_1769/app.py @@ -3,8 +3,8 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.utilities._data_masking import DataMasking -from aws_lambda_powertools.utilities._data_masking.provider.kms.aws_encryption_sdk import AwsEncryptionSdkProvider +from aws_lambda_powertools.utilities.data_masking import 
DataMasking +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider from aws_lambda_powertools.utilities.typing import LambdaContext KMS_KEY_ARN = os.environ["KMS_KEY_ARN"] @@ -48,7 +48,7 @@ @tracer.capture_method def function1769(): logger.info("Hello world function1769 - HTTP 200") - data_masker = DataMasking(provider=AwsEncryptionSdkProvider(keys=[KMS_KEY_ARN])) + data_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=[KMS_KEY_ARN])) encrypted = data_masker.encrypt(json_blob, fields=["address.street", "job_history.company.company_name"]) decrypted = data_masker.decrypt(encrypted, fields=["address.street", "job_history.company.company_name"]) return {"Decrypted_json_blob_function_1769": decrypted} diff --git a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml index b70fb6d061e..7df194d80bb 100644 --- a/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml +++ b/tests/performance/data_masking/load_test_data_masking/pt-load-test-stack/template.yaml @@ -38,7 +38,9 @@ Resources: Policies: Statement: - Effect: Allow - Action: kms:* + Action: + - kms:Decrypt + - kms:GenerateDataKey Resource: !GetAtt MyKMSKey.Arn Tracing: Active Events: @@ -68,7 +70,9 @@ Resources: Policies: Statement: - Effect: Allow - Action: kms:* + Action: + - kms:Decrypt + - kms:GenerateDataKey Resource: !GetAtt MyKMSKey.Arn Tracing: Active Events: @@ -98,7 +102,9 @@ Resources: Policies: Statement: - Effect: Allow - Action: kms:* + Action: + - kms:Decrypt + - kms:GenerateDataKey Resource: !GetAtt MyKMSKey.Arn Tracing: Active Events: diff --git a/tests/performance/data_masking/test_perf_data_masking.py b/tests/performance/data_masking/test_perf_data_masking.py index 688e36c7a64..668da32a6e9 100644 --- a/tests/performance/data_masking/test_perf_data_masking.py +++ b/tests/performance/data_masking/test_perf_data_masking.py @@ -3,11 +3,11 @@ import pytest -from aws_lambda_powertools.utilities._data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.base import DataMasking -DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities._data_masking" +DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities.data_masking" DATA_MASKING_INIT_SLA: float = 0.002 -DATA_MASKING_NESTED_ENCRYPT_SLA: float = 0.001 +DATA_MASKING_NESTED_ENCRYPT_SLA: float = 0.05 json_blob = { "id": 1, @@ -55,15 +55,15 @@ def test_data_masking_init(benchmark): pytest.fail(f"High level imports should be below {DATA_MASKING_INIT_SLA}s: {stat}") -def mask_json_blob(): +def erase_json_blob(): data_masker = DataMasking() - data_masker.mask(json_blob, json_blob_fields) + data_masker.erase(json_blob, json_blob_fields) @pytest.mark.perf @pytest.mark.benchmark(group="core", disable_gc=True, warmup=False) def test_data_masking_encrypt_with_json_blob(benchmark): - benchmark.pedantic(mask_json_blob) + benchmark.pedantic(erase_json_blob) stat = benchmark.stats.stats.max if stat > DATA_MASKING_NESTED_ENCRYPT_SLA: pytest.fail(f"High level imports should be below {DATA_MASKING_NESTED_ENCRYPT_SLA}s: {stat}") diff --git a/tests/unit/data_masking/test_kms_provider.py b/tests/unit/data_masking/test_kms_provider.py new file mode 100644 index 00000000000..5fe9b2e53ed --- /dev/null +++ b/tests/unit/data_masking/test_kms_provider.py @@ -0,0 +1,42 @@ +import pytest + +from aws_lambda_powertools.utilities.data_masking.exceptions import 
( + DataMaskingContextMismatchError, + DataMaskingUnsupportedTypeError, +) +from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import ( + KMSKeyProvider, +) + + +def test_encryption_context_exact_match(): + ctx = {"data_classification": "confidential", "data_type": "customer_data"} + ctx_two = {"data_type": "customer_data", "data_classification": "confidential"} + + KMSKeyProvider._compare_encryption_context(ctx, ctx_two) + + +def test_encryption_context_partial_match(): + ctx = {"data_classification": "confidential", "data_type": "customer_data"} + ctx_two = {"data_type": "customer_data"} + + with pytest.raises(DataMaskingContextMismatchError): + KMSKeyProvider._compare_encryption_context(ctx, ctx_two) + + +def test_encryption_context_supported_values(): + ctx = {"a": "b", "c": "d"} + KMSKeyProvider._validate_encryption_context(ctx) + KMSKeyProvider._validate_encryption_context({}) + + +@pytest.mark.parametrize( + "ctx", + [ + pytest.param({"a": 10, "b": True, "c": []}, id="non_string_values"), + pytest.param({"a": {"b": "c"}}, id="nested_dict"), + ], +) +def test_encryption_context_non_str_validation(ctx): + with pytest.raises(DataMaskingUnsupportedTypeError): + KMSKeyProvider._validate_encryption_context(ctx) diff --git a/tests/unit/data_masking/test_unit_data_masking.py b/tests/unit/data_masking/test_unit_data_masking.py index 4a92a668d73..4fbbc188ceb 100644 --- a/tests/unit/data_masking/test_unit_data_masking.py +++ b/tests/unit/data_masking/test_unit_data_masking.py @@ -2,8 +2,12 @@ import pytest -from aws_lambda_powertools.utilities._data_masking.base import DataMasking -from aws_lambda_powertools.utilities._data_masking.constants import DATA_MASKING_STRING +from aws_lambda_powertools.utilities.data_masking.base import DataMasking +from aws_lambda_powertools.utilities.data_masking.constants import DATA_MASKING_STRING +from aws_lambda_powertools.utilities.data_masking.exceptions import ( + DataMaskingFieldNotFoundError, + DataMaskingUnsupportedTypeError, +) @pytest.fixture @@ -11,67 +15,67 @@ def data_masker() -> DataMasking: return DataMasking() -def test_mask_int(data_masker): +def test_erase_int(data_masker): # GIVEN an int data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(42) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(42) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_float(data_masker): +def test_erase_float(data_masker): # GIVEN a float data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(4.2) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(4.2) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_bool(data_masker): +def test_erase_bool(data_masker): # GIVEN a bool data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(True) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(True) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_none(data_masker): +def test_erase_none(data_masker): # GIVEN a None data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(None) + # WHEN erase is called with no fields 
argument + erased_string = data_masker.erase(None) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_str(data_masker): +def test_erase_str(data_masker): # GIVEN a str data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask("this is a string") + # WHEN erase is called with no fields argument + erased_string = data_masker.erase("this is a string") # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_list(data_masker): +def test_erase_list(data_masker): # GIVEN a list data type - # WHEN mask is called with no fields argument - masked_string = data_masker.mask([1, 2, "string", 3]) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase([1, 2, "string", 3]) # THEN the result is the data masked, while maintaining type list - assert masked_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] + assert erased_string == [DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING, DATA_MASKING_STRING] -def test_mask_dict(data_masker): +def test_erase_dict(data_masker): # GIVEN a dict data type data = { "a": { @@ -80,14 +84,14 @@ def test_mask_dict(data_masker): }, } - # WHEN mask is called with no fields argument - masked_string = data_masker.mask(data) + # WHEN erase is called with no fields argument + erased_string = data_masker.erase(data) # THEN the result is the data masked - assert masked_string == DATA_MASKING_STRING + assert erased_string == DATA_MASKING_STRING -def test_mask_dict_with_fields(data_masker): +def test_erase_dict_with_fields(data_masker): # GIVEN a dict data type data = { "a": { @@ -96,11 +100,11 @@ def test_mask_dict_with_fields(data_masker): }, } - # WHEN mask is called with a list of fields specified - masked_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + erased_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) - # THEN the result is only the specified fields are masked - assert masked_string == { + # THEN the result is only the specified fields are erased + assert erased_string == { "a": { "1": {"None": DATA_MASKING_STRING, "four": "world"}, "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, @@ -108,7 +112,7 @@ def test_mask_dict_with_fields(data_masker): } -def test_mask_json_dict_with_fields(data_masker): +def test_erase_json_dict_with_fields(data_masker): # GIVEN the data type is a json representation of a dictionary data = json.dumps( { @@ -119,10 +123,10 @@ def test_mask_json_dict_with_fields(data_masker): }, ) - # WHEN mask is called with a list of fields specified - masked_json_string = data_masker.mask(data, fields=["a.1.None", "a.b.3.4"]) + # WHEN erase is called with a list of fields specified + masked_json_string = data_masker.erase(data, fields=["a.'1'.None", "a..'4'"]) - # THEN the result is only the specified fields are masked + # THEN the result is only the specified fields are erased assert masked_json_string == { "a": { "1": {"None": DATA_MASKING_STRING, "four": "world"}, @@ -153,13 +157,24 @@ def test_parsing_unsupported_data_type(data_masker): # GIVEN an initialization of the DataMasking class # WHEN attempting to pass in a list of fields with input data that is not a dict - with pytest.raises(TypeError): + with pytest.raises(DataMaskingUnsupportedTypeError): # THEN the 
result is a TypeError - data_masker.mask(42, ["this.field"]) + data_masker.erase(42, ["this.field"]) -def test_parsing_nonexistent_fields(data_masker): +def test_parsing_with_empty_field(data_masker): + # GIVEN an initialization of the DataMasking class + + # WHEN attempting to pass in a list of fields with input data that is not a dict + with pytest.raises(ValueError): + # THEN the result is a TypeError + data_masker.erase(42, []) + + +def test_parsing_nonexistent_fields_with_raise_on_missing_field(): # GIVEN a dict data type + + data_masker = DataMasking(raise_on_missing_field=True) data = { "3": { "1": {"None": "hello", "four": "world"}, @@ -168,13 +183,15 @@ def test_parsing_nonexistent_fields(data_masker): } # WHEN attempting to pass in fields that do not exist in the input data - with pytest.raises(KeyError): + with pytest.raises(DataMaskingFieldNotFoundError): # THEN the result is a KeyError - data_masker.mask(data, ["3.1.True"]) + data_masker.erase(data, ["'3'..True"]) -def test_parsing_nonstring_fields(data_masker): +def test_parsing_nonexistent_fields_warning_on_missing_field(): # GIVEN a dict data type + + data_masker = DataMasking(raise_on_missing_field=False) data = { "3": { "1": {"None": "hello", "four": "world"}, @@ -182,24 +199,9 @@ def test_parsing_nonstring_fields(data_masker): }, } - # WHEN attempting to pass in a list of fields that are not strings - masked = data_masker.mask(data, fields=[3.4]) - - # THEN the result is the value of the nested field should be masked as normal - assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}} - - -def test_parsing_nonstring_keys_and_fields(data_masker): - # GIVEN a dict data type with integer keys - data = { - 3: { - "1": {"None": "hello", "four": "world"}, - 4: {"33": {"5": "goodbye", "e": "world"}}, - }, - } - - # WHEN masked with a list of fields that are integer keys - masked = data_masker.mask(data, fields=[3.4]) + # WHEN erase is called with a non-existing field + with pytest.warns(UserWarning, match="Field or expression*"): + masked_json_string = data_masker.erase(data, fields=["non-existing"]) - # THEN the result is the value of the nested field should be masked - assert masked == {"3": {"1": {"None": "hello", "four": "world"}, "4": DATA_MASKING_STRING}} + # THEN the "erased" payload is the same of the original + assert masked_json_string == data
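
Note for reviewers: the snippet below is a minimal, illustrative sketch of the renamed public API that the tests in this diff exercise. It only uses calls visible above (`erase`, `encrypt`/`decrypt`, `AWSEncryptionSDKProvider`, the `raise_on_missing_field` flag, and the `data_classification` keyword carried as encryption context); the KMS key ARN, the sample payload, and the chosen field names are placeholders, and the exact erased value depends on `DATA_MASKING_STRING`.

```python
# Sketch only: mirrors the calls exercised by the tests above; not part of the patch.
import json

from aws_lambda_powertools.utilities.data_masking import DataMasking
from aws_lambda_powertools.utilities.data_masking.provider.kms.aws_encryption_sdk import AWSEncryptionSDKProvider

# Placeholder payload and field names, chosen for illustration.
data = {"card_number": "1000 4444 333 2222", "address": [{"street": "38986 Joanne Stravenue"}]}

# Irreversible erasure needs no provider; unknown fields only warn when
# raise_on_missing_field=False (see the warning test above).
masker = DataMasking(raise_on_missing_field=False)
erased = masker.erase(data, fields=["card_number"])  # replaces the value with the masking sentinel

# Encryption/decryption goes through a KMS-backed provider; extra keyword arguments such as
# data_classification travel as encryption context and must match on decrypt.
kms_masker = DataMasking(provider=AWSEncryptionSDKProvider(keys=["arn:aws:kms:<region>:<account>:key/<key-id>"]))
encrypted = kms_masker.encrypt(json.dumps(data), data_classification="confidential")
decrypted = kms_masker.decrypt(encrypted, data_classification="confidential")
```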