diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index f2aa2dedf10..0c4553f182f 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -407,7 +407,7 @@ def __init__( # OpenAPI spec only understands paths with { }. So we'll have to convert Powertools' < >. # https://swagger.io/specification/#path-templating - self.openapi_path = re.sub(r"<(.*?)>", lambda m: f"{{{''.join(m.group(1))}}}", self.path) + self.openapi_path = re.sub(r"<(.*?)>", lambda m: f"{{{''.join(m.group(1))}}}", self.path) # type: ignore[arg-type] self.rule = rule self.func = func diff --git a/aws_lambda_powertools/utilities/data_classes/kafka_event.py b/aws_lambda_powertools/utilities/data_classes/kafka_event.py index c3d549c0f49..094bd4bed6f 100644 --- a/aws_lambda_powertools/utilities/data_classes/kafka_event.py +++ b/aws_lambda_powertools/utilities/data_classes/kafka_event.py @@ -10,7 +10,19 @@ from collections.abc import Iterator -class KafkaEventRecord(DictWrapper): +class KafkaEventRecordSchemaMetadata(DictWrapper): + @property + def data_format(self) -> str | None: + """The data format of the Kafka record.""" + return self.get("dataFormat", None) + + @property + def schema_id(self) -> str | None: + """The schema id of the Kafka record.""" + return self.get("schemaId", None) + + +class KafkaEventRecordBase(DictWrapper): @property def topic(self) -> str: """The Kafka topic.""" @@ -36,6 +48,24 @@ def timestamp_type(self) -> str: """The Kafka record timestamp type.""" return self["timestampType"] + @property + def key_schema_metadata(self) -> KafkaEventRecordSchemaMetadata | None: + """The schema metadata of the Kafka record key, if present.""" + return ( + None if self.get("keySchemaMetadata") is None else KafkaEventRecordSchemaMetadata(self["keySchemaMetadata"]) + ) + + @property + def value_schema_metadata(self) -> KafkaEventRecordSchemaMetadata | None: + """The schema metadata of the Kafka record value, if present.""" + return ( + None + if self.get("valueSchemaMetadata") is None + else KafkaEventRecordSchemaMetadata(self["valueSchemaMetadata"]) + ) + + +class KafkaEventRecord(KafkaEventRecordBase): @property def key(self) -> str | None: """ @@ -83,18 +113,7 @@ def decoded_headers(self) -> dict[str, bytes]: return CaseInsensitiveDict((k, bytes(v)) for chunk in self.headers for k, v in chunk.items()) -class KafkaEvent(DictWrapper): - """Self-managed or MSK Apache Kafka event trigger Documentation: -------------- - - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html - - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html """ - - def __init__(self, data: dict[str, Any]): - super().__init__(data) - self._records: Iterator[KafkaEventRecord] | None = None - +class KafkaEventBase(DictWrapper): @property def event_source(self) -> str: """The AWS service from which the Kafka event record originated.""" @@ -115,6 +134,19 @@ def decoded_bootstrap_servers(self) -> list[str]: """The decoded Kafka bootstrap URL.""" return self.bootstrap_servers.split(",") + +class KafkaEvent(KafkaEventBase): + """Self-managed or MSK Apache Kafka event trigger + Documentation: + -------------- + - https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html + - https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html + """ + + def __init__(self, data: dict[str, Any]): + super().__init__(data) + self._records: Iterator[KafkaEventRecord] | None = None + @property def records(self) -> Iterator[KafkaEventRecord]: """The Kafka records."""
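A quick sketch of how the new schema metadata properties surface on the data class; the event payload below is illustrative and trimmed to the fields the snippet touches:

    from aws_lambda_powertools.utilities.data_classes.kafka_event import KafkaEvent

    # Hypothetical MSK payload: only the fields used below are shown
    event = KafkaEvent(
        {
            "eventSource": "aws:kafka",
            "records": {
                "mytopic-0": [
                    {
                        "topic": "mytopic",
                        "partition": 0,
                        "offset": 15,
                        "timestamp": 1545084650987,
                        "timestampType": "CREATE_TIME",
                        "value": "eyJrZXkiOiAidmFsdWUifQ==",  # base64 of {"key": "value"}
                        "headers": [],
                        "valueSchemaMetadata": {"dataFormat": "JSON", "schemaId": "1234"},
                    },
                ],
            },
        },
    )

    record = next(event.records)
    metadata = record.value_schema_metadata  # None when the event carries no schema metadata
    if metadata is not None:
        print(metadata.data_format, metadata.schema_id)  # JSON 1234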
records.""" diff --git a/aws_lambda_powertools/utilities/kafka/__init__.py b/aws_lambda_powertools/utilities/kafka/__init__.py new file mode 100644 index 00000000000..d41283cfc61 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/__init__.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.utilities.kafka.consumer_records import ConsumerRecords +from aws_lambda_powertools.utilities.kafka.kafka_consumer import kafka_consumer +from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + +__all__ = [ + "kafka_consumer", + "ConsumerRecords", + "SchemaConfig", +] diff --git a/aws_lambda_powertools/utilities/kafka/consumer_records.py b/aws_lambda_powertools/utilities/kafka/consumer_records.py new file mode 100644 index 00000000000..47c732136d0 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/consumer_records.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +from functools import cached_property +from typing import TYPE_CHECKING, Any + +from aws_lambda_powertools.utilities.data_classes.common import CaseInsensitiveDict +from aws_lambda_powertools.utilities.data_classes.kafka_event import KafkaEventBase, KafkaEventRecordBase +from aws_lambda_powertools.utilities.kafka.deserializer.deserializer import get_deserializer +from aws_lambda_powertools.utilities.kafka.serialization.serialization import serialize_to_output_type + +if TYPE_CHECKING: + from collections.abc import Iterator + + from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + + +class ConsumerRecordRecords(KafkaEventRecordBase): + """ + A Kafka Consumer Record + """ + + def __init__(self, data: dict[str, Any], schema_config: SchemaConfig | None = None): + super().__init__(data) + self.schema_config = schema_config + + @cached_property + def key(self) -> Any: + key = self.get("key") + + # Return None if key doesn't exist + if not key: + return None + + # Determine schema type and schema string + schema_type = None + schema_str = None + output_serializer = None + + if self.schema_config and self.schema_config.key_schema_type: + schema_type = self.schema_config.key_schema_type + schema_str = self.schema_config.key_schema + output_serializer = self.schema_config.key_output_serializer + + # Always use get_deserializer if None it will default to DEFAULT + deserializer = get_deserializer(schema_type, schema_str) + deserialized_value = deserializer.deserialize(key) + + # Apply output serializer if specified + if output_serializer: + return serialize_to_output_type(deserialized_value, output_serializer) + + return deserialized_value + + @cached_property + def value(self) -> Any: + value = self["value"] + + # Determine schema type and schema string + schema_type = None + schema_str = None + output_serializer = None + + if self.schema_config and self.schema_config.value_schema_type: + schema_type = self.schema_config.value_schema_type + schema_str = self.schema_config.value_schema + output_serializer = self.schema_config.value_output_serializer + + # Always use get_deserializer if None it will default to DEFAULT + deserializer = get_deserializer(schema_type, schema_str) + deserialized_value = deserializer.deserialize(value) + + # Apply output serializer if specified + if output_serializer: + return serialize_to_output_type(deserialized_value, output_serializer) + + return deserialized_value + + @property + def original_value(self) -> str: + """The original (base64 encoded) Kafka record value.""" + return self["value"] + + @property + def original_key(self) -> str | None: + """ + The 
diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/__init__.py b/aws_lambda_powertools/utilities/kafka/deserializer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/avro.py b/aws_lambda_powertools/utilities/kafka/deserializer/avro.py new file mode 100644 index 00000000000..89073f9e784 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/deserializer/avro.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +import io + +from avro.io import BinaryDecoder, DatumReader +from avro.schema import parse as parse_schema + +from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase +from aws_lambda_powertools.utilities.kafka.exceptions import ( + KafkaConsumerAvroSchemaParserError, + KafkaConsumerDeserializationError, +) + + +class AvroDeserializer(DeserializerBase): + """ + Deserializer for Apache Avro formatted data. + + This class provides functionality to deserialize Avro binary data using + a provided Avro schema definition. + """ + + def __init__(self, schema_str: str): + try: + self.parsed_schema = parse_schema(schema_str) + self.reader = DatumReader(self.parsed_schema) + except Exception as e: + raise KafkaConsumerAvroSchemaParserError( + f"Invalid Avro schema. Please ensure the provided avro schema is valid: {type(e).__name__}: {str(e)}", + ) from e + + def deserialize(self, data: bytes | str) -> object: + """ + Deserialize Avro binary data to a Python dictionary. + + Parameters + ---------- + data : bytes or str + The base64-encoded Avro binary data to deserialize; + it is base64-decoded to bytes first. + + Returns + ------- + object + Deserialized data, typically a dictionary matching the Avro schema. + + Raises + ------ + KafkaConsumerDeserializationError + When the data cannot be deserialized according to the schema, + typically due to data format incompatibility.
+ + Examples + -------- + >>> deserializer = AvroDeserializer(schema_str) + >>> avro_data = b'...' # base64-encoded Avro binary payload + >>> try: + ... result = deserializer.deserialize(avro_data) + ... # Process the deserialized data + ... except KafkaConsumerDeserializationError as e: + ... print(f"Failed to deserialize: {e}") + """ + try: + value = self._decode_input(data) + bytes_reader = io.BytesIO(value) + decoder = BinaryDecoder(bytes_reader) + return self.reader.read(decoder) + except Exception as e: + raise KafkaConsumerDeserializationError( + f"Error trying to deserialize avro data - {type(e).__name__}: {str(e)}", + ) from e diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/base.py b/aws_lambda_powertools/utilities/kafka/deserializer/base.py new file mode 100644 index 00000000000..9dfc5ad405b --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/deserializer/base.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import base64 +from abc import ABC, abstractmethod +from typing import Any + + +class DeserializerBase(ABC): + """ + Abstract base class for deserializers. + + This class defines the interface for all deserializers in the Kafka consumer utility + and provides a common method for decoding input data. + + Methods + ------- + deserialize(data) + Abstract method that must be implemented by subclasses to deserialize data. + _decode_input(data) + Helper method to decode input data to bytes. + + Examples + -------- + >>> class MyDeserializer(DeserializerBase): + ... def deserialize(self, data: bytes | str) -> dict[str, Any]: + ... value = self._decode_input(data) + ... # Custom deserialization logic here + ... return {"key": "value"} + """ + + @abstractmethod + def deserialize(self, data: str) -> dict[str, Any] | str | object: + """ + Deserialize input data to a Python object. + + This abstract method must be implemented by subclasses to provide + specific deserialization logic. + + Parameters + ---------- + data : str + The data to deserialize; it is always a base64-encoded string. + + Returns + ------- + dict[str, Any], str, or object + The deserialized data; the concrete type depends on the subclass. + """ + raise NotImplementedError("Subclasses must implement the deserialize method") + + def _decode_input(self, data: bytes | str) -> bytes: + return base64.b64decode(data) diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/default.py b/aws_lambda_powertools/utilities/kafka/deserializer/default.py new file mode 100644 index 00000000000..b889e958c08 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/deserializer/default.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +import base64 + +from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase + + +class DefaultDeserializer(DeserializerBase): + """ + A default deserializer that base64-decodes the input and decodes the resulting bytes as UTF-8. + + This deserializer is useful when no customized deserialization is needed + or when handling raw data formats. + """ + + def deserialize(self, data: bytes | str) -> str: + """ + Return the base64-decoded input as a UTF-8 string. + + This method implements the deserialize interface and performs only base64 and UTF-8 decoding. + + Parameters + ---------- + data : bytes or str + The base64-encoded input data to "deserialize". + + Returns + ------- + str + The input data base64-decoded and interpreted as UTF-8 text. + + Examples + -------- + >>> deserializer = DefaultDeserializer() + >>> + >>> # With base64-encoded string input + >>> string_data = "SGVsbG8sIHdvcmxkIQ=="  # base64 of "Hello, world!" + >>> print(deserializer.deserialize(string_data))  # Output: Hello, world! + >>> + >>> # With base64-encoded bytes input + >>> bytes_data = b"SGVsbG8sIHdvcmxkIQ==" + >>> print(deserializer.deserialize(bytes_data))  # Output: Hello, world! + """ + return base64.b64decode(data).decode("utf-8")
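A short sanity check of DefaultDeserializer against a known base64 payload (values invented for illustration):

    import base64

    from aws_lambda_powertools.utilities.kafka.deserializer.default import DefaultDeserializer

    deserializer = DefaultDeserializer()
    payload = base64.b64encode(b"Hello, world!").decode()  # "SGVsbG8sIHdvcmxkIQ=="
    assert deserializer.deserialize(payload) == "Hello, world!"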
diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/deserializer.py b/aws_lambda_powertools/utilities/kafka/deserializer/deserializer.py new file mode 100644 index 00000000000..81c34be3aa5 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/deserializer/deserializer.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import hashlib +from typing import TYPE_CHECKING, Any + +from aws_lambda_powertools.utilities.kafka.deserializer.default import DefaultDeserializer +from aws_lambda_powertools.utilities.kafka.deserializer.json import JsonDeserializer + +if TYPE_CHECKING: + from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase + +# Cache for deserializers +_deserializer_cache: dict[str, DeserializerBase] = {} + + +def _get_cache_key(schema_type: str | object, schema_value: Any) -> str: + if schema_value is None: + return str(schema_type) + + if isinstance(schema_value, str): + # For string schemas like Avro, hash the content + schema_hash = hashlib.md5(schema_value.encode("utf-8"), usedforsecurity=False).hexdigest() + else: + # For objects like Protobuf, use the object id + schema_hash = str(id(schema_value)) + + return f"{schema_type}_{schema_hash}" + + +def get_deserializer(schema_type: str | object, schema_value: Any) -> DeserializerBase: + """ + Factory function to get the appropriate deserializer based on schema type. + + This function creates and returns a deserializer instance that corresponds to the + specified schema type. It handles lazy imports for optional dependencies. + + Parameters + ---------- + schema_type : str + The type of schema to use for deserialization. + Supported values are: "AVRO", "PROTOBUF", "JSON", or any other value for no-op. + schema_value : Any + The schema definition to use for deserialization. The format depends on the + schema_type: + - For "AVRO": A string containing the Avro schema definition + - For "PROTOBUF": An object containing the Protobuf schema definition + - For "JSON": Not used (can be None) + - For other types: Not used (can be None) + + Returns + ------- + DeserializerBase + An instance of a deserializer that implements the DeserializerBase interface. + + Examples + -------- + >>> # Get an Avro deserializer + >>> avro_schema = ''' + ... { + ... "type": "record", + ... "name": "User", + ... "fields": [ + ... {"name": "name", "type": "string"}, + ... {"name": "age", "type": "int"} + ... ] + ... } + ... ''' + >>> deserializer = get_deserializer("AVRO", avro_schema) + >>> + >>> # Get a JSON deserializer + >>> json_deserializer = get_deserializer("JSON", None) + >>> + >>> # Get the default deserializer for raw data + >>> default_deserializer = get_deserializer("RAW", None) + """ + + # Generate a cache key based on schema type and value + cache_key = _get_cache_key(schema_type, schema_value) + + # Check if we already have this deserializer in cache + if cache_key in _deserializer_cache: + return _deserializer_cache[cache_key] + + deserializer: DeserializerBase + + if schema_type == "AVRO": + # Import here to avoid dependency if not used + from aws_lambda_powertools.utilities.kafka.deserializer.avro import AvroDeserializer + + deserializer = AvroDeserializer(schema_value) + elif schema_type == "PROTOBUF": + # Import here to avoid dependency if not used + from aws_lambda_powertools.utilities.kafka.deserializer.protobuf import ProtobufDeserializer + + deserializer = ProtobufDeserializer(schema_value) + elif schema_type == "JSON": + deserializer = JsonDeserializer() + + else: + # Fall back to the default deserializer (base64 decode + UTF-8 decode) + deserializer = DefaultDeserializer() + + # Store in cache for future use + _deserializer_cache[cache_key] = deserializer + + return deserializer diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/json.py b/aws_lambda_powertools/utilities/kafka/deserializer/json.py new file mode 100644 index 00000000000..afd8effd489 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/deserializer/json.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import base64 +import json + +from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase +from aws_lambda_powertools.utilities.kafka.exceptions import KafkaConsumerDeserializationError + + +class JsonDeserializer(DeserializerBase): + """ + Deserializer for JSON formatted data. + + This class provides functionality to deserialize JSON data from bytes or string + into Python dictionaries. + """ + + def deserialize(self, data: bytes | str) -> dict: + """ + Deserialize JSON data to a Python dictionary. + + Parameters + ---------- + data : bytes or str + The base64-encoded JSON data to deserialize. Both bytes and str inputs + are base64-decoded before the JSON is parsed. + + Returns + ------- + dict + Deserialized data as a dictionary. + + Raises + ------ + KafkaConsumerDeserializationError + When the data cannot be deserialized as valid JSON. + + Examples + -------- + >>> deserializer = JsonDeserializer() + >>> json_data = "eyJrZXkiOiAidmFsdWUiLCAibnVtYmVyIjogMTIzfQ=="  # base64 of '{"key": "value", "number": 123}' + >>> try: + ... result = deserializer.deserialize(json_data) + ... print(result["key"]) # Output: value + ... except KafkaConsumerDeserializationError as e: + ... 
print(f"Failed to deserialize: {e}") + """ + try: + return json.loads(base64.b64decode(data).decode("utf-8")) + except Exception as e: + raise KafkaConsumerDeserializationError( + f"Error trying to deserialize json data - {type(e).__name__}: {str(e)}", + ) from e diff --git a/aws_lambda_powertools/utilities/kafka/deserializer/protobuf.py b/aws_lambda_powertools/utilities/kafka/deserializer/protobuf.py new file mode 100644 index 00000000000..f4e02b8c565 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/deserializer/protobuf.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from typing import Any + +from google.protobuf.json_format import MessageToDict + +from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase +from aws_lambda_powertools.utilities.kafka.exceptions import ( + KafkaConsumerDeserializationError, +) + + +class ProtobufDeserializer(DeserializerBase): + """ + Deserializer for Protocol Buffer formatted data. + + This class provides functionality to deserialize Protocol Buffer binary data + into Python dictionaries using the provided Protocol Buffer message class. + """ + + def __init__(self, message_class: Any): + self.message_class = message_class + + def deserialize(self, data: bytes | str) -> dict: + """ + Deserialize Protocol Buffer binary data to a Python dictionary. + + Parameters + ---------- + data : bytes or str + The Protocol Buffer binary data to deserialize. If provided as a string, + it's assumed to be base64-encoded and will be decoded first. + + Returns + ------- + dict + Deserialized data as a dictionary with field names preserved from the + Protocol Buffer definition. + + Raises + ------ + KafkaConsumerDeserializationError + When the data cannot be deserialized according to the message class, + typically due to data format incompatibility or incorrect message class. + + Example + -------- + >>> # Assuming proper protobuf setup + >>> deserializer = ProtobufDeserializer(my_proto_module.MyMessage) + >>> proto_data = b'...' # binary protobuf data + >>> try: + ... result = deserializer.deserialize(proto_data) + ... # Process the deserialized dictionary + ... except KafkaConsumerDeserializationError as e: + ... print(f"Failed to deserialize: {e}") + """ + try: + value = self._decode_input(data) + message = self.message_class() + message.ParseFromString(value) + return MessageToDict(message, preserving_proto_field_name=True) + except Exception as e: + raise KafkaConsumerDeserializationError( + f"Error trying to deserialize protobuf data - {type(e).__name__}: {str(e)}", + ) from e diff --git a/aws_lambda_powertools/utilities/kafka/exceptions.py b/aws_lambda_powertools/utilities/kafka/exceptions.py new file mode 100644 index 00000000000..c8b5ee810a2 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/exceptions.py @@ -0,0 +1,22 @@ +class KafkaConsumerAvroSchemaParserError(Exception): + """ + Error raised when parsing Avro schema definition fails. + """ + + +class KafkaConsumerDeserializationError(Exception): + """ + Error raised when message deserialization fails. + """ + + +class KafkaConsumerMissingSchemaError(Exception): + """ + Error raised when a required schema is not provided. + """ + + +class KafkaConsumerOutputSerializerError(Exception): + """ + Error raised when output serializer fails. 
+ """ diff --git a/aws_lambda_powertools/utilities/kafka/kafka_consumer.py b/aws_lambda_powertools/utilities/kafka/kafka_consumer.py new file mode 100644 index 00000000000..b4bacea545e --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/kafka_consumer.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.utilities.kafka.consumer_records import ConsumerRecords + +if TYPE_CHECKING: + from collections.abc import Callable + + from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + from aws_lambda_powertools.utilities.typing import LambdaContext + + +@lambda_handler_decorator +def kafka_consumer( + handler: Callable[[Any, LambdaContext], Any], + event: dict[str, Any], + context: LambdaContext, + schema_config: SchemaConfig | None = None, +): + """ + Decorator for processing Kafka consumer records in AWS Lambda functions. + + This decorator transforms the raw Lambda event into a ConsumerRecords object, + making it easier to process Kafka messages with optional schema validation + and deserialization. + + Parameters + ---------- + handler : Callable[[Any, LambdaContext], Any] + The Lambda handler function being decorated. + event : dict[str, Any] + The Lambda event containing Kafka records. + context : LambdaContext + The Lambda context object. + schema_config : SchemaConfig, optional + Schema configuration for deserializing Kafka records. + Must be an instance of SchemaConfig. + + Returns + ------- + Any + The return value from the handler function. + + Examples + -------- + >>> from aws_lambda_powertools.utilities.kafka import kafka_consumer, SchemaConfig + >>> + >>> # With schema validation using SchemaConfig + >>> schema_config = SchemaConfig(value_schema_type="JSON") + >>> + >>> @kafka_consumer(schema_config=schema_config) + >>> def handler_with_schema(records, context): + >>> for record in records: + >>> # record.value will be automatically deserialized according to schema_config + >>> process_message(record.value) + >>> return {"statusCode": 200} + """ + return handler(ConsumerRecords(event, schema_config), context) diff --git a/aws_lambda_powertools/utilities/kafka/schema_config.py b/aws_lambda_powertools/utilities/kafka/schema_config.py new file mode 100644 index 00000000000..70efea3d43c --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/schema_config.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from typing import Any, Literal + +from aws_lambda_powertools.utilities.kafka.exceptions import KafkaConsumerMissingSchemaError + + +class SchemaConfig: + """ + Configuration for schema management in Kafka consumers. + + This class handles schema configuration for both keys and values in Kafka records, + supporting AVRO, PROTOBUF, and JSON schema types. + + Parameters + ---------- + value_schema_type : {'AVRO', 'PROTOBUF', 'JSON', None}, default=None + Schema type for message values. + value_schema : str, optional + Schema definition for message values. Required when value_schema_type is 'AVRO' or 'PROTOBUF'. + value_output_serializer : Any, optional + Custom output serializer for message values. Supports Pydantic classes, Dataclasses and Custom Class + key_schema_type : {'AVRO', 'PROTOBUF', 'JSON', None}, default=None + Schema type for message keys. + key_schema : str, optional + Schema definition for message keys. Required when key_schema_type is 'AVRO' or 'PROTOBUF'. 
diff --git a/aws_lambda_powertools/utilities/kafka/schema_config.py b/aws_lambda_powertools/utilities/kafka/schema_config.py new file mode 100644 index 00000000000..70efea3d43c --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/schema_config.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from typing import Any, Literal + +from aws_lambda_powertools.utilities.kafka.exceptions import KafkaConsumerMissingSchemaError + + +class SchemaConfig: + """ + Configuration for schema management in Kafka consumers. + + This class handles schema configuration for both keys and values in Kafka records, + supporting AVRO, PROTOBUF, and JSON schema types. + + Parameters + ---------- + value_schema_type : {'AVRO', 'PROTOBUF', 'JSON', None}, default=None + Schema type for message values. + value_schema : str, optional + Schema definition for message values. Required when value_schema_type is 'AVRO' or 'PROTOBUF'. + value_output_serializer : Any, optional + Custom output serializer for message values. Supports Pydantic models, dataclasses, and custom classes. + key_schema_type : {'AVRO', 'PROTOBUF', 'JSON', None}, default=None + Schema type for message keys. + key_schema : str, optional + Schema definition for message keys. Required when key_schema_type is 'AVRO' or 'PROTOBUF'. + key_output_serializer : Any, optional + Custom output serializer for message keys. Supports Pydantic models, dataclasses, and custom classes. + + Raises + ------ + KafkaConsumerMissingSchemaError + When schema_type is set to 'AVRO' or 'PROTOBUF' but the corresponding schema + definition is not provided. + + Examples + -------- + >>> # Configure with AVRO schema for values + >>> avro_schema = ''' + ... { + ... "type": "record", + ... "name": "User", + ... "fields": [ + ... {"name": "name", "type": "string"}, + ... {"name": "age", "type": "int"} + ... ] + ... } + ... ''' + >>> config = SchemaConfig(value_schema_type="AVRO", value_schema=avro_schema) + + >>> # Configure with JSON schema for both keys and values + >>> config = SchemaConfig( + ... value_schema_type="JSON", + ... key_schema_type="JSON" + ... ) + """ + + def __init__( + self, + value_schema_type: Literal["AVRO", "PROTOBUF", "JSON"] | None = None, + value_schema: str | None = None, + value_output_serializer: Any | None = None, + key_schema_type: Literal["AVRO", "PROTOBUF", "JSON"] | None = None, + key_schema: str | None = None, + key_output_serializer: Any | None = None, + ): + # Validate schema requirements + self._validate_schema_requirements(value_schema_type, value_schema, "value") + self._validate_schema_requirements(key_schema_type, key_schema, "key") + + self.value_schema_type = value_schema_type + self.value_schema = value_schema + self.value_output_serializer = value_output_serializer + self.key_schema_type = key_schema_type + self.key_schema = key_schema + self.key_output_serializer = key_output_serializer + + def _validate_schema_requirements(self, schema_type: str | None, schema: str | None, prefix: str) -> None: + """Validate that schema is provided when required by schema_type.""" + if schema_type in ["AVRO", "PROTOBUF"] and schema is None: + raise KafkaConsumerMissingSchemaError( + f"{prefix}_schema must be provided when {prefix}_schema_type is {schema_type}", + ) diff --git a/aws_lambda_powertools/utilities/kafka/serialization/__init__.py b/aws_lambda_powertools/utilities/kafka/serialization/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/aws_lambda_powertools/utilities/kafka/serialization/base.py b/aws_lambda_powertools/utilities/kafka/serialization/base.py new file mode 100644 index 00000000000..3ef422d74b7 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/serialization/base.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from collections.abc import Callable + + from aws_lambda_powertools.utilities.kafka.serialization.types import T + + +class OutputSerializerBase(ABC): + """ + Abstract base class for output serializers. + + This class defines the interface for serializers that transform dictionary data + into specific output formats or class instances. + + Methods + ------- + serialize(data, output) + Abstract method that must be implemented by subclasses to serialize data. + + Examples + -------- + >>> class MyOutputSerializer(OutputSerializerBase): + ... def serialize(self, data: dict[str, Any], output=None): + ... if output: + ... # Convert dictionary to class instance + ... return output(**data) + ... 
return data # Return as is if no output class provided """ + + @abstractmethod + def serialize(self, data: dict[str, Any], output: type[T] | Callable | None = None) -> T | dict[str, Any]: + """ + Serialize dictionary data into a specific output format or class instance. + + This abstract method must be implemented by subclasses to provide + specific serialization logic. + + Parameters + ---------- + data : dict[str, Any] + The dictionary data to serialize. + output : type[T], Callable, or None, optional + Optional class type or callable to convert the dictionary into. If provided, + the method should return an instance of this class (or the callable's result). + + Returns + ------- + T or dict[str, Any] + An instance of output if provided, otherwise a processed dictionary. + The generic type T represents the type of the output. + """ + raise NotImplementedError("Subclasses must implement this method") diff --git a/aws_lambda_powertools/utilities/kafka/serialization/custom_dict.py b/aws_lambda_powertools/utilities/kafka/serialization/custom_dict.py new file mode 100644 index 00000000000..b644e5f9b68 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/serialization/custom_dict.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from aws_lambda_powertools.utilities.kafka.serialization.base import OutputSerializerBase + +if TYPE_CHECKING: + from collections.abc import Callable + + from aws_lambda_powertools.utilities.kafka.serialization.types import T + + +class CustomDictOutputSerializer(OutputSerializerBase): + """ + Serializer that allows custom dict transformations. + + This serializer takes dictionary data and either returns it as-is or passes it + through a custom transformation function provided as the output parameter. + """ + + def serialize(self, data: dict[str, Any], output: type[T] | Callable | None = None) -> T | dict[str, Any]: + return data if output is None else output(data) # type: ignore[call-arg]
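A tiny illustration of the callable path above (the transform function is made up):

    from aws_lambda_powertools.utilities.kafka.serialization.custom_dict import CustomDictOutputSerializer

    def upper_keys(data: dict) -> dict:
        # Hypothetical transform: uppercase the top-level keys
        return {key.upper(): value for key, value in data.items()}

    serializer = CustomDictOutputSerializer()
    print(serializer.serialize({"name": "Ana"}))                     # {'name': 'Ana'}
    print(serializer.serialize({"name": "Ana"}, output=upper_keys))  # {'NAME': 'Ana'}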
+ """ + + def serialize(self, data: dict[str, Any], output: type[T] | Callable | None = None) -> T | dict[str, Any]: + if not is_dataclass(output): # pragma: no cover + raise ValueError("Output class must be a dataclass") + + return cast(T, output(**data)) diff --git a/aws_lambda_powertools/utilities/kafka/serialization/pydantic.py b/aws_lambda_powertools/utilities/kafka/serialization/pydantic.py new file mode 100644 index 00000000000..63484644ba3 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/serialization/pydantic.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from pydantic import TypeAdapter + +from aws_lambda_powertools.utilities.kafka.serialization.base import OutputSerializerBase + +if TYPE_CHECKING: + from collections.abc import Callable + + from aws_lambda_powertools.utilities.kafka.serialization.types import T + + +class PydanticOutputSerializer(OutputSerializerBase): + """ + Serializer that converts dictionary data into Pydantic model instances. + + This serializer takes dictionary data and validates/converts it into an instance + of the specified Pydantic model type using Pydantic's TypeAdapter. + """ + + def serialize(self, data: dict[str, Any], output: type[T] | Callable | None = None) -> T | dict[str, Any]: + # Use TypeAdapter for better support of Union types and other complex types + adapter: TypeAdapter = TypeAdapter(output) + return adapter.validate_python(data) diff --git a/aws_lambda_powertools/utilities/kafka/serialization/serialization.py b/aws_lambda_powertools/utilities/kafka/serialization/serialization.py new file mode 100644 index 00000000000..2a6bc45dc29 --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/serialization/serialization.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import sys +from dataclasses import is_dataclass +from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, get_args, get_origin + +# Conditionally import or define UnionType based on Python version +if sys.version_info >= (3, 10): + from types import UnionType # Available in Python 3.10+ +else: + UnionType = Union # Fallback for Python 3.9 + +from aws_lambda_powertools.utilities.kafka.serialization.custom_dict import CustomDictOutputSerializer +from aws_lambda_powertools.utilities.kafka.serialization.dataclass import DataclassOutputSerializer + +if TYPE_CHECKING: + from collections.abc import Callable + + from aws_lambda_powertools.utilities.kafka.serialization.types import T + + +def _get_output_serializer(output: type[T] | Callable | None = None) -> Any: + """ + Returns the appropriate serializer for the given output class. + Uses lazy imports to avoid unnecessary dependencies. 
+ """ + # Check if it's a dataclass + if is_dataclass(output): + return DataclassOutputSerializer() + + if _is_pydantic_model(output): + from aws_lambda_powertools.utilities.kafka.serialization.pydantic import PydanticOutputSerializer + + return PydanticOutputSerializer() + + # Default to custom serializer + return CustomDictOutputSerializer() + + +def _is_pydantic_model(obj: Any) -> bool: + if isinstance(obj, type): + # Check for Pydantic model attributes without direct import + has_model_fields = getattr(obj, "model_fields", None) is not None + has_model_validate = callable(getattr(obj, "model_validate", None)) + return has_model_fields and has_model_validate + + origin = get_origin(obj) + if origin in (Union, Optional, Annotated) or (sys.version_info >= (3, 10) and origin in (Union, UnionType)): + # Check if any element in the Union is a Pydantic model + for arg in get_args(obj): + if _is_pydantic_model(arg): + return True + + return False + + +def serialize_to_output_type( + data: object | dict[str, Any], + output: type[T] | Callable | None = None, +) -> T | dict[str, Any]: + """ + Helper function to directly serialize data to the specified output class + """ + serializer = _get_output_serializer(output) + return serializer.serialize(data, output) diff --git a/aws_lambda_powertools/utilities/kafka/serialization/types.py b/aws_lambda_powertools/utilities/kafka/serialization/types.py new file mode 100644 index 00000000000..b186d5ad68b --- /dev/null +++ b/aws_lambda_powertools/utilities/kafka/serialization/types.py @@ -0,0 +1,3 @@ +from typing import TypeVar + +T = TypeVar("T") diff --git a/aws_lambda_powertools/utilities/parser/models/kafka.py b/aws_lambda_powertools/utilities/parser/models/kafka.py index c365c51c63c..717d47ff26c 100644 --- a/aws_lambda_powertools/utilities/parser/models/kafka.py +++ b/aws_lambda_powertools/utilities/parser/models/kafka.py @@ -8,6 +8,11 @@ SERVERS_DELIMITER = "," +class KafkaRecordSchemaMetadata(BaseModel): + dataFormat: str + schemaId: str + + class KafkaRecordModel(BaseModel): topic: str partition: int @@ -17,6 +22,8 @@ class KafkaRecordModel(BaseModel): key: Optional[bytes] = None value: Union[str, Type[BaseModel]] headers: List[Dict[str, bytes]] + keySchemaMetadata: Optional[KafkaRecordSchemaMetadata] = None + valueSchemaMetadata: Optional[KafkaRecordSchemaMetadata] = None # key is optional; only decode if not None @field_validator("key", mode="before") diff --git a/noxfile.py b/noxfile.py index 13f1126e6fd..9a648cf37fb 100644 --- a/noxfile.py +++ b/noxfile.py @@ -67,6 +67,7 @@ def test_with_only_required_packages(session: nox.Session): f"{PREFIX_TESTS_UNIT}/data_classes/required_dependencies/", f"{PREFIX_TESTS_FUNCTIONAL}/event_handler/required_dependencies/", f"{PREFIX_TESTS_FUNCTIONAL}/batch/required_dependencies/", + f"{PREFIX_TESTS_FUNCTIONAL}/kafka_consumer/required_dependencies/", ], ) @@ -155,11 +156,13 @@ def test_with_pydantic_required_package(session: nox.Session): # Event Handler OpenAPI # Parser # Batch Processor with pydantic integration + # Kafka Consumer with Output to Pydantic build_and_run_test( session, folders=[ f"{PREFIX_TESTS_FUNCTIONAL}/event_handler/_pydantic/", f"{PREFIX_TESTS_FUNCTIONAL}/batch/_pydantic/", + f"{PREFIX_TESTS_FUNCTIONAL}/kafka_consumer/_pydantic/", f"{PREFIX_TESTS_UNIT}/parser/_pydantic/", f"{PREFIX_TESTS_UNIT}/event_handler/_pydantic/", ], @@ -196,3 +199,29 @@ def test_with_redis_and_boto3_sdk_as_required_package(session: nox.Session): ], extras="redis,aws-sdk", ) + + +@nox.session() +def 
test_with_avro_required_package(session: nox.Session): + """Tests that depend only on the Avro dependency""" + # Kafka Consumer with AVRO + build_and_run_test( + session, + folders=[ + f"{PREFIX_TESTS_FUNCTIONAL}/kafka_consumer/_avro/", + ], + extras="kafka-consumer-avro", + ) + + +@nox.session() +def test_with_protobuf_required_package(session: nox.Session): + """Tests that depend only on the Protobuf dependency""" + # Kafka Consumer with PROTOBUF + build_and_run_test( + session, + folders=[ + f"{PREFIX_TESTS_FUNCTIONAL}/kafka_consumer/_protobuf/", + ], + extras="kafka-consumer-protobuf", + ) diff --git a/poetry.lock b/poetry.lock index f67da33ff05..64d56c9c0b2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -11,7 +11,7 @@ files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [[package]] name = "anyio" @@ -62,28 +62,44 @@ files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] -markers = {main = "extra == \"redis\" and python_full_version < \"3.11.3\" or python_version <= \"3.10\" and (extra == \"redis\" or extra == \"valkey\")", dev = "python_full_version < \"3.11.3\""} +markers = {main = "python_version < \"3.11\" and (extra == \"redis\" or extra == \"valkey\") or extra == \"redis\" and python_full_version < \"3.11.3\"", dev = "python_full_version < \"3.11.3\""} [[package]] name = "attrs" -version = "25.3.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" groups = ["main", "dev"] files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] markers = {main = "extra == \"all\" or extra == \"datamasking\""} [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= 
\"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6) ; platform_python_implementation == \"CPython\" and python_version >= \"3.8\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.8\""] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "avro" +version = "1.12.0" +description = "Avro is a serialization and RPC framework." 
+optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "avro-1.12.0-py2.py3-none-any.whl", hash = "sha256:9a255c72e1837341dd4f6ff57b2b6f68c0f0cecdef62dd04962e10fd33bec05b"}, + {file = "avro-1.12.0.tar.gz", hash = "sha256:cad9c53b23ceed699c7af6bddced42e2c572fd6b408c257a7d4fc4e8cf2e2d6b"}, +] + +[package.extras] +snappy = ["python-snappy"] +zstandard = ["zstandard"] [[package]] name = "aws-cdk-asset-awscli-v1" @@ -218,14 +234,14 @@ typeguard = ">=2.13.3,<4.3.0" [[package]] name = "aws-cdk-cloud-assembly-schema" -version = "44.1.0" +version = "44.2.0" description = "Schema for the protocol between CDK framework and CDK CLI" optional = false python-versions = "~=3.9" groups = ["dev"] files = [ - {file = "aws_cdk_cloud_assembly_schema-44.1.0-py3-none-any.whl", hash = "sha256:1fa97d9c64ce9108add436547ad20955295d59a616cec0b0f11d6b1d1eb08ed7"}, - {file = "aws_cdk_cloud_assembly_schema-44.1.0.tar.gz", hash = "sha256:03dccc3d2f39ea79ab4a95dda08e1559596a2a3b9bf40ae650877b36126d1428"}, + {file = "aws_cdk_cloud_assembly_schema-44.2.0-py3-none-any.whl", hash = "sha256:89d1aea12a15475f5240ee8eb73c304678e8ef51d90990978f0e58f5295bcf3f"}, + {file = "aws_cdk_cloud_assembly_schema-44.2.0.tar.gz", hash = "sha256:38a309a38a111d62ac32683f9f79b3d2e7cf4d78076ed9f3ec9788b273cd9b41"}, ] [package.dependencies] @@ -293,14 +309,14 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.97.0" +version = "1.98.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = "!=4.0,<=4.0,>=3.8" groups = ["dev"] files = [ - {file = "aws_sam_translator-1.97.0-py3-none-any.whl", hash = "sha256:305701ab49eb546fd720b3682e99cadcd43539f4ddb8395ea03c90c9e14d3325"}, - {file = "aws_sam_translator-1.97.0.tar.gz", hash = "sha256:6f7ec94de0a9b220dd1f1a0bf7e2df95dd44a85592301ee830744da2f209b7e6"}, + {file = "aws_sam_translator-1.98.0-py3-none-any.whl", hash = "sha256:65e7afffdda2e6f715debc251ddae5deba079af41db5dd9ecd370d658b9d728e"}, + {file = "aws_sam_translator-1.98.0.tar.gz", hash = "sha256:fe9fdf51b593aca4cde29f555e272b00d90662315c8078e9f5f3448dd962c66b"}, ] [package.dependencies] @@ -319,7 +335,7 @@ description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"all\" or extra == \"tracer\"" +markers = "extra == \"tracer\" or extra == \"all\"" files = [ {file = "aws_xray_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:cfbe6feea3d26613a2a869d14c9246a844285c97087ad8f296f901633554ad94"}, {file = "aws_xray_sdk-2.14.0.tar.gz", hash = "sha256:aab843c331af9ab9ba5cefb3a303832a19db186140894a523edafc024cc0493c"}, @@ -413,34 +429,34 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.38.18" +version = "1.38.34" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "boto3-1.38.18-py3-none-any.whl", hash = "sha256:38ecb477ba9fc4edcf97133bf1fe33261ebec6c58d59982abff3cea167624211"}, - {file = "boto3-1.38.18.tar.gz", hash = "sha256:bd723bfbc109bdc63e017ead74dd22f2cf8a7515e24e730870b8a70af823e626"}, + {file = "boto3-1.38.34-py3-none-any.whl", hash = "sha256:7d9409be63a11c1684427a9b06d6820ec72785cb275b56affe437f3709a80eb3"}, + {file = "boto3-1.38.34.tar.gz", hash = "sha256:25e76b9fec8db8e21adaf84df0de5c58fa779be121bc327e07e920c7c0870394"}, ] [package.dependencies] -botocore = ">=1.38.18,<1.39.0" +botocore = ">=1.38.34,<1.39.0" 
jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.12.0,<0.13.0" +s3transfer = ">=0.13.0,<0.14.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.38.35" -description = "Type annotations for boto3 1.38.35 generated with mypy-boto3-builder 8.11.0" +version = "1.38.34" +description = "Type annotations for boto3 1.38.34 generated with mypy-boto3-builder 8.11.0" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "boto3_stubs-1.38.35-py3-none-any.whl", hash = "sha256:633460a8df4d1fa868aeb3bb64a1ea38a97e3d8614896a07f7da4601812970e9"}, - {file = "boto3_stubs-1.38.35.tar.gz", hash = "sha256:610dabe7924725daf452c2544ec1e2b32486a149b85d505126de86408e698520"}, + {file = "boto3_stubs-1.38.34-py3-none-any.whl", hash = "sha256:56ba625dc0b8673eedb2c93f29c02db2686a599820a54697f723c08fae0b8693"}, + {file = "boto3_stubs-1.38.34.tar.gz", hash = "sha256:719d75a3993a546bfe5e567a2e5d6f9f4d218846776d5bc9417371a980595008"}, ] [package.dependencies] @@ -507,7 +523,7 @@ bedrock-data-automation-runtime = ["mypy-boto3-bedrock-data-automation-runtime ( bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.38.0,<1.39.0)"] billing = ["mypy-boto3-billing (>=1.38.0,<1.39.0)"] billingconductor = ["mypy-boto3-billingconductor (>=1.38.0,<1.39.0)"] -boto3 = ["boto3 (==1.38.35)"] +boto3 = ["boto3 (==1.38.34)"] braket = ["mypy-boto3-braket (>=1.38.0,<1.39.0)"] budgets = ["mypy-boto3-budgets (>=1.38.0,<1.39.0)"] ce = ["mypy-boto3-ce (>=1.38.0,<1.39.0)"] @@ -871,22 +887,22 @@ xray = ["mypy-boto3-xray (>=1.38.0,<1.39.0)"] [[package]] name = "botocore" -version = "1.38.18" +version = "1.38.34" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "botocore-1.38.18-py3-none-any.whl", hash = "sha256:0b5ddf195f15218f30ec63d8aba9e55cf60af2984c068276b9fd206059043310"}, - {file = "botocore-1.38.18.tar.gz", hash = "sha256:ae4c97383e797e9648f8721bb0217fd9efd228e9fbc661d83dc0959be083ec5c"}, + {file = "botocore-1.38.34-py3-none-any.whl", hash = "sha256:95ff2c4819498e94b321c9b5ac65d02267df93ff7ce7617323b19f19ea7cb545"}, + {file = "botocore-1.38.34.tar.gz", hash = "sha256:a105f4d941f329aa72c43ddf42371ec4bee50ab3619fc1ef35d0005520219612"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -894,14 +910,14 @@ crt = ["awscrt (==0.23.8)"] [[package]] name = "botocore-stubs" -version = "1.38.18" +version = "1.38.30" description = "Type annotations and code completion for botocore" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "botocore_stubs-1.38.18-py3-none-any.whl", hash = "sha256:76ddec52faf091ea8c1330920eef6679d2036c8fcbc7ef21d265ced9d655b5f7"}, - {file = "botocore_stubs-1.38.18.tar.gz", hash = "sha256:95ce91adffa214ead5eb6baaf281c9926c52b20ad9b208d54f3b1833a1eec6a4"}, + {file = "botocore_stubs-1.38.30-py3-none-any.whl", hash = "sha256:2efb8bdf36504aff596c670d875d8f7dd15205277c15c4cea54afdba8200c266"}, + {file = "botocore_stubs-1.38.30.tar.gz", hash = "sha256:291d7bf39a316c00a8a55b7255489b02c0cea1a343482e7784e8d1e235bae995"}, ] [package.dependencies] @@ -954,14 +970,14 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "cdk-nag" -version = 
"2.35.101" +version = "2.36.15" description = "Check CDK v2 applications for best practices using a combination on available rule packs." optional = false python-versions = "~=3.9" groups = ["dev"] files = [ - {file = "cdk_nag-2.35.101-py3-none-any.whl", hash = "sha256:530aa21d6b41a76eebeaf21cb69600a527d5f11a235798d380231ec0d329e275"}, - {file = "cdk_nag-2.35.101.tar.gz", hash = "sha256:6c6432a79896517649fece9d1a278b587aeb0f7c3755eeaf4ada29d323985eee"}, + {file = "cdk_nag-2.36.15-py3-none-any.whl", hash = "sha256:a8f7471b5385820cfa3cf032765f78fa063c1f258039ffd25b660f267b34d5f1"}, + {file = "cdk_nag-2.36.15.tar.gz", hash = "sha256:012276b17bb9fc9705d332c91759cb3a124255d4692427bc81ae1c7503f85152"}, ] [package.dependencies] @@ -1220,7 +1236,7 @@ description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version < \"3.10\"" +markers = "python_version == \"3.9\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -1231,15 +1247,15 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "click" -version = "8.2.0" +version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["dev"] markers = "python_version >= \"3.10\"" files = [ - {file = "click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c"}, - {file = "click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d"}, + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, ] [package.dependencies] @@ -1411,7 +1427,7 @@ files = [ {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] -markers = {main = "python_version < \"3.10\" and (extra == \"all\" or extra == \"datamasking\")", dev = "python_version < \"3.10\""} +markers = {main = "python_version == \"3.9\" and (extra == \"all\" or extra == \"datamasking\")", dev = "python_version == \"3.9\""} [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} @@ -1428,49 +1444,49 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "cryptography" -version = "45.0.2" +version = "45.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["main", "dev"] files = [ - {file = "cryptography-45.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:61a8b1bbddd9332917485b2453d1de49f142e6334ce1d97b7916d5a85d179c84"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cc31c66411e14dd70e2f384a9204a859dc25b05e1f303df0f5326691061b839"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:463096533acd5097f8751115bc600b0b64620c4aafcac10c6d0041e6e68f88fe"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:cdafb86eb673c3211accffbffdb3cdffa3aaafacd14819e0898d23696d18e4d3"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:05c2385b1f5c89a17df19900cfb1345115a77168f5ed44bdf6fd3de1ce5cc65b"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e9e4bdcd70216b08801e267c0b563316b787f957a46e215249921f99288456f9"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b2de529027579e43b6dc1f805f467b102fb7d13c1e54c334f1403ee2b37d0059"}, - {file = "cryptography-45.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10d68763892a7b19c22508ab57799c4423c7c8cd61d7eee4c5a6a55a46511949"}, - {file = "cryptography-45.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2a90ce2f0f5b695e4785ac07c19a58244092f3c85d57db6d8eb1a2b26d2aad6"}, - {file = "cryptography-45.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:59c0c8f043dd376bbd9d4f636223836aed50431af4c5a467ed9bf61520294627"}, - {file = "cryptography-45.0.2-cp311-abi3-win32.whl", hash = "sha256:80303ee6a02ef38c4253160446cbeb5c400c07e01d4ddbd4ff722a89b736d95a"}, - {file = "cryptography-45.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:7429936146063bd1b2cfc54f0e04016b90ee9b1c908a7bed0800049cbace70eb"}, - {file = "cryptography-45.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:e86c8d54cd19a13e9081898b3c24351683fd39d726ecf8e774aaa9d8d96f5f3a"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e328357b6bbf79928363dbf13f4635b7aac0306afb7e5ad24d21d0c5761c3253"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49af56491473231159c98c2c26f1a8f3799a60e5cf0e872d00745b858ddac9d2"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f169469d04a23282de9d0be349499cb6683b6ff1b68901210faacac9b0c24b7d"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9cfd1399064b13043082c660ddd97a0358e41c8b0dc7b77c1243e013d305c344"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f8084b7ca3ce1b8d38bdfe33c48116edf9a08b4d056ef4a96dceaa36d8d965"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2cb03a944a1a412724d15a7c051d50e63a868031f26b6a312f2016965b661942"}, - {file = "cryptography-45.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a9727a21957d3327cf6b7eb5ffc9e4b663909a25fea158e3fcbc49d4cdd7881b"}, - {file = "cryptography-45.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ddb8d01aa900b741d6b7cc585a97aff787175f160ab975e21f880e89d810781a"}, - {file = "cryptography-45.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:c0c000c1a09f069632d8a9eb3b610ac029fcc682f1d69b758e625d6ee713f4ed"}, - {file = "cryptography-45.0.2-cp37-abi3-win32.whl", hash = "sha256:08281de408e7eb71ba3cd5098709a356bfdf65eebd7ee7633c3610f0aa80d79b"}, - {file = "cryptography-45.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:48caa55c528617fa6db1a9c3bf2e37ccb31b73e098ac2b71408d1f2db551dde4"}, - {file = "cryptography-45.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8ec324711596fbf21837d3a5db543937dd84597d364769b46e0102250023f77"}, - {file = "cryptography-45.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:965611880c3fa8e504b7458484c0697e00ae6e937279cd6734fdaa2bc954dc49"}, - {file = "cryptography-45.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d891942592789fa0ab71b502550bbadb12f540d7413d7d7c4cef4b02af0f5bc6"}, - {file = "cryptography-45.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b19f4b28dd2ef2e6d600307fee656c00825a2980c4356a7080bd758d633c3a6f"}, - {file = "cryptography-45.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7c73968fbb7698a4c5d6160859db560d3aac160edde89c751edd5a8bc6560c88"}, - {file = "cryptography-45.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:501de1296b2041dccf2115e3c7d4947430585601b251b140970ce255c5cfb985"}, - {file = "cryptography-45.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1655d3a76e3dedb683c982a6c3a2cbfae2d08f47a48ec5a3d58db52b3d29ea6f"}, - {file = "cryptography-45.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc7693573f16535428183de8fd27f0ca1ca37a51baa0b41dc5ed7b3d68fe80e2"}, - {file = "cryptography-45.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:614bca7c6ed0d8ad1dce683a6289afae1f880675b4090878a0136c3da16bc693"}, - {file = "cryptography-45.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:4142e20c29224cec63e9e32eb1e6014fb285fe39b7be66b3564ca978a3a8afe9"}, - {file = "cryptography-45.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9a900036b42f7324df7c7ad9569eb92ba0b613cf699160dd9c2154b24fd02f8e"}, - {file = "cryptography-45.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:057723b79752a142efbc609e90b0dff27b0361ccbee3bd48312d70f5cdf53b78"}, - {file = "cryptography-45.0.2.tar.gz", hash = "sha256:d784d57b958ffd07e9e226d17272f9af0c41572557604ca7554214def32c26bf"}, + {file = "cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b"}, + {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1"}, + {file = 
"cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999"}, + {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750"}, + {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2"}, + {file = "cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257"}, + {file = "cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8"}, + {file = "cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad"}, + {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6"}, + {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872"}, + {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4"}, + {file = "cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97"}, + {file = "cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a77c6fb8d76e9c9f99f2f3437c1a4ac287b34eaf40997cfab1e9bd2be175ac39"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7aad98a25ed8ac917fdd8a9c1e706e5a0956e06c498be1f713b61734333a4507"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3530382a43a0e524bc931f187fc69ef4c42828cf7d7f592f7f249f602b5a4ab0"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:6b613164cb8425e2f8db5849ffb84892e523bf6d26deb8f9bb76ae86181fa12b"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:96d4819e25bf3b685199b304a0029ce4a3caf98947ce8a066c9137cc78ad2c58"}, + {file = "cryptography-45.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b97737a3ffbea79eebb062eb0d67d72307195035332501722a9ca86bab9e3ab2"}, + {file = 
"cryptography-45.0.4-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862"}, + {file = "cryptography-45.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d"}, + {file = "cryptography-45.0.4.tar.gz", hash = "sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57"}, ] markers = {main = "python_version >= \"3.10\" and (extra == \"all\" or extra == \"datamasking\")", dev = "python_version >= \"3.10\""} @@ -1484,7 +1500,7 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8 pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==45.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -1525,79 +1541,81 @@ dev = ["botocore (>=1.34.0,<2.0.0)", "flake8 (>=5.0.4,<6.0.0)", "pytest (>=8.0.0 [[package]] name = "ddtrace" -version = "3.7.1" +version = "3.9.1" description = "Datadog APM client library" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "ddtrace-3.7.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:a5a6ac906ca2fc508a23ffabf6c1d1d52d5af64a78b4181f72e134e23274b40d"}, - {file = "ddtrace-3.7.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:e74eec82f34af210f0ee536807da3bd663d9395a90344472c6f79491282b740f"}, - {file = "ddtrace-3.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:440782392e680bac0595477606ea8da080aebf2e373a55cfbf4793be7009746a"}, - {file = "ddtrace-3.7.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:835cddec6026b118a2ecdec6b41be017a6232a46983e90651b44ec2552572b61"}, - {file = "ddtrace-3.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ede52783b4fe88a13291f449259bfdac22e93783d944f48141c76eb63484575"}, - {file = "ddtrace-3.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78247908d9f2381d866bdd521c8bf00ca925b1df86ea4c6ce52e42b18356ac3b"}, - {file = "ddtrace-3.7.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0290c209351f65de05a48d5714e7a6f55eb50e91d72f5228b4b46e22cef1ec01"}, - {file = "ddtrace-3.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c56e831a9463b75403c527232d1f4769b44f0b36fe5b82b07d0dbe7e666ebb8"}, - {file = "ddtrace-3.7.1-cp310-cp310-win32.whl", hash = "sha256:83da2f58e0056f092ec8306ef4bf97bcd1a9b07ca5fb8302656bd50e5ac08ba7"}, - 
{file = "ddtrace-3.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cda0574c23cc4a1d7b3da532107c2dd326e4b045aa76bd1ba16c524d39daf7dc"}, - {file = "ddtrace-3.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:adecedd674fd28f9c653f863bcdad2dd088d2c765f3b93c9de75285ae70b7138"}, - {file = "ddtrace-3.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:191f71d7b424e6c5177a50ebe2020e88e731de16b05a32235461dfa3a9b93f7c"}, - {file = "ddtrace-3.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc670d3b247df65a06827c6f355a211c95482544457a1ada15e4ef2b660497d2"}, - {file = "ddtrace-3.7.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87cd6da9c3ea15945d29c04afe5d7577c0990db2dc247e5526be12ebb029bc87"}, - {file = "ddtrace-3.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b7f05ba81a2c9d7a05d2963e70e5e703d04355545b82f5ac43bc616df282009"}, - {file = "ddtrace-3.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e3e9dbc9746d3dc0ecad8a6f49992298c4c1162ca0db0e31d694f3538e816aee"}, - {file = "ddtrace-3.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8e6e407f3aa54edb5343a617bf45e8af9daf2fdfb940e81e02d85c9fa3558069"}, - {file = "ddtrace-3.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf361ec99405e7207f10bc41f2cfb3d32d3ac02462d52d8c98f070c5aa421fc8"}, - {file = "ddtrace-3.7.1-cp311-cp311-win32.whl", hash = "sha256:053a96e8f739f3dd1914077262d1b0439e153d234602775954e8bfa1062ecec3"}, - {file = "ddtrace-3.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:58bbf43701291537ce940c76f836419aa03737ac16e9e45c74b557ea8831aaf4"}, - {file = "ddtrace-3.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:73636d80e0a2353adb548fea45a2c0198e57807142744220b8bd647b3ee37148"}, - {file = "ddtrace-3.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:46863d9c40a43e421e982e5bb875a1cdf047139c79356d0a665361856022e3e0"}, - {file = "ddtrace-3.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55ed0373d5fa2112058fa9ed74908272168f94b8a6d16e4df769ab7c129dc8fd"}, - {file = "ddtrace-3.7.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a877cfb0ad90732cca4347cf0e07495b1fc9ffd4d3e7f00872f8d62126511c20"}, - {file = "ddtrace-3.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:697dacf9a5be49def40ff582e9ad083fd029f8405888fc9e32e8d30bb2f988fd"}, - {file = "ddtrace-3.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5634e0ddd7e88273fbceff5baa60d704aeba858a30f8770d8269fb1076f8e469"}, - {file = "ddtrace-3.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cad481072a974c2c6bc0f11f9b7850be8d15e46ae9d4da2a7ecb3d54f8c0d3be"}, - {file = "ddtrace-3.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:71a05d04d3c255e65d812a6b7921ee48a6b97ae477be5129e763ef5c48236315"}, - {file = "ddtrace-3.7.1-cp312-cp312-win32.whl", hash = "sha256:8c6381afef235ba09e48bb07c625dc089611dc8a86ad3100a590640fa47581b8"}, - {file = "ddtrace-3.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:17dec3aa753a2c4e0cd976eed5c77f7535ccdffe1e97ed2f417c844cd2d06057"}, - {file = "ddtrace-3.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5097d29a6bc6473d4d3fc8b0067b8292ba078f4edb3fd706a20f8bbc4bbefeb6"}, - {file = "ddtrace-3.7.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f2eb834662985c0cce84eb6c56f67a2372a65eef21c18d23b6b9e95aba2758d8"}, - {file = "ddtrace-3.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a9fc203013fca2acf552abe45a6549c137fc9b25674de91515841e4322ec8d30"}, - {file = "ddtrace-3.7.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5e928a54367e04b38af06b2e8cb7cc4da41e9168fdada645e5daeceb6029335"}, - {file = "ddtrace-3.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b9571d38e556b83d336c5d4765300e2b85c91a3d3715bbec9940d2fe469d7c"}, - {file = "ddtrace-3.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0b7ce765ed10f424b7514e971272c90e87e3cd179cf31aa17722f3c4cd7378e8"}, - {file = "ddtrace-3.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b496f017a71cc0c973d437bbe4ae387d9b03867c773ef8025af0e178662dc484"}, - {file = "ddtrace-3.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:56e55e54f3cf40b98e98b3537a01a23695db5eb20ea7ad9e1e611a0b565fc121"}, - {file = "ddtrace-3.7.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:13947d244b1312c6a07264a4fa54fc6734400eacc325a8a412e29f872c3defea"}, - {file = "ddtrace-3.7.1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:214d327e67d35abeae052701c0819df66e3df6c4a2aa981d53b6dcdbc6e27f72"}, - {file = "ddtrace-3.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db5c4a9d5adde5fff58721e2d9432f93d49b4aa371a5d18fbb05479cfcd10053"}, - {file = "ddtrace-3.7.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b1ea7d12f78e4f713adcf18582658733f2929ba52bf31bc1e8b6d2e28f47762"}, - {file = "ddtrace-3.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc95e895e5493bbfb77a98e545d752668585289c6f0a49d02be4423c9b9b4571"}, - {file = "ddtrace-3.7.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:165d19b5a31a67f75c7fd766ffa774bd980d44908b9075a72037b7421921d444"}, - {file = "ddtrace-3.7.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb7da7133c26c8badf7938a09503cc7cd1228822eb6d332a5c2f692e070aa5a1"}, - {file = "ddtrace-3.7.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c87b9abb64f1025e8b4d2ccafbad7b13b524594c11e3b9e6bf7553e08c1cb9be"}, - {file = "ddtrace-3.7.1-cp38-cp38-win32.whl", hash = "sha256:06cd6ae8b2bce6c95d02e706fb80e1a90e454f7639721b24f97b0294122a3820"}, - {file = "ddtrace-3.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:ac1c29d85a86d8bace27c9c9ae6919fbdf7842819c418ede8e0f20c6f3776552"}, - {file = "ddtrace-3.7.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:cc9fec42dda62980636288575b0d963e0333c9bb818520f9c8e78483d315e117"}, - {file = "ddtrace-3.7.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:64145fa53d3f2026c5b9aa10db3ffdcd25168ec781bdad2b864dbe80277a9d38"}, - {file = "ddtrace-3.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248c6425c9ddae6a3fa32660b9ed6270de390ca49d0c3c80af7c20fffc2c3c9e"}, - {file = "ddtrace-3.7.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c34629ece7b3de84edc4ac4475c89bfc197f20f7b5e42b08964ee045d28767"}, - {file = "ddtrace-3.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:127a9e3e2d961da80120461f6031152630d3efa63990b81a654743f40b924324"}, - {file = "ddtrace-3.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8afcd435829726ba952d8d9cd21e198131a755b28711e32e12003acb7e0fbf57"}, - {file = "ddtrace-3.7.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:b8275250f95e67455886068b80cfa3fb14f0034bd56b90c1dcabccab91754107"}, - {file = "ddtrace-3.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9a23a98934c224772ba819ee04252af7bcbf2ff6dc285b2b39554ae55bff88fe"}, - 
{file = "ddtrace-3.7.1-cp39-cp39-win32.whl", hash = "sha256:8be74c75644316866fad5d362b61a0398a4f5eb4a4e428be6136296b725f375a"}, - {file = "ddtrace-3.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:3719c74c141868e1d27acaa7ee698af9cd17e0f2c6bbec90139051b4cb50e61c"}, - {file = "ddtrace-3.7.1.tar.gz", hash = "sha256:e0afdd325769980c2122c925db31ea5800aab6f0a504f1d0606015a6ec75805d"}, + {file = "ddtrace-3.9.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:36ee595bcf5bf7b7c2f9651d8faea6458d2f6011ea0eb554df73478022830776"}, + {file = "ddtrace-3.9.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:51e22a129ac16a2b86eda7bb8e89aeef8c1371daa83c683e1110ebb1df69f41e"}, + {file = "ddtrace-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27632122319854abe581bda73c2fbe53bd290bc0ee40b81aeb55f3114600dd03"}, + {file = "ddtrace-3.9.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e7ac3c4aa66b4bfdd610f7a3f14e05b2970b595bc6e73e9518f4154444239c4"}, + {file = "ddtrace-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35a616ccbd194128d5420afb412d21a864fc71bb634c81a6bd988b34a6df817c"}, + {file = "ddtrace-3.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:62d08eef95f00a13d09674ee4aa1cc32ed897710eba282a96f83f41389907ffd"}, + {file = "ddtrace-3.9.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:365a55c4228c59d127fe206cc74d31259c1d0dbbebd0bac054d876cf84317890"}, + {file = "ddtrace-3.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9f2bd2283b58f7e59b024b48fbf369942a178897688d4ba51d73078394ddb5f"}, + {file = "ddtrace-3.9.1-cp310-cp310-win32.whl", hash = "sha256:566ba3178f36b9bb448fd4389a0dec01ad0c4e9300b01da9ff4334afd6aa0a96"}, + {file = "ddtrace-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:eaecf7dbe9ad2fc6db208b55aa4d76a8c86d439b8a0785a1c437ee6399098600"}, + {file = "ddtrace-3.9.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:51c0a802977fd93e03997870c244c704ff4782229cc5acbb840778fc732e51e9"}, + {file = "ddtrace-3.9.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:f77ad4c7d8c90792d0eb4a4d4e091f5c6d58bf1852783f116a2ce74ce955b31e"}, + {file = "ddtrace-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b31287b39a17fc46a93351fea0bfa572bd501cea70738e76776a59f0f4c00e"}, + {file = "ddtrace-3.9.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:521a26dc7a805e9c94481a2d2eaede5ad933c4cb83666ce011a3ccff63b2a218"}, + {file = "ddtrace-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca2c9e329f82abaa0f19f7a1c8b760ca55888b16acf5cbf5c2c852b66988357"}, + {file = "ddtrace-3.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:13be1223229fee6662f854aa0d1c7d7bc0513ae96cdb4d561617aacb34a7d304"}, + {file = "ddtrace-3.9.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1eb28c1d1c1f22b55842bc5c663de81a9817463d4b757b625f8e2393c9fb0909"}, + {file = "ddtrace-3.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3910c48d19206c7ed08552c111bbefc1926b7f4b43bf7ff1db9a2fe420f7cc09"}, + {file = "ddtrace-3.9.1-cp311-cp311-win32.whl", hash = "sha256:9b6cbecfc46445d4a72c214a4ed312a6f7e0f3c000a006f48a89b1e934908da9"}, + {file = "ddtrace-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d17e0725cef7ffd069ac4d28040777038d138b877403d713c071b80f9716e8f6"}, + {file = "ddtrace-3.9.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:188ee5238303997c02282775acb5ade2b56878ce3529709cbb826e8035e4974a"}, + {file = 
"ddtrace-3.9.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:a42f61f7c67ded15eb552902f2ae3adf5c9e005844c92e6aa80e7f7659b4ea9b"}, + {file = "ddtrace-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a43a40a1bd806abb68d6b189de44314c3e574aa7e9d3419074d93a0d448eb011"}, + {file = "ddtrace-3.9.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55726435ea9bde9d7fa48f23a76944330003a7f6bd66a9f323f62cd5e3ce7591"}, + {file = "ddtrace-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9908c413cfbade90195bf9de52455258a0b1005c3b57f677eefaf6e19098818b"}, + {file = "ddtrace-3.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5fbf0a7a4edaf63c956a4e62e142ed692a595f481afd97d2ac5bcaafaf9ff26f"}, + {file = "ddtrace-3.9.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d106d2ecffde6af8d22d8aa7d0d82623173b3b40afad5c40037e98fba0bbf057"}, + {file = "ddtrace-3.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c72c269786aceb89685ad54a620e663cba3f9a94b95e6b2a50385fed7611034"}, + {file = "ddtrace-3.9.1-cp312-cp312-win32.whl", hash = "sha256:caa2c4dd82cc96c53ce284ca9e61b9787c7a52638180438d04d55c7a258355ca"}, + {file = "ddtrace-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:e7eb7a525d7899b3434c7ff6d105e5679052ff2f0aa1d7eda7e3e297ccc75362"}, + {file = "ddtrace-3.9.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:fdf381f82ff28a712f111e57f86bb406b24aba935e0a61791d6912840050c643"}, + {file = "ddtrace-3.9.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:970106bcbfb5d10204a27fab59b14e2aec4d00117de756529887aae66fac670f"}, + {file = "ddtrace-3.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b70026a3f6e0cb2fef0e7cab692e1b6b0aabd630394c238ca86091f8a88ecc60"}, + {file = "ddtrace-3.9.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11262ec1de210d5003f71cd641795f3bdc2c5a7cb3b685fe62c54a959bd80723"}, + {file = "ddtrace-3.9.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74207f734b0f221411b7e7860f86e801a0c9608bc0a94d7039bbaefc2a80af0b"}, + {file = "ddtrace-3.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c41f062aa233ef4da932bcfe3a9d6b8f50407086c8a5d51afe1c056789938dbb"}, + {file = "ddtrace-3.9.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fb23cc1fd65a97cd5943fbc27420a32aea9ba4235eba00d0d688ecb7a7d585c1"}, + {file = "ddtrace-3.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:14e4772e8eb83ac7afe7cf5bed71d308bd4e52adf43dca8f893c576382c1daec"}, + {file = "ddtrace-3.9.1-cp313-cp313-win32.whl", hash = "sha256:821622ccc26cff09d0293f4d74baa351c5f43d89ffad513605dd715fc0bbed4c"}, + {file = "ddtrace-3.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:b7098f126410ea0128bbb2d6fdb2eb1df34ee60d9624649a0c4598a9697a52f6"}, + {file = "ddtrace-3.9.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:897925e09e5207d7eaacf92219cf6b8b3d946df384fddffca5886dc1eed35de8"}, + {file = "ddtrace-3.9.1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:92a9a24b76f227dda6968fbc7d64c5efcfdedb673d41923a13d3676c48a88269"}, + {file = "ddtrace-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0b5382e258a46404c80f02e283c2bf15b22a1d16d8fd6cd1634c58685adb55"}, + {file = "ddtrace-3.9.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0162d2df374586b0d580c48653363ea9b5e0cc79b0eb7f00da9136e944509e93"}, + {file = "ddtrace-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f92fbeaef6dd33f31e49ba67391007910d946e6221ca2cbaf628419f1bd08e80"}, + {file = "ddtrace-3.9.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e36cf4585bba526287d491837a73d44de58a850184d5d3f8dff8a8015152ec2a"}, + {file = "ddtrace-3.9.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d2468faa58a19b989b9296ae26e0f7c90c6fe8451c33b07327fdce2a6b6fb528"}, + {file = "ddtrace-3.9.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8c07540679869e6f54e17fedecdec65d68a324250d4995c48ba456b5a7e1f578"}, + {file = "ddtrace-3.9.1-cp38-cp38-win32.whl", hash = "sha256:1abc70d831dcf5e2f53ae4e2984134d8171b5ddb7e51ed7bf4975bb8920b761a"}, + {file = "ddtrace-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:7da86d6f0731a410c08f66e3dd7d7d7544443a1c5ef97450ee0599bc1b932608"}, + {file = "ddtrace-3.9.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:7e234139a296f256524b5659cfa0ce65fa19e3149c0bec3f611ebaa7ca81eef7"}, + {file = "ddtrace-3.9.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a3d0da607b3f4f4e744968edfba42a641642d543c30f3ac0f3cc4f95579f71b5"}, + {file = "ddtrace-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5faad3f84015e391f1c16bb7884fb106eb6f02f81ddc9136e76adf650d7d0135"}, + {file = "ddtrace-3.9.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:357721015cd7723aa7d0c4d74fb31c2799f56c6b77f2f08a99ef17f41b1e1623"}, + {file = "ddtrace-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d629929c38d2fc13ddd1b5299a85f00aee7251510bb38d65405e626c3157d1aa"}, + {file = "ddtrace-3.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3badf9d5af1f33e9fcadcf03b551da1c1cd87f68cd381318761e8f4954f141e4"}, + {file = "ddtrace-3.9.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fa8c3479059c0d6ae51592e4bc31a346690790811cce76d44e3b47b8ed786fc2"}, + {file = "ddtrace-3.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b64825818413342dc7974d68ac9db2ac25ff4f0729595a7588089a405c90420a"}, + {file = "ddtrace-3.9.1-cp39-cp39-win32.whl", hash = "sha256:5fdb694e2ed7851a5033e7f4d08190c6cbf6f27183643bb660193d5eb7388d4c"}, + {file = "ddtrace-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:84f9cc375eba081387599e146c8934a5fc55ea94f0d2d6d0c11d7505259c6df1"}, + {file = "ddtrace-3.9.1.tar.gz", hash = "sha256:6997cfa6693a466eb839b11176de4f7a8b72ca0a61c20dc575575a654af4ed3a"}, ] [package.dependencies] bytecode = [ + {version = ">=0.13.0", markers = "python_version < \"3.11\""}, {version = ">=0.16.0", markers = "python_version >= \"3.13.0\""}, {version = ">=0.15.1", markers = "python_version ~= \"3.12.0\""}, {version = ">=0.14.0", markers = "python_version ~= \"3.11.0\""}, - {version = ">=0.13.0", markers = "python_version < \"3.11.0\""}, ] envier = ">=0.6.1,<0.7.0" legacy-cgi = {version = ">=2.0.0", markers = "python_version >= \"3.13.0\""} @@ -1623,24 +1641,6 @@ files = [ {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] -[[package]] -name = "deprecated" -version = "1.2.18" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -groups = ["main", "dev"] -files = [ - {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, - {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] - [[package]] name = "dill" version = "0.4.0" @@ -1729,7 +1729,7 @@ description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, @@ -1763,7 +1763,7 @@ description = "Fastest Python implementation of JSON schema" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"validation\"" +markers = "extra == \"validation\" or extra == \"all\"" files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -2045,14 +2045,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.6.1" +version = "8.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, ] [package.dependencies] @@ -2194,8 +2194,6 @@ groups = ["main"] markers = "extra == \"all\" or extra == \"datamasking\"" files = [ {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, - {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, - {file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"}, ] [package.dependencies] @@ -2215,14 +2213,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.23.0" +version = "4.24.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, + {file = "jsonschema-4.24.0-py3-none-any.whl", hash = 
"sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, + {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, ] [package.dependencies] @@ -2513,14 +2511,14 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform [[package]] name = "mkdocs-autorefs" -version = "1.4.1" +version = "1.4.2" description = "Automatically link across pages in MkDocs." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mkdocs_autorefs-1.4.1-py3-none-any.whl", hash = "sha256:9793c5ac06a6ebbe52ec0f8439256e66187badf4b5334b5fde0b128ec134df4f"}, - {file = "mkdocs_autorefs-1.4.1.tar.gz", hash = "sha256:4b5b6235a4becb2b10425c2fa191737e415b37aa3418919db33e5d774c9db079"}, + {file = "mkdocs_autorefs-1.4.2-py3-none-any.whl", hash = "sha256:83d6d777b66ec3c372a1aad4ae0cf77c243ba5bcda5bf0c6b8a2c5e7a3d89f13"}, + {file = "mkdocs_autorefs-1.4.2.tar.gz", hash = "sha256:e2ebe1abd2b67d597ed19378c0fff84d73d1dbce411fce7a7cc6f161888b6749"}, ] [package.dependencies] @@ -2717,48 +2715,49 @@ dill = ">=0.4.0" [[package]] name = "mypy" -version = "1.15.0" +version = "1.16.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = 
"mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, + {file = "mypy-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7909541fef256527e5ee9c0a7e2aeed78b6cda72ba44298d1334fe7881b05c5c"}, + {file = "mypy-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e71d6f0090c2256c713ed3d52711d01859c82608b5d68d4fa01a3fe30df95571"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777"}, + {file = "mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b"}, + {file = "mypy-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:09a8da6a0ee9a9770b8ff61b39c0bb07971cda90e7297f4213741b48a0cc8d93"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666"}, + {file = "mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c"}, + {file = "mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8"}, + {file = "mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730"}, + {file = "mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d"}, + {file = "mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52"}, + {file = "mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f56236114c425620875c7cf71700e3d60004858da856c6fc78998ffe767b73d3"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15486beea80be24ff067d7d0ede673b001d0d684d0095803b3e6e17a886a2a92"}, + {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436"}, + {file = 
"mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2"}, + {file = "mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20"}, + {file = "mypy-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e7e0ad35275e02797323a5aa1be0b14a4d03ffdb2e5f2b0489fa07b89c67b21"}, + {file = "mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031"}, + {file = "mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" @@ -2801,14 +2800,14 @@ typing-extensions = {version = "*", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-cloudformation" -version = "1.38.0" -description = "Type annotations for boto3 CloudFormation 1.38.0 service generated with mypy-boto3-builder 8.10.1" +version = "1.38.31" +description = "Type annotations for boto3 CloudFormation 1.38.31 service generated with mypy-boto3-builder 8.11.0" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "mypy_boto3_cloudformation-1.38.0-py3-none-any.whl", hash = "sha256:a1411aa5875b737492aaac5f7e8ce450f034c18f972eb608a9eba6fe35837f6a"}, - {file = "mypy_boto3_cloudformation-1.38.0.tar.gz", hash = "sha256:563399166c07e91e0695fb1e58103a248b2bee0db5e2c3f07155776dd6311805"}, + {file = "mypy_boto3_cloudformation-1.38.31-py3-none-any.whl", hash = "sha256:1016508783c1263aba9bb24dd29afbea6f0c8c7cee79e9d073c4ed5524ce53f5"}, + {file = "mypy_boto3_cloudformation-1.38.31.tar.gz", hash = "sha256:f4185231faab97bfb50b25dc1323333c630a092ffa8c15356f21116fc92a7f42"}, ] [package.dependencies] @@ -2816,14 +2815,14 @@ typing-extensions = {version = "*", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-cloudwatch" -version = "1.38.0" -description = "Type annotations for boto3 CloudWatch 1.38.0 service generated with mypy-boto3-builder 8.10.1" +version = "1.38.21" +description = "Type annotations for boto3 CloudWatch 1.38.21 service generated with mypy-boto3-builder 8.11.0" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "mypy_boto3_cloudwatch-1.38.0-py3-none-any.whl", hash = "sha256:1976daa402ecc95200a9b641f733a5612e72daa883c8ac967443955e61cea6e9"}, - {file = "mypy_boto3_cloudwatch-1.38.0.tar.gz", hash = "sha256:bb3492af66e94eb20322d73b793050ea54f1742118b18e36e798e4dafe3b167e"}, + {file = "mypy_boto3_cloudwatch-1.38.21-py3-none-any.whl", hash = "sha256:96a014b3ccbc2cd77915fd832368506f77f63f57a1e528b4b270321df78c911b"}, + {file = "mypy_boto3_cloudwatch-1.38.21.tar.gz", hash = "sha256:d9f273a05a0434d7a5294ce81f3d45df46b3aafec3aee8d0b065a8216a290076"}, ] [package.dependencies] @@ -2876,14 +2875,14 @@ typing-extensions = {version = "*", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-s3" -version = "1.38.0" -description = "Type annotations for boto3 S3 1.38.0 service generated with mypy-boto3-builder 8.10.1" +version = "1.38.26" +description = "Type annotations for boto3 S3 1.38.26 service generated with mypy-boto3-builder 8.11.0" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "mypy_boto3_s3-1.38.0-py3-none-any.whl", hash = 
"sha256:5cd9449df0ef6cf89e00e6fc9130a0ab641f703a23ab1d2146c394da058e8282"}, - {file = "mypy_boto3_s3-1.38.0.tar.gz", hash = "sha256:f8fe586e45123ffcd305a0c30847128f3931d888649e2b4c5a52f412183c840a"}, + {file = "mypy_boto3_s3-1.38.26-py3-none-any.whl", hash = "sha256:1129d64be1aee863e04f0c92ac8d315578f13ccae64fa199b20ad0950d2b9616"}, + {file = "mypy_boto3_s3-1.38.26.tar.gz", hash = "sha256:38a45dee5782d5c07ddea07ea50965c4d2ba7e77617c19f613b4c9f80f961b52"}, ] [package.dependencies] @@ -2953,7 +2952,7 @@ description = "Python package for creating and manipulating graphs and networks" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version < \"3.10\"" +markers = "python_version == \"3.9\"" files = [ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, @@ -2973,7 +2972,7 @@ description = "Python package for creating and manipulating graphs and networks" optional = false python-versions = ">=3.10" groups = ["dev"] -markers = "python_version >= \"3.10\"" +markers = "python_version == \"3.10\"" files = [ {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, @@ -2987,6 +2986,28 @@ example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "networkx" +version = "3.5" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.11" +groups = ["dev"] +markers = "python_version >= \"3.11.0\"" +files = [ + {file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}, + {file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}, +] + +[package.extras] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + [[package]] name = "nox" version = "2024.10.9" @@ -3012,30 +3033,30 @@ uv = ["uv (>=0.1.6)"] [[package]] name = "opentelemetry-api" -version = "1.33.1" +version = "1.34.1" description = "OpenTelemetry Python API" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83"}, - {file = "opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8"}, + {file = 
"opentelemetry_api-1.34.1-py3-none-any.whl", hash = "sha256:b7df4cb0830d5a6c29ad0c0691dbae874d8daefa934b8b1d642de48323d32a8c"}, + {file = "opentelemetry_api-1.34.1.tar.gz", hash = "sha256:64f0bd06d42824843731d05beea88d4d4b6ae59f9fe347ff7dfa2cc14233bbb3"}, ] [package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<8.7.0" +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" [[package]] name = "packaging" -version = "25.0" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -3194,7 +3215,7 @@ files = [ {file = "pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7"}, {file = "pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [package.dependencies] annotated-types = ">=0.6.0" @@ -3314,7 +3335,7 @@ files = [ {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" @@ -3359,6 +3380,25 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] +markers = {main = "extra == \"redis\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pymdown-extensions" version = "10.15" @@ -3688,24 +3728,24 @@ toml = ["tomli (>=2.0.1)"] [[package]] name = "redis" -version = "6.2.0" +version = "5.3.0" description = "Python client for Redis database and key-value store" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e"}, - {file = "redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977"}, + {file = 
"redis-5.3.0-py3-none-any.whl", hash = "sha256:f1deeca1ea2ef25c1e4e46b07f4ea1275140526b1feea4c6459c0ec27a10ef83"}, + {file = "redis-5.3.0.tar.gz", hash = "sha256:8d69d2dde11a12dc85d0dbf5c45577a5af048e2456f7077d87ad35c1c81c310e"}, ] markers = {main = "extra == \"redis\""} [package.dependencies] async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} +PyJWT = ">=2.9.0,<2.10.0" [package.extras] -hiredis = ["hiredis (>=3.2.0)"] -jwt = ["pyjwt (>=2.9.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "referencing" @@ -3866,14 +3906,14 @@ decorator = ">=3.4.2" [[package]] name = "rich" -version = "14.0.0" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" groups = ["dev"] files = [ - {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, - {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] @@ -3886,166 +3926,169 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.25.0" +version = "0.25.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "rpds_py-0.25.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c146a24a8f0dc4a7846fb4640b88b3a68986585b8ce8397af15e66b7c5817439"}, - {file = "rpds_py-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:77814c7a4e1dc43fba73aeb4c1ef0fe37d901f3aa869a4823de5ea843a283fd0"}, - {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5afbff2822016db3c696cb0c1432e6b1f0e34aa9280bc5184dc216812a24e70d"}, - {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffae52cd76837a5c16409359d236b1fced79e42e0792e8adf375095a5e855368"}, - {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf9426b740a7047b2b0dddcba775211542e8053ce1e509a1759b665fe573508"}, - {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cad834f1a8f51eb037c3c4dc72c884c9e1e0644d900e2d45aa76450e4aa6282"}, - {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c46bd76986e05689376d28fdc2b97d899576ce3e3aaa5a5f80f67a8300b26eb3"}, - {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f3353a2d7eb7d5e0af8a7ca9fc85a34ba12619119bcdee6b8a28a6373cda65ce"}, - {file = "rpds_py-0.25.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdc648d4e81eef5ac4bb35d731562dffc28358948410f3274d123320e125d613"}, - {file = "rpds_py-0.25.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:098d446d76d26e394b440d73921b49c1c90274d46ccbaadf346b1b78f9fdd4b1"}, - {file = "rpds_py-0.25.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c624c82e645f6b5465d08cdc802fb0cd53aa1478782fb2992b9e09f2c9426865"}, - {file = 
"rpds_py-0.25.0-cp310-cp310-win32.whl", hash = "sha256:9d0041bd9e2d2ef803b32d84a0c8115d178132da5691346465953a2a966ba8ca"}, - {file = "rpds_py-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:d8b41195a6b03280ab00749a438fbce761e7acfd5381051a570239d752376f27"}, - {file = "rpds_py-0.25.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6587ece9f205097c62d0e3d3cb7c06991eb0083ab6a9cf48951ec49c2ab7183c"}, - {file = "rpds_py-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b0a5651e350997cebcdc23016dca26c4d1993d29015a535284da3159796e30b6"}, - {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3752a015db89ea3e9c04d5e185549be4aa29c1882150e094c614c0de8e788feb"}, - {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a05b199c11d2f39c72de8c30668734b5d20974ad44b65324ea3e647a211f135d"}, - {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2f91902fc0c95dd1fa6b30ebd2af83ace91e592f7fd6340a375588a9d4b9341b"}, - {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98c729193e7abe498565266933c125780fb646e977e94289cadbb36e4eeeb370"}, - {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36a7564deaac3f372e8b8b701eb982ea3113516e8e08cd87e3dc6ccf29bad14b"}, - {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b0c0f671a53c129ea48f9481e95532579cc489ab5a0ffe750c9020787181c48"}, - {file = "rpds_py-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d21408eaa157063f56e58ca50da27cad67c4395a85fb44cc7a31253ea4e58918"}, - {file = "rpds_py-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a413674eb2bd2ecb2b93fcc928871b19f7220ee04bca4af3375c50a2b32b5a50"}, - {file = "rpds_py-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:94f89161a3e358db33310a8a064852a6eb119ed1aa1a3dba927b4e5140e65d00"}, - {file = "rpds_py-0.25.0-cp311-cp311-win32.whl", hash = "sha256:540cd89d256119845b7f8f56c4bb80cad280cab92d9ca473be49ea13e678fd44"}, - {file = "rpds_py-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:2649ff19291928243f90c86e4dc9cd86c8c4c6a73c3693ba2e23bc2fbcd8338c"}, - {file = "rpds_py-0.25.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:89260601d497fa5957c3e46f10b16cfa2a4808ad4dd46cddc0b997461923a7d9"}, - {file = "rpds_py-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:637ec39f97e342a3f76af739eda96800549d92f3aa27a2170b6dcbdffd49f480"}, - {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd08c82336412a39a598e5baccab2ee2d7bd54e9115c8b64f2febb45da5c368"}, - {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:837fd066f974e5b98c69ac83ec594b79a2724a39a92a157b8651615e5032e530"}, - {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:653a066d2a4a332d4f8a11813e8124b643fa7b835b78468087a9898140469eee"}, - {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91a51499be506022b9f09facfc42f0c3a1c45969c0fc8f0bbebc8ff23ab9e531"}, - {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb91471640390a82744b164f8a0be4d7c89d173b1170713f9639c6bad61e9e64"}, - {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:28bd2969445acc2d6801a22f97a43134ae3cb18e7495d668bfaa8d82b8526cdc"}, - {file = "rpds_py-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f933b35fa563f047896a70b69414dfb3952831817e4c4b3a6faa96737627f363"}, - {file = "rpds_py-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:80b37b37525492250adc7cbca20ae7084f86eb3eb62414b624d2a400370853b1"}, - {file = "rpds_py-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:864573b6440b770db5a8693547a8728d7fd32580d4903010a8eee0bb5b03b130"}, - {file = "rpds_py-0.25.0-cp312-cp312-win32.whl", hash = "sha256:ad4a896896346adab86d52b31163c39d49e4e94c829494b96cc064bff82c5851"}, - {file = "rpds_py-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:4fbec54cc42fa90ca69158d75f125febc4116b2d934e71c78f97de1388a8feb2"}, - {file = "rpds_py-0.25.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4e5fe366fa53bd6777cf5440245366705338587b2cf8d61348ddaad744eb591a"}, - {file = "rpds_py-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54f925ff8d4443b7cae23a5215954abbf4736a3404188bde53c4d744ac001d89"}, - {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58258a66255b2500ddaa4f33191ada5ec983a429c09eb151daf81efbb9aa115"}, - {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f3a57f08c558d0983a708bfe6d1265f47b5debff9b366b2f2091690fada055c"}, - {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7d60d42f1b9571341ad2322e748f7a60f9847546cd801a3a0eb72a1b54c6519"}, - {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a54b94b0e4de95aa92618906fb631779d9fde29b4bf659f482c354a3a79fd025"}, - {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af1c2241919304cc2f90e7dcb3eb1c1df6fb4172dd338e629dd6410e48b3d1a0"}, - {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7d34547810bfd61acf8a441e8a3651e7a919e8e8aed29850be14a1b05cfc6f41"}, - {file = "rpds_py-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66568caacf18542f0cf213db7adf3de2da6ad58c7bf2c4fafec0d81ae557443b"}, - {file = "rpds_py-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e49e4c3e899c32884d7828c91d6c3aff08d2f18857f50f86cc91187c31a4ca58"}, - {file = "rpds_py-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:20af08b0b2d5b196a2bcb70becf0b97ec5af579cee0ae6750b08a2eea3b6c77d"}, - {file = "rpds_py-0.25.0-cp313-cp313-win32.whl", hash = "sha256:d3dc8d6ce8f001c80919bdb49d8b0b815185933a0b8e9cdeaea42b0b6f27eeb0"}, - {file = "rpds_py-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:113d134dc5a8d2503630ca2707b58a1bf5b1b3c69b35c7dab8690ee650c111b8"}, - {file = "rpds_py-0.25.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:6c72a4a8fab10bc96720ad40941bb471e3b1150fb8d62dab205d495511206cf1"}, - {file = "rpds_py-0.25.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bb979162323f3534dce84b59f86e689a0761a2a300e0212bfaedfa80d4eb8100"}, - {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c8cb5dcf7d36d3adf2ae0730b60fb550a8feb6e432bee7ef84162a0d15714b"}, - {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:673ba018df5ae5e7b6c9a021d51ffe39c0ae1daa0041611ed27a0bca634b2d2e"}, - {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:16fb28d3a653f67c871a47c5ca0be17bce9fab8adb8bcf7bd09f3771b8c4d860"}, - {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12a84c3851f9e68633d883c01347db3cb87e6160120a489f9c47162cd276b0a5"}, - {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5f457afffb45d3804728a54083e31fbaf460e902e3f7d063e56d0d0814301e"}, - {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9442cbff21122e9a529b942811007d65eabe4182e7342d102caf119b229322c6"}, - {file = "rpds_py-0.25.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:383cf0d4288baf5a16812ed70d54ecb7f2064e255eb7fe42c38e926adeae4534"}, - {file = "rpds_py-0.25.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0dcdee07ebf76223092666c72a9552db276fbe46b98830ecd1bb836cc98adc81"}, - {file = "rpds_py-0.25.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5bbfbd9c74c4dd74815bd532bf29bedea6d27d38f35ef46f9754172a14e4c655"}, - {file = "rpds_py-0.25.0-cp313-cp313t-win32.whl", hash = "sha256:90dbd2c42cb6463c07020695800ae8f347e7dbeff09da2975a988e467b624539"}, - {file = "rpds_py-0.25.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8c2ad59c4342a176cb3e0d5753e1c911eabc95c210fc6d0e913c32bf560bf012"}, - {file = "rpds_py-0.25.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9f9a1b15b875160186177f659cde2b0f899182b0aca49457d6396afc4bbda7b9"}, - {file = "rpds_py-0.25.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e849315963eb08c26167d0f2c0f9319c9bd379daea75092b3c595d70be6209d"}, - {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad37c29adc435e6d8b24be86b03596183ee8d4bb8580cc4c676879b0b896a99"}, - {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:587cad3959d3d85127cf5df1624cdce569bb3796372e00420baad46af7c56b9b"}, - {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce0518667855a1598d9b1f4fcf0fed1182c67c5ba4fe6a2c6bce93440a65cead"}, - {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c18cb2f6805861dcdf11fb0b3c111a0335f6475411687db2f6636f32bed66b0"}, - {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a21f4584f69547ae03aaa21be98753e85599f3437b84039da5dc20b53abe987"}, - {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d7d65aa934899849628137ab391562cdb487c6ffb9b9781319a64a9c66afbce"}, - {file = "rpds_py-0.25.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fd9167e9604cb5a218a2e847aa8cdc5f98b379a673371978ee7b0c11b4d2e140"}, - {file = "rpds_py-0.25.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6c27156c8d836e7ff760767e93245b286ae028bfd81d305db676662d1f642637"}, - {file = "rpds_py-0.25.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:66087711faf29cb3ac8ab05341939aec29968626aff8ef18e483e229055dd9a7"}, - {file = "rpds_py-0.25.0-cp39-cp39-win32.whl", hash = "sha256:f2e69415e4e33cdeee50ebc2c4d8fcbef12c3181d9274e512ccd2a905a76aad1"}, - {file = "rpds_py-0.25.0-cp39-cp39-win_amd64.whl", hash = "sha256:58cfaa54752d6d2b4f10e87571688dbb7792327a69eca5417373d77d42787058"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57e9616a2a9da08fe0994e37a0c6f578fbaf6d35911bcba31e99660542d60c45"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:6d95521901896a90a858993bfa3ec0f9160d3d97e8c8fefc279b3306cdadfee0"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33aef3914a5b49db12ed3f24d214ffa50caefc8f4b0c7c7b9485bd4b231a898"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4acbe2349a3baac9cc212005b6cb4bbb7e5b34538886cde4f55dfc29173da1d6"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b75b5d3416b00d064a5e6f4814fdfb18a964a7cf38dc00b5c2c02fa30a7dd0b"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:542a6f1d0f400b9ce1facb3e30dd3dc84e4affc60353509b00a7bdcd064be91e"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60ba9d104f4e8496107b1cb86e45a68a16d13511dc3986e0780e9f85c2136f9"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6065a489b7b284efb29d57adffae2b9b5e9403d3c8d95cfa04e04e024e6b4e77"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6bcca4d0d24d8c37bfe0cafdaaf4346b6c516db21ccaad5c7fba0a0df818dfc9"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:8155e21203161e5c78791fc049b99f0bbbf14d1d1839c8c93c8344957f9e8e1e"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a1eda14db1ac7a2ab4536dfe69e4d37fdd765e8e784ae4451e61582ebb76012"}, - {file = "rpds_py-0.25.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:de34a7d1893be76cb015929690dce3bde29f4de08143da2e9ad1cedb11dbf80e"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0d63a86b457069d669c423f093db4900aa102f0e5a626973eff4db8355c0fd96"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89bb2b20829270aca28b1e5481be8ee24cb9aa86e6c0c81cb4ada2112c9588c5"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e103b48e63fd2b8a8e2b21ab5b5299a7146045626c2ed4011511ea8122d217"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fccd24c080850715c58a80200d367bc62b4bff6c9fb84e9564da1ebcafea6418"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b42790c91e0041a98f0ec04244fb334696938793e785a5d4c7e56ca534d7da"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc907ea12216cfc5560148fc42459d86740fc739981c6feb94230dab09362679"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e11065b759c38c4945f8c9765ed2910e31fa5b2f7733401eb7d966f468367a2"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8abc1a3e29b599bf8bb5ad455256a757e8b0ed5621e7e48abe8209932dc6d11e"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:cd36b71f9f3bf195b2dd9be5eafbfc9409e6c8007aebc38a4dc051f522008033"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:805a0dff0674baa3f360c21dcbc622ae544f2bb4753d87a4a56a1881252a477e"}, - {file = "rpds_py-0.25.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:96742796f499ac23b59856db734e65b286d1214a0d9b57bcd7bece92d9201fa4"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7715597186a7277be12729c896019226321bad1f047da381ab707b177aa5017c"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b049dd0792d51f07193cd934acec89abe84d2607109e6ca223b2f0ff24f0c7d"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87c6ff87b38f46d712418d78b34db1198408a3d9a42eddc640644aea561216b1"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240251fd95b057c26f8538d0e673bf983eba4f38da95fbaf502bfc1a768b3984"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85587479f210350e9d9d25e505f422dd636e561658382ee8947357a4bac491ad"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:551897221bbc9de17bce4574810347db8ec1ba4ec2f50f35421790d34bdb6ef9"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d50ac3b772c10e0b918a5ce2e871138896bfb5f35050ff1ff87ddca45961fc"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8029c19c8a32ef3093c417dd16a5f806e7f529fcceea7c627b2635e9da5104da"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:fe7439d9c5b402af2c9911c7facda1808d0c8dbfa9cf085e6aeac511a23f7d87"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:77910d6bec321c9fccfe9cf5e407fed9d2c48a5e510473b4f070d5cf2413c003"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0ee0cc81f875e853ccdf3badb44b67f771fb9149baa9e752777ccdcaf052ad26"}, - {file = "rpds_py-0.25.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:469054e6b2f8e41f1fe62b95f617082019d343eddeec3219ff3909067e672fb9"}, - {file = "rpds_py-0.25.0.tar.gz", hash = "sha256:4d97661bf5848dd9e5eb7ded480deccf9d32ce2cd500b88a26acbf7bd2864985"}, + {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, + {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2"}, + {file = "rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24"}, + {file = "rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a"}, + {file = "rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d"}, + {file = "rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042"}, + {file = "rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc"}, + {file = "rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4"}, + {file = "rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4"}, + {file = "rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c"}, + {file = "rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb"}, + {file = "rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe"}, + {file = "rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192"}, + {file = "rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728"}, + {file = "rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559"}, + {file = "rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd"}, + {file = "rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31"}, + {file = "rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500"}, + {file = "rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = 
"sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5"}, + {file = "rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129"}, + {file = "rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66"}, + {file = "rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523"}, + {file = "rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763"}, + {file = "rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd"}, + {file = "rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a"}, + {file = 
"rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f"}, + {file = "rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449"}, + {file = "rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1"}, + {file = 
"rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793"}, + {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"}, ] [[package]] name = "ruff" -version = "0.11.13" +version = "0.11.8" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46"}, - {file = "ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48"}, - {file = "ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71"}, - {file = "ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9"}, - {file = "ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc"}, - {file = "ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7"}, - {file = "ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432"}, - {file = "ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492"}, - {file = "ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250"}, - {file = "ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3"}, - {file = "ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b"}, - {file = "ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514"}, + {file = "ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3"}, + {file = "ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835"}, + {file = "ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304"}, + {file = "ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2"}, + {file = "ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4"}, + {file = "ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2"}, + {file = "ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8"}, ] [[package]] name = "s3transfer" -version = "0.12.0" +version = "0.13.0" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "s3transfer-0.12.0-py3-none-any.whl", hash = "sha256:35b314d7d82865756edab59f7baebc6b477189e6ab4c53050e28c1de4d9cce18"}, - {file = "s3transfer-0.12.0.tar.gz", hash = "sha256:8ac58bc1989a3fdb7c7f3ee0918a66b160d038a147c7b5db1500930a607e9a1c"}, + {file = "s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be"}, + {file = "s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"}, ] [package.dependencies] @@ -4129,14 +4172,14 @@ unleash = ["UnleashClient (>=6.0.1)"] [[package]] name = "setuptools" -version = "80.7.1" +version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-80.7.1-py3-none-any.whl", hash = "sha256:ca5cc1069b85dc23070a6628e6bcecb3292acac802399c7f8edc0100619f9009"}, - {file = "setuptools-80.7.1.tar.gz", hash = "sha256:f6ffc5f0142b1bd8d0ca94ee91b30c0ca862ffd50826da1ea85258a06fd94552"}, + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] @@ -4291,7 +4334,7 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file 
= "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -4357,19 +4400,31 @@ files = [ [[package]] name = "types-cffi" -version = "1.17.0.20250516" +version = "1.17.0.20250523" description = "Typing stubs for cffi" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_cffi-1.17.0.20250516-py3-none-any.whl", hash = "sha256:b5a7b61fa60114072900a1f25094d0ea3d4f398d060128583ef644bb686d027d"}, - {file = "types_cffi-1.17.0.20250516.tar.gz", hash = "sha256:f63c42ab07fd71f4ed218e2dea64f8714e71c585db5c6bdef9ea8f57cf99979b"}, + {file = "types_cffi-1.17.0.20250523-py3-none-any.whl", hash = "sha256:e98c549d8e191f6220e440f9f14315d6775a21a0e588c32c20476be885b2fad9"}, + {file = "types_cffi-1.17.0.20250523.tar.gz", hash = "sha256:e7110f314c65590533adae1b30763be08ca71ad856a1ae3fe9b9d8664d49ec22"}, ] [package.dependencies] types-setuptools = "*" +[[package]] +name = "types-protobuf" +version = "6.30.2.20250516" +description = "Typing stubs for protobuf" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_protobuf-6.30.2.20250516-py3-none-any.whl", hash = "sha256:8c226d05b5e8b2623111765fa32d6e648bbc24832b4c2fddf0fa340ba5d5b722"}, + {file = "types_protobuf-6.30.2.20250516.tar.gz", hash = "sha256:aecd1881770a9bb225ede66872ef7f0da4505edd0b193108edd9892e48d49a41"}, +] + [[package]] name = "types-pyopenssl" version = "24.1.0.20240722" @@ -4431,26 +4486,26 @@ types-urllib3 = "*" [[package]] name = "types-s3transfer" -version = "0.12.0" +version = "0.13.0" description = "Type annotations and code completion for s3transfer" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "types_s3transfer-0.12.0-py3-none-any.whl", hash = "sha256:101bbc5b7f00b71512374df881f480fc6bf63c948b5098ab024bf3370fbfb0e8"}, - {file = "types_s3transfer-0.12.0.tar.gz", hash = "sha256:f8f59201481e904362873bf0be3267f259d60ad946ebdfcb847d092a1fa26f98"}, + {file = "types_s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:79c8375cbf48a64bff7654c02df1ec4b20d74f8c5672fc13e382f593ca5565b3"}, + {file = "types_s3transfer-0.13.0.tar.gz", hash = "sha256:203dadcb9865c2f68fb44bc0440e1dc05b79197ba4a641c0976c26c9af75ef52"}, ] [[package]] name = "types-setuptools" -version = "80.7.0.20250516" +version = "80.9.0.20250529" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_setuptools-80.7.0.20250516-py3-none-any.whl", hash = "sha256:c1da6c11698139c8307c6df5987592df940e956912c204e42d844ba821dd2741"}, - {file = "types_setuptools-80.7.0.20250516.tar.gz", hash = "sha256:57274b58e05434de42088a86074c9e630e5786f759cf9cc1e3015e886297ca21"}, + {file = "types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f"}, + {file = "types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91"}, ] [[package]] @@ -4467,28 +4522,28 @@ files = [ [[package]] name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", 
"dev"] files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] [[package]] name = "typing-inspection" -version = "0.4.0" +version = "0.4.1" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, - {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [package.dependencies] typing-extensions = ">=4.12.0" @@ -4846,14 +4901,14 @@ files = [ [[package]] name = "zipp" -version = "3.21.0" +version = "3.23.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, ] [package.extras] @@ -4861,7 +4916,7 @@ check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \" cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] @@ -4869,6 +4924,8 @@ all = ["aws-encryption-sdk", "aws-xray-sdk", "fastjsonschema", "jsonpath-ng", "p aws-sdk = ["boto3"] datadog = ["datadog-lambda"] datamasking = ["aws-encryption-sdk", "jsonpath-ng"] +kafka-consumer-avro = ["avro"] +kafka-consumer-protobuf = ["protobuf"] parser = ["pydantic"] redis = ["redis"] tracer = ["aws-xray-sdk"] @@ -4878,4 +4935,4 @@ valkey = ["valkey-glide"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<4.0.0" -content-hash = "c786bd8eee8e8ddfdcea9372328b34c645bdda06feec56479d3d617486fce98e" +content-hash = "0f5e7c0e335d0d3ccd891fc12b4a2130a51df6c2f74406b66b1794a8b5a77dfd" diff --git a/pyproject.toml b/pyproject.toml index 
39b6ec0abd6..e23eff1edf6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,8 @@ valkey-glide = { version = ">=1.3.5,<2.0", optional = true } aws-encryption-sdk = { version = ">=3.1.1,<5.0.0", optional = true } jsonpath-ng = { version = "^1.6.0", optional = true } datadog-lambda = { version = "^6.106.0", optional = true } - +avro = { version = "^1.12.0", optional = true } +protobuf = { version = "^6.30.2", optional = true } [tool.poetry.extras] parser = ["pydantic"] @@ -75,6 +76,8 @@ all = [ aws-sdk = ["boto3"] datadog = ["datadog-lambda"] datamasking = ["aws-encryption-sdk", "jsonpath-ng"] +kafka-consumer-avro = ["avro"] +kafka-consumer-protobuf = ["protobuf"] [tool.poetry.group.dev.dependencies] coverage = { extras = ["toml"], version = "^7.6" } @@ -125,6 +128,9 @@ nox = "^2024.4.15" mkdocstrings-python = "^1.13.0" datadog-lambda = "^6.106.0" mkdocs-llmstxt = "^0.2.0" +avro = "^1.12.0" +protobuf = "^6.30.2" +types-protobuf = "^6.30.2.20250516" [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/tests/events/kafkaEventMsk.json b/tests/events/kafkaEventMsk.json index f0c7d36c2cf..6c27594460c 100644 --- a/tests/events/kafkaEventMsk.json +++ b/tests/events/kafkaEventMsk.json @@ -28,7 +28,15 @@ 101 ] } - ] + ], + "valueSchemaMetadata": { + "dataFormat": "AVRO", + "schemaId": "1234" + }, + "keySchemaMetadata": { + "dataFormat": "AVRO", + "schemaId": "1234" + } }, { "topic":"mytopic", @@ -53,7 +61,15 @@ 101 ] } - ] + ], + "valueSchemaMetadata": { + "dataFormat": "AVRO", + "schemaId": "1234" + }, + "keySchemaMetadata": { + "dataFormat": "AVRO", + "schemaId": "1234" + } }, { "topic":"mytopic", @@ -79,7 +95,15 @@ 101 ] } - ] + ], + "valueSchemaMetadata": { + "dataFormat": "AVRO", + "schemaId": "1234" + }, + "keySchemaMetadata": { + "dataFormat": "AVRO", + "schemaId": "1234" + } } ] } diff --git a/tests/functional/kafka_consumer/__init__.py b/tests/functional/kafka_consumer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/kafka_consumer/_avro/__init__.py b/tests/functional/kafka_consumer/_avro/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/kafka_consumer/_avro/test_kafka_consumer_with_avro.py b/tests/functional/kafka_consumer/_avro/test_kafka_consumer_with_avro.py new file mode 100644 index 00000000000..9359558605c --- /dev/null +++ b/tests/functional/kafka_consumer/_avro/test_kafka_consumer_with_avro.py @@ -0,0 +1,311 @@ +import base64 +import io +from copy import deepcopy +from dataclasses import dataclass + +import pytest +from avro.io import BinaryEncoder, DatumWriter +from avro.schema import parse as parse_schema + +from aws_lambda_powertools.utilities.kafka.consumer_records import ConsumerRecords +from aws_lambda_powertools.utilities.kafka.exceptions import ( + KafkaConsumerAvroSchemaParserError, + KafkaConsumerDeserializationError, + KafkaConsumerMissingSchemaError, +) +from aws_lambda_powertools.utilities.kafka.kafka_consumer import kafka_consumer +from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + + +@pytest.fixture +def avro_value_schema(): + return """ + { + "type": "record", + "name": "User", + "namespace": "com.example", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "age", "type": "int"} + ] + } + """ + + +@pytest.fixture +def avro_key_schema(): + return """ + { + "type": "record", + "name": "Key", + "namespace": "com.example", + "fields": [ + {"name": "user_id", "type": "string"} + ] + } + """ + + +@pytest.fixture +def
avro_encoded_value(avro_value_schema): + parsed_schema = parse_schema(avro_value_schema) + writer = DatumWriter(parsed_schema) + bytes_writer = io.BytesIO() + encoder = BinaryEncoder(bytes_writer) + writer.write({"name": "John Doe", "age": 30}, encoder) + return base64.b64encode(bytes_writer.getvalue()).decode("utf-8") + + +@pytest.fixture +def avro_encoded_key(avro_key_schema): + parsed_key_schema = parse_schema(avro_key_schema) + writer = DatumWriter(parsed_key_schema) + bytes_writer = io.BytesIO() + encoder = BinaryEncoder(bytes_writer) + writer.write({"user_id": "user-123"}, encoder) + return base64.b64encode(bytes_writer.getvalue()).decode("utf-8") + + +@pytest.fixture +def kafka_event_with_avro_data(avro_encoded_value, avro_encoded_key): + return { + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:123456789012:cluster/my-cluster/abcdefg", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": avro_encoded_key, + "value": avro_encoded_value, + "headers": [{"headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]}], + }, + ], + }, + } + + +@dataclass +class UserValueDataClass: + name: str + age: int + + +@dataclass +class UserKeyClass: + user_id: str + + +def test_kafka_consumer_with_avro(kafka_event_with_avro_data, avro_value_schema, lambda_context): + # GIVEN A Kafka consumer configured with Avro schema deserialization + schema_config = SchemaConfig(value_schema_type="AVRO", value_schema=avro_value_schema) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + return event.record.value + + # WHEN The handler processes the Kafka event containing Avro-encoded data + result = handler(kafka_event_with_avro_data, lambda_context) + + # THEN The Avro data should be correctly deserialized into a Python dictionary + assert result["name"] == "John Doe" + assert result["age"] == 30 + + +def test_kafka_consumer_with_avro_and_dataclass( + kafka_event_with_avro_data, + avro_value_schema, + lambda_context, +): + # GIVEN A Kafka consumer configured with Avro schema deserialization + # and a dataclass for output serialization + schema_config = SchemaConfig( + value_schema_type="AVRO", + value_schema=avro_value_schema, + value_output_serializer=UserValueDataClass, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Capture the results to verify + value: UserValueDataClass = event.record.value + return value + + # WHEN The handler processes the Kafka event containing Avro-encoded data + # and serializes the output as a UserValueDataClass instance + result = handler(kafka_event_with_avro_data, lambda_context) + + # THEN The Avro data should be correctly deserialized and converted to a dataclass instance + # with the expected property values + assert result.name == "John Doe" + assert result.age == 30 + assert isinstance(result, UserValueDataClass) + + +def test_kafka_consumer_with_avro_and_custom_function( + kafka_event_with_avro_data, + avro_value_schema, + lambda_context, +): + # GIVEN A custom serialization function that removes the age field from the dictionary + def dict_output(data: dict) -> dict: + # removing age key + del data["age"] + return data + + # A Kafka consumer configured with Avro schema deserialization + # and a custom function for output transformation + schema_config = SchemaConfig( + value_schema_type="AVRO", + 
value_schema=avro_value_schema, + value_output_serializer=dict_output, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Return the deserialized value for verification + return event.record.value + + # WHEN The handler processes the Kafka event containing Avro-encoded data + # and applies the custom transformation function to the output + result = handler(kafka_event_with_avro_data, lambda_context) + + # THEN The Avro data should be correctly deserialized and transformed + # with the name field intact but the age field removed + assert result["name"] == "John Doe" + assert "age" not in result + + +def test_kafka_consumer_with_invalid_avro_data(kafka_event_with_avro_data, lambda_context, avro_value_schema): + # GIVEN A Kafka event with deliberately corrupted Avro data + invalid_data = base64.b64encode(b"invalid avro data").decode("utf-8") + kafka_event_with_avro_data_temp = deepcopy(kafka_event_with_avro_data) + kafka_event_with_avro_data_temp["records"]["my-topic-1"][0]["value"] = invalid_data + + schema_config = SchemaConfig(value_schema_type="AVRO", value_schema=avro_value_schema) + + @kafka_consumer(schema_config=schema_config) + def lambda_handler(event: ConsumerRecords, context): + # This should never be reached if deserializer fails + return event.record.value + + # WHEN/THEN + # The handler should fail to process the invalid Avro data + # and raise a specific deserialization error + with pytest.raises(KafkaConsumerDeserializationError) as excinfo: + lambda_handler(kafka_event_with_avro_data_temp, lambda_context) + + # The exact error message may vary depending on the Avro library's internals, + # but should indicate a deserialization problem + assert "Error trying to deserialize avro data" in str(excinfo.value) + + +def test_kafka_consumer_with_invalid_avro_schema(kafka_event_with_avro_data, lambda_context): + # GIVEN + # An intentionally invalid Avro schema with a malformed fields definition + avro_schema = """ + { + "type": "record", + "name": "User", + "namespace": "com.example", + "fields": [ "invalid schema" ] + } + """ + + # A Kafka consumer configured with the invalid schema + schema_config = SchemaConfig(value_schema_type="AVRO", value_schema=avro_schema) + + @kafka_consumer(schema_config=schema_config) + def lambda_handler(event: ConsumerRecords, context): + # This should never be reached if deserializer fails + return event.record.value + + # WHEN/THEN + # The handler should fail during initialization when it tries to parse the schema + # and raise a specific schema parser error + with pytest.raises(KafkaConsumerAvroSchemaParserError) as excinfo: + lambda_handler(kafka_event_with_avro_data, lambda_context) + + # The exact error message may vary depending on the Avro library's internals, + # but should indicate a schema parsing problem + assert "Invalid Avro schema.
Please ensure the provided avro schema is valid:" in str(excinfo.value) + + +def test_kafka_consumer_with_key_deserialization( + kafka_event_with_avro_data, + lambda_context, + avro_value_schema, + avro_key_schema, +): + """Test deserializing both key and value with different schemas and serializers.""" + + key_value_result = {} + + # GIVEN A Kafka consumer configured with Avro schemas for both key and value + # with different output serializers for each + schema_config = SchemaConfig( + value_schema_type="AVRO", + value_schema=avro_value_schema, + value_output_serializer=UserValueDataClass, + key_schema_type="AVRO", + key_schema=avro_key_schema, + key_output_serializer=UserKeyClass, + ) + + @kafka_consumer(schema_config=schema_config) + def lambda_handler(event: ConsumerRecords, context): + record = next(event.records) + key_value_result["key_type"] = type(record.key).__name__ + key_value_result["key_id"] = record.key.user_id + key_value_result["value_type"] = type(record.value).__name__ + key_value_result["value_name"] = record.value.name + key_value_result["value_age"] = record.value.age + return {"processed": True} + + # WHEN + # The handler processes the Kafka event, deserializing both key and value + result = lambda_handler(kafka_event_with_avro_data, lambda_context) + + # THEN + # The handler should return success and the captured properties should match expectations + assert result == {"processed": True} + + # Key should be correctly deserialized into a UserKeyClass instance + assert key_value_result["key_type"] == "UserKeyClass" + assert key_value_result["key_id"] == "user-123" + + # Value should be correctly deserialized into a UserValueDataClass instance + assert key_value_result["value_type"] == "UserValueDataClass" + assert key_value_result["value_name"] == "John Doe" + assert key_value_result["value_age"] == 30 + + +def test_kafka_consumer_without_avro_value_schema(): + # GIVEN + # A scenario where AVRO schema type is specified for value + # but no actual schema is provided + + # WHEN/THEN + # SchemaConfig initialization should fail with an appropriate error + with pytest.raises(KafkaConsumerMissingSchemaError) as excinfo: + SchemaConfig(value_schema_type="AVRO", value_schema=None) + + # Verify the error message mentions 'value_schema' + assert "value_schema" in str(excinfo.value) + + +def test_kafka_consumer_without_avro_key_schema(): + # GIVEN + # A scenario where AVRO schema type is specified for key + # but no actual schema is provided + + # WHEN/THEN + # SchemaConfig initialization should fail with an appropriate error + with pytest.raises(KafkaConsumerMissingSchemaError) as excinfo: + SchemaConfig(key_schema_type="AVRO", key_schema=None) + + # Verify the error message mentions 'key_schema' + assert "key_schema" in str(excinfo.value) diff --git a/tests/functional/kafka_consumer/_protobuf/__init__.py b/tests/functional/kafka_consumer/_protobuf/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/kafka_consumer/_protobuf/test_kafka_consumer_with_protobuf.py b/tests/functional/kafka_consumer/_protobuf/test_kafka_consumer_with_protobuf.py new file mode 100644 index 00000000000..0fbc07158eb --- /dev/null +++ b/tests/functional/kafka_consumer/_protobuf/test_kafka_consumer_with_protobuf.py @@ -0,0 +1,337 @@ +import base64 +from copy import deepcopy +from dataclasses import dataclass + +import pytest + +from aws_lambda_powertools.utilities.kafka.consumer_records import ConsumerRecords +from aws_lambda_powertools.utilities.kafka.exceptions 
import ( + KafkaConsumerDeserializationError, + KafkaConsumerMissingSchemaError, +) +from aws_lambda_powertools.utilities.kafka.kafka_consumer import kafka_consumer +from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + +# Import the generated protobuf classes +from .user_pb2 import Key, User + + +@pytest.fixture +def proto_encoded_value(): + # Create a User protobuf message + user = User() + user.name = "John Doe" + user.age = 30 + # Serialize and encode in base64 + return base64.b64encode(user.SerializeToString()).decode("utf-8") + + +@pytest.fixture +def proto_encoded_key(): + # Create a Key protobuf message + key = Key() + key.user_id = "user-123" + # Serialize and encode in base64 + return base64.b64encode(key.SerializeToString()).decode("utf-8") + + +@pytest.fixture +def kafka_event_with_proto_data(proto_encoded_value, proto_encoded_key): + return { + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:123456789012:cluster/my-cluster/abcdefg", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 1, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": proto_encoded_key, + "value": proto_encoded_value, + "headers": [{"headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]}], + }, + ], + }, + } + + +@dataclass +class UserValueDataClass: + name: str + age: int + + +@dataclass +class UserKeyClass: + user_id: str + + +def test_kafka_consumer_with_protobuf(kafka_event_with_proto_data, lambda_context): + # GIVEN A Kafka consumer configured to deserialize Protobuf data + # using the User protobuf message type as the schema + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=User, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Return the deserialized record value for verification + return event.record.value + + # WHEN The handler processes a Kafka event containing Protobuf-encoded data + result = handler(kafka_event_with_proto_data, lambda_context) + + # THEN The Protobuf data should be correctly deserialized into a dictionary + # with the expected field values from the User message + assert result["name"] == "John Doe" + assert result["age"] == 30 + + +def test_kafka_consumer_with_proto_and_dataclass( + kafka_event_with_proto_data, + lambda_context, +): + # GIVEN A Kafka consumer configured to deserialize Protobuf data + # using the User message type as the schema and convert the result to a UserValueDataClass instance + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=User, + value_output_serializer=UserValueDataClass, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Extract the deserialized and serialized value + # which should be a UserValueDataClass instance + value: UserValueDataClass = event.record.value + return value + + # WHEN The handler processes a Kafka event containing Protobuf-encoded data + # which is deserialized and then serialized to a dataclass + result = handler(kafka_event_with_proto_data, lambda_context) + + # THEN The result should be a UserValueDataClass instance + # with the correct property values from the original Protobuf message + assert isinstance(result, UserValueDataClass) + assert result.name == "John Doe" + assert result.age == 30 + + +def test_kafka_consumer_with_invalid_proto_data(kafka_event_with_proto_data, lambda_context): + """Test error handling when Protobuf data is 
invalid.""" + # GIVEN A Kafka event with deliberately corrupted Protobuf data + invalid_data = base64.b64encode(b"invalid protobuf data").decode("utf-8") + kafka_event_with_proto_data_temp = deepcopy(kafka_event_with_proto_data) + kafka_event_with_proto_data_temp["records"]["my-topic-1"][0]["value"] = invalid_data + + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=User, + ) + + @kafka_consumer(schema_config=schema_config) + def lambda_handler(event: ConsumerRecords, context): + # This should never be reached if deserializer fails + record = next(event.records) + return record.value + + # WHEN/THEN + # The handler should fail to process the invalid Avro data + # and raise a specific deserialization error + with pytest.raises(KafkaConsumerDeserializationError) as excinfo: + lambda_handler(kafka_event_with_proto_data_temp, lambda_context) + + # The exact error message may vary depending on the Protobuf library's internals, + # but should indicate a deserialization problem + assert "Error trying to deserialize protobuf data" in str(excinfo.value) + + +def test_kafka_consumer_with_key_deserialization( + kafka_event_with_proto_data, + lambda_context, +): + # GIVEN A Kafka consumer configured to deserialize only the key using Protobuf + # and serialize it to a UserKeyClass instance + schema_config = SchemaConfig( + key_schema_type="PROTOBUF", + key_schema=Key, + key_output_serializer=UserKeyClass, + ) + + @kafka_consumer(schema_config=schema_config) + def lambda_handler(event: ConsumerRecords, context): + key: UserKeyClass = event.record.key + return key + + # WHEN The handler processes a Kafka event, deserializing only the key portion + # while leaving the value in its original format + result = lambda_handler(kafka_event_with_proto_data, lambda_context) + + # THEN The key should be properly deserialized from Protobuf and serialized to a UserKeyClass + # with the expected user_id value + assert result.user_id == "user-123" + assert isinstance(result, UserKeyClass) + + +def test_kafka_consumer_with_wrong_proto_message_class(kafka_event_with_proto_data, lambda_context): + # GIVEN + # A Kafka consumer configured with the wrong Protobuf message class (Key instead of User) + # for deserializing the value payload + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=Key, # Incorrect schema for the value data + ) + + @kafka_consumer(schema_config=schema_config) + def lambda_handler(event: ConsumerRecords, context): + record = next(event.records) + return record.value + + # WHEN The handler processes a Kafka event with Protobuf data that doesn't match the schema + response = lambda_handler(kafka_event_with_proto_data, lambda_context) + + # THEN The deserialization should return an empty result + assert not response + + +def test_kafka_consumer_with_custom_function( + kafka_event_with_proto_data, + lambda_context, +): + # GIVEN A custom serialization function that removes the age field from the dictionary + def dict_output(data: dict) -> dict: + # removing age key + del data["age"] + return data + + # A Kafka consumer configured with Protobuf schema deserialization + # and a custom function for output transformation + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=User, + value_output_serializer=dict_output, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Capture the results to verify + return event.record.value + + # WHEN The handler processes the Kafka event 
+ +def test_kafka_consumer_with_custom_function( + kafka_event_with_proto_data, + lambda_context, +): + # GIVEN A custom serialization function that removes the age field from the dictionary + def dict_output(data: dict) -> dict: + # removing age key + del data["age"] + return data + + # A Kafka consumer configured with Protobuf schema deserialization + # and a custom function for output transformation + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=User, + value_output_serializer=dict_output, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Capture the results to verify + return event.record.value + + # WHEN The handler processes the Kafka event containing Protobuf-encoded data + # and applies the custom transformation function to the output + result = handler(kafka_event_with_proto_data, lambda_context) + + # THEN The Protobuf data should be correctly deserialized and transformed + # with the name field intact but the age field removed + assert result["name"] == "John Doe" + assert "age" not in result + + +def test_kafka_consumer_with_multiple_records(lambda_context): + """Test Kafka consumer with multiple records.""" + + # GIVEN + # Two distinct Protobuf User messages to create multiple records + # First user: John Doe, age 30 + user1 = User() + user1.name = "John Doe" + user1.age = 30 + value1 = base64.b64encode(user1.SerializeToString()).decode("utf-8") + + # Second user: Jane Smith, age 25 + user2 = User() + user2.name = "Jane Smith" + user2.age = 25 + value2 = base64.b64encode(user2.SerializeToString()).decode("utf-8") + + # Create event with multiple records + event = { + "eventSource": "aws:kafka", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "value": value1, + }, + { + "topic": "my-topic-1", + "partition": 0, + "offset": 16, + "timestamp": 1545084651000, + "timestampType": "CREATE_TIME", + "value": value2, + }, + ], + }, + } + + # Create a list to capture results + processed_records = [] + + schema_config = SchemaConfig( + value_schema_type="PROTOBUF", + value_schema=User, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + for record in event.records: + processed_records.append({"name": record.value["name"], "age": record.value["age"]}) + return {"processed": len(processed_records)} + + # WHEN + # The handler processes the Kafka event containing multiple records + result = handler(event, lambda_context) + + # THEN + # The handler should successfully process both records + # and return the correct count + assert result == {"processed": 2} + + # All records should be correctly deserialized with proper values + assert len(processed_records) == 2 + + # First record should contain John Doe's details + assert processed_records[0]["name"] == "John Doe" + assert processed_records[0]["age"] == 30 + + # Second record should contain Jane Smith's details + assert processed_records[1]["name"] == "Jane Smith" + assert processed_records[1]["age"] == 25 + + +def test_kafka_consumer_without_protobuf_value_schema(): + # GIVEN + # A scenario where PROTOBUF schema type is specified for the value + # but no actual schema class is provided + + # WHEN/THEN + # SchemaConfig initialization should fail with an appropriate error + with pytest.raises(KafkaConsumerMissingSchemaError) as excinfo: + SchemaConfig(value_schema_type="PROTOBUF", value_schema=None) + + # Verify the error message mentions the missing value schema + assert "value_schema" in str(excinfo.value) + assert "PROTOBUF" in str(excinfo.value) + + +def test_kafka_consumer_without_protobuf_key_schema(): + # GIVEN + # A scenario where PROTOBUF schema type is specified for the key + # but no actual schema class is provided + + # WHEN/THEN + # SchemaConfig initialization should fail with an appropriate error + with pytest.raises(KafkaConsumerMissingSchemaError) as excinfo: + SchemaConfig(key_schema_type="PROTOBUF", key_schema=None) + + # Verify the error message mentions the missing key schema + assert "key_schema" in str(excinfo.value) + assert "PROTOBUF" in str(excinfo.value) diff --git
a/tests/functional/kafka_consumer/_protobuf/user.proto b/tests/functional/kafka_consumer/_protobuf/user.proto new file mode 100644 index 00000000000..9eec9196f99 --- /dev/null +++ b/tests/functional/kafka_consumer/_protobuf/user.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package com.example; + +message User { + string name = 1; + int32 age = 2; +} + +message Key { + string user_id = 3; +} diff --git a/tests/functional/kafka_consumer/_protobuf/user_pb2.py b/tests/functional/kafka_consumer/_protobuf/user_pb2.py new file mode 100644 index 00000000000..034c7545ede --- /dev/null +++ b/tests/functional/kafka_consumer/_protobuf/user_pb2.py @@ -0,0 +1,31 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# Protobuf Python Version: 6.30.2 +"""Generated protocol buffer code.""" + +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 6, 30, 2, "", "user.proto") +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\nuser.proto\x12\x0b\x63om.example"!\n\x04User\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03\x61ge\x18\x02 \x01(\x05"\x16\n\x03Key\x12\x0f\n\x07user_id\x18\x03 \x01(\tb\x06proto3', # noqa: E501 +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "user_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_USER"]._serialized_start = 27 + _globals["_USER"]._serialized_end = 60 + _globals["_KEY"]._serialized_start = 62 + _globals["_KEY"]._serialized_end = 84 +# @@protoc_insertion_point(module_scope) diff --git a/tests/functional/kafka_consumer/_pydantic/test_kafka_consumer_with_pydantic.py b/tests/functional/kafka_consumer/_pydantic/test_kafka_consumer_with_pydantic.py new file mode 100644 index 00000000000..58c05833e1e --- /dev/null +++ b/tests/functional/kafka_consumer/_pydantic/test_kafka_consumer_with_pydantic.py @@ -0,0 +1,226 @@ +import base64 +import json +from typing import Annotated, Literal, Union + +import pytest +from pydantic import BaseModel, Field + +from aws_lambda_powertools.utilities.kafka.consumer_records import ConsumerRecords +from aws_lambda_powertools.utilities.kafka.kafka_consumer import kafka_consumer +from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + + +@pytest.fixture +def json_encoded_value(): + data = {"name": "John Doe", "age": 30} + return base64.b64encode(json.dumps(data).encode("utf-8")).decode("utf-8") + + +@pytest.fixture +def json_encoded_key(): + data = {"user_id": "123"} + return base64.b64encode(json.dumps(data).encode("utf-8")).decode("utf-8") + + +@pytest.fixture +def kafka_event_with_json_data(json_encoded_value, json_encoded_key): + return { + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:123456789012:cluster/my-cluster/abcdefg", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": json_encoded_key, + "value": json_encoded_value, + "headers": [{"headerKey": [104, 101, 97, 
100, 101, 114, 86, 97, 108, 117, 101]}], + }, + ], + }, + } + + +class UserValueModel(BaseModel): + name: str + age: int + + +class UserKeyModel(BaseModel): + user_id: str + + +def test_kafka_consumer_with_json_value_and_pydantic(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka consumer configured to deserialize JSON data + # and convert it to a Pydantic model instance + schema_config = SchemaConfig(value_schema_type="JSON", value_output_serializer=UserValueModel) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Extract the deserialized and serialized value + # which should be a UserValueModel instance + value: UserValueModel = event.record.value + return value + + # WHEN + # The handler processes a Kafka event containing JSON-encoded data + # which is deserialized into a dictionary and then converted to a Pydantic model + result = handler(kafka_event_with_json_data, lambda_context) + + # THEN + # The result should be a UserValueModel instance with the correct properties + assert isinstance(result, UserValueModel) + assert result.name == "John Doe" + assert result.age == 30 + + +def test_kafka_consumer_with_json_value_and_union_tag(kafka_event_with_json_data, lambda_context): + """Test Kafka consumer with JSON deserialization and a Pydantic discriminated union output serializer.""" + + class UserValueModel(BaseModel): + name: Literal["John Doe"] + age: int + + class UserValueModel2(BaseModel): + name: Literal["Not using"] + email: str + + UnionModel = Annotated[Union[UserValueModel, UserValueModel2], Field(discriminator="name")] + + # GIVEN + # A Kafka consumer configured to deserialize JSON data + # and convert it to a Pydantic model instance selected via a discriminated union + schema_config = SchemaConfig(value_schema_type="JSON", value_output_serializer=UnionModel) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Extract the deserialized and serialized value + # which should be a UserValueModel instance + value: UserValueModel = event.record.value + return value + + # WHEN + # The handler processes a Kafka event containing JSON-encoded data + # which is deserialized into a dictionary and then converted to a Pydantic model + result = handler(kafka_event_with_json_data, lambda_context) + + # THEN + # The result should be a UserValueModel instance with the correct properties + assert isinstance(result, UserValueModel) + assert result.name == "John Doe" + assert result.age == 30
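+ + # NOTE: a minimal sketch of the discriminated-union selection above, exercised + # directly with Pydantic v2's TypeAdapter (an illustration of standard Pydantic + # behaviour, not a claim about this utility's internals): + # + # from pydantic import TypeAdapter + # model = TypeAdapter(UnionModel).validate_python({"name": "John Doe", "age": 30}) + # assert isinstance(model, UserValueModel) # chosen via the "name" discriminator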
+ +def test_kafka_consumer_with_json_key_and_pydantic(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka consumer configured to deserialize only the key using JSON + # and convert it to a Pydantic UserKeyModel instance + schema_config = SchemaConfig( + key_schema_type="JSON", + key_output_serializer=UserKeyModel, + ) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Extract the deserialized key to verify + key: UserKeyModel = event.record.key + return key + + # WHEN + # The handler processes a Kafka event, deserializing only the key portion as JSON + # while leaving the value in its original format + result = handler(kafka_event_with_json_data, lambda_context) + + # THEN + # The key should be properly deserialized from JSON and converted to a UserKeyModel + # with the expected user_id value + assert isinstance(result, UserKeyModel) + assert result.user_id == "123" + + +def test_kafka_consumer_with_multiple_records(lambda_context): + # GIVEN + # Three different user records to process + # First user: John Doe, age 30 + data1 = {"name": "John Doe", "age": 30} + # Second user: Jane Smith, age 25 + data2 = {"name": "Jane Smith", "age": 25} + # Third user: Bob Johnson, age 40 + data3 = {"name": "Bob Johnson", "age": 40} + + # Base64-encoded JSON data for each record + encoded1 = base64.b64encode(json.dumps(data1).encode("utf-8")).decode("utf-8") + encoded2 = base64.b64encode(json.dumps(data2).encode("utf-8")).decode("utf-8") + encoded3 = base64.b64encode(json.dumps(data3).encode("utf-8")).decode("utf-8") + + # A Kafka event containing multiple records across different offsets + multi_record_event = { + "eventSource": "aws:kafka", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": None, + "value": encoded1, + "headers": [], + }, + { + "topic": "my-topic-1", + "partition": 0, + "offset": 16, + "timestamp": 1545084651987, + "timestampType": "CREATE_TIME", + "key": None, + "value": encoded2, + "headers": [], + }, + { + "topic": "my-topic-1", + "partition": 0, + "offset": 17, + "timestamp": 1545084652987, + "timestampType": "CREATE_TIME", + "key": None, + "value": encoded3, + "headers": [], + }, + ], + }, + } + + # A list to capture processed record details + processed_records = [] + + # A Kafka consumer configured to deserialize JSON and convert to Pydantic models + schema_config = SchemaConfig(value_schema_type="JSON", value_output_serializer=UserValueModel) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Process each record and collect its properties + for record in event.records: + processed_records.append({"name": record.value.name, "age": record.value.age}) + return {"processed": len(processed_records)} + + # WHEN + # The handler processes the Kafka event containing multiple JSON records + result = handler(multi_record_event, lambda_context) + + # THEN + # The handler should successfully process all three records + # and return the correct count + assert result == {"processed": 3} + assert len(processed_records) == 3 + + # All three users should be correctly deserialized and processed + # regardless of their order in the event + assert any(r["name"] == "John Doe" and r["age"] == 30 for r in processed_records) + assert any(r["name"] == "Jane Smith" and r["age"] == 25 for r in processed_records) + assert any(r["name"] == "Bob Johnson" and r["age"] == 40 for r in processed_records) diff --git a/tests/functional/kafka_consumer/conftest.py b/tests/functional/kafka_consumer/conftest.py new file mode 100644 index 00000000000..49ac95a3d6b --- /dev/null +++ b/tests/functional/kafka_consumer/conftest.py @@ -0,0 +1,17 @@ +import pytest + + +class LambdaContext: + def __init__(self): + self.function_name = "test-func" + self.memory_limit_in_mb = 128 + self.invoked_function_arn = "arn:aws:lambda:eu-west-1:809313241234:function:test-func" + self.aws_request_id = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + def get_remaining_time_in_millis(self) -> int: + return 1000 + + +@pytest.fixture +def lambda_context(): + return LambdaContext() diff --git a/tests/functional/kafka_consumer/required_dependencies/__init__.py b/tests/functional/kafka_consumer/required_dependencies/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/kafka_consumer/required_dependencies/test_kafka_consumer.py b/tests/functional/kafka_consumer/required_dependencies/test_kafka_consumer.py
new file mode 100644 index 00000000000..a5240eb4d12 --- /dev/null +++ b/tests/functional/kafka_consumer/required_dependencies/test_kafka_consumer.py @@ -0,0 +1,331 @@ +import base64 +import json +from copy import deepcopy +from dataclasses import dataclass + +import pytest + +from aws_lambda_powertools.utilities.kafka.consumer_records import ConsumerRecords +from aws_lambda_powertools.utilities.kafka.exceptions import ( + KafkaConsumerDeserializationError, +) +from aws_lambda_powertools.utilities.kafka.kafka_consumer import kafka_consumer +from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig + + +@pytest.fixture +def json_encoded_value(): + data = {"name": "John Doe", "age": 30} + return base64.b64encode(json.dumps(data).encode("utf-8")).decode("utf-8") + + +@pytest.fixture +def json_encoded_key(): + data = {"user_id": "123"} + return base64.b64encode(json.dumps(data).encode("utf-8")).decode("utf-8") + + +@pytest.fixture +def kafka_event_with_json_data(json_encoded_value, json_encoded_key): + return { + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:123456789012:cluster/my-cluster/abcdefg", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": json_encoded_key, + "value": json_encoded_value, + "headers": [{"headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]}], + }, + ], + }, + } + + +@dataclass +class UserValueDataClass: + name: str + age: int + + +@dataclass +class UserKeyClass: + user_id: str + + +def test_kafka_consumer_with_json(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka consumer configured to deserialize JSON data + # without any additional output serialization + schema_config = SchemaConfig(value_schema_type="JSON") + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Return the deserialized JSON value for verification + return event.record.value + + # WHEN + # The handler processes a Kafka event containing JSON-encoded data + result = handler(kafka_event_with_json_data, lambda_context) + + # THEN + # The JSON should be correctly deserialized into a Python dictionary + # with the expected field values + assert result["name"] == "John Doe" + assert result["age"] == 30 + + +def test_kafka_consumer_with_json_and_dataclass(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka consumer configured to deserialize JSON data + # and convert it to a UserValueDataClass instance + schema_config = SchemaConfig(value_schema_type="JSON", value_output_serializer=UserValueDataClass) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Extract the deserialized and serialized value + # which should be a UserValueDataClass instance + value: UserValueDataClass = event.record.value + return value + + # WHEN + # The handler processes a Kafka event containing JSON-encoded data + # which is deserialized into a dictionary and then converted to a dataclass + result = handler(kafka_event_with_json_data, lambda_context) + + # THEN + # The result should be a UserValueDataClass instance + # with the correct property values from the original JSON + assert isinstance(result, UserValueDataClass) + assert result.name == "John Doe" + assert result.age == 30 + + +def test_kafka_consumer_with_invalid_json_data(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka event with raw string data that is not 
valid base64-encoded JSON + invalid_data = "invalid json data" + kafka_event_with_json_data = deepcopy(kafka_event_with_json_data) + kafka_event_with_json_data["records"]["my-topic-1"][0]["value"] = invalid_data + + schema_config = SchemaConfig(value_schema_type="JSON") + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + return event.record.value + + # WHEN/THEN + # The handler should fail to process the invalid JSON data + # and raise a specific deserialization error + with pytest.raises(KafkaConsumerDeserializationError) as excinfo: + handler(kafka_event_with_json_data, lambda_context) + + # Ensure the error contains useful diagnostic information + assert "Error trying to deserialize json data" in str(excinfo.value) + + +def test_kafka_consumer_with_multiple_records_json(lambda_context): + # GIVEN + # Three different user records to process + # First user: John Doe, age 30 + data1 = {"name": "John Doe", "age": 30} + # Second user: Jane Smith, age 25 + data2 = {"name": "Jane Smith", "age": 25} + # Third user: Bob Johnson, age 40 + data3 = {"name": "Bob Johnson", "age": 40} + + # Base64-encoded JSON data for each record + encoded1 = base64.b64encode(json.dumps(data1).encode("utf-8")).decode("utf-8") + encoded2 = base64.b64encode(json.dumps(data2).encode("utf-8")).decode("utf-8") + encoded3 = base64.b64encode(json.dumps(data3).encode("utf-8")).decode("utf-8") + + # A Kafka event containing multiple records across different offsets + multi_record_event = { + "eventSource": "aws:kafka", + "records": { + "my-topic-1": [ + { + "topic": "my-topic-1", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": None, + "value": encoded1, + "headers": [], + }, + { + "topic": "my-topic-1", + "partition": 0, + "offset": 16, + "timestamp": 1545084651987, + "timestampType": "CREATE_TIME", + "key": None, + "value": encoded2, + "headers": [], + }, + { + "topic": "my-topic-1", + "partition": 0, + "offset": 17, + "timestamp": 1545084652987, + "timestampType": "CREATE_TIME", + "key": None, + "value": encoded3, + "headers": [], + }, + ], + }, + } + + # A list to capture processed record details + processed_records = [] + + # A Kafka consumer configured to deserialize JSON and convert to dataclass instances + schema_config = SchemaConfig(value_schema_type="JSON", value_output_serializer=UserValueDataClass) + + @kafka_consumer(schema_config=schema_config) + def handler(event: ConsumerRecords, context): + # Process each record and collect its properties + for record in event.records: + processed_records.append({"name": record.value.name, "age": record.value.age}) + return {"processed": len(processed_records)} + + # WHEN + # The handler processes the Kafka event containing multiple JSON records + result = handler(multi_record_event, lambda_context) + + # THEN + # The handler should successfully process all three records + # and return the correct count + assert result == {"processed": 3} + assert len(processed_records) == 3 + + # All three users should be correctly deserialized into dataclass instances + # and their properties should be accessible + assert any(r["name"] == "John Doe" and r["age"] == 30 for r in processed_records) + assert any(r["name"] == "Jane Smith" and r["age"] == 25 for r in processed_records) + assert any(r["name"] == "Bob Johnson" and r["age"] == 40 for r in processed_records) + + +def test_kafka_consumer_default_deserializer_value(kafka_event_with_json_data, lambda_context): + # GIVEN + # A 
simple string message encoded in base64 + raw_data = b"data" + base64_data = base64.b64encode(raw_data).decode("utf-8") + + # A Kafka event with the base64-encoded data as value + basic_kafka_event = deepcopy(kafka_event_with_json_data) + basic_kafka_event["records"]["my-topic-1"][0]["value"] = base64_data + + # A Kafka consumer with no schema configuration specified + # which should default to base64 decoding only + @kafka_consumer() + def handler(event: ConsumerRecords, context): + # Get the first record's value + record = next(event.records) + # Should receive UTF-8 decoded data with no further processing + return record.value + + # WHEN + # The handler processes the Kafka event with the default deserializer + result = handler(basic_kafka_event, lambda_context) + + # THEN + # The result should be the UTF-8 decoded string from the base64 data + # with no additional deserialization applied + assert result == "data" + assert isinstance(result, str) + + +def test_kafka_consumer_default_deserializer_key(kafka_event_with_json_data, lambda_context): + # GIVEN + # A simple string message encoded in base64 for the key + raw_key_data = b"data" + base64_key = base64.b64encode(raw_key_data).decode("utf-8") + + # A Kafka event with the base64-encoded data as key + kafka_event_with_key = deepcopy(kafka_event_with_json_data) + kafka_event_with_key["records"]["my-topic-1"][0]["key"] = base64_key + + # A Kafka consumer with no schema configuration specified + # which should default to base64 decoding only + @kafka_consumer() + def handler(event: ConsumerRecords, context): + # Get the first record's key + record = next(event.records) + # Should receive UTF-8 decoded key with no further processing + return record.key + + # WHEN + # The handler processes the Kafka event with the default key deserializer + result = handler(kafka_event_with_key, lambda_context) + + # THEN + # The key should be the UTF-8 decoded string from the base64 data + # with no additional deserialization or transformation applied + assert result == "data" + assert isinstance(result, str) + + +def test_kafka_consumer_default_deserializer_key_is_none(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka event with a null key in the record + kafka_event_with_null_key = deepcopy(kafka_event_with_json_data) + kafka_event_with_null_key["records"]["my-topic-1"][0]["key"] = None + + # A Kafka consumer with no schema configuration specified + @kafka_consumer() + def handler(event: ConsumerRecords, context): + # Get the first record's key, which should be None + record = next(event.records) + return record.key + + # WHEN + # The handler processes the Kafka event with a null key + result = handler(kafka_event_with_null_key, lambda_context) + + # THEN + # The key should be preserved as None without any attempt at deserialization + assert result is None
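+ + # NOTE: a minimal sketch of the plain base64 decoding the no-SchemaConfig tests + # above expect (stated as expected behaviour, not as the utility's implementation): + # + # import base64 + # assert base64.b64decode("ZGF0YQ==").decode("utf-8") == "data" # b64 of b"data"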
+ + +def test_kafka_consumer_metadata_fields(kafka_event_with_json_data, lambda_context): + # GIVEN + # A Kafka event with specific metadata we want to verify is preserved + kafka_event = deepcopy(kafka_event_with_json_data) + kafka_event["records"]["my-topic-1"][0]["key"] = None + + # A Kafka consumer with no schema configuration + # that returns the full record object for inspection + @kafka_consumer() + def handler(event: ConsumerRecords, context): + return event.record + + # WHEN + # The handler processes the Kafka event and returns the record object + result = handler(kafka_event, lambda_context) + + # THEN + # The record should preserve all original metadata fields + + # Original encoded values should be preserved + assert result.original_value == kafka_event["records"]["my-topic-1"][0]["value"] + assert result.original_key == kafka_event["records"]["my-topic-1"][0]["key"] + + # Original headers array should be preserved + assert result.original_headers == kafka_event["records"]["my-topic-1"][0]["headers"] + + # Headers should be parsed into a dictionary for easy access + assert result.headers == {"headerKey": b"headerValue"} + + # Core Kafka metadata fields should also be preserved + assert result.topic == kafka_event["records"]["my-topic-1"][0]["topic"] + assert result.partition == kafka_event["records"]["my-topic-1"][0]["partition"] + assert result.offset == kafka_event["records"]["my-topic-1"][0]["offset"] + assert result.timestamp == kafka_event["records"]["my-topic-1"][0]["timestamp"] diff --git a/tests/unit/data_classes/required_dependencies/test_kafka_event.py b/tests/unit/data_classes/required_dependencies/test_kafka_event.py index 8e4480a06d7..fc7bbf12a1a 100644 --- a/tests/unit/data_classes/required_dependencies/test_kafka_event.py +++ b/tests/unit/data_classes/required_dependencies/test_kafka_event.py @@ -34,6 +34,10 @@ def test_kafka_msk_event(): assert record.json_value == {"key": "value"} assert record.decoded_headers == {"headerKey": b"headerValue"} assert record.decoded_headers["HeaderKey"] == b"headerValue" + assert record.key_schema_metadata.data_format == raw_record["keySchemaMetadata"]["dataFormat"] + assert record.key_schema_metadata.schema_id == raw_record["keySchemaMetadata"]["schemaId"] + assert record.value_schema_metadata.data_format == raw_record["valueSchemaMetadata"]["dataFormat"] + assert record.value_schema_metadata.schema_id == raw_record["valueSchemaMetadata"]["schemaId"] assert parsed_event.record == records[0] for i in range(1, 3): @@ -68,6 +72,8 @@ def test_kafka_self_managed_event(): assert record.json_value == {"key": "value"} assert record.decoded_headers == {"headerKey": b"headerValue"} assert record.decoded_headers["HeaderKey"] == b"headerValue" + assert record.key_schema_metadata is None + assert record.value_schema_metadata is None assert parsed_event.record == records[0] diff --git a/tests/unit/parser/_pydantic/test_kafka.py b/tests/unit/parser/_pydantic/test_kafka.py index aabb669b805..779756831a9 100644 --- a/tests/unit/parser/_pydantic/test_kafka.py +++ b/tests/unit/parser/_pydantic/test_kafka.py @@ -55,6 +55,8 @@ def test_self_managed_kafka_event(): assert record.value == '{"key":"value"}' assert len(record.headers) == 1 assert record.headers[0]["headerKey"] == b"headerValue" + assert record.keySchemaMetadata is None + assert record.valueSchemaMetadata is None record: KafkaRecordModel = records[1] assert record.key is None @@ -82,6 +84,10 @@ def test_kafka_msk_event(): assert record.value == '{"key":"value"}' assert len(record.headers) == 1 assert record.headers[0]["headerKey"] == b"headerValue" + assert record.keySchemaMetadata.dataFormat == "AVRO" + assert record.keySchemaMetadata.schemaId == "1234" + assert record.valueSchemaMetadata.dataFormat == "AVRO" + assert record.valueSchemaMetadata.schemaId == "1234" for i in range(1, 3): record: KafkaRecordModel = records[i] assert record.key is None