diff --git a/.github/workflows/quality_check.yml b/.github/workflows/quality_check.yml
index c52e580e5d0..f0bc5dfb10a 100644
--- a/.github/workflows/quality_check.yml
+++ b/.github/workflows/quality_check.yml
@@ -63,6 +63,8 @@ jobs:
         run: make dev-quality-code
       - name: Checking third-party library licenses
         run: make check-licenses
+      - name: Checking and enforcing format
+        run: make format-check
       - name: Formatting and Linting
         run: make lint
       - name: Static type checking
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0a9cee41d5a..de0c36b21e0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,13 +12,13 @@ repos:
       - id: check-toml
   - repo: local
     hooks:
-      - id: black
-        name: formatting::black
-        entry: poetry run black
+      - id: ruff
+        name: formatting::ruff
+        entry: poetry run ruff format
         language: system
         types: [python]
       - id: ruff
-        name: linting-format::ruff
+        name: linting::ruff
         entry: poetry run ruff check
         language: system
         types: [python]
diff --git a/Makefile b/Makefile
index bde5516a832..843c8aab17e 100644
--- a/Makefile
+++ b/Makefile
@@ -25,8 +25,11 @@ dev-gitpod:
 check-licenses:
 	poetry run licensecheck -u poetry:dev
 
+format-check:
+	poetry run ruff format aws_lambda_powertools tests examples --check
+
 format:
-	poetry run black aws_lambda_powertools tests examples
+	poetry run ruff format aws_lambda_powertools tests examples
 
 lint: format
 	poetry run ruff check aws_lambda_powertools tests examples
diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py
index 81e59efa76f..399ec8052d1 100644
--- a/aws_lambda_powertools/event_handler/api_gateway.py
+++ b/aws_lambda_powertools/event_handler/api_gateway.py
@@ -1604,7 +1604,6 @@ def _validate_response_validation_error_http_code(
     response_validation_error_http_code: HTTPStatus | int | None,
     enable_validation: bool,
 ) -> HTTPStatus:
-
     if response_validation_error_http_code and not enable_validation:
         msg = "'response_validation_error_http_code' cannot be set when enable_validation is False."
         raise ValueError(msg)
@@ -1613,7 +1612,6 @@ def _validate_response_validation_error_http_code(
         not isinstance(response_validation_error_http_code, HTTPStatus)
         and response_validation_error_http_code is not None
     ):
-
         try:
             response_validation_error_http_code = HTTPStatus(response_validation_error_http_code)
         except ValueError:
diff --git a/aws_lambda_powertools/event_handler/openapi/models.py b/aws_lambda_powertools/event_handler/openapi/models.py
index afeb0a77750..53becd3f870 100644
--- a/aws_lambda_powertools/event_handler/openapi/models.py
+++ b/aws_lambda_powertools/event_handler/openapi/models.py
@@ -36,7 +36,6 @@ class OpenAPIExtensions(BaseModel):
     @model_validator(mode="before")
     def serialize_openapi_extension_v2(self):
         if isinstance(self, dict) and self.get("openapi_extensions"):
-
             openapi_extension_value = self.get("openapi_extensions")
 
             for extension_key in openapi_extension_value:
diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py
index e8340dd2b13..a85593c9db7 100644
--- a/aws_lambda_powertools/logging/logger.py
+++ b/aws_lambda_powertools/logging/logger.py
@@ -1205,15 +1205,15 @@ def flush_buffer(self) -> None:
         buffer = self._buffer_cache.get(tracer_id)
         if not buffer:
             return
-
+
         if not self._buffer_config:
             return
-
+
         # Check ALC level against buffer level
         lambda_log_level = self._get_aws_lambda_log_level()
         if lambda_log_level:
             # Check if buffer level is less verbose than ALC
-            if (logging.getLevelName(lambda_log_level) > logging.getLevelName(self._buffer_config.buffer_at_verbosity)):
+            if logging.getLevelName(lambda_log_level) > logging.getLevelName(self._buffer_config.buffer_at_verbosity):
                 warnings.warn(
                     "Advanced Logging Controls (ALC) Log Level is less verbose than Log Buffering Log Level. "
                     "Some logs might be missing",
diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py
index c70419873a1..d21a329e7c9 100644
--- a/aws_lambda_powertools/utilities/batch/base.py
+++ b/aws_lambda_powertools/utilities/batch/base.py
@@ -296,8 +296,7 @@ def _clean(self):
 
         if self._entire_batch_failed() and self.raise_on_entire_batch_failure:
             raise BatchProcessingError(
-                msg=f"All records failed processing. {len(self.exceptions)} individual errors logged "
-                f"separately below.",
+                msg=f"All records failed processing. {len(self.exceptions)} individual errors logged separately below.",
                 child_exceptions=self.exceptions,
             )
 
diff --git a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
index 3497227ed70..d314fde4cb3 100644
--- a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
@@ -314,7 +314,6 @@ def put_artifact(self, artifact_name: str, body: Any, content_type: str) -> None
 
         # So we are using if/else instead.
         if self.data.encryption_key:
-
             encryption_key_id = self.data.encryption_key.get_id
             encryption_key_type = self.data.encryption_key.get_type
             if encryption_key_type == "KMS":
diff --git a/aws_lambda_powertools/utilities/data_classes/transfer_family_event.py b/aws_lambda_powertools/utilities/data_classes/transfer_family_event.py
index 5326a344ed0..5949c58a318 100644
--- a/aws_lambda_powertools/utilities/data_classes/transfer_family_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/transfer_family_event.py
@@ -39,7 +39,6 @@ def source_ip(self) -> str:
 
 
 class TransferFamilyAuthorizerResponse:
-
     def _build_authentication_response(
         self,
         role_arn: str,
@@ -51,7 +50,6 @@ def _build_authentication_response(
         user_uid: int | None = None,
         public_keys: str | None = None,
     ) -> dict[str, Any]:
-
         response: dict[str, Any] = {}
 
         if home_directory_type == "PATH":
diff --git a/aws_lambda_powertools/utilities/data_masking/provider/base.py b/aws_lambda_powertools/utilities/data_masking/provider/base.py
index e8724c5a4de..7905fa57db8 100644
--- a/aws_lambda_powertools/utilities/data_masking/provider/base.py
+++ b/aws_lambda_powertools/utilities/data_masking/provider/base.py
@@ -82,7 +82,6 @@ def erase(
         masking_rules: dict | None = None,
         **kwargs,
     ) -> Any:
-
         result: Any = DATA_MASKING_STRING
 
         if not any([dynamic_mask, custom_mask, regex_pattern, mask_format, masking_rules]):
diff --git a/aws_lambda_powertools/utilities/idempotency/serialization/dataclass.py b/aws_lambda_powertools/utilities/idempotency/serialization/dataclass.py
index fc8b72252c0..6477eb17984 100644
--- a/aws_lambda_powertools/utilities/idempotency/serialization/dataclass.py
+++ b/aws_lambda_powertools/utilities/idempotency/serialization/dataclass.py
@@ -38,7 +38,6 @@ def from_dict(self, data: dict) -> DataClass:
 
     @classmethod
     def instantiate(cls, model_type: Any) -> BaseIdempotencySerializer:
-
         model_type = get_actual_type(model_type=model_type)
 
         if model_type is None:
diff --git a/aws_lambda_powertools/utilities/idempotency/serialization/pydantic.py b/aws_lambda_powertools/utilities/idempotency/serialization/pydantic.py
index 8ba45a40583..924d005ddbd 100644
--- a/aws_lambda_powertools/utilities/idempotency/serialization/pydantic.py
+++ b/aws_lambda_powertools/utilities/idempotency/serialization/pydantic.py
@@ -35,7 +35,6 @@ def from_dict(self, data: dict) -> BaseModel:
 
     @classmethod
     def instantiate(cls, model_type: Any) -> BaseIdempotencySerializer:
-
         model_type = get_actual_type(model_type=model_type)
 
         if model_type is None:
diff --git a/examples/batch_processing/src/getting_started_error_handling.py b/examples/batch_processing/src/getting_started_error_handling.py
index 7307f0d0d09..0b4b0637db7 100644
--- a/examples/batch_processing/src/getting_started_error_handling.py
+++ b/examples/batch_processing/src/getting_started_error_handling.py
@@ -12,8 +12,7 @@
 logger = Logger()
 
 
-class InvalidPayload(Exception):
-    ...
+class InvalidPayload(Exception): ...
 
 
 @tracer.capture_method
diff --git a/examples/data_masking/src/aws_encryption_provider_example.py b/examples/data_masking/src/aws_encryption_provider_example.py
index 2ef34a82934..51ca5fba310 100644
--- a/examples/data_masking/src/aws_encryption_provider_example.py
+++ b/examples/data_masking/src/aws_encryption_provider_example.py
@@ -16,7 +16,8 @@
     local_cache_capacity=200,
     max_cache_age_seconds=400,
     max_messages_encrypted=200,
-    max_bytes_encrypted=2000)
+    max_bytes_encrypted=2000,
+)
 
 data_masker = DataMasking(provider=encryption_provider)
 
diff --git a/examples/event_handler_bedrock_agents/cdk/bedrock_agent_stack.py b/examples/event_handler_bedrock_agents/cdk/bedrock_agent_stack.py
index e6d427bc8c3..17f07c47296 100644
--- a/examples/event_handler_bedrock_agents/cdk/bedrock_agent_stack.py
+++ b/examples/event_handler_bedrock_agents/cdk/bedrock_agent_stack.py
@@ -8,7 +8,6 @@
 
 
 class AgentsCdkStack(Stack):
-
     def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
         super().__init__(scope, construct_id, **kwargs)
 
diff --git a/examples/event_handler_graphql/src/enable_exceptions_batch_resolver.py b/examples/event_handler_graphql/src/enable_exceptions_batch_resolver.py
index f77374527ea..1d94e4693c8 100644
--- a/examples/event_handler_graphql/src/enable_exceptions_batch_resolver.py
+++ b/examples/event_handler_graphql/src/enable_exceptions_batch_resolver.py
@@ -14,8 +14,7 @@
 }
 
 
-class PostRelatedNotFound(Exception):
-    ...
+class PostRelatedNotFound(Exception): ...
 
 
 @app.batch_resolver(type_name="Query", field_name="relatedPosts", raise_on_error=True)  # (1)!
diff --git a/examples/event_handler_rest/src/raising_http_errors.py b/examples/event_handler_rest/src/raising_http_errors.py
index b62597eaf33..c792c7908ec 100644
--- a/examples/event_handler_rest/src/raising_http_errors.py
+++ b/examples/event_handler_rest/src/raising_http_errors.py
@@ -66,6 +66,7 @@ def service_error():
 def service_unavailable_error():
     raise ServiceUnavailableError("Service is temporarily unavailable")  # HTTP 503
 
+
 @app.get("/todos")
 @tracer.capture_method
 def get_todos():
diff --git a/examples/event_sources/src/cloudWatchDashboard.py b/examples/event_sources/src/cloudWatchDashboard.py
index ff8b896a806..583f97df68a 100644
--- a/examples/event_sources/src/cloudWatchDashboard.py
+++ b/examples/event_sources/src/cloudWatchDashboard.py
@@ -26,6 +26,6 @@ def lambda_handler(event: CloudWatchDashboardCustomWidgetEvent, context):
         "markdown": f"""
         Dashboard: {event.widget_context.dashboard_name}
         Time Range: {time_range.start} to {time_range.end}
-        Theme: {event.widget_context.theme or 'default'}
+        Theme: {event.widget_context.theme or "default"}
         """,
     }
diff --git a/examples/event_sources/src/s3_batch_operation.py b/examples/event_sources/src/s3_batch_operation.py
index e292d8cae47..81eb5181c41 100644
--- a/examples/event_sources/src/s3_batch_operation.py
+++ b/examples/event_sources/src/s3_batch_operation.py
@@ -33,5 +33,4 @@ def lambda_handler(event: S3BatchOperationEvent, context: LambdaContext):
     return response.asdict()
 
 
-def do_some_work(s3_client, src_bucket: str, src_key: str):
-    ...
+def do_some_work(s3_client, src_bucket: str, src_key: str): ...
diff --git a/examples/homepage/install/arm64/cdk_arm64.py b/examples/homepage/install/arm64/cdk_arm64.py
index ba0c153fe02..a49bdfcec6f 100644
--- a/examples/homepage/install/arm64/cdk_arm64.py
+++ b/examples/homepage/install/arm64/cdk_arm64.py
@@ -3,7 +3,6 @@
 
 
 class SampleApp(Stack):
-
     def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
         super().__init__(scope, construct_id, **kwargs)
 
diff --git a/examples/homepage/install/sar/cdk_sar.py b/examples/homepage/install/sar/cdk_sar.py
index 1d07f4a7098..19502444590 100644
--- a/examples/homepage/install/sar/cdk_sar.py
+++ b/examples/homepage/install/sar/cdk_sar.py
@@ -10,7 +10,6 @@
 
 
 class SampleApp(Stack):
-
     def __init__(self, scope: Construct, id_: str) -> None:
         super().__init__(scope, id_)
 
diff --git a/examples/homepage/install/x86_64/cdk_x86.py b/examples/homepage/install/x86_64/cdk_x86.py
index 0550766b3fa..159859bf409 100644
--- a/examples/homepage/install/x86_64/cdk_x86.py
+++ b/examples/homepage/install/x86_64/cdk_x86.py
@@ -3,7 +3,6 @@
 
 
 class SampleApp(Stack):
-
     def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
         super().__init__(scope, construct_id, **kwargs)
 
diff --git a/examples/idempotency/src/working_with_custom_idempotency_key_prefix.py b/examples/idempotency/src/working_with_custom_idempotency_key_prefix.py
index eacc2d3254b..b41c3c1c212 100644
--- a/examples/idempotency/src/working_with_custom_idempotency_key_prefix.py
+++ b/examples/idempotency/src/working_with_custom_idempotency_key_prefix.py
@@ -22,7 +22,7 @@ class Payment:
 class PaymentError(Exception): ...
 
 
-@idempotent(persistence_store=persistence_layer, key_prefix="my_custom_prefix") # (1)!
+@idempotent(persistence_store=persistence_layer, key_prefix="my_custom_prefix")  # (1)!
 def lambda_handler(event: dict, context: LambdaContext):
     try:
         payment: Payment = create_subscription_payment(event)
diff --git a/examples/idempotency/src/working_with_custom_idempotency_key_prefix_standalone.py b/examples/idempotency/src/working_with_custom_idempotency_key_prefix_standalone.py
index 2fb8bd92275..4092e23b5ae 100644
--- a/examples/idempotency/src/working_with_custom_idempotency_key_prefix_standalone.py
+++ b/examples/idempotency/src/working_with_custom_idempotency_key_prefix_standalone.py
@@ -29,15 +29,15 @@ class Order:
     data_keyword_argument="order",
     config=config,
     persistence_store=dynamodb,
-    key_prefix="my_custom_prefix", # (1)!
+    key_prefix="my_custom_prefix",  # (1)!
 )
-def process_order(order: Order):
+def process_order(order: Order):
     return f"processed order {order.order_id}"
 
 
 def lambda_handler(event: dict, context: LambdaContext):
     # see Lambda timeouts section
-    config.register_lambda_context(context)
+    config.register_lambda_context(context)
 
     order_item = OrderItem(sku="fake", description="sample")
     order = Order(item=order_item, order_id=1)
diff --git a/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py b/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py
index 776d5485741..6aa83a00018 100644
--- a/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py
+++ b/examples/jmespath_functions/src/powertools_json_idempotency_jmespath.py
@@ -16,8 +16,7 @@
 config = IdempotencyConfig(event_key_jmespath="powertools_json(body)")
 
 
-class PaymentError(Exception):
-    ...
+class PaymentError(Exception): ...
 
 
 @idempotent(config=config, persistence_store=persistence_layer)
diff --git a/examples/logger/src/append_keys_vs_extra.py b/examples/logger/src/append_keys_vs_extra.py
index 432dd1c23aa..5953df2de14 100644
--- a/examples/logger/src/append_keys_vs_extra.py
+++ b/examples/logger/src/append_keys_vs_extra.py
@@ -8,8 +8,7 @@
 logger = Logger(service="payment")
 
 
-class PaymentError(Exception):
-    ...
+class PaymentError(Exception): ...
 
 
 def lambda_handler(event, context):
diff --git a/examples/metrics/src/capture_cold_start_metric.py b/examples/metrics/src/capture_cold_start_metric.py
index 93468eba345..0d2da53b0bf 100644
--- a/examples/metrics/src/capture_cold_start_metric.py
+++ b/examples/metrics/src/capture_cold_start_metric.py
@@ -5,5 +5,4 @@
 
 
 @metrics.log_metrics(capture_cold_start_metric=True)
-def lambda_handler(event: dict, context: LambdaContext):
-    ...
+def lambda_handler(event: dict, context: LambdaContext): ...
diff --git a/examples/metrics/src/flush_metrics.py b/examples/metrics/src/flush_metrics.py
index a66ce07cbf7..72ad71f7c6e 100644
--- a/examples/metrics/src/flush_metrics.py
+++ b/examples/metrics/src/flush_metrics.py
@@ -5,7 +5,7 @@
 metrics = Metrics()
 
 
-def book_flight(flight_id: str, **kwargs):
+def book_flight(flight_id: str, **kwargs):
     # logic to book flight
     ...
     metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
diff --git a/examples/metrics/src/single_metric_with_different_timestamp.py b/examples/metrics/src/single_metric_with_different_timestamp.py
index bd99041c007..10a274bbc41 100644
--- a/examples/metrics/src/single_metric_with_different_timestamp.py
+++ b/examples/metrics/src/single_metric_with_different_timestamp.py
@@ -6,9 +6,7 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     for record in event:
-
         record_id: str = record.get("record_id")
         amount: int = record.get("amount")
         timestamp: int = record.get("timestamp")
diff --git a/examples/parameters/src/builtin_provider_dynamodb_custom_endpoint.py b/examples/parameters/src/builtin_provider_dynamodb_custom_endpoint.py
index e77506f27d7..3bc054f00fe 100644
--- a/examples/parameters/src/builtin_provider_dynamodb_custom_endpoint.py
+++ b/examples/parameters/src/builtin_provider_dynamodb_custom_endpoint.py
@@ -9,7 +9,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Usually an endpoint is not sensitive data, so we store it in DynamoDB Table
         endpoint_comments: Any = dynamodb_provider.get("comments_endpoint")
diff --git a/examples/parameters/src/builtin_provider_dynamodb_recursive_parameter.py b/examples/parameters/src/builtin_provider_dynamodb_recursive_parameter.py
index 7db0d4d913a..48f1cd9bcc1 100644
--- a/examples/parameters/src/builtin_provider_dynamodb_recursive_parameter.py
+++ b/examples/parameters/src/builtin_provider_dynamodb_recursive_parameter.py
@@ -9,7 +9,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Retrieve multiple parameters using HASH KEY
         all_parameters: Any = dynamodb_provider.get_multiple("config")
@@ -17,7 +16,6 @@ def lambda_handler(event: dict, context: LambdaContext):
         limit = 2
 
         for parameter, value in all_parameters.items():
-
             if parameter == "endpoint_comments":
                 endpoint_comments = value
 
diff --git a/examples/parameters/src/builtin_provider_dynamodb_single_parameter.py b/examples/parameters/src/builtin_provider_dynamodb_single_parameter.py
index 036058f2b33..490b32715c6 100644
--- a/examples/parameters/src/builtin_provider_dynamodb_single_parameter.py
+++ b/examples/parameters/src/builtin_provider_dynamodb_single_parameter.py
@@ -9,7 +9,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Usually an endpoint is not sensitive data, so we store it in DynamoDB Table
         endpoint_comments: Any = dynamodb_provider.get("comments_endpoint")
diff --git a/examples/parameters/src/builtin_provider_secret.py b/examples/parameters/src/builtin_provider_secret.py
index 449664c1863..5be600dd71c 100644
--- a/examples/parameters/src/builtin_provider_secret.py
+++ b/examples/parameters/src/builtin_provider_secret.py
@@ -11,7 +11,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Usually an endpoint is not sensitive data, so we store it in SSM Parameters
         endpoint_comments: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments")
diff --git a/examples/parameters/src/builtin_provider_ssm_with_no_recursive.py b/examples/parameters/src/builtin_provider_ssm_with_no_recursive.py
index 0f92d27bfbc..a3f9ff4b2a1 100644
--- a/examples/parameters/src/builtin_provider_ssm_with_no_recursive.py
+++ b/examples/parameters/src/builtin_provider_ssm_with_no_recursive.py
@@ -8,8 +8,7 @@
 ssm_provider = parameters.SSMProvider()
 
 
-class ConfigNotFound(Exception):
-    ...
+class ConfigNotFound(Exception): ...
 
 
 def lambda_handler(event: dict, context: LambdaContext):
@@ -22,7 +21,6 @@ def lambda_handler(event: dict, context: LambdaContext):
         endpoint_comments = "https://jsonplaceholder.typicode.com/comments/"
 
         for parameter, value in all_parameters.items():
-
             # query parameter is used to query endpoint
             if "query" in parameter:
                 endpoint_comments = f"{endpoint_comments}{value}"
diff --git a/examples/parameters/src/getting_started_secret.py b/examples/parameters/src/getting_started_secret.py
index 1f10394e834..4f03fc14293 100644
--- a/examples/parameters/src/getting_started_secret.py
+++ b/examples/parameters/src/getting_started_secret.py
@@ -7,7 +7,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Usually an endpoint is not sensitive data, so we store it in SSM Parameters
         endpoint_comments: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments")
diff --git a/examples/parameters/src/recursive_ssm_parameter_force_fetch.py b/examples/parameters/src/recursive_ssm_parameter_force_fetch.py
index 6082a0173d4..29bdfed7328 100644
--- a/examples/parameters/src/recursive_ssm_parameter_force_fetch.py
+++ b/examples/parameters/src/recursive_ssm_parameter_force_fetch.py
@@ -13,7 +13,6 @@ def lambda_handler(event: dict, context: LambdaContext):
         endpoint_comments = "https://jsonplaceholder.typicode.com/noexists/"
 
         for parameter, value in all_parameters.items():
-
             if parameter == "endpoint_comments":
                 endpoint_comments = value
 
diff --git a/examples/parameters/src/recursive_ssm_parameter_with_cache.py b/examples/parameters/src/recursive_ssm_parameter_with_cache.py
index 9cf48b39dde..7d1afe572bc 100644
--- a/examples/parameters/src/recursive_ssm_parameter_with_cache.py
+++ b/examples/parameters/src/recursive_ssm_parameter_with_cache.py
@@ -13,7 +13,6 @@ def lambda_handler(event: dict, context: LambdaContext):
         endpoint_comments = "https://jsonplaceholder.typicode.com/noexists/"
 
         for parameter, value in all_parameters.items():
-
             if parameter == "endpoint_comments":
                 endpoint_comments = value
 
diff --git a/examples/parameters/src/secret_force_fetch.py b/examples/parameters/src/secret_force_fetch.py
index 121d9f57bfb..3578cbc3a58 100644
--- a/examples/parameters/src/secret_force_fetch.py
+++ b/examples/parameters/src/secret_force_fetch.py
@@ -7,7 +7,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Usually an endpoint is not sensitive data, so we store it in SSM Parameters
         endpoint_comments: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments")
diff --git a/examples/parameters/src/secret_with_cache.py b/examples/parameters/src/secret_with_cache.py
index 8d3ed927107..ed9b16084ca 100644
--- a/examples/parameters/src/secret_with_cache.py
+++ b/examples/parameters/src/secret_with_cache.py
@@ -7,7 +7,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Usually an endpoint is not sensitive data, so we store it in SSM Parameters
         endpoint_comments: Any = parameters.get_parameter("/lambda-powertools/endpoint_comments")
diff --git a/examples/parameters/src/working_with_own_provider_s3.py b/examples/parameters/src/working_with_own_provider_s3.py
index d4f011a9e23..eb3326724ba 100644
--- a/examples/parameters/src/working_with_own_provider_s3.py
+++ b/examples/parameters/src/working_with_own_provider_s3.py
@@ -12,7 +12,6 @@
 
 
 def lambda_handler(event: dict, context: LambdaContext):
-
     try:
         # Retrieve a single parameter using key
         endpoint_comments: Any = s3_provider.get("comments_endpoint")
diff --git a/examples/tracer/src/disable_capture_error.py b/examples/tracer/src/disable_capture_error.py
index 59fc2d2376a..85497ee906b 100644
--- a/examples/tracer/src/disable_capture_error.py
+++ b/examples/tracer/src/disable_capture_error.py
@@ -9,8 +9,7 @@
 ENDPOINT = os.getenv("PAYMENT_API", "")
 
 
-class PaymentError(Exception):
-    ...
+class PaymentError(Exception): ...
 
 
 @tracer.capture_method(capture_error=False)
diff --git a/examples/tracer/src/ignore_endpoints.py b/examples/tracer/src/ignore_endpoints.py
index 0fe256aeee9..3b73af17481 100644
--- a/examples/tracer/src/ignore_endpoints.py
+++ b/examples/tracer/src/ignore_endpoints.py
@@ -13,8 +13,7 @@
 tracer.ignore_endpoint(hostname=f"*.{ENDPOINT}", urls=IGNORE_URLS)  # `.ENDPOINT`
 
 
-class PaymentError(Exception):
-    ...
+class PaymentError(Exception): ...
 
 
 @tracer.capture_method(capture_error=False)
diff --git a/examples/validation/src/getting_started_validator_decorator_function.py b/examples/validation/src/getting_started_validator_decorator_function.py
index 1e9b1bd2a09..3ad416c9211 100644
--- a/examples/validation/src/getting_started_validator_decorator_function.py
+++ b/examples/validation/src/getting_started_validator_decorator_function.py
@@ -12,8 +12,7 @@
 ALLOWED_IPS = parameters.get_parameter("/lambda-powertools/allowed_ips")
 
 
-class UserPermissionsError(Exception):
-    ...
+class UserPermissionsError(Exception): ...
 
 
 @dataclass
diff --git a/poetry.lock b/poetry.lock
index 6713e048044..223a3ab9af5 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -11,7 +11,7 @@ files = [
     {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
     {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
 ]
-markers = {main = "extra == \"all\" or extra == \"parser\""}
+markers = {main = "extra == \"parser\" or extra == \"all\""}
 
 [[package]]
 name = "anyio"
@@ -331,7 +331,7 @@ description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers
 optional = true
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "extra == \"all\" or extra == \"tracer\""
+markers = "extra == \"tracer\" or extra == \"all\""
 files = [
     {file = "aws_xray_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:cfbe6feea3d26613a2a869d14c9246a844285c97087ad8f296f901633554ad94"},
     {file = "aws_xray_sdk-2.14.0.tar.gz", hash = "sha256:aab843c331af9ab9ba5cefb3a303832a19db186140894a523edafc024cc0493c"},
@@ -400,53 +400,6 @@ test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)",
 toml = ["tomli (>=1.1.0) ; python_version < \"3.11\""]
 yaml = ["PyYAML"]
 
-[[package]]
-name = "black"
-version = "25.1.0"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.9"
-groups = ["dev"]
-files = [
-    {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"},
-    {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"},
-    {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"},
-    {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"},
-    {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"},
-    {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"},
-    {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"},
-    {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"},
-    {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"},
-    {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"},
-    {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"},
-    {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"},
-    {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"},
-    {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"},
"black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, - {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, - {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, - {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, - {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, - {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, - {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, - {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "boto3" version = "1.37.14" @@ -921,8 +874,8 @@ files = [ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -1426,7 +1379,7 @@ files = [ {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] -markers = {main = "python_version < \"3.10\" and (extra == \"all\" or extra == \"datamasking\")", dev = "python_version < \"3.10\""} +markers = {main = "python_version == \"3.9\" and (extra == \"all\" or extra == \"datamasking\")", dev = "python_version == \"3.9\""} [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} @@ -1607,10 +1560,10 @@ files = [ [package.dependencies] bytecode = [ + {version = ">=0.13.0", markers = "python_version < \"3.11\""}, {version = ">=0.16.0", markers = "python_version >= \"3.13.0\""}, {version = ">=0.15.1", markers = "python_version ~= \"3.12.0\""}, {version = ">=0.14.0", markers = "python_version ~= \"3.11.0\""}, - {version = ">=0.13.0", markers = "python_version < \"3.11.0\""}, ] envier = ">=0.6.1,<0.7.0" legacy-cgi = {version = ">=2.0.0", markers = "python_version >= \"3.13.0\""} @@ -1742,7 +1695,7 @@ description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = 
"sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -1773,7 +1726,7 @@ description = "Fastest Python implementation of JSON schema" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"all\" or extra == \"validation\"" +markers = "extra == \"validation\" or extra == \"all\"" files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -2229,8 +2182,6 @@ groups = ["main"] markers = "extra == \"all\" or extra == \"datamasking\"" files = [ {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, - {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, - {file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"}, ] [package.dependencies] @@ -2980,7 +2931,7 @@ description = "Python package for creating and manipulating graphs and networks" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version < \"3.10\"" +markers = "python_version == \"3.9\"" files = [ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, @@ -3221,7 +3172,7 @@ files = [ {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [package.dependencies] annotated-types = ">=0.6.0" @@ -3341,7 +3292,7 @@ files = [ {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" @@ -4536,7 +4487,7 @@ files = [ {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, ] -markers = {main = "extra == \"all\" or extra == \"parser\""} +markers = {main = "extra == \"parser\" or extra == \"all\""} [package.dependencies] typing-extensions = ">=4.12.0" @@ -4935,4 +4886,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<4.0.0" -content-hash = "d35f3e50c2f622dba2b22600ad4d77e60572a343ed425fae88b6a2af92ebb870" +content-hash = "405eccf2a4cab2b9b2bf21ab58938d58aa1f2fbbb12a7984462d28d83d40d82b" diff --git a/pyproject.toml b/pyproject.toml index 4baf97b6d6d..fdc343cf8b9 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -77,7 +77,6 @@ datamasking = ["aws-encryption-sdk", "jsonpath-ng"]
 [tool.poetry.group.dev.dependencies]
 coverage = { extras = ["toml"], version = "^7.6" }
 pytest = "^8.3.4"
-black = ">=24.8,<26.0"
 boto3 = "^1.26.164"
 isort = ">=5.13.2,<7.0.0"
 pytest-cov = ">=5,<7"
diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py
index bc804f5f499..dc64499d14f 100644
--- a/tests/e2e/utils/infrastructure.py
+++ b/tests/e2e/utils/infrastructure.py
@@ -63,7 +63,7 @@ def __init__(self) -> None:
         self._feature_infra_file = self.feature_path / "infrastructure.py"
         self._handlers_dir = self.feature_path / "handlers"
         self._cdk_out_dir: Path = CDK_OUT_PATH / self.feature_name
-        self._stack_outputs_file = f'{self._cdk_out_dir / "stack_outputs.json"}'
+        self._stack_outputs_file = f"{self._cdk_out_dir / 'stack_outputs.json'}"
 
         if not self._feature_infra_file.exists():
             raise FileNotFoundError(
diff --git a/tests/functional/event_handler/_pydantic/test_openapi_config.py b/tests/functional/event_handler/_pydantic/test_openapi_config.py
index 84dfceea5d4..f66a82c85d8 100644
--- a/tests/functional/event_handler/_pydantic/test_openapi_config.py
+++ b/tests/functional/event_handler/_pydantic/test_openapi_config.py
@@ -28,7 +28,6 @@ def handler():
 
 
 def test_prioritize_direct_parameters_over_stored_configuration():
-
     # GIVEN
     stored_config = {
         "title": "Stored API Title",
diff --git a/tests/functional/event_handler/_pydantic/test_openapi_with_pep563.py b/tests/functional/event_handler/_pydantic/test_openapi_with_pep563.py
index 0f5fa517f0e..35ce00b8482 100644
--- a/tests/functional/event_handler/_pydantic/test_openapi_with_pep563.py
+++ b/tests/functional/event_handler/_pydantic/test_openapi_with_pep563.py
@@ -59,7 +59,6 @@ def handler(
 
 
 def test_openapi_with_pep563_and_output_model():
-
     app = APIGatewayRestResolver()
 
     @app.get("/")
@@ -89,7 +88,6 @@ def handler() -> Todo:
 
 
 def test_openapi_with_pep563_and_annotated_body():
-
     app = APIGatewayRestResolver()
 
     @app.post("/todo")
diff --git a/tests/functional/event_handler/required_dependencies/appsync/test_appsync_batch_resolvers.py b/tests/functional/event_handler/required_dependencies/appsync/test_appsync_batch_resolvers.py
index 8896c8f19ef..2466ac6d6a3 100644
--- a/tests/functional/event_handler/required_dependencies/appsync/test_appsync_batch_resolvers.py
+++ b/tests/functional/event_handler/required_dependencies/appsync/test_appsync_batch_resolvers.py
@@ -988,7 +988,6 @@ async def get_user(event: list) -> list:
 
 
 def test_exception_handler_with_batch_resolver_and_raise_exception():
-
     # GIVEN a AppSyncResolver instance
     app = AppSyncResolver()
 
@@ -1049,7 +1048,6 @@ def create_something(event: AppSyncResolverEvent) -> list | None:  # noqa AA03 V
 
 
 def test_exception_handler_with_batch_resolver_and_no_raise_exception():
-
     # GIVEN a AppSyncResolver instance
     app = AppSyncResolver()
 
diff --git a/tests/functional/feature_flags/_boto3/test_schema_validation.py b/tests/functional/feature_flags/_boto3/test_schema_validation.py
index 23c06bf3ff2..b7bf8392ada 100644
--- a/tests/functional/feature_flags/_boto3/test_schema_validation.py
+++ b/tests/functional/feature_flags/_boto3/test_schema_validation.py
@@ -844,9 +844,7 @@ def test_validate_time_condition_between_days_range_invalid_condition_value(cond
         CONDITION_KEY: TimeKeys.CURRENT_DAY_OF_WEEK.value,
     }
     rule_name = "dummy"
-    match_str = (
-        f"condition value DAYS must represent a day of the week in 'TimeValues' enum, rule={rule_name}"  # noqa: E501
-    )
+    match_str = f"condition value DAYS must represent a day of the week in 'TimeValues' enum, rule={rule_name}"  # noqa: E501
     # WHEN calling validate_condition
     # THEN raise SchemaValidationError
     with pytest.raises(
diff --git a/tests/functional/logger/required_dependencies/test_logger.py b/tests/functional/logger/required_dependencies/test_logger.py
index ec6bd4d016b..e799dce9b60 100644
--- a/tests/functional/logger/required_dependencies/test_logger.py
+++ b/tests/functional/logger/required_dependencies/test_logger.py
@@ -182,12 +182,12 @@ def handler(event, context):
 
     # THEN verify the number of logs falls within the expected range
     logs = list(stdout.getvalue().strip().split("\n"))
-    assert (
-        len(logs) >= minimum_logs_excepted
-    ), f"Log count {len(logs)} should be at least {minimum_logs_excepted} for sampling rate {sampling_rate}"
-    assert (
-        len(logs) <= maximum_logs_excepted
-    ), f"Log count {len(logs)} should be at most {maximum_logs_excepted} for sampling rate {sampling_rate}"
+    assert len(logs) >= minimum_logs_excepted, (
+        f"Log count {len(logs)} should be at least {minimum_logs_excepted} for sampling rate {sampling_rate}"
+    )
+    assert len(logs) <= maximum_logs_excepted, (
+        f"Log count {len(logs)} should be at most {maximum_logs_excepted} for sampling rate {sampling_rate}"
+    )
 
 
 @pytest.mark.parametrize(
@@ -223,12 +223,12 @@ def handler(event, context):
 
     # THEN verify the number of logs falls within the expected range
     logs = list(stdout.getvalue().strip().split("\n"))
-    assert (
-        len(logs) >= minimum_logs_excepted
-    ), f"Log count {len(logs)} should be at least {minimum_logs_excepted} for sampling rate {sampling_rate}"
-    assert (
-        len(logs) <= maximum_logs_excepted
-    ), f"Log count {len(logs)} should be at most {maximum_logs_excepted} for sampling rate {sampling_rate}"
+    assert len(logs) >= minimum_logs_excepted, (
+        f"Log count {len(logs)} should be at least {minimum_logs_excepted} for sampling rate {sampling_rate}"
+    )
+    assert len(logs) <= maximum_logs_excepted, (
+        f"Log count {len(logs)} should be at most {maximum_logs_excepted} for sampling rate {sampling_rate}"
+    )
 
 
 def test_inject_lambda_context(lambda_context, stdout, service_name):
@@ -342,7 +342,6 @@ def handler(event, context):
 
 
 def test_inject_lambda_cold_start_with_provisioned_concurrency(monkeypatch, lambda_context, stdout, service_name):
-
     # GIVEN Provisioned Concurrency is enabled via AWS_LAMBDA_INITIALIZATION_TYPE environment variable
     # AND Logger's cold start flag is explicitly set to True (simulating fresh module import)
     monkeypatch.setenv("AWS_LAMBDA_INITIALIZATION_TYPE", "provisioned-concurrency")
diff --git a/tests/functional/logger/required_dependencies/test_powertools_logger_buffer.py b/tests/functional/logger/required_dependencies/test_powertools_logger_buffer.py
index 7b02a35551d..7ee3d4c97ff 100644
--- a/tests/functional/logger/required_dependencies/test_powertools_logger_buffer.py
+++ b/tests/functional/logger/required_dependencies/test_powertools_logger_buffer.py
@@ -50,7 +50,6 @@ def capture_multiple_logging_statements_output(stdout):
 
 @pytest.mark.parametrize("log_level", ["DEBUG", "WARNING", "INFO"])
 def test_logger_buffer_with_minimum_level_warning(log_level, stdout, service_name, monkeypatch):
-
     monkeypatch.setenv(constants.XRAY_TRACE_ID_ENV, "1-67c39786-5908a82a246fb67f3089263f")
 
     # GIVEN A logger configured with a buffer and minimum log level set to WARNING
@@ -545,5 +544,3 @@ def test_warning_when_alc_less_verbose_than_buffer(stdout, monkeypatch):
     # THEN another warning should be emitted about ALC and buffer level mismatch
     with pytest.warns(PowertoolsUserWarning, match="Advanced Logging Controls*"):
         logger.flush_buffer()
-
-
diff --git a/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py b/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
index bff9ac254fc..9d9c8f82450 100644
--- a/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
+++ b/tests/unit/data_classes/_boto3/test_code_pipeline_job_event.py
@@ -222,7 +222,6 @@ def get_object(Bucket: str, Key: str):
 
 
 def test_code_pipeline_put_artifact(mocker: MockerFixture):
-
     raw_content = json.dumps({"steve": "french"})
     artifact_content_type = "application/json"
     event = CodePipelineJobEvent(load_event("codePipelineEventData.json"))
@@ -268,7 +267,6 @@ def put_object(
 
 
 def test_code_pipeline_put_unencrypted_artifact(mocker: MockerFixture):
-
     raw_content = json.dumps({"steve": "french"})
     artifact_content_type = "application/json"
     event_without_artifact_encryption = load_event("codePipelineEventData.json")
diff --git a/tests/unit/data_classes/required_dependencies/test_dynamo_db_stream_event.py b/tests/unit/data_classes/required_dependencies/test_dynamo_db_stream_event.py
index 1611b38a243..8c6b62867ae 100644
--- a/tests/unit/data_classes/required_dependencies/test_dynamo_db_stream_event.py
+++ b/tests/unit/data_classes/required_dependencies/test_dynamo_db_stream_event.py
@@ -19,7 +19,6 @@
 
 
 def test_dynamodb_stream_trigger_event():
-
     raw_event = load_event("dynamoStreamEvent.json")
 
     parsed_event = DynamoDBStreamEvent(raw_event)
@@ -78,7 +77,6 @@ def test_dynamodb_stream_record_deserialization_large_int_without_trailing_zeros
 
 
 def test_dynamodb_stream_record_deserialization_zero_value():
-
     data = {
         "Keys": {"key1": {"attr1": "value1"}},
         "NewImage": {
diff --git a/tests/unit/data_classes/required_dependencies/test_transfer_family_event.py b/tests/unit/data_classes/required_dependencies/test_transfer_family_event.py
index 0ab7c9fe1c1..2e74d2d7457 100644
--- a/tests/unit/data_classes/required_dependencies/test_transfer_family_event.py
+++ b/tests/unit/data_classes/required_dependencies/test_transfer_family_event.py
@@ -22,7 +22,6 @@ def test_transfer_family_authorizer_event():
 
 @pytest.mark.parametrize("home_directory_type", ["LOGICAL", "PATH"])
 def test_build_authentication_response_s3(home_directory_type):
-
     # GIVEN a Authorizer response
     response = TransferFamilyAuthorizerResponse()
 
@@ -60,7 +59,6 @@ def test_build_authentication_response_s3(home_directory_type):
 
 @pytest.mark.parametrize("home_directory_type", ["LOGICAL", "PATH"])
 def test_build_authentication_response_efs(home_directory_type):
-
     # GIVEN a Authorizer response
     response = TransferFamilyAuthorizerResponse()
 
@@ -93,7 +91,6 @@
 
 
 def test_build_authentication_missing_home_directory():
-
     # GIVEN a Authorizer response
     response = TransferFamilyAuthorizerResponse()
 
diff --git a/tests/unit/event_handler/_pydantic/conftest.py b/tests/unit/event_handler/_pydantic/conftest.py
index d50d4e483ef..b88fc3e157d 100644
--- a/tests/unit/event_handler/_pydantic/conftest.py
+++ b/tests/unit/event_handler/_pydantic/conftest.py
@@ -4,7 +4,6 @@
 
 @pytest.fixture(scope="session")
 def pydanticv1_only():
-
     version = __version__.split(".")
     if version[0] != "1":
         pytest.skip("pydanticv1 test only")
@@ -12,7 +11,6 @@
 
 @pytest.fixture(scope="session")
 def pydanticv2_only():
-
     version = __version__.split(".")
     if version[0] != "2":
         pytest.skip("pydanticv2 test only")
diff --git a/tests/unit/event_handler/_pydantic/test_openapi_models_pydantic_v2.py b/tests/unit/event_handler/_pydantic/test_openapi_models_pydantic_v2.py
index dd6aba913a1..c426309f389 100644
--- a/tests/unit/event_handler/_pydantic/test_openapi_models_pydantic_v2.py
+++ b/tests/unit/event_handler/_pydantic/test_openapi_models_pydantic_v2.py
@@ -20,7 +20,6 @@ def test_openapi_extensions_with_invalid_key():
 
 
 def test_openapi_extensions_with_proxy_models():
-
     # GIVEN we create an models using OpenAPIExtensions as a "Proxy" Model
     class MyModelFoo(OpenAPIExtensions):
         foo: str
diff --git a/tests/unit/logger/required_dependencies/test_logger_buffer_cache.py b/tests/unit/logger/required_dependencies/test_logger_buffer_cache.py
index 814331844a2..25ed7ece631 100644
--- a/tests/unit/logger/required_dependencies/test_logger_buffer_cache.py
+++ b/tests/unit/logger/required_dependencies/test_logger_buffer_cache.py
@@ -6,7 +6,6 @@
 
 
 def test_initialization():
-
     # GIVEN a new instance of LoggerBufferCache
     logger_cache = LoggerBufferCache(1000)
 
diff --git a/tests/unit/parser/_pydantic/test_apigw_websockets.py b/tests/unit/parser/_pydantic/test_apigw_websockets.py
index 7b8a3c9ba46..36f6355d42f 100644
--- a/tests/unit/parser/_pydantic/test_apigw_websockets.py
+++ b/tests/unit/parser/_pydantic/test_apigw_websockets.py
@@ -31,7 +31,7 @@ def test_apigw_websocket_message_event():
     assert request_context.extended_request_id == raw_event["requestContext"]["extendedRequestId"]
 
     identity = request_context.identity
-    assert str(identity.source_ip) == f'{raw_event["requestContext"]["identity"]["sourceIp"]}/32'
+    assert str(identity.source_ip) == f"{raw_event['requestContext']['identity']['sourceIp']}/32"
 
     assert request_context.request_id == raw_event["requestContext"]["requestId"]
     assert request_context.request_time == raw_event["requestContext"]["requestTime"]
@@ -67,7 +67,7 @@ def test_apigw_websocket_connect_event():
     assert request_context.extended_request_id == raw_event["requestContext"]["extendedRequestId"]
 
     identity = request_context.identity
-    assert str(identity.source_ip) == f'{raw_event["requestContext"]["identity"]["sourceIp"]}/32'
+    assert str(identity.source_ip) == f"{raw_event['requestContext']['identity']['sourceIp']}/32"
 
     assert request_context.request_id == raw_event["requestContext"]["requestId"]
     assert request_context.request_time == raw_event["requestContext"]["requestTime"]
@@ -96,7 +96,7 @@ def test_apigw_websocket_disconnect_event():
     assert request_context.extended_request_id == raw_event["requestContext"]["extendedRequestId"]
 
     identity = request_context.identity
-    assert str(identity.source_ip) == f'{raw_event["requestContext"]["identity"]["sourceIp"]}/32'
+    assert str(identity.source_ip) == f"{raw_event['requestContext']['identity']['sourceIp']}/32"
 
     assert request_context.request_id == raw_event["requestContext"]["requestId"]
     assert request_context.request_time == raw_event["requestContext"]["requestTime"]
diff --git a/tests/unit/parser/_pydantic/test_iot_registry_events.py b/tests/unit/parser/_pydantic/test_iot_registry_events.py
index a36e44e2a52..8418b4bddcd 100644
--- a/tests/unit/parser/_pydantic/test_iot_registry_events.py
+++ b/tests/unit/parser/_pydantic/test_iot_registry_events.py
@@ -60,7 +60,6 @@ def test_iot_core_thing_type_association_event():
 
 
 def test_iot_core_thing_group_event():
-
     raw_event = load_event("iotRegistryEventsThingGroupEvent.json")
 
     parsed_event: IoTCoreThingGroupEvent = IoTCoreThingGroupEvent(**raw_event)
@@ -81,7 +80,6 @@ def test_iot_core_thing_group_event():
 
 
 def test_iot_core_add_or_remove_from_thing_group_event():
-
     raw_event = load_event("iotRegistryEventsAddOrRemoveFromThingGroupEvent.json")
 
     parsed_event: IoTCoreAddOrRemoveFromThingGroupEvent = IoTCoreAddOrRemoveFromThingGroupEvent(**raw_event)
@@ -99,7 +97,6 @@ def test_iot_core_add_or_remove_from_thing_group_event():
 
 
 def test_iot_core_add_or_delete_from_thing_group_event():
-
     raw_event = load_event("iotRegistryEventsAddOrDeleteFromThingGroupEvent.json")
 
     parsed_event: IoTCoreAddOrDeleteFromThingGroupEvent = IoTCoreAddOrDeleteFromThingGroupEvent(**raw_event)