chore(deps-dev): bump flake8-bugbear from 23.1.20 to 23.2.13 #1924


Merged · 2 commits · Feb 14, 2023
8 changes: 6 additions & 2 deletions aws_lambda_powertools/event_handler/api_gateway.py
@@ -501,7 +501,9 @@ def register_resolver(func: Callable):
                 self._routes.append(Route(item, self._compile_regex(rule), func, cors_enabled, compress, cache_control))
                 route_key = item + rule
                 if route_key in self._route_keys:
-                    warnings.warn(f"A route like this was already registered. method: '{item}' rule: '{rule}'")
+                    warnings.warn(
+                        f"A route like this was already registered. method: '{item}' rule: '{rule}'", stacklevel=2
+                    )
                 self._route_keys.append(route_key)
                 if cors_enabled:
                     logger.debug(f"Registering method {item.upper()} to Allow Methods in CORS")
@@ -526,7 +528,9 @@ def resolve(self, event, context) -> Dict[str, Any]:
         """
         if isinstance(event, BaseProxyEvent):
             warnings.warn(
-                "You don't need to serialize event to Event Source Data Class when using Event Handler; see issue #1152"
+                "You don't need to serialize event to Event Source Data Class when using Event Handler; "
+                "see issue #1152",
+                stacklevel=2,
             )
             event = event.raw_event
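The recurring change in this PR is an explicit stacklevel=2 on warnings.warn, the pattern flake8-bugbear's B028 check asks for: with the default stacklevel of 1 the warning is attributed to the library line that calls warn(), while stacklevel=2 points it at the caller's code. A minimal sketch of the effect, using illustrative names that are not part of this PR:

# stacklevel_demo.py -- illustrative sketch only, not part of this PR
import warnings

_seen_rules: set = set()


def register_route(rule: str) -> None:
    """Toy stand-in for a library function that warns about duplicate registrations."""
    if rule in _seen_rules:
        # stacklevel=2 attributes the warning to the caller of register_route,
        # so the reported filename/line is the user's code, not this library line.
        warnings.warn(f"A route like this was already registered: '{rule}'", stacklevel=2)
    _seen_rules.add(rule)


if __name__ == "__main__":
    warnings.simplefilter("always")
    register_route("/ping")
    register_route("/ping")  # the UserWarning is reported against this line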
2 changes: 1 addition & 1 deletion aws_lambda_powertools/metrics/base.py
@@ -391,7 +391,7 @@ def decorate(event, context):
                     self._add_cold_start_metric(context=context)
             finally:
                 if not raise_on_empty_metrics and not self.metric_set:
-                    warnings.warn("No metrics to publish, skipping")
+                    warnings.warn("No metrics to publish, skipping", stacklevel=2)
                 else:
                     metrics = self.serialize_metric_set()
                     self.clear_metrics()
6 changes: 4 additions & 2 deletions aws_lambda_powertools/shared/functions.py
@@ -106,7 +106,9 @@ def bytes_to_string(value: bytes) -> str:
 def powertools_dev_is_set() -> bool:
     is_on = strtobool(os.getenv(constants.POWERTOOLS_DEV_ENV, "0"))
     if is_on:
-        warnings.warn("POWERTOOLS_DEV environment variable is enabled. Increasing verbosity across utilities.")
+        warnings.warn(
+            "POWERTOOLS_DEV environment variable is enabled. Increasing verbosity across utilities.", stacklevel=2
+        )
         return True

     return False
@@ -115,7 +117,7 @@ def powertools_dev_is_set() -> bool:
 def powertools_debug_is_set() -> bool:
     is_on = strtobool(os.getenv(constants.POWERTOOLS_DEBUG_ENV, "0"))
     if is_on:
-        warnings.warn("POWERTOOLS_DEBUG environment variable is enabled. Setting logging level to DEBUG.")
+        warnings.warn("POWERTOOLS_DEBUG environment variable is enabled. Setting logging level to DEBUG.", stacklevel=2)
         return True

     return False
6 changes: 4 additions & 2 deletions aws_lambda_powertools/shared/headers_serializer.py
@@ -97,7 +97,8 @@ def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Coo
         if len(cookies) > 1:
             warnings.warn(
                 "Can't encode more than one cookie in the response. Sending the last cookie only. "
-                "Did you enable multiValueHeaders on the ALB Target Group?"
+                "Did you enable multiValueHeaders on the ALB Target Group?",
+                stacklevel=2,
             )

         # We can only send one cookie, send the last one
@@ -114,7 +115,8 @@ def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Coo
             if len(values) > 1:
                 warnings.warn(
                     f"Can't encode more than one header value for the same key ('{key}') in the response. "
-                    "Did you enable multiValueHeaders on the ALB Target Group?"
+                    "Did you enable multiValueHeaders on the ALB Target Group?",
+                    stacklevel=2,
                 )

             # We can only set one header per key, send the last one
@@ -182,7 +182,7 @@ def _get_hashed_idempotency_key(self, data: Dict[str, Any]) -> str:
         if self.is_missing_idempotency_key(data=data):
             if self.raise_on_no_idempotency_key:
                 raise IdempotencyKeyError("No data found to create a hashed idempotency_key")
-            warnings.warn(f"No value found for idempotency_key. jmespath: {self.event_key_jmespath}")
+            warnings.warn(f"No value found for idempotency_key. jmespath: {self.event_key_jmespath}", stacklevel=2)

         generated_hash = self._generate_hash(data=data)
         return f"{self.function_name}#{generated_hash}"
@@ -359,7 +359,8 @@ def save_inprogress(self, data: Dict[str, Any], remaining_time_in_millis: Option
         else:
             warnings.warn(
                 "Couldn't determine the remaining time left. "
-                "Did you call register_lambda_context on IdempotencyConfig?"
+                "Did you call register_lambda_context on IdempotencyConfig?",
+                stacklevel=2,
             )

         logger.debug(f"Saving in progress record for idempotency key: {data_record.idempotency_key}")
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -58,7 +58,7 @@ bandit = "^1.7.1"
 radon = "^5.1.0"
 xenon = "^0.9.0"
 flake8-eradicate = "^1.2.1"
-flake8-bugbear = "^23.1.20"
+flake8-bugbear = "^23.2.13"
 mkdocs-git-revision-date-plugin = "^0.3.2"
 mike = "^1.1.2"
 retry = "^0.9.2"