
chore(ci): enable Ruff rule ERA001 and fix errors #2591


Merged: 1 commit, Jun 27, 2023
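
Context for the change: ERA001 is Ruff's "commented-out code" rule (the Ruff port of flake8-eradicate), and the old flake8-eradicate code E800 is not recognized by Ruff, so every existing inline suppression had to be renamed. A minimal, hypothetical Python sketch of the rename pattern (the identifiers are illustrative, not taken from the repository):

```python
# Before: suppression aimed at flake8-eradicate; Ruff ignores the E800 code
# payload = json.loads(record["body"])  # noqa: E800

# After: the same commented-out line, silenced with Ruff's rule code
# payload = json.loads(record["body"])  # noqa: ERA001
```
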
aws_lambda_powertools/logging/compat.py (3 additions, 3 deletions)
@@ -24,9 +24,9 @@ def findCaller(stack_info=False, stacklevel=2): # pragma: no cover
## If we want to use the last (deepest) frame:
break
## If we want to mimic the warnings module:
# return ("sys", 1, "(unknown function)", None) # noqa: E800
## If we want to be pedantic: # noqa: E800
# raise ValueError("call stack is not deep enough") # noqa: E800
# return ("sys", 1, "(unknown function)", None) # noqa: ERA001
## If we want to be pedantic: # noqa: ERA001
# raise ValueError("call stack is not deep enough") # noqa: ERA001
f = next_f # noqa: VNE001
if not _is_internal_frame(f):
stacklevel -= 1
aws_lambda_powertools/metrics/base.py (2 additions, 2 deletions)
@@ -225,10 +225,10 @@ def serialize_metric_set(
logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions})

# For standard resolution metrics, don't add StorageResolution field to avoid unnecessary ingestion of data into cloudwatch # noqa E501
# Example: [ { "Name": "metric_name", "Unit": "Count"} ] # noqa E800
# Example: [ { "Name": "metric_name", "Unit": "Count"} ] # noqa ERA001
#
# In case using high-resolution metrics, add StorageResolution field
# Example: [ { "Name": "metric_name", "Unit": "Count", "StorageResolution": 1 } ] # noqa E800
# Example: [ { "Name": "metric_name", "Unit": "Count", "StorageResolution": 1 } ] # noqa ERA001
metric_definition: List[MetricNameUnitResolution] = []
metric_names_and_values: Dict[str, float] = {} # { "metric_name": 1.0 }

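The comments above describe how serialize_metric_set branches on resolution: standard-resolution metrics omit the StorageResolution field, while high-resolution metrics include it with a value of 1. A hedged sketch of that branching (names and data shapes are illustrative, not the exact implementation in metrics/base.py):

```python
from typing import Dict, List


def build_metric_definitions(metrics: Dict[str, Dict]) -> List[Dict]:
    """Illustrative only: build EMF metric definitions as the comments above describe."""
    definitions: List[Dict] = []
    for name, metric in metrics.items():
        definition: Dict = {"Name": name, "Unit": metric["Unit"]}
        # Standard-resolution metrics omit StorageResolution to avoid
        # unnecessary data ingestion; high-resolution metrics carry it as 1.
        if metric.get("StorageResolution") == 1:
            definition["StorageResolution"] = 1
        definitions.append(definition)
    return definitions
```
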
aws_lambda_powertools/tracing/tracer.py (1 addition, 1 deletion)
@@ -510,7 +510,7 @@ async def async_tasks():
functools.partial(self.capture_method, capture_response=capture_response, capture_error=capture_error),
)

- # Example: app.ClassA.get_all # noqa E800
+ # Example: app.ClassA.get_all # noqa ERA001
method_name = f"{method.__module__}.{method.__qualname__}"

capture_response = resolve_truthy_env_var_choice(
aws_lambda_powertools/utilities/batch/base.py (6 additions, 6 deletions)
@@ -159,13 +159,13 @@ def __call__(self, records: List[dict], handler: Callable, lambda_context: Optio
#
# Scenario: Injects Lambda context
#
- # def record_handler(record, lambda_context): ... # noqa: E800
- # with processor(records=batch, handler=record_handler, lambda_context=context): ... # noqa: E800
+ # def record_handler(record, lambda_context): ... # noqa: ERA001
+ # with processor(records=batch, handler=record_handler, lambda_context=context): ... # noqa: ERA001
#
# Scenario: Does NOT inject Lambda context (default)
#
- # def record_handler(record): pass # noqa: E800
- # with processor(records=batch, handler=record_handler): ... # noqa: E800
+ # def record_handler(record): pass # noqa: ERA001
+ # with processor(records=batch, handler=record_handler): ... # noqa: ERA001
#
if lambda_context is None:
self._handler_accepts_lambda_context = False
@@ -449,7 +449,7 @@ def record_handler(record: DynamoDBRecord):
logger.info(record.dynamodb.new_image)
payload: dict = json.loads(record.dynamodb.new_image.get("item"))
# alternatively:
- # changes: Dict[str, Any] = record.dynamodb.new_image # noqa: E800
+ # changes: Dict[str, Any] = record.dynamodb.new_image # noqa: ERA001
# payload = change.get("Message") -> "<payload>"
...

@@ -593,7 +593,7 @@ async def record_handler(record: DynamoDBRecord):
logger.info(record.dynamodb.new_image)
payload: dict = json.loads(record.dynamodb.new_image.get("item"))
# alternatively:
- # changes: Dict[str, Any] = record.dynamodb.new_image # noqa: E800
+ # changes: Dict[str, Any] = record.dynamodb.new_image # noqa: ERA001
# payload = change.get("Message") -> "<payload>"
...

@@ -47,7 +47,7 @@ def extract_data_from_envelope(data: Union[Dict, str], envelope: str, jmespath_o


def handler(event: dict, context: LambdaContext):
# event = {"body": "{\"customerId\":\"dd4649e6-2484-4993-acb8-0f9123103394\"}"} # noqa: E800
# event = {"body": "{\"customerId\":\"dd4649e6-2484-4993-acb8-0f9123103394\"}"} # noqa: ERA001
payload = extract_data_from_envelope(data=event, envelope="powertools_json(body)")
customer = payload.get("customerId") # now deserialized
...
aws_lambda_powertools/utilities/parser/models/dynamodb.py (6 additions, 6 deletions)
@@ -19,12 +19,12 @@ class DynamoDBStreamChangedRecordModel(BaseModel):
# since both images are optional, they can both be None. However, at least one must
# exist in a legal model of NEW_AND_OLD_IMAGES type
# @root_validator
- # def check_one_image_exists(cls, values): # noqa: E800
- # new_img, old_img = values.get("NewImage"), values.get("OldImage") # noqa: E800
- # stream_type = values.get("StreamViewType") # noqa: E800
- # if stream_type == "NEW_AND_OLD_IMAGES" and not new_img and not old_img: # noqa: E800
- # raise TypeError("DynamoDB streams model failed validation, missing both new & old stream images") # noqa: E800,E501
- # return values # noqa: E800
+ # def check_one_image_exists(cls, values): # noqa: ERA001
+ # new_img, old_img = values.get("NewImage"), values.get("OldImage") # noqa: ERA001
+ # stream_type = values.get("StreamViewType") # noqa: ERA001
+ # if stream_type == "NEW_AND_OLD_IMAGES" and not new_img and not old_img: # noqa: ERA001
+ # raise TypeError("DynamoDB streams model failed validation, missing both new & old stream images") # noqa: ERA001,E501
+ # return values # noqa: ERA001


class UserIdentity(BaseModel):
aws_lambda_powertools/utilities/parser/models/sqs.py (15 additions, 15 deletions)
@@ -29,24 +29,24 @@ class SqsMsgAttributeModel(BaseModel):
# labels with the format .custom-data-type.
# https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-message-attributes
# @validator("dataType")
- # def valid_type(cls, v): # noqa: VNE001,E800 # noqa: E800
- # pattern = re.compile("Number.*|String.*|Binary.*") # noqa: E800
- # if not pattern.match(v): # noqa: E800
- # raise TypeError("data type is invalid") # noqa: E800
- # return v # noqa: E800
+ # def valid_type(cls, v): # noqa: VNE001,ERA001 # noqa: ERA001
+ # pattern = re.compile("Number.*|String.*|Binary.*") # noqa: ERA001
+ # if not pattern.match(v): # noqa: ERA001
+ # raise TypeError("data type is invalid") # noqa: ERA001
+ # return v # noqa: ERA001
#
# # validate that dataType and value are not None and match
# @root_validator
- # def check_str_and_binary_values(cls, values): # noqa: E800
- # binary_val, str_val = values.get("binaryValue", ""), values.get("stringValue", "") # noqa: E800
- # data_type = values.get("dataType") # noqa: E800
- # if not str_val and not binary_val: # noqa: E800
- # raise TypeError("both binaryValue and stringValue are missing") # noqa: E800
- # if data_type.startswith("Binary") and not binary_val: # noqa: E800
- # raise TypeError("binaryValue is missing") # noqa: E800
- # if (data_type.startswith("String") or data_type.startswith("Number")) and not str_val: # noqa: E800
- # raise TypeError("stringValue is missing") # noqa: E800
- # return values # noqa: E800
+ # def check_str_and_binary_values(cls, values): # noqa: ERA001
+ # binary_val, str_val = values.get("binaryValue", ""), values.get("stringValue", "") # noqa: ERA001
+ # data_type = values.get("dataType") # noqa: ERA001
+ # if not str_val and not binary_val: # noqa: ERA001
+ # raise TypeError("both binaryValue and stringValue are missing") # noqa: ERA001
+ # if data_type.startswith("Binary") and not binary_val: # noqa: ERA001
+ # raise TypeError("binaryValue is missing") # noqa: ERA001
+ # if (data_type.startswith("String") or data_type.startswith("Number")) and not str_val: # noqa: ERA001
+ # raise TypeError("stringValue is missing") # noqa: ERA001
+ # return values # noqa: ERA001


class SqsRecordModel(BaseModel):
examples/idempotency/tests/test_with_dynamodb_local.py (1 addition, 1 deletion)
@@ -26,7 +26,7 @@ def test_idempotent_lambda(lambda_context):
app_test_dynamodb_local.persistence_layer.client = dynamodb_local_client

# If desired, you can use a different DynamoDB Local table name than what your code already uses
- # app.persistence_layer.table_name = "another table name" # noqa: E800
+ # app.persistence_layer.table_name = "another table name" # noqa: ERA001

result = app_test_dynamodb_local.handler({"testkey": "testvalue"}, lambda_context)
assert result["payment_id"] == 12345
poetry.lock (1 addition, 1 deletion)

Generated file; diff not rendered by default.

ruff.toml (0 additions, 1 deletion)
@@ -37,7 +37,6 @@ ignore = [
"B018", # useless-expression - disabled temporarily
"COM812", # Trailing comma missing - disabled temporarily
"PLC1901", # Compare-to-empty-string - disabled temporarily
"ERA001", # Found commented-out code - disabled temporarily
"PLW", # Warning category - disabled temporarily
"PLR", # Refactoring category - disabled temporarily
"PLC", # Convention category - disabled temporarily
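
With "ERA001" dropped from the ignore list, the rule is now enforced across the repository: commented-out code must either be deleted or carry an explicit inline suppression, which is exactly the pattern applied in the files above. A small, hypothetical illustration of what the rule does and does not flag:

```python
# Flagged by ERA001: reads as executable code that was commented out
# result = compute_total(items)

# Not flagged: an ordinary explanatory comment
# The total is recomputed on every invocation.

# Flagged code kept on purpose, silenced inline
# result = compute_total(items)  # noqa: ERA001
```

Locally, the same check can be reproduced with `ruff check --select ERA001 .`, assuming Ruff is installed in the project environment.
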
tests/functional/event_handler/test_api_gateway.py (6 additions, 6 deletions)
@@ -923,17 +923,17 @@ def test_similar_dynamic_routes():
event = deepcopy(LOAD_GW_EVENT)

# WHEN
- # r'^/accounts/(?P<account_id>\\w+\\b)$' # noqa: E800
+ # r'^/accounts/(?P<account_id>\\w+\\b)$' # noqa: ERA001
@app.get("/accounts/<account_id>")
def get_account(account_id: str):
assert account_id == "single_account"

- # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks$' # noqa: E800
+ # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks$' # noqa: ERA001
@app.get("/accounts/<account_id>/source_networks")
def get_account_networks(account_id: str):
assert account_id == "nested_account"

- # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks/(?P<network_id>\\w+\\b)$' # noqa: E800
+ # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks/(?P<network_id>\\w+\\b)$' # noqa: ERA001
@app.get("/accounts/<account_id>/source_networks/<network_id>")
def get_network_account(account_id: str, network_id: str):
assert account_id == "nested_account"
@@ -959,17 +959,17 @@ def test_similar_dynamic_routes_with_whitespaces():
event = deepcopy(LOAD_GW_EVENT)

# WHEN
- # r'^/accounts/(?P<account_id>\\w+\\b)$' # noqa: E800
+ # r'^/accounts/(?P<account_id>\\w+\\b)$' # noqa: ERA001
@app.get("/accounts/<account_id>")
def get_account(account_id: str):
assert account_id == "single account"

- # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks$' # noqa: E800
+ # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks$' # noqa: ERA001
@app.get("/accounts/<account_id>/source_networks")
def get_account_networks(account_id: str):
assert account_id == "nested account"

- # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks/(?P<network_id>\\w+\\b)$' # noqa: E800
+ # r'^/accounts/(?P<account_id>\\w+\\b)/source_networks/(?P<network_id>\\w+\\b)$' # noqa: ERA001
@app.get("/accounts/<account_id>/source_networks/<network_id>")
def get_network_account(account_id: str, network_id: str):
assert account_id == "nested account"