fix(logger): add explicit None return type annotations #3113

Merged
8 commits merged on Sep 22, 2023

4 changes: 2 additions & 2 deletions aws_lambda_powertools/logging/filters.py
@@ -2,10 +2,10 @@


class SuppressFilter(logging.Filter):
def __init__(self, logger):
def __init__(self, logger: str):
self.logger = logger

def filter(self, record): # noqa: A003
def filter(self, record: logging.LogRecord) -> bool: # noqa: A003
"""Suppress Log Records from registered logger
It rejects log records from registered logger e.g. a child logger
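For context, a minimal usage sketch of the now-typed filter. The logger names below are illustrative, assuming `SuppressFilter` rejects records emitted by the registered logger name, as its docstring describes:

```python
import logging

from aws_lambda_powertools.logging.filters import SuppressFilter

parent = logging.getLogger("payment")
parent.setLevel(logging.INFO)

handler = logging.StreamHandler()
# Reject records that originate from the registered (child) logger
handler.addFilter(SuppressFilter(logger="payment.child"))
parent.addHandler(handler)

logging.getLogger("payment.child").info("dropped by SuppressFilter")
parent.info("still emitted")
```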
18 changes: 9 additions & 9 deletions aws_lambda_powertools/logging/formatter.py
@@ -44,14 +44,14 @@

class BasePowertoolsFormatter(logging.Formatter, metaclass=ABCMeta):
@abstractmethod
def append_keys(self, **additional_keys):
def append_keys(self, **additional_keys) -> None:
raise NotImplementedError()

def remove_keys(self, keys: Iterable[str]):
def remove_keys(self, keys: Iterable[str]) -> None:
raise NotImplementedError()

@abstractmethod
def clear_state(self):
def clear_state(self) -> None:
"""Removes any previously added logging keys"""
raise NotImplementedError()

@@ -78,7 +78,7 @@ def __init__(
utc: bool = False,
use_rfc3339: bool = False,
**kwargs,
):
) -> None:
"""Return a LambdaPowertoolsFormatter instance.

The `log_record_order` kwarg is used to specify the order of the keys used in
@@ -217,26 +217,26 @@ def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -
custom_fmt = self.default_time_format.replace(self.custom_ms_time_directive, msecs)
return time.strftime(custom_fmt, record_ts)

def append_keys(self, **additional_keys):
def append_keys(self, **additional_keys) -> None:
self.log_format.update(additional_keys)

def remove_keys(self, keys: Iterable[str]):
def remove_keys(self, keys: Iterable[str]) -> None:
for key in keys:
self.log_format.pop(key, None)

def clear_state(self):
def clear_state(self) -> None:
self.log_format = dict.fromkeys(self.log_record_order)
self.log_format.update(**self.keys_combined)

@staticmethod
def _build_default_keys():
def _build_default_keys() -> Dict[str, str]:
return {
"level": "%(levelname)s",
"location": "%(funcName)s:%(lineno)d",
"timestamp": "%(asctime)s",
}

def _get_latest_trace_id(self):
def _get_latest_trace_id(self) -> Optional[str]:
xray_trace_id_key = self.log_format.get("xray_trace_id", "")
if xray_trace_id_key is None:
# key is explicitly disabled; ignore it. e.g., Logger(xray_trace_id=None)
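To illustrate what the explicit `-> None` annotations buy downstream, here is a small hedged sketch using the public formatter methods touched above; the key names are invented for the example:

```python
from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter

formatter = LambdaPowertoolsFormatter(utc=True)

# Both methods mutate the formatter in place and return None,
# so a type checker can now flag any attempt to use their "result"
formatter.append_keys(request_id="abc-123")
formatter.remove_keys(["request_id"])

# Removes any previously added logging keys
formatter.clear_state()
```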
59 changes: 35 additions & 24 deletions aws_lambda_powertools/logging/logger.py
@@ -221,7 +221,7 @@ def __init__(
utc: bool = False,
use_rfc3339: bool = False,
**kwargs,
):
) -> None:
self.service = resolve_env_var_choice(
choice=service,
env=os.getenv(constants.SERVICE_NAME_ENV, "service_undefined"),
@@ -270,15 +270,20 @@ def __getattr__(self, name):
# https://github.com/aws-powertools/powertools-lambda-python/issues/97
return getattr(self._logger, name)

def _get_logger(self):
def _get_logger(self) -> logging.Logger:
"""Returns a Logger named {self.service}, or {self.service.filename} for child loggers"""
logger_name = self.service
if self.child:
logger_name = f"{self.service}.{_get_caller_filename()}"

return logging.getLogger(logger_name)

def _init_logger(self, formatter_options: Optional[Dict] = None, log_level: Union[str, int, None] = None, **kwargs):
def _init_logger(
self,
formatter_options: Optional[Dict] = None,
log_level: Union[str, int, None] = None,
**kwargs,
) -> None:
"""Configures new logger"""

# Skip configuration if it's a child logger or a pre-configured logger
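As a hedged illustration of the child-logger behaviour that `_get_logger` documents (the service and module names are invented for the example):

```python
from aws_lambda_powertools import Logger

# Parent logger; the underlying logging.Logger is named "payment"
logger = Logger(service="payment")

# In another module, a child reuses the parent's configuration; per _get_logger,
# its underlying logging.Logger is named "payment.<caller module>"
child = Logger(service="payment", child=True)
child.info("emitted through the parent's handler")
```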
@@ -296,7 +301,7 @@ def _init_logger(self, formatter_options: Optional[Dict] = None, log_level: Unio
self.structure_logs(formatter_options=formatter_options, **kwargs)

# Maintenance: We can drop this upon Py3.7 EOL. It's a backport for "location" key to work
self._logger.findCaller = compat.findCaller
self._logger.findCaller = compat.findCaller # type: ignore[method-assign]

# Pytest Live Log feature duplicates log records for colored output
# but we explicitly add a filter for log deduplication.
@@ -313,9 +318,9 @@ def _init_logger(self, formatter_options: Optional[Dict] = None, log_level: Unio
# therefore we set a custom attribute in the Logger that will be returned
# std logging will return the same Logger with our attribute if name is reused
logger.debug(f"Marking logger {self.service} as preconfigured")
self._logger.init = True
self._logger.init = True # type: ignore[attr-defined]

def _configure_sampling(self):
def _configure_sampling(self) -> None:
"""Dynamically set log level based on sampling rate

Raises
@@ -329,8 +334,10 @@ def _configure_sampling(self):
self._logger.setLevel(logging.DEBUG)
except ValueError:
raise InvalidLoggerSamplingRateError(
f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead."
f"Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable.",
(
f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead."
"Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable."
),
)

@overload
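Stepping back from the diff for a moment: `_configure_sampling` is what turns a sampling rate into a DEBUG log level. A hedged usage sketch, assuming the constructor parameter is `sampling_rate` and using an illustrative rate of 0.1:

```python
from aws_lambda_powertools import Logger

# For roughly 10% of Logger initialisations the level is switched to DEBUG;
# the rate can also come from the POWERTOOLS_LOGGER_SAMPLE_RATE environment variable.
# A non-numeric rate raises InvalidLoggerSamplingRateError.
logger = Logger(service="payment", sampling_rate=0.1)
```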
@@ -452,7 +459,7 @@ def info(
stacklevel: int = 2,
extra: Optional[Mapping[str, object]] = None,
**kwargs,
):
) -> None:
extra = extra or {}
extra = {**extra, **kwargs}

@@ -477,7 +484,7 @@ def error(
stacklevel: int = 2,
extra: Optional[Mapping[str, object]] = None,
**kwargs,
):
) -> None:
extra = extra or {}
extra = {**extra, **kwargs}

@@ -502,7 +509,7 @@ def exception(
stacklevel: int = 2,
extra: Optional[Mapping[str, object]] = None,
**kwargs,
):
) -> None:
extra = extra or {}
extra = {**extra, **kwargs}

@@ -527,7 +534,7 @@ def critical(
stacklevel: int = 2,
extra: Optional[Mapping[str, object]] = None,
**kwargs,
):
) -> None:
extra = extra or {}
extra = {**extra, **kwargs}

@@ -552,7 +559,7 @@ def warning(
stacklevel: int = 2,
extra: Optional[Mapping[str, object]] = None,
**kwargs,
):
) -> None:
extra = extra or {}
extra = {**extra, **kwargs}

@@ -577,7 +584,7 @@ def debug(
stacklevel: int = 2,
extra: Optional[Mapping[str, object]] = None,
**kwargs,
):
) -> None:
extra = extra or {}
extra = {**extra, **kwargs}

@@ -593,13 +600,13 @@ def debug(
extra=extra,
)

def append_keys(self, **additional_keys):
def append_keys(self, **additional_keys) -> None:
self.registered_formatter.append_keys(**additional_keys)

def remove_keys(self, keys: Iterable[str]):
def remove_keys(self, keys: Iterable[str]) -> None:
self.registered_formatter.remove_keys(keys)

def structure_logs(self, append: bool = False, formatter_options: Optional[Dict] = None, **keys):
def structure_logs(self, append: bool = False, formatter_options: Optional[Dict] = None, **keys) -> None:
"""Sets logging formatting to JSON.

Optionally, it can append keyword arguments
@@ -645,7 +652,7 @@ def structure_logs(self, append: bool = False, formatter_options: Optional[Dict]
self.registered_formatter.clear_state()
self.registered_formatter.append_keys(**log_keys)

def set_correlation_id(self, value: Optional[str]):
def set_correlation_id(self, value: Optional[str]) -> None:
"""Sets the correlation_id in the logging json

Parameters
@@ -676,7 +683,9 @@ def addHandler(self, handler: logging.Handler) -> None:
@property
def registered_handler(self) -> logging.Handler:
"""Convenience property to access the first logger handler"""
handlers = self._logger.parent.handlers if self.child else self._logger.handlers
# We ignore mypy here because self.child encodes whether or not self._logger.parent is
# None, mypy can't see this from context but we can
handlers = self._logger.parent.handlers if self.child else self._logger.handlers # type: ignore[union-attr]
return handlers[0]

@property
@@ -720,7 +729,7 @@ def set_package_logger(
level: Union[str, int] = logging.DEBUG,
stream: Optional[IO[str]] = None,
formatter: Optional[logging.Formatter] = None,
):
) -> None:
"""Set an additional stream handler, formatter, and log level for aws_lambda_powertools package logger.

**Package log by default is suppressed (NullHandler), this should only used for debugging.
@@ -755,16 +764,18 @@ def set_package_logger(
logger.addHandler(handler)


def log_uncaught_exception_hook(exc_type, exc_value, exc_traceback, logger: Logger):
def log_uncaught_exception_hook(exc_type, exc_value, exc_traceback, logger: Logger) -> None:
"""Callback function for sys.excepthook to use Logger to log uncaught exceptions"""
logger.exception(exc_value, exc_info=(exc_type, exc_value, exc_traceback)) # pragma: no cover


def _get_caller_filename():
def _get_caller_filename() -> str:
"""Return caller filename by finding the caller frame"""
# Current frame => _get_logger()
# Previous frame => logger.py
# Before previous frame => Caller
# We ignore mypy here because *we* know that there will always be at least
# 3 frames (above) so repeatedly calling f_back is safe here
frame = inspect.currentframe()
caller_frame = frame.f_back.f_back.f_back
return caller_frame.f_globals["__name__"]
caller_frame = frame.f_back.f_back.f_back # type: ignore[union-attr]
return caller_frame.f_globals["__name__"] # type: ignore[union-attr]
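
To close the loop on the two module-level helpers, a short hedged sketch of how they can be wired together; the manual `sys.excepthook` assignment mirrors what the hook's docstring describes, and the service name is illustrative:

```python
import sys
from functools import partial

from aws_lambda_powertools import Logger
from aws_lambda_powertools.logging.logger import (
    log_uncaught_exception_hook,
    set_package_logger,
)

# Emit aws_lambda_powertools' own (normally suppressed) debug logs while troubleshooting
set_package_logger()

logger = Logger(service="payment")

# Route uncaught exceptions through the structured logger, as the hook is designed for
sys.excepthook = partial(log_uncaught_exception_hook, logger=logger)
```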