diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py
index bc44b14b1e5..26c1df5ab93 100644
--- a/aws_lambda_powertools/logging/logger.py
+++ b/aws_lambda_powertools/logging/logger.py
@@ -129,12 +129,14 @@ def __init__(
self.sampling_rate = resolve_env_var_choice(
choice=sampling_rate, env=os.getenv(constants.LOGGER_LOG_SAMPLING_RATE, 0.0)
)
+ self._is_deduplication_disabled = resolve_truthy_env_var_choice(
+ env=os.getenv(constants.LOGGER_LOG_DEDUPLICATION_ENV, "false")
+ )
self.log_level = self._get_log_level(level)
self.child = child
self._handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout)
self._default_log_keys = {"service": self.service, "sampling_rate": self.sampling_rate}
self._logger = self._get_logger()
-
self._init_logger(**kwargs)
def __getattr__(self, name):
@@ -167,12 +169,16 @@ def _init_logger(self, **kwargs):
self._logger.addHandler(self._handler)
self.structure_logs(**kwargs)
- logger.debug("Adding filter in root logger to suppress child logger records to bubble up")
- for handler in logging.root.handlers:
- # It'll add a filter to suppress any child logger from self.service
- # Where service is Order, it'll reject parent logger Order,
- # and child loggers such as Order.checkout, Order.shared
- handler.addFilter(SuppressFilter(self.service))
+ # Pytest Live Log feature duplicates log records for colored output,
+ # but we explicitly add a filter for log deduplication.
+ # This flag disables that protection when you explicitly want logs to be duplicated (#262)
+ if not self._is_deduplication_disabled:
+ logger.debug("Adding filter in root logger to suppress child logger records from bubbling up")
+ for handler in logging.root.handlers:
+ # It'll add a filter to suppress any child logger from self.service
+ # Example: `Logger(service="order")`, where service is `order`
+ # It'll reject all loggers starting with `order`, e.g. `order.checkout`, `order.shared`
+ handler.addFilter(SuppressFilter(self.service))
# as per bug in #249, we should not be pre-configuring an existing logger
# therefore we set a custom attribute in the Logger that will be returned
diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py
index 27ab3c34197..c69d6b5ea49 100644
--- a/aws_lambda_powertools/shared/constants.py
+++ b/aws_lambda_powertools/shared/constants.py
@@ -4,6 +4,7 @@
LOGGER_LOG_SAMPLING_RATE: str = "POWERTOOLS_LOGGER_SAMPLE_RATE"
LOGGER_LOG_EVENT_ENV: str = "POWERTOOLS_LOGGER_LOG_EVENT"
+LOGGER_LOG_DEDUPLICATION_ENV: str = "POWERTOOLS_LOG_DEDUPLICATION_DISABLED"
MIDDLEWARE_FACTORY_TRACE_ENV: str = "POWERTOOLS_TRACE_MIDDLEWARES"
diff --git a/docs/content/core/logger.mdx b/docs/content/core/logger.mdx
index e54ea43cd62..ad8613d09cb 100644
--- a/docs/content/core/logger.mdx
+++ b/docs/content/core/logger.mdx
@@ -439,6 +439,18 @@ def test_lambda_handler(lambda_handler, lambda_context):
lambda_handler(test_event, lambda_context) # this will now have a Context object populated
```
+### pytest live log feature
+
+Pytest Live Log feature duplicates emitted log messages in order to style log statements according to their levels. For this to work, set the `POWERTOOLS_LOG_DEDUPLICATION_DISABLED` environment variable.
+
+```bash:title=pytest_live_log.sh
+POWERTOOLS_LOG_DEDUPLICATION_DISABLED="1" pytest -o log_cli=1
+```
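+
+If you prefer not to export the variable on the command line, you can also set it from within your test suite before any `Logger` is instantiated, for example in a `conftest.py` (a minimal sketch; the file name and placement are illustrative):
+
+```python:title=conftest.py
+import os
+
+# Powertools Logger reads this variable when a Logger object is constructed,
+# so it must be set before any Logger is instantiated
+os.environ["POWERTOOLS_LOG_DEDUPLICATION_DISABLED"] = "1"
+```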
+
+
+This feature should be used with care, as it explicitly disables our ability to filter propagated messages to the root logger (if configured).
+
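+To illustrate what this filter protects against, here is a rough sketch of the behaviour described above; the service name is illustrative and record routing follows standard `logging` propagation:
+
+```python:title=log_deduplication_sketch.py
+import logging
+from aws_lambda_powertools import Logger
+
+# Pretend the root logger already has a handler, e.g. the one pytest Live Log installs
+logging.basicConfig(level=logging.INFO)
+
+logger = Logger(service="payment")
+logger.info("hello")
+
+# Default behaviour: the record is emitted once by the Powertools handler, and the copy
+# propagated to the root logger is rejected by the deduplication filter.
+# With POWERTOOLS_LOG_DEDUPLICATION_DISABLED set, the root handler emits it as well,
+# so the same message appears twice, which is what pytest Live Log relies on for styling.
+```
+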
## FAQ
**How can I enable boto3 and botocore library logging?**
diff --git a/docs/content/index.mdx b/docs/content/index.mdx
index d313f284368..df5e6aba22c 100644
--- a/docs/content/index.mdx
+++ b/docs/content/index.mdx
@@ -138,6 +138,7 @@ Environment variable | Description | Utility | Default
**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory) | `false`
**POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) | `false`
**POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) | `0`
+**POWERTOOLS_LOG_DEDUPLICATION_DISABLED** | Disables log deduplication filter protection to use Pytest Live Log feature | [Logging](./core/logger) | `false`
**LOG_LEVEL** | Sets logging level | [Logging](./core/logger) | `INFO`
## Debug mode
diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py
index 6c7862896a3..4e1ab245141 100644
--- a/tests/functional/test_logger.py
+++ b/tests/functional/test_logger.py
@@ -11,6 +11,7 @@
from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
from aws_lambda_powertools.logging.logger import set_package_logger
+from aws_lambda_powertools.shared import constants
@pytest.fixture
@@ -376,6 +377,7 @@ def test_logger_do_not_log_twice_when_root_logger_is_setup(stdout, service_name)
child_logger = Logger(service=service_name, child=True, stream=stdout)
logger.info("PARENT")
child_logger.info("CHILD")
+ root_logger.info("ROOT")
# THEN it should contain only two log entries
# since child's log records propagated to root logger should be rejected
@@ -400,3 +402,23 @@ def test_logger_extra_kwargs(stdout, service_name):
# THEN second log should not have request_id in the root structure
assert "request_id" not in no_extra_fields_log
+
+
+def test_logger_log_twice_when_log_filter_isnt_present_and_root_logger_is_setup(monkeypatch, stdout, service_name):
+ # GIVEN Lambda configures the root logger with a handler
+ root_logger = logging.getLogger()
+ root_logger.addHandler(logging.StreamHandler(stream=stdout))
+
+ # WHEN we create a new Logger and child Logger
+ # and the log deduplication filter for child messages is disabled
+ # see #262 for more details on why this is needed for Pytest Live Log feature
+ monkeypatch.setenv(constants.LOGGER_LOG_DEDUPLICATION_ENV, "true")
+ logger = Logger(service=service_name, stream=stdout)
+ child_logger = Logger(service=service_name, child=True, stream=stdout)
+ logger.info("PARENT")
+ child_logger.info("CHILD")
+
+ # THEN it should contain four log entries
+ # since log records propagated to the root logger are no longer rejected
+ logs = list(stdout.getvalue().strip().split("\n"))
+ assert len(logs) == 4