
Commit 1fa4700

feat(metrics): disable metrics flush via environment variables (#6046)
* disable metrics env var cloudwatch
* add for datadog
* add tests
* accept none env var
* fix is disable metrics
* add documentation
* fix mypy
* add datadog tests
* disable when powertools dev
* add disable to flush
* remove print
* Changing metrics flush decision

---------

Co-authored-by: Leandro Damascena <[email protected]>
1 parent 1ca05a2 commit 1fa4700

File tree

10 files changed, +375 -14 lines changed

Diff for: aws_lambda_powertools/metrics/functions.py

+27 lines changed

@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import os
 from datetime import datetime
 
 from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import (
@@ -8,6 +9,7 @@
 )
 from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit
 from aws_lambda_powertools.shared import constants
+from aws_lambda_powertools.shared.functions import strtobool
 
 
 def extract_cloudwatch_metric_resolution_value(metric_resolutions: list, resolution: int | MetricResolution) -> int:
@@ -134,3 +136,28 @@ def convert_timestamp_to_emf_format(timestamp: int | datetime) -> int:
     # Returning zero represents the initial date of epoch time,
     # which will be skipped by Amazon CloudWatch.
     return 0
+
+
+def is_metrics_disabled() -> bool:
+    """
+    Determine if metrics should be disabled based on environment variables.
+
+    Returns:
+        bool: True if metrics are disabled, False otherwise.
+
+    Rules:
+        - If POWERTOOLS_DEV is True and POWERTOOLS_METRICS_DISABLED is True: Disable metrics
+        - If POWERTOOLS_METRICS_DISABLED is True: Disable metrics
+        - If POWERTOOLS_DEV is True and POWERTOOLS_METRICS_DISABLED is not set: Disable metrics
+    """
+
+    is_dev_mode = strtobool(os.getenv(constants.POWERTOOLS_DEV_ENV, "false"))
+    is_metrics_disabled = strtobool(os.getenv(constants.METRICS_DISABLED_ENV, "false"))
+
+    disable_conditions = [
+        is_metrics_disabled,
+        is_metrics_disabled and is_dev_mode,
+        is_dev_mode and os.getenv(constants.METRICS_DISABLED_ENV) is None,
+    ]
+
+    return any(disable_conditions)
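
For context, a minimal sketch of the decision table implemented by is_metrics_disabled() above. It assumes the helper is importable from aws_lambda_powertools.metrics.functions once this change lands, and that the constants resolve to POWERTOOLS_DEV and POWERTOOLS_METRICS_DISABLED as added in shared/constants.py further down.

    # Hypothetical, self-contained check of the rules documented in the docstring above.
    import os

    from aws_lambda_powertools.metrics.functions import is_metrics_disabled

    scenarios = [
        # (POWERTOOLS_DEV, POWERTOOLS_METRICS_DISABLED, expected result)
        ("true", "true", True),    # dev mode and explicit disable
        ("false", "true", True),   # explicit disable alone
        ("true", None, True),      # dev mode with the variable unset
        ("false", None, False),    # default behaviour: metrics are flushed
    ]

    for dev, disabled, expected in scenarios:
        os.environ["POWERTOOLS_DEV"] = dev
        os.environ.pop("POWERTOOLS_METRICS_DISABLED", None)
        if disabled is not None:
            os.environ["POWERTOOLS_METRICS_DISABLED"] = disabled
        assert is_metrics_disabled() == expected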

Diff for: aws_lambda_powertools/metrics/metrics.py

+2 lines changed

@@ -47,6 +47,8 @@ def lambda_handler():
         metric namespace
     POWERTOOLS_SERVICE_NAME : str
         service name used for default dimension
+    POWERTOOLS_METRICS_DISABLED: bool
+        Powertools metrics disabled (e.g. `"true", "True", "TRUE"`)
 
     Parameters
     ----------
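
The truthy examples in the docstring above are parsed with strtobool, which is imported in functions.py earlier in this commit. A minimal sketch, assuming that helper behaves like the classic distutils.util.strtobool (case-insensitive) but returns a bool:

    from aws_lambda_powertools.shared.functions import strtobool

    assert strtobool("true") and strtobool("True") and strtobool("TRUE")
    assert strtobool("1")      # other common truthy spellings are accepted too (assumption)
    assert not strtobool("false")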

Diff for: aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py

+5 -1 lines changed

@@ -15,6 +15,7 @@
     convert_timestamp_to_emf_format,
     extract_cloudwatch_metric_resolution_value,
     extract_cloudwatch_metric_unit_value,
+    is_metrics_disabled,
     validate_emf_timestamp,
 )
 from aws_lambda_powertools.metrics.provider.base import BaseProvider
@@ -77,6 +78,7 @@ def __init__(
         self.default_dimensions = default_dimensions or {}
         self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
         self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV))
+
         self.metadata_set = metadata_set if metadata_set is not None else {}
         self.timestamp: int | None = None
 
@@ -127,6 +129,7 @@ def add_metric(
         MetricResolutionError
             When metric resolution is not supported by CloudWatch
         """
+
         if not isinstance(value, numbers.Number):
             raise MetricValueError(f"{value} is not a valid number")
 
@@ -268,6 +271,7 @@ def add_dimension(self, name: str, value: str) -> None:
         value : str
             Dimension value
         """
+
         logger.debug(f"Adding dimension: {name}:{value}")
         if len(self.dimension_set) == MAX_DIMENSIONS:
             raise SchemaValidationError(
@@ -374,7 +378,7 @@ def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
                 "If application metrics should never be empty, consider using 'raise_on_empty_metrics'",
                 stacklevel=2,
             )
-        else:
+        elif not is_metrics_disabled():
             logger.debug("Flushing existing metrics")
             metrics = self.serialize_metric_set()
             print(json.dumps(metrics, separators=(",", ":")))
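
A hedged, minimal reproduction of the new flush behaviour for the CloudWatch EMF provider; names and values are illustrative, and the environment variable is read at flush time by is_metrics_disabled() as patched above.

    import os

    os.environ["POWERTOOLS_METRICS_DISABLED"] = "true"  # checked on every flush

    from aws_lambda_powertools import Metrics
    from aws_lambda_powertools.metrics import MetricUnit

    metrics = Metrics(namespace="ServerlessAirline", service="payment")
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    metrics.flush_metrics()  # with the elif above, no EMF blob is printed to stdout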

Diff for: aws_lambda_powertools/metrics/provider/datadog/datadog.py

+3 -2 lines changed

@@ -10,6 +10,7 @@
 from typing import TYPE_CHECKING, Any
 
 from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError
+from aws_lambda_powertools.metrics.functions import is_metrics_disabled
 from aws_lambda_powertools.metrics.provider import BaseProvider
 from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning
 from aws_lambda_powertools.shared import constants
@@ -99,7 +100,6 @@ def add_metric(
             >>>     sales='sam'
             >>> )
         """
-
         # validating metric name
         if not self._validate_datadog_metric_name(name):
             docs = "https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics"
@@ -180,6 +180,7 @@ def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
         raise_on_empty_metrics : bool, optional
             raise exception if no metrics are emitted, by default False
         """
+
         if not raise_on_empty_metrics and len(self.metric_set) == 0:
             warnings.warn(
                 "No application metrics to publish. The cold-start metric may be published if enabled. "
@@ -200,7 +201,7 @@ def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
                     timestamp=metric_item["e"],
                     tags=metric_item["t"],
                 )
-        else:
+        elif not is_metrics_disabled():
             # dd module not found: flush to log, this format can be recognized via datadog log forwarder
             # https://github.com/Datadog/datadog-lambda-python/blob/main/datadog_lambda/metric.py#L77
             for metric_item in metrics:
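
Similarly, a hedged sketch for the Datadog provider; it assumes the datadog_lambda package is not installed, so flushing would normally fall back to the log-forwarder JSON path guarded by the elif above.

    import os

    os.environ["POWERTOOLS_METRICS_DISABLED"] = "true"

    from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics

    metrics = DatadogMetrics(namespace="ServerlessAirline")
    metrics.add_metric(name="SuccessfulBooking", value=1, product="booking")  # tags as keyword args
    metrics.flush_metrics()  # nothing is written to stdout while metrics are disabled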

Diff for: aws_lambda_powertools/shared/constants.py

+1 line changed

@@ -40,6 +40,7 @@
 METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE"
 DATADOG_FLUSH_TO_LOG: str = "DD_FLUSH_TO_LOG"
 SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME"
+METRICS_DISABLED_ENV: str = "POWERTOOLS_METRICS_DISABLED"
 # If the timestamp of log event is more than 2 hours in future, the log event is skipped.
 # If the timestamp of log event is more than 14 days in past, the log event is skipped.
 # See https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AgentReference.html

Diff for: docs/core/metrics.md

+10 -6 lines changed

@@ -34,12 +34,16 @@ If you're new to Amazon CloudWatch, there are five terminologies you must be awa
 ???+ tip
     All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank"}.
 
-Metric has two global settings that will be used across all metrics emitted:
+Metric has three global settings that will be used across all metrics emitted:
 
-| Setting              | Description                                                                      | Environment variable           | Constructor parameter |
-| -------------------- | -------------------------------------------------------------------------------- | ------------------------------ | --------------------- |
-| **Metric namespace** | Logical container where all metrics will be placed e.g. `ServerlessAirline`      | `POWERTOOLS_METRICS_NAMESPACE` | `namespace`           |
-| **Service**          | Optionally, sets **service** metric dimension across all metrics e.g. `payment`  | `POWERTOOLS_SERVICE_NAME`      | `service`             |
+| Setting                        | Description                                                                      | Environment variable           | Constructor parameter |
+| ------------------------------ | -------------------------------------------------------------------------------- | ------------------------------ | --------------------- |
+| **Metric namespace**           | Logical container where all metrics will be placed e.g. `ServerlessAirline`      | `POWERTOOLS_METRICS_NAMESPACE` | `namespace`           |
+| **Service**                    | Optionally, sets **service** metric dimension across all metrics e.g. `payment`  | `POWERTOOLS_SERVICE_NAME`      | `service`             |
+| **Disable Powertools Metrics** | Optionally, disables all Powertools metrics.                                      | `POWERTOOLS_METRICS_DISABLED`  | N/A                   |
+
+???+ info
+    `POWERTOOLS_METRICS_DISABLED` will not disable default metrics created by AWS services.
 
 ???+ tip
     Use your application or main service as the metric namespace to easily group all metrics.
@@ -79,7 +83,7 @@ You can create metrics using `add_metric`, and you can create dimensions for all
 CloudWatch EMF supports a max of 100 metrics per batch. Metrics utility will flush all metrics when adding the 100th metric. Subsequent metrics (101th+) will be aggregated into a new EMF object, for your convenience.
 
 ???+ warning "Warning: Do not create metrics or dimensions outside the handler"
-    Metrics or dimensions added in the global scope will only be added during cold start. Disregard if you that's the intended behavior.
+    Metrics or dimensions added in the global scope will only be added during cold start. Disregard if that's the intended behavior.
 
 ### Adding high-resolution metrics
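
To complement the table above, a hedged example of the three global settings in code; namespace and service are passed via the constructor here, while POWERTOOLS_METRICS_DISABLED=true (or POWERTOOLS_DEV=true) in the function's environment would suppress the EMF output without any code change. Names are illustrative.

    from aws_lambda_powertools import Metrics
    from aws_lambda_powertools.metrics import MetricUnit

    metrics = Metrics(namespace="ServerlessAirline", service="booking")

    @metrics.log_metrics  # flushes at the end of the invocation unless metrics are disabled
    def lambda_handler(event, context):
        metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
        return {"statusCode": 200}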

Diff for: docs/core/metrics/datadog.md

+9 -5 lines changed

@@ -23,7 +23,7 @@ stateDiagram-v2
     DatadogExtension --> Datadog: async
 
     state LambdaExtension {
-        DatadogExtension
+        DatadogExtension
     }
 
 ```
@@ -174,10 +174,14 @@ This has the advantage of keeping cold start metric separate from your applicati
 
 You can use any of the following environment variables to configure `DatadogMetrics`:
 
-| Setting              | Description                                                                        | Environment variable           | Constructor parameter |
-| -------------------- | ---------------------------------------------------------------------------------- | ------------------------------ | --------------------- |
-| **Metric namespace** | Logical container where all metrics will be placed e.g. `ServerlessAirline`        | `POWERTOOLS_METRICS_NAMESPACE` | `namespace`           |
-| **Flush to log**     | Use this when you want to flush metrics to be exported through Datadog Forwarder   | `DD_FLUSH_TO_LOG`              | `flush_to_log`        |
+| Setting                        | Description                                                                        | Environment variable           | Constructor parameter |
+| ------------------------------ | ---------------------------------------------------------------------------------- | ------------------------------ | --------------------- |
+| **Metric namespace**           | Logical container where all metrics will be placed e.g. `ServerlessAirline`        | `POWERTOOLS_METRICS_NAMESPACE` | `namespace`           |
+| **Flush to log**               | Use this when you want to flush metrics to be exported through Datadog Forwarder   | `DD_FLUSH_TO_LOG`              | `flush_to_log`        |
+| **Disable Powertools Metrics** | Optionally, disables all Powertools metrics.                                        | `POWERTOOLS_METRICS_DISABLED`  | N/A                   |
+
+???+ info
+    `POWERTOOLS_METRICS_DISABLED` will not disable default metrics created by AWS services.
 
 ## Advanced
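
To complement the table above, a hedged DatadogMetrics example; flush_to_log=True mirrors DD_FLUSH_TO_LOG, and setting POWERTOOLS_METRICS_DISABLED=true in the environment would suppress emission entirely. Names and tag values are illustrative.

    from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics

    metrics = DatadogMetrics(namespace="ServerlessAirline", flush_to_log=True)

    @metrics.log_metrics
    def lambda_handler(event, context):
        metrics.add_metric(name="SuccessfulBooking", value=1, product="booking")  # tags as keyword args
        return {"statusCode": 200}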

Diff for: docs/index.md

+1 line changed

@@ -432,6 +432,7 @@ When `POWERTOOLS_DEV` is set to a truthy value (`1`, `true`), it'll have the fol
 | __Logger__        | Increase JSON indentation to 4. This will ease local debugging when running functions locally under emulators or direct calls while not affecting unit tests. <br><br> However, Amazon CloudWatch Logs view will degrade as each new line is treated as a new message. |
 | __Event Handler__ | Enable full traceback errors in the response, indent request/responses, and CORS in dev mode (`*`). |
 | __Tracer__        | Future-proof safety to disables tracing operations in non-Lambda environments. This already happens automatically in the Tracer utility. |
+| __Metrics__       | Disables Powertools metrics emission by default. <br><br> However, this can be overridden by explicitly setting POWERTOOLS_METRICS_DISABLED=false, which takes precedence over the dev mode setting. |
 
 ## Debug mode
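
A hedged illustration of the precedence described in the new table row, reusing the helper added in this commit; explicitly setting POWERTOOLS_METRICS_DISABLED=false re-enables emission even when POWERTOOLS_DEV is truthy.

    import os

    from aws_lambda_powertools.metrics.functions import is_metrics_disabled

    os.environ["POWERTOOLS_DEV"] = "true"
    os.environ.pop("POWERTOOLS_METRICS_DISABLED", None)
    assert is_metrics_disabled()        # dev mode alone suppresses metrics

    os.environ["POWERTOOLS_METRICS_DISABLED"] = "false"
    assert not is_metrics_disabled()    # the explicit "false" takes precedence over dev mode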

Diff for: tests/functional/metrics/datadog/test_metrics_datadog.py

+160 lines changed

@@ -334,3 +334,163 @@ def test_namespace_env_var(monkeypatch):
 
     # THEN namespace should match the explicitly passed variable and not the env var
     assert output[0]["m"] == f"{env_namespace}.item_sold"
+
+
+def test_metrics_disabled_with_env_var(monkeypatch, capsys):
+    # GIVEN environment variable is set to disable metrics
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "true")
+
+    # WHEN metrics is initialized and adding metrics
+    metrics = DatadogMetrics()
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN no metrics should have been recorded
+    captured = capsys.readouterr()
+    assert not captured.out
+
+
+def test_metrics_disabled_persists_after_flush(monkeypatch, capsys):
+    # GIVEN environment variable is set to disable metrics
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "true")
+    metrics = DatadogMetrics()
+
+    # WHEN multiple operations are performed with flush in between
+    metrics.add_metric(name="metric1", value=1)
+    metrics.flush_metrics()
+
+    # THEN first flush should not emit any metrics
+    captured = capsys.readouterr()
+    assert not captured.out
+
+    # WHEN adding and flushing more metrics
+    metrics.add_metric(name="metric2", value=2)
+    metrics.flush_metrics()
+
+    # THEN second flush should also not emit any metrics
+    captured = capsys.readouterr()
+    assert not captured.out
+
+
+def test_metrics_disabled_with_namespace(monkeypatch, capsys):
+    # GIVEN environment variable is set to disable metrics
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "true")
+
+    # WHEN metrics is initialized with namespace and service
+    metrics = DatadogMetrics(namespace="test_namespace")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN no metrics should have been recorded
+    captured = capsys.readouterr()
+    assert not captured.out
+
+
+def test_metrics_disabled_with_dev_mode_true(monkeypatch, capsys):
+    # GIVEN dev mode is enabled
+    monkeypatch.setenv("POWERTOOLS_DEV", "true")
+
+    # WHEN metrics is initialized
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN no metrics should have been recorded
+    captured = capsys.readouterr()
+    assert not captured.out
+
+
+def test_metrics_enabled_with_env_var_false(monkeypatch, capsys):
+    # GIVEN environment variable is set to enable metrics
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "false")
+
+    # WHEN metrics is initialized with namespace and metrics added
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN Datadog metrics should be written to stdout
+    output = capsys.readouterr().out
+    metrics_output = json.loads(output)
+
+    assert metrics_output
+
+
+def test_metrics_enabled_with_env_var_not_set(monkeypatch, capsys):
+    # GIVEN environment variable is not set
+    monkeypatch.delenv("POWERTOOLS_METRICS_DISABLED", raising=False)
+
+    # WHEN metrics is initialized with namespace and metrics added
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN metrics should be written to stdout
+    output = capsys.readouterr().out
+    metrics_output = json.loads(output)
+
+    assert "test.test_metric" in metrics_output["m"]
+
+
+def test_metrics_enabled_with_dev_mode_false(monkeypatch, capsys):
+    # GIVEN dev mode is disabled
+    monkeypatch.setenv("POWERTOOLS_DEV", "false")
+
+    # WHEN metrics is initialized
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN metrics should be written to stdout
+    output = capsys.readouterr().out
+    metrics_output = json.loads(output)
+    assert metrics_output
+
+
+def test_metrics_disabled_dev_mode_overrides_metrics_disabled(monkeypatch, capsys):
+    # GIVEN dev mode is enabled but metrics disabled is false
+    monkeypatch.setenv("POWERTOOLS_DEV", "true")
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "false")
+
+    # WHEN metrics is initialized
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN metrics should be written to stdout since POWERTOOLS_METRICS_DISABLED is false
+    output = capsys.readouterr().out
+    assert output  # First verify we have output
+    metrics_output = json.loads(output)
+    assert metrics_output  # Then verify it's valid JSON
+    assert "test.test_metric" in metrics_output["m"]  # Verify the metric is present
+
+
+def test_metrics_enabled_with_both_false(monkeypatch, capsys):
+    # GIVEN both dev mode and metrics disabled are false
+    monkeypatch.setenv("POWERTOOLS_DEV", "false")
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "false")
+
+    # WHEN metrics is initialized
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN metrics should be written to stdout
+    output = capsys.readouterr().out
+    metrics_output = json.loads(output)
+    assert metrics_output
+
+
+def test_metrics_disabled_with_dev_mode_false_and_metrics_disabled_true(monkeypatch, capsys):
+    # GIVEN dev mode is false but metrics disabled is true
+    monkeypatch.setenv("POWERTOOLS_DEV", "false")
+    monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "true")
+
+    # WHEN metrics is initialized
+    metrics = DatadogMetrics(namespace="test")
+    metrics.add_metric(name="test_metric", value=1)
+    metrics.flush_metrics()
+
+    # THEN no metrics should have been recorded
+    captured = capsys.readouterr()
+    assert not captured.out
