diff --git a/docs/content/core/logging.mdx b/docs/content/core/logging.mdx
index 6d6591f9d..ae355c605 100644
--- a/docs/content/core/logging.mdx
+++ b/docs/content/core/logging.mdx
@@ -100,6 +100,7 @@ public class App implements RequestHandler<APIGatewayProxyRequestEvent, APIGatewayProxyResponseEvent>
 
 This is disabled by default to prevent sensitive info being logged.
 
@@ -165,6 +166,42 @@ public class App implements RequestHandler<APIGatewayProxyRequestEvent, APIGatewayProxyResponseEvent> {
+
+    Logger log = LogManager.getLogger();
+
+    // highlight-start
+    static {
+        ObjectMapper objectMapper = new ObjectMapper();
+        LoggingUtils.defaultObjectMapper(objectMapper);
+    }
+    // highlight-end
+
+    @Logging(logEvent = true)
+    public APIGatewayProxyResponseEvent handleRequest(final APIGatewayProxyRequestEvent input, final Context context) {
+        ...
+    }
+}
+```
+
 ## Sampling debug logs
 
 You can dynamically set a percentage of your logs to **DEBUG** level via env var `POWERTOOLS_LOGGER_SAMPLE_RATE` or
diff --git a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/LoggingUtils.java b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/LoggingUtils.java
index 6270c7049..839bb1d6a 100644
--- a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/LoggingUtils.java
+++ b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/LoggingUtils.java
@@ -15,6 +15,7 @@
 import java.util.Map;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.logging.log4j.ThreadContext;
 
 /**
@@ -23,6 +24,7 @@
  * {@see Logging}
  */
 public final class LoggingUtils {
+    private static ObjectMapper objectMapper = new ObjectMapper();
 
     private LoggingUtils() {
     }
@@ -48,4 +50,18 @@ public static void appendKey(String key, String value) {
     public static void appendKeys(Map<String, String> customKeys) {
         ThreadContext.putAll(customKeys);
     }
+
+    /**
+     * Sets the instance of ObjectMapper which is used for serialising the event when
+     * {@code @Logging(logEvent = true)} is used.
+     *
+     * @param objectMapper Custom ObjectMapper implementation to be used for logging the serialised event
+     */
+    public static void defaultObjectMapper(ObjectMapper objectMapper) {
+        LoggingUtils.objectMapper = objectMapper;
+    }
+
+    public static ObjectMapper objectMapper() {
+        return objectMapper;
+    }
 }
diff --git a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspect.java b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspect.java
index 421462e0c..3d0d035b6 100644
--- a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspect.java
+++ b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspect.java
@@ -24,7 +24,6 @@
 import java.util.Random;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -49,12 +48,12 @@
 import static software.amazon.lambda.powertools.core.internal.LambdaHandlerProcessor.serviceName;
 import static software.amazon.lambda.powertools.logging.LoggingUtils.appendKey;
 import static software.amazon.lambda.powertools.logging.LoggingUtils.appendKeys;
+import static software.amazon.lambda.powertools.logging.LoggingUtils.objectMapper;
 import static software.amazon.lambda.powertools.logging.internal.SystemWrapper.getenv;
 
 @Aspect
 public final class LambdaLoggingAspect {
     private static final Logger LOG = LogManager.getLogger(LambdaLoggingAspect.class);
-    private static final ObjectMapper MAPPER = new ObjectMapper();
     private static final Random SAMPLER = new Random();
 
     private static final String LOG_LEVEL = System.getenv("LOG_LEVEL");
@@ -175,7 +174,7 @@ private Object[] logFromInputStream(final ProceedingJoinPoint pjp) {
 
             Logger log = logger(pjp);
 
-            asJson(pjp, MAPPER.readValue(bytes, Map.class))
+            asJson(pjp, objectMapper().readValue(bytes, Map.class))
                     .ifPresent(log::info);
 
         } catch (IOException e) {
@@ -189,7 +188,7 @@ private Object[] logFromInputStream(final ProceedingJoinPoint pjp) {
 
     private Optional<String> asJson(final ProceedingJoinPoint pjp, final Object target) {
         try {
-            return ofNullable(MAPPER.writeValueAsString(target));
+            return ofNullable(objectMapper().writeValueAsString(target));
         } catch (JsonProcessingException e) {
             logger(pjp).error("Failed logging event of type {}", target.getClass(), e);
             return empty();
diff --git a/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/handlers/PowerToolLogEventEnabledWithCustomMapper.java b/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/handlers/PowerToolLogEventEnabledWithCustomMapper.java
new file mode 100644
index 000000000..d761c9ac0
--- /dev/null
+++ b/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/handlers/PowerToolLogEventEnabledWithCustomMapper.java
@@ -0,0 +1,49 @@
+package software.amazon.lambda.powertools.logging.handlers;
+
+import java.io.IOException;
+
+import com.amazonaws.services.lambda.runtime.Context;
+import com.amazonaws.services.lambda.runtime.RequestHandler;
+import com.amazonaws.services.lambda.runtime.events.models.s3.S3EventNotification;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import software.amazon.lambda.powertools.logging.Logging;
+import software.amazon.lambda.powertools.logging.LoggingUtils;
+
+public class PowerToolLogEventEnabledWithCustomMapper implements RequestHandler<S3EventNotification, Object> {
+
+    static {
+        ObjectMapper objectMapper = new ObjectMapper();
+        SimpleModule module = new SimpleModule();
+        module.addSerializer(S3EventNotification.class, new S3EventNotificationSerializer());
+        objectMapper.registerModule(module);
+        LoggingUtils.defaultObjectMapper(objectMapper);
+    }
+
+    @Logging(logEvent = true)
+    @Override
+    public Object handleRequest(S3EventNotification input, Context context) {
+        return null;
+    }
+
+    static class S3EventNotificationSerializer extends StdSerializer<S3EventNotification> {
+
+        public S3EventNotificationSerializer() {
+            this(null);
+        }
+
+        public S3EventNotificationSerializer(Class<S3EventNotification> t) {
+            super(t);
+        }
+
+        @Override
+        public void serialize(S3EventNotification o, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException {
+            jsonGenerator.writeStartObject();
+            jsonGenerator.writeStringField("eventSource", o.getRecords().get(0).getEventSource());
+            jsonGenerator.writeEndObject();
+        }
+    }
+}
diff --git a/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspectTest.java b/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspectTest.java
index 7f38db0a7..64c3f13e0 100644
--- a/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspectTest.java
+++ b/powertools-logging/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaLoggingAspectTest.java
@@ -47,6 +47,7 @@
 import software.amazon.lambda.powertools.logging.handlers.PowerToolDisabledForStream;
 import software.amazon.lambda.powertools.logging.handlers.PowerToolLogEventEnabled;
 import software.amazon.lambda.powertools.logging.handlers.PowerToolLogEventEnabledForStream;
+import software.amazon.lambda.powertools.logging.handlers.PowerToolLogEventEnabledWithCustomMapper;
 import static com.amazonaws.services.lambda.runtime.events.models.s3.S3EventNotification.RequestParametersEntity;
 import static com.amazonaws.services.lambda.runtime.events.models.s3.S3EventNotification.ResponseElementsEntity;
@@ -183,6 +184,23 @@ void shouldLogEventForHandler() throws IOException, JSONException {
         assertEquals(expectEvent, event, false);
     }
 
+    @Test
+    void shouldLogEventForHandlerWithOverriddenObjectMapper() throws IOException, JSONException {
+        RequestHandler<S3EventNotification, Object> handler = new PowerToolLogEventEnabledWithCustomMapper();
+        S3EventNotification s3EventNotification = s3EventNotification();
+
+        handler.handleRequest(s3EventNotification, context);
+
+        Map<String, Object> log = parseToMap(Files.lines(Paths.get("target/logfile.json")).collect(joining()));
+
+        String event = (String) log.get("message");
+
+        String expectEvent = new BufferedReader(new InputStreamReader(this.getClass().getResourceAsStream("/customizedLogEvent.json")))
+                .lines().collect(joining("\n"));
+
+        assertEquals(expectEvent, event, false);
+    }
+
     @Test
     void shouldLogEventForStreamAndLambdaStreamIsValid() throws IOException, JSONException {
         requestStreamHandler = new PowerToolLogEventEnabledForStream();
diff --git a/powertools-logging/src/test/resources/customizedLogEvent.json b/powertools-logging/src/test/resources/customizedLogEvent.json
new file mode 100644
index 000000000..d8b0481fe
--- /dev/null
+++ b/powertools-logging/src/test/resources/customizedLogEvent.json
@@ -0,0 +1,3 @@
+{
+  "eventSource": "aws:s3"
+}
\ No newline at end of file
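
For readers of this change, a minimal sketch of how a handler could use the new `LoggingUtils.defaultObjectMapper` hook outside of the test fixtures above. The handler name and the `WRITE_DATES_AS_TIMESTAMPS` customisation are illustrative assumptions, not part of this diff:

```java
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.APIGatewayProxyRequestEvent;
import com.amazonaws.services.lambda.runtime.events.APIGatewayProxyResponseEvent;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import software.amazon.lambda.powertools.logging.Logging;
import software.amazon.lambda.powertools.logging.LoggingUtils;

// Hypothetical handler showing the intended usage: register the custom mapper once,
// before the first invocation, so @Logging(logEvent = true) serialises events with it.
public class OrderHandler implements RequestHandler<APIGatewayProxyRequestEvent, APIGatewayProxyResponseEvent> {

    static {
        ObjectMapper mapper = new ObjectMapper();
        // Example customisation (assumption): write dates as text instead of epoch timestamps.
        mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
        LoggingUtils.defaultObjectMapper(mapper);
    }

    @Logging(logEvent = true)
    @Override
    public APIGatewayProxyResponseEvent handleRequest(APIGatewayProxyRequestEvent input, Context context) {
        return new APIGatewayProxyResponseEvent().withStatusCode(200);
    }
}
```

Because the mapper is held in a static field on `LoggingUtils`, the last registered instance applies JVM-wide, which is why both the docs example and the test handler register it in a static initialiser rather than per request.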