@@ -258,7 +258,7 @@ void headersNotStripped() {
 		headers = captor.getValue().headers();
 		assertThat(headers.lastHeader(SerializationUtils.VALUE_DESERIALIZER_EXCEPTION_HEADER)).isNotNull();
 		assertThat(headers.lastHeader(SerializationUtils.KEY_DESERIALIZER_EXCEPTION_HEADER)).isNotNull();
-		assertThat(headers.lastHeader(KafkaHeaders.DLT_KEY_EXCEPTION_MESSAGE).value()).isEqualTo("testK".getBytes());
+		assertThat(new String(headers.lastHeader(KafkaHeaders.DLT_KEY_EXCEPTION_MESSAGE).value())).isEqualTo("testK");
 		assertThat(headers.lastHeader(KafkaHeaders.DLT_EXCEPTION_MESSAGE).value()).isEqualTo("testV".getBytes());
 	}
@@ -399,7 +399,8 @@ void appendOriginalHeaders() {
 		DeadLetterPublishingRecoverer recoverer = new DeadLetterPublishingRecoverer(template);
 		recoverer.setAppendOriginalHeaders(true);
 		recoverer.setStripPreviousExceptionHeaders(false);
-		recoverer.accept(record, new RuntimeException(new IllegalStateException()));
+		recoverer.accept(record, new ListenerExecutionFailedException("Listener failed",
+				new TimestampedException(new RuntimeException("ex1 msg", new IllegalStateException()))));
 		ArgumentCaptor<ProducerRecord> producerRecordCaptor = ArgumentCaptor.forClass(ProducerRecord.class);
 		then(template).should(times(1)).send(producerRecordCaptor.capture());
 		Headers headers = producerRecordCaptor.getValue().headers();
@@ -412,11 +413,15 @@ void appendOriginalHeaders() {
 		Header firstExceptionCauseType = headers.lastHeader(KafkaHeaders.DLT_EXCEPTION_CAUSE_FQCN);
 		Header firstExceptionMessage = headers.lastHeader(KafkaHeaders.DLT_EXCEPTION_MESSAGE);
 		Header firstExceptionStackTrace = headers.lastHeader(KafkaHeaders.DLT_EXCEPTION_STACKTRACE);
+		assertThat(new String(firstExceptionMessage.value())).isEqualTo("Listener failed; ex1 msg");
+		assertThat(new String(firstExceptionType.value())).isEqualTo(ListenerExecutionFailedException.class.getName());
+		assertThat(new String(firstExceptionCauseType.value())).isEqualTo(RuntimeException.class.getName());

 		ConsumerRecord<String, String> anotherRecord = new ConsumerRecord<>("bar", 1, 12L, 4321L,
 				TimestampType.LOG_APPEND_TIME, 321, 321, "bar", null, new RecordHeaders(), Optional.empty());
 		headers.forEach(header -> anotherRecord.headers().add(header));
-		recoverer.accept(anotherRecord, new RuntimeException(new IllegalStateException()));
+		recoverer.accept(anotherRecord, new ListenerExecutionFailedException("Listener failed",
+				new TimestampedException(new RuntimeException("ex2 msg", new IllegalStateException()))));
 		ArgumentCaptor<ProducerRecord> anotherProducerRecordCaptor = ArgumentCaptor.forClass(ProducerRecord.class);
 		then(template).should(times(2)).send(anotherProducerRecordCaptor.capture());
 		Headers anotherHeaders = anotherProducerRecordCaptor.getAllValues().get(1).headers();
@@ -436,6 +441,8 @@ void appendOriginalHeaders() {
 		assertThat(anotherHeaders.lastHeader(KafkaHeaders.DLT_EXCEPTION_CAUSE_FQCN))
 				.isNotSameAs(firstExceptionCauseType);
 		assertThat(anotherHeaders.lastHeader(KafkaHeaders.DLT_EXCEPTION_MESSAGE)).isNotSameAs(firstExceptionMessage);
+		assertThat(new String(anotherHeaders.lastHeader(KafkaHeaders.DLT_EXCEPTION_MESSAGE).value()))
+				.isEqualTo("Listener failed; ex2 msg");
 		assertThat(anotherHeaders.lastHeader(KafkaHeaders.DLT_EXCEPTION_STACKTRACE))
 				.isNotSameAs(firstExceptionStackTrace);
 		Iterator<Header> exceptionHeaders = anotherHeaders.headers(KafkaHeaders.DLT_EXCEPTION_FQCN).iterator();
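For orientation, a minimal sketch (not from the Spring Kafka sources) of how a dead-letter-topic consumer could read back the headers these tests assert. It assumes header values are UTF-8 encoded, matching the new String(...) comparisons in the test; the class name DltHeaderReader and the dltRecord parameter are hypothetical.

import java.nio.charset.StandardCharsets;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;

final class DltHeaderReader {

	private DltHeaderReader() {
	}

	// Header values are raw bytes; decode the most recently appended value
	// for the given header name, or return null if the header is absent.
	static String lastHeaderAsString(ConsumerRecord<?, ?> dltRecord, String headerName) {
		Header header = dltRecord.headers().lastHeader(headerName);
		return header == null ? null : new String(header.value(), StandardCharsets.UTF_8);
	}

	// Example usage with a hypothetical dltRecord, using the same
	// org.springframework.kafka.support.KafkaHeaders constants as the test:
	// String message = lastHeaderAsString(dltRecord, KafkaHeaders.DLT_EXCEPTION_MESSAGE);
	// String exceptionClass = lastHeaderAsString(dltRecord, KafkaHeaders.DLT_EXCEPTION_FQCN);
	// String causeClass = lastHeaderAsString(dltRecord, KafkaHeaders.DLT_EXCEPTION_CAUSE_FQCN);
}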