@@ -7,8 +7,8 @@ const logger = require('./common/logger')
 const updateInformix = require('./services/updateInformix')
 const pushToKafka = require('./services/pushToKafka')
 const healthcheck = require('topcoder-healthcheck-dropin');
+const auditTrail = require('./services/auditTrail');
 const kafkaOptions = config.get('KAFKA')
-//const sleep = require('sleep');
 const isSslEnabled = kafkaOptions.SSL && kafkaOptions.SSL.cert && kafkaOptions.SSL.key
 const consumer = new Kafka.SimpleConsumer({
   connectionString: kafkaOptions.brokers_url,
@@ -21,17 +21,17 @@ const consumer = new Kafka.SimpleConsumer({
 })


-const check = function () {
-  if (!consumer.client.initialBrokers && !consumer.client.initialBrokers.length) {
-    return false;
-  }
-  let connected = true;
-  consumer.client.initialBrokers.forEach(conn => {
-    logger.debug(`url ${conn.server()} - connected=${conn.connected}`);
-    connected = conn.connected & connected;
-  });
-  return connected;
-};
+const check = function () {
+  if (!consumer.client.initialBrokers || !consumer.client.initialBrokers.length) {
+    return false;
+  }
+  let connected = true;
+  consumer.client.initialBrokers.forEach(conn => {
+    logger.debug(`url ${conn.server()} - connected=${conn.connected}`);
+    connected = conn.connected && connected;
+  });
+  return connected;
+};


 const terminate = () => process.exit()
@@ -41,39 +41,54 @@ const terminate = () => process.exit()
  * @param {String} topic The name of the message topic
  * @param {Number} partition The kafka partition to which messages are written
  */
-async function dataHandler (messageSet, topic, partition) {
+async function dataHandler (messageSet, topic, partition) {
   for (const m of messageSet) { // Process messages sequentially
+    let message
     try {
-      const payload = JSON.parse(m.message.value)
-      logger.debug('Received payload from kafka:')
-      logger.debug(payload)
-      await updateInformix(payload)
-      // await insertConsumerAudit(payload, true, undefined, false)
+      message = JSON.parse(m.message.value)
+      logger.debug('Received message from kafka:')
+      logger.debug(JSON.stringify(message))
+      await updateInformix(message)
       await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success
+      await auditTrail([message.payload.payloadseqid, 'scorecard_consumer', message.payload.table, message.payload.Uniquecolumn,
+        message.payload.operation, 1, 0, '', message.timestamp, new Date(), ''], 'consumer')
     } catch (err) {
       logger.error('Could not process kafka message')
-      logger.logFullError(err)
-      if (!payload.retryCount) {
-        payload.retryCount = 0
-      }
-      if (payload.retryCount >= config.KAFKA.maxRetry) {
-        await pushToKafka(
-          Object.assign({}, payload, { topic: config.KAFKA.errorTopic, recipients: config.KAFKA.recipients })
-        )
-        return
-      }
-      await pushToKafka(
-        Object.assign({}, payload, { retryCount: payload.retryCount + 1 })
-      )
+      // logger.logFullError(err)
+      try {
+        await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit the offset; the message will be re-published for retry
+        logger.debug('Trying to push the same message after adding a retry counter')
+        if (!message.payload.retryCount) {
+          message.payload.retryCount = 0
+          logger.debug('Setting retry counter to 0; max retry count is:', config.KAFKA.maxRetry)
+        }
+        if (message.payload.retryCount >= config.KAFKA.maxRetry) {
+          logger.debug('Reached max retry count, sending message to error queue:', config.KAFKA.errorTopic)
+
+          let notifyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
+          notifyMessage.payload['recipients'] = config.KAFKA.recipients
+          logger.debug('Pushing the following message onto the kafka error alert queue:')
+          logger.debug(notifyMessage)
+          await pushToKafka(notifyMessage)
+          return
+        }
+        message.payload['retryCount'] = message.payload.retryCount + 1
+        await pushToKafka(message)
+        logger.debug('Pushed the same message after incrementing retryCount')
+      } catch (err) {
+        // await auditTrail([message.payload.payloadseqid, 'scorecard_consumer', message.payload.table, message.payload.Uniquecolumn,
+        //   message.payload.operation, 0, message.payload.retryCount, 're-publish kafka err', message.timestamp, new Date(), ''], 'consumer')
+        logger.error('Error occurred while re-publishing kafka message', err)
     }
   }
 }
+}


 /**
  * Initialize kafka consumer
  */
-async function setupKafkaConsumer () {
+async function setupKafkaConsumer () {
   try {
     await consumer.init()
     await consumer.subscribe(kafkaOptions.topic, kafkaOptions.partition, { time: Kafka.LATEST_OFFSET }, dataHandler)
@@ -87,4 +102,3 @@ async function setupKafkaConsumer () {
 }

 setupKafkaConsumer()
-
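
For reference, `./services/auditTrail` is imported above but its implementation is not part of this diff. A minimal sketch of what the consumer appears to assume — an `auditTrail(data, sourceName)` function taking the eleven-element array built in `dataHandler` — could look like the following; the field order mirrors the call site, while the log-only body and file location are assumptions, not the real service:

```js
// services/auditTrail.js — hypothetical sketch, not the module shipped with this PR.
// The consumer calls: auditTrail([payloadseqid, source, table, uniqueColumn,
//   operation, status, retryCount, errorMessage, producedAt, consumedAt, extra], 'consumer')
const logger = require('../common/logger')

async function auditTrail (data, sourceName) {
  const [payloadseqid, source, table, uniqueColumn, operation,
    status, retryCount, errorMessage, producedAt, consumedAt] = data
  // A real implementation would persist this row to an audit table;
  // logging it keeps the sketch self-contained.
  logger.debug(`audit(${sourceName}) seq=${payloadseqid} src=${source} ${operation} ` +
    `${table}.${uniqueColumn} status=${status} retries=${retryCount} ` +
    `error="${errorMessage}" produced=${producedAt} consumed=${consumedAt}`)
}

module.exports = auditTrail
```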
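The retry logic also relies on a particular message shape: a top-level `timestamp`, plus a `payload` object carrying `payloadseqid`, `table`, `Uniquecolumn`, and `operation`, with a `retryCount` that the consumer adds before the first re-publish. A purely illustrative example — topic name and all field values invented — is:

```js
// Illustrative message only; the shape matches what dataHandler reads.
const exampleMessage = {
  topic: 'some.db.topic',            // assumed topic name
  timestamp: '2019-08-01T12:00:00Z', // read as message.timestamp by the auditTrail call
  payload: {
    payloadseqid: 'abc-123',
    table: 'scorecard',
    Uniquecolumn: 'scorecard_id',
    operation: 'UPDATE',
    retryCount: 0 // set by the consumer before the first re-publish
  }
}
```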