@@ -10,6 +10,8 @@ const healthcheck = require('topcoder-healthcheck-dropin');
 const auditTrail = require('./services/auditTrail');
 const kafkaOptions = config.get('KAFKA')
 const postMessage = require('./services/posttoslack')
+const kafkaService = require('./services/pushToDirectKafka')
+
 //const isSslEnabled = kafkaOptions.SSL && kafkaOptions.SSL.cert && kafkaOptions.SSL.key

 const options = {
@@ -50,25 +52,25 @@ async function dataHandler(messageSet, topic, partition) {
   let message
   let ifxstatus = 0
   try {
-    // let ifxstatus = 0
+    // let ifxstatus = 0
     let cs_payloadseqid;
     message = JSON.parse(m.message.value)
     //logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
     if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
     logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp}`);
     //await updateInformix(message)
     ifxstatus = await updateInformix(message)
-    // if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') {
-    // logger.debug(`operation : ${message.payload.operation}`)
-    // logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`)
-    // auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn,
-    // message.payload.operation, "push-to-kafka", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer')
+    // if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') {
+    // logger.debug(`operation : ${message.payload.operation}`)
+    // logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`)
+    // auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn,
+    // message.payload.operation, "push-to-kafka", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer')
     // await retrypushtokakfa(message, topic, m, partition)
     //} else {
-    logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`)
-    if (message.payload['retryCount']) retryvar = message.payload.retryCount;
-    await auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn,
-      message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer')
+    logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`)
+    if (message.payload['retryCount']) retryvar = message.payload.retryCount;
+    await auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn,
+      message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer')
     //}
   } catch (err) {
     logger.debug(`Consumer:ifx return status error for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`)
@@ -99,16 +101,18 @@ async function retrypushtokakfa(message, topic, m, partition) {
   if (message.payload.retryCount >= config.KAFKA.maxRetry) {
     logger.debug('Reached at max retry counter, sending it to error queue: ', config.KAFKA.errorTopic);
     logger.debug(`error-sync: consumer max-retry-limit reached`)
-    await callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
+    // await callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
     let notifiyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
     notifiyMessage.payload['recipients'] = config.KAFKA.recipients
     logger.debug('pushing following message on kafka error alert queue:')
     //retry push to error topic kafka again
-    await pushToKafka(notifiyMessage)
+    //await pushToKafka(notifiyMessage)
+    await kafkaService.pushToKafka(notifiyMessage)
     return
   }
   message.payload['retryCount'] = message.payload.retryCount + 1;
-  await pushToKafka(message)
+  //await pushToKafka(message)
+  kafkaService.pushToKafka(message)
   var errmsg9 = `consumer : Retry for Kafka push : retrycount : "${message.payload.retryCount}" : "${cs_payloadseqid}"`
   logger.debug(errmsg9)
 }
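
For context on the new dependency: `./services/pushToDirectKafka` itself is not part of this diff, so the sketch below is only a guess at its shape. It assumes the module exports an async `pushToKafka(message)` that publishes the message to the topic named in `message.topic`, and it assumes a `no-kafka` producer plus a `KAFKA.brokers_url` config key; none of these details are confirmed by the change.

```js
// Hypothetical sketch only: the real ./services/pushToDirectKafka may differ.
const Kafka = require('no-kafka')   // assumption: project already uses no-kafka
const config = require('config')

const kafkaOptions = config.get('KAFKA')
// brokers_url is an assumed config key for the broker connection string
const producer = new Kafka.Producer({ connectionString: kafkaOptions.brokers_url })

let ready = null

// Publish the message object to the topic carried in message.topic.
async function pushToKafka (message) {
  if (!ready) ready = producer.init()   // initialise the producer once, lazily
  await ready
  return producer.send({
    topic: message.topic,
    message: { value: JSON.stringify(message) }
  })
}

module.exports = { pushToKafka }
```

On the consumer side, the change then simply swaps the old `pushToKafka` helper for `kafkaService.pushToKafka` with the same call signature.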