Skip to content

Commit 3dbfde7

Browse files
author
nkumar
committed
log update
Committer: nkumar
1 parent 8855c20 commit 3dbfde7

File tree

5 files changed

+48
-20
lines changed

5 files changed

+48
-20
lines changed

src/consumer.js

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -41,56 +41,61 @@ const terminate = () => process.exit()
4141
* @param {String} topic The name of the message topic
4242
* @param {Number} partition The kafka partition to which messages are written
4343
*/
44-
let message;
45-
let cs_payloadseqid;
44+
//let message;
45+
//let cs_payloadseqid;
4646
async function dataHandler(messageSet, topic, partition) {
47+
let cs_payloadseqid
4748
for (const m of messageSet) { // Process messages sequentially
4849
let message
4950
try {
50-
//let message
5151
message = JSON.parse(m.message.value)
52-
// cs_payloadseqid = message.payload.payloadseqid
53-
//console.log(message);
5452
logger.debug('Received message from kafka:')
55-
logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
53+
if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
54+
logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
5655
await updateInformix(message)
5756
await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success
58-
await auditTrail([message.payload.payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
57+
auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
5958
message.payload.operation,"Informix-updated","","","",message.payload.data, message.timestamp,message.topic],'consumer')
6059
} catch (err) {
6160
logger.error(`Could not process kafka message or informix DB error: "${err.message}"`)
6261
//logger.logFullError(err)
62+
logger.debug(`error-sync: consumer "${err.message}"`)
6363
if (!cs_payloadseqid){
6464
cs_payloadseqid= 'err-'+(new Date()).getTime().toString(36) + Math.random().toString(36).slice(2);
65-
}
65+
}
6666

6767
await auditTrail([cs_payloadseqid,3333,'message.payload.table','message.payload.Uniquecolumn',
6868
'message.payload.operation',"Error-Consumer","",err.message,"",'message.payload.data',new Date(),'message.topic'],'consumer')
6969
try {
70+
var retryvar
71+
if (message.payload['retryCount']) retryvar = message.payload.retryCount;
7072
await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit success as will re-publish
73+
await auditTrail([cs_payloadseqid,3333,'message.payload.table','message.payload.Uniquecolumn',
74+
'message.payload.operation',"Informix-Updated1",retryvar,"","",'message.payload.data',new Date(),'message.topic'],'consumer')
7175
logger.debug(`Trying to push same message after adding retryCounter`)
7276
if (!message.payload.retryCount) {
7377
message.payload.retryCount = 0
7478
logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA.maxRetry);
7579
}
7680
if (message.payload.retryCount >= config.KAFKA.maxRetry) {
7781
logger.debug('Reached max retry counter, sending it to error queue: ', config.KAFKA.errorTopic);
78-
82+
logger.debug(`error-sync: consumer max-retry-limit reached`)
7983
let notifiyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
8084
notifiyMessage.payload['recipients'] = config.KAFKA.recipients
8185
logger.debug('pushing following message on kafka error alert queue:')
82-
logger.debug(notifiyMessage)
86+
//logger.debug(notifiyMessage)
8387
await pushToKafka(notifiyMessage)
8488
return
8589
}
8690
message.payload['retryCount'] = message.payload.retryCount + 1;
87-
//await auditTrail([message.payload.payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
88-
// message.payload.operation,"Error",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer')
8991
await pushToKafka(message)
9092
logger.debug(` After kafka push Retry Count "${message.payload.retryCount}"`)
9193
} catch (err) {
9294

95+
await auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
96+
message.payload.operation,"Error-republishing",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer')
9397
logger.error("Error occurred in re-publishing kafka message", err)
98+
logger.debug(`error-sync: consumer re-publishing "${err.message}"`)
9499
}
95100
}
96101
}
@@ -111,6 +116,7 @@ async function setupKafkaConsumer() {
111116
} catch (err) {
112117
logger.error('Could not setup kafka consumer')
113118
logger.logFullError(err)
119+
logger.debug(`error-sync: consumer kafka-setup "${err.message}"`)
114120
terminate()
115121
}
116122
}

src/producer.js

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,24 +42,35 @@ async function setupPgClient () {
4242
await pushToKafka(payload)
4343
} else {
4444
logger.debug('Ignoring message with incorrect topic or originator')
45+
4546
}
4647
await auditTrail([pl_seqid,pl_processid,pl_table,pl_uniquecolumn,pl_operation,"push-to-kafka","","","",pl_payload,pl_timestamp,pl_topic],'producer')
4748
} catch (error) {
4849
logger.error('Could not parse message payload')
50+
logger.debug(`error-sync: producer parse message : "${error.message}"`)
4951
await auditTrail([pl_randonseq,1111,'pl_table','pl_uniquecolumn','pl_operation',"error-producer","","",error.message,'pl_payload',new Date(),'pl_topic'],'producer')
5052
logger.logFullError(error)
5153
}
5254
})
5355
logger.info('pg-ifx-sync producer: Listening to notifications')
5456
} catch (err) {
57+
logger.debug(`error-sync: producer postgres-setup 1 :"${err.message}"`)
5558
logger.error('Could not setup postgres client')
5659
logger.logFullError(err)
60+
5761
terminate()
5862
}
5963
}
60-
64+
const terminate = () => process.exit()
6165
async function run () {
62-
await setupPgClient()
66+
try {
67+
await setupPgClient()
68+
}
69+
catch(err)
70+
{
71+
logger.debug(`Could not setup postgres client`)
72+
logger.debug(`error-sync: producer postgres-setup 0 :"${err.message}"`)
73+
}
6374
}
6475

6576
run()

src/producer_dd.js

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
const config = require('config')
55
const pg = require('pg')
66
const logger = require('./common/logger')
7+
//const pushToKafka = require('./services/pushToKafka')
78
const pushToDynamoDb = require('./services/pushToDynamoDb')
89
const pgOptions = config.get('POSTGRES')
910
const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${pgOptions.database}`
@@ -12,6 +13,7 @@ const auditTrail = require('./services/auditTrail');
1213
const express = require('express')
1314
const app = express()
1415
const port = 3000
16+
//console.log(`pgConnectionString value = ${pgConnectionString}`)
1517
var pl_processid;
1618
var pl_randonseq = 'err-'+(new Date()).getTime().toString(36) + Math.random().toString(36).slice(2);
1719
async function setupPgClient () {
@@ -35,13 +37,14 @@ async function setupPgClient () {
3537
var pl_payload = JSON.stringify(payload.payload)
3638
const validTopicAndOriginator = (pgOptions.triggerTopics.includes(payload.topic)) && (pgOptions.triggerOriginators.includes(payload.originator)) // Check if valid topic and originator
3739
if (validTopicAndOriginator) {
38-
logger.info(`Producer DynamoDb : ${pl_seqid} ${pl_processid} ${pl_table} ${pl_uniquecolumn} ${pl_operation} ${payload.timestamp}`);
40+
logger.debug(`Producer DynamoDb : ${pl_seqid} ${pl_processid} ${pl_table} ${pl_uniquecolumn} ${pl_operation} ${payload.timestamp}`);
3941
await pushToDynamoDb(payload)
4042
} else {
4143
logger.debug('Ignoring message with incorrect topic or originator')
4244
}
4345
await auditTrail([pl_seqid,pl_processid,pl_table,pl_uniquecolumn,pl_operation,"push-to-DynamoDb","","","",pl_payload,pl_timestamp,pl_topic],'producer')
4446
} catch (error) {
47+
logger.debug(`error-sync: producer_dynamoDb parse message : "${error.message}"`)
4548
logger.error('Could not parse message payload')
4649
await auditTrail([pl_randonseq,2222,'pl_table','pl_uniquecolumn','pl_operation',"error-DynamoDB","","",error.message,'pl_payload',new Date(),'pl_topic'],'producer')
4750
logger.logFullError(error)
@@ -50,14 +53,22 @@ await auditTrail([pl_randonseq,2222,'pl_table','pl_uniquecolumn','pl_operation',
5053
logger.info('Producer DynamoDb: Listening to notifications')
5154
} catch (err) {
5255
logger.error('Could not setup postgres client')
53-
logger.logFullError(err)
56+
logger.debug(`error-sync: producer_dd postgres-setup 1 :"${err.message}"`)
57+
//setup slack alert here
58+
logger.logFullError(err)
5459
terminate()
5560
}
5661
}
57-
62+
const terminate = () => process.exit()
5863
async function run () {
64+
try{
5965
await setupPgClient()
6066
}
67+
catch(err){
68+
logger.debug(`Producer_dd: Could not setup postgres client`)
69+
logger.debug(`error-sync: producer_dynamoDb postgres-setup 0 :"${err.message}"`)
70+
//setup slackmessage here
71+
}}
6172

6273
run()
6374

src/services/auditTrail.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ const logger = require('../common/logger')
55
const pgOptions = config.get('POSTGRES')
66
const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${pgOptions.database}`
77
let pgClient2
8-
console.log(`"${pgConnectionString}"`);
8+
//console.log(`"${pgConnectionString}"`);
99
async function setupPgClient2 () {
1010
pgClient2 = new pg.Client(pgConnectionString)
1111
try {

src/services/pushToDynamoDb.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ async function pushToDynamoDb(payload) {
1818
timestamp: Date.now()
1919
}
2020
}
21-
var docClient = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});
21+
var docClient = new AWS.DynamoDB.DocumentClient({region: 'us-east-1',convertEmptyValues: true});
2222
docClient.put(params, function(err, data) {
2323
if (err) logger.error(err);
2424
else logger.info(data);
@@ -29,5 +29,5 @@ async function pushToDynamoDb(payload) {
2929
}
3030
}
3131

32-
console.log("--from DyanomoDb==")
32+
console.log("--from DynamoDb--")
3333
module.exports = pushToDynamoDb

0 commit comments

Comments
 (0)