Skip to content
This repository was archived by the owner on Mar 13, 2025. It is now read-only.

Commit fdd5df6

Browse files
authored
Merge pull request #6 from topcoder-platform/debug-es
Debug why ES will not update
2 parents 08b52f4 + 6987fae commit fdd5df6

File tree

3 files changed

+40
-1
lines changed

3 files changed

+40
-1
lines changed

package-lock.json

Lines changed: 13 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
},
2828
"dependencies": {
2929
"@hapi/joi": "^15.1.0",
30+
"async-mutex": "^0.2.4",
3031
"aws-sdk": "^2.476.0",
3132
"bluebird": "^3.5.5",
3233
"config": "^3.1.0",

src/app.js

Lines changed: 26 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,28 @@ const healthcheck = require('topcoder-healthcheck-dropin')
99
const logger = require('./common/logger')
1010
const helper = require('./common/helper')
1111
const ProcessorService = require('./services/ProcessorService')
12+
const Mutex = require('async-mutex').Mutex
1213

1314
// Start kafka consumer
1415
logger.info('Starting kafka consumer')
1516
// create consumer
1617
const consumer = new Kafka.GroupConsumer(helper.getKafkaOptions())
1718

19+
let count = 0
20+
let mutex = new Mutex()
21+
22+
async function getLatestCount () {
23+
const release = await mutex.acquire()
24+
25+
try {
26+
count = count + 1
27+
28+
return count
29+
} finally {
30+
release()
31+
}
32+
}
33+
1834
/*
1935
* Data handler linked with Kafka consumer
2036
* Whenever a new message is received by Kafka consumer,
@@ -25,12 +41,17 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, a
2541
logger.info(`Handle Kafka event message; Topic: ${topic}; Partition: ${partition}; Offset: ${
2642
m.offset}; Message: ${message}.`)
2743
let messageJSON
44+
let messageCount = await getLatestCount()
45+
46+
logger.debug(`Current message count: ${messageCount}`)
2847
try {
2948
messageJSON = JSON.parse(message)
3049
} catch (e) {
3150
logger.error('Invalid message JSON.')
3251
logger.logFullError(e)
3352

53+
logger.debug(`Committing offset after processing message with count ${messageCount}`)
54+
3455
// commit the message and ignore it
3556
await consumer.commitOffset({ topic, partition, offset: m.offset })
3657
return
@@ -39,6 +60,8 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, a
3960
if (messageJSON.topic !== topic) {
4061
logger.error(`The message topic ${messageJSON.topic} doesn't match the Kafka topic ${topic}.`)
4162

63+
logger.debug(`Committing offset after processing message with count ${messageCount}`)
64+
4265
// commit the message and ignore it
4366
await consumer.commitOffset({ topic, partition, offset: m.offset })
4467
return
@@ -57,10 +80,12 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, a
5780
break
5881
}
5982

60-
logger.debug('Successfully processed message')
83+
logger.debug(`Successfully processed message with count ${messageCount}`)
6184
} catch (err) {
6285
logger.logFullError(err)
6386
} finally {
87+
logger.debug(`Committing offset after processing message with count ${messageCount}`)
88+
6489
// Commit offset regardless of error
6590
await consumer.commitOffset({ topic, partition, offset: m.offset })
6691
}

0 commit comments

Comments
 (0)