Skip to content

Commit d901a92

Browse files
Revert "Fix bug in processor"
This reverts commit 460fc4e.
1 parent 460fc4e commit d901a92

File tree

5 files changed

+41
-58
lines changed

5 files changed

+41
-58
lines changed

README.md

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,6 @@ Configuration for the notification server is at `config/default.js`.
1313
The following parameters can be set in config files or in env variables:
1414

1515
- LOG_LEVEL: the log level; default value: 'debug'
16-
- KAFKA_GROUP_ID: group id of the consumer; default value: 'submission-processor-es-group'
1716
- KAFKA_URL: comma separated Kafka hosts; default value: 'localhost:9092'
1817
- KAFKA_CLIENT_CERT: Kafka connection certificate, optional; default value is undefined;
1918
if not provided, then SSL connection is not used, direct insecure connection is used;

config/default.js

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -6,7 +6,6 @@ module.exports = {
66
DISABLE_LOGGING: process.env.DISABLE_LOGGING || false, // If true, logging will be disabled
77
LOG_LEVEL: process.env.LOG_LEVEL || 'debug',
88

9-
KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID || 'submission-processor-es-group',
109
KAFKA_URL: process.env.KAFKA_URL || 'localhost:9092',
1110
// below are used for secure Kafka connection, they are optional
1211
// for the local Kafka, they are not needed

package-lock.json

Lines changed: 23 additions & 37 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -35,7 +35,7 @@
3535
"http-aws-es": "^6.0.0",
3636
"joi": "^9.0.4",
3737
"lodash": "^4.17.10",
38-
"no-kafka": "^3.4.3",
38+
"no-kafka": "^3.2.4",
3939
"topcoder-healthcheck-dropin": "^1.0.2",
4040
"winston": "^2.2.0"
4141
},

src/app.js

Lines changed: 17 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -3,21 +3,23 @@
33
*/
44

55
global.Promise = require('bluebird')
6+
const _ = require('lodash')
67
const config = require('config')
78
const logger = require('./common/logger')
89
const Kafka = require('no-kafka')
10+
const co = require('co')
911
const ProcessorService = require('./services/ProcessorService')
1012
const healthcheck = require('topcoder-healthcheck-dropin')
1113

1214
// create consumer
13-
const options = { connectionString: config.KAFKA_URL, groupId: config.KAFKA_GROUP_ID, handlerConcurrency: 1 }
15+
const options = { connectionString: config.KAFKA_URL, handlerConcurrency: 1 }
1416
if (config.KAFKA_CLIENT_CERT && config.KAFKA_CLIENT_CERT_KEY) {
1517
options.ssl = { cert: config.KAFKA_CLIENT_CERT, key: config.KAFKA_CLIENT_CERT_KEY }
1618
}
17-
const consumer = new Kafka.GroupConsumer(options)
19+
const consumer = new Kafka.SimpleConsumer(options)
1820

1921
// data handler
20-
const dataHandler = async (messageSet, topic, partition) => Promise.each(messageSet, async (m) => {
22+
const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, (m) => {
2123
const message = m.message.value.toString('utf8')
2224
logger.info(`Handle Kafka event message; Topic: ${topic}; Partition: ${partition}; Offset: ${
2325
m.offset}; Message: ${message}.`)
@@ -35,26 +37,24 @@ const dataHandler = async (messageSet, topic, partition) => Promise.each(message
3537
// ignore the message
3638
return
3739
}
38-
try {
40+
return co(function * () {
3941
switch (topic) {
4042
case config.CREATE_DATA_TOPIC:
41-
await ProcessorService.create(messageJSON)
43+
yield ProcessorService.create(messageJSON)
4244
break
4345
case config.UPDATE_DATA_TOPIC:
44-
await ProcessorService.update(messageJSON)
46+
yield ProcessorService.update(messageJSON)
4547
break
4648
case config.DELETE_DATA_TOPIC:
47-
await ProcessorService.remove(messageJSON)
49+
yield ProcessorService.remove(messageJSON)
4850
break
4951
default:
5052
throw new Error(`Invalid topic: ${topic}`)
5153
}
52-
54+
})
5355
// commit offset
54-
await consumer.commitOffset({ topic, partition, offset: m.offset })
55-
} catch (err) {
56-
logger.error(err)
57-
}
56+
.then(() => consumer.commitOffset({ topic, partition, offset: m.offset }))
57+
.catch((err) => logger.error(err))
5858
})
5959

6060
// check if there is kafka connection alive
@@ -70,14 +70,13 @@ function check () {
7070
return connected
7171
}
7272

73-
const topics = [config.CREATE_DATA_TOPIC, config.UPDATE_DATA_TOPIC, config.DELETE_DATA_TOPIC]
74-
// consume configured topics
7573
consumer
76-
.init([{
77-
subscriptions: topics,
78-
handler: dataHandler
79-
}])
74+
.init()
75+
// consume configured topics
8076
.then(() => {
8177
healthcheck.init([check])
78+
79+
const topics = [config.CREATE_DATA_TOPIC, config.UPDATE_DATA_TOPIC, config.DELETE_DATA_TOPIC]
80+
_.each(topics, (tp) => consumer.subscribe(tp, { time: Kafka.LATEST_OFFSET }, dataHandler))
8281
})
8382
.catch((err) => logger.error(err))

0 commit comments

Comments (0)