3
3
*/
4
4
5
5
global . Promise = require ( 'bluebird' )
6
- const _ = require ( 'lodash' )
7
6
const config = require ( 'config' )
8
7
const logger = require ( './common/logger' )
9
8
const Kafka = require ( 'no-kafka' )
10
- const co = require ( 'co' )
11
9
const ProcessorService = require ( './services/ProcessorService' )
12
10
const healthcheck = require ( 'topcoder-healthcheck-dropin' )
13
11
14
12
// create consumer
// groupId puts this process in a Kafka consumer group (offsets managed per group);
// handlerConcurrency: 1 processes one batch at a time so offsets commit in order.
const options = { connectionString: config.KAFKA_URL, groupId: config.KAFKA_GROUP_ID, handlerConcurrency: 1 }
// enable TLS client auth only when both the certificate and its key are configured
if (config.KAFKA_CLIENT_CERT && config.KAFKA_CLIENT_CERT_KEY) {
  options.ssl = { cert: config.KAFKA_CLIENT_CERT, key: config.KAFKA_CLIENT_CERT_KEY }
}
const consumer = new Kafka.GroupConsumer(options)
20
18
21
19
// data handler
22
- const dataHandler = ( messageSet , topic , partition ) => Promise . each ( messageSet , ( m ) => {
20
// Handles one Kafka batch for a single topic/partition: each message is
// processed sequentially (bluebird Promise.each), dispatched by topic, and its
// offset committed only after successful processing.
+ const dataHandler = async ( messageSet , topic , partition ) => Promise . each ( messageSet , async ( m ) => {
23
21
// decode the raw Kafka payload buffer to a UTF-8 string
const message = m . message . value . toString ( 'utf8' )
24
22
logger . info ( `Handle Kafka event message; Topic: ${ topic } ; Partition: ${ partition } ; Offset: ${
25
23
m . offset } ; Message: ${ message } .`)
@@ -37,24 +35,26 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, (
37
35
// ignore the message
38
36
return
39
37
}
40
- return co ( function * ( ) {
38
// Dispatch to the processor service matching the topic; messageJSON is
// produced by the parsing/validation code elided above — TODO confirm.
+ try {
41
39
switch ( topic ) {
42
40
case config . CREATE_DATA_TOPIC :
43
- yield ProcessorService . create ( messageJSON )
41
+ await ProcessorService . create ( messageJSON )
44
42
break
45
43
case config . UPDATE_DATA_TOPIC :
46
- yield ProcessorService . update ( messageJSON )
44
+ await ProcessorService . update ( messageJSON )
47
45
break
48
46
case config . DELETE_DATA_TOPIC :
49
- yield ProcessorService . remove ( messageJSON )
47
+ await ProcessorService . remove ( messageJSON )
50
48
break
51
49
default :
52
50
throw new Error ( `Invalid topic: ${ topic } ` )
53
51
}
54
- } )
52
+
55
53
// commit offset
56
- . then ( ( ) => consumer . commitOffset ( { topic, partition, offset : m . offset } ) )
57
- . catch ( ( err ) => logger . error ( err ) )
54
// reached only when processing succeeded, so a failing message's offset
// stays uncommitted
+ await consumer . commitOffset ( { topic, partition, offset : m . offset } )
55
// NOTE(review): processing errors are logged and swallowed; presumably the
// uncommitted message is re-delivered by Kafka — confirm this is intended.
+ } catch ( err ) {
56
+ logger . error ( err )
57
+ }
58
58
} )
59
59
60
60
// check if there is kafka connection alive
@@ -70,13 +70,14 @@ function check () {
70
70
return connected
71
71
}
72
72
73
const topics = [config.CREATE_DATA_TOPIC, config.UPDATE_DATA_TOPIC, config.DELETE_DATA_TOPIC]
// consume configured topics: a single GroupConsumer strategy subscribes to all
// data topics and routes every message batch through dataHandler
consumer
  .init([{
    subscriptions: topics,
    handler: dataHandler
  }])
  .then(() => {
    // register the liveness probe once the consumer has connected
    healthcheck.init([check])
  })
  .catch((err) => logger.error(err))