@@ -9,14 +9,13 @@ const Kafka = require('no-kafka')
 const co = require('co')
 const ProcessorService = require('./services/ProcessorService')
 const healthcheck = require('topcoder-healthcheck-dropin')
-const _ = require('lodash')
 
 // create consumer
-const options = { connectionString: config.KAFKA_URL }
+const options = { connectionString: config.KAFKA_URL, groupId: config.KAFKA_GROUP_ID }
 if (config.KAFKA_CLIENT_CERT && config.KAFKA_CLIENT_CERT_KEY) {
   options.ssl = { cert: config.KAFKA_CLIENT_CERT, key: config.KAFKA_CLIENT_CERT_KEY }
 }
-const consumer = new Kafka.SimpleConsumer(options)
+const consumer = new Kafka.GroupConsumer(options)
 
 // data handler
 const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, (m) => {
@@ -47,14 +46,12 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, (
     return
   }
 
-
   if (topic === config.SUBMISSION_CREATE_TOPIC && messageJSON.payload.fileType === 'url') {
     logger.debug(`Ignoring message in topic ${messageJSON.topic} with file type as url`)
     // ignore the message
     return
   }
 
-
   return co(function * () {
     switch (topic) {
       case config.SUBMISSION_CREATE_TOPIC:
@@ -67,9 +64,14 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, (
         throw new Error(`Invalid topic: ${topic}`)
     }
   })
-    // commit offset
-    .then(() => consumer.commitOffset({ topic, partition, offset: m.offset }))
-    .catch((err) => logger.error(err))
+    // commit offset regardless of errors
+    .then(() => {
+      consumer.commitOffset({ topic, partition, offset: m.offset })
+    })
+    .catch((err) => {
+      logger.error(err)
+      consumer.commitOffset({ topic, partition, offset: m.offset })
+    })
 })
 
 // check if there is kafka connection alive
@@ -85,12 +87,18 @@ function check () {
   return connected
 }
 
+const topics = [config.SUBMISSION_CREATE_TOPIC, config.AVSCAN_TOPIC]
+
 consumer
-  .init()
-  // consume configured topic
+  .init([{
+    subscriptions: topics,
+    handler: dataHandler
+  }])
+  // consume configured topics
   .then(() => {
+    logger.info('Initialized.......')
     healthcheck.init([check])
-    const topics = [config.SUBMISSION_CREATE_TOPIC, config.AVSCAN_TOPIC]
-    _.each(topics, (tp) => consumer.subscribe(tp, { time: Kafka.LATEST_OFFSET }, dataHandler))
+    logger.info('Adding topics successfully.......')
+    logger.info(topics)
+    logger.info('Kick Start.......')
   })
-  .catch((err) => logger.error(err))