@@ -79,27 +79,27 @@ function configureKafkaConsumer(handlers) {
       // emailTries[topicName] += 1; //temporary disabling this feature
       emailModel.status = 'FAILED';
       return emailModel.save().then(() => {
-        /*
-         * temporary disabling this feature as there is chance of losing message during
-         * unsubscribe/pausing due to simple kafka consumer
-         */
-        /*
-        const currentTries = emailTries[topicName];
-        if (currentTries > maxErrors) {
-          logger.debug(`Failed to send email. Will sleep for ${pauseTime}s`);
-          emailTries[topicName] = 0;
-
-          schedule.scheduleJob(new Date(now.getTime() + pauseTime * 1000), () => {
-            consumer.subscribe(topic, dataHandler);
-          });
-
-          return consumer.unsubscribe(topic, partition).then(() => {
-            throw result.error
-          });
-        } else {
-          logger.debug(`Failed to send email (retries left ${maxErrors - currentTries})`);
-          throw result.error;
-        }*/
+        /*
+         * temporary disabling this feature as there is chance of losing message during
+         * unsubscribe/pausing due to simple kafka consumer
+         */
+        /*
+        const currentTries = emailTries[topicName];
+        if (currentTries > maxErrors) {
+          logger.debug(`Failed to send email. Will sleep for ${pauseTime}s`);
+          emailTries[topicName] = 0;
+
+          schedule.scheduleJob(new Date(now.getTime() + pauseTime * 1000), () => {
+            consumer.subscribe(topic, dataHandler);
+          });
+
+          return consumer.unsubscribe(topic, partition).then(() => {
+            throw result.error
+          });
+        } else {
+          logger.debug(`Failed to send email (retries left ${maxErrors - currentTries})`);
+          throw result.error;
+        }*/
       });
     }
   }).then(() => consumer.commitOffset({ topic, partition, offset: m.offset })) // commit offset
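
With the pause/resubscribe block commented out, the failure branch in `configureKafkaConsumer` simply marks the record as `FAILED` and lets the chain fall through to the `commitOffset` call, so a failed email is not redelivered. A minimal sketch of that flow, assuming the same `emailModel`, `consumer`, `topic`, `partition`, and message `m` used in the hunk above (the `handleSendFailure` wrapper is illustrative, not part of the source):

```js
// Illustrative sketch only: mirrors the failure path shown in the diff.
// The record is persisted as FAILED and the Kafka offset is still committed,
// so the message will not be consumed again after a send failure.
function handleSendFailure(emailModel, consumer, topic, partition, m) {
  emailModel.status = 'FAILED';
  return emailModel.save()
    .then(() => consumer.commitOffset({ topic, partition, offset: m.offset }));
}
```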
@@ -124,9 +124,9 @@ function startKafkaConsumer(consumer, handlers, dataHandler) {
   return consumer
     .init()
     .then(() => Promise.each(_.keys(handlers), (topicName) => { // add back the ignored topic prefix to use full topic name
-      emailTries[topicName] = 0;
-      return consumer.subscribe(`${config.KAFKA_TOPIC_IGNORE_PREFIX || ''}${topicName}`, dataHandler);
-    })
+      emailTries[topicName] = 0;
+      return consumer.subscribe(`${config.KAFKA_TOPIC_IGNORE_PREFIX || ''}${topicName}`, dataHandler);
+    })
   );
 }

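
`startKafkaConsumer` subscribes with `config.KAFKA_TOPIC_IGNORE_PREFIX` prepended to each handler key, so messages arrive under the prefixed topic name while `handlers` stays keyed by the bare name. A hedged sketch of how the prefix could be stripped again when routing a message to its handler; the `dispatch` helper is illustrative and not taken from the source:

```js
// Illustrative only: assumes `handlers` is keyed by the un-prefixed topic name,
// matching the subscribe call in the hunk above.
function dispatch(handlers, fullTopicName, message) {
  const prefix = config.KAFKA_TOPIC_IGNORE_PREFIX || '';
  const topicName = fullTopicName.startsWith(prefix)
    ? fullTopicName.slice(prefix.length)
    : fullTopicName;
  const handler = handlers[topicName];
  return handler ? handler(message) : Promise.resolve(); // ignore topics without a handler
}
```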
@@ -189,8 +189,7 @@ function start(handlers) {
       req.signature = `${def.controller}#${def.method}`;
       next();
     });
-    if ((url !== '/email/health')
-      && (url !== `/${config.API_VERSION}/${config.API_CONTEXT_PATH}/health`)) {
+    if (url !== '/health') {
       actions.push(jwtAuth());
       actions.push((req, res, next) => {
         if (!req.authUser) {
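
The single `'/health'` comparison relies on Express mount-path behaviour: inside a router mounted at `config.API_CONTEXT_PATH` (see the next hunk), the request URL is relative to the mount point, so the old absolute paths are no longer needed. A small self-contained demonstration of that behaviour, using a hypothetical context path rather than the real config value:

```js
// Stand-alone demo of mount-relative URLs; '/v5/email-service' is a hypothetical
// stand-in for config.API_CONTEXT_PATH, and the handler body is illustrative.
const express = require('express');

const app = express();
const apiRouter = express.Router();

apiRouter.use((req, res, next) => {
  // For GET /v5/email-service/health:
  //   req.originalUrl === '/v5/email-service/health'
  //   req.url         === '/health'   <- this is why the simplified check works
  next();
});

apiRouter.get('/health', (req, res) => res.json({ health: 'ok' }));

app.use('/v5/email-service', apiRouter);

app.listen(3000);
```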
@@ -205,7 +204,7 @@ function start(handlers) {
     });
   });

-  app.use('/', apiRouter);
+  app.use(config.API_CONTEXT_PATH, apiRouter);

   app.use((req, res) => {
     res.status(404).json({ error: 'route not found' });
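
Mounting the router at `config.API_CONTEXT_PATH` instead of `'/'` means requests outside the context path now fall through to the 404 handler shown above. A hedged sketch of the resulting routing order, again with a hypothetical context path in place of the config value:

```js
// Sketch of the mount order implied by the diff; '/v5/email-service' is a
// hypothetical value for config.API_CONTEXT_PATH, not taken from the source.
app.use('/v5/email-service', apiRouter); // e.g. GET /v5/email-service/health is handled here

app.use((req, res) => {
  // Anything outside the mounted context path (e.g. GET /health at the root)
  // now lands here instead of being served by apiRouter.
  res.status(404).json({ error: 'route not found' });
});
```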