Skip to content

Commit 3736a6e

Browse files
author
vikasrohit
authored
Merge pull request #196 from topcoder-platform/dev
Supporting release for Connect 2.4.7
2 parents 89707d7 + fd15bb4 commit 3736a6e

File tree

10 files changed

+182
-47
lines changed

10 files changed

+182
-47
lines changed

.circleci/config.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ workflows:
7676
- test
7777
filters:
7878
branches:
79-
only: ['dev']
79+
only: ['dev', 'feature/auth0-proxy-server']
8080
- deployProd:
8181
requires:
8282
- test

README.md

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,11 @@
22

33
Microservice to manage CRUD operations for all things Projects.
44

5+
### Note: The steps below are provided as a best-effort guide for local deployment. We expect contributors, as developers, to resolve any run-time issues themselves (e.g. OS and Node version issues).
6+
57
### Local Development
68
* We use docker-compose for running dependencies locally. Instructions for Docker compose setup - https://docs.docker.com/compose/install/
7-
* Nodejs 6.9.4 - consider using [nvm](https://github.com/creationix/nvm) or equivalent to manage your node version
9+
* Nodejs 8.9.4 - consider using [nvm](https://github.com/creationix/nvm) or equivalent to manage your node version
810
* Install [libpg](https://www.npmjs.com/package/pg-native)
911
* Install node dependencies
1012
`npm install`
@@ -63,9 +65,9 @@ New Kafka related configuration options has been introduced:
6365
}
6466
```
6567
Environment variables:
66-
KAFKA_HOSTS - same as "hosts"
67-
KAFKA_CLIENT_CERT - same as "clientCert"
68-
KAFKA_CLIENT_CERT_KEY - same as "clientCertKey"
68+
- `KAFKA_HOSTS` - same as "hosts"
69+
- `KAFKA_CLIENT_CERT` - same as "clientCert"
70+
- `KAFKA_CLIENT_CERT_KEY` - same as "clientCertKey"
6971

7072
### Test
7173

deploy.sh

Lines changed: 18 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ AWS_ECS_CONTAINER_NAME="tc-project-service"
1111
AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY")
1212
AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER")
1313
AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE")
14+
AWS_ECS_SERVICE_CONSUMERS=$(eval "echo \$${ENV}_AWS_ECS_SERVICE_CONSUMERS")
1415
AUTH_DOMAIN=$(eval "echo \$${ENV}_AUTH_DOMAIN")
1516
AUTH_SECRET=$(eval "echo \$${ENV}_AUTH_SECRET")
1617
VALID_ISSUERS=$(eval "echo \$${ENV}_VALID_ISSUERS")
@@ -29,9 +30,9 @@ configure_aws_cli() {
2930
# deploys the app to the ecs cluster
3031
deploy_cluster() {
3132

32-
make_task_def
33-
register_definition
34-
if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $AWS_ECS_SERVICE --task-definition $revision | \
33+
make_task_def $1 $2 $3 $4
34+
register_definition $1
35+
if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $1 --task-definition $revision | \
3536
$JQ '.service.taskDefinition') != $revision ]]; then
3637
echo "Error updating service."
3738
return 1
@@ -46,6 +47,7 @@ make_task_def(){
4647
"family": "%s",
4748
"requiresCompatibilities": ["EC2", "FARGATE"],
4849
"networkMode": "awsvpc",
50+
"taskRoleArn": "arn:aws:iam::%s:role/tc-project-service-ecs-task-role",
4951
"executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole",
5052
"cpu": "1024",
5153
"memory": "2048",
@@ -56,6 +58,7 @@ make_task_def(){
5658
"essential": true,
5759
"memory": 1536,
5860
"cpu": 768,
61+
"entryPoint": ["%s", "%s", "%s"],
5962
"environment": [
6063
{
6164
"name": "NODE_ENV",
@@ -85,14 +88,6 @@ make_task_def(){
8588
"name": "AWS_REGION",
8689
"value": "%s"
8790
},
88-
{
89-
"name": "AWS_ACCESS_KEY_ID",
90-
"value": "%s"
91-
},
92-
{
93-
"name": "AWS_SECRET_ACCESS_KEY",
94-
"value": "%s"
95-
},
9691
{
9792
"name": "AUTH_DOMAIN",
9893
"value": "%s"
@@ -255,16 +250,16 @@ make_task_def(){
255250
KAFKA_CLIENT_CERT_KEY=$(eval "echo \$${ENV}_KAFKA_CLIENT_CERT_KEY")
256251
KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID")
257252
KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL")
258-
259253
AUTH0_PROXY_SERVER_URL=$(eval "echo \$${ENV}_AUTH0_PROXY_SERVER_URL")
260254

255+
AUTH0_PROXY_SERVER_URL=$(eval "echo \$${ENV}_AUTH0_PROXY_SERVER_URL")
261256

262-
task_def=$(printf "$task_template" $family $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$AUTH0_PROXY_SERVER_URL" $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
257+
task_def=$(printf "$task_template" $1 $ACCOUNT_ID $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$AUTH0_PROXY_SERVER_URL" $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
263258
}
264259

265260
push_ecr_image(){
266261
eval $(aws ecr get-login --region $AWS_REGION --no-include-email)
267-
docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$AWS_REPOSITORY:$CIRCLE_SHA1
262+
docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$1:$CIRCLE_SHA1
268263
}
269264

270265
register_definition() {
@@ -279,13 +274,13 @@ register_definition() {
279274
check_service_status() {
280275
counter=0
281276
sleep 60
282-
servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
277+
servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
283278
while [[ $servicestatus != *"steady state"* ]]
284279
do
285280
echo "Current event message : $servicestatus"
286281
echo "Waiting for 30 seconds to check the service status...."
287282
sleep 30
288-
servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
283+
servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
289284
counter=`expr $counter + 1`
290285
if [[ $counter -gt $COUNTER_LIMIT ]] ; then
291286
echo "Service does not reach steady state within 10 minutes. Please check"
@@ -296,6 +291,10 @@ check_service_status() {
296291
}
297292

298293
configure_aws_cli
299-
push_ecr_image
300-
deploy_cluster
301-
check_service_status
294+
push_ecr_image $AWS_REPOSITORY
295+
deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start"
296+
297+
deploy_cluster $AWS_ECS_SERVICE_CONSUMERS "npm" "run" "startKafkaConsumers"
298+
299+
check_service_status $AWS_ECS_SERVICE
300+
check_service_status $AWS_ECS_SERVICE_CONSUMERS

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
"migrate:es": "./node_modules/.bin/babel-node migrations/seedElasticsearchIndex.js",
1616
"prestart": "npm run -s build",
1717
"start": "node dist",
18+
"startKafkaConsumers": "npm run -s build && node dist/index-kafka.js",
1819
"start:dev": "NODE_ENV=development PORT=8001 nodemon -w src --exec \"babel-node src --presets es2015\" | ./node_modules/.bin/bunyan",
1920
"test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --compilers js:babel-core/register $(find src -path '*spec.js*')",
2021
"test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --compilers js:babel-core/register $(find src -path '*spec.js*')",
@@ -49,7 +50,7 @@
4950
"express-request-id": "^1.1.0",
5051
"express-sanitizer": "^1.0.2",
5152
"express-validation": "^0.6.0",
52-
"http-aws-es": "^1.1.3",
53+
"http-aws-es": "^4.0.0",
5354
"joi": "^8.0.5",
5455
"jsonwebtoken": "^8.3.0",
5556
"lodash": "^4.16.4",

src/constants.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ export const BUS_API_EVENT = {
8787

8888
PROJECT_LINK_CREATED: 'notifications.connect.project.linkCreated',
8989
PROJECT_FILE_UPLOADED: 'notifications.connect.project.fileUploaded',
90-
PROJECT_SPECIFICATION_MODIFIED: 'notifications.connect.project.specificationModified',
90+
PROJECT_SPECIFICATION_MODIFIED: 'connect.action.project.updated.spec',
9191
PROJECT_PROGRESS_MODIFIED: 'connect.action.project.updated.progress',
9292
PROJECT_FILES_UPDATED: 'connect.action.project.files.updated',
9393
PROJECT_TEAM_UPDATED: 'connect.action.project.team.updated',

src/events/projectPhases/index.js

Lines changed: 57 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -107,15 +107,14 @@ const projectPhaseAddedHandler = Promise.coroutine(function* (logger, msg, chann
107107
});
108108

109109
/**
110-
* Handler for project phase updated event
110+
* Indexes the project phase in the elastic search.
111+
*
111112
* @param {Object} logger logger to log along with trace id
112-
* @param {Object} msg event payload
113-
* @param {Object} channel channel to ack, nack
113+
* @param {Object} data event payload
114114
* @returns {undefined}
115115
*/
116-
const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
116+
const updateIndexProjectPhase = Promise.coroutine(function* (logger, data) { // eslint-disable-line func-names
117117
try {
118-
const data = JSON.parse(msg.content.toString());
119118
const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.projectId });
120119
const phases = _.map(data.allPhases, single => _.omit(single, ['deletedAt', 'deletedBy']));
121120
const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
@@ -127,7 +126,59 @@ const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, cha
127126
doc: merged,
128127
},
129128
});
130-
logger.debug('elasticsearch index updated, project phase updated successfully');
129+
logger.debug('project phase updated to project document successfully');
130+
} catch (error) {
131+
logger.error('Error handling indexing the project phase', error);
132+
// throw the error back to nack the bus
133+
throw error;
134+
}
135+
});
136+
137+
/**
 * Updates the topic title for a project phase in message api.
 *
 * Fetches the existing phase topic and, when the phase name differs from the
 * topic title, posts an update with the new title. Errors are logged but
 * deliberately NOT rethrown, so a failed topic update never nacks the bus
 * message (which could otherwise lead to duplicate topics per phase).
 *
 * @param {Object} logger logger to log along with trace id
 * @param {Object} phase the updated project phase (must have projectId, id, name)
 * @returns {undefined}
 */
const updatePhaseTopic = Promise.coroutine(function* (logger, phase) { // eslint-disable-line func-names
  try {
    logger.debug('Updating topic for phase with phase', phase);
    const topic = yield messageService.getPhaseTopic(phase.projectId, phase.id, logger);
    logger.trace('Topic', topic);
    const title = phase.name;
    const titleChanged = topic && topic.title !== title;
    logger.trace('titleChanged', titleChanged);
    // the topic's first post carries the content; both its id and body are
    // required by the message service when editing a topic
    const contentPost = topic && topic.posts && topic.posts.length > 0 ? topic.posts[0] : null;
    logger.trace('contentPost', contentPost);
    const postId = _.get(contentPost, 'id');
    const content = _.get(contentPost, 'body');
    if (postId && content && titleChanged) {
      const updatedTopic = yield messageService.updateTopic(topic.id, { title, postId, content }, logger);
      logger.debug('topic for the phase updated successfully');
      logger.trace('updated topic', updatedTopic);
    }
  } catch (error) {
    logger.error('Error in updating topic for the project phase', error);
    // don't throw the error back to nack the bus, because we don't want to get multiple topics per phase
    // we can create topic for a phase manually, if somehow it fails
  }
});
167+
168+
/**
169+
* Handler for project phase updated event
170+
* @param {Object} logger logger to log along with trace id
171+
* @param {Object} msg event payload
172+
* @param {Object} channel channel to ack, nack
173+
* @returns {undefined}
174+
*/
175+
const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
176+
try {
177+
const data = JSON.parse(msg.content.toString());
178+
logger.debug('calling updateIndexProjectPhase', data);
179+
yield updateIndexProjectPhase(logger, data, channel);
180+
logger.debug('calling updatePhaseTopic', data.updated);
181+
yield updatePhaseTopic(logger, data.updated);
131182
channel.ack(msg);
132183
} catch (error) {
133184
logger.error('Error handling project.phase.updated event', error);

src/index-kafka.js

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
import _ from 'lodash';
import config from 'config';
import startKafkaConsumer from './services/kafkaConsumer';
import { kafkaHandlers } from './events';
import models from './models';

const coreLib = require('tc-core-library-js');


// =======================
// Logger =========
// =======================
// Suffix the app name with the environment. Guard against NODE_ENV being
// unset — the original `process.env.NODE_ENV.toLowerCase()` would throw a
// TypeError; an empty string falls through to the '-prod' default, matching
// the switch's existing default branch.
let appName = 'tc-projects-consumer';
switch ((process.env.NODE_ENV || '').toLowerCase()) {
  case 'development':
    appName += '-dev';
    break;
  case 'qa':
    appName += '-qa';
    break;
  case 'production':
  default:
    appName += '-prod';
    break;
}

const logger = coreLib.logger({
  name: appName,
  level: _.get(config, 'logLevel', 'debug').toLowerCase(),
  captureLogs: config.get('captureLogs'),
  logentriesToken: _.get(config, 'logentriesToken', null),
});

// =======================
// Database =========
// =======================
// models are imported for their registration side effects; log whether they loaded
logger.info('Registering models ... ', !!models);

/**
 * Handle server shutdown gracefully
 * @returns {undefined}
 */
function gracefulShutdown() {
  // TODO
}
process.on('SIGTERM', gracefulShutdown);
process.on('SIGINT', gracefulShutdown);

const app = { logger, models };

module.exports = startKafkaConsumer(kafkaHandlers, app, logger);

src/services/index.js

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@
22

33
import config from 'config';
44
import RabbitMQService from './rabbitmq';
5-
import startKafkaConsumer from './kafkaConsumer';
6-
import { kafkaHandlers } from '../events';
5+
// import startKafkaConsumer from './kafkaConsumer';
6+
// import { kafkaHandlers } from '../events';
77

88
/**
99
* Responsible for establishing connections to all external services
@@ -33,10 +33,10 @@ module.exports = (fapp, logger) => {
3333
.then(() => {
3434
logger.info('RabbitMQ service initialized');
3535
})
36-
.then(() => startKafkaConsumer(kafkaHandlers, app, logger))
37-
.then(() => {
38-
logger.info('Kafka consumer service initialized');
39-
})
36+
// .then(() => startKafkaConsumer(kafkaHandlers, app, logger))
37+
// .then(() => {
38+
// logger.info('Kafka consumer service initialized');
39+
// })
4040
.catch((err) => {
4141
logger.error('Error initializing services', err);
4242
// gracefulShutdown()

src/services/messageService.js

Lines changed: 35 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,35 @@ function createTopic(topic, logger) {
8787
});
8888
}
8989

90+
/**
 * Updates the given topic in message api.
 *
 * NOTE(review): errors from the message service are logged and swallowed at
 * both stages, so the returned promise resolves with `undefined` on failure
 * instead of rejecting — confirm callers expect this best-effort behavior.
 *
 * @param {Number} topicId id of the topic to be updated
 * @param {Object} topic the topic, should be a JSON object
 * @param {Object} logger object
 * @return {Promise} new topic promise
 */
function updateTopic(topicId, topic, logger) {
  logger.debug(`updateTopic for topic: ${JSON.stringify(topic)}`);
  // unwrap the message-service envelope; fall back to an empty object
  const onResponse = (resp) => {
    logger.debug('Topic updated successfully');
    logger.debug(`Topic updated successfully [status]: ${resp.status}`);
    logger.debug(`Topic updated successfully [data]: ${resp.data}`);
    return _.get(resp.data, 'result.content', {});
  };
  const onPostError = (error) => {
    logger.debug('Error updating topic');
    logger.error(error);
  };
  return getClient(logger)
    .then((msgClient) => {
      logger.debug('calling message service');
      return msgClient.post(`/topics/${topicId}/edit`, topic)
        .then(onResponse)
        .catch(onPostError);
    })
    .catch((errMessage) => {
      logger.debug(errMessage);
    });
}
118+
90119
/**
91120
* Deletes the given posts for the given topic.
92121
*
@@ -121,12 +150,13 @@ function deletePosts(topicId, postIds, logger) {
121150
* @return {Promise} topic promise
122151
*/
123152
function getPhaseTopic(projectId, phaseId, logger) {
124-
logger.debug(`getPhaseTopic for phaseId: ${phaseId}`);
153+
logger.debug(`getPhaseTopic for projectId: ${projectId} phaseId: ${phaseId}`);
125154
return getClient(logger).then((msgClient) => {
126155
logger.debug(`calling message service for fetching phaseId#${phaseId}`);
127-
return msgClient.get('/topics/list', {
128-
params: { filter: `reference=project&referenceId=${projectId}&tag=phase#${phaseId}` },
129-
}).then((resp) => {
156+
const encodedFilter = encodeURIComponent(`reference=project&referenceId=${projectId}&tag=phase#${phaseId}`);
157+
return msgClient.get(`/topics/list/db?filter=${encodedFilter}`)
158+
.then((resp) => {
159+
logger.debug('Fetched phase topic', resp);
130160
const topics = _.get(resp.data, 'result.content', []);
131161
if (topics && topics.length > 0) {
132162
return topics[0];
@@ -153,6 +183,7 @@ function deleteTopic(topicId, logger) {
153183

154184
module.exports = {
155185
createTopic,
186+
updateTopic,
156187
deletePosts,
157188
getPhaseTopic,
158189
deleteTopic,

0 commit comments

Comments
 (0)