From 99b624c8bfda3202a3d67a76e583cd0715b71df9 Mon Sep 17 00:00:00 2001
From: Vikas Agarwal
Date: Wed, 17 Oct 2018 11:56:18 +0530
Subject: [PATCH 1/9] =?UTF-8?q?GitHub=20topic#2609,=20When=20we=20edit=20p?=
 =?UTF-8?q?hase=20titles,=20respective=20topic=20titles=20are=20not=20updat?=
 =?UTF-8?q?ed=20in=20the=20sidebar=20=E2=80=94=20Fixed?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 src/events/projectPhases/index.js | 63 ++++++++++++++++++++++++++++---
 src/services/messageService.js    | 39 +++++++++++++++++--
 2 files changed, 92 insertions(+), 10 deletions(-)

diff --git a/src/events/projectPhases/index.js b/src/events/projectPhases/index.js
index 09e964cb..d9e8c218 100644
--- a/src/events/projectPhases/index.js
+++ b/src/events/projectPhases/index.js
@@ -107,15 +107,14 @@ const projectPhaseAddedHandler = Promise.coroutine(function* (logger, msg, chann
 });
 
 /**
- * Handler for project phase updated event
+ * Indexes the project phase in the elastic search.
+ *
  * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
+ * @param {Object} data event payload
  * @returns {undefined}
  */
-const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
+const updateIndexProjectPhase = Promise.coroutine(function* (logger, data) { // eslint-disable-line func-names
   try {
-    const data = JSON.parse(msg.content.toString());
     const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.projectId });
     const phases = _.map(data.allPhases, single => _.omit(single, ['deletedAt', 'deletedBy']));
     const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
@@ -127,7 +126,59 @@ const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, cha
         doc: merged,
       },
     });
-    logger.debug('elasticsearch index updated, project phase updated successfully');
+    logger.debug('project phase updated to project document successfully');
+  } catch (error) {
+    logger.error('Error handling indexing the project phase', error);
+    // throw the error back to nack the bus
+    throw error;
+  }
+});
+
+/**
+ * Updates the topic of an existing phase in message api.
+ *
+ * @param {Object} logger logger to log along with trace id
+ * @param {Object} phase the updated phase
+ * @returns {undefined}
+ */
+const updatePhaseTopic = Promise.coroutine(function* (logger, phase) { // eslint-disable-line func-names
+  try {
+    logger.debug('Updating topic for phase with phase', phase);
+    const topic = yield messageService.getPhaseTopic(phase.projectId, phase.id, logger);
+    logger.trace('Topic', topic);
+    const title = phase.name;
+    const titleChanged = topic && topic.title !== title;
+    logger.trace('titleChanged', titleChanged);
+    const contentPost = topic && topic.posts && topic.posts.length > 0 ? topic.posts[0] : null;
+    logger.trace('contentPost', contentPost);
+    const postId = _.get(contentPost, 'id');
+    const content = _.get(contentPost, 'body');
+    if (postId && content && titleChanged) {
+      const updatedTopic = yield messageService.updateTopic(topic.id, { title, postId, content }, logger);
+      logger.debug('topic for the phase updated successfully');
+      logger.trace('updated topic', updatedTopic);
+    }
+  } catch (error) {
+    logger.error('Error in updating topic for the project phase', error);
+    // don't throw the error back to nack the bus, because we don't want to get multiple topics per phase
+    // we can create the topic for a phase manually, if somehow it fails
+  }
+});
+
+/**
+ * Handler for project phase updated event
+ * @param {Object} logger logger to log along with trace id
+ * @param {Object} msg event payload
+ * @param {Object} channel channel to ack, nack
+ * @returns {undefined}
+ */
+const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
+  try {
+    const data = JSON.parse(msg.content.toString());
+    logger.debug('calling updateIndexProjectPhase', data);
+    yield updateIndexProjectPhase(logger, data);
+    logger.debug('calling updatePhaseTopic', data.updated);
+    yield updatePhaseTopic(logger, data.updated);
     channel.ack(msg);
   } catch (error) {
     logger.error('Error handling project.phase.updated event', error);
diff --git a/src/services/messageService.js b/src/services/messageService.js
index d7e0f678..4ea5f04b 100644
--- a/src/services/messageService.js
+++ b/src/services/messageService.js
@@ -87,6 +87,35 @@ function createTopic(topic, logger) {
   });
 }
 
+/**
+ * Updates the given topic in message api
+ *
+ * @param {Number} topicId id of the topic to be updated
+ * @param {Object} topic the topic, should be a JSON object
+ * @param {Object} logger object
+ * @return {Promise} updated topic promise
+ */
+function updateTopic(topicId, topic, logger) {
+  logger.debug(`updateTopic for topic: ${JSON.stringify(topic)}`);
+  return getClient(logger).then((msgClient) => {
+    logger.debug('calling message service');
+    return msgClient.post(`/topics/${topicId}/edit`, topic)
+      .then((resp) => {
+        logger.debug('Topic updated successfully');
+        logger.debug(`Topic updated successfully [status]: ${resp.status}`);
+        logger.debug(`Topic updated successfully [data]: ${JSON.stringify(resp.data)}`);
+        return _.get(resp.data, 'result.content', {});
+      })
+      .catch((error) => {
+        logger.debug('Error updating topic');
+        logger.error(error);
+      });
+  }).catch((errMessage) => {
+    logger.debug(errMessage);
+  });
+}
+
 /**
  * Deletes the given posts for the given topic.
  *
@@ -121,12 +150,13 @@ function deletePosts(topicId, postIds, logger) {
  * @return {Promise} topic promise
  */
 function getPhaseTopic(projectId, phaseId, logger) {
-  logger.debug(`getPhaseTopic for phaseId: ${phaseId}`);
+  logger.debug(`getPhaseTopic for projectId: ${projectId} phaseId: ${phaseId}`);
   return getClient(logger).then((msgClient) => {
     logger.debug(`calling message service for fetching phaseId#${phaseId}`);
-    return msgClient.get('/topics/list', {
-      params: { filter: `reference=project&referenceId=${projectId}&tag=phase#${phaseId}` },
-    }).then((resp) => {
+    const encodedFilter = encodeURIComponent(`reference=project&referenceId=${projectId}&tag=phase#${phaseId}`);
+    return msgClient.get(`/topics/list/db?filter=${encodedFilter}`)
+      .then((resp) => {
+        logger.debug('Fetched phase topic', resp);
       const topics = _.get(resp.data, 'result.content', []);
       if (topics && topics.length > 0) {
         return topics[0];
@@ -153,6 +183,7 @@ function deleteTopic(topicId, logger) {
 
 module.exports = {
   createTopic,
+  updateTopic,
   deletePosts,
   getPhaseTopic,
   deleteTopic,

From 205732d5568c7a1c908a9c70af18bf4cc47eae0e Mon Sep 17 00:00:00 2001
From: Vikas Agarwal
Date: Wed, 17 Oct 2018 16:10:50 +0530
Subject: [PATCH 2/9] Fixed kafka topic constant for project spec modified

---
 src/constants.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/constants.js b/src/constants.js
index 7f238600..bc99c3b2 100644
--- a/src/constants.js
+++ b/src/constants.js
@@ -87,7 +87,7 @@ export const BUS_API_EVENT = {
   PROJECT_LINK_CREATED: 'notifications.connect.project.linkCreated',
   PROJECT_FILE_UPLOADED: 'notifications.connect.project.fileUploaded',
 
-  PROJECT_SPECIFICATION_MODIFIED: 'notifications.connect.project.specificationModified',
+  PROJECT_SPECIFICATION_MODIFIED: 'connect.action.project.updated.spec',
   PROJECT_PROGRESS_MODIFIED: 'connect.action.project.updated.progress',
   PROJECT_FILES_UPDATED: 'connect.action.project.files.updated',
   PROJECT_TEAM_UPDATED: 'connect.action.project.team.updated',

From 7b2ee49d60c4e6fad52fdaba92a47ccad0c78612 Mon Sep 17 00:00:00 2001
From: Maksym Mykhailenko
Date: Thu, 25 Oct 2018 08:29:52 +0800
Subject: [PATCH 3/9] minor improve in README readability

---
 README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 60abab9e..a51b6f2d 100644
--- a/README.md
+++ b/README.md
@@ -63,9 +63,9 @@ New Kafka related configuration options has been introduced:
   }
   ```
 Environment variables:
-KAFKA_HOSTS - same as "hosts"
-KAFKA_CLIENT_CERT - same as "clientCert"
-KAFKA_CLIENT_CERT_KEY - same as "clientCertKey"
+- `KAFKA_HOSTS` - same as "hosts"
+- `KAFKA_CLIENT_CERT` - same as "clientCert"
+- `KAFKA_CLIENT_CERT_KEY` - same as "clientCertKey"
 
 ### Test

From 356594fc996f2d21ea5391020051f1c7e2c618c1 Mon Sep 17 00:00:00 2001
From: Vikas Agarwal
Date: Thu, 25 Oct 2018 15:11:29 +0530
Subject: [PATCH 4/9] Github issue#193, Remove using AWS keys from env
 variables, use role based access instead Github issue#194, Separate task
 deployment for API and event bus consumers

---
 deploy.sh             | 35 +++++++++++++++--------------
 package.json          |  3 ++-
 src/index-kafka.js    | 51 +++++++++++++++++++++++++++++++++++++++++++
 src/services/index.js | 12 +++++-----
 src/util.js           | 10 ++++-----
 5 files changed, 81 insertions(+), 30 deletions(-)
 create mode 100644 src/index-kafka.js

diff --git a/deploy.sh b/deploy.sh
index 1d4ea007..87f6fd23 100755
--- a/deploy.sh
+++ b/deploy.sh
@@ -11,6 +11,7 @@ AWS_ECS_CONTAINER_NAME="tc-project-service"
 AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY")
 AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER")
 AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE")
+AWS_ECS_SERVICE_CONSUMERS=$(eval "echo \$${ENV}_AWS_ECS_SERVICE_CONSUMERS")
 AUTH_DOMAIN=$(eval "echo \$${ENV}_AUTH_DOMAIN")
 AUTH_SECRET=$(eval "echo \$${ENV}_AUTH_SECRET")
 VALID_ISSUERS=$(eval "echo \$${ENV}_VALID_ISSUERS")
@@ -29,9 +30,9 @@ configure_aws_cli() {
 
 # deploys the app to the ecs cluster
 deploy_cluster() {
-    make_task_def
-    register_definition
-    if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $AWS_ECS_SERVICE --task-definition $revision | \
+    make_task_def $1 $2 $3 $4
+    register_definition $1
+    if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $1 --task-definition $revision | \
                    $JQ '.service.taskDefinition') != $revision ]]; then
         echo "Error updating service."
         return 1
@@ -46,6 +47,7 @@ make_task_def(){
     "family": "%s",
     "requiresCompatibilities": ["EC2", "FARGATE"],
     "networkMode": "awsvpc",
+    "taskRoleArn": "arn:aws:iam::%s:role/tc-project-service-ecs-task-role",
     "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole",
     "cpu": "1024",
     "memory": "2048",
@@ -56,6 +58,7 @@ make_task_def(){
       "essential": true,
       "memory": 1536,
      "cpu": 768,
+      "entryPoint": ["%s", "%s", "%s"],
       "environment": [
         {
           "name": "NODE_ENV",
           "value": "%s"
         },
@@ -85,14 +88,6 @@ make_task_def(){
         {
           "name": "AWS_REGION",
           "value": "%s"
         },
-        {
-          "name": "AWS_ACCESS_KEY_ID",
-          "value": "%s"
-        },
-        {
-          "name": "AWS_SECRET_ACCESS_KEY",
-          "value": "%s"
-        },
         {
           "name": "AUTH_DOMAIN",
           "value": "%s"
         },
@@ -253,12 +248,12 @@ make_task_def(){
 
   KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL")
 
-  task_def=$(printf "$task_template" $family $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
+  task_def=$(printf "$task_template" $1 $ACCOUNT_ID $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
 }
 
 push_ecr_image(){
   eval $(aws ecr get-login --region $AWS_REGION --no-include-email)
-  docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$AWS_REPOSITORY:$CIRCLE_SHA1
+  docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$1:$CIRCLE_SHA1
 }
 
 register_definition() {
@@ -273,13 +268,13 @@
 check_service_status() {
         counter=0
         sleep 60
-        servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
+        servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
         while [[ $servicestatus != *"steady state"* ]]
         do
                 echo "Current event message : $servicestatus"
                 echo "Waiting for 30 seconds to check the service status...."
                 sleep 30
-                servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
+                servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
                 counter=`expr $counter + 1`
                 if [[ $counter -gt $COUNTER_LIMIT ]] ; then
                         echo "Service does not reach steady state within 10 minutes. Please check"
@@ -290,6 +285,10 @@ check_service_status() {
 }
 
 configure_aws_cli
-push_ecr_image
-deploy_cluster
-check_service_status
+push_ecr_image $AWS_REPOSITORY
+deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start"
+
+deploy_cluster $AWS_ECS_SERVICE_CONSUMERS "npm" "run" "startKafkaConsumers"
+
+check_service_status $AWS_ECS_SERVICE
+check_service_status $AWS_ECS_SERVICE_CONSUMERS
\ No newline at end of file
diff --git a/package.json b/package.json
index 008d7ad1..9aadead8 100644
--- a/package.json
+++ b/package.json
@@ -15,6 +15,7 @@
     "migrate:es": "./node_modules/.bin/babel-node migrations/seedElasticsearchIndex.js",
     "prestart": "npm run -s build",
     "start": "node dist",
+    "startKafkaConsumers": "npm run -s build && node dist/index-kafka.js",
     "start:dev": "NODE_ENV=development PORT=8001 nodemon -w src --exec \"babel-node src --presets es2015\" | ./node_modules/.bin/bunyan",
     "test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --compilers js:babel-core/register $(find src -path '*spec.js*')",
     "test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --compilers js:babel-core/register $(find src -path '*spec.js*')",
@@ -48,7 +49,7 @@
     "express-request-id": "^1.1.0",
     "express-sanitizer": "^1.0.2",
     "express-validation": "^0.6.0",
-    "http-aws-es": "^1.1.3",
+    "http-aws-es": "^4.0.0",
     "joi": "^8.0.5",
     "jsonwebtoken": "^8.3.0",
     "lodash": "^4.16.4",
diff --git a/src/index-kafka.js b/src/index-kafka.js
new file mode 100644
index 00000000..aa0123e5
--- /dev/null
+++ b/src/index-kafka.js
@@ -0,0 +1,51 @@
+import _ from 'lodash';
+import config from 'config';
+import startKafkaConsumer from './services/kafkaConsumer';
+import { kafkaHandlers } from './events';
+import models from './models';
+
+const coreLib = require('tc-core-library-js');
+
+
+// =======================
+// Logger =========
+// =======================
+let appName = 'tc-projects-consumer';
+switch (process.env.NODE_ENV.toLowerCase()) {
+  case 'development':
+    appName += '-dev';
+    break;
+  case 'qa':
+    appName += '-qa';
+    break;
+  case 'production':
+  default:
+    appName += '-prod';
+    break;
+}
+
+const logger = coreLib.logger({
+  name: appName,
+  level: _.get(config, 'logLevel', 'debug').toLowerCase(),
+  captureLogs: config.get('captureLogs'),
+  logentriesToken: _.get(config, 'logentriesToken', null),
+});
+
+// =======================
+// Database =========
+// =======================
+logger.info('Registering models ... ', !!models);
+
+/**
+ * Handle server shutdown gracefully
+ * @returns {undefined}
+ */
+function gracefulShutdown() {
+  // TODO
+}
+process.on('SIGTERM', gracefulShutdown);
+process.on('SIGINT', gracefulShutdown);
+
+const app = { logger, models };
+
+module.exports = startKafkaConsumer(kafkaHandlers, app, logger);
diff --git a/src/services/index.js b/src/services/index.js
index 017a6ec2..6c8306c8 100644
--- a/src/services/index.js
+++ b/src/services/index.js
@@ -2,8 +2,8 @@
 import config from 'config';
 
 import RabbitMQService from './rabbitmq';
-import startKafkaConsumer from './kafkaConsumer';
-import { kafkaHandlers } from '../events';
+// import startKafkaConsumer from './kafkaConsumer';
+// import { kafkaHandlers } from '../events';
 
 /**
  * Responsible for establishing connections to all external services
@@ -33,10 +33,10 @@ module.exports = (fapp, logger) => {
     .then(() => {
       logger.info('RabbitMQ service initialized');
     })
-    .then(() => startKafkaConsumer(kafkaHandlers, app, logger))
-    .then(() => {
-      logger.info('Kafka consumer service initialized');
-    })
+    // .then(() => startKafkaConsumer(kafkaHandlers, app, logger))
+    // .then(() => {
+    //   logger.info('Kafka consumer service initialized');
+    // })
     .catch((err) => {
       logger.error('Error initializing services', err);
       // gracefulShutdown()
diff --git a/src/util.js b/src/util.js
index 906f1389..542c2ee1 100644
--- a/src/util.js
+++ b/src/util.js
@@ -16,7 +16,7 @@ import config from 'config';
 import urlencode from 'urlencode';
 import elasticsearch from 'elasticsearch';
 import Promise from 'bluebird';
-import AWS from 'aws-sdk';
+// import AWS from 'aws-sdk';
 
 import { ADMIN_ROLES, TOKEN_SCOPES } from './constants';
 
@@ -317,10 +317,10 @@ _.assignIn(util, {
         apiVersion: config.get('elasticsearchConfig.apiVersion'),
         hosts: esHost,
         connectionClass: require('http-aws-es'), // eslint-disable-line global-require
-        amazonES: {
-          region: 'us-east-1',
-          credentials: new AWS.EnvironmentCredentials('AWS'),
-        },
+        // amazonES: {
+        //   region: 'us-east-1',
+        //   credentials: new AWS.EnvironmentCredentials('AWS'),
+        // },
       });
     } else {
       esClient = new elasticsearch.Client(_.cloneDeep(config.elasticsearchConfig));

From 0325431db7771b88a7c7a50072cbd931679e93fe Mon Sep 17 00:00:00 2001
From: Sachin Maheshwari
Date: Mon, 26 Nov 2018 13:01:50 +0530
Subject: [PATCH 5/9] adding auth0 proxy lambda server support.
---
 .circleci/config.yml                     | 2 +-
 config/custom-environment-variables.json | 3 ++-
 config/default.json                      | 3 ++-
 deploy.sh                                | 7 ++++++-
 package.json                             | 2 +-
 5 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 459ec723..4b315cea 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -76,7 +76,7 @@ workflows:
             - test
           filters:
             branches:
-              only: ['dev']
+              only: ['dev', 'feature/auth0-proxy-server']
       - deployProd:
           requires:
             - test
diff --git a/config/custom-environment-variables.json b/config/custom-environment-variables.json
index 59814a2c..6620f43c 100644
--- a/config/custom-environment-variables.json
+++ b/config/custom-environment-variables.json
@@ -46,5 +46,6 @@
   "AUTH0_CLIENT_SECRET": "AUTH0_CLIENT_SECRET",
   "AUTH0_AUDIENCE": "AUTH0_AUDIENCE",
   "TOKEN_CACHE_TIME" : "TOKEN_CACHE_TIME",
-  "whitelistedOriginsForUserIdAuth": "WHITELISTED_ORIGINS_FOR_USERID_AUTH"
+  "whitelistedOriginsForUserIdAuth": "WHITELISTED_ORIGINS_FOR_USERID_AUTH",
+  "AUTH0_PROXY_SERVER_URL" : "AUTH0_PROXY_SERVER_URL"
 }
diff --git a/config/default.json b/config/default.json
index da84855b..a23a86e8 100644
--- a/config/default.json
+++ b/config/default.json
@@ -50,5 +50,6 @@
   "AUTH0_AUDIENCE": "",
   "AUTH0_URL": "",
   "TOKEN_CACHE_TIME": "",
-  "whitelistedOriginsForUserIdAuth": "[\"https:\/\/topcoder-newauth.auth0.com\/\",\"https:\/\/api.topcoder-dev.com\"]"
+  "whitelistedOriginsForUserIdAuth": "[\"https:\/\/topcoder-newauth.auth0.com\/\",\"https:\/\/api.topcoder-dev.com\"]",
+  "AUTH0_PROXY_SERVER_URL" : ""
 }
diff --git a/deploy.sh b/deploy.sh
index 87f6fd23..52a101f9 100755
--- a/deploy.sh
+++ b/deploy.sh
@@ -191,6 +191,10 @@ make_task_def(){
         {
           "name": "KAFKA_URL",
           "value": "%s"
+        },
+        {
+          "name": "AUTH0_PROXY_SERVER_URL",
+          "value": "%s"
         }
       ],
       "portMappings": [
@@ -246,9 +250,10 @@ make_task_def(){
   KAFKA_CLIENT_CERT_KEY=$(eval "echo \$${ENV}_KAFKA_CLIENT_CERT_KEY")
   KAFKA_GROUP_ID=$(eval "echo \$${ENV}_KAFKA_GROUP_ID")
   KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL")
+  AUTH0_PROXY_SERVER_URL=$(eval "echo \$${ENV}_AUTH0_PROXY_SERVER_URL")
 
-  task_def=$(printf "$task_template" $1 $ACCOUNT_ID $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
+  task_def=$(printf "$task_template" $1 $ACCOUNT_ID $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$AUTH0_PROXY_SERVER_URL" $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
"$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$AUTH0_PROXY_SERVER_URL" $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV) } push_ecr_image(){ diff --git a/package.json b/package.json index 9aadead8..9ca2d96f 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "pg": "^4.5.5", "pg-native": "^1.10.1", "sequelize": "^3.23.0", - "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.3", + "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.6", "traverse": "^0.6.6", "urlencode": "^1.1.0" }, From 61a68c521c8e855dc2977b72b325359b601f6e40 Mon Sep 17 00:00:00 2001 From: Sachin Maheshwari Date: Mon, 26 Nov 2018 13:43:01 +0530 Subject: [PATCH 6/9] checking test cases failure reason. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9ca2d96f..9aadead8 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "pg": "^4.5.5", "pg-native": "^1.10.1", "sequelize": "^3.23.0", - "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.6", + "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.3", "traverse": "^0.6.6", "urlencode": "^1.1.0" }, From a4c953e5ac5057ed0e0eab6f16f62aea475c5838 Mon Sep 17 00:00:00 2001 From: Sachin Maheshwari Date: Mon, 26 Nov 2018 18:17:44 +0530 Subject: [PATCH 7/9] changes according to core lib#2.6 . --- package.json | 2 +- src/routes/index.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 9aadead8..9ca2d96f 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "pg": "^4.5.5", "pg-native": "^1.10.1", "sequelize": "^3.23.0", - "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.3", + "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.6", "traverse": "^0.6.6", "urlencode": "^1.1.0" }, diff --git a/src/routes/index.js b/src/routes/index.js index 7dcba789..5af5ff89 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -52,7 +52,7 @@ router.route('/v4/projects/metadata') router.all( RegExp(`\\/${apiVersion}\\/(projects|timelines)(?!\\/health).*`), (req, res, next) => ( // JWT authentication - jwtAuth()(req, res, next) + jwtAuth(config)(req, res, next) ), ); From 0932a77d157a0da759499730a4c57a24098aec78 Mon Sep 17 00:00:00 2001 From: gondzo Date: Sat, 1 Dec 2018 11:52:18 +0100 Subject: [PATCH 8/9] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a51b6f2d..fa6d0a45 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Microservice to manage CRUD operations for all things Projects. ### Local Development * We use docker-compose for running dependencies locally. 
-* Nodejs 6.9.4 - consider using [nvm](https://github.com/creationix/nvm) or equivalent to manage your node version
+* Nodejs 8.9.4 - consider using [nvm](https://github.com/creationix/nvm) or equivalent to manage your node version
 * Install [libpg](https://www.npmjs.com/package/pg-native)
 * Install node dependencies `npm install`

From b9b2a8432c54e641e3bba666e3f2590929c9cf5e Mon Sep 17 00:00:00 2001
From: RishiRajSahu
Date: Wed, 5 Dec 2018 10:58:53 +0530
Subject: [PATCH 9/9] Added Note Section For Contributors/Developers

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index fa6d0a45..c9efc4e7 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,8 @@
 
 Microservice to manage CRUD operations for all things Projects.
 
+### Note: The steps below are provided as a best-effort guide for local deployment; contributors, as developers, are expected to resolve any run-time issues (e.g. OS and Node version issues) on their own.
+
 ### Local Development
 * We use docker-compose for running dependencies locally. Instructions for Docker compose setup - https://docs.docker.com/compose/install/
 * Nodejs 8.9.4 - consider using [nvm](https://github.com/creationix/nvm) or equivalent to manage your node version