diff --git a/deploy.sh b/deploy.sh
index 1d4ea007..87f6fd23 100755
--- a/deploy.sh
+++ b/deploy.sh
@@ -11,6 +11,7 @@ AWS_ECS_CONTAINER_NAME="tc-project-service"
 AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY")
 AWS_ECS_CLUSTER=$(eval "echo \$${ENV}_AWS_ECS_CLUSTER")
 AWS_ECS_SERVICE=$(eval "echo \$${ENV}_AWS_ECS_SERVICE")
+AWS_ECS_SERVICE_CONSUMERS=$(eval "echo \$${ENV}_AWS_ECS_SERVICE_CONSUMERS")
 AUTH_DOMAIN=$(eval "echo \$${ENV}_AUTH_DOMAIN")
 AUTH_SECRET=$(eval "echo \$${ENV}_AUTH_SECRET")
 VALID_ISSUERS=$(eval "echo \$${ENV}_VALID_ISSUERS")
@@ -29,9 +30,9 @@ configure_aws_cli() {
 
 # deploys the app to the ecs cluster
 deploy_cluster() {
-    make_task_def
-    register_definition
-    if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $AWS_ECS_SERVICE --task-definition $revision | \
+    make_task_def $1 $2 $3 $4
+    register_definition $1
+    if [[ $(aws ecs update-service --cluster $AWS_ECS_CLUSTER --service $1 --task-definition $revision | \
                    $JQ '.service.taskDefinition') != $revision ]]; then
         echo "Error updating service."
         return 1
@@ -46,6 +47,7 @@ make_task_def(){
     "family": "%s",
     "requiresCompatibilities": ["EC2", "FARGATE"],
     "networkMode": "awsvpc",
+    "taskRoleArn": "arn:aws:iam::%s:role/tc-project-service-ecs-task-role",
     "executionRoleArn": "arn:aws:iam::%s:role/ecsTaskExecutionRole",
     "cpu": "1024",
     "memory": "2048",
@@ -56,6 +58,7 @@ make_task_def(){
             "essential": true,
             "memory": 1536,
             "cpu": 768,
+            "entryPoint": ["%s", "%s", "%s"],
             "environment": [
                 {
                     "name": "NODE_ENV",
@@ -85,14 +88,6 @@ make_task_def(){
                     "name": "AWS_REGION",
                     "value": "%s"
                 },
-                {
-                    "name": "AWS_ACCESS_KEY_ID",
-                    "value": "%s"
-                },
-                {
-                    "name": "AWS_SECRET_ACCESS_KEY",
-                    "value": "%s"
-                },
                 {
                     "name": "AUTH_DOMAIN",
                     "value": "%s"
@@ -253,12 +248,12 @@ make_task_def(){
 
   KAFKA_URL=$(eval "echo \$${ENV}_KAFKA_URL")
 
-  task_def=$(printf "$task_template" $family $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
+  task_def=$(printf "$task_template" $1 $ACCOUNT_ID $ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
 }
 
 push_ecr_image(){
     eval $(aws ecr get-login --region $AWS_REGION --no-include-email)
-    docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$AWS_REPOSITORY:$CIRCLE_SHA1
+    docker push $ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$1:$CIRCLE_SHA1
 }
 
 register_definition() {
@@ -273,13 +268,13 @@ register_definition() {
 
 check_service_status() {
     counter=0
     sleep 60
-    servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
+    servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
     while [[ $servicestatus != *"steady state"* ]]
     do
         echo "Current event message : $servicestatus"
         echo "Waiting for 30 seconds to check the service status...."
         sleep 30
-        servicestatus=`aws ecs describe-services --service $AWS_ECS_SERVICE --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
+        servicestatus=`aws ecs describe-services --service $1 --cluster $AWS_ECS_CLUSTER | $JQ '.services[].events[0].message'`
         counter=`expr $counter + 1`
         if [[ $counter -gt $COUNTER_LIMIT ]] ; then
             echo "Service does not reach steady state within 10 minutes. Please check"
@@ -290,6 +285,10 @@ check_service_status() {
 }
 
 configure_aws_cli
-push_ecr_image
-deploy_cluster
-check_service_status
+push_ecr_image $AWS_REPOSITORY
+deploy_cluster $AWS_ECS_SERVICE "npm" "run" "start"
+
+deploy_cluster $AWS_ECS_SERVICE_CONSUMERS "npm" "run" "startKafkaConsumers"
+
+check_service_status $AWS_ECS_SERVICE
+check_service_status $AWS_ECS_SERVICE_CONSUMERS
\ No newline at end of file
diff --git a/package.json b/package.json
index 008d7ad1..9aadead8 100644
--- a/package.json
+++ b/package.json
@@ -15,6 +15,7 @@
     "migrate:es": "./node_modules/.bin/babel-node migrations/seedElasticsearchIndex.js",
     "prestart": "npm run -s build",
     "start": "node dist",
+    "startKafkaConsumers": "npm run -s build && node dist/index-kafka.js",
     "start:dev": "NODE_ENV=development PORT=8001 nodemon -w src --exec \"babel-node src --presets es2015\" | ./node_modules/.bin/bunyan",
     "test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --compilers js:babel-core/register $(find src -path '*spec.js*')",
     "test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --compilers js:babel-core/register $(find src -path '*spec.js*')",
@@ -48,7 +49,7 @@
     "express-request-id": "^1.1.0",
     "express-sanitizer": "^1.0.2",
     "express-validation": "^0.6.0",
-    "http-aws-es": "^1.1.3",
+    "http-aws-es": "^4.0.0",
     "joi": "^8.0.5",
     "jsonwebtoken": "^8.3.0",
     "lodash": "^4.16.4",
diff --git a/src/index-kafka.js b/src/index-kafka.js
new file mode 100644
index 00000000..aa0123e5
--- /dev/null
+++ b/src/index-kafka.js
@@ -0,0 +1,51 @@
+import _ from 'lodash';
+import config from 'config';
+import startKafkaConsumer from './services/kafkaConsumer';
+import { kafkaHandlers } from './events';
+import models from './models';
+
+const coreLib = require('tc-core-library-js');
+
+
+// =======================
+// Logger =========
+// =======================
+let appName = 'tc-projects-consumer';
+switch (process.env.NODE_ENV.toLowerCase()) {
+  case 'development':
+    appName += '-dev';
+    break;
+  case 'qa':
+    appName += '-qa';
+    break;
+  case 'production':
+  default:
+    appName += '-prod';
+    break;
+}
+
+const logger = coreLib.logger({
+  name: appName,
+  level: _.get(config, 'logLevel', 'debug').toLowerCase(),
+  captureLogs: config.get('captureLogs'),
+  logentriesToken: _.get(config, 'logentriesToken', null),
+});
+
+// =======================
+// Database =========
+// =======================
+logger.info('Registering models ... ', !!models);
+
+/**
+ * Handle server shutdown gracefully
+ * @returns {undefined}
+ */
+function gracefulShutdown() {
+  // TODO
+}
+process.on('SIGTERM', gracefulShutdown);
+process.on('SIGINT', gracefulShutdown);
+
+const app = { logger, models };
+
+module.exports = startKafkaConsumer(kafkaHandlers, app, logger);
diff --git a/src/services/index.js b/src/services/index.js
index 017a6ec2..6c8306c8 100644
--- a/src/services/index.js
+++ b/src/services/index.js
@@ -2,8 +2,8 @@
 import config from 'config';
 
 import RabbitMQService from './rabbitmq';
-import startKafkaConsumer from './kafkaConsumer';
-import { kafkaHandlers } from '../events';
+// import startKafkaConsumer from './kafkaConsumer';
+// import { kafkaHandlers } from '../events';
 
 /**
  * Responsible for establishing connections to all external services
@@ -33,10 +33,10 @@ module.exports = (fapp, logger) => {
     .then(() => {
       logger.info('RabbitMQ service initialized');
     })
-    .then(() => startKafkaConsumer(kafkaHandlers, app, logger))
-    .then(() => {
-      logger.info('Kafka consumer service initialized');
-    })
+    // .then(() => startKafkaConsumer(kafkaHandlers, app, logger))
+    // .then(() => {
+    //   logger.info('Kafka consumer service initialized');
+    // })
     .catch((err) => {
       logger.error('Error initializing services', err);
       // gracefulShutdown()
diff --git a/src/util.js b/src/util.js
index 906f1389..542c2ee1 100644
--- a/src/util.js
+++ b/src/util.js
@@ -16,7 +16,7 @@ import config from 'config';
 import urlencode from 'urlencode';
 import elasticsearch from 'elasticsearch';
 import Promise from 'bluebird';
-import AWS from 'aws-sdk';
+// import AWS from 'aws-sdk';
 
 import { ADMIN_ROLES, TOKEN_SCOPES } from './constants';
 
@@ -317,10 +317,10 @@ _.assignIn(util, {
       apiVersion: config.get('elasticsearchConfig.apiVersion'),
       hosts: esHost,
       connectionClass: require('http-aws-es'), // eslint-disable-line global-require
-      amazonES: {
-        region: 'us-east-1',
-        credentials: new AWS.EnvironmentCredentials('AWS'),
-      },
+      // amazonES: {
+      //   region: 'us-east-1',
+      //   credentials: new AWS.EnvironmentCredentials('AWS'),
+      // },
     });
   } else {
     esClient = new elasticsearch.Client(_.cloneDeep(config.elasticsearchConfig));
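
For reference: the new src/index-kafka.js entry point delegates all consumer work to startKafkaConsumer(kafkaHandlers, app, logger) from src/services/kafkaConsumer.js, which this diff does not modify. Below is a minimal sketch of the dispatch contract that wiring appears to assume — one handler per topic, looked up in the kafkaHandlers map. The dispatchMessage name, the (app, topic, payload) handler signature, and the JSON payload format are illustrative assumptions for this sketch, not the project's actual implementation.

// Illustrative sketch only (not part of this diff): route one raw Kafka
// message to the handler registered for its topic in the handlers map.
// Handler signature (app, topic, payload) and JSON payloads are assumptions.
function dispatchMessage(handlers, app, logger, topic, rawMessage) {
  const handler = handlers[topic];
  if (!handler) {
    // Unknown topics are skipped rather than treated as errors.
    logger.info(`No handler registered for topic ${topic}, skipping message`);
    return Promise.resolve();
  }
  let payload;
  try {
    payload = JSON.parse(rawMessage);
  } catch (err) {
    logger.error(`Ignoring malformed JSON on topic ${topic}`, err);
    return Promise.resolve();
  }
  // Normalize sync/async handlers and keep one failure from killing the loop.
  return Promise.resolve()
    .then(() => handler(app, topic, payload))
    .catch(err => logger.error(`Handler for topic ${topic} failed`, err));
}

Running this dispatcher as its own ECS service (the second deploy_cluster call with $AWS_ECS_SERVICE_CONSUMERS) keeps consumer scaling and restarts independent of the API service while reusing the same Docker image; the two services differ only in the entryPoint injected into the task definition.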