Skip to content

Merge into Dev from dev_msmstsv1.3 branch #98

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 13 commits into from
Mar 25, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
128 changes: 66 additions & 62 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -1,89 +1,93 @@
version: 2

jobs:
# Build & Deploy against development backend
"build-dev":
version: 2
defaults: &defaults
docker:
- image: docker:17.06.1-ce-git
steps:
# Initialization.
- checkout
- setup_remote_docker
- run:
name: Installation of build dependencies.
command: apk add --no-cache bash
install_dependency: &install_dependency
name: Installation of build and deployment dependencies.
command: |
apk update
apk add --no-cache bash curl
apk upgrade
apk add --no-cache jq py-pip sudo
sudo pip install awscli --upgrade
install_deploysuite: &install_deploysuite
name: Installation of install_deploysuite.
command: |
git clone --branch master https://github.com/topcoder-platform/tc-deploy-scripts ../buildscript
cp ./../buildscript/master_deploy.sh .
cp ./../buildscript/buildenv.sh .
cp ./../buildscript/awsconfiguration.sh .

restore_cache_settings_for_build: &restore_cache_settings_for_build
key: docker-tc-notify-{{ checksum "package-lock.json" }}

# Restoration of node_modules from cache.
- restore_cache:
key: docker-tc-notify-{{ checksum "package-lock.json" }}
save_cache_settings: &save_cache_settings
key: docker-tc-notify-{{ checksum "package-lock.json" }}
paths:
- node_modules

# Build of Docker image.
- run:
run_build: &run_build
name: Build of Docker image
command: ./build.sh DEV
command: ./build.sh

# Caching node modules.
- save_cache:
key: docker-tc-notify-{{ checksum "package-lock.json" }}
paths:
- node_modules

# Deployment.
- run:
name: Installing AWS client
command: |
apk add --no-cache jq py-pip sudo
sudo pip install awscli --upgrade

jobs:
# Build & Deploy against development backend
"build-dev":
<<: *defaults
steps:
- checkout
- setup_remote_docker
- run: *install_dependency
- run: *install_deploysuite
- restore_cache: *restore_cache_settings_for_build
- run: *run_build
- save_cache: *save_cache_settings
- deploy:
command: ./deploy.sh DEV $CIRCLE_SHA1
name: Running Masterscript - deploy tc-notifications-api service
command: |
./awsconfiguration.sh DEV
source awsenvconf
./buildenv.sh -e DEV -b dev-tc-notifications-deployvar
source buildenvvar
./master_deploy.sh -d ECS -e DEV -t latest -s dev-global-appvar,dev-tc-notifications-appvar -i tc-notifications
echo "Running Masterscript - deploy tc-notifications-consumers service"
if [ -e dev-tc-notifications-deployvar.json ]; then sudo rm -vf dev-tc-notifications-deployvar.json; fi
./buildenv.sh -e DEV -b dev-tc-notifications-consumers-deployvar
source buildenvvar
./master_deploy.sh -d ECS -e DEV -t latest -s dev-global-appvar,dev-tc-notifications-appvar -i tc-notifications

"build-prod":
docker:
- image: docker:17.06.1-ce-git
<<: *defaults
steps:
# Initialization.
- checkout
- setup_remote_docker
- run:
name: Installation of build dependencies.
command: apk add --no-cache bash

# Restoration of node_modules from cache.
- restore_cache:
key: docker-tc-notify-{{ checksum "package-lock.json" }}

# Build of Docker image.
- run:
name: Build of Docker image
command: ./build.sh PROD

# Caching node modules.
- save_cache:
key: docker-tc-notify-{{ checksum "package-lock.json" }}
paths:
- node_modules

# Deployment.
- run:
name: Installing AWS client
command: |
apk add --no-cache jq py-pip sudo
sudo pip install awscli --upgrade

- run: *install_dependency
- run: *install_deploysuite
- restore_cache: *restore_cache_settings_for_build
- run: *run_build
- save_cache: *save_cache_settings
- deploy:
command: ./deploy.sh PROD $CIRCLE_SHA1

command: |
./awsconfiguration.sh PROD
source awsenvconf
./buildenv.sh -e PROD -b prod-tc-notifications-deployvar
source buildenvvar
./master_deploy.sh -d ECS -e PROD -t latest -s prod-global-appvar,prod-tc-notifications-appvar -i tc-notifications

workflows:
version: 2
build:
jobs:
# Development builds are executed on "develop" branch only.
- "build-dev":
context : org-global
filters:
branches:
only: [dev]
only: ['dev', 'dev_msmstsv1.3']
- "build-prod":
context : org-global
filters:
branches:
only: master
3 changes: 2 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,5 @@ COPY . .
RUN npm install
RUN npm install dotenv --save
RUN npm test
CMD ["npm", "start"]
ENTRYPOINT ["npm","run"]
#CMD ["npm", "start"]

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@nkumar-topcoder just curious, what is the purpose of using ENTRYPOINT instead of CMD?

18 changes: 9 additions & 9 deletions build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,17 @@ set -eo pipefail
# This script expects a single argument: NODE_ENV, which must be either
# "development" or "production".

NODE_ENV=$1
# NODE_ENV=$1

ENV=$1
AWS_REGION=$(eval "echo \$${ENV}_AWS_REGION")
AWS_ACCESS_KEY_ID=$(eval "echo \$${ENV}_AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY=$(eval "echo \$${ENV}_AWS_SECRET_ACCESS_KEY")
AWS_ACCOUNT_ID=$(eval "echo \$${ENV}_AWS_ACCOUNT_ID")
AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY")

TAG=$AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/tc-notifications:$CIRCLE_SHA1
#ENV=$1
#AWS_REGION=$(eval "echo \$${ENV}_AWS_REGION")
#AWS_ACCESS_KEY_ID=$(eval "echo \$${ENV}_AWS_ACCESS_KEY_ID")
#AWS_SECRET_ACCESS_KEY=$(eval "echo \$${ENV}_AWS_SECRET_ACCESS_KEY")
#AWS_ACCOUNT_ID=$(eval "echo \$${ENV}_AWS_ACCOUNT_ID")
#AWS_REPOSITORY=$(eval "echo \$${ENV}_AWS_REPOSITORY")

#TAG=$AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/tc-notifications:$CIRCLE_SHA1
TAG="tc-notifications:latest"
docker build -t $TAG .

# Copies "node_modules" from the created image, if necessary for caching.
Expand Down
2 changes: 1 addition & 1 deletion config/default.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ module.exports = {
},

AUTH_SECRET: process.env.authSecret,
VALID_ISSUERS: process.env.validIssuers ? process.env.validIssuers.replace(/\\"/g, '') : null,
VALID_ISSUERS: process.env.VALID_ISSUERS ? process.env.VALID_ISSUERS.replace(/\\"/g, '') : null,
// keep it here for dev purposes, it's only needed by modified version of tc-core-library-js
// which skips token validation when locally deployed

Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@
"sequelize": "^4.21.0",
"superagent": "^3.8.0",
"tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.6",
"winston": "^2.2.0"
"winston": "^2.2.0",
"topcoder-healthcheck-dropin": "^1.0.3"
},
"engines": {
"node": "6.x"
Expand Down
19 changes: 17 additions & 2 deletions src/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ const logger = require('./common/logger');
const errors = require('./common/errors');
const models = require('./models');
const Kafka = require('no-kafka');
const healthcheck = require('topcoder-healthcheck-dropin')

/**
* Start Kafka consumer for event bus events.
Expand Down Expand Up @@ -74,11 +75,25 @@ function startKafkaConsumer(handlers, notificationServiceHandlers) {
});
});

const check = function () {
if (!consumer.client.initialBrokers && !consumer.client.initialBrokers.length) {
return false
}
let connected = true
consumer.client.initialBrokers.forEach(conn => {
logger.debug(`url ${conn.server()} - connected=${conn.connected}`)
connected = conn.connected & connected
})
return connected
}

consumer
.init()
.then(() => _.each(_.keys(handlers),
(topicName) => consumer.subscribe(topicName, dataHandler)))
.then(() => {
_.each(_.keys(handlers),
(topicName) => consumer.subscribe(topicName, dataHandler))
healthcheck.init([check])
})
.catch((err) => {
logger.error('Kafka Consumer failed');
logger.error(err);
Expand Down