
Commit a871d2c (1 parent: f3610a8)

feat: improve local setup

- docker-compose file to run all the dependent services - done by "phaniram" via challenge "30162708" (2nd place)

9 files changed: 359 additions, 96 deletions

README.md

Lines changed: 175 additions & 96 deletions
Large diffs are not rendered by default.

config/default.js

Lines changed: 38 additions & 0 deletions
@@ -1,63 +1,101 @@
 require('dotenv').config()
 module.exports = {
+  // the log level, default is 'debug'
   LOG_LEVEL: process.env.LOG_LEVEL || 'debug',
+  // the server port, default is 3000
   PORT: process.env.PORT || 3000,
+  // the server api base path
   BASE_PATH: process.env.BASE_PATH || '/api/v5',
 
+  // The authorization secret used during token verification.
   AUTH_SECRET: process.env.AUTH_SECRET || 'mysecret',
+  // The valid issuers of tokens, a JSON array containing the valid issuers.
   VALID_ISSUERS: process.env.VALID_ISSUERS || '["https://api.topcoder-dev.com", "https://api.topcoder.com", "https://topcoder-dev.auth0.com/", "https://auth.topcoder-dev.com/"]',
+  // Auth0 URL, used to get TC M2M token
   AUTH0_URL: process.env.AUTH0_URL,
+  // Auth0 audience, used to get TC M2M token
   AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE,
+  // Auth0 audience for U-Bahn
   AUTH0_AUDIENCE_UBAHN: process.env.AUTH0_AUDIENCE_UBAHN,
+  // Auth0 token cache time, used to get TC M2M token
   TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
+  // Auth0 client id, used to get TC M2M token
   AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
+  // Auth0 client secret, used to get TC M2M token
   AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
+  // Proxy Auth0 URL, used to get TC M2M token
   AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL,
 
   m2m: {
     M2M_AUDIT_USER_ID: process.env.M2M_AUDIT_USER_ID || '00000000-0000-0000-0000-000000000000',
     M2M_AUDIT_HANDLE: process.env.M2M_AUDIT_HANDLE || 'TopcoderService'
   },
 
+  // the Topcoder v5 url
   TC_API: process.env.TC_API || 'https://api.topcoder-dev.com/v5',
+  // the organization id
   ORG_ID: process.env.ORG_ID || '36ed815b-3da1-49f1-a043-aaed0a4e81ad',
+  // the referenced skill provider id
   TOPCODER_SKILL_PROVIDER_ID: process.env.TOPCODER_SKILL_PROVIDER_ID || '9cc0795a-6e12-4c84-9744-15858dba1861',
 
   TOPCODER_USERS_API: process.env.TOPCODER_USERS_API || 'https://api.topcoder-dev.com/v3/users',
 
+  // PostgreSQL database url.
   DATABASE_URL: process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/postgres',
+  // string - PostgreSQL database target schema
   DB_SCHEMA_NAME: process.env.DB_SCHEMA_NAME || 'bookings',
+  // the project service url
   PROJECT_API_URL: process.env.PROJECT_API_URL || 'https://api.topcoder-dev.com',
 
   esConfig: {
+    // the elasticsearch host
     HOST: process.env.ES_HOST || 'http://localhost:9200',
 
     ELASTICCLOUD: {
+      // The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
       id: process.env.ELASTICCLOUD_ID,
+      // The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
       username: process.env.ELASTICCLOUD_USERNAME,
+      // The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
       password: process.env.ELASTICCLOUD_PASSWORD
     },
 
+    // The Amazon region to use when using AWS Elasticsearch service
     AWS_REGION: process.env.AWS_REGION || 'us-east-1', // AWS Region to be used if we use AWS ES
 
+    // the job index
     ES_INDEX_JOB: process.env.ES_INDEX_JOB || 'job',
+    // the job candidate index
     ES_INDEX_JOB_CANDIDATE: process.env.ES_INDEX_JOB_CANDIDATE || 'job_candidate',
+    // the resource booking index
     ES_INDEX_RESOURCE_BOOKING: process.env.ES_INDEX_RESOURCE_BOOKING || 'resource_booking'
   },
 
+  // Topcoder Bus API URL
   BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5',
+  // The error topic at which bus api will publish any errors
   KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'common.error.reporting',
+  // The originator value for the kafka messages
   KAFKA_MESSAGE_ORIGINATOR: process.env.KAFKA_MESSAGE_ORIGINATOR || 'taas-api',
   // topics for job service
+  // the create job entity Kafka message topic
   TAAS_JOB_CREATE_TOPIC: process.env.TAAS_JOB_CREATE_TOPIC || 'taas.job.create',
+  // the update job entity Kafka message topic
   TAAS_JOB_UPDATE_TOPIC: process.env.TAAS_JOB_UPDATE_TOPIC || 'taas.job.update',
+  // the delete job entity Kafka message topic
   TAAS_JOB_DELETE_TOPIC: process.env.TAAS_JOB_DELETE_TOPIC || 'taas.job.delete',
   // topics for jobcandidate service
+  // the create job candidate entity Kafka message topic
   TAAS_JOB_CANDIDATE_CREATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_CREATE_TOPIC || 'taas.jobcandidate.create',
+  // the update job candidate entity Kafka message topic
   TAAS_JOB_CANDIDATE_UPDATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_UPDATE_TOPIC || 'taas.jobcandidate.update',
+  // the delete job candidate entity Kafka message topic
   TAAS_JOB_CANDIDATE_DELETE_TOPIC: process.env.TAAS_JOB_CANDIDATE_DELETE_TOPIC || 'taas.jobcandidate.delete',
   // topics for resource booking service
+  // the create resource booking entity Kafka message topic
   TAAS_RESOURCE_BOOKING_CREATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_CREATE_TOPIC || 'taas.resourcebooking.create',
+  // the update resource booking entity Kafka message topic
   TAAS_RESOURCE_BOOKING_UPDATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC || 'taas.resourcebooking.update',
+  // the delete resource booking entity Kafka message topic
   TAAS_RESOURCE_BOOKING_DELETE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_DELETE_TOPIC || 'taas.resourcebooking.delete'
 }

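Since config/default.js reads every value from process.env (loaded through dotenv), local overrides belong in a .env file at the project root rather than in code. A minimal sketch, assuming the docker-compose services below with their default ports and credentials; the Auth0 values are placeholders and must be filled in with real test credentials:

# .env — loaded by dotenv on startup; these mirror the defaults above and are shown only for illustration
PORT=3000
LOG_LEVEL=debug
DATABASE_URL=postgres://postgres:postgres@localhost:5432/postgres
ES_HOST=http://localhost:9200
# Auth0 M2M credentials (placeholders — replace with real test values)
AUTH0_URL=
AUTH0_AUDIENCE=
AUTH0_CLIENT_ID=
AUTH0_CLIENT_SECRET=
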
local/docker-compose.yml

Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
version: "3"
services:
  postgres:
    container_name: tc-taas-postgres
    image: postgres
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    ports:
      - 5432:5432

  zookeeper:
    image: wurstmeister/zookeeper
    container_name: tc-taas-zookeeper
    ports:
      - 2181:2181

  kafka:
    image: wurstmeister/kafka
    container_name: tc-taas-kafka
    depends_on:
      - zookeeper
    ports:
      - 9092:9092
    environment:
      KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9093,OUTSIDE://localhost:9092
      KAFKA_LISTENERS: INSIDE://kafka:9093,OUTSIDE://0.0.0.0:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181

  kafka-client:
    container_name: tc-kafka-client
    build: ./kafka-client
    depends_on:
      - kafka
      - zookeeper

  elasticsearch:
    image: elasticsearch:7.7.1
    container_name: tc-taas-elasticsearch
    environment:
      - discovery.type=single-node
    ports:
      - 9200:9200

  taas-es-processor:
    container_name: tc-taas-es-procesor
    build:
      context: ./generic-tc-service
      args:
        NODE_VERSION: 12.16.3
        GIT_URL: https://github.com/topcoder-platform/taas-es-processor
        GIT_BRANCH: dev
    command: start kafka-client
    ports:
      - 5000:5000
    depends_on:
      - kafka-client
      - elasticsearch
    environment:
      - KAFKA_URL=kafka:9093
      - ES_HOST=http://elasticsearch:9200

  tc-bus-api:
    container_name: tc-bus-api
    build:
      context: ./generic-tc-service
      args:
        NODE_VERSION: 8.11.3
        GIT_URL: https://github.com/topcoder-platform/tc-bus-api
        GIT_BRANCH: dev
        BYPASS_TOKEN_VALIDATION: 1
    command: start kafka-client
    ports:
      - 8002:8002
    depends_on:
      - kafka-client
    environment:
      - PORT=8002
      - KAFKA_URL=kafka:9093
      - JWT_TOKEN_SECRET=secret
      - VALID_ISSUERS="[\"https:\/\/topcoder-newauth.auth0.com\/\",\"https:\/\/api.topcoder-dev.com\",\"https:\/\/topcoder-dev.auth0.com\/\"]"
      - AUTH0_URL
      - AUTH0_AUDIENCE
      - AUTH0_CLIENT_ID
      - AUTH0_CLIENT_SECRET
      - AUTH0_PROXY_SERVER_URL

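A sketch of bringing the stack up straight from docker-compose and spot-checking the exposed ports; it assumes Docker and docker-compose are installed and the commands are run from the local/ directory:

# start Postgres, Zookeeper, Kafka, the topic-creation client, Elasticsearch, taas-es-processor and tc-bus-api
docker-compose up -d

# list container state; Postgres listens on 5432, Kafka on 9092, Elasticsearch on 9200, tc-bus-api on 8002
docker-compose ps

# Elasticsearch should answer with its cluster info once it is ready
curl http://localhost:9200

# follow the ES processor while it connects to Kafka
docker-compose logs -f taas-es-processor
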
local/generic-tc-service/Dockerfile

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
ARG NODE_VERSION=12.16.3

FROM node:$NODE_VERSION
ARG GIT_URL
ARG GIT_BRANCH
ARG BYPASS_TOKEN_VALIDATION

RUN git clone $GIT_URL /opt/app
WORKDIR /opt/app
RUN git checkout -b node-branch origin/$GIT_BRANCH

RUN npm install
RUN if [ $BYPASS_TOKEN_VALIDATION -eq 1 ]; then sed -i '/decodedToken = jwt.decode/a \ callback(undefined, decodedToken.payload); return;' node_modules/tc-core-library-js/lib/auth/verifier.js; fi
COPY docker-entrypoint.sh /opt/
ENTRYPOINT ["/opt/docker-entrypoint.sh"]
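The same image can also be built by hand when debugging a single service. A sketch, run from the local/ directory, using the build args the compose file passes for taas-es-processor (the image tag is arbitrary):

docker build ./generic-tc-service \
  --build-arg NODE_VERSION=12.16.3 \
  --build-arg GIT_URL=https://github.com/topcoder-platform/taas-es-processor \
  --build-arg GIT_BRANCH=dev \
  -t taas-es-processor:local
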
local/generic-tc-service/docker-entrypoint.sh

Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
#!/bin/bash

HOST_DOMAIN="host.docker.internal"
ping -q -c1 $HOST_DOMAIN > /dev/null 2>&1
if [ $? -ne 0 ]; then
  HOST_IP=$(ip route | awk 'NR==1 {print $3}')
  echo -e "$HOST_IP\t$HOST_DOMAIN" >> /etc/hosts
fi

if [ $# -eq 2 ]; then
  echo "Waiting for $2 to exit...."
  while ping -c1 $2 &>/dev/null
  do
    sleep 1
  done
  echo "$2 exited!"
fi

cd /opt/app/ && npm run $1

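The entrypoint's first argument is the npm script to run and the optional second argument is a container to wait for: the script pings that host until it stops responding (i.e. the container exited), which is how `command: start kafka-client` in the compose file holds the services back until topic creation finishes. A sketch of invoking it manually, assuming the image built above and the default compose network name ("local"):

# run the "start" npm script only after the kafka-client container has exited
# the network name assumes the default compose project name derived from the local/ directory
docker run --rm --network local_default taas-es-processor:local start kafka-client
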
local/kafka-client/Dockerfile

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
FROM wurstmeister/kafka
WORKDIR /app/
COPY topics.txt .
COPY create-topics.sh .
ENTRYPOINT ["/app/create-topics.sh"]

local/kafka-client/create-topics.sh

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
#!/bin/bash

/opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 > exists-topics.txt
while read topic; do
  /opt/kafka/bin/kafka-topics.sh --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $topic
done < <(sort topics.txt exists-topics.txt exists-topics.txt | uniq -u)

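To confirm the client created everything listed in topics.txt, the same kafka-topics.sh tool can be run inside the broker container (the container and host names come from the compose file above):

docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181
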
local/kafka-client/topics.txt

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
taas.job.create
taas.jobcandidate.create
taas.resourcebooking.create
taas.job.update
taas.jobcandidate.update
taas.resourcebooking.update
taas.job.delete
taas.jobcandidate.delete
taas.resourcebooking.delete

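One way to exercise a topic end to end is to publish from inside the broker and watch taas-es-processor react; a sketch using the console producer shipped with the wurstmeister/kafka image (the processor expects a well-formed event payload, so an arbitrary test line may simply be logged as an error):

docker exec -it tc-taas-kafka /opt/kafka/bin/kafka-console-producer.sh \
  --broker-list kafka:9093 --topic taas.job.create

# in another terminal, follow the processor logs
docker-compose -f local/docker-compose.yml logs -f taas-es-processor
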
package.json

Lines changed: 4 additions & 0 deletions
@@ -15,6 +15,10 @@
     "migrate:undo": "npx sequelize db:migrate:undo",
     "test-data": "node scripts/insert-es-data.js",
     "test": "mocha test/unit/*.test.js --timeout 30000 --exit",
+    "services:up": "docker-compose -f ./local/docker-compose.yml up -d",
+    "services:down": "docker-compose -f ./local/docker-compose.yml down",
+    "services:logs": "docker-compose -f ./local/docker-compose.yml logs",
+    "local:init": "npm run create-index && npm run init-db",
     "cov": "nyc --reporter=html --reporter=text mocha test/unit/*.test.js --timeout 30000 --exit"
   },
   "keywords": [],

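With these scripts in place, a local session might look like the sketch below; create-index and init-db are the existing scripts that local:init chains together, and the Auth0 variables still need to be present in .env:

npm run services:up      # start all dependent services in the background
npm run services:logs    # optional: inspect the container logs
npm run local:init       # create the Elasticsearch indices and initialize the database
npm start                # assumption: the API itself is started with the repo's usual start script
npm run services:down    # tear everything down when finished
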