diff --git a/.nvmrc b/.nvmrc
new file mode 100644
index 00000000..b06cd07c
--- /dev/null
+++ b/.nvmrc
@@ -0,0 +1 @@
+12.18.0
diff --git a/README.md b/README.md
index 4c142f0a..e8870c8d 100644
--- a/README.md
+++ b/README.md
@@ -1,131 +1,204 @@
# Topcoder Bookings API
-## Dependencies
-
-- nodejs https://nodejs.org/en/ (v12+)
-- PostgreSQL
-- ElasticSearch (7.x)
-- Docker
-- Docker-Compose
-
-## Configuration
-
-Configuration for the application is at `config/default.js`.
-
-The following parameters can be set in config files or in env variables:
-
-- `LOG_LEVEL`: the log level, default is 'debug'
-- `PORT`: the server port, default is 3000
-- `BASE_PATH`: the server api base path
-- `AUTH_SECRET`: The authorization secret used during token verification.
-- `VALID_ISSUERS`: The valid issuer of tokens, a json array contains valid issuer.
-
-- `AUTH0_URL`: Auth0 URL, used to get TC M2M token
-- `AUTH0_AUDIENCE`: Auth0 audience, used to get TC M2M token
-- `AUTH0_AUDIENCE_UBAHN`: Auth0 audience for U-Bahn
-- `TOKEN_CACHE_TIME`: Auth0 token cache time, used to get TC M2M token
-- `AUTH0_CLIENT_ID`: Auth0 client id, used to get TC M2M token
-- `AUTH0_CLIENT_SECRET`: Auth0 client secret, used to get TC M2M token
-- `AUTH0_PROXY_SERVER_URL`: Proxy Auth0 URL, used to get TC M2M token
-
-- `m2m.M2M_AUDIT_USER_ID`: default value is `00000000-0000-0000-0000-000000000000`
-- `m2m.M2M_AUDIT_HANDLE`: default value is `TopcoderService`
-
-- `DATABASE_URL`: PostgreSQL database url.
-- `DB_SCHEMA_NAME`: string - PostgreSQL database target schema
-- `PROJECT_API_URL`: the project service url
-- `TC_API`: the Topcoder v5 url
-- `ORG_ID`: the organization id
-- `TOPCODER_SKILL_PROVIDER_ID`: the referenced skill provider id
-
-- `esConfig.HOST`: the elasticsearch host
-- `esConfig.ES_INDEX_JOB`: the job index
-- `esConfig.ES_INDEX_JOB_CANDIDATE`: the job candidate index
-- `esConfig.ES_INDEX_RESOURCE_BOOKING`: the resource booking index
-- `esConfig.AWS_REGION`: The Amazon region to use when using AWS Elasticsearch service
-- `esConfig.ELASTICCLOUD.id`: The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
-- `esConfig.ELASTICCLOUD.username`: The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
-- `esConfig.ELASTICCLOUD.password`: The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
-
-- `BUSAPI_URL`: Topcoder Bus API URL
-- `KAFKA_ERROR_TOPIC`: The error topic at which bus api will publish any errors
-- `KAFKA_MESSAGE_ORIGINATOR`: The originator value for the kafka messages
-
-- `TAAS_JOB_CREATE_TOPIC`: the create job entity Kafka message topic
-- `TAAS_JOB_UPDATE_TOPIC`: the update job entity Kafka message topic
-- `TAAS_JOB_DELETE_TOPIC`: the delete job entity Kafka message topic
-- `TAAS_JOB_CANDIDATE_CREATE_TOPIC`: the create job candidate entity Kafka message topic
-- `TAAS_JOB_CANDIDATE_UPDATE_TOPIC`: the update job candidate entity Kafka message topic
-- `TAAS_JOB_CANDIDATE_DELETE_TOPIC`: the delete job candidate entity Kafka message topic
-- `TAAS_RESOURCE_BOOKING_CREATE_TOPIC`: the create resource booking entity Kafka message topic
-- `TAAS_RESOURCE_BOOKING_UPDATE_TOPIC`: the update resource booking entity Kafka message topic
-- `TAAS_RESOURCE_BOOKING_DELETE_TOPIC`: the delete resource booking entity Kafka message topic
-
-
-## PostgreSQL Database Setup
-- Go to https://www.postgresql.org/ download and install the PostgreSQL.
-- Modify `DATABASE_URL` under `config/default.js` to meet your environment.
-- Run `npm run init-db` to create table(run `npm run init-db force` to force creating table)
+## Requirements
-## DB Migration
-- `npm run migrate`: run any migration files which haven't run yet.
-- `npm run migrate:undo`: revert most recent migration.
+- [Node.js](https://nodejs.org/en/) v12+
+- [Docker](https://www.docker.com/)
+- [Docker-Compose](https://docs.docker.com/compose/install/)
-Configuration for migration is at `./config/config.json`.
+### Steps to run locally
-The following parameters can be set in the config file or via env variables:
+1. 📦 Install npm dependencies
-- `username`: set via env `DB_USERNAME`; datebase username
-- `password`: set via env `DB_PASSWORD`; datebase password
-- `database`: set via env `DB_NAME`; datebase name
-- `host`: set via env `DB_HOST`; datebase host name
+ ```bash
+ npm install
+ ```
+
+2. ⚙ Local config
+
+ 1. In the `taas-apis` root directory create a `.env` file with the following environment variables. Values for **Auth0 config** should be shared with you on the forum.
+
+ ```bash
+ # Auth0 config
+ AUTH0_URL=
+ AUTH0_AUDIENCE=
+ AUTH0_AUDIENCE_UBAHN=
+ AUTH0_CLIENT_ID=
+ AUTH0_CLIENT_SECRET=
+
+ # Locally deployed services (via docker-compose)
+ ES_HOST=dockerhost:9200
+ DATABASE_URL=postgres://postgres:postgres@dockerhost:5432/postgres
+ BUSAPI_URL=http://dockerhost:8002/v5
+ ```
+
+ - Values from this file are automatically used by many `npm` commands.
+ - ⚠️ Never commit this file or its copy to the repository!
+
+ 1. Set `dockerhost` to point to the IP address of Docker. The Docker IP address depends on your system. For example, if Docker runs on IP `127.0.0.1`, add the following line to your `/etc/hosts` file:
+
+ ```
+ 127.0.0.1 dockerhost
+ ```
+
+ Alternatively, you may update the `.env` file and replace `dockerhost` with your Docker IP address.
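+
+ If you are unsure of the Docker IP: on native Docker installations it is typically `127.0.0.1`, while Docker Toolbox / Docker Machine users can print it with the following command (this assumes `docker-machine` is installed):
+
+ ```bash
+ docker-machine ip
+ ```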
+
+3. 🚢 Start docker-compose with the services which are required to run Topcoder Bookings API locally
+
+ ```bash
+ npm run services:up
+ ```
+
+ Wait until all containers are fully started. As a good indicator, wait until `taas-es-processor` has successfully started, which you can check by viewing its logs:
+
+ ```bash
+ npm run services:logs -- -f taas-es-processor
+ ```
+
+ A good logs example looks like the following:
+
+ - First, `taas-es-processor` waits for `kafka-client` to create all the required topics and exit; you should see:
+
+ ```
+ tc-taas-es-processor | Waiting for kafka-client to exit....
+ ```
+
+ - After that, `taas-es-processor` itself starts. Make sure it successfully connected to Kafka: you should see 9 lines containing the text `Subscribed to taas.`:
+
+ ```
+ tc-taas-es-processor | 2021-01-22T14:27:48.971Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.create:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.972Z DEBUG no-kafka-client Subscribed to taas.job.create:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.972Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.delete:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.973Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.delete:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.974Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.update:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.975Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.create:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.976Z DEBUG no-kafka-client Subscribed to taas.job.delete:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.977Z DEBUG no-kafka-client Subscribed to taas.job.update:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-22T14:27:48.978Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.update:0 offset 0 leader kafka:9093
+ ```
+
+
+ If you want to learn more about the docker-compose configuration, see the details below.
+
+ This docker-compose file starts the following services:
+ | Service | Name | Port |
+ |----------|:-----:|:----:|
+ | PostgreSQL | postgres | 5432 |
+ | Elasticsearch | elasticsearch | 9200 |
+ | Zookeeper | zookeeper | 2181 |
+ | Kafka | kafka | 9092 |
+ | [tc-bus-api](https://github.com/topcoder-platform/tc-bus-api) | tc-bus-api | 8002 |
+ | [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) | taas-es-processor | 5000 |
-## ElasticSearch Setup
-- Go to https://www.elastic.co/downloads/ download and install the elasticsearch.
-- Modify `esConfig` under `config/default.js` to meet your environment.
-- Run `npm run create-index` to create ES index.
-- Run `npm run delete-index` to delete ES index.
+ - Because many of the Topcoder services in this docker-compose require Auth0 configuration for M2M calls, the docker-compose file passes the environment variables `AUTH0_CLIENT_ID`, `AUTH0_CLIENT_SECRET`, `AUTH0_URL`, `AUTH0_AUDIENCE`, `AUTH0_PROXY_SERVER_URL` to its containers. docker-compose reads them from the `.env` file if provided.
-## Local Deployment
+ - `docker-compose` automatically creates the Kafka topics used by `taas-es-processor`, as listed in `local/kafka-client/topics.txt`.
-- Install dependencies `npm install`
-- Run lint `npm run lint`
-- Run lint fix `npm run lint:fix`
-- Clear and init db `npm run init-db force`
-- Clear and create es index
+ - To view the logs from any container inside docker-compose, use the following command, replacing `SERVICE_NAME` with the corresponding value under the **Name** column in the table above:
- ``` bash
- npm run delete-index # run this if you already created index
- npm run create-index
- ```
+ ```bash
+ npm run services:logs -- -f SERVICE_NAME
+ ```
-- Start app `npm start`
-- App is running at `http://localhost:3000`
+ - If you want to modify the code of any of the services which run inside this docker-compose file, you can stop that service with `docker-compose -f local/docker-compose.yml stop SERVICE_NAME` and then run it separately, following its README file, as in the example below.
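+
+ For example, to stop `taas-es-processor` so you can run it from a local checkout instead (the service name comes from the table above):
+
+ ```bash
+ docker-compose -f local/docker-compose.yml stop taas-es-processor
+ ```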
-## Local Deployment with Docker
+
-Make sure all config values are right, and you can run on local successful, then run below commands
+ _NOTE: In production these dependencies / services are hosted & managed outside Topcoder Bookings API._
-1. Navigate to the directory `docker`
+4. ♻ Init DB, ES
-2. Rename the file `sample.api.env` to `api.env`
+ ```bash
+ npm run local:init
+ ```
-3. Set the required AUTH0 configurations, PostgreSQL Database url and ElasticSearch host in the file `api.env`
+ This command will do 2 things:
- Note that you can also add other variables to `api.env`, with `=` format per line.
- If using AWS ES you should add `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` variables as well.
+ - create database tables
+ - create Elasticsearch indexes
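+
+ To double-check the result, you could list the created Elasticsearch indexes (an optional, illustrative check using the Elasticsearch `_cat` API):
+
+ ```bash
+ curl http://dockerhost:9200/_cat/indices?v
+ ```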
-4. Once that is done, run the following command
+5. 🚀 Start Topcoder Bookings API
- ```bash
- docker-compose up
- ```
+ ```bash
+ npm run dev
+ ```
-5. When you are running the application for the first time, It will take some time initially to download the image and install the dependencies
+ Runs the Topcoder Bookings API using `nodemon`, so it restarts automatically whenever any file is updated.
+ The Topcoder Bookings API will be served on `http://localhost:3000`.
+
+## NPM Commands
+
+| Command | Description |
+| ------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------- |
+| `npm run lint` | Check for lint errors. |
+| `npm run lint:fix` | Check for lint errors and fix them automatically when possible. |
+| `npm run build` | Build source code for production run into `dist` folder. |
+| `npm run start` | Start app in the production mode from prebuilt `dist` folder. |
+| `npm run dev` | Start app in the development mode using `nodemon`. |
+| `npm run test` | Run tests. |
+| `npm run test-data` | Clears and imports test data into Elasticsearch. |
+| `npm run init-db` | Initializes Database. |
+| `npm run create-index` | Create Elasticsearch indexes. |
+| `npm run delete-index` | Delete Elasticsearch indexes. |
+| `npm run services:up` | Start services via docker-compose for local development. |
+| `npm run services:down` | Stop services via docker-compose for local development. |
+| `npm run services:logs -- -f SERVICE_NAME` | View logs of a specific service inside docker-compose. |
+| `npm run local:init` | Creates Elasticsearch indexes and initializes Database. |
+| `npm run cov` | Code Coverage Report. |
+| `npm run migrate` | Run any migration files which haven't run yet. |
+| `npm run migrate:undo` | Revert most recent migration. |
+
+## Kafka commands
+
+If you've used `docker-compose` with the file `local/docker-compose.yml` during local setup to spawn Kafka and Zookeeper, you can use the following commands to manipulate Kafka topics and messages
+(replace `TOPIC_NAME` with the name of the desired topic):
+
+### Create Topic
+
+```bash
+docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME
+```
+
+### List Topics
+
+```bash
+docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181
+```
+
+### Watch Topic
+
+```bash
+docker exec tc-taas-kafka /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic TOPIC_NAME
+```
+
+### Post Message to Topic (from stdin)
+
+```bash
+docker exec -it tc-taas-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic TOPIC_NAME
+```
+
+- Enter or copy/paste the message into the console after starting this command.
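+
+Alternatively, a message can be piped in non-interactively. A minimal sketch (the payload is purely illustrative):
+
+```bash
+echo '{"hello": "world"}' | docker exec -i tc-taas-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic TOPIC_NAME
+```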
+
+## DB Migration
+
+- `npm run migrate`: run any migration files which haven't run yet.
+- `npm run migrate:undo`: revert most recent migration.
+
+Configuration for migration is at `./config/config.json`.
+
+The following parameters can be set in the config file or via env variables:
+
+- `username`: set via env `DB_USERNAME`; database username
+- `password`: set via env `DB_PASSWORD`; database password
+- `database`: set via env `DB_NAME`; database name
+- `host`: set via env `DB_HOST`; database host name
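+
+For example, to run migrations against the locally deployed PostgreSQL, these parameters could be passed via env variables like so (a sketch; credentials assumed from `local/docker-compose.yml` and the `.env` example above):
+
+```bash
+DB_USERNAME=postgres DB_PASSWORD=postgres DB_NAME=postgres DB_HOST=dockerhost npm run migrate
+```
+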
## Testing
+
- Run `npm run test` to execute unit tests
- Run `npm run cov` to execute unit tests and generate coverage report.
-
-## Verification
-Refer to the verification document [Verification.md](Verification.md)
diff --git a/Verification.md b/Verification.md
deleted file mode 100644
index 4867b6ea..00000000
--- a/Verification.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Topcoder Bookings API
-
-## Postman test
-- start PostgreSQL and ElasticSearch
-- Refer `README.md#Local Deployment` to start the app
-- Import Postman collection and environment file in the `docs` folder to Postman and execute the scripts to validate the app from top to bottom.
-
-## Note About Testing `/taas-teams` Endpoints
-Before you run tests against the `taas-teams` endpoints, you should insert the dedicated test data by running `npm run test-data`.
-
-## Unit test Coverage
-
-
-``` bash
- 96 passing (170ms)
-
-----------------------------|---------|----------|---------|---------|----------------------------
-File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
-----------------------------|---------|----------|---------|---------|----------------------------
-All files | 98.43 | 91.03 | 100 | 98.56 |
- config | 100 | 100 | 100 | 100 |
- default.js | 100 | 100 | 100 | 100 |
- test.js | 100 | 100 | 100 | 100 |
- src | 90.91 | 50 | 100 | 94.44 |
- bootstrap.js | 90.91 | 50 | 100 | 94.44 | 18
- src/common | 97.69 | 90.91 | 100 | 97.66 |
- errors.js | 100 | 50 | 100 | 100 | 23
- helper.js | 97.5 | 92.86 | 100 | 97.46 | 94,176,284
- src/models | 100 | 92.86 | 100 | 100 |
- Job.js | 100 | 100 | 100 | 100 |
- JobCandidate.js | 100 | 100 | 100 | 100 |
- ResourceBooking.js | 100 | 100 | 100 | 100 |
- index.js | 100 | 80 | 100 | 100 | 29
- src/services | 98.81 | 89.5 | 100 | 98.8 |
- JobCandidateService.js | 98.77 | 88 | 100 | 98.77 | 37
- JobService.js | 97.37 | 85.37 | 100 | 97.32 | 73,285,326
- ResourceBookingService.js | 98.86 | 93.1 | 100 | 98.86 | 54
- TeamService.js | 100 | 90.7 | 100 | 100 | 19,135-138,188-202,251,267
-----------------------------|---------|----------|---------|---------|----------------------------
-```
diff --git a/config/default.js b/config/default.js
index d726ca58..e729d62b 100644
--- a/config/default.js
+++ b/config/default.js
@@ -1,17 +1,29 @@
require('dotenv').config()
module.exports = {
+ // the log level, default is 'debug'
LOG_LEVEL: process.env.LOG_LEVEL || 'debug',
+ // the server port, default is 3000
PORT: process.env.PORT || 3000,
+ // the server api base path
BASE_PATH: process.env.BASE_PATH || '/api/v5',
+ // The authorization secret used during token verification.
AUTH_SECRET: process.env.AUTH_SECRET || 'mysecret',
+ // The valid issuer of tokens, a json array contains valid issuer.
VALID_ISSUERS: process.env.VALID_ISSUERS || '["https://api.topcoder-dev.com", "https://api.topcoder.com", "https://topcoder-dev.auth0.com/", "https://auth.topcoder-dev.com/"]',
+ // Auth0 URL, used to get TC M2M token
AUTH0_URL: process.env.AUTH0_URL,
+ // Auth0 audience, used to get TC M2M token
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE,
+ // Auth0 audience for U-Bahn
AUTH0_AUDIENCE_UBAHN: process.env.AUTH0_AUDIENCE_UBAHN,
+ // Auth0 token cache time, used to get TC M2M token
TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
+ // Auth0 client id, used to get TC M2M token
AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
+ // Auth0 client secret, used to get TC M2M token
AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
+ // Proxy Auth0 URL, used to get TC M2M token
AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL,
m2m: {
@@ -19,45 +31,71 @@ module.exports = {
M2M_AUDIT_HANDLE: process.env.M2M_AUDIT_HANDLE || 'TopcoderService'
},
+ // the Topcoder v5 url
TC_API: process.env.TC_API || 'https://api.topcoder-dev.com/v5',
+ // the organization id
ORG_ID: process.env.ORG_ID || '36ed815b-3da1-49f1-a043-aaed0a4e81ad',
+ // the referenced skill provider id
TOPCODER_SKILL_PROVIDER_ID: process.env.TOPCODER_SKILL_PROVIDER_ID || '9cc0795a-6e12-4c84-9744-15858dba1861',
TOPCODER_USERS_API: process.env.TOPCODER_USERS_API || 'https://api.topcoder-dev.com/v3/users',
+ // PostgreSQL database url.
DATABASE_URL: process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/postgres',
+ // string - PostgreSQL database target schema
DB_SCHEMA_NAME: process.env.DB_SCHEMA_NAME || 'bookings',
+ // the project service url
PROJECT_API_URL: process.env.PROJECT_API_URL || 'https://api.topcoder-dev.com',
esConfig: {
+ // the elasticsearch host
HOST: process.env.ES_HOST || 'http://localhost:9200',
ELASTICCLOUD: {
+ // The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
id: process.env.ELASTICCLOUD_ID,
+ // The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
username: process.env.ELASTICCLOUD_USERNAME,
+ // The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
password: process.env.ELASTICCLOUD_PASSWORD
},
+ // The Amazon region to use when using AWS Elasticsearch service
AWS_REGION: process.env.AWS_REGION || 'us-east-1', // AWS Region to be used if we use AWS ES
+ // the job index
ES_INDEX_JOB: process.env.ES_INDEX_JOB || 'job',
+ // the job candidate index
ES_INDEX_JOB_CANDIDATE: process.env.ES_INDEX_JOB_CANDIDATE || 'job_candidate',
+ // the resource booking index
ES_INDEX_RESOURCE_BOOKING: process.env.ES_INDEX_RESOURCE_BOOKING || 'resource_booking'
},
+ // Topcoder Bus API URL
BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5',
+ // The error topic at which bus api will publish any errors
KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'common.error.reporting',
+ // The originator value for the kafka messages
KAFKA_MESSAGE_ORIGINATOR: process.env.KAFKA_MESSAGE_ORIGINATOR || 'taas-api',
// topics for job service
+ // the create job entity Kafka message topic
TAAS_JOB_CREATE_TOPIC: process.env.TAAS_JOB_CREATE_TOPIC || 'taas.job.create',
+ // the update job entity Kafka message topic
TAAS_JOB_UPDATE_TOPIC: process.env.TAAS_JOB_UPDATE_TOPIC || 'taas.job.update',
+ // the delete job entity Kafka message topic
TAAS_JOB_DELETE_TOPIC: process.env.TAAS_JOB_DELETE_TOPIC || 'taas.job.delete',
// topics for jobcandidate service
+ // the create job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_CREATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_CREATE_TOPIC || 'taas.jobcandidate.create',
+ // the update job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_UPDATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_UPDATE_TOPIC || 'taas.jobcandidate.update',
+ // the delete job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_DELETE_TOPIC: process.env.TAAS_JOB_CANDIDATE_DELETE_TOPIC || 'taas.jobcandidate.delete',
// topics for job service
+ // the create resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_CREATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_CREATE_TOPIC || 'taas.resourcebooking.create',
+ // the update resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_UPDATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC || 'taas.resourcebooking.update',
+ // the delete resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_DELETE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_DELETE_TOPIC || 'taas.resourcebooking.delete'
}
diff --git a/local/docker-compose.yml b/local/docker-compose.yml
new file mode 100644
index 00000000..509d8640
--- /dev/null
+++ b/local/docker-compose.yml
@@ -0,0 +1,88 @@
+version: "3"
+services:
+ postgres:
+ container_name: tc-taas-postgres
+ image: postgres
+ environment:
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ ports:
+ - 5432:5432
+
+ zookeeper:
+ image: wurstmeister/zookeeper
+ container_name: tc-taas-zookeeper
+ ports:
+ - 2181:2181
+
+ kafka:
+ image: wurstmeister/kafka
+ container_name: tc-taas-kafka
+ depends_on:
+ - zookeeper
+ ports:
+ - 9092:9092
+ environment:
+ KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9093,OUTSIDE://localhost:9092
+ KAFKA_LISTENERS: INSIDE://kafka:9093,OUTSIDE://0.0.0.0:9092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+
+ kafka-client:
+ container_name: tc-kafka-client
+ build: ./kafka-client
+ depends_on:
+ - kafka
+ - zookeeper
+
+ elasticsearch:
+ image: elasticsearch:7.7.1
+ container_name: tc-taas-elasticsearch
+ environment:
+ - discovery.type=single-node
+ ports:
+ - 9200:9200
+
+ taas-es-processor:
+ container_name: tc-taas-es-procesor
+ build:
+ context: ./generic-tc-service
+ args:
+ NODE_VERSION: 12.16.3
+ GIT_URL: https://github.com/topcoder-platform/taas-es-processor
+ GIT_BRANCH: dev
+ command: start kafka-client
+ ports:
+ - 5000:5000
+ depends_on:
+ - kafka-client
+ - elasticsearch
+ environment:
+ - KAFKA_URL=kafka:9093
+ - ES_HOST=http://elasticsearch:9200
+
+ tc-bus-api:
+ container_name: tc-bus-api
+ build:
+ context: ./generic-tc-service
+ args:
+ NODE_VERSION: 8.11.3
+ GIT_URL: https://github.com/topcoder-platform/tc-bus-api
+ GIT_BRANCH: dev
+ BYPASS_TOKEN_VALIDATION: 1
+ command: start kafka-client
+ ports:
+ - 8002:8002
+ depends_on:
+ - kafka-client
+ environment:
+ - PORT=8002
+ - KAFKA_URL=kafka:9093
+ - JWT_TOKEN_SECRET=secret
+ - VALID_ISSUERS="[\"https:\/\/topcoder-newauth.auth0.com\/\",\"https:\/\/api.topcoder-dev.com\",\"https:\/\/topcoder-dev.auth0.com\/\"]"
+ - AUTH0_URL
+ - AUTH0_AUDIENCE
+ - AUTH0_CLIENT_ID
+ - AUTH0_CLIENT_SECRET
+ - AUTH0_PROXY_SERVER_URL
diff --git a/local/generic-tc-service/Dockerfile b/local/generic-tc-service/Dockerfile
new file mode 100644
index 00000000..63d00bb3
--- /dev/null
+++ b/local/generic-tc-service/Dockerfile
@@ -0,0 +1,15 @@
+ARG NODE_VERSION=12.16.3
+
+FROM node:$NODE_VERSION
+ARG GIT_URL
+ARG GIT_BRANCH
+ARG BYPASS_TOKEN_VALIDATION
+
+RUN git clone $GIT_URL /opt/app
+WORKDIR /opt/app
+RUN git checkout -b node-branch origin/$GIT_BRANCH
+
+RUN npm install
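+# When BYPASS_TOKEN_VALIDATION=1, patch tc-core-library-js so tokens are decoded
+# without cryptographic verification (intended for local development only)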
+RUN if [ $BYPASS_TOKEN_VALIDATION -eq 1 ]; then sed -i '/decodedToken = jwt.decode/a \ callback(undefined, decodedToken.payload); return;' node_modules/tc-core-library-js/lib/auth/verifier.js; fi
+COPY docker-entrypoint.sh /opt/
+ENTRYPOINT ["/opt/docker-entrypoint.sh"]
\ No newline at end of file
diff --git a/local/generic-tc-service/docker-entrypoint.sh b/local/generic-tc-service/docker-entrypoint.sh
new file mode 100755
index 00000000..771f3ba5
--- /dev/null
+++ b/local/generic-tc-service/docker-entrypoint.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
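+# Make host.docker.internal resolvable inside the container (e.g. on Linux,
+# where Docker does not provide it) by pointing it at the default gateway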
+HOST_DOMAIN="host.docker.internal"
+ping -q -c1 $HOST_DOMAIN > /dev/null 2>&1
+if [ $? -ne 0 ]; then
+ HOST_IP=$(ip route | awk 'NR==1 {print $3}')
+ echo -e "$HOST_IP\t$HOST_DOMAIN" >> /etc/hosts
+fi
+
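+# If a second argument is passed, treat it as a container name and wait for that
+# container to exit (i.e. stop being pingable) before starting this service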
+if [ $# -eq 2 ]; then
+ echo "Waiting for $2 to exit...."
+ while ping -c1 $2 &>/dev/null
+ do
+ sleep 1
+ done
+ echo "$2 exited!"
+fi
+
+cd /opt/app/ && npm run $1
\ No newline at end of file
diff --git a/local/kafka-client/Dockerfile b/local/kafka-client/Dockerfile
new file mode 100644
index 00000000..e34a3ae6
--- /dev/null
+++ b/local/kafka-client/Dockerfile
@@ -0,0 +1,5 @@
+FROM wurstmeister/kafka
+WORKDIR /app/
+COPY topics.txt .
+COPY create-topics.sh .
+ENTRYPOINT ["/app/create-topics.sh"]
\ No newline at end of file
diff --git a/local/kafka-client/create-topics.sh b/local/kafka-client/create-topics.sh
new file mode 100755
index 00000000..88339fe9
--- /dev/null
+++ b/local/kafka-client/create-topics.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
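+# Create every topic from topics.txt that does not already exist. Listing
+# exists-topics.txt twice makes `uniq -u` drop all already-existing topics.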
+/opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181 > exists-topics.txt
+while read topic; do
+ /opt/kafka/bin/kafka-topics.sh --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $topic
+done < <(sort topics.txt exists-topics.txt exists-topics.txt | uniq -u)
\ No newline at end of file
diff --git a/local/kafka-client/topics.txt b/local/kafka-client/topics.txt
new file mode 100644
index 00000000..6888c6bd
--- /dev/null
+++ b/local/kafka-client/topics.txt
@@ -0,0 +1,9 @@
+taas.job.create
+taas.jobcandidate.create
+taas.resourcebooking.create
+taas.job.update
+taas.jobcandidate.update
+taas.resourcebooking.update
+taas.job.delete
+taas.jobcandidate.delete
+taas.resourcebooking.delete
\ No newline at end of file
diff --git a/package.json b/package.json
index 15788a91..d65a6882 100644
--- a/package.json
+++ b/package.json
@@ -15,6 +15,10 @@
"migrate:undo": "npx sequelize db:migrate:undo",
"test-data": "node scripts/insert-es-data.js",
"test": "mocha test/unit/*.test.js --timeout 30000 --exit",
+ "services:up": "docker-compose -f ./local/docker-compose.yml up -d",
+ "services:down": "docker-compose -f ./local/docker-compose.yml down",
+ "services:logs": "docker-compose -f ./local/docker-compose.yml logs",
+ "local:init": "npm run create-index && npm run init-db",
"cov": "nyc --reporter=html --reporter=text mocha test/unit/*.test.js --timeout 30000 --exit"
},
"keywords": [],