diff --git a/README.md b/README.md
index 4c142f0a..0e2aeaaf 100644
--- a/README.md
+++ b/README.md
@@ -5,68 +5,11 @@
- nodejs https://nodejs.org/en/ (v12+)
- PostgreSQL
- ElasticSearch (7.x)
-- Docker
+- Zookeeper
+- Kafka
+- Docker (version 20.10 or above)
- Docker-Compose
-## Configuration
-
-Configuration for the application is at `config/default.js`.
-
-The following parameters can be set in config files or in env variables:
-
-- `LOG_LEVEL`: the log level, default is 'debug'
-- `PORT`: the server port, default is 3000
-- `BASE_PATH`: the server api base path
-- `AUTH_SECRET`: The authorization secret used during token verification.
-- `VALID_ISSUERS`: The valid issuer of tokens, a json array contains valid issuer.
-
-- `AUTH0_URL`: Auth0 URL, used to get TC M2M token
-- `AUTH0_AUDIENCE`: Auth0 audience, used to get TC M2M token
-- `AUTH0_AUDIENCE_UBAHN`: Auth0 audience for U-Bahn
-- `TOKEN_CACHE_TIME`: Auth0 token cache time, used to get TC M2M token
-- `AUTH0_CLIENT_ID`: Auth0 client id, used to get TC M2M token
-- `AUTH0_CLIENT_SECRET`: Auth0 client secret, used to get TC M2M token
-- `AUTH0_PROXY_SERVER_URL`: Proxy Auth0 URL, used to get TC M2M token
-
-- `m2m.M2M_AUDIT_USER_ID`: default value is `00000000-0000-0000-0000-000000000000`
-- `m2m.M2M_AUDIT_HANDLE`: default value is `TopcoderService`
-
-- `DATABASE_URL`: PostgreSQL database url.
-- `DB_SCHEMA_NAME`: string - PostgreSQL database target schema
-- `PROJECT_API_URL`: the project service url
-- `TC_API`: the Topcoder v5 url
-- `ORG_ID`: the organization id
-- `TOPCODER_SKILL_PROVIDER_ID`: the referenced skill provider id
-
-- `esConfig.HOST`: the elasticsearch host
-- `esConfig.ES_INDEX_JOB`: the job index
-- `esConfig.ES_INDEX_JOB_CANDIDATE`: the job candidate index
-- `esConfig.ES_INDEX_RESOURCE_BOOKING`: the resource booking index
-- `esConfig.AWS_REGION`: The Amazon region to use when using AWS Elasticsearch service
-- `esConfig.ELASTICCLOUD.id`: The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
-- `esConfig.ELASTICCLOUD.username`: The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
-- `esConfig.ELASTICCLOUD.password`: The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
-
-- `BUSAPI_URL`: Topcoder Bus API URL
-- `KAFKA_ERROR_TOPIC`: The error topic at which bus api will publish any errors
-- `KAFKA_MESSAGE_ORIGINATOR`: The originator value for the kafka messages
-
-- `TAAS_JOB_CREATE_TOPIC`: the create job entity Kafka message topic
-- `TAAS_JOB_UPDATE_TOPIC`: the update job entity Kafka message topic
-- `TAAS_JOB_DELETE_TOPIC`: the delete job entity Kafka message topic
-- `TAAS_JOB_CANDIDATE_CREATE_TOPIC`: the create job candidate entity Kafka message topic
-- `TAAS_JOB_CANDIDATE_UPDATE_TOPIC`: the update job candidate entity Kafka message topic
-- `TAAS_JOB_CANDIDATE_DELETE_TOPIC`: the delete job candidate entity Kafka message topic
-- `TAAS_RESOURCE_BOOKING_CREATE_TOPIC`: the create resource booking entity Kafka message topic
-- `TAAS_RESOURCE_BOOKING_UPDATE_TOPIC`: the update resource booking entity Kafka message topic
-- `TAAS_RESOURCE_BOOKING_DELETE_TOPIC`: the delete resource booking entity Kafka message topic
-
-
-## PostgreSQL Database Setup
-- Go to https://www.postgresql.org/ download and install the PostgreSQL.
-- Modify `DATABASE_URL` under `config/default.js` to meet your environment.
-- Run `npm run init-db` to create table(run `npm run init-db force` to force creating table)
-
## DB Migration
- `npm run migrate`: run any migration files which haven't run yet.
- `npm run migrate:undo`: revert most recent migration.
@@ -80,27 +23,186 @@ The following parameters can be set in the config file or via env variables:
- `database`: set via env `DB_NAME`; database name
- `host`: set via env `DB_HOST`; database host name
-## ElasticSearch Setup
-- Go to https://www.elastic.co/downloads/ download and install the elasticsearch.
-- Modify `esConfig` under `config/default.js` to meet your environment.
-- Run `npm run create-index` to create ES index.
-- Run `npm run delete-index` to delete ES index.
+### Steps to run locally
+1. 📦 Install npm dependencies
+
+ ```bash
+ npm install
+ ```
+
+2. ⚙ Local config
+
+   1. In the root directory create a `.env` file with the following environment variables. Values for the **Auth0 config** should be shared with you on the forum.
+ ```bash
+ # Auth0 config
+ AUTH0_URL=
+ AUTH0_AUDIENCE=
+ AUTH0_AUDIENCE_UBAHN=
+ AUTH0_CLIENT_ID=
+ AUTH0_CLIENT_SECRET=
+ AUTH0_PROXY_SERVER_URL=
+
+ # Locally deployed services (via docker-compose)
+ ES_HOST=http://dockerhost:9200
+ DATABASE_URL=postgres://postgres:postgres@dockerhost:5432/postgres
+ BUSAPI_URL=http://dockerhost:8002/v5
+ ```
+
+   - Values from this file will be picked up automatically by many `npm` commands.
+ - ⚠️ Never commit this file or its copy to the repository!
+
+   2. Set `dockerhost` to point to the IP address of Docker. The Docker IP address depends on your system. For example, if Docker runs on IP `127.0.0.1`, add the following line to your `/etc/hosts` file:
+ ```
+ 127.0.0.1 dockerhost
+ ```
+
+      Alternatively, you may update the `.env` file and replace `dockerhost` with your Docker IP address.
+
+3. 🚢 Start docker-compose with the services required to run Taas API locally
+
+   *(NOTE: Please ensure that you have Docker version 20.10 or above installed, since the docker-compose file uses a feature introduced in Docker 20.10. Run `docker --version` to check your version.)*
-## Local Deployment
+ ```bash
+ npm run services:up
+ ```
-- Install dependencies `npm install`
-- Run lint `npm run lint`
-- Run lint fix `npm run lint:fix`
-- Clear and init db `npm run init-db force`
-- Clear and create es index
+   Wait until all containers are fully started. A good indicator is the `es-processor` service; watch its logs until it reports a successful start:
+
+ ```bash
+ npm run services:logs -- -f es-processor
+ ```
+
+   Example of the logs from a successful start:
``` bash
- npm run delete-index # run this if you already created index
- npm run create-index
+ tc-taas-es-processor | Waiting for kafka-client to exit....
+ tc-taas-es-processor | kafka-client exited!
+ tc-taas-es-processor |
+ tc-taas-es-processor | > taas-es-processor@1.0.0 start /opt/app
+ tc-taas-es-processor | > node src/app.js
+ tc-taas-es-processor |
+ tc-taas-es-processor | [2021-01-21T02:44:43.442Z] app INFO : Starting kafka consumer
+ tc-taas-es-processor | 2021-01-21T02:44:44.534Z INFO no-kafka-client Joined group taas-es-processor generationId 1 as no-kafka-client-70c25a43-af93-495e-a123-0c4f4ea389eb
+ tc-taas-es-processor | 2021-01-21T02:44:44.534Z INFO no-kafka-client Elected as group leader
+ tc-taas-es-processor | 2021-01-21T02:44:44.614Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.create:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.615Z DEBUG no-kafka-client Subscribed to taas.job.create:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.615Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.delete:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.616Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.delete:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.616Z DEBUG no-kafka-client Subscribed to taas.jobcandidate.update:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.617Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.create:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.617Z DEBUG no-kafka-client Subscribed to taas.job.delete:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.618Z DEBUG no-kafka-client Subscribed to taas.job.update:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | 2021-01-21T02:44:44.618Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.update:0 offset 0 leader kafka:9093
+ tc-taas-es-processor | [2021-01-21T02:44:44.619Z] app INFO : Initialized.......
+ tc-taas-es-processor | [2021-01-21T02:44:44.623Z] app INFO : taas.job.create,taas.job.update,taas.job.delete,taas.jobcandidate.create,taas.jobcandidate.update,taas.jobcandidate.delete,taas.resourcebooking.create,taas.resourcebooking.update,taas.resourcebooking.delete
+ tc-taas-es-processor | [2021-01-21T02:44:44.623Z] app INFO : Kick Start.......
+ tc-taas-es-processor | ********** Topcoder Health Check DropIn listening on port 3001
+ tc-taas-es-processor | Topcoder Health Check DropIn started and ready to roll
```
-- Start app `npm start`
-- App is running at `http://localhost:3000`
+
+
+   More details about the docker-compose configuration:
+
+
+   This docker-compose file starts the following services:
+ | Service | Name | Port |
+ |----------|:-----:|:----:|
+ | PostgreSQL | db | 5432 |
+ | Elasticsearch | esearch | 9200 |
+ | Zookeeper | zookeeper | 2181 |
+ | Kafka | kafka | 9092 |
+ | [tc-bus-api](https://github.com/topcoder-platform/tc-bus-api) | bus-api | 8002 |
+ | [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) | es-processor | 5000 |
+
+   - As many of the Topcoder services in this docker-compose require Auth0 configuration for M2M calls, the docker-compose file passes the environment variables `AUTH0_CLIENT_ID`, `AUTH0_CLIENT_SECRET`, `AUTH0_URL`, `AUTH0_AUDIENCE` and `AUTH0_PROXY_SERVER_URL` to its containers. docker-compose takes them from the `.env` file if provided.
+
+   - `docker-compose` automatically creates the Kafka topics used by `taas-apis`, as listed in `./local/kafka-client/topics.txt`.
+
+   - To view the logs from any container inside docker-compose, use the following command, replacing `SERVICE_NAME` with the corresponding value under the **Name** column in the table above:
+
+ ```bash
+ npm run services:logs -- -f SERVICE_NAME
+ ```
+
+   - If you want to modify the code of any of the services run inside this docker-compose file, you can stop that service with `docker-compose -f local/docker-compose.yaml stop SERVICE_NAME` and run it separately, following its README file.
+     *NOTE: If Kafka (along with Zookeeper) is stopped and brought back up on the host machine, you will need to restart the `es-processor` service by running `docker-compose -f local/docker-compose.yaml restart es-processor` so the processor reconnects to the new Zookeeper.*
+
+   *NOTE: In production these dependencies/services are hosted and managed outside Taas API.*
+
+4. ♻ Init DB and ES
+
+ ```bash
+ npm run local:init
+ ```
+
+   This command does two things:
+ - create Database tables
+ - create Elasticsearch indexes
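+
+   If you later need to reset everything from scratch, the individual commands from the NPM Commands table below can be chained (just a convenience sketch):
+
+   ```bash
+   npm run init-db force && npm run delete-index && npm run create-index
+   ```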
+
+5. 🚀 Start Taas API
+
+ ```bash
+ npm run dev
+ ```
+
+   Runs the Taas API using `nodemon`, so it restarts automatically whenever a file is updated.
+ The API will be served on `http://localhost:3000`.
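+
+   As a quick smoke test you can hit one of the endpoints with `curl`. The route below is an assumption based on the default `BASE_PATH` (`/api/v5`) and the jobs service; most endpoints require an auth token, so even an HTTP 401 response confirms the server is up:
+
+   ```bash
+   curl -i http://localhost:3000/api/v5/jobs
+   ```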
+
+## NPM Commands
+
+| Command | Description |
+| -- | -- |
+| `npm start` | Start app. |
+| `npm run dev` | Start app using `nodemon`. |
+| `npm run lint` | Check for lint errors. |
+| `npm run lint:fix` | Check for lint errors and fix them automatically when possible. |
+| `npm run services:up` | Start services via docker-compose for local development. |
+| `npm run services:down` | Stop services via docker-compose for local development. |
+| `npm run services:logs -- -f SERVICE_NAME` | View logs of a service inside docker-compose. |
+| `npm run local:init` | Create database tables and Elasticsearch indexes. |
+| `npm run init-db` | Create database. |
+| `npm run init-db force` | Force re-creating database. |
+| `npm run create-index` | Create Elasticsearch indexes. |
+| `npm run delete-index` | Delete Elasticsearch indexes. |
+| `npm run migrate` | Run DB migration. |
+| `npm run migrate:undo` | Revert the most recently executed DB migration. |
+| `npm run test-data` | Insert test data. |
+| `npm run test` | Run tests. |
+| `npm run cov` | Run test with coverage. |
+
+## Kafka Commands
+
+You can use the following commands to manipulate Kafka topics and messages:
+
+(Replace `TOPIC_NAME` with the name of the desired topic)
+
+### Create Topic
+
+```bash
+docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic TOPIC_NAME
+```
+
+### List Topics
+
+```bash
+docker exec tc-taas-kafka /opt/kafka/bin/kafka-topics.sh --list --zookeeper zookeeper:2181
+```
+
+### Watch Topic
+
+```bash
+docker exec tc-taas-kafka /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic TOPIC_NAME
+```
+
+### Post Message to Topic (from stdin)
+
+```bash
+docker exec -it tc-taas-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-list localhost:9092 --topic TOPIC_NAME
+```
+
+- Enter or copy/paste the message into the console after starting this command.
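+
+  As an illustration, messages on the Topcoder bus follow an envelope with `topic`, `originator`, `timestamp`, `mime-type` and `payload` fields; the payload contents below are hypothetical and only show the shape:
+
+  ```json
+  { "topic": "taas.job.create", "originator": "taas-api", "timestamp": "2021-01-21T00:00:00.000Z", "mime-type": "application/json", "payload": { "projectId": 123 } }
+  ```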
## Local Deployment with Docker
diff --git a/config/default.js b/config/default.js
index d726ca58..578c1484 100644
--- a/config/default.js
+++ b/config/default.js
@@ -1,63 +1,104 @@
-require('dotenv').config()
module.exports = {
+ // the log level
LOG_LEVEL: process.env.LOG_LEVEL || 'debug',
+ // the server port
PORT: process.env.PORT || 3000,
+ // the server api base path
BASE_PATH: process.env.BASE_PATH || '/api/v5',
+ // The authorization secret used during token verification.
AUTH_SECRET: process.env.AUTH_SECRET || 'mysecret',
+ // The valid issuer of tokens, a json array contains valid issuer.
VALID_ISSUERS: process.env.VALID_ISSUERS || '["https://api.topcoder-dev.com", "https://api.topcoder.com", "https://topcoder-dev.auth0.com/", "https://auth.topcoder-dev.com/"]',
+ // Auth0 URL, used to get TC M2M token
AUTH0_URL: process.env.AUTH0_URL,
+ // Auth0 audience, used to get TC M2M token
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE,
+ // Auth0 audience for U-Bahn
AUTH0_AUDIENCE_UBAHN: process.env.AUTH0_AUDIENCE_UBAHN,
+ // Auth0 token cache time, used to get TC M2M token
TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
+ // Auth0 client id, used to get TC M2M token
AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
+ // Auth0 client secret, used to get TC M2M token
AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
+ // Proxy Auth0 URL, used to get TC M2M token
AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL,
m2m: {
+ // default user ID for m2m user
M2M_AUDIT_USER_ID: process.env.M2M_AUDIT_USER_ID || '00000000-0000-0000-0000-000000000000',
+ // default handle name for m2m user
M2M_AUDIT_HANDLE: process.env.M2M_AUDIT_HANDLE || 'TopcoderService'
},
+ // the Topcoder v5 url
TC_API: process.env.TC_API || 'https://api.topcoder-dev.com/v5',
+ // the organization id
ORG_ID: process.env.ORG_ID || '36ed815b-3da1-49f1-a043-aaed0a4e81ad',
+ // the referenced skill provider id
TOPCODER_SKILL_PROVIDER_ID: process.env.TOPCODER_SKILL_PROVIDER_ID || '9cc0795a-6e12-4c84-9744-15858dba1861',
+ // the TC API for v3 users
TOPCODER_USERS_API: process.env.TOPCODER_USERS_API || 'https://api.topcoder-dev.com/v3/users',
+ // PostgreSQL database url.
DATABASE_URL: process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/postgres',
+ // string - PostgreSQL database target schema
DB_SCHEMA_NAME: process.env.DB_SCHEMA_NAME || 'bookings',
+ // the project service url
PROJECT_API_URL: process.env.PROJECT_API_URL || 'https://api.topcoder-dev.com',
esConfig: {
+ // the elasticsearch host
HOST: process.env.ES_HOST || 'http://localhost:9200',
ELASTICCLOUD: {
+ // The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
id: process.env.ELASTICCLOUD_ID,
+ // The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
username: process.env.ELASTICCLOUD_USERNAME,
+ // The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
password: process.env.ELASTICCLOUD_PASSWORD
},
+ // The Amazon region to use when using AWS Elasticsearch service
    AWS_REGION: process.env.AWS_REGION || 'us-east-1',
+ // the job index
ES_INDEX_JOB: process.env.ES_INDEX_JOB || 'job',
+ // the job candidate index
ES_INDEX_JOB_CANDIDATE: process.env.ES_INDEX_JOB_CANDIDATE || 'job_candidate',
+ // the resource booking index
ES_INDEX_RESOURCE_BOOKING: process.env.ES_INDEX_RESOURCE_BOOKING || 'resource_booking'
},
+ // Topcoder Bus API URL
BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5',
+ // The error topic at which bus api will publish any errors
KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'common.error.reporting',
+ // The originator value for the kafka messages
KAFKA_MESSAGE_ORIGINATOR: process.env.KAFKA_MESSAGE_ORIGINATOR || 'taas-api',
// topics for job service
+ // the create job entity Kafka message topic
TAAS_JOB_CREATE_TOPIC: process.env.TAAS_JOB_CREATE_TOPIC || 'taas.job.create',
+ // the update job entity Kafka message topic
TAAS_JOB_UPDATE_TOPIC: process.env.TAAS_JOB_UPDATE_TOPIC || 'taas.job.update',
+ // the delete job entity Kafka message topic
TAAS_JOB_DELETE_TOPIC: process.env.TAAS_JOB_DELETE_TOPIC || 'taas.job.delete',
// topics for jobcandidate service
+ // the create job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_CREATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_CREATE_TOPIC || 'taas.jobcandidate.create',
+ // the update job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_UPDATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_UPDATE_TOPIC || 'taas.jobcandidate.update',
+ // the delete job candidate entity Kafka message topic
TAAS_JOB_CANDIDATE_DELETE_TOPIC: process.env.TAAS_JOB_CANDIDATE_DELETE_TOPIC || 'taas.jobcandidate.delete',
// topics for job service
+ // the create resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_CREATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_CREATE_TOPIC || 'taas.resourcebooking.create',
+ // the update resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_UPDATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC || 'taas.resourcebooking.update',
+ // the delete resource booking entity Kafka message topic
TAAS_RESOURCE_BOOKING_DELETE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_DELETE_TOPIC || 'taas.resourcebooking.delete'
}
diff --git a/local/docker-compose.yaml b/local/docker-compose.yaml
new file mode 100644
index 00000000..1709207e
--- /dev/null
+++ b/local/docker-compose.yaml
@@ -0,0 +1,99 @@
+version: '2.4'
+services:
+ zookeeper:
+ image: wurstmeister/zookeeper
+ container_name: tc-taas-zookeeper
+ ports:
+ - "2181:2181"
+ environment:
+ zk_id: "1"
+ kafka:
+ image: wurstmeister/kafka
+ container_name: tc-taas-kafka
+ ports:
+ - "9092:9092"
+ depends_on:
+ - zookeeper
+ environment:
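+      # INSIDE (kafka:9093) is used by containers on the compose network; OUTSIDE (localhost:9092) is exposed to the host machine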
+ KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9093,OUTSIDE://localhost:9092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
+ KAFKA_LISTENERS: INSIDE://0.0.0.0:9093,OUTSIDE://0.0.0.0:9092
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
+ esearch:
+ image: elasticsearch:7.7.1
+ container_name: tc-taas-es
+ ports:
+ - "9200:9200"
+ environment:
+ - discovery.type=single-node
+ db:
+ image: postgres
+ container_name: tc-taas-postgres
+ ports:
+ - "5432:5432"
+ environment:
+ - POSTGRES_USER=postgres
+ - POSTGRES_PASSWORD=postgres
+ kafka-client:
+ build: ./kafka-client
+ container_name: tc-taas-kafka-client
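+    # "host-gateway" maps host.docker.internal to the host's IP; this requires Docker 20.10+ (see the README prerequisites)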
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ bus-api:
+ container_name: tc-taas-bus-api
+ build:
+ context: ./generic-tc-service
+ args:
+ NODE_VERSION: 8.11.3
+ GIT_URL: https://github.com/topcoder-platform/tc-bus-api
+ GIT_BRANCH: dev
+ BYPASS_TOKEN_VALIDATION: 1
+ command: start kafka-client
+ expose:
+ - "3000"
+ ports:
+ - "8002:3000"
+ depends_on:
+ - kafka-client
+ environment:
+ - PORT=3000
+ - KAFKA_URL=http://host.docker.internal:9092
+ - JWT_TOKEN_SECRET=secret
+ - VALID_ISSUERS="[\"https:\/\/topcoder-newauth.auth0.com\/\",\"https:\/\/api.topcoder-dev.com\",\"https:\/\/topcoder-dev.auth0.com\/\"]"
+ - AUTH0_CLIENT_ID
+ - AUTH0_CLIENT_SECRET
+ - AUTH0_URL
+ - AUTH0_AUDIENCE
+ - AUTH0_PROXY_SERVER_URL
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ - "localhost:host-gateway"
+ es-processor:
+ container_name: tc-taas-es-processor
+ build:
+ context: ./generic-tc-service
+ args:
+ NODE_VERSION: 12
+ GIT_URL: https://github.com/topcoder-platform/taas-es-processor
+ GIT_BRANCH: dev
+ BYPASS_TOKEN_VALIDATION: 0
+ command: start kafka-client
+ depends_on:
+ - kafka-client
+ expose:
+ - "3001"
+ ports:
+ - "5000:3001"
+ environment:
+ - PORT=3001
+ - KAFKA_URL=http://host.docker.internal:9092
+ - ES_HOST=http://host.docker.internal:9200
+ - AUTH0_CLIENT_ID
+ - AUTH0_CLIENT_SECRET
+ - AUTH0_URL
+ - AUTH0_AUDIENCE
+ - AUTH0_PROXY_SERVER_URL
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+ - "localhost:host-gateway"
diff --git a/local/generic-tc-service/Dockerfile b/local/generic-tc-service/Dockerfile
new file mode 100644
index 00000000..e3113c7f
--- /dev/null
+++ b/local/generic-tc-service/Dockerfile
@@ -0,0 +1,15 @@
+ARG NODE_VERSION=8.11.3
+
+FROM node:$NODE_VERSION
+ARG GIT_URL
+ARG GIT_BRANCH
+ARG BYPASS_TOKEN_VALIDATION
+
+RUN git clone $GIT_URL /opt/app
+WORKDIR /opt/app
+RUN git checkout -b node-branch origin/$GIT_BRANCH
+
+RUN npm install
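+# When BYPASS_TOKEN_VALIDATION=1, patch tc-core-library-js so the JWT verifier returns the decoded token payload without verifying its signature (for local development only)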
+RUN if [ $BYPASS_TOKEN_VALIDATION -eq 1 ]; then sed -i '/decodedToken = jwt.decode/a \ callback(undefined, decodedToken.payload); return;' node_modules/tc-core-library-js/lib/auth/verifier.js; fi
+COPY docker-entrypoint.sh /opt/
+ENTRYPOINT ["/opt/docker-entrypoint.sh"]
diff --git a/local/generic-tc-service/docker-entrypoint.sh b/local/generic-tc-service/docker-entrypoint.sh
new file mode 100755
index 00000000..24cb1fe8
--- /dev/null
+++ b/local/generic-tc-service/docker-entrypoint.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
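+# Usage: docker-entrypoint.sh <npm-script> [container-to-wait-for]
+# If a second argument is given, wait until that container stops responding to ping before starting the npm script.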
+if [ $# -eq 2 ]; then
+ echo "Waiting for $2 to exit...."
+ while ping -c1 $2 &>/dev/null
+ do
+ sleep 1
+ done
+ echo "$2 exited!"
+fi
+
+tail -n+3 /etc/hosts > /tmp/hosts && cp /tmp/hosts /etc/hosts # remove default localhost
+cd /opt/app/ && npm run $1
diff --git a/local/kafka-client/Dockerfile b/local/kafka-client/Dockerfile
new file mode 100644
index 00000000..15c20839
--- /dev/null
+++ b/local/kafka-client/Dockerfile
@@ -0,0 +1,5 @@
+FROM wurstmeister/kafka
+WORKDIR /app/
+COPY topics.txt .
+COPY create-topics.sh .
+ENTRYPOINT ["/app/create-topics.sh"]
diff --git a/local/kafka-client/create-topics.sh b/local/kafka-client/create-topics.sh
new file mode 100755
index 00000000..df00f7ea
--- /dev/null
+++ b/local/kafka-client/create-topics.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+until /opt/kafka/bin/kafka-topics.sh --list --zookeeper host.docker.internal:2181 > exists-topics.txt
+ do sleep 1
+done
+
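+# Create only the topics that do not exist yet: listing exists-topics.txt twice makes every existing topic appear at least twice in the sorted stream, so `uniq -u` keeps only the new ones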
+while read topic; do
+ /opt/kafka/bin/kafka-topics.sh --create --if-not-exists --zookeeper host.docker.internal:2181 --partitions 1 --replication-factor 1 --topic $topic
+done < <(sort topics.txt exists-topics.txt exists-topics.txt | uniq -u)
diff --git a/local/kafka-client/topics.txt b/local/kafka-client/topics.txt
new file mode 100644
index 00000000..a392a8fc
--- /dev/null
+++ b/local/kafka-client/topics.txt
@@ -0,0 +1,9 @@
+taas.job.create
+taas.jobcandidate.create
+taas.resourcebooking.create
+taas.job.update
+taas.jobcandidate.update
+taas.resourcebooking.update
+taas.job.delete
+taas.jobcandidate.delete
+taas.resourcebooking.delete
diff --git a/migrations/2021-01-13-make-some-job-fields-longer.js b/migrations/2021-01-13-make-some-job-fields-longer.js
index 286fd888..7b2fc2b4 100644
--- a/migrations/2021-01-13-make-some-job-fields-longer.js
+++ b/migrations/2021-01-13-make-some-job-fields-longer.js
@@ -7,14 +7,14 @@
module.exports = {
up: queryInterface => {
return Promise.all([
- queryInterface.sequelize.query(`ALTER TABLE bookings.jobs ALTER COLUMN title TYPE VARCHAR(128)`),
- queryInterface.sequelize.query(`ALTER TABLE bookings.jobs ALTER COLUMN description TYPE TEXT`)
+ queryInterface.sequelize.query('ALTER TABLE bookings.jobs ALTER COLUMN title TYPE VARCHAR(128)'),
+ queryInterface.sequelize.query('ALTER TABLE bookings.jobs ALTER COLUMN description TYPE TEXT')
])
},
down: queryInterface => {
return Promise.all([
- queryInterface.sequelize.query(`ALTER TABLE bookings.jobs ALTER COLUMN title TYPE VARCHAR(64)`),
- queryInterface.sequelize.query(`ALTER TABLE bookings.jobs ALTER COLUMN description TYPE VARCHAR(255)`)
+ queryInterface.sequelize.query('ALTER TABLE bookings.jobs ALTER COLUMN title TYPE VARCHAR(64)'),
+ queryInterface.sequelize.query('ALTER TABLE bookings.jobs ALTER COLUMN description TYPE VARCHAR(255)')
])
}
}
diff --git a/package-lock.json b/package-lock.json
index 5b90459b..d904d878 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1796,7 +1796,69 @@
"dotenv": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
- "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw=="
+ "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==",
+ "dev": true
+ },
+ "dotenv-cli": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/dotenv-cli/-/dotenv-cli-4.0.0.tgz",
+ "integrity": "sha512-ByKEec+ashePEXthZaA1fif9XDtcaRnkN7eGdBDx3HHRjwZ/rA1go83Cbs4yRrx3JshsCf96FjAyIA2M672+CQ==",
+ "dev": true,
+ "requires": {
+ "cross-spawn": "^7.0.1",
+ "dotenv": "^8.1.0",
+ "dotenv-expand": "^5.1.0",
+ "minimist": "^1.1.3"
+ },
+ "dependencies": {
+ "cross-spawn": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+ "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ }
+ },
+ "path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true
+ },
+ "shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^3.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ }
+ }
+ },
+ "dotenv-expand": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz",
+ "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==",
+ "dev": true
},
"dottie": {
"version": "2.0.2",
@@ -3115,9 +3177,9 @@
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
},
"ini": {
- "version": "1.3.5",
- "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
- "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
+ "version": "1.3.8",
+ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
+ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
"dev": true
},
"inquirer": {
diff --git a/package.json b/package.json
index 15788a91..1df401a2 100644
--- a/package.json
+++ b/package.json
@@ -5,17 +5,21 @@
"main": "app.js",
"scripts": {
"start": "node app.js",
- "dev": "nodemon app.js",
+ "dev": "dotenv nodemon app.js",
"lint": "standard",
"lint:fix": "standard --fix",
- "init-db": "node src/init-db.js",
- "create-index": "node scripts/createIndex.js",
- "delete-index": "node scripts/deleteIndex.js",
- "migrate": "npx sequelize db:migrate",
- "migrate:undo": "npx sequelize db:migrate:undo",
- "test-data": "node scripts/insert-es-data.js",
- "test": "mocha test/unit/*.test.js --timeout 30000 --exit",
- "cov": "nyc --reporter=html --reporter=text mocha test/unit/*.test.js --timeout 30000 --exit"
+ "services:up": "docker-compose -f local/docker-compose.yaml up -d",
+ "services:down": "docker-compose -f local/docker-compose.yaml down",
+ "services:logs": "docker-compose -f local/docker-compose.yaml logs",
+ "local:init": "npm run init-db && npm run create-index",
+ "init-db": "dotenv node src/init-db.js",
+ "create-index": "dotenv node scripts/createIndex.js",
+ "delete-index": "dotenv node scripts/deleteIndex.js",
+ "migrate": "dotenv npx sequelize db:migrate",
+ "migrate:undo": "dotenv npx sequelize db:migrate:undo",
+ "test-data": "dotenv node scripts/insert-es-data.js",
+ "test": "dotenv mocha test/unit/*.test.js --timeout 30000 --exit",
+ "cov": "dotenv nyc --reporter=html --reporter=text mocha test/unit/*.test.js --timeout 30000 --exit"
},
"keywords": [],
"author": "",
@@ -27,7 +31,6 @@
"config": "^3.3.2",
"cors": "^2.8.5",
"date-fns": "^2.16.1",
- "dotenv": "^8.2.0",
"express": "^4.17.1",
"express-interceptor": "^1.2.0",
"get-parameter-names": "^0.3.0",
@@ -47,6 +50,7 @@
},
"devDependencies": {
"chai": "^4.2.0",
+ "dotenv-cli": "^4.0.0",
"mocha": "^8.1.3",
"nodemon": "^2.0.4",
"nyc": "^15.1.0",
diff --git a/src/models/Job.js b/src/models/Job.js
index 14cec753..d6cc3955 100644
--- a/src/models/Job.js
+++ b/src/models/Job.js
@@ -69,7 +69,7 @@ module.exports = (sequelize) => {
type: Sequelize.STRING(255)
},
description: {
- type: Sequelize.TEXT, // technically unlimited length
+ type: Sequelize.TEXT // technically unlimited length
},
title: {
type: Sequelize.STRING(128),