From decf81951eec8472a521d1140c299cb1550ca662 Mon Sep 17 00:00:00 2001 From: veshu Date: Thu, 12 Nov 2020 09:39:38 +0545 Subject: [PATCH 1/3] feat: es segregation --- .gitignore | 5 +- README.md | 66 ++++- Verification.md | 50 ++-- app.js | 6 +- build.sh | 0 config/default.js | 28 +- config/test.js | 7 +- docker/Dockerfile | 2 +- docker/docker-compose.yml | 11 + docker/sample.api.env | 9 + ...coder-bookings-api.postman_collection.json | 145 ++++++++-- docs/swagger.yaml | 44 ++++ package-lock.json | 219 ++++++++++++++++ package.json | 2 + scripts/createIndex.js | 5 +- scripts/deleteIndex.js | 6 +- src/bootstrap.js | 3 +- src/common/helper.js | 91 ++++++- src/common/logger.js | 64 +++-- src/controllers/HealthCheckController.js | 37 +++ src/controllers/JobCandidateController.js | 2 +- src/controllers/JobController.js | 2 +- src/controllers/ResourceBookingController.js | 2 +- src/init-db.js | 8 +- src/models/Job.js | 2 +- src/models/JobCandidate.js | 2 +- src/models/ResourceBooking.js | 2 +- src/routes/HealthCheckRoutes.js | 12 + src/routes/index.js | 4 +- src/services/JobCandidateService.js | 145 +++++----- src/services/JobService.js | 247 +++++++++++------- src/services/ResourceBookingService.js | 168 +++++++----- test/unit/JobCandidateService.test.js | 89 +++++-- test/unit/JobService.test.js | 102 ++++++-- test/unit/ResourceBookingService.test.js | 125 ++++++--- test/unit/helper.test.js | 4 +- 36 files changed, 1296 insertions(+), 420 deletions(-) mode change 100755 => 100644 build.sh create mode 100644 docker/docker-compose.yml create mode 100644 docker/sample.api.env create mode 100644 src/controllers/HealthCheckController.js create mode 100644 src/routes/HealthCheckRoutes.js diff --git a/.gitignore b/.gitignore index d5a06f7a..d4e0230b 100644 --- a/.gitignore +++ b/.gitignore @@ -113,4 +113,7 @@ dist .yarn/unplugged .yarn/build-state.yml .yarn/install-state.gz -.pnp.* \ No newline at end of file +.pnp.* + +# api.env +api.env \ No newline at end of file diff --git a/README.md b/README.md index ff0fcab9..88bca715 100644 --- a/README.md +++ b/README.md @@ -19,27 +19,49 @@ The following parameters can be set in config files or in env variables: - `BASE_PATH`: the server api base path - `AUTH_SECRET`: The authorization secret used during token verification. - `VALID_ISSUERS`: The valid issuer of tokens, a json array contains valid issuer. + - `AUTH0_URL`: Auth0 URL, used to get TC M2M token - `AUTH0_AUDIENCE`: Auth0 audience, used to get TC M2M token +- `AUTH0_AUDIENCE_FOR_BUS_API`: Auth0 audience, used to get TC M2M token to be used in bus api client - `TOKEN_CACHE_TIME`: Auth0 token cache time, used to get TC M2M token - `AUTH0_CLIENT_ID`: Auth0 client id, used to get TC M2M token - `AUTH0_CLIENT_SECRET`: Auth0 client secret, used to get TC M2M token - `AUTH0_PROXY_SERVER_URL`: Proxy Auth0 URL, used to get TC M2M token + - `DATABASE_URL`: PostgreSQL database url. 
- `DB_SCHEMA_NAME`: string - PostgreSQL database target schema - `PROJECT_API_URL`: the project service url - `TC_API`: the Topcoder v5 url - `ORG_ID`: the organization id -- `HOST`: the elasticsearch host -- `ES_INDEX_JOB`: the job index -- `ES_INDEX_JOB_CANDIDATE`: the job candidate index -- `ES_INDEX_RESOURCE_BOOKING`: the resource booking index + +- `esConfig.HOST`: the elasticsearch host +- `esConfig.ES_INDEX_JOB`: the job index +- `esConfig.ES_INDEX_JOB_CANDIDATE`: the job candidate index +- `esConfig.ES_INDEX_RESOURCE_BOOKING`: the resource booking index +- `esConfig.AWS_REGION`: The Amazon region to use when using AWS Elasticsearch service +- `esConfig.ELASTICCLOUD.id`: The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this +- `esConfig.ELASTICCLOUD.username`: The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud +- `esConfig.ELASTICCLOUD.password`: The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud + +- `BUSAPI_URL`: Topcoder Bus API URL +- `KAFKA_ERROR_TOPIC`: The error topic at which bus api will publish any errors +- `KAFKA_MESSAGE_ORIGINATOR`: The originator value for the kafka messages + +- `TAAS_JOB_CREATE_TOPIC`: the create job entity Kafka message topic +- `TAAS_JOB_UPDATE_TOPIC`: the update job entity Kafka message topic +- `TAAS_JOB_DELETE_TOPIC`: the delete job entity Kafka message topic +- `TAAS_JOB_CANDIDATE_CREATE_TOPIC`: the create job candidate entity Kafka message topic +- `TAAS_JOB_CANDIDATE_UPDATE_TOPIC`: the update job candidate entity Kafka message topic +- `TAAS_JOB_CANDIDATE_DELETE_TOPIC`: the delete job candidate entity Kafka message topic +- `TAAS_RESOURCE_BOOKING_CREATE_TOPIC`: the create resource booking entity Kafka message topic +- `TAAS_RESOURCE_BOOKING_UPDATE_TOPIC`: the update resource booking entity Kafka message topic +- `TAAS_RESOURCE_BOOKING_DELETE_TOPIC`: the delete resource booking entity Kafka message topic ## PostgreSQL Database Setup - Go to https://www.postgresql.org/ download and install the PostgreSQL. - Modify `DATABASE_URL` under `config/default.js` to meet your environment. -- Run `npm run init-db` to create table +- Run `npm run init-db` to create table(run `npm run init-db force` to force creating table) ## ElasticSearch Setup - Go to https://www.elastic.co/downloads/ download and install the elasticsearch. @@ -52,17 +74,41 @@ The following parameters can be set in config files or in env variables: - Install dependencies `npm install` - Run lint `npm run lint` - Run lint fix `npm run lint:fix` -- Clear and init db `npm run init-db` -- Clear and create es index `npm run delete-index && npm run create-index` +- Clear and init db `npm run init-db force` +- Clear and create es index + + ``` bash + npm run delete-index # run this if you already created index + npm run create-index + ``` + - Start app `npm start` - App is running at `http://localhost:3000` -## Docker Deployment -- Run `docker-compose up` +## Local Deployment with Docker + +Make sure all config values are right, and you can run on local successful, then run below commands + +1. Navigate to the directory `docker` + +2. Rename the file `sample.api.env` to `api.env` + +3. 
Set the required AUTH0 configurations, PostgreSQL Database url and ElasticSearch host in the file `api.env` + + Note that you can also add other variables to `api.env`, with `=` format per line. + If using AWS ES you should add `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` variables as well. + +4. Once that is done, run the following command + + ```bash + docker-compose up + ``` + +5. When you are running the application for the first time, It will take some time initially to download the image and install the dependencies ## Testing - Run `npm run test` to execute unit tests - Run `npm run cov` to execute unit tests and generate coverage report. ## Verification -Refer to the verification document [Verification.md](Verification.md) \ No newline at end of file +Refer to the verification document [Verification.md](Verification.md) diff --git a/Verification.md b/Verification.md index 4aa42dc0..4eb15eb6 100644 --- a/Verification.md +++ b/Verification.md @@ -1,35 +1,37 @@ # Topcoder Bookings API ## Postman test -- Refer `ReadMe.md` to start the app and postgreSQL database -- Run `npm run init-db` to init db before testing. -- Run `npm run create-index` to create es index before testing +- start PostgreSQL and ElasticSearch +- Refer `README.md#Local Deployment` to start the app - Import Postman collection and environment file in the `docs` folder to Postman and execute the scripts to validate the app from top to bottom. ## Unit test Coverage - 63 passing (43s) +``` bash + 78 passing (137ms) - -File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s ----------------------------|---------|----------|---------|---------|------------------- -All files | 99.49 | 97.62 | 100 | 99.74 | - config | 100 | 100 | 100 | 100 | - default.js | 100 | 100 | 100 | 100 | - test.js | 100 | 100 | 100 | 100 | - src | 90.48 | 50 | 100 | 94.12 | - bootstrap.js | 90.48 | 50 | 100 | 94.12 | 18 - src/common | 100 | 100 | 100 | 100 | - errors.js | 100 | 100 | 100 | 100 | - helper.js | 100 | 100 | 100 | 100 | - src/models | 100 | 92.86 | 100 | 100 | - Job.js | 100 | 100 | 100 | 100 | - JobCandidate.js | 100 | 100 | 100 | 100 | - ResourceBooking.js | 100 | 100 | 100 | 100 | - index.js | 100 | 80 | 100 | 100 | 29 - src/services | 100 | 100 | 100 | 100 | - JobCandidateService.js | 100 | 100 | 100 | 100 | - JobService.js | 100 | 100 | 100 | 100 | - ResourceBookingService.js | 100 | 100 | 100 | 100 | +File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s +----------------------------|---------|----------|---------|---------|------------------- +All files | 98.1 | 91.55 | 100 | 98.28 | + config | 100 | 100 | 100 | 100 | + default.js | 100 | 100 | 100 | 100 | + test.js | 100 | 100 | 100 | 100 | + src | 90.91 | 50 | 100 | 94.44 | + bootstrap.js | 90.91 | 50 | 100 | 94.44 | 18 + src/common | 97.17 | 90.91 | 100 | 97.12 | + errors.js | 100 | 50 | 100 | 100 | 23 + helper.js | 96.88 | 92.86 | 100 | 96.81 | 94,176,284 + src/models | 100 | 92.86 | 100 | 100 | + Job.js | 100 | 100 | 100 | 100 | + JobCandidate.js | 100 | 100 | 100 | 100 | + ResourceBooking.js | 100 | 100 | 100 | 100 | + index.js | 100 | 80 | 100 | 100 | 29 + src/services | 98.58 | 89.25 | 100 | 98.57 | + JobCandidateService.js | 98.77 | 88 | 100 | 98.77 | 37 + JobService.js | 98.21 | 87.18 | 100 | 98.18 | 73,318 + ResourceBookingService.js | 98.86 | 93.1 | 100 | 98.86 | 54 +----------------------------|---------|----------|---------|---------|------------------- +``` diff --git a/app.js b/app.js index 0c70b726..a0a9e0e1 100644 --- a/app.js +++ 
b/app.js @@ -8,8 +8,8 @@ const config = require('config') const express = require('express') const cors = require('cors') const HttpStatus = require('http-status-codes') -const logger = require('./src/common/logger') const interceptor = require('express-interceptor') +const logger = require('./src/common/logger') // setup express app const app = express() @@ -52,7 +52,7 @@ require('./app-routes')(app) // The error handler // eslint-disable-next-line no-unused-vars app.use((err, req, res, next) => { - logger.logFullError(err, req.signature || `${req.method} ${req.url}`) + logger.logFullError(err, { component: 'app', signature: req.signature || `${req.method}_${req.url}` }) const errorResponse = {} const status = err.isJoi ? HttpStatus.BAD_REQUEST : (err.status || err.httpStatus || HttpStatus.INTERNAL_SERVER_ERROR) @@ -87,7 +87,7 @@ app.use((err, req, res, next) => { }) const server = app.listen(app.get('port'), () => { - logger.info(`Express server listening on port ${app.get('port')}`) + logger.info({ component: 'app', message: `Express server listening on port ${app.get('port')}` }) }) if (process.env.NODE_ENV === 'test') { diff --git a/build.sh b/build.sh old mode 100755 new mode 100644 diff --git a/config/default.js b/config/default.js index be8346ef..333d5f55 100644 --- a/config/default.js +++ b/config/default.js @@ -8,6 +8,7 @@ module.exports = { VALID_ISSUERS: process.env.VALID_ISSUERS || '["https://api.topcoder-dev.com", "https://api.topcoder.com", "https://topcoder-dev.auth0.com/", "https://auth.topcoder-dev.com/"]', AUTH0_URL: process.env.AUTH0_URL, AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE, + AUTH0_AUDIENCE_FOR_BUS_API: process.env.AUTH0_AUDIENCE_FOR_BUS_API, TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME, AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID, AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET, @@ -22,8 +23,33 @@ module.exports = { esConfig: { HOST: process.env.ES_HOST || 'http://localhost:9200', + + ELASTICCLOUD: { + id: process.env.ELASTICCLOUD_ID, + username: process.env.ELASTICCLOUD_USERNAME, + password: process.env.ELASTICCLOUD_PASSWORD + }, + + AWS_REGION: process.env.AWS_REGION || 'us-east-1', // AWS Region to be used if we use AWS ES + ES_INDEX_JOB: process.env.ES_INDEX_JOB || 'job', ES_INDEX_JOB_CANDIDATE: process.env.ES_INDEX_JOB_CANDIDATE || 'job_candidate', ES_INDEX_RESOURCE_BOOKING: process.env.ES_INDEX_RESOURCE_BOOKING || 'resource_booking' - } + }, + + BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5', + KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'common.error.reporting', + KAFKA_MESSAGE_ORIGINATOR: process.env.KAFKA_MESSAGE_ORIGINATOR || 'taas-api', + // topics for job service + TAAS_JOB_CREATE_TOPIC: process.env.TAAS_JOB_CREATE_TOPIC || 'taas.job.create', + TAAS_JOB_UPDATE_TOPIC: process.env.TAAS_JOB_UPDATE_TOPIC || 'taas.job.update', + TAAS_JOB_DELETE_TOPIC: process.env.TAAS_JOB_DELETE_TOPIC || 'taas.job.delete', + // topics for jobcandidate service + TAAS_JOB_CANDIDATE_CREATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_CREATE_TOPIC || 'taas.jobcandidate.create', + TAAS_JOB_CANDIDATE_UPDATE_TOPIC: process.env.TAAS_JOB_CANDIDATE_UPDATE_TOPIC || 'taas.jobcandidate.update', + TAAS_JOB_CANDIDATE_DELETE_TOPIC: process.env.TAAS_JOB_CANDIDATE_DELETE_TOPIC || 'taas.jobcandidate.delete', + // topics for job service + TAAS_RESOURCE_BOOKING_CREATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_CREATE_TOPIC || 'taas.resourcebooking.create', + TAAS_RESOURCE_BOOKING_UPDATE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC || 
'taas.resourcebooking.update', + TAAS_RESOURCE_BOOKING_DELETE_TOPIC: process.env.TAAS_RESOURCE_BOOKING_DELETE_TOPIC || 'taas.resourcebooking.delete' } diff --git a/config/test.js b/config/test.js index a818158a..baa8e7f8 100644 --- a/config/test.js +++ b/config/test.js @@ -1,3 +1,8 @@ module.exports = { - LOG_LEVEL: process.env.LOG_LEVEL || 'info' + LOG_LEVEL: process.env.LOG_LEVEL || 'info', + AUTH0_URL: 'http://example.com', + AUTH0_AUDIENCE: 'http://example.com', + AUTH0_AUDIENCE_FOR_BUS_API: 'http://example.com', + AUTH0_CLIENT_ID: 'fake_id', + AUTH0_CLIENT_SECRET: 'fake_secret' } diff --git a/docker/Dockerfile b/docker/Dockerfile index a0c7e4e8..ff4253fb 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -14,4 +14,4 @@ RUN npm install COPY . . # Run the app -CMD [ "node", "app.js" ] \ No newline at end of file +CMD [ "npm", "start" ] diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 00000000..55b6a95a --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,11 @@ +version: '3' +services: + taas_api: + image: taas_api:latest + build: + context: ../ + dockerfile: docker/Dockerfile + env_file: + - api.env + ports: + - "3000:3000" diff --git a/docker/sample.api.env b/docker/sample.api.env new file mode 100644 index 00000000..f7ed6a2d --- /dev/null +++ b/docker/sample.api.env @@ -0,0 +1,9 @@ +DATABASE_URL= +ES_HOST= + +AUTH0_URL= +AUTH0_AUDIENCE= +AUTH0_AUDIENCE_FOR_BUS_API= +TOKEN_CACHE_TIME=500000 +AUTH0_CLIENT_ID= +AUTH0_CLIENT_SECRET= diff --git a/docs/Topcoder-bookings-api.postman_collection.json b/docs/Topcoder-bookings-api.postman_collection.json index c95d79d2..338ebdc3 100644 --- a/docs/Topcoder-bookings-api.postman_collection.json +++ b/docs/Topcoder-bookings-api.postman_collection.json @@ -1,6 +1,6 @@ { "info": { - "_postman_id": "4eab05e9-0474-4c51-8129-fff46034fae7", + "_postman_id": "48d00a68-e5e9-44b4-bc6e-357a3e95d1d6", "name": "Topcoder-bookings-api", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" }, @@ -14,7 +14,7 @@ { "listen": "test", "script": { - "id": "08a44bb2-3d15-4f33-be09-9b8baefbe967", + "id": "2e346a9f-de3c-485d-8681-59271541dd78", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"jobId\",data.id);" @@ -59,7 +59,7 @@ { "listen": "test", "script": { - "id": "067f1c4a-f69a-4ad5-9a6f-731aee2c8540", + "id": "6420b802-df77-4836-9458-1c1d02c4bb51", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"jobId\",data.id);" @@ -104,7 +104,7 @@ { "listen": "test", "script": { - "id": "67f54beb-d9bd-437f-a6a0-8087526a36af", + "id": "3ccfd704-685e-48e4-91e8-b9a475e0ce76", "exec": [ "" ], @@ -148,7 +148,7 @@ { "listen": "test", "script": { - "id": "69e81fc3-2660-4780-983c-54dcb8ea2f70", + "id": "16f1cca4-3de4-47ec-9769-e80e2287f6c5", "exec": [ "" ], @@ -192,7 +192,7 @@ { "listen": "test", "script": { - "id": "754b1988-7cb2-4841-87bb-f349d2bb6ead", + "id": "b2f757a8-8355-409a-850d-cf9c2570b5be", "exec": [ "" ], @@ -254,6 +254,36 @@ }, "response": [] }, + { + "name": "get job with booking manager from db", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "type": "text", + "value": "Bearer {{token_bookingManager}}" + } + ], + "url": { + "raw": "{{URL}}/jobs/{{jobId}}?fromDb=true", + "host": [ + "{{URL}}" + ], + "path": [ + "jobs", + "{{jobId}}" + ], + "query": [ + { + "key": "fromDb", + "value": "true" + } + ] + } + }, + "response": [] + }, { "name": "get job with connect user", "request": { @@ 
-1156,7 +1186,7 @@ { "listen": "test", "script": { - "id": "6123ade2-c217-4cc1-b15a-287b6fb5830e", + "id": "5c05699c-7312-4f41-beb5-883401ea2fe0", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"jobCandidateId\",data.id);" @@ -1201,7 +1231,7 @@ { "listen": "test", "script": { - "id": "9b969b29-a02a-4020-a15d-4e3e2b7438d3", + "id": "401b5076-4055-4bc4-b63c-4366cffe4806", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"jobCandidateId\",data.id);" @@ -1246,7 +1276,7 @@ { "listen": "test", "script": { - "id": "692f7d69-3e02-4d8b-8e6d-d25d832b798e", + "id": "96fd60e4-fefd-4a0c-bcff-afb0aed66756", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"jobCandidateId\",data.id);" @@ -1291,7 +1321,7 @@ { "listen": "test", "script": { - "id": "dd78126f-682a-4b26-ad83-1abfc442f76a", + "id": "3c191203-4715-4bc5-8f30-b4204e58ea46", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"jobCandidateId\",data.id);" @@ -1336,7 +1366,7 @@ { "listen": "test", "script": { - "id": "d5b63f44-3ffc-417a-8e17-6e2ab19ec043", + "id": "f1257f98-a3ec-49d4-8e8a-613e0c5c2273", "exec": [ "" ], @@ -1398,6 +1428,36 @@ }, "response": [] }, + { + "name": "get job candidate with booking manager from db", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "type": "text", + "value": "Bearer {{token_bookingManager}}" + } + ], + "url": { + "raw": "{{URL}}/jobCandidates/{{jobCandidateId}}?fromDb=true", + "host": [ + "{{URL}}" + ], + "path": [ + "jobCandidates", + "{{jobCandidateId}}" + ], + "query": [ + { + "key": "fromDb", + "value": "true" + } + ] + } + }, + "response": [] + }, { "name": "get job candidate with connect user", "request": { @@ -2123,7 +2183,7 @@ { "listen": "test", "script": { - "id": "71bb710a-9c6a-4192-be70-6a3801ce6514", + "id": "29ff4137-5b1f-4be4-bf5d-b0a1aab70eb5", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"resourceBookingId\",data.id);" @@ -2168,7 +2228,7 @@ { "listen": "test", "script": { - "id": "a6a7f344-824d-4947-9fca-1b10545900e1", + "id": "1693d79f-cbea-4b20-b04f-c7d0278116b2", "exec": [ "var data = JSON.parse(responseBody);\r", "postman.setEnvironmentVariable(\"resourceBookingId\",data.id);" @@ -2213,7 +2273,7 @@ { "listen": "test", "script": { - "id": "65a44572-451b-41d0-8946-dd14bac1b731", + "id": "9895bedf-46f2-43bb-904b-f7c90e078cc4", "exec": [ "" ], @@ -2257,7 +2317,7 @@ { "listen": "test", "script": { - "id": "4994a8e1-eac7-4ee1-8fcc-6b53bbffe3f3", + "id": "1e0fdfea-6ef0-4a5f-abcd-a60b47ed3eb7", "exec": [ "" ], @@ -2301,7 +2361,7 @@ { "listen": "test", "script": { - "id": "e9acb6ed-1931-493d-abd6-6bee2ab560fa", + "id": "414d9fae-4cc1-442e-b8a6-4827fd44e33a", "exec": [ "" ], @@ -2363,6 +2423,36 @@ }, "response": [] }, + { + "name": "get resource booking with booking manager from db", + "request": { + "method": "GET", + "header": [ + { + "key": "Authorization", + "type": "text", + "value": "Bearer {{token_bookingManager}}" + } + ], + "url": { + "raw": "{{URL}}/resourceBookings/{{resourceBookingId}}?fromDb=true", + "host": [ + "{{URL}}" + ], + "path": [ + "resourceBookings", + "{{resourceBookingId}}" + ], + "query": [ + { + "key": "fromDb", + "value": "true" + } + ] + } + }, + "response": [] + }, { "name": "get resource booking with connect user", "request": { @@ -3117,6 +3207,29 @@ } ], "protocolProfileBehavior": {} + }, + { + "name": "health check", + "item": [ + { + "name": "health 
check", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "{{URL}}/health", + "host": [ + "{{URL}}" + ], + "path": [ + "health" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} } ], "protocolProfileBehavior": {} diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 6eacb75a..7f59f497 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -240,6 +240,12 @@ paths: required: true schema: type: string + - in: query + name: fromDb + description: get data from db or not. + required: false + schema: + type: boolean responses: '200': description: OK @@ -636,6 +642,12 @@ paths: schema: type: string format: uuid + - in: query + name: fromDb + description: get data from db or not. + required: false + schema: + type: boolean responses: '200': description: OK @@ -1039,6 +1051,12 @@ paths: schema: type: string format: uuid + - in: query + name: fromDb + description: get data from db or not. + required: false + schema: + type: boolean responses: '200': description: OK @@ -1246,6 +1264,25 @@ paths: application/json: schema: $ref: '#/components/schemas/Error' + /health: + get: + tags: + - Health + description: | + Get health status of the app. + responses: + '200': + description: OK + content: + application/json: + schema: + $ref: '#/components/schemas/CheckRun' + '503': + description: Service unavailable + content: + application/json: + schema: + $ref: '#/components/schemas/CheckRun' components: securitySchemes: bearerAuth: @@ -1641,6 +1678,13 @@ components: type: string enum: ['hourly', 'daily', 'weekly', 'monthly'] description: "The rate type of the job." + CheckRun: + type: object + properties: + checksRun: + type: integer + required: + - checksRun Error: required: - message diff --git a/package-lock.json b/package-lock.json index 82571617..6f940aa0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -479,6 +479,104 @@ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==" }, + "@topcoder-platform/topcoder-bus-api-wrapper": { + "version": "github:topcoder-platform/tc-bus-api-wrapper#f8cbd335a0e0b4d6edd7cae859473593271fd97f", + "from": "github:topcoder-platform/tc-bus-api-wrapper", + "requires": { + "joi": "^13.4.0", + "lodash": "^4.17.15", + "superagent": "^3.8.3", + "tc-core-library-js": "github:appirio-tech/tc-core-library-js#v2.6.4" + }, + "dependencies": { + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "joi": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/joi/-/joi-13.7.0.tgz", + "integrity": "sha512-xuY5VkHfeOYK3Hdi91ulocfuFopwgbSORmIwzcwHKESQhC7w1kD5jaVSPnqDxS2I8t3RZ9omCKAxNwXN5zG1/Q==", + "requires": { + "hoek": "5.x.x", + "isemail": "3.x.x", + "topo": "3.x.x" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "superagent": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", + "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", + "requires": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.2.0", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.3.5" + } + }, + "tc-core-library-js": { + "version": "github:appirio-tech/tc-core-library-js#df0b36c51cf80918194cbff777214b3c0cf5a151", + "from": "github:appirio-tech/tc-core-library-js#v2.6.4", + "requires": { + "axios": "^0.19.0", + "bunyan": "^1.8.12", + "jsonwebtoken": "^8.5.1", + "jwks-rsa": "^1.6.0", + "lodash": "^4.17.15", + "millisecond": "^0.1.2", + "r7insight_node": "^1.8.4", + "request": "^2.88.0" + } + } + } + }, "@types/body-parser": { "version": "1.19.0", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.0.tgz", @@ -818,6 +916,29 @@ "array-filter": "^1.0.0" } }, + "aws-sdk": { + "version": "2.787.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.787.0.tgz", + "integrity": "sha512-3WlUdWqUB8Vhdvj/7TENr/7SEmQzxmnHxOJ8l2WjZbcMRSuI0/9Ym4p1TC3hf21VDVDhkdGlw60QqpZQ1qb+Mg==", + "requires": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" + }, + "dependencies": { + "uuid": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + } + } + }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -857,6 +978,11 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, + "base64-js": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", + "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" + }, "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -980,6 +1106,16 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, + "buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": 
"sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, "buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", @@ -1987,6 +2123,11 @@ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" }, + "events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" + }, "express": { "version": "4.17.1", "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", @@ -2540,6 +2681,11 @@ "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true }, + "hoek": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-5.0.4.tgz", + "integrity": "sha512-Alr4ZQgoMlnere5FZJsIyfIjORBqZll5POhDsF4q64dPuJR6rNxXdDxtHSQq8OXRurhmx+PWYEE8bXRROY8h0w==" + }, "hosted-git-info": { "version": "2.8.8", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", @@ -2647,6 +2793,11 @@ "safer-buffer": ">= 2.1.2 < 3" } }, + "ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" + }, "ignore": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", @@ -2973,6 +3124,14 @@ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, + "isemail": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", + "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", + "requires": { + "punycode": "2.x.x" + } + }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -3179,6 +3338,11 @@ "iterate-iterator": "^1.0.1" } }, + "jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + }, "joi": { "version": "17.2.1", "resolved": "https://registry.npmjs.org/joi/-/joi-17.2.1.tgz", @@ -4824,6 +4988,11 @@ "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" }, + "querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + }, "r7insight_node": { "version": "1.8.4", "resolved": "https://registry.npmjs.org/r7insight_node/-/r7insight_node-1.8.4.tgz", @@ -5132,6 +5301,11 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, + "sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + }, "secure-json-parse": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.1.0.tgz", @@ -5722,6 +5896,21 @@ "resolved": 
"https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, + "topo": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/topo/-/topo-3.0.3.tgz", + "integrity": "sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ==", + "requires": { + "hoek": "6.x.x" + }, + "dependencies": { + "hoek": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-6.1.3.tgz", + "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==" + } + } + }, "toposort-class": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz", @@ -5920,6 +6109,22 @@ "punycode": "^2.1.0" } }, + "url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + }, + "dependencies": { + "punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + } + } + }, "url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", @@ -6205,6 +6410,20 @@ "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", "dev": true }, + "xml2js": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", + "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~9.0.1" + } + }, + "xmlbuilder": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", + "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" + }, "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", diff --git a/package.json b/package.json index a82b5c59..374e7731 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,8 @@ "license": "ISC", "dependencies": { "@elastic/elasticsearch": "^7.9.1", + "@topcoder-platform/topcoder-bus-api-wrapper": "github:topcoder-platform/tc-bus-api-wrapper", + "aws-sdk": "^2.787.0", "config": "^3.3.2", "cors": "^2.8.5", "dotenv": "^8.2.0", diff --git a/scripts/createIndex.js b/scripts/createIndex.js index 853aec42..ad5fa2c0 100644 --- a/scripts/createIndex.js +++ b/scripts/createIndex.js @@ -7,7 +7,6 @@ const logger = require('../src/common/logger') const helper = require('../src/common/helper') async function createIndex () { - logger.info('ES Index creation started!') const esClient = helper.getESClient() const indices = [ @@ -75,12 +74,12 @@ async function createIndex () { for (const index of indices) { await esClient.indices.create(index) - logger.info(`ES Index ${index.index} creation succeeded!`) + logger.info({ component: 'createIndex', message: `ES Index ${index.index} creation succeeded!` }) } process.exit(0) } createIndex().catch((err) => { - logger.logFullError(err) + logger.logFullError(err, { component: 'createIndex' }) process.exit(1) }) diff --git a/scripts/deleteIndex.js b/scripts/deleteIndex.js index 8ae7916b..dd7ca7e6 100644 --- a/scripts/deleteIndex.js +++ b/scripts/deleteIndex.js @@ -7,7 +7,7 @@ const logger = require('../src/common/logger') const helper = 
require('../src/common/helper') async function deleteIndex () { - logger.info('ES Index deletion started!') + logger.info({ component: 'deleteIndex', message: 'ES Index deletion started!' }) const esClient = helper.getESClient() const indices = [config.get('esConfig.ES_INDEX_JOB'), config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), @@ -16,11 +16,11 @@ async function deleteIndex () { await esClient.indices.delete({ index }) - logger.info(`ES Index ${index} deletion succeeded!`) + logger.info({ component: 'deleteIndex', message: `ES Index ${index} deletion succeeded!` }) } process.exit(0) } deleteIndex().catch((err) => { - logger.logFullError(err) + logger.logFullError(err, { component: 'deleteIndex' }) process.exit(1) }) diff --git a/src/bootstrap.js b/src/bootstrap.js index 56aa515f..4e6d87f7 100644 --- a/src/bootstrap.js +++ b/src/bootstrap.js @@ -17,7 +17,8 @@ function buildServices (dir) { if (stats.isDirectory()) { buildServices(curPath) } else if (path.extname(file) === '.js') { - logger.buildService(require(curPath)); // eslint-disable-line + const serviceName = path.basename(file, '.js') + logger.buildService(require(curPath), serviceName) } }) }) diff --git a/src/common/helper.js b/src/common/helper.js index a59cd11e..9432cedc 100644 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -3,17 +3,39 @@ */ const querystring = require('querystring') +const AWS = require('aws-sdk') const config = require('config') const _ = require('lodash') const request = require('superagent') const elasticsearch = require('@elastic/elasticsearch') const errors = require('../common/errors') +const logger = require('./logger') +const busApi = require('@topcoder-platform/topcoder-bus-api-wrapper') + +AWS.config.region = config.esConfig.AWS_REGION const m2mAuth = require('tc-core-library-js').auth.m2m -//const m2m = m2mAuth(_.pick(config, ['AUTH0_URL', 'AUTH0_AUDIENCE', 'TOKEN_CACHE_TIME', 'AUTH0_PROXY_SERVER_URL'])) -const m2m = m2mAuth(_.pick(config, ['AUTH0_URL', 'AUTH0_AUDIENCE', 'AUTH0_CLIENT_ID','AUTH0_CLIENT_SECRET', 'AUTH0_PROXY_SERVER_URL'])) +// const m2m = m2mAuth(_.pick(config, ['AUTH0_URL', 'AUTH0_AUDIENCE', 'TOKEN_CACHE_TIME', 'AUTH0_PROXY_SERVER_URL'])) +const m2m = m2mAuth(_.pick(config, ['AUTH0_URL', 'AUTH0_AUDIENCE', 'AUTH0_CLIENT_ID', 'AUTH0_CLIENT_SECRET', 'AUTH0_PROXY_SERVER_URL'])) + +let busApiClient +/** + * Get bus api client. 
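+ * The client is created lazily on the first call and cached in the module-scoped `busApiClient` variable, so later calls reuse the same instance.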
+ * + * @returns {Object} the bus api client + */ +function getBusApiClient () { + if (busApiClient) { + return busApiClient + } + busApiClient = busApi({ + AUTH0_AUDIENCE: config.AUTH0_AUDIENCE_FOR_BUS_API, + ..._.pick(config, ['AUTH0_URL', 'TOKEN_CACHE_TIME', 'AUTH0_CLIENT_ID', 'AUTH0_CLIENT_SECRET', 'BUSAPI_URL', 'KAFKA_ERROR_TOPIC', 'AUTH0_PROXY_SERVER_URL']) + }) + return busApiClient +} // ES Client mapping const esClients = {} @@ -68,6 +90,9 @@ function getPageLink (req, page) { * @param {Object} result the operation result */ function setResHeaders (req, res, result) { + if (result.fromDb) { + return + } const totalPages = Math.ceil(result.total / result.perPage) if (result.page > 1) { res.set('X-Prev-Page', result.page - 1) @@ -140,10 +165,26 @@ async function isConnectMember (projectId, jwtToken) { * @return {Object} Elastic Host Client Instance */ function getESClient () { - const esHost = config.get('esConfig.HOST') - if (!esClients.client) { + if (esClients.client) { + return esClients.client + } + + const host = config.esConfig.HOST + const cloudId = config.esConfig.ELASTICCLOUD.id + if (cloudId) { + // Elastic Cloud configuration esClients.client = new elasticsearch.Client({ - node: esHost + cloud: { + id: cloudId + }, + auth: { + username: config.esConfig.ELASTICCLOUD.username, + password: config.esConfig.ELASTICCLOUD.password + } + }) + } else { + esClients.client = new elasticsearch.Client({ + node: host }) } return esClients.client @@ -154,7 +195,7 @@ function getESClient () { * @returns {Promise} */ const getM2Mtoken = async () => { - return m2m.getMachineToken(config.AUTH0_CLIENT_ID, config.AUTH0_CLIENT_SECRET) + return await m2m.getMachineToken(config.AUTH0_CLIENT_ID, config.AUTH0_CLIENT_SECRET) } /** @@ -207,16 +248,50 @@ async function getUserIds (userId) { async function getUserId (userId) { const ids = await getUserIds(userId) if (_.isEmpty(ids)) { - throw new errors.NotFoundError('user id not found') + throw new errors.NotFoundError(`userId: ${userId} "user" not found`) } return ids[0].id } +/** + * Send Kafka event message + * @params {String} topic the topic name + * @params {Object} payload the payload + */ +async function postEvent (topic, payload) { + logger.debug({ component: 'helper', context: 'postEvent', message: `Posting event to Kafka topic ${topic}, ${JSON.stringify(payload)}` }) + const client = getBusApiClient() + const message = { + topic, + originator: config.KAFKA_MESSAGE_ORIGINATOR, + timestamp: new Date().toISOString(), + 'mime-type': 'application/json', + payload + } + await client.postEvent(message) +} + +/** + * Test if an error is document missing exception + * + * @param {Object} err the err + * @returns {Boolean} the result + */ +function isDocumentMissingException (err) { + if (err.statusCode === 404) { + return true + } + return false +} + module.exports = { autoWrapExpress, setResHeaders, clearObject, isConnectMember, getESClient, - getUserId + getUserId, + postEvent, + getBusApiClient, + isDocumentMissingException } diff --git a/src/common/logger.js b/src/common/logger.js index cd2f2d01..c0965d1a 100644 --- a/src/common/logger.js +++ b/src/common/logger.js @@ -10,10 +10,13 @@ const getParams = require('get-parameter-names') const winston = require('winston') const { - combine, timestamp, colorize, align, printf + combine, timestamp, colorize, printf } = winston.format -const basicFormat = printf(info => `${info.timestamp} ${info.level}: ${info.message}`) +const basicFormat = printf(info => { + const location = 
`${info.component}${info.context ? ` ${info.context}` : ''}` + return `[${info.timestamp}] ${location} ${info.level} : ${info.message}` +}) const transports = [] if (!config.DISABLE_LOGGING) { @@ -23,8 +26,11 @@ if (!config.DISABLE_LOGGING) { const logger = winston.createLogger({ transports, format: combine( + winston.format(info => { + info.level = info.level.toUpperCase() + return info + })(), colorize(), - align(), timestamp(), basicFormat ) @@ -33,15 +39,20 @@ const logger = winston.createLogger({ logger.config = config /** - * Log error details with signature - * @param err the error - * @param signature the signature + * Log error details + * @param {Object} err the error + * @param {Object} context contains extra info about errors */ -logger.logFullError = (err, signature) => { +logger.logFullError = (err, context = {}) => { if (!err) { return } - logger.error((signature ? (`${signature} : `) : '') + util.inspect(err)) + if (err.logged) { + return + } + const signature = context.signature ? `${context.signature} : ` : '' + const errMessage = err.message || util.inspect(err).split('\n')[0] + logger.error({ ..._.pick(context, ['component', 'context']), message: `${signature}${errMessage}` }) err.logged = true } @@ -79,29 +90,37 @@ const _combineObject = (params, arr) => { /** * Decorate all functions of a service and log debug information if DEBUG is enabled * @param {Object} service the service + * @param {String} serviceName the service name */ -logger.decorateWithLogging = (service) => { +logger.decorateWithLogging = (service, serviceName) => { if (logger.config.LOG_LEVEL !== 'debug') { return } _.each(service, (method, name) => { const params = method.params || getParams(method) service[name] = async function () { - logger.debug(`ENTER ${name}`) - logger.debug('input arguments') const args = Array.prototype.slice.call(arguments) - logger.debug(util.inspect(_sanitizeObject(_combineObject(params, args)))) + logger.debug({ + component: serviceName, + context: name, + message: `input arguments: ${util.inspect(_sanitizeObject(_combineObject(params, args)), { compact: true, breakLength: Infinity })}` + }) try { const result = await method.apply(this, arguments) - logger.debug(`EXIT ${name}`) - logger.debug('output arguments') - if (result !== null && result !== undefined) { - logger.debug(util.inspect(_sanitizeObject(result))) - } + logger.debug({ + component: serviceName, + context: name, + message: `output arguments: ${result !== null && result !== undefined + ? 
util.inspect(_sanitizeObject(result), { compact: true, breakLength: Infinity }) + : undefined}` + }) return result - } catch (e) { - logger.logFullError(e, name) - throw e + } catch (err) { + logger.logFullError(err, { + component: serviceName, + context: name + }) + throw err } } }) @@ -140,10 +159,11 @@ logger.decorateWithValidators = function (service) { /** * Apply logger and validation decorators * @param {Object} service the service to wrap + * @param {String} serviceName the service name */ -logger.buildService = (service) => { +logger.buildService = (service, serviceName) => { logger.decorateWithValidators(service) - logger.decorateWithLogging(service) + logger.decorateWithLogging(service, serviceName) } module.exports = logger diff --git a/src/controllers/HealthCheckController.js b/src/controllers/HealthCheckController.js new file mode 100644 index 00000000..9579520c --- /dev/null +++ b/src/controllers/HealthCheckController.js @@ -0,0 +1,37 @@ +/** + * Controller for health check endpoint + */ +const models = require('../models') +const config = require('config') +const logger = require('../common/logger') + +// the topcoder-healthcheck-dropin library returns checksRun count, +// here it follows that to return such count +let checksRun = 0 + +/** + * Check health of the DB + * @param {Object} req the request + * @param {Object} res the response + */ +async function checkHealth (req, res) { + checksRun += 1 + const conn = new models.Sequelize(config.get('DATABASE_URL'), { + logging: false + }) + await conn + .authenticate() + .then(() => { + logger.info({ component: 'src/controllers/HealthCheckController.js', context: 'checkHealth', message: 'Connection has been established successfully.' }) + }) + .catch(err => { + logger.logFullError(err, { component: 'HealthCheckController', context: 'checkHealth' }) + res.status(503) + }) + await conn.close() + res.send({ checksRun }) +} + +module.exports = { + checkHealth +} diff --git a/src/controllers/JobCandidateController.js b/src/controllers/JobCandidateController.js index 86b38be6..45810f39 100644 --- a/src/controllers/JobCandidateController.js +++ b/src/controllers/JobCandidateController.js @@ -11,7 +11,7 @@ const helper = require('../common/helper') * @param res the response */ async function getJobCandidate (req, res) { - res.send(await service.getJobCandidate(req.params.id)) + res.send(await service.getJobCandidate(req.params.id, req.query.fromDb)) } /** diff --git a/src/controllers/JobController.js b/src/controllers/JobController.js index dfb319ed..6e71b108 100644 --- a/src/controllers/JobController.js +++ b/src/controllers/JobController.js @@ -11,7 +11,7 @@ const helper = require('../common/helper') * @param res the response */ async function getJob (req, res) { - res.send(await service.getJob(req.params.id)) + res.send(await service.getJob(req.params.id, req.query.fromDb)) } /** diff --git a/src/controllers/ResourceBookingController.js b/src/controllers/ResourceBookingController.js index 06c57ae3..ed846a91 100644 --- a/src/controllers/ResourceBookingController.js +++ b/src/controllers/ResourceBookingController.js @@ -11,7 +11,7 @@ const helper = require('../common/helper') * @param res the response */ async function getResourceBooking (req, res) { - res.send(await service.getResourceBooking(req.authUser, req.params.id)) + res.send(await service.getResourceBooking(req.authUser, req.params.id, req.query.fromDb)) } /** diff --git a/src/init-db.js b/src/init-db.js index 3b838bda..95b608a8 100644 --- a/src/init-db.js +++ 
b/src/init-db.js @@ -6,17 +6,19 @@ const models = require('./models') const logger = require('./common/logger') const initDB = async () => { - // await models.sequelize.dropSchema(config.DB_SCHEMA_NAME) + if (process.argv[2] === 'force') { + await models.sequelize.dropSchema(config.DB_SCHEMA_NAME) + } await models.sequelize.createSchema(config.DB_SCHEMA_NAME) await models.sequelize.sync({ force: true }) } if (!module.parent) { initDB().then(() => { - logger.info('Database synced successfully') + logger.info({ component: 'init-db', message: 'Database synced successfully' }) process.exit() }).catch((e) => { - logger.logFullError(e) + logger.logFullError(e, { component: 'init-db' }) process.exit(1) }) } diff --git a/src/models/Job.js b/src/models/Job.js index dc012350..dcb11a3d 100644 --- a/src/models/Job.js +++ b/src/models/Job.js @@ -46,7 +46,7 @@ module.exports = (sequelize) => { } const job = await Job.findOne(criteria) if (!job) { - throw new errors.NotFoundError(`Job with id: ${id} doesn't exists.`) + throw new errors.NotFoundError(`id: ${id} "Job" doesn't exists.`) } return job } diff --git a/src/models/JobCandidate.js b/src/models/JobCandidate.js index 74a7cdbd..e6761eaa 100644 --- a/src/models/JobCandidate.js +++ b/src/models/JobCandidate.js @@ -29,7 +29,7 @@ module.exports = (sequelize) => { } }) if (!jobCandidate) { - throw new errors.NotFoundError(`JobCandidate with id: ${id} doesn't exists.`) + throw new errors.NotFoundError(`id: ${id} "JobCandidate" doesn't exists.`) } return jobCandidate } diff --git a/src/models/ResourceBooking.js b/src/models/ResourceBooking.js index e0a0b0c8..37ff8b6c 100644 --- a/src/models/ResourceBooking.js +++ b/src/models/ResourceBooking.js @@ -28,7 +28,7 @@ module.exports = (sequelize) => { } }) if (!resourceBooking) { - throw new errors.NotFoundError(`ResourceBooking with id: ${id} doesn't exists.`) + throw new errors.NotFoundError(`id: ${id} "ResourceBooking" doesn't exists.`) } return resourceBooking } diff --git a/src/routes/HealthCheckRoutes.js b/src/routes/HealthCheckRoutes.js new file mode 100644 index 00000000..9e0f4f55 --- /dev/null +++ b/src/routes/HealthCheckRoutes.js @@ -0,0 +1,12 @@ +/** + * Contains healthcheck routes + */ + +module.exports = { + '/health': { + get: { + controller: 'HealthCheckController', + method: 'checkHealth' + } + } +} diff --git a/src/routes/index.js b/src/routes/index.js index dccb8b74..08e94809 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -6,9 +6,11 @@ const _ = require('lodash') const JobRoutes = require('./JobRoutes') const JobCandidateRoutes = require('./JobCandidateRoutes') const ResourceBookingRoutes = require('./ResourceBookingRoutes') +const HealthCheckRoutes = require('./HealthCheckRoutes') module.exports = _.extend({}, JobRoutes, JobCandidateRoutes, - ResourceBookingRoutes + ResourceBookingRoutes, + HealthCheckRoutes ) diff --git a/src/services/JobCandidateService.js b/src/services/JobCandidateService.js index 1dabf5af..d768cf84 100644 --- a/src/services/JobCandidateService.js +++ b/src/services/JobCandidateService.js @@ -5,6 +5,7 @@ const _ = require('lodash') const Joi = require('joi') const config = require('config') +const { Op } = require('sequelize') const { v4: uuid } = require('uuid') const helper = require('../common/helper') const logger = require('../common/logger') @@ -12,19 +13,38 @@ const errors = require('../common/errors') const models = require('../models') const JobCandidate = models.JobCandidate +const esClient = helper.getESClient() /** * Get jobCandidate by id * 
@param {String} id the jobCandidate id + * @param {Boolean} fromDb flag if query db for data or not * @returns {Object} the jobCandidate */ -async function getJobCandidate (id) { +async function getJobCandidate (id, fromDb = false) { + if (!fromDb) { + try { + const jobCandidate = await esClient.get({ + index: config.esConfig.ES_INDEX_JOB_CANDIDATE, + id + }) + const jobCandidateRecord = { id: jobCandidate.body._id, ...jobCandidate.body._source } + return jobCandidateRecord + } catch (err) { + if (helper.isDocumentMissingException(err)) { + throw new errors.NotFoundError(`id: ${id} "JobCandidate" not found`) + } + logger.logFullError(err, { component: 'JobCandidateService', context: 'getJobCandidate' }) + } + } + logger.info({ component: 'JobCandidateService', context: 'getJobCandidate', message: 'try to query db for data' }) const jobCandidate = await JobCandidate.findById(id) return helper.clearObject(jobCandidate.dataValues) } getJobCandidate.schema = Joi.object().keys({ - id: Joi.string().guid().required() + id: Joi.string().guid().required(), + fromDb: Joi.boolean() }).required() /** @@ -39,15 +59,8 @@ async function createJobCandidate (currentUser, jobCandidate) { jobCandidate.createdBy = await helper.getUserId(currentUser.userId) jobCandidate.status = 'open' - const esClient = helper.getESClient() - await esClient.create({ - index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), - id: jobCandidate.id, - body: _.omit(jobCandidate, 'id'), - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }) - const created = await JobCandidate.create(jobCandidate) + await helper.postEvent(config.TAAS_JOB_CANDIDATE_CREATE_TOPIC, jobCandidate) return helper.clearObject(created.dataValues) } @@ -80,17 +93,8 @@ async function updateJobCandidate (currentUser, id, data) { data.updatedAt = new Date() data.updatedBy = await helper.getUserId(currentUser.userId) - const esClient = helper.getESClient() - await esClient.update({ - index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), - id, - body: { - doc: data - }, - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }) - await jobCandidate.update(data) + await helper.postEvent(config.TAAS_JOB_CANDIDATE_UPDATE_TOPIC, { id, ...data }) const result = helper.clearObject(_.assign(jobCandidate.dataValues, data)) return result } @@ -145,17 +149,9 @@ async function deleteJobCandidate (currentUser, id) { throw new errors.ForbiddenError('You are not allowed to perform this action!') } - const esClient = helper.getESClient() - await esClient.delete({ - index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), - id, - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }, { - ignore: [404] - }) - const jobCandidate = await JobCandidate.findById(id) await jobCandidate.update({ deletedAt: new Date() }) + await helper.postEvent(config.TAAS_JOB_CANDIDATE_DELETE_TOPIC, { id }) } deleteJobCandidate.schema = Joi.object().keys({ @@ -172,53 +168,76 @@ async function searchJobCandidates (criteria) { const page = criteria.page > 0 ? criteria.page : 1 const perPage = criteria.perPage > 0 ? 
criteria.perPage : 20 if (!criteria.sortBy) { - criteria.sortBy = '_id' - } - if (criteria.sortBy === 'id') { - criteria.sortBy = '_id' + criteria.sortBy = 'id' } if (!criteria.sortOrder) { criteria.sortOrder = 'desc' } - const sort = [{ [criteria.sortBy]: { order: criteria.sortOrder } }] - - const esQuery = { - index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), - body: { - query: { - bool: { - must: [] - } - }, - from: (page - 1) * perPage, - size: perPage, - sort + try { + const sort = [{ [criteria.sortBy === 'id' ? '_id' : criteria.sortBy]: { order: criteria.sortOrder } }] + + const esQuery = { + index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), + body: { + query: { + bool: { + must: [] + } + }, + from: (page - 1) * perPage, + size: perPage, + sort + } } - } - _.each(_.pick(criteria, ['jobId', 'userId', 'status']), (value, key) => { - esQuery.body.query.bool.must.push({ - term: { - [key]: { - value + _.each(_.pick(criteria, ['jobId', 'userId', 'status']), (value, key) => { + esQuery.body.query.bool.must.push({ + term: { + [key]: { + value + } } - } + }) }) - }) - logger.debug(`Query: ${JSON.stringify(esQuery)}`) + logger.debug({ component: 'JobCandidateService', context: 'searchJobCandidates', message: `Query: ${JSON.stringify(esQuery)}` }) - const esClient = helper.getESClient() - const { body } = await esClient.search(esQuery) + const { body } = await esClient.search(esQuery) + return { + total: body.hits.total.value, + page, + perPage, + result: _.map(body.hits.hits, (hit) => { + const obj = _.cloneDeep(hit._source) + obj.id = hit._id + return obj + }) + } + } catch (err) { + logger.logFullError(err, { component: 'JobCandidateService', context: 'searchJobCandidates' }) + } + logger.info({ component: 'JobCandidateService', context: 'searchJobCandidates', message: 'fallback to DB query' }) + const filter = { + [Op.and]: [{ deletedAt: null }] + } + _.each(_.pick(criteria, ['jobId', 'userId', 'status']), (value, key) => { + filter[Op.and].push({ [key]: value }) + }) + const jobCandidates = await JobCandidate.findAll({ + where: filter, + attributes: { + exclude: ['deletedAt'] + }, + offset: ((page - 1) * perPage), + limit: perPage, + order: [[criteria.sortBy, criteria.sortOrder]] + }) return { - total: body.hits.total.value, + fromDb: true, + total: jobCandidates.length, page, perPage, - result: _.map(body.hits.hits, (hit) => { - const obj = _.cloneDeep(hit._source) - obj.id = hit._id - return obj - }) + result: _.map(jobCandidates, jobCandidate => helper.clearObject(jobCandidate.dataValues)) } } diff --git a/src/services/JobService.js b/src/services/JobService.js index 4ac6d80b..cd106642 100644 --- a/src/services/JobService.js +++ b/src/services/JobService.js @@ -5,6 +5,7 @@ const _ = require('lodash') const Joi = require('joi') const config = require('config') +const { Op } = require('sequelize') const { v4: uuid } = require('uuid') const helper = require('../common/helper') const logger = require('../common/logger') @@ -12,20 +13,75 @@ const errors = require('../common/errors') const models = require('../models') const Job = models.Job +const esClient = helper.getESClient() + +/** + * populate candidates for a job. 
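+ * Searches the job candidate ES index for documents whose `jobId` matches and maps each hit to a plain object with its `_id` exposed as `id`.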
+ * + * @param {String} jobId the job id + * @returns {Array} the list of candidates + */ +async function _getJobCandidates (jobId) { + const { body } = await esClient.search({ + index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), + body: { + query: { + term: { + jobId: { + value: jobId + } + } + } + } + }) + + if (body.hits.total.value === 0) { + return [] + } + const candidates = _.map(body.hits.hits, (hit) => { + const candidateRecord = _.cloneDeep(hit._source) + candidateRecord.id = hit._id + return candidateRecord + }) + return candidates +} /** * Get job by id * @param {String} id the job id + * @param {Boolean} fromDb flag if query db for data or not * @returns {Object} the job */ -async function getJob (id) { +async function getJob (id, fromDb = false) { + if (!fromDb) { + try { + const job = await esClient.get({ + index: config.esConfig.ES_INDEX_JOB, + id + }) + const jobId = job.body._id + const jobRecord = { id: jobId, ...job.body._source } + const candidates = await _getJobCandidates(jobId) + if (candidates.length) { + jobRecord.candidates = candidates + } + return jobRecord + } catch (err) { + if (helper.isDocumentMissingException(err)) { + throw new errors.NotFoundError(`id: ${id} "Job" not found`) + } + logger.logFullError(err, { component: 'JobService', context: 'getJob' }) + } + } + logger.info({ component: 'JobService', context: 'getJob', message: 'try to query db for data' }) const job = await Job.findById(id, true) job.dataValues.candidates = _.map(job.dataValues.candidates, (c) => helper.clearObject(c.dataValues)) return helper.clearObject(job.dataValues) } getJob.schema = Joi.object().keys({ - id: Joi.string().guid().required() + id: Joi.string().guid().required(), + fromDb: Joi.boolean() }).required() /** @@ -46,15 +102,8 @@ async function createJob (currentUser, job) { job.createdBy = await helper.getUserId(currentUser.userId) job.status = 'sourcing' - const esClient = helper.getESClient() - await esClient.create({ - index: config.get('esConfig.ES_INDEX_JOB'), - id: job.id, - body: _.omit(job, 'id'), - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }) - const created = await Job.create(job) + await helper.postEvent(config.TAAS_JOB_CREATE_TOPIC, job) return helper.clearObject(created.dataValues) } @@ -92,17 +141,8 @@ async function updateJob (currentUser, id, data) { data.updatedAt = new Date() data.updatedBy = await helper.getUserId(currentUser.userId) - const esClient = helper.getESClient() - await esClient.update({ - index: config.get('esConfig.ES_INDEX_JOB'), - id, - body: { - doc: data - }, - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }) - await job.update(data) + await helper.postEvent(config.TAAS_JOB_UPDATE_TOPIC, { id, ...data }) job = await Job.findById(id, true) job.dataValues.candidates = _.map(job.dataValues.candidates, (c) => helper.clearObject(c.dataValues)) return helper.clearObject(job.dataValues) @@ -172,16 +212,9 @@ async function deleteJob (currentUser, id) { throw new errors.ForbiddenError('You are not allowed to perform this action!') } - const esClient = helper.getESClient() - await esClient.delete({ - index: config.get('esConfig.ES_INDEX_JOB'), - id, - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }, { - ignore: [404] - }) const job = await Job.findById(id) await job.update({ deletedAt: new Date() }) + await helper.postEvent(config.TAAS_JOB_DELETE_TOPIC, { id }) } deleteJob.schema = Joi.object().keys({ @@ -198,94 +231,120 @@ 
async function searchJobs (criteria) { const page = criteria.page > 0 ? criteria.page : 1 const perPage = criteria.perPage > 0 ? criteria.perPage : 20 if (!criteria.sortBy) { - criteria.sortBy = '_id' - } - if (criteria.sortBy === 'id') { - criteria.sortBy = '_id' + criteria.sortBy = 'id' } if (!criteria.sortOrder) { criteria.sortOrder = 'desc' } - const sort = [{ [criteria.sortBy]: { order: criteria.sortOrder } }] + try { + const sort = [{ [criteria.sortBy === 'id' ? '_id' : criteria.sortBy]: { order: criteria.sortOrder } }] - const esQuery = { - index: config.get('esConfig.ES_INDEX_JOB'), - body: { - query: { - bool: { - must: [] - } - }, - from: (page - 1) * perPage, - size: perPage, - sort + const esQuery = { + index: config.get('esConfig.ES_INDEX_JOB'), + body: { + query: { + bool: { + must: [] + } + }, + from: (page - 1) * perPage, + size: perPage, + sort + } } - } - _.each(_.pick(criteria, ['projectId', 'externalId', 'description', 'startDate', 'endDate', 'resourceType', 'skill', 'rateType', 'status']), (value, key) => { - let must - if (key === 'description') { - must = { - match: { - [key]: { - query: value + _.each(_.pick(criteria, ['projectId', 'externalId', 'description', 'startDate', 'endDate', 'resourceType', 'skill', 'rateType', 'status']), (value, key) => { + let must + if (key === 'description') { + must = { + match: { + [key]: { + query: value + } } } - } - } else if (key === 'skill') { - must = { - terms: { - skills: [value] - } - } - } else { - must = { - term: { - [key]: { - value + } else if (key === 'skill') { + must = { + terms: { + skills: [value] } } - } - } - esQuery.body.query.bool.must.push(must) - }) - logger.debug(`Query: ${JSON.stringify(esQuery)}`) - - const esClient = helper.getESClient() - const { body } = await esClient.search(esQuery) - const result = await Promise.all(_.map(body.hits.hits, async (hit) => { - const jobRecord = _.cloneDeep(hit._source) - jobRecord.id = hit._id - - const { body } = await esClient.search({ - index: config.get('esConfig.ES_INDEX_JOB_CANDIDATE'), - body: { - query: { + } else { + must = { term: { - jobId: { - value: jobRecord.id + [key]: { + value } } } } + esQuery.body.query.bool.must.push(must) }) + logger.debug({ component: 'JobService', context: 'searchJobs', message: `Query: ${JSON.stringify(esQuery)}` }) - if (body.hits.total.value > 0) { - const candidates = _.map(body.hits.hits, (hit) => { - const candidateRecord = _.cloneDeep(hit._source) - candidateRecord.id = hit._id - return candidateRecord - }) - jobRecord.candidates = candidates - } - return jobRecord - })) + const { body } = await esClient.search(esQuery) + const result = await Promise.all(_.map(body.hits.hits, async (hit) => { + const jobRecord = _.cloneDeep(hit._source) + jobRecord.id = hit._id + const candidates = await _getJobCandidates(jobRecord.id) + if (candidates.length) { + jobRecord.candidates = candidates + } + return jobRecord + })) + return { + total: body.hits.total.value, + page, + perPage, + result + } + } catch (err) { + logger.logFullError(err, { component: 'JobService', context: 'searchJobs' }) + } + logger.info({ component: 'JobService', context: 'searchJobs', message: 'fallback to DB query' }) + const filter = { + [Op.and]: [{ deletedAt: null }] + } + _.each(_.pick(criteria, ['projectId', 'externalId', 'startDate', 'endDate', 'resourceType', 'rateType', 'status']), (value, key) => { + filter[Op.and].push({ [key]: value }) + }) + if (criteria.description) { + filter.description = { + [Op.like]: `%${criteria.description}%` + } + } + if 
(criteria.skills) { + filter.skills = { + [Op.contains]: [criteria.skills] + } + } + const jobs = await Job.findAll({ + where: filter, + attributes: { + exclude: ['deletedAt'] + }, + offset: ((page - 1) * perPage), + limit: perPage, + order: [[criteria.sortBy, criteria.sortOrder]], + include: [{ + model: models.JobCandidate, + as: 'candidates', + where: { + deletedAt: null + }, + required: false, + attributes: { + exclude: ['deletedAt'] + } + }] + }) return { - total: body.hits.total.value, + fromDb: true, + total: jobs.length, page, perPage, - result + result: _.map(jobs, job => helper.clearObject(job.dataValues)) } } diff --git a/src/services/ResourceBookingService.js b/src/services/ResourceBookingService.js index 9e502600..9f9c24a9 100644 --- a/src/services/ResourceBookingService.js +++ b/src/services/ResourceBookingService.js @@ -5,6 +5,7 @@ const _ = require('lodash') const Joi = require('joi') const config = require('config') +const { Op } = require('sequelize') const { v4: uuid } = require('uuid') const helper = require('../common/helper') const logger = require('../common/logger') @@ -12,27 +13,56 @@ const errors = require('../common/errors') const models = require('../models') const ResourceBooking = models.ResourceBooking +const esClient = helper.getESClient() /** - * Get resourceBooking by id + * filter fields of resource booking by user role. * @param {Object} currentUser the user who perform this operation. - * @param {String} id the resourceBooking id + * @param {Object} resourceBooking the resourceBooking with all fields * @returns {Object} the resourceBooking */ -async function getResourceBooking (currentUser, id) { - const resourceBooking = await ResourceBooking.findById(id) +async function _getResourceBookingFilteringFields (currentUser, resourceBooking) { if (currentUser.isBookingManager) { - return helper.clearObject(resourceBooking.dataValues) - } else if (await helper.isConnectMember(resourceBooking.dataValues.projectId, currentUser.jwtToken)) { - return _.omit(helper.clearObject(resourceBooking.dataValues), 'memberRate') + return helper.clearObject(resourceBooking) + } else if (await helper.isConnectMember(resourceBooking.projectId, currentUser.jwtToken)) { + return _.omit(helper.clearObject(resourceBooking), 'memberRate') } else { - return _.omit(helper.clearObject(resourceBooking.dataValues), 'customerRate') + return _.omit(helper.clearObject(resourceBooking), 'customerRate') + } +} + +/** + * Get resourceBooking by id + * @param {Object} currentUser the user who perform this operation. 
+ * @param {String} id the resourceBooking id + * @param {Boolean} fromDb flag if query db for data or not + * @returns {Object} the resourceBooking + */ +async function getResourceBooking (currentUser, id, fromDb = false) { + if (!fromDb) { + try { + const resourceBooking = await esClient.get({ + index: config.esConfig.ES_INDEX_RESOURCE_BOOKING, + id + }) + const resourceBookingRecord = { id: resourceBooking.body._id, ...resourceBooking.body._source } + return _getResourceBookingFilteringFields(currentUser, resourceBookingRecord) + } catch (err) { + if (helper.isDocumentMissingException(err)) { + throw new errors.NotFoundError(`id: ${id} "ResourceBooking" not found`) + } + logger.logFullError(err, { component: 'ResourceBookingService', context: 'getResourceBooking' }) + } } + logger.info({ component: 'ResourceBookingService', context: 'getResourceBooking', message: 'try to query db for data' }) + const resourceBooking = await ResourceBooking.findById(id) + return _getResourceBookingFilteringFields(currentUser, resourceBooking.dataValues) } getResourceBooking.schema = Joi.object().keys({ currentUser: Joi.object().required(), - id: Joi.string().guid().required() + id: Joi.string().guid().required(), + fromDb: Joi.boolean() }).required() /** @@ -53,15 +83,8 @@ async function createResourceBooking (currentUser, resourceBooking) { resourceBooking.createdBy = await helper.getUserId(currentUser.userId) resourceBooking.status = 'sourcing' - const esClient = helper.getESClient() - await esClient.create({ - index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'), - id: resourceBooking.id, - body: _.omit(resourceBooking, 'id'), - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }) - const created = await ResourceBooking.create(resourceBooking) + await helper.postEvent(config.TAAS_RESOURCE_BOOKING_CREATE_TOPIC, resourceBooking) return helper.clearObject(created.dataValues) } @@ -97,17 +120,8 @@ async function updateResourceBooking (currentUser, id, data) { data.updatedAt = new Date() data.updatedBy = await helper.getUserId(currentUser.userId) - const esClient = helper.getESClient() - await esClient.update({ - index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'), - id, - body: { - doc: data - }, - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }) - await resourceBooking.update(data) + await helper.postEvent(config.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC, { id, ...data }) const result = helper.clearObject(_.assign(resourceBooking.dataValues, data)) return result } @@ -173,17 +187,9 @@ async function deleteResourceBooking (currentUser, id) { throw new errors.ForbiddenError('You are not allowed to perform this action!') } - const esClient = helper.getESClient() - await esClient.delete({ - index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'), - id, - refresh: 'true' // refresh ES so that it is visible for read operations instantly - }, { - ignore: [404] - }) - const resourceBooking = await ResourceBooking.findById(id) await resourceBooking.update({ deletedAt: new Date() }) + await helper.postEvent(config.TAAS_RESOURCE_BOOKING_DELETE_TOPIC, { id }) } deleteResourceBooking.schema = Joi.object().keys({ @@ -201,54 +207,76 @@ async function searchResourceBookings (criteria) { const perPage = criteria.perPage > 0 ? 
criteria.perPage : 20 if (!criteria.sortBy) { - criteria.sortBy = '_id' + criteria.sortBy = 'id' } - if (criteria.sortBy === 'id') { - criteria.sortBy = '_id' - } - if (!criteria.sortOrder) { criteria.sortOrder = 'desc' } - const sort = [{ [criteria.sortBy]: { order: criteria.sortOrder } }] + try { + const sort = [{ [criteria.sortBy === 'id' ? '_id' : criteria.sortBy]: { order: criteria.sortOrder } }] - const esQuery = { - index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'), - body: { - query: { - bool: { - must: [] - } - }, - from: (page - 1) * perPage, - size: perPage, - sort + const esQuery = { + index: config.get('esConfig.ES_INDEX_RESOURCE_BOOKING'), + body: { + query: { + bool: { + must: [] + } + }, + from: (page - 1) * perPage, + size: perPage, + sort + } } - } - _.each(_.pick(criteria, ['status', 'startDate', 'endDate', 'rateType']), (value, key) => { - esQuery.body.query.bool.must.push({ - term: { - [key]: { - value + _.each(_.pick(criteria, ['status', 'startDate', 'endDate', 'rateType']), (value, key) => { + esQuery.body.query.bool.must.push({ + term: { + [key]: { + value + } } - } + }) }) - }) - logger.debug(`Query: ${JSON.stringify(esQuery)}`) + logger.debug({ component: 'ResourceBookingService', context: 'searchResourceBookings', message: `Query: ${JSON.stringify(esQuery)}` }) - const esClient = helper.getESClient() - const { body } = await esClient.search(esQuery) + const { body } = await esClient.search(esQuery) + return { + total: body.hits.total.value, + page, + perPage, + result: _.map(body.hits.hits, (hit) => { + const obj = _.cloneDeep(hit._source) + obj.id = hit._id + return obj + }) + } + } catch (err) { + logger.logFullError(err, { component: 'ResourceBookingService', context: 'searchResourceBookings' }) + } + logger.info({ component: 'ResourceBookingService', context: 'searchResourceBookings', message: 'fallback to DB query' }) + const filter = { + [Op.and]: [{ deletedAt: null }] + } + _.each(_.pick(criteria, ['status', 'startDate', 'endDate', 'rateType']), (value, key) => { + filter[Op.and].push({ [key]: value }) + }) + const resourceBookings = await ResourceBooking.findAll({ + where: filter, + attributes: { + exclude: ['deletedAt'] + }, + offset: ((page - 1) * perPage), + limit: perPage, + order: [[criteria.sortBy, criteria.sortOrder]] + }) return { - total: body.hits.total.value, + fromDb: true, + total: resourceBookings.length, page, perPage, - result: _.map(body.hits.hits, (hit) => { - const obj = _.cloneDeep(hit._source) - obj.id = hit._id - return obj - }) + result: _.map(resourceBookings, resourceBooking => helper.clearObject(resourceBooking.dataValues)) } } diff --git a/test/unit/JobCandidateService.test.js b/test/unit/JobCandidateService.test.js index c2860fe8..bfa7b9c2 100644 --- a/test/unit/JobCandidateService.test.js +++ b/test/unit/JobCandidateService.test.js @@ -14,6 +14,7 @@ const { const helper = require('../../src/common/helper') const esClient = helper.getESClient() +const busApiClient = helper.getBusApiClient() const JobCandidate = models.JobCandidate const Job = models.Job @@ -23,6 +24,7 @@ describe('jobCandidate service test', () => { let userId let stubIsConnectMember let stubGetUserId + let stubPostEvent beforeEach(() => { isConnectMember = true stubIsConnectMember = sinon.stub(helper, 'isConnectMember').callsFake(() => { @@ -33,6 +35,7 @@ describe('jobCandidate service test', () => { stubGetUserId = sinon.stub(helper, 'getUserId').callsFake(() => { return userId }) + stubPostEvent = sinon.stub(busApiClient, 'postEvent').callsFake(async 
() => {}) }) afterEach(() => { @@ -40,11 +43,6 @@ describe('jobCandidate service test', () => { }) describe('create job candidate test', () => { - let stubESCreate - beforeEach(() => { - stubESCreate = sinon.stub(esClient, 'create').callsFake(async () => {}) - }) - it('create job candidate with booking manager success ', async () => { const jobCandidateRes = _.cloneDeep(jobCandidateResponseBody) const stubDBCreate = sinon.stub(JobCandidate, 'create').callsFake(() => { @@ -54,7 +52,7 @@ describe('jobCandidate service test', () => { const entity = await service.createJobCandidate(bookingManagerUser, jobCandidateRequestBody) expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubDBCreate.calledOnce).to.be.true - expect(stubESCreate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -66,7 +64,7 @@ describe('jobCandidate service test', () => { const entity = await service.createJobCandidate(connectUser, jobCandidateRequestBody) expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubDBCreate.calledOnce).to.be.true - expect(stubESCreate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -84,35 +82,60 @@ describe('jobCandidate service test', () => { describe('get job candidate test', () => { it('get job candidate success', async () => { + const jobCandidateRes = _.cloneDeep(jobCandidateResponseBody) + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + return { + body: { + _id: jobCandidateRes.dataValues.id, + _source: _.omit(jobCandidateRes.dataValues, ['id']) + } + } + }) + const entity = await service.getJobCandidate(jobCandidateResponseBody.dataValues.id) + expect(entity).to.deep.eql(jobCandidateRes.dataValues) + expect(stub.calledOnce).to.be.true + }) + + it('get job candidate with candidate not exist success', async () => { + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + const err = new Error() + err.statusCode = 404 + throw err + }) + try { + await service.getJobCandidate(jobCandidateResponseBody.dataValues.id) + unexpected() + } catch (error) { + expect(error.message).to.equal(`id: ${jobCandidateResponseBody.dataValues.id} "JobCandidate" not found`) + expect(stub.calledOnce).to.be.true + } + }) + + it('get job candidate from db success', async () => { const jobCandidateRes = _.cloneDeep(jobCandidateResponseBody) const stubJobCandidateFindOne = sinon.stub(JobCandidate, 'findOne').callsFake(() => { return jobCandidateRes }) - const entity = await service.getJobCandidate(jobCandidateResponseBody.dataValues.id) + const entity = await service.getJobCandidate(jobCandidateResponseBody.dataValues.id, true) expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubJobCandidateFindOne.calledOnce).to.be.true }) - it('get job candidate with candidate not exist success', async () => { + it('get job candidate from db with candidate not exist success', async () => { const stubJobCandidateFindOne = sinon.stub(JobCandidate, 'findOne').callsFake(() => { return null }) try { - await service.getJobCandidate(jobCandidateResponseBody.dataValues.id) + await service.getJobCandidate(jobCandidateResponseBody.dataValues.id, true) unexpected() } catch (error) { - expect(error.message).to.equal(`JobCandidate with id: ${jobCandidateResponseBody.dataValues.id} doesn't exists.`) + expect(error.message).to.equal(`id: ${jobCandidateResponseBody.dataValues.id} "JobCandidate" doesn't exists.`) 
expect(stubJobCandidateFindOne.calledOnce).to.be.true } }) }) describe('fully update job candidate test', () => { - let stubESUpdate - beforeEach(() => { - stubESUpdate = sinon.stub(esClient, 'update').callsFake(() => {}) - }) - it('fully update job candidate test with booking manager success', async () => { const jobCandidateRes = _.cloneDeep(jobCandidateResponseBody) const stubJobCandidateFindOne = sinon.stub(JobCandidate, 'findOne').callsFake(() => { @@ -129,7 +152,7 @@ describe('jobCandidate service test', () => { expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubJobCandidateFindOne.calledOnce).to.be.true expect(stubJobFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('fully update job candidate test with connect user success', async () => { @@ -148,7 +171,7 @@ describe('jobCandidate service test', () => { expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubJobCandidateFindOne.calledOnce).to.be.true expect(stubJobFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true }) @@ -169,7 +192,7 @@ describe('jobCandidate service test', () => { expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubJobCandidateFindOne.calledOnce).to.be.true expect(stubJobFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('fully update job candidate test with topcoder user failed', async () => { @@ -197,11 +220,6 @@ describe('jobCandidate service test', () => { }) describe('partially update job candidate test', () => { - let stubESUpdate - beforeEach(() => { - stubESUpdate = sinon.stub(esClient, 'update').callsFake(() => {}) - }) - it('partially update job candidate test with booking manager success', async () => { const jobCandidateRes = _.cloneDeep(jobCandidateResponseBody) const stubJobCandidateFindOne = sinon.stub(JobCandidate, 'findOne').callsFake(() => { @@ -218,7 +236,7 @@ describe('jobCandidate service test', () => { expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubJobCandidateFindOne.calledOnce).to.be.true expect(stubJobFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('partially update job candidate test with connect user success', async () => { @@ -237,7 +255,7 @@ describe('jobCandidate service test', () => { expect(entity).to.deep.eql(jobCandidateRes.dataValues) expect(stubJobCandidateFindOne.calledOnce).to.be.true expect(stubJobFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true }) @@ -278,7 +296,7 @@ describe('jobCandidate service test', () => { await service.partiallyUpdateJobCandidate(bookingManagerUser, jobCandidateResponseBody.dataValues.id, partiallyUpdateJobCandidateRequestBody) expect(stubJobCandidateFindOne.calledOnce).to.be.true expect(stubJobFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) }) @@ -290,10 +308,9 @@ describe('jobCandidate service test', () => { update: () => { return null } } }) - const stubESDelete = sinon.stub(esClient, 'delete').callsFake(() => {}) await service.deleteJobCandidate(bookingManagerUser, jobCandidateResponseBody.dataValues.id) 
expect(stubJobCandidateFindOne.calledOnce).to.be.true - expect(stubESDelete.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('delete job candidate test with connect user success', async () => { @@ -357,5 +374,19 @@ describe('jobCandidate service test', () => { expect(entity.result[0]).to.deep.eql(jobCandidateResponseBody.dataValues) expect(stub.calledOnce).to.be.true }) + + it('search job candidates success when es search fails', async () => { + const stubESSearch = sinon.stub(esClient, 'search').callsFake(() => { + throw new Error('dedicated es failure') + }) + + const stubDBSearch = sinon.stub(JobCandidate, 'findAll').callsFake(() => { + return [jobCandidateResponseBody] + }) + const entity = await service.searchJobCandidates({ sortBy: 'id', sortOrder: 'asc', page: 1, perPage: 1, jobId: '36762910-4efa-4db4-9b2a-c9ab54c232ed' }) + expect(entity.result[0]).to.deep.eql(jobCandidateResponseBody.dataValues) + expect(stubESSearch.calledOnce).to.be.true + expect(stubDBSearch.calledOnce).to.be.true + }) }) }) diff --git a/test/unit/JobService.test.js b/test/unit/JobService.test.js index 06bf1932..0f0d7ef2 100644 --- a/test/unit/JobService.test.js +++ b/test/unit/JobService.test.js @@ -15,6 +15,7 @@ const helper = require('../../src/common/helper') const errors = require('../../src/common/errors') const esClient = helper.getESClient() +const busApiClient = helper.getBusApiClient() const Job = models.Job @@ -23,6 +24,7 @@ describe('job service test', () => { let userId let stubIsConnectMember let stubGetUserId + let stubPostEvent beforeEach(() => { isConnectMember = true stubIsConnectMember = sinon.stub(helper, 'isConnectMember').callsFake(() => { @@ -33,6 +35,7 @@ describe('job service test', () => { stubGetUserId = sinon.stub(helper, 'getUserId').callsFake(() => { return userId }) + stubPostEvent = sinon.stub(busApiClient, 'postEvent').callsFake(async () => {}) }) afterEach(() => { @@ -40,11 +43,6 @@ describe('job service test', () => { }) describe('create job test', () => { - let stubESCreate - beforeEach(() => { - stubESCreate = sinon.stub(esClient, 'create').callsFake(async () => {}) - }) - it('create job with booking manager user success ', async () => { const stubDBCreate = sinon.stub(Job, 'create').callsFake(() => { return _.cloneDeep(jobResponseBody) @@ -53,7 +51,7 @@ describe('job service test', () => { const entity = await service.createJob(bookingManagerUser, jobRequestBody) expect(entity).to.deep.eql(jobResponseBody.dataValues) expect(stubDBCreate.calledOnce).to.be.true - expect(stubESCreate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -64,7 +62,7 @@ describe('job service test', () => { const entity = await service.createJob(connectUser, jobRequestBody) expect(entity).to.deep.eql(jobResponseBody.dataValues) expect(stubDBCreate.calledOnce).to.be.true - expect(stubESCreate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -99,34 +97,75 @@ describe('job service test', () => { describe('get job test', () => { it('get job success', async () => { const jobResBody = _.cloneDeep(jobResponseBody) - const stub = sinon.stub(Job, 'findOne').callsFake(() => { - return jobResBody + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + return { + body: { + _id: jobResponseBody.dataValues.id, + _source: _.omit(jobResponseBody.dataValues, ['id']) + } + } + }) + const 
stubSearchCandidates = sinon.stub(esClient, 'search').callsFake(() => { + return { + body: { + hits: { + total: { + value: 1 + }, + hits: [{ + _id: jobResponseBody.dataValues.candidates[0].id, + _source: _.omit(jobResponseBody.dataValues.candidates[0], ['id']) + }] + } + } + } }) const entity = await service.getJob(jobResponseBody.dataValues.id) expect(entity).to.deep.eql(jobResBody.dataValues) expect(stub.calledOnce).to.be.true + expect(stubSearchCandidates.calledOnce).to.be.true }) it('get job with job not exist failed', async () => { + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + const err = new Error() + err.statusCode = 404 + throw err + }) + try { + await service.getJob(jobResponseBody.dataValues.id) + unexpected() + } catch (error) { + expect(error.message).to.equal(`id: ${jobResponseBody.dataValues.id} "Job" not found`) + expect(stub.calledOnce).to.be.true + } + }) + + it('get job from db success', async () => { + const jobResBody = _.cloneDeep(jobResponseBody) + const stub = sinon.stub(Job, 'findOne').callsFake(() => { + return jobResBody + }) + const entity = await service.getJob(jobResponseBody.dataValues.id, true) + expect(entity).to.deep.eql(jobResBody.dataValues) + expect(stub.calledOnce).to.be.true + }) + + it('get job from db with job not exist failed', async () => { const stub = sinon.stub(Job, 'findOne').callsFake(() => { return null }) try { - await service.getJob(jobResponseBody.dataValues.id) + await service.getJob(jobResponseBody.dataValues.id, true) unexpected() } catch (error) { - expect(error.message).to.equal(`Job with id: ${jobResponseBody.dataValues.id} doesn't exists.`) + expect(error.message).to.equal(`id: ${jobResponseBody.dataValues.id} "Job" doesn't exists.`) expect(stub.calledOnce).to.be.true } }) }) describe('fully update job test', () => { - let stubESUpdate - beforeEach(() => { - stubESUpdate = sinon.stub(esClient, 'update').callsFake(() => {}) - }) - it('fully update job test with booking manager success', async () => { const jobResBody = _.cloneDeep(jobResponseBody) const stub = sinon.stub(Job, 'findOne').onFirstCall().callsFake(() => { @@ -145,7 +184,7 @@ describe('job service test', () => { const entity = await service.fullyUpdateJob(bookingManagerUser, jobResponseBody.dataValues.id, fullyUpdateJobRequestBody) expect(entity).to.deep.eql(jobResBody.dataValues) expect(stub.calledTwice).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -167,7 +206,7 @@ describe('job service test', () => { const entity = await service.fullyUpdateJob(connectUser, jobResponseBody.dataValues.id, fullyUpdateJobRequestBody) expect(entity).to.deep.eql(jobResBody.dataValues) expect(stub.calledTwice).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -192,11 +231,6 @@ describe('job service test', () => { }) describe('partially update job test', () => { - let stubESUpdate - beforeEach(() => { - stubESUpdate = sinon.stub(esClient, 'update').callsFake(() => {}) - }) - it('partially update job with booking manager success', async () => { const jobResBody = _.cloneDeep(jobResponseBody) const stub = sinon.stub(Job, 'findOne').onFirstCall().callsFake(() => { @@ -215,7 +249,7 @@ describe('job service test', () => { const entity = await service.partiallyUpdateJob(bookingManagerUser, jobResponseBody.dataValues.id, 
partiallyUpdateJobRequestBody) expect(entity).to.deep.eql(jobResBody.dataValues) expect(stub.calledTwice).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -236,7 +270,7 @@ describe('job service test', () => { const entity = await service.partiallyUpdateJob(connectUser, jobResponseBody.dataValues.id, partiallyUpdateJobRequestBody) expect(entity).to.deep.eql(jobResBody.dataValues) expect(stub.calledTwice).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -271,10 +305,9 @@ describe('job service test', () => { } } }) - const stubDelete = sinon.stub(esClient, 'delete').callsFake(async () => {}) await service.deleteJob(bookingManagerUser, jobResponseBody.dataValues.id) expect(stub.calledOnce).to.be.true - expect(stubDelete.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('delete job test with connect user failed', async () => { @@ -363,5 +396,20 @@ describe('job service test', () => { expect(entity.result[0]).to.deep.eql(jobResponseBody.dataValues) expect(stub.calledTwice).to.be.true }) + + it('search jobs success when es search fails', async () => { + const stubESSearch = sinon.stub(esClient, 'search').callsFake(() => { + throw new Error('dedicated es failure') + }) + + const stubDBSearch = sinon.stub(Job, 'findAll').callsFake(() => { + return [jobResponseBody] + }) + + const entity = await service.searchJobs({ sortBy: 'id', sortOrder: 'asc', page: 1, perPage: 1, skill: '56fdc405-eccc-4189-9e83-c78abf844f50', description: 'description 1', rateType: 'hourly' }) + expect(entity.result[0]).to.deep.eql(jobResponseBody.dataValues) + expect(stubESSearch.calledOnce).to.be.true + expect(stubDBSearch.calledOnce).to.be.true + }) }) }) diff --git a/test/unit/ResourceBookingService.test.js b/test/unit/ResourceBookingService.test.js index c662d26b..fba2415a 100644 --- a/test/unit/ResourceBookingService.test.js +++ b/test/unit/ResourceBookingService.test.js @@ -17,6 +17,7 @@ const { const helper = require('../../src/common/helper') const esClient = helper.getESClient() +const busApiClient = helper.getBusApiClient() const ResourceBooking = models.ResourceBooking @@ -25,6 +26,7 @@ describe('resourceBooking service test', () => { let userId let stubIsConnectMember let stubGetUserId + let stubPostEvent beforeEach(() => { isConnectMember = true stubIsConnectMember = sinon.stub(helper, 'isConnectMember').callsFake(() => { @@ -35,6 +37,7 @@ describe('resourceBooking service test', () => { stubGetUserId = sinon.stub(helper, 'getUserId').callsFake(() => { return userId }) + stubPostEvent = sinon.stub(busApiClient, 'postEvent').callsFake(async () => {}) }) afterEach(() => { @@ -42,11 +45,6 @@ describe('resourceBooking service test', () => { }) describe('create resource booking test', () => { - let stubESCreate - beforeEach(() => { - stubESCreate = sinon.stub(esClient, 'create').callsFake(async () => {}) - }) - it('create resource booking with booking manager success ', async () => { const stubDBCreate = sinon.stub(ResourceBooking, 'create').callsFake(() => { return resourceBookingResponseBody @@ -54,7 +52,7 @@ describe('resourceBooking service test', () => { const entity = await service.createResourceBooking(bookingManagerUser, resourceBookingRequestBody) expect(entity).to.deep.eql(resourceBookingResponseBody.dataValues) 
expect(stubDBCreate.calledOnce).to.be.true - expect(stubESCreate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('create resource booking with connect user success ', async () => { @@ -65,7 +63,7 @@ describe('resourceBooking service test', () => { const entity = await service.createResourceBooking(connectUser, resourceBookingRequestBody) expect(entity).to.deep.eql(resourceBookingResponseBody.dataValues) expect(stubDBCreate.calledOnce).to.be.true - expect(stubESCreate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true expect(stubGetUserId.calledOnce).to.be.true }) @@ -83,8 +81,13 @@ describe('resourceBooking service test', () => { describe('get resource booking test', () => { it('get resource booking with booking manager success', async () => { const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) - const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { - return resourceBookingRes + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + return { + body: { + _id: resourceBookingRes.dataValues.id, + _source: _.omit(resourceBookingRes.dataValues, ['id']) + } + } }) const entity = await service.getResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id) expect(entity).to.deep.eql(resourceBookingRes.dataValues) @@ -93,8 +96,13 @@ describe('resourceBooking service test', () => { it('get resource booking with connect user success', async () => { const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) - const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { - return resourceBookingRes + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + return { + body: { + _id: resourceBookingRes.dataValues.id, + _source: _.omit(resourceBookingRes.dataValues, ['id']) + } + } }) const entity = await service.getResourceBooking(connectUser, resourceBookingResponseBody.dataValues.id) @@ -106,8 +114,13 @@ describe('resourceBooking service test', () => { it('get resource booking with topcoder user success', async () => { isConnectMember = false const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) - const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { - return resourceBookingRes + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + return { + body: { + _id: resourceBookingRes.dataValues.id, + _source: _.omit(resourceBookingRes.dataValues, ['id']) + } + } }) const entity = await service.getResourceBooking(topCoderUser, resourceBookingResponseBody.dataValues.id) expect(entity).to.deep.eql(_.omit(resourceBookingRes.dataValues, ['customerRate'])) @@ -115,24 +128,66 @@ describe('resourceBooking service test', () => { }) it('get resource booking with resource booking not exist success', async () => { + const stub = sinon.stub(esClient, 'get').callsFake(async () => { + const err = new Error() + err.statusCode = 404 + throw err + }) + try { + await service.getResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id) + } catch (error) { + expect(error.message).to.equal(`id: ${resourceBookingResponseBody.dataValues.id} "ResourceBooking" not found`) + expect(stub.calledOnce).to.be.true + } + }) + + it('get resource booking from db with booking manager success', async () => { + const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) + const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { + return resourceBookingRes + }) + const entity = 
await service.getResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id, true) + expect(entity).to.deep.eql(resourceBookingRes.dataValues) + expect(stub.calledOnce).to.be.true + }) + + it('get resource booking from db with connect user success', async () => { + const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) + const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { + return resourceBookingRes + }) + + const entity = await service.getResourceBooking(connectUser, resourceBookingResponseBody.dataValues.id, true) + expect(entity).to.deep.eql(_.omit(resourceBookingRes.dataValues, ['memberRate'])) + expect(stub.calledOnce).to.be.true + expect(stubIsConnectMember.calledOnce).to.be.true + }) + + it('get resource booking from db with topcoder user success', async () => { + isConnectMember = false + const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) + const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { + return resourceBookingRes + }) + const entity = await service.getResourceBooking(topCoderUser, resourceBookingResponseBody.dataValues.id, true) + expect(entity).to.deep.eql(_.omit(resourceBookingRes.dataValues, ['customerRate'])) + expect(stub.calledOnce).to.be.true + }) + + it('get resource booking from db with resource booking not exist success', async () => { const stub = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { return null }) try { - await service.getResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id) + await service.getResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id, true) } catch (error) { - expect(error.message).to.equal(`ResourceBooking with id: ${resourceBookingResponseBody.dataValues.id} doesn't exists.`) + expect(error.message).to.equal(`id: ${resourceBookingResponseBody.dataValues.id} "ResourceBooking" doesn't exists.`) expect(stub.calledOnce).to.be.true } }) }) describe('fully update resource booking test', () => { - let stubESUpdate - beforeEach(() => { - stubESUpdate = sinon.stub(esClient, 'update').callsFake(() => {}) - }) - it('fully update resource booking test with booking manager success', async () => { const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) const stubResourceBookingFindOne = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { @@ -144,7 +199,7 @@ describe('resourceBooking service test', () => { const entity = await service.fullyUpdateResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id, fullyUpdateResourceBookingRequestBody) expect(entity).to.deep.eql(resourceBookingRes.dataValues) expect(stubResourceBookingFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('fully update resource booking test with connect user success', async () => { @@ -159,7 +214,7 @@ describe('resourceBooking service test', () => { const entity = await service.fullyUpdateResourceBooking(connectUser, resourceBookingResponseBody.dataValues.id, fullyUpdateResourceBookingRequestBody) expect(entity).to.deep.eql(resourceBookingRes.dataValues) expect(stubResourceBookingFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true }) @@ -183,11 +238,6 @@ describe('resourceBooking service test', () => { }) describe('partially update resource booking test', () => { - let stubESUpdate - beforeEach(() => { - 
stubESUpdate = sinon.stub(esClient, 'update').callsFake(() => {}) - }) - it('partially update resource booking test with booking manager success', async () => { const resourceBookingRes = _.cloneDeep(resourceBookingResponseBody) const stubResourceBookingFindOne = sinon.stub(ResourceBooking, 'findOne').callsFake(() => { @@ -199,7 +249,7 @@ describe('resourceBooking service test', () => { const entity = await service.partiallyUpdateResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id, partiallyUpdateResourceBookingRequestBody) expect(entity).to.deep.eql(resourceBookingRes.dataValues) expect(stubResourceBookingFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('partially update resource booking test with connect user success', async () => { @@ -214,7 +264,7 @@ describe('resourceBooking service test', () => { const entity = await service.partiallyUpdateResourceBooking(connectUser, resourceBookingResponseBody.dataValues.id, partiallyUpdateResourceBookingRequestBody) expect(entity).to.deep.eql(resourceBookingRes.dataValues) expect(stubResourceBookingFindOne.calledOnce).to.be.true - expect(stubESUpdate.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true expect(stubIsConnectMember.calledOnce).to.be.true }) @@ -245,10 +295,9 @@ describe('resourceBooking service test', () => { update: () => { return null } } }) - const stubES = sinon.stub(esClient, 'delete').callsFake(() => {}) await service.deleteResourceBooking(bookingManagerUser, resourceBookingResponseBody.dataValues.id) expect(stubResourceBookingFindOne.calledOnce).to.be.true - expect(stubES.calledOnce).to.be.true + expect(stubPostEvent.calledOnce).to.be.true }) it('delete resource booking test with connect user failed', async () => { @@ -312,5 +361,19 @@ describe('resourceBooking service test', () => { expect(entity.result[0]).to.deep.eql(resourceBookingResponseBody.dataValues) expect(stub.calledOnce).to.be.true }) + + it('search resource booking success when es search fails', async () => { + const stubESSearch = sinon.stub(esClient, 'search').callsFake(() => { + throw new Error('dedicated es failure') + }) + + const stubDBSearch = sinon.stub(ResourceBooking, 'findAll').callsFake(() => { + return [resourceBookingResponseBody] + }) + const entity = await service.searchResourceBookings({ sortBy: 'id', sortOrder: 'asc', page: 1, perPage: 1, status: 'sourcing' }) + expect(entity.result[0]).to.deep.eql(resourceBookingResponseBody.dataValues) + expect(stubESSearch.calledOnce).to.be.true + expect(stubDBSearch.calledOnce).to.be.true + }) }) }) diff --git a/test/unit/helper.test.js b/test/unit/helper.test.js index 7f0fb777..93834f0d 100644 --- a/test/unit/helper.test.js +++ b/test/unit/helper.test.js @@ -199,7 +199,7 @@ describe('helper test', () => { expect(res).to.equal(id) }) - it('getUserId return id', async () => { + it('getUserId catch not found', async () => { let i = 0 sinon.stub(request, 'get').callsFake(() => { return { @@ -222,7 +222,7 @@ describe('helper test', () => { try { await helper.getUserId(44532) } catch (err) { - expect(err.message).to.equal('user id not found') + expect(err.message).to.equal('userId: 44532 "user" not found') } }) }) From acc27970c910fed0150c4b644d959c806a510603 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 16 Nov 2020 08:36:20 +0530 Subject: [PATCH 2/3] Update config.yml --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) 
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 2b41d1d2..26a14ebc 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -68,6 +68,7 @@ workflows:
             branches:
               only:
                 - dev
+                - feature/es-segregation
 
       # Production builds are exectuted only on tagged commits to the
       # master branch.

From 8bbda8513065a465c7e9abf59f78cb68b1630fe7 Mon Sep 17 00:00:00 2001
From: Nkumar
Date: Mon, 16 Nov 2020 08:40:22 +0530
Subject: [PATCH 3/3] permission build.sh

---
 build.sh | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100644 => 100755 build.sh

diff --git a/build.sh b/build.sh
old mode 100644
new mode 100755
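
The search endpoints changed in this patch (`searchJobs`, `searchJobCandidates`, `searchResourceBookings`) all follow the same shape: query Elasticsearch first (mapping the API-level `sortBy` of `id` onto the ES `_id` field), and fall back to a Sequelize query that excludes soft-deleted rows when Elasticsearch fails; the get-by-id endpoints apply the same idea with `esClient.get` plus `isDocumentMissingException`. The sketch below condenses the search variant into one generic helper to make the pattern easier to see. The generic names (`searchWithFallback`, `indexName`, `Model`, `filterKeys`) are illustrative only — each service in the diff inlines this logic with its own index, model and filterable fields.

``` js
// Illustrative sketch of the ES-first / DB-fallback search pattern used above.
// Not the patch's literal code: names and wiring are simplified assumptions.
const _ = require('lodash')
const { Op } = require('sequelize')
const helper = require('../common/helper')
const logger = require('../common/logger')

const esClient = helper.getESClient()

async function searchWithFallback ({ indexName, Model, filterKeys }, criteria) {
  const page = criteria.page > 0 ? criteria.page : 1
  const perPage = criteria.perPage > 0 ? criteria.perPage : 20
  const sortBy = criteria.sortBy || 'id'
  const sortOrder = criteria.sortOrder || 'desc'
  try {
    // build one term clause per supported filter field
    const must = _.map(_.pick(criteria, filterKeys), (value, key) => ({ term: { [key]: { value } } }))
    const { body } = await esClient.search({
      index: indexName,
      body: {
        query: { bool: { must } },
        from: (page - 1) * perPage,
        size: perPage,
        // the API sorts by "id", which ES stores as the document _id
        sort: [{ [sortBy === 'id' ? '_id' : sortBy]: { order: sortOrder } }]
      }
    })
    return {
      total: body.hits.total.value,
      page,
      perPage,
      result: _.map(body.hits.hits, hit => ({ id: hit._id, ...hit._source }))
    }
  } catch (err) {
    // any ES failure falls through to the PostgreSQL query below
    logger.logFullError(err, { component: 'searchWithFallback', context: indexName })
  }
  const where = { [Op.and]: [{ deletedAt: null }, _.pick(criteria, filterKeys)] }
  const rows = await Model.findAll({
    where,
    attributes: { exclude: ['deletedAt'] },
    offset: (page - 1) * perPage,
    limit: perPage,
    order: [[sortBy, sortOrder]]
  })
  return { fromDb: true, total: rows.length, page, perPage, result: _.map(rows, r => helper.clearObject(r.dataValues)) }
}
```

The `fromDb: true` flag on the fallback result mirrors the services above, letting callers and the unit tests tell which datastore produced the answer.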
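On the write side, the patch drops the synchronous `esClient.create`/`update`/`delete` calls and instead publishes a Kafka message through the Topcoder bus API (`helper.postEvent` with the `TAAS_*_TOPIC` values listed in the README), leaving index maintenance to whatever service consumes those topics. Below is a minimal sketch of that hand-off, assuming the standard bus API message envelope (topic, originator, timestamp, mime-type, payload); the real implementation lives in `src/common/helper.js` and may differ in detail, so treat this as an illustration rather than a copy.

``` js
// Minimal sketch of posting an entity event to the Topcoder bus API.
// The envelope fields below are assumed from the usual bus API shape.
const config = require('config')
const helper = require('../common/helper')

async function postEvent (topic, payload) {
  const client = helper.getBusApiClient()
  const message = {
    topic,
    originator: config.KAFKA_MESSAGE_ORIGINATOR,
    timestamp: new Date().toISOString(),
    'mime-type': 'application/json',
    payload
  }
  await client.postEvent(message)
}

// e.g. after Job.create(job) the service would emit:
//   await postEvent(config.TAAS_JOB_CREATE_TOPIC, job)
// and a downstream processor (presumably) consumes the topic and updates
// the Elasticsearch index, which is why the tests now stub busApiClient.postEvent
// instead of esClient.create/update/delete.
```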