diff --git a/.eslintignore b/.eslintignore
index 87d8ad2d..66509f73 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,7 +1,7 @@
config/local.js
config/mock.local.js
config/m2m.local.js
-local/seed/
+scripts/import-from-api/
node_modules
dist
.ebextensions
diff --git a/README.md b/README.md
index 46b8fe7a..1d270350 100644
--- a/README.md
+++ b/README.md
@@ -2,31 +2,31 @@
Microservice to manage CRUD operations for all things Projects.
-**Note : Steps mentioned below are best to our capability as guide for local deployment, however, we expect from contributor, being a developer, to resolve run-time issues (e.g. OS and node version issues etc), if any.**
+🤓**Steps mentioned below are, to the best of our ability, a guide for local deployment. However, we expect the contributor, being a developer, to resolve run-time issues (e.g. OS and node version issues, etc.), if any.**
- [Topcoder Projects Service](#topcoder-projects-service)
- [Local Development](#local-development)
- [Requirements](#requirements)
- [Steps to run locally](#steps-to-run-locally)
- - [Import and Export data](#import-and-export-data)
- - [📤 Export data](#%f0%9f%93%a4-export-data)
- - [📥 Import data](#%f0%9f%93%a5-import-data)
- - [Run Connect App with Project Service locally](#run-connect-app-with-project-service-locally)
- - [NPM Commands](#npm-commands)
- - [Import metadata from api.topcoder-dev.com (deprecated)](#import-metadata-from-apitopcoder-devcom-deprecated)
- - [Test](#test)
- - [JWT Authentication](#jwt-authentication)
- - [Deploying with docker (might need updates)](#deploying-with-docker-might-need-updates)
+ - [Run Connect App with Project Service locally](#run-connect-app-with-project-service-locally)
+ - [Import and Export data](#import-and-export-data)
+ - [📤 Export data](#%f0%9f%93%a4-export-data)
+ - [📥 Import data](#%f0%9f%93%a5-import-data)
+ - [Import metadata from api.topcoder-dev.com (deprecated)](#import-metadata-from-apitopcoder-devcom-deprecated)
+ - [Run via Docker](#run-via-docker)
+ - [NPM Commands](#npm-commands)
- [Kafka commands](#kafka-commands)
- [Create Topic](#create-topic)
- [List Topics](#list-topics)
- [Watch Topic](#watch-topic)
- [Post Message to Topic (from stdin)](#post-message-to-topic-from-stdin)
- - [References](#references)
+ - [Test](#test)
+ - [JWT Authentication](#jwt-authentication)
+ - [Documentation](#documentation)
## Local Development
-Local setup should work good on **Linux** and **macOS**. But **Windows** is not supported at the moment.
+Local setup should work well on **Linux**, **macOS** and **Windows**.
### Requirements
@@ -43,38 +43,44 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
2. ⚙ Local config
- 1. In the `tc-project-service` root directory create `.env` file with the environment variables _(values should be shared with you on the forum)_:
- ```
- AUTH0_CLIENT_ID=...
- AUTH0_CLIENT_SECRET=...
- AUTH0_URL=...
- AUTH0_AUDIENCE=...
- AUTH0_PROXY_SERVER_URL=...
+ 1. In the `tc-project-service` root directory create `.env` file with the next environment variables. Values for **Auth0 config** should be shared with you on the forum.
+ ```bash
+ # Auth0 config
+ AUTH0_CLIENT_ID=
+ AUTH0_CLIENT_SECRET=
+ AUTH0_URL=
+ AUTH0_AUDIENCE=
+ AUTH0_PROXY_SERVER_URL=
+
+ # Locally deployed services (via docker-compose)
+ PROJECTS_ES_URL=dockerhost:9200
+ DB_MASTER_URL=postgres://coder:mysecretpassword@dockerhost:5432/projectsdb
+ RABBITMQ_URL=amqp://dockerhost:5672
+ BUS_API_URL=http://dockerhost:8002/v5
+
+ # Locally we usually run in Development mode
+ NODE_ENV=development
```
- Values from this file would be automatically used by `docker-compose` , command `npm run start:dev` and some other command during local development.
-
- 2. Copy config file `config/m2m.local.js` into `config/local.js`:
- ```bash
- cp config/m2m.local.js config/local.js
- ```
+ - Values from this file would be automatically used by many `npm` commands.
+ - ⚠️ Never commit this file or its copy to the repository!
- 3. Set `dockerhost` to point the IP address of Docker. Docker IP address depends on your system. For example if docker is run on IP `127.0.0.1` add a the next line to your `/etc/hosts` file:
+    1. Set `dockerhost` to point to the IP address of Docker. Docker IP address depends on your system. For example if docker is run on IP `127.0.0.1` add the next line to your `/etc/hosts` file:
```
127.0.0.1 dockerhost
```
- Alternatively, you may update `config/local.js` and replace `dockerhost` with your docker IP address.
+ Alternatively, you may update `.env` file and replace `dockerhost` with your docker IP address.
-3. 🚢 Start docker-compose with services which are required to start Project Service locally
+1. 🚢 Start docker-compose with services which are required to start Project Service locally
```bash
- npm run local:docker:up
+ npm run services:up
```
Wait until all containers are fully started. As a good indicator, wait until `project-processor-es` successfully started by viewing its logs:
```bash
- npm run local:docker:logs -- -f project-processor-es
+ npm run services:logs -- -f project-processor-es
```
Click to see a good logs example
@@ -121,7 +127,7 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
- To view the logs from any container inside docker-compose use the following command, replacing `SERVICE_NAME` with the corresponding value under the **Name** column in the above table:
```bash
- docker-compose -f local/full/docker-compose.yml logs -f SERVICE_NAME
+      npm run services:logs -- -f SERVICE_NAME
```
- If you want to modify the code of any of the services which are run inside this docker-compose file, you can stop such service inside docker-compose by command `docker-compose -f local/full/docker-compose.yml stop -f ` and run the service separately, following its README file.
@@ -135,7 +141,7 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
Run, in the project root folder:
```bash
- docker-compose -f local/docker-compose.yml up -d
+ docker-compose -f local/light/docker-compose.yml up -d
```
This docker-compose file starts the next services:
@@ -152,7 +158,7 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
*NOTE: In production these dependencies / services are hosted & managed outside Project Service.*
-4. ♻ Init DB, ES and demo data (it clears any existent data)
+2. ♻ Init DB, ES and demo data (it clears any existent data)
```bash
npm run local:init
@@ -163,7 +169,7 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
- create Elasticsearch indexes (remove if exists)
- import demo data from `data/demo-data.json`
-5. 🚀 Start Project Service
+3. 🚀 Start Project Service
```bash
npm run start:dev
@@ -172,7 +178,7 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
Runs the Project Service using nodemon, so it would be restarted after any of the files is updated.
The project service will be served on `http://localhost:8001`.
-6. *(Optional)* Start Project Service Kafka Consumer
+4. *(Optional)* Start Project Service Kafka Consumer
*Run this only if you want to test or modify logic of `lastActivityAt` or `lastActivityBy`.*
@@ -182,9 +188,32 @@ Local setup should work good on **Linux** and **macOS**. But **Windows** is not
npm run startKafkaConsumers:dev
```
-### Import and Export data
+## Run Connect App with Project Service locally
+
+To be able to run [Connect App](https://github.com/appirio-tech/connect-app) with the local setup of Project Service we have to do two things:
+1. Configure Connect App to use locally deployed Project service inside `connect-app/config/constants/dev.js` set
+
+ ```js
+   PROJECTS_API_URL: 'http://localhost:8001',
+   TC_NOTIFICATION_URL: 'http://localhost:4000/v5/notifications', // if tc-notification-api has been locally deployed
+ ```
+
+1. Bypass token validation in Project Service.
+
+ In `tc-project-service/node_modules/tc-core-library-js/lib/auth/verifier.js` add this to line 23:
+ ```js
+ callback(undefined, decodedToken.payload);
+ return;
+ ```
+ Connect App when making requests to the Project Service uses token retrieved from the Topcoder service deployed online. Project Service validates the token. For this purpose Project Service have to know the `secret` which has been used to generate the token. But we don't know the `secret` which is used by Topcoder for both DEV and PROD environment. So to bypass token validation we change these lines in the auth library.
-#### 📤 Export data
+ *NOTE: this change only let us bypass validation during local development process*.
+
+2. Restart both Connect App and Project Service if they were running.
+
+## Import and Export data
+
+### 📤 Export data
To export data to the default file `data/demo-data.json`, run:
```bash
@@ -199,7 +228,7 @@ npm run data:export -- --file path/to-file.json
- List of models that will be exported are defined in `scripts/data/dataModels.js`. You can add new models to this list, but make sure that new models are added to list such that each model comes after its dependencies.
-#### 📥 Import data
+### 📥 Import data
*During importing, data would be first imported to the database, and after from the database it would be indexed to the Elasticsearch index.*
@@ -220,96 +249,62 @@ npm run data:import -- --file path/to-file.json
- List of models that will be imported are defined in `scripts/data/dataModels.js`. You can add new models to this list, but make sure that new models are added to list such that each model comes after its dependencies.
-### Run Connect App with Project Service locally
+## Import metadata from api.topcoder-dev.com (deprecated)
-To be able to run [Connect App](https://github.com/appirio-tech/connect-app) with the local setup of Project Service we have to do two things:
-1. Configure Connect App to use locally deployed Project service inside `connect-app/config/constants/dev.js` set
+```bash
+CONNECT_USER_TOKEN= npm run import-from-api
+```
+To retrieve data from DEV env we have to provide a valid user token (`CONNECT_USER_TOKEN`). You may login to http://connect.topcoder-dev.com and find the Bearer token in the request headers using browser dev tools.
- ```js
- PROJECTS_API_URL: 'http://localhost:8001'
- TC_NOTIFICATION_URL: 'http://localhost:4000/v5/notifications' # if tc-notfication-api has been locally deployed
- ```
+This command for importing data uses the API to create demo data, which has a few peculiarities:
+- data in DB would be for sure created
+- data in ElasticSearch Index (ES) would be only created if services [project-processor-es](https://github.com/topcoder-platform/project-processor-es) and [tc-bus-api](https://github.com/topcoder-platform/tc-bus-api) are also started locally. If you don't start them, then imported data wouldn't be indexed in ES, and would be only added to DB. You may start them locally separately, or better use `local/full/docker-compose.yml` as described [next section](#local-deployment-with-other-topcoder-services) which would start them automatically.
+  - **NOTE** During data importing a lot of records have to be indexed in ES, so you have to wait about 5-10 minutes after `npm run import-from-api` is finished until imported data is indexed in ES. You may watch logs of `project-processor-es` to see if it's done or not.
-1. Bypass token validation in Project Service.
+## Run via Docker
- In `tc-project-service/node_modules/tc-core-library-js/lib/auth/verifier.js` add this to line 23:
- ```js
- callback(undefined, decodedToken.payload);
- return;
+1. Build image
+ ```bash
+ docker build -t tc_projects_services .
```
- Connect App when making requests to the Project Service uses token retrieved from the Topcoder service deployed online. Project Service validates the token. For this purpose Project Service have to know the `secret` which has been used to generate the token. But we don't know the `secret` which is used by Topcoder for both DEV and PROD environment. So to bypass token validation we change these lines in the auth library.
- *NOTE: this change only let us bypass validation during local development process*.
+2. Follow all the steps 1 - 4 from [steps to run locally](#steps-to-run-locally). But on the step 2 replace `dockerhost` with the IP address of the host machine from inside the docker container, see [stackoverflow](https://stackoverflow.com/questions/22944631/how-to-get-the-ip-address-of-the-docker-host-from-inside-a-docker-container).
+ - For **macOS** and **Windows** on Docker 18.03+ this should work: replace `dockerhost` with `host.docker.internal`.
-2. Restart both Connect App and Project Service if they were running.
+3. Start Project Service via Docker by:
+ ```bash
+ docker run -p 8001:3000 -i -t --env-file .env tc_projects_services start
+ ```
+ The project service will be served on http://localhost:8001.
-### NPM Commands
+## NPM Commands
| Command | Description |
|--------------------|--|
| `npm run lint` | Check for for lint errors. |
| `npm run lint:fix` | Check for for lint errors and fix error automatically when possible. |
| `npm run build` | Build source code for production run into `dist` folder. |
-| `npm run sync:all` | Recreate Database and Elasticsearch indexes (removes any existent data). |
-| `npm run sync:db` | Recreate Database schemas (removes any existent data). |
-| `npm run sync:es` | Recreate Elasticsearch indexes (removes any existent data). |
| `npm run start` | Start app in the production mode from prebuilt `dist` folder. |
| `npm run start:dev` | Start app in the development mode using `nodemon`. |
| `npm run startKafkaConsumers` | Start Kafka consumer app in production mode from prebuilt `dist` folder. |
-| `npm run startKafkaConsumers` | Start Kafka consumer app in the development mode using `nodemon`. |
+| `npm run startKafkaConsumers:dev` | Start Kafka consumer app in the development mode using `nodemon`. |
| `npm run test` | Run tests. |
| `npm run test:watch` | Run tests and re-run them on changes (not useful now as it re-runs all the test). |
-| `npm run demo-data` | Import Metadata from DEV environment, see [docs](#import-metadata-from-apitopcoder-devcom-deprecated). |
+| `npm run reset:all` | Recreate Database and Elasticsearch indexes (removes any existent data). |
+| `npm run reset:db` | Recreate Database schemas (removes any existent data). |
+| `npm run reset:es` | Recreate Elasticsearch indexes (removes any existent data). |
+| `npm run import-from-api` | Import Metadata from DEV environment, see [docs](#import-metadata-from-apitopcoder-devcom-deprecated). |
| `npm run es-db-compare` | Run helper script to compare data in Database and Elasticsearch indexes, see [docs](./scripts/es-db-compare/README.md). |
| `npm run data:export` | Export data from Database to file, see [docs](#📤-export-data) |
| `npm run data:import` | Import data from file to Database and index it to Elasticsearch, see [docs](#📥-import-data) |
-| `npm run local:docker:up` | Start docker-compose for local development. |
-| `npm run local:docker:down` | Stop docker-compose for local development. |
-| `npm run local:docker:logs -- -f ` | View logs of some service inside docker-compose. |
-| `npm run local:init` | Recreate Database and Elasticsearch indexes and populate demo data for local development (removes any existent data). |
+| `npm run services:up` | Start services via docker-compose for local development. |
+| `npm run services:down` | Stop services via docker-compose for local development. |
+| `npm run services:logs -- -f ` | View logs of some service inside docker-compose. |
+| `npm run local:init` | Recreate Database and Elasticsearch indexes and populate demo data for local development (removes any existent data). |
+| `npm run babel-node-script -- ` | Helper command which is used by other commands to run node scripts using `babel-node` and `dotenv` so variables from `.env` file are automatically applied. |
| `npm run generate:doc:permissions` | Generate [permissions.html](docs/permissions.html) which later can be viewed by [link](https://htmlpreview.github.io/?https://github.com/topcoder-platform/tc-project-service/blob/develop/docs/permissions.html). |
| `npm run generate:doc:permissions:dev` | Generate [permissions.html](docs/permissions.html) on any changes (useful during development). |
-
-### Import metadata from api.topcoder-dev.com (deprecated)
-
-```bash
-CONNECT_USER_TOKEN= npm run demo-data
-```
-To retrieve data from DEV env we have to provide a valid user token (`CONNECT_USER_TOKEN`). You may login to http://connect.topcoder-dev.com and find the Bearer token in the request headers using browser dev tools.
-
-This command for importing data uses API to create demo data. Which has a few pecularities:
-- data in DB would be for sure created
-- data in ElasticSearch Index (ES) would be only created if services [project-processor-es](https://github.com/topcoder-platform/project-processor-es) and [tc-bus-api](https://github.com/topcoder-platform/tc-bus-api) are also started locally. If you don't start them, then imported data wouldn't be indexed in ES, and would be only added to DB. You may start them locally separately, or better use `local/full/docker-compose.yml` as described [next section](#local-deployment-with-other-topcoder-services) which would start them automatically.
- - **NOTE** During data importing a lot of records has to be indexed in ES, so you have to wait about 5-10 minutes after `npm run demo-data` is finished until imported data is indexed in ES. You may watch logs of `project-processor-es` to see if its done or no.
-
-## Test
-```bash
-npm run test
-```
-Tests are being executed with the `NODE_ENV` environment variable has a value `test` and `config/test.js` configuration is loaded.
-
-Each of the individual modules/services are unit tested.
-
-### JWT Authentication
-Authentication is handled via Authorization (Bearer) token header field. Token is a JWT token. Here is a sample token that is valid for a very long time for a user with administrator role.
-```
-eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJyb2xlcyI6WyJhZG1pbmlzdHJhdG9yIl0sImlzcyI6Imh0dHBzOi8vYXBpLnRvcGNvZGVyLWRldi5jb20iLCJoYW5kbGUiOiJwc2hhaDEiLCJleHAiOjI0NjI0OTQ2MTgsInVzZXJJZCI6IjQwMTM1OTc4IiwiaWF0IjoxNDYyNDk0MDE4LCJlbWFpbCI6InBzaGFoMUB0ZXN0LmNvbSIsImp0aSI6ImY0ZTFhNTE0LTg5ODAtNDY0MC04ZWM1LWUzNmUzMWE3ZTg0OSJ9.XuNN7tpMOXvBG1QwWRQROj7NfuUbqhkjwn39Vy4tR5I
-```
-It's been signed with the secret 'secret'. This secret should match your entry in config/local.js. You can generate your own token using https://jwt.io
-
-## Deploying with docker (might need updates)
-
-**NOTE: This part of README may contain inconsistencies and requires update. Don't follow it unless you know how to properly make configuration for these steps. It's not needed for regular development process.**
-
-Build image:
-`docker build -t tc_projects_services .`
-Run image:
-`docker run -p 3000:3000 -i -t -e DB_HOST=172.17.0.1 tc_projects_services`
-You may replace 172.17.0.1 with your docker0 IP.
-
-You can paste **swagger.yaml** to [swagger editor](http://editor.swagger.io/) or import **postman.json** and **postman_environment.json** to verify endpoints.
-
## Kafka commands
If you've used **Full** `docker-compose` with the file `local/full/docker-compose.yml` during local setup to spawn kafka & zookeeper, you can use the following commands to manipulate kafka topics and messages:
@@ -341,6 +336,23 @@ docker exec -it tc-projects-kafka /opt/kafka/bin/kafka-console-producer.sh --bro
- Enter or copy/paste the message into the console after starting this command.
-## References
+## Test
+```bash
+npm run test
+```
+Tests are being executed with the `NODE_ENV` environment variable has a value `test` and `config/test.js` configuration is loaded.
+
+Each of the individual modules/services are unit tested.
+
+## JWT Authentication
+Authentication is handled via Authorization (Bearer) token header field. Token is a JWT token. Here is a sample token that is valid for a very long time for a user with administrator role.
+```
+eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJyb2xlcyI6WyJhZG1pbmlzdHJhdG9yIl0sImlzcyI6Imh0dHBzOi8vYXBpLnRvcGNvZGVyLWRldi5jb20iLCJoYW5kbGUiOiJwc2hhaDEiLCJleHAiOjI0NjI0OTQ2MTgsInVzZXJJZCI6IjQwMTM1OTc4IiwiaWF0IjoxNDYyNDk0MDE4LCJlbWFpbCI6InBzaGFoMUB0ZXN0LmNvbSIsImp0aSI6ImY0ZTFhNTE0LTg5ODAtNDY0MC04ZWM1LWUzNmUzMWE3ZTg0OSJ9.XuNN7tpMOXvBG1QwWRQROj7NfuUbqhkjwn39Vy4tR5I
+```
+It's been signed with the secret 'secret'. This secret should match your entry in config/local.js. You can generate your own token using https://jwt.io
+
+## Documentation
+- [Swagger API Definition](http://editor.swagger.io/?url=https://raw.githubusercontent.com/topcoder-platform/tc-project-service/develop/docs/swagger.yaml) - click to open it via Online Swagger Editor.
+- [Permissions](https://htmlpreview.github.io/?https://github.com/topcoder-platform/tc-project-service/blob/develop/docs/permissions.html) - the list of all permissions in Project Service.
- [Projects Service Architecture](./docs/guides/architercture/architecture.md)
diff --git a/config/development.json b/config/development.json
index 12dc197a..631024c1 100644
--- a/config/development.json
+++ b/config/development.json
@@ -2,5 +2,7 @@
   "pubsubQueueName": "dev.project.service",
   "pubsubExchangeName": "dev.projects",
   "attachmentsS3Bucket": "topcoder-dev-media",
-  "connectProjectsUrl": "https://connect.topcoder-dev.com/projects/"
+  "connectProjectsUrl": "https://connect.topcoder-dev.com/projects/",
+  "fileServiceEndpoint": "https://api.topcoder-dev.com/v3/files/",
+  "memberServiceEndpoint": "https://api.topcoder-dev.com/v3/members"
}
diff --git a/local/docker-compose.yml b/local/docker-compose.yml
deleted file mode 100644
index 8c49d647..00000000
--- a/local/docker-compose.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-version: "2"
-services:
- jsonserver:
- build: "mock-services"
- ports:
- - "3001:3001"
- db:
- build: "postgres-db"
- ports:
- - "5432:5432"
- environment:
- - POSTGRES_PASSWORD=mysecretpassword
- - POSTGRES_USER=coder
- - POSTGRES_MULTIPLE_DATABASES=projectsdb,projectsdb_test
- esearch:
- image: "elasticsearch:2.3"
- ports:
- - "9200:9200"
- - "9300:9300"
- queue:
- image: "rabbitmq:3-management"
- restart: always
- ports:
- - "5672:5672"
- - "15672:15672"
\ No newline at end of file
diff --git a/local/full/docker-compose.yml b/local/full/docker-compose.yml
index 2bd921af..ce37204a 100644
--- a/local/full/docker-compose.yml
+++ b/local/full/docker-compose.yml
@@ -1,23 +1,28 @@
version: "2"
services:
jsonserver:
- extends:
- file: ../docker-compose.yml
- service: jsonserver
+ build: "../mock-services"
+ ports:
+ - "3001:3001"
db:
- extends:
- file: ../docker-compose.yml
- service: db
+ build: "../postgres-db"
+ ports:
+ - "5432:5432"
environment:
+ - POSTGRES_PASSWORD=mysecretpassword
+ - POSTGRES_USER=coder
- POSTGRES_MULTIPLE_DATABASES=projectsdb,projectsdb_test,tc_notifications
esearch:
- extends:
- file: ../docker-compose.yml
- service: esearch
+ image: "elasticsearch:2.3"
+ ports:
+ - "9200:9200"
+ - "9300:9300"
queue:
- extends:
- file: ../docker-compose.yml
- service: queue
+ image: "rabbitmq:3-management"
+ restart: always
+ ports:
+ - "5672:5672"
+ - "15672:15672"
zookeeper:
image: wurstmeister/zookeeper
ports:
diff --git a/local/light/docker-compose.yml b/local/light/docker-compose.yml
new file mode 100644
index 00000000..fd02046a
--- /dev/null
+++ b/local/light/docker-compose.yml
@@ -0,0 +1,23 @@
+version: "2"
+services:
+ jsonserver:
+ extends:
+ file: ../full/docker-compose.yml
+ service: jsonserver
+
+ db:
+ extends:
+ file: ../full/docker-compose.yml
+ service: db
+ environment:
+ - POSTGRES_MULTIPLE_DATABASES=projectsdb,projectsdb_test
+
+ esearch:
+ extends:
+ file: ../full/docker-compose.yml
+ service: esearch
+
+ queue:
+ extends:
+ file: ../full/docker-compose.yml
+ service: queue
diff --git a/package-lock.json b/package-lock.json
index 46e2f2e2..be239d4a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2518,6 +2518,58 @@
"capture-stack-trace": "^1.0.0"
}
},
+ "cross-env": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.2.tgz",
+ "integrity": "sha512-KZP/bMEOJEDCkDQAyRhu3RL2ZO/SUVrxQVI0G3YEQ+OLbRA3c6zgixe8Mq8a/z7+HKlNEjo8oiLUs8iRijY2Rw==",
+ "dev": true,
+ "requires": {
+ "cross-spawn": "^7.0.1"
+ },
+ "dependencies": {
+ "cross-spawn": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.2.tgz",
+ "integrity": "sha512-PD6G8QG3S4FK/XCGFbEQrDqO2AnMMsy0meR7lerlIOHAAbkuavGU/pOqprrlvfTNjvowivTeBsjebAL0NSoMxw==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ }
+ },
+ "path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true
+ },
+ "shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^3.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ }
+ }
+ },
"cross-spawn": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz",
diff --git a/package.json b/package.json
index 33d00701..e6a0618f 100644
--- a/package.json
+++ b/package.json
@@ -7,29 +7,30 @@
"node": ">=12"
},
"scripts": {
- "lint": "./node_modules/.bin/eslint .",
- "lint:fix": "./node_modules/.bin/eslint . --fix || true",
+ "lint": "eslint .",
+ "lint:fix": "eslint . --fix || true",
"build": "babel src -d dist --presets es2015 --copy-files",
- "sync:all": "NODE_ENV=development npm run sync:db && NODE_ENV=development npm run sync:es",
- "sync:db": "./node_modules/.bin/babel-node migrations/sync.js",
- "sync:es": "./node_modules/.bin/babel-node migrations/elasticsearch_sync.js",
"prestart": "npm run -s build",
"start": "node dist",
- "start:dev": "NODE_ENV=development PORT=8001 nodemon -w src --exec \"node --require dotenv/config --require babel-core/register src\" | ./node_modules/.bin/bunyan",
+ "start:dev": "cross-env NODE_ENV=development PORT=8001 nodemon -w src --exec \"npm run babel-node-script -- src\" | bunyan",
"startKafkaConsumers": "npm run -s build && node dist/index-kafka.js",
- "startKafkaConsumers:dev": "NODE_ENV=development nodemon -w src --exec \"babel-node src/index-kafka.js --presets es2015\" | ./node_modules/.bin/bunyan",
- "test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --require babel-core/register $(find src -path '*spec.js*') --exit",
- "test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --require babel-core/register $(find src -path '*spec.js*')",
- "demo-data": "babel-node local/seed",
- "es-db-compare": "babel-node scripts/es-db-compare",
- "data:export": "NODE_ENV=development LOG_LEVEL=info node --require dotenv/config --require babel-core/register scripts/data/export",
- "data:import": "NODE_ENV=development LOG_LEVEL=info node --require dotenv/config --require babel-core/register scripts/data/import",
- "local:docker:up": "docker-compose -f ./local/full/docker-compose.yml up -d",
- "local:docker:down": "docker-compose -f ./local/full/docker-compose.yml down",
- "local:docker:logs": "docker-compose -f ./local/full/docker-compose.yml logs",
- "local:init": "npm run sync:all && npm run data:import",
- "generate:doc:permissions": "babel-node scripts/permissions-doc",
- "generate:doc:permissions:dev": "nodemon --watch scripts/permissions-doc --watch src --ext js,jsx,hbs --exec babel-node scripts/permissions-doc"
+ "startKafkaConsumers:dev": "cross-env NODE_ENV=development nodemon -w src --exec \"npm run babel-node-script src/index-kafka.js\" | bunyan",
+ "test": "cross-env NODE_ENV=test npm run lint && cross-env NODE_ENV=test npm run reset:all && cross-env NODE_ENV=test istanbul cover node_modules/mocha/bin/_mocha -- --timeout 10000 --require babel-core/register \"./src/**/*.spec.js*\" --exit",
+ "test:watch": "cross-env NODE_ENV=test mocha -w --require babel-core/register \"./src/**/*.spec.js*\" ",
+ "reset:all": "npm run reset:db && npm run reset:es",
+ "reset:db": "npm run babel-node-script -- migrations/sync.js",
+ "reset:es": "npm run babel-node-script -- migrations/elasticsearch_sync.js",
+ "import-from-api": "npm run babel-node-script -- scripts/import-from-api",
+ "es-db-compare": "npm run babel-node-script -- scripts/es-db-compare",
+ "data:export": "cross-env NODE_ENV=development LOG_LEVEL=info npm run babel-node-script -- scripts/data/export",
+ "data:import": "cross-env NODE_ENV=development LOG_LEVEL=info npm run babel-node-script -- scripts/data/import",
+ "services:up": "docker-compose -f ./local/full/docker-compose.yml up -d",
+ "services:down": "docker-compose -f ./local/full/docker-compose.yml down",
+ "services:logs": "docker-compose -f ./local/full/docker-compose.yml logs",
+ "local:init": "npm run reset:all && npm run data:import",
+ "babel-node-script": "node --require dotenv/config --require babel-core/register",
+ "generate:doc:permissions": "npm run babel-node-script -- scripts/permissions-doc",
+ "generate:doc:permissions:dev": "nodemon --watch scripts/permissions-doc --watch src --ext js,jsx,hbs --exec --exec \"npm run babel-node-script scripts/permissions-doc\""
},
"repository": {
"type": "git",
@@ -90,6 +91,7 @@
"bunyan": "^1.8.12",
"chai": "^3.5.0",
"chai-as-promised": "^7.1.1",
+ "cross-env": "^7.0.2",
"eslint": "^6.8.0",
"eslint-config-airbnb-base": "^11.1.0",
"eslint-plugin-import": "^2.2.0",
diff --git a/local/seed/index.js b/scripts/import-from-api/index.js
similarity index 100%
rename from local/seed/index.js
rename to scripts/import-from-api/index.js
diff --git a/local/seed/projects.json b/scripts/import-from-api/projects.json
similarity index 100%
rename from local/seed/projects.json
rename to scripts/import-from-api/projects.json
diff --git a/local/seed/seedMetadata.js b/scripts/import-from-api/seedMetadata.js
similarity index 63%
rename from local/seed/seedMetadata.js
rename to scripts/import-from-api/seedMetadata.js
index cf1049b3..bc081c95 100644
--- a/local/seed/seedMetadata.js
+++ b/scripts/import-from-api/seedMetadata.js
@@ -1,9 +1,9 @@
-const _ = require('lodash')
+const _ = require('lodash');
const axios = require('axios');
const Promise = require('bluebird');
if (!process.env.CONNECT_USER_TOKEN) {
- console.error('This script requires environment variable CONNECT_USER_TOKEN to be defined. Login to http://connect.topcoder-dev.com and get your user token from the requests headers.')
+ console.error('This script requires environment variable CONNECT_USER_TOKEN to be defined. Login to http://connect.topcoder-dev.com and get your user token from the requests headers.');
process.exit(1);
}
@@ -15,7 +15,7 @@ if (!process.env.CONNECT_USER_TOKEN) {
* @param {Object} o object
*/
function dummifyPrices(o) {
- Object.keys(o).forEach(function (k) {
+ Object.keys(o).forEach((k) => {
if (o[k] !== null && typeof o[k] === 'object') {
dummifyPrices(o[k]);
return;
@@ -32,128 +32,120 @@ function dummifyPrices(o) {
// we need to know any logged in Connect user token to retrieve data from DEV
const CONNECT_USER_TOKEN = process.env.CONNECT_USER_TOKEN;
-var url = 'https://api.topcoder-dev.com/v5/projects/metadata';
+const url = 'https://api.topcoder-dev.com/v5/projects/metadata';
module.exports = (targetUrl, token) => {
- var destUrl = targetUrl + 'projects/';
- var destTimelines = targetUrl;
+ const destUrl = `${targetUrl}projects/`;
+ const destTimelines = targetUrl;
console.log('Getting metadata from DEV environment...');
return axios.get(url, {
headers: {
'Content-Type': 'application/json',
- 'Authorization': `Bearer ${CONNECT_USER_TOKEN}`
- }
+ Authorization: `Bearer ${CONNECT_USER_TOKEN}`,
+ },
})
.catch((err) => {
const errMessage = _.get(err, 'response.data.message');
- throw errMessage ? new Error('Error during obtaining data from DEV: ' + errMessage) : err
+ throw errMessage ? new Error(`Error during obtaining data from DEV: ${errMessage}`) : err;
})
- .then(async function (response) {
- let data = response.data;
- dummifyPrices(data)
+ .then(async (response) => {
+ const data = response.data;
+ dummifyPrices(data);
console.log('Creating metadata objects locally...');
- var headers = {
+ const headers = {
'Content-Type': 'application/json',
- 'Authorization': 'Bearer ' + token
- }
+ Authorization: `Bearer ${token}`,
+ };
- let promises
+ let promises;
- promises = _(data.forms).orderBy(['key', 'asc'], ['version', 'asc']).map(pt=>{
+ promises = _(data.forms).orderBy(['key', 'asc'], ['version', 'asc']).map((pt) => {
const param = _.omit(pt, ['id', 'version', 'revision', 'key']);
return axios
- .post(destUrl + `metadata/form/${pt.key}/versions`, param, {headers:headers})
+ .post(`${destUrl}metadata/form/${pt.key}/versions`, param, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create form with key=${pt.key} version=${pt.version}.`, errMessage)
- })
+ console.log(`Failed to create form with key=${pt.key} version=${pt.version}.`, errMessage);
+ });
});
await Promise.all(promises);
- promises = _(data.planConfigs).orderBy(['key', 'asc'], ['version', 'asc']).map(pt=>{
+ promises = _(data.planConfigs).orderBy(['key', 'asc'], ['version', 'asc']).map((pt) => {
const param = _.omit(pt, ['id', 'version', 'revision', 'key']);
return axios
- .post(destUrl + `metadata/planConfig/${pt.key}/versions`, param, {headers:headers})
+ .post(`${destUrl}metadata/planConfig/${pt.key}/versions`, param, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create planConfig with key=${pt.key} version=${pt.version}.`, errMessage)
- })
+ console.log(`Failed to create planConfig with key=${pt.key} version=${pt.version}.`, errMessage);
+ });
});
await Promise.all(promises);
- promises = _(data.priceConfigs).orderBy(['key', 'asc'], ['version', 'asc']).map(pt=>{
+ promises = _(data.priceConfigs).orderBy(['key', 'asc'], ['version', 'asc']).map((pt) => {
const param = _.omit(pt, ['id', 'version', 'revision', 'key']);
return axios
- .post(destUrl + `metadata/priceConfig/${pt.key}/versions`, param, {headers:headers})
+ .post(`${destUrl}metadata/priceConfig/${pt.key}/versions`, param, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create priceConfig with key=${pt.key} version=${pt.version}.`, errMessage)
- })
+ console.log(`Failed to create priceConfig with key=${pt.key} version=${pt.version}.`, errMessage);
+ });
});
await Promise.all(promises);
- promises = _(data.projectTypes).map(pt=>{
- return axios
- .post(destUrl+'metadata/projectTypes', pt, {headers:headers})
+ promises = _(data.projectTypes).map(pt => axios
+ .post(`${destUrl}metadata/projectTypes`, pt, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create projectType with key=${pt.key}.`, errMessage)
- })
- });
+ console.log(`Failed to create projectType with key=${pt.key}.`, errMessage);
+ }));
await Promise.all(promises);
- promises = _(data.productCategories).map(pt=>{
- return axios
- .post(destUrl+'metadata/productCategories', pt, {headers:headers})
+ promises = _(data.productCategories).map(pt => axios
+ .post(`${destUrl}metadata/productCategories`, pt, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create productCategory with key=${pt.key}.`, errMessage)
- })
- });
+ console.log(`Failed to create productCategory with key=${pt.key}.`, errMessage);
+ }));
await Promise.all(promises);
- promises = _(data.projectTemplates).map(pt=>{
- return axios
- .post(destUrl+'metadata/projectTemplates', pt, {headers:headers})
+ promises = _(data.projectTemplates).map(pt => axios
+ .post(`${destUrl}metadata/projectTemplates`, pt, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create projectTemplate with id=${pt.id}.`, errMessage)
- })
- });
+ console.log(`Failed to create projectTemplate with id=${pt.id}.`, errMessage);
+ }));
await Promise.all(promises);
- promises = _(data.productTemplates).map(pt=>{
- return axios
- .post(destUrl+'metadata/productTemplates', pt, {headers:headers})
+ promises = _(data.productTemplates).map(pt => axios
+ .post(`${destUrl}metadata/productTemplates`, pt, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create productTemplate with id=${pt.id}.`, errMessage)
- })
- });
+ console.log(`Failed to create productTemplate with id=${pt.id}.`, errMessage);
+ }));
await Promise.all(promises);
- await Promise.each(data.milestoneTemplates,pt=> (
+ await Promise.each(data.milestoneTemplates, pt => (
axios
- .post(destTimelines+'timelines/metadata/milestoneTemplates', pt, {headers:headers})
+ .post(`${destTimelines}timelines/metadata/milestoneTemplates`, pt, { headers })
.catch((err) => {
const errMessage = _.get(err, 'response.data.message', '');
- console.log(`Failed to create milestoneTemplate with id=${pt.id}.`, errMessage)
+ console.log(`Failed to create milestoneTemplate with id=${pt.id}.`, errMessage);
})
));
// handle success
console.log('Done metadata seed');
- }).catch(err=>{
+ }).catch((err) => {
console.error(err && err.response ? err.response : err);
});
-}
+};
diff --git a/local/seed/seedProjects.js b/scripts/import-from-api/seedProjects.js
similarity index 90%
rename from local/seed/seedProjects.js
rename to scripts/import-from-api/seedProjects.js
index a84b82d8..e00eff78 100644
--- a/local/seed/seedProjects.js
+++ b/scripts/import-from-api/seedProjects.js
@@ -33,7 +33,7 @@ module.exports = (targetUrl, token) => {
const invites = _.cloneDeep(_.get(project, 'invites'));
const acceptInvitation = _.get(project, 'acceptInvitation');
- if(project.templateId) {
+ if (project.templateId) {
await findProjectTemplate(project.templateId, targetUrl, adminHeaders).catch((ex) => {
delete project.templateId;
});
@@ -78,28 +78,28 @@ module.exports = (targetUrl, token) => {
// creating invitations
if (Array.isArray(invites)) {
- let promises = []
- invites.forEach(invite => {
- promises.push(createProjectMemberInvite(projectId, invite, targetUrl, connectAdminHeaders))
- })
+ const promises = [];
+ invites.forEach((invite) => {
+ promises.push(createProjectMemberInvite(projectId, invite, targetUrl, connectAdminHeaders));
+ });
// accepting invitations
console.log(`Project #${projectId}: Wait a bit to give time ES to index before creating invitation...`);
await Promise.delay(ES_INDEX_DELAY);
- const responses = await Promise.all(promises)
+ const responses = await Promise.all(promises);
if (acceptInvitation) {
- let acceptInvitationPromises = []
- responses.forEach(response => {
- const userId = _.get(response, 'data.success[0].userId')
+ const acceptInvitationPromises = [];
+ responses.forEach((response) => {
+ const userId = _.get(response, 'data.success[0].userId');
acceptInvitationPromises.push(updateProjectMemberInvite(projectId, {
userId,
- status: 'accepted'
- }, targetUrl, connectAdminHeaders))
- })
+ status: 'accepted',
+ }, targetUrl, connectAdminHeaders));
+ });
console.log(`Project #${projectId}: Wait a bit to give time ES to index before accepting invitation...`);
await Promise.delay(ES_INDEX_DELAY);
- await Promise.all(acceptInvitationPromises)
+ await Promise.all(acceptInvitationPromises);
}
}
@@ -140,7 +140,7 @@ function createProjectMemberInvite(projectId, params, targetUrl, headers) {
.post(projectMemberInviteUrl, params, { headers })
.catch((err) => {
console.log(`Failed to create project member invites ${projectId}: ${err.message}`);
- })
+ });
}
function updateProjectMemberInvite(projectId, params, targetUrl, headers) {
@@ -150,7 +150,7 @@ function updateProjectMemberInvite(projectId, params, targetUrl, headers) {
.put(updateProjectMemberInviteUrl, params, { headers })
.catch((err) => {
console.log(`Failed to update project member invites ${projectId}: ${err.message}`);
- })
+ });
}
function findProjectTemplate(templateId, targetUrl, headers) {
@@ -159,5 +159,5 @@ function findProjectTemplate(templateId, targetUrl, headers) {
return axios({
url: projectTemplateUrl,
headers,
- })
+ });
}