diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4ab77efc..d9851d0d 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -48,7 +48,6 @@ jobs:
- POSTGRES_USER: circle_test
- POSTGRES_DB: circle_test
- image: elasticsearch:2.3
- - image: rabbitmq:3-management
environment:
DEPLOY_ENV: "DEV"
LOGICAL_ENV: "dev"
diff --git a/.ebextensions/01-environment-variables.config b/.ebextensions/01-environment-variables.config
index 8d8d7ffc..b7ed81ad 100644
--- a/.ebextensions/01-environment-variables.config
+++ b/.ebextensions/01-environment-variables.config
@@ -23,9 +23,6 @@ option_settings:
- namespace: aws:elasticbeanstalk:application:environment
option_name: DB_MASTER_URL
value: TBD
- - namespace: aws:elasticbeanstalk:application:environment
- option_name: RABBITMQ_URL
- value: TBD
- namespace: aws:elasticbeanstalk:application:environment
option_name: PROJECTS_ES_URL
value: TBD
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 00000000..a20502b7
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,4 @@
+{
+ "singleQuote": true,
+ "trailingComma": "all"
+}
diff --git a/README.md b/README.md
index eaa9ec86..084ebdbc 100644
--- a/README.md
+++ b/README.md
@@ -9,8 +9,8 @@ Microservice to manage CRUD operations for all things Projects.
- [Steps to run locally](#steps-to-run-locally)
- [Run Connect App with Project Service locally](#run-connect-app-with-project-service-locally)
- [Import and Export data](#import-and-export-data)
- - [📤 Export data](#%f0%9f%93%a4-export-data)
- - [📥 Import data](#%f0%9f%93%a5-import-data)
+ - [📤 Export data](#-export-data)
+ - [📥 Import data](#-import-data)
- [Import metadata from api.topcoder-dev.com (deprecated)](#import-metadata-from-apitopcoder-devcom-deprecated)
- [Run via Docker](#run-via-docker)
- [NPM Commands](#npm-commands)
@@ -54,7 +54,6 @@ Local setup should work good on **Linux**, **macOS** and **Windows**.
# Locally deployed services (via docker-compose)
PROJECTS_ES_URL=dockerhost:9200
DB_MASTER_URL=postgres://coder:mysecretpassword@dockerhost:5432/projectsdb
- RABBITMQ_URL=amqp://dockerhost:5672
BUS_API_URL=http://dockerhost:8002/v5
# Locally we usually run in Development mode
@@ -110,7 +109,6 @@ Local setup should work good on **Linux**, **macOS** and **Windows**.
|----------|:-----:|:----:|
| PostgreSQL | db | 5432 |
| Elasticsearch | esearch | 9200 |
- | RabbitMQ | queue | 5672, 15672 |
| Mock Service (not in use) | jsonserver | 3001 |
| Zookeeper | zookeeper | 2181 |
| Kafka | kafka | 9092 |
@@ -148,7 +146,6 @@ Local setup should work good on **Linux**, **macOS** and **Windows**.
|----------|:-----:|:----:|
| PostgreSQL | db | 5432 |
| Elasticsearch | esearch | 9200 |
- | RabbitMQ | queue | 5672, 15672 |
| Mock Service (not in use) | jsonserver | 3001 |
diff --git a/config/custom-environment-variables.json b/config/custom-environment-variables.json
index c72c169c..c414fbce 100644
--- a/config/custom-environment-variables.json
+++ b/config/custom-environment-variables.json
@@ -17,7 +17,6 @@
"metadataDocType": "METADATA_ES_DOC_TYPE",
"metadataDocDefaultId": "METADATA_ES_DOC_DEFAULT_ID"
},
- "rabbitmqURL": "RABBITMQ_URL",
"pubsubQueueName": "PUBSUB_QUEUE_NAME",
"pubsubExchangeName": "PUBSUB_EXCHANGE_NAME",
"directProjectServiceEndpoint": "DIRECT_PROJECT_SERVICE_ENDPOINT",
diff --git a/config/default.json b/config/default.json
index 13b28e3a..87fcaa52 100644
--- a/config/default.json
+++ b/config/default.json
@@ -6,7 +6,6 @@
"captureLogs": "false",
"enableFileUpload": "true",
"logentriesToken": "",
- "rabbitmqURL": "",
"pubsubQueueName": "project.service",
"pubsubExchangeName": "projects",
"fileServiceEndpoint": "",
diff --git a/config/development.json b/config/development.json
index 631024c1..3f3e909b 100644
--- a/config/development.json
+++ b/config/development.json
@@ -5,5 +5,6 @@
"connectProjectsUrl": "https://connect.topcoder-dev.com/projects/",
"fileServiceEndpoint": "https://api.topcoder-dev.com/v3/files/",
"connectProjectsUrl": "https://connect.topcoder-dev.com/projects/",
- "memberServiceEndpoint": "https://api.topcoder-dev.com/v3/members"
+ "memberServiceEndpoint": "https://api.topcoder-dev.com/v3/members",
+ "identityServiceEndpoint": "https://api.topcoder-dev.com/v3/"
}
diff --git a/config/m2m.local.js b/config/m2m.local.js
index 41677f37..45fb5183 100644
--- a/config/m2m.local.js
+++ b/config/m2m.local.js
@@ -12,7 +12,6 @@ if (process.env.NODE_ENV === 'test') {
logLevel: 'debug',
captureLogs: 'false',
logentriesToken: '',
- rabbitmqURL: 'amqp://dockerhost:5672',
fileServiceEndpoint: 'https://api.topcoder-dev.com/v3/files/',
directProjectServiceEndpoint: 'https://api.topcoder-dev.com/v3/direct',
connectProjectsUrl: 'https://connect.topcoder-dev.com/projects/',
diff --git a/config/mock.local.js b/config/mock.local.js
index 14fbdba4..2d359870 100644
--- a/config/mock.local.js
+++ b/config/mock.local.js
@@ -12,7 +12,6 @@ if (process.env.NODE_ENV === 'test') {
logLevel: 'debug',
captureLogs: 'false',
logentriesToken: '',
- rabbitmqURL: 'amqp://dockerhost:5672',
fileServiceEndpoint: 'https://api.topcoder-dev.com/v3/files/',
directProjectServiceEndpoint: 'https://api.topcoder-dev.com/v3/direct',
connectProjectsUrl: 'https://connect.topcoder-dev.com/projects/',
diff --git a/config/test.json b/config/test.json
index cb81054c..d226b208 100644
--- a/config/test.json
+++ b/config/test.json
@@ -15,7 +15,6 @@
"metadataIndexName": "metadata_test",
"metadataDocType": "doc"
},
- "rabbitmqUrl": "amqp://localhost:5672",
"connectProjectsUrl": "https://local.topcoder-dev.com/projects/",
"dbConfig": {
"masterUrl": "postgres://coder:mysecretpassword@localhost:5432/projectsdb_test",
diff --git a/deploy.sh b/deploy.sh
index 11775b08..f874b87a 100755
--- a/deploy.sh
+++ b/deploy.sh
@@ -136,10 +136,6 @@ make_task_def(){
"name": "PROJECTS_ES_INDEX_NAME",
"value": "%s"
},
- {
- "name": "RABBITMQ_URL",
- "value": "%s"
- },
{
"name": "DIRECT_PROJECT_SERVICE_ENDPOINT",
"value": "%s"
@@ -250,7 +246,6 @@ make_task_def(){
LOG_LEVEL=$(eval "echo \$${ENV}_LOG_LEVEL")
PROJECTS_ES_URL=$(eval "echo \$${ENV}_PROJECTS_ES_URL")
PROJECTS_ES_INDEX_NAME=$(eval "echo \$${ENV}_PROJECTS_ES_INDEX_NAME")
- RABBITMQ_URL=$(eval "echo \$${ENV}_RABBITMQ_URL")
DIRECT_PROJECT_SERVICE_ENDPOINT=$(eval "echo \$${ENV}_DIRECT_PROJECT_SERVICE_ENDPOINT")
FILE_SERVICE_ENDPOINT=$(eval "echo \$${ENV}_FILE_SERVICE_ENDPOINT")
CONNECT_PROJECTS_URL=$(eval "echo \$${ENV}_CONNECT_PROJECTS_URL")
@@ -283,7 +278,7 @@ make_task_def(){
INVITE_EMAIL_SUBJECT=$(eval "echo \$${ENV}_INVITE_EMAIL_SUBJECT")
INVITE_EMAIL_SECTION_TITLE=$(eval "echo \$${ENV}_INVITE_EMAIL_SECTION_TITLE")
- task_def=$(printf "$task_template" $1 $AWS_ACCOUNT_ID $AWS_ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $AWS_ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $RABBITMQ_URL $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $CONNECT_URL $ACCOUNTS_APP_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$AUTH0_PROXY_SERVER_URL" "$EMAIL_INVITE_FROM_NAME" "$EMAIL_INVITE_FROM_EMAIL" "$INVITE_EMAIL_SUBJECT" "$INVITE_EMAIL_SECTION_TITLE" $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
+ task_def=$(printf "$task_template" $1 $AWS_ACCOUNT_ID $AWS_ACCOUNT_ID $AWS_ECS_CONTAINER_NAME $AWS_ACCOUNT_ID $AWS_REGION $AWS_REPOSITORY $CIRCLE_SHA1 $2 $3 $4 $NODE_ENV $ENABLE_FILE_UPLOAD $LOG_LEVEL $CAPTURE_LOGS $LOGENTRIES_TOKEN $API_VERSION $AWS_REGION $AUTH_DOMAIN $AUTH_SECRET $VALID_ISSUERS $DB_MASTER_URL $MEMBER_SERVICE_ENDPOINT $IDENTITY_SERVICE_ENDPOINT $BUS_API_URL $MESSAGE_SERVICE_URL $SYSTEM_USER_CLIENT_ID $SYSTEM_USER_CLIENT_SECRET $PROJECTS_ES_URL $PROJECTS_ES_INDEX_NAME $DIRECT_PROJECT_SERVICE_ENDPOINT $FILE_SERVICE_ENDPOINT $CONNECT_PROJECTS_URL $CONNECT_URL $ACCOUNTS_APP_URL $SEGMENT_ANALYTICS_KEY "$AUTH0_URL" "$AUTH0_AUDIENCE" $AUTH0_CLIENT_ID "$AUTH0_CLIENT_SECRET" $TOKEN_CACHE_TIME "$KAFKA_CLIENT_CERT" "$KAFKA_CLIENT_CERT_KEY" $KAFKA_GROUP_ID $KAFKA_URL "$AUTH0_PROXY_SERVER_URL" "$EMAIL_INVITE_FROM_NAME" "$EMAIL_INVITE_FROM_EMAIL" "$INVITE_EMAIL_SUBJECT" "$INVITE_EMAIL_SECTION_TITLE" $PORT $PORT $AWS_ECS_CLUSTER $AWS_REGION $NODE_ENV)
}
push_ecr_image(){
diff --git a/docs/permissions.html b/docs/permissions.html
index 33f4d536..43823aa0 100644
--- a/docs/permissions.html
+++ b/docs/permissions.html
@@ -85,6 +85,11 @@
background-color: #b8daff;
vertical-align: middle;
}
+
+ .badge-crossed {
+ opacity: 0.4;
+ text-decoration: line-through;
+ }
@@ -96,11 +101,11 @@ Permissions
Legend:
- allowed Project Role - users with such a Project Role are allowed to perform the action
- - denied Project Role - users with such a Project Role are denied to perform the action even they have some other allow roles
+ - denied Project Role - users with such a Project Role are denied to perform the action even if they have some other allow roles
- allowed Topcoder Role - users with such a Topcoder Role are allowed to perform the action
- - denied Topcoder Role - users with such a Topcoder Role are denied to perform the action even they have some other allow roles
+ - denied Topcoder Role - users with such a Topcoder Role are denied to perform the action even if they have some other allow roles
- allowed M2M Scope - M2M tokens with such a scope are allowed to perform the action
- - denied M2M Scope - M2M tokens with such a scope are allowed to perform the action even they have some other allow scopes
+ - denied M2M Scope - M2M tokens with such a scope are denied to perform the action even if they have some other allow scopes
@@ -175,7 +180,7 @@
Read Project
READ_PROJECT
-
+ Read project when user is a member.
@@ -186,14 +191,6 @@
Connect Admin
administrator
Connect Manager
- Connect Account Manager
- Connect Copilot Manager
- Business Development Representative
- Presales
- Account Executive
- Program Manager
- Solution Architect
- Project Manager
@@ -219,14 +216,6 @@
Connect Admin
administrator
Connect Manager
- Connect Account Manager
- Connect Copilot Manager
- Business Development Representative
- Presales
- Account Executive
- Program Manager
- Solution Architect
- Project Manager
@@ -1171,10 +1160,12 @@
+ manager
+ account_manager
program_manager
+ account_executive
solution_architect
project_manager
- manager
copilot
@@ -1197,6 +1188,7 @@
Project \ Topcoder |
Connect Manager |
+ Connect Copilot Manager |
Connect Admin |
administrator |
Connect Account Manager |
@@ -1246,6 +1238,9 @@
✅
|
+
+ ✅
+ |
✅
|
@@ -1260,84 +1255,9 @@
✅
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- copilot |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
|
✅
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- account_manager |
-
- ✅
- |
-
-
- |
-
-
|
✅
@@ -1351,92 +1271,14 @@
|
-
- ✅
- |
-
- ✅
- |
-
+ |
✅
|
-
+ |
✅
- |
-
-
- |
- |
-
- account_executive |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
|
✅
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- project_manager |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
|
✅
@@ -1446,13 +1288,7 @@
|
- solution_architect |
-
-
- |
-
-
- |
+ copilot |
|
@@ -1483,36 +1319,6 @@
|
-
-
- program_manager |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
-
- ✅
- |
|
@@ -1526,7 +1332,7 @@
-
+ |
- means default Project Role if user with according Topcoder Role directly joins the project (if they are allowed to join directly). If user has multiple Topcoder Roles then the most left Topcoder Role on the table would define default Project Role.
|
diff --git a/local/full/docker-compose.yml b/local/full/docker-compose.yml
index b494df2f..ea6e2221 100644
--- a/local/full/docker-compose.yml
+++ b/local/full/docker-compose.yml
@@ -17,11 +17,6 @@ services:
ports:
- "9200:9200"
- "9300:9300"
- queue:
- image: "rabbitmq:3-management"
- ports:
- - "5672:5672"
- - "15672:15672"
zookeeper:
image: wurstmeister/zookeeper
ports:
diff --git a/migrations/seedElasticsearchIndex.js b/migrations/seedElasticsearchIndex.js
deleted file mode 100644
index cf353f8f..00000000
--- a/migrations/seedElasticsearchIndex.js
+++ /dev/null
@@ -1,130 +0,0 @@
-/* eslint-disable import/no-extraneous-dependencies,no-param-reassign */
-
-import _ from 'lodash';
-import bunyan from 'bunyan';
-import config from 'config';
-import Promise from 'bluebird';
-import models from '../src/models';
-import RabbitMQService from '../src/services/rabbitmq';
-import { TIMELINE_REFERENCES } from '../src/constants';
-
-const logger = bunyan.createLogger({ name: 'init-es', level: config.get('logLevel') });
-
-
-/**
- * Retrieve project ids from cli if provided
- * @return {Array} list of projectIds
- */
-function getProjectIds() {
- let projectIdArg = _.find(process.argv, a => a.indexOf('projectIds') > -1);
- if (projectIdArg) {
- projectIdArg = projectIdArg.split('=');
- return projectIdArg[1].split(',').map(i => parseInt(i, 10));
- }
- return [];
-}
-
-/**
- * Retrieve timeline ids from cli if provided
- * @return {Array} list of timelineIds
- */
-function getTimelineIds() {
- let timelineIdArg = _.find(process.argv, a => a.indexOf('timelineIds') > -1);
- if (timelineIdArg) {
- timelineIdArg = timelineIdArg.split('=');
- return timelineIdArg[1].split(',').map(i => parseInt(i, 10));
- }
- return [];
-}
-
-Promise.coroutine(function* wrapped() {
- try {
- const rabbit = new RabbitMQService(logger);
- // initialize RabbitMQ
- yield rabbit.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- const projectIds = getProjectIds();
- const projectWhereClause = (projectIds.length > 0) ? { id: { $in: projectIds } } : { deletedAt: { $eq: null } };
- let projects = yield models.Project.findAll({
- where: projectWhereClause,
- include: [{
- model: models.ProjectPhase,
- as: 'phases',
- include: [{ model: models.PhaseProduct, as: 'products' }],
- }],
- });
- logger.info(`Retrieved #${projects.length} projects`);
-
- // Convert to raw json
- projects = _.map(projects, project => project.toJSON());
-
- const memberWhereClause = (projectIds.length > 0)
- ? { projectId: { $in: projectIds } }
- : { deletedAt: { $eq: null } };
- let members = yield models.ProjectMember.findAll({
- raw: true,
- where: memberWhereClause,
- });
- logger.info(`Retrieved #${members.length} members`);
- members = _.groupBy(members, 'projectId');
-
- // Get timelines
- const timelineIds = getTimelineIds();
- const timelineWhereClause = (timelineIds.length > 0) ? { id: { $in: timelineIds } } : {};
- let timelines = yield models.Timeline.findAll({
- where: timelineWhereClause,
- include: [{ model: models.Milestone, as: 'milestones' }],
- });
- logger.info(`Retrieved #${projects.length} timelines`);
-
- // Convert to raw json and remove unnecessary fields
- timelines = _.map(timelines, (timeline) => {
- const entity = _.omit(timeline.toJSON(), ['deletedBy', 'deletedAt']);
- entity.milestones = _.map(entity.milestones, milestone => _.omit(milestone, ['deletedBy', 'deletedAt']));
- return entity;
- });
-
- // Get projectId for each timeline
- yield Promise.all(
- _.map(timelines, (timeline) => {
- if (timeline.reference === TIMELINE_REFERENCES.PROJECT) {
- timeline.projectId = timeline.referenceId;
- return Promise.resolve(timeline);
- }
-
- return models.ProjectPhase.findById(timeline.referenceId)
- .then((phase) => {
- timeline.projectId = phase.projectId;
- return Promise.resolve(timeline);
- });
- }),
- );
-
- const promises = [];
- _.forEach(projects, (p) => {
- p.members = members[p.id];
- logger.debug(`Processing Project #${p.id}`);
- promises.push(rabbit.publish('project.initial', p, {}));
- });
- _.forEach(timelines, (t) => {
- logger.debug(`Processing Timeline #${t.id}`);
- promises.push(rabbit.publish('timeline.initial', t, {}));
- });
- Promise.all(promises)
- .then(() => {
- logger.info(`Published ${promises.length} msgs`);
- process.exit();
- })
- .catch((err) => {
- logger.error(err);
- process.exit();
- });
- } catch (err) {
- logger.error(err);
- process.exit();
- }
-})();
diff --git a/package-lock.json b/package-lock.json
index e0bc55d1..2c8bc171 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -147,6 +147,11 @@
"@types/express": "*"
}
},
+ "@types/joi": {
+ "version": "14.3.4",
+ "resolved": "https://registry.npmjs.org/@types/joi/-/joi-14.3.4.tgz",
+ "integrity": "sha512-1TQNDJvIKlgYXGNIABfgFp9y0FziDpuGrd799Q5RcnsDu+krD+eeW/0Fs5PHARvWWFelOhIG2OPCo6KbadBM4A=="
+ },
"@types/lodash": {
"version": "4.14.149",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.149.tgz",
@@ -227,19 +232,6 @@
"uri-js": "^4.2.2"
}
},
- "amqplib": {
- "version": "0.5.5",
- "resolved": "https://registry.npmjs.org/amqplib/-/amqplib-0.5.5.tgz",
- "integrity": "sha512-sWx1hbfHbyKMw6bXOK2k6+lHL8TESWxjAx5hG8fBtT7wcxoXNIsFxZMnFyBjxt3yL14vn7WqBDe5U6BGOadtLg==",
- "requires": {
- "bitsyntax": "~0.1.0",
- "bluebird": "^3.5.2",
- "buffer-more-ints": "~1.0.0",
- "readable-stream": "1.x >=1.1.9",
- "safe-buffer": "~5.1.2",
- "url-parse": "~1.4.3"
- }
- },
"analytics-node": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/analytics-node/-/analytics-node-2.4.1.tgz",
@@ -1831,16 +1823,6 @@
"file-uri-to-path": "1.0.0"
}
},
- "bitsyntax": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/bitsyntax/-/bitsyntax-0.1.0.tgz",
- "integrity": "sha512-ikAdCnrloKmFOugAfxWws89/fPc+nw0OOG1IzIE72uSOg/A3cYptKCjSUhDTuj7fhsJtzkzlv7l3b8PzRHLN0Q==",
- "requires": {
- "buffer-more-ints": "~1.0.0",
- "debug": "~2.6.9",
- "safe-buffer": "~5.1.2"
- }
- },
"bluebird": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
@@ -2013,11 +1995,6 @@
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
"integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk="
},
- "buffer-more-ints": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/buffer-more-ints/-/buffer-more-ints-1.0.0.tgz",
- "integrity": "sha512-EMetuGFz5SLsT0QTnXzINh4Ksr+oo4i+UGTXEshiGCQWnsgSs7ZhJ8fzlwQ+OzEMs0MpDAMr1hxnblp5a4vcHg=="
- },
"buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
@@ -3532,11 +3509,12 @@
}
},
"express-validation": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/express-validation/-/express-validation-0.6.0.tgz",
- "integrity": "sha1-DXf0r8flixIBat7FmzJb7v2dwmg=",
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/express-validation/-/express-validation-1.0.3.tgz",
+ "integrity": "sha512-mtegbSCoplU5DpBLF3XpeudQD22Jv6Dmo4Tmxn2wxnWaMw4hmV1BjpkxCW2jXAV1q/2R70qKLauDEQzTN1uTGA==",
"requires": {
- "lodash": "^4.9.0"
+ "@types/joi": "^14.3.3",
+ "lodash": "^4.17.15"
}
},
"ext": {
@@ -7490,11 +7468,6 @@
"resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
"integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA="
},
- "querystringify": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.1.tgz",
- "integrity": "sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA=="
- },
"randomatic": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz",
@@ -7572,17 +7545,6 @@
"read-pkg": "^2.0.0"
}
},
- "readable-stream": {
- "version": "1.1.14",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz",
- "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=",
- "requires": {
- "core-util-is": "~1.0.0",
- "inherits": "~2.0.1",
- "isarray": "0.0.1",
- "string_decoder": "~0.10.x"
- }
- },
"readdirp": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz",
@@ -8120,11 +8082,6 @@
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
"dev": true
},
- "requires-port": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
- "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
- },
"resolve": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz",
@@ -9411,15 +9368,6 @@
"resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz",
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA=="
},
- "url-parse": {
- "version": "1.4.7",
- "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.4.7.tgz",
- "integrity": "sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg==",
- "requires": {
- "querystringify": "^2.1.1",
- "requires-port": "^1.0.0"
- }
- },
"url-parse-lax": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz",
diff --git a/package.json b/package.json
index c48c04b1..b903f117 100644
--- a/package.json
+++ b/package.json
@@ -15,8 +15,8 @@
"start:dev": "cross-env NODE_ENV=development PORT=8001 nodemon -w src --exec \"./node_modules/.bin/env-cmd npm run babel-node-script -- src\" | bunyan",
"startKafkaConsumers": "npm run -s build && node dist/index-kafka.js",
"startKafkaConsumers:dev": "cross-env NODE_ENV=development nodemon -w src --exec \"./node_modules/.bin/env-cmd npm run babel-node-script src/index-kafka.js\" | bunyan",
- "test": "cross-env NODE_ENV=test npm run lint && cross-env NODE_ENV=test npm run reset:db && cross-env NODE_ENV=test npm run reset:es && cross-env NODE_ENV=test istanbul cover node_modules/mocha/bin/_mocha -- --timeout 10000 --require babel-core/register \"./src/**/*.spec.js*\" --exit",
- "test:watch": "cross-env NODE_ENV=test mocha -w --require babel-core/register \"./src/**/*.spec.js*\" ",
+ "test": "cross-env NODE_ENV=test npm run lint && cross-env NODE_ENV=test npm run reset:db && cross-env NODE_ENV=test npm run reset:es && cross-env NODE_ENV=test istanbul cover node_modules/mocha/bin/_mocha -- --timeout 10000 --require babel-core/register --require ./src/tests \"./src/**/*.spec.js*\" --exit",
+ "test:watch": "cross-env NODE_ENV=test mocha -w --require babel-core/register --require ./src/tests \"./src/**/*.spec.js*\" ",
"reset:db": "npm run babel-node-script -- migrations/sync.js",
"reset:es": "npm run babel-node-script -- migrations/elasticsearch_sync.js",
"import-from-api": "env-cmd npm run babel-node-script -- scripts/import-from-api",
@@ -43,7 +43,6 @@
},
"homepage": "https://github.com/appirio-tech/tc-projects-service#readme",
"dependencies": {
- "amqplib": "^0.5.1",
"analytics-node": "^2.1.1",
"app-module-path": "^1.0.7",
"aws-sdk": "^2.610.0",
diff --git a/scripts/permissions-doc/template.hbs b/scripts/permissions-doc/template.hbs
index 41d49140..f4a9a644 100644
--- a/scripts/permissions-doc/template.hbs
+++ b/scripts/permissions-doc/template.hbs
@@ -85,6 +85,11 @@
background-color: #b8daff;
vertical-align: middle;
}
+
+ .badge-crossed {
+ opacity: 0.4;
+ text-decoration: line-through;
+ }
@@ -96,11 +101,11 @@
Legend:
- allowed Project Role - users with such a Project Role are allowed to perform the action
- - denied Project Role - users with such a Project Role are denied to perform the action even they have some other allow roles
+ - denied Project Role - users with such a Project Role are denied to perform the action even if they have some other allow roles
- allowed Topcoder Role - users with such a Topcoder Role are allowed to perform the action
- - denied Topcoder Role - users with such a Topcoder Role are denied to perform the action even they have some other allow roles
+ - denied Topcoder Role - users with such a Topcoder Role are denied to perform the action even if they have some other allow roles
- allowed M2M Scope - M2M tokens with such a scope are allowed to perform the action
- - denied M2M Scope - M2M tokens with such a scope are allowed to perform the action even they have some other allow scopes
+ - denied M2M Scope - M2M tokens with such a scope are denied to perform the action even if they have some other allow scopes
@@ -131,7 +136,7 @@
{{/each}}
{{/if}}
{{#each denyRule.projectRoles}}
- {{this}}
+ {{this}}
{{/each}}
@@ -144,7 +149,7 @@
{{/each}}
{{/if}}
{{#each denyRule.topcoderRoles}}
- {{this}}
+ {{this}}
{{/each}}
@@ -153,7 +158,7 @@
{{this}}
{{/each}}
{{#each denyRule.scopes}}
- {{this}}
+ {{this}}
{{/each}}
diff --git a/src/app.js b/src/app.js
index 46806ce6..9c970d07 100644
--- a/src/app.js
+++ b/src/app.js
@@ -117,10 +117,4 @@ app.use(router);
app.routerRef = router;
-// =======================
-// Initialize services
-// =======================
-require('./services')(app, logger);
-
-
module.exports = app;
diff --git a/src/events/index.js b/src/events/index.js
deleted file mode 100644
index b1757dec..00000000
--- a/src/events/index.js
+++ /dev/null
@@ -1,88 +0,0 @@
-
-import { EVENT, CONNECT_NOTIFICATION_EVENT } from '../constants';
-import { projectCreatedHandler,
- projectCreatedHandlerForPhases,
- projectUpdatedKafkaHandler } from './projects';
-import { projectPhaseAddedHandler, projectPhaseRemovedHandler,
- projectPhaseUpdatedHandler } from './projectPhases';
-import {
- timelineAddedHandler,
- timelineAdjustedKafkaHandler,
-} from './timelines';
-import {
- milestoneUpdatedKafkaHandler,
-} from './milestones';
-
-/**
- * Void RabbitMQ event handler.
- * It "ack"s messages which are still published but we don't want to consume.
- *
- * It's used to "disable" events which we don't want to handle anymore. But for a time being
- * we don't want to remove the code of them until we validate that we are good without them.
- *
- * @param {Object} logger logger
- * @param {Object} msg RabbitMQ message
- * @param {Object} channel RabbitMQ channel
- * @returns {Promise} nothing
- */
-const voidRabbitHandler = (logger, msg, channel) => {
- logger.debug('Calling void RabbitMQ handler.');
- channel.ack(msg);
- return Promise.resolve();
-};
-
-// NOTE: We use "project-processor-es" for ES indexing now.
-// So I disable indexing using RabbitMQ for a transition period for most of the objects
-// which don't have any special logic.
-// As soon as we are sure, that "project-processor-es" works well for ES indexing,
-// we should completely remove the handlers for this events.
-export const rabbitHandlers = {
- 'project.initial': projectCreatedHandler, // is only used `seedElasticsearchIndex.js` and can be removed
- [EVENT.ROUTING_KEY.PROJECT_DRAFT_CREATED]: projectCreatedHandlerForPhases, // we have to call it, because it triggers topics creating for phases
- [EVENT.ROUTING_KEY.PROJECT_UPDATED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_DELETED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_MEMBER_ADDED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_MEMBER_REMOVED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_MEMBER_UPDATED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_MEMBER_INVITE_CREATED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_MEMBER_INVITE_UPDATED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_ADDED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_REMOVED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_UPDATED]: voidRabbitHandler, // DISABLED
-
- // project phase handles additionally implement logic for creating associated topics in Message Service
- [EVENT.ROUTING_KEY.PROJECT_PHASE_ADDED]: projectPhaseAddedHandler, // index in ES because of cascade updates
- [EVENT.ROUTING_KEY.PROJECT_PHASE_REMOVED]: projectPhaseRemovedHandler, // doesn't index in ES
- [EVENT.ROUTING_KEY.PROJECT_PHASE_UPDATED]: projectPhaseUpdatedHandler, // index in ES because of cascade updates
-
- [EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_ADDED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_REMOVED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_UPDATED]: voidRabbitHandler, // DISABLED
-
- // Timeline and milestone
- 'timeline.initial': timelineAddedHandler, // is only used `seedElasticsearchIndex.js` and can be removed
- [EVENT.ROUTING_KEY.TIMELINE_ADDED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.TIMELINE_REMOVED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.TIMELINE_UPDATED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.MILESTONE_ADDED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.MILESTONE_REMOVED]: voidRabbitHandler, // DISABLED
- [EVENT.ROUTING_KEY.MILESTONE_UPDATED]: voidRabbitHandler, // DISABLED
-};
-
-export const kafkaHandlers = {
- // Events defined by project-api
- [CONNECT_NOTIFICATION_EVENT.PROJECT_UPDATED]: projectUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.PROJECT_ATTACHMENT_UPDATED]: projectUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.PROJECT_TEAM_UPDATED]: projectUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.PROJECT_PLAN_UPDATED]: projectUpdatedKafkaHandler,
-
- // Events from message-service
- [CONNECT_NOTIFICATION_EVENT.TOPIC_CREATED]: projectUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.TOPIC_UPDATED]: projectUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.POST_CREATED]: projectUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.POST_UPDATED]: projectUpdatedKafkaHandler,
-
- // Events coming from timeline/milestones (considering it as a separate module/service in future)
- [CONNECT_NOTIFICATION_EVENT.MILESTONE_TRANSITION_COMPLETED]: milestoneUpdatedKafkaHandler,
- [CONNECT_NOTIFICATION_EVENT.TIMELINE_ADJUSTED]: timelineAdjustedKafkaHandler,
-};
diff --git a/src/events/kafkaHandlers.js b/src/events/kafkaHandlers.js
new file mode 100644
index 00000000..b6bf90cf
--- /dev/null
+++ b/src/events/kafkaHandlers.js
@@ -0,0 +1,54 @@
+/**
+ * BUS Event Handlers
+ */
+import { CONNECT_NOTIFICATION_EVENT, BUS_API_EVENT, RESOURCES } from '../constants';
+import {
+ projectCreatedKafkaHandler,
+ projectUpdatedKafkaHandler } from './projects';
+import { projectPhaseAddedKafkaHandler, projectPhaseRemovedKafkaHandler,
+ projectPhaseUpdatedKafkaHandler } from './projectPhases';
+import {
+ timelineAdjustedKafkaHandler,
+} from './timelines';
+import {
+ milestoneUpdatedKafkaHandler,
+} from './milestones';
+
+const kafkaHandlers = {
+ /**
+ * Deprecated specific Bus Events
+ */
+ // Events defined by project-api
+ [CONNECT_NOTIFICATION_EVENT.PROJECT_UPDATED]: projectUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.PROJECT_ATTACHMENT_UPDATED]: projectUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.PROJECT_TEAM_UPDATED]: projectUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.PROJECT_PLAN_UPDATED]: projectUpdatedKafkaHandler,
+
+ // Events from message-service
+ [CONNECT_NOTIFICATION_EVENT.TOPIC_CREATED]: projectUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.TOPIC_UPDATED]: projectUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.POST_CREATED]: projectUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.POST_UPDATED]: projectUpdatedKafkaHandler,
+
+ // Events coming from timeline/milestones (considering it as a separate module/service in future)
+ [CONNECT_NOTIFICATION_EVENT.MILESTONE_TRANSITION_COMPLETED]: milestoneUpdatedKafkaHandler,
+ [CONNECT_NOTIFICATION_EVENT.TIMELINE_ADJUSTED]: timelineAdjustedKafkaHandler,
+
+ /**
+ * New Unified Bus Events
+ */
+ [BUS_API_EVENT.PROJECT_CREATED]: {
+ [RESOURCES.PROJECT]: projectCreatedKafkaHandler,
+ },
+ [BUS_API_EVENT.PROJECT_PHASE_CREATED]: {
+ [RESOURCES.PHASE]: projectPhaseAddedKafkaHandler,
+ },
+ [BUS_API_EVENT.PROJECT_PHASE_UPDATED]: {
+ [RESOURCES.PHASE]: projectPhaseUpdatedKafkaHandler,
+ },
+ [BUS_API_EVENT.PROJECT_PHASE_DELETED]: {
+ [RESOURCES.PHASE]: projectPhaseRemovedKafkaHandler,
+ },
+};
+
+export default kafkaHandlers;
diff --git a/src/events/phaseProducts/index.js b/src/events/phaseProducts/index.js
deleted file mode 100644
index 3322d78e..00000000
--- a/src/events/phaseProducts/index.js
+++ /dev/null
@@ -1,129 +0,0 @@
-/**
- * Event handlers for phase product create, update and delete.
- * Current functionality just updates the elasticsearch indexes.
- */
-
-import config from 'config';
-import _ from 'lodash';
-import Promise from 'bluebird';
-import util from '../../util';
-
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
-const eClient = util.getElasticSearchClient();
-
-/**
- * Handler for phase product creation event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const phaseProductAddedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId });
- const phases = _.isArray(doc._source.phases) ? doc._source.phases : []; // eslint-disable-line no-underscore-dangle
-
- _.each(phases, (phase) => {
- if (phase.id === data.phaseId) {
- phase.products = _.isArray(phase.products) ? phase.products : []; // eslint-disable-line no-param-reassign
- phase.products.push(_.omit(data, ['deletedAt', 'deletedBy']));
- }
- });
-
- const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId, body: { doc: merged } });
- logger.debug('phase product added to project document successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling project.phase.added event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Handler for phase product updated event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const phaseProductUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.projectId });
- const phases = _.map(doc._source.phases, (phase) => { // eslint-disable-line no-underscore-dangle
- if (phase.id === data.original.phaseId) {
- phase.products = _.map(phase.products, (product) => { // eslint-disable-line no-param-reassign
- if (product.id === data.original.id) {
- return _.assign(product, _.omit(data.updated, ['deletedAt', 'deletedBy']));
- }
- return product;
- });
- }
- return phase;
- });
- const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.original.projectId,
- body: {
- doc: merged,
- },
- });
- logger.debug('elasticsearch index updated, phase product updated successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling project.phase.updated event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Handler for phase product deleted event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const phaseProductRemovedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId });
- const phases = _.map(doc._source.phases, (phase) => { // eslint-disable-line no-underscore-dangle
- if (phase.id === data.phaseId) {
- phase.products = _.filter(phase.products, product => product.id !== data.id); // eslint-disable-line no-param-reassign
- }
- return phase;
- });
-
- const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
-
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.projectId,
- body: {
- doc: merged,
- },
- });
- logger.debug('phase product removed from project document successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error fetching project document from elasticsearch', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-
-module.exports = {
- phaseProductAddedHandler,
- phaseProductRemovedHandler,
- phaseProductUpdatedHandler,
-};
diff --git a/src/events/projectAttachments/index.js b/src/events/projectAttachments/index.js
deleted file mode 100644
index c964e960..00000000
--- a/src/events/projectAttachments/index.js
+++ /dev/null
@@ -1,110 +0,0 @@
-/**
- * Event handlers for project attachment create, update and delete
- * Current functionality just updates the elasticsearch indexes.
- */
-
-import config from 'config';
-import _ from 'lodash';
-import Promise from 'bluebird';
-import util from '../../util';
-
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
-const eClient = util.getElasticSearchClient();
-
-/**
- * Handler for project attachment creation event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectAttachmentAddedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId });
- const attachments = _.isArray(doc._source.attachments) ? doc._source.attachments : []; // eslint-disable-line no-underscore-dangle
- attachments.push(data);
- const merged = _.merge(doc._source, { attachments }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId, body: { doc: merged } });
- logger.debug('project attachment added to project document successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling project.attachment.added event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Handler for project attachment updated event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectAttachmentUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.projectId });
- const attachments = _.map(doc._source.attachments, (single) => { // eslint-disable-line no-underscore-dangle
- if (single.id === data.original.id) {
- return _.merge(single, data.updated);
- }
- return single;
- });
- const merged = _.merge(doc._source, { attachments }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.original.projectId,
- body: {
- doc: merged,
- },
- });
- logger.debug('elasticsearch index updated, project attachment updated successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling project.attachment.updated event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Handler for project attachment deleted event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectAttachmentRemovedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId });
- const attachments = _.filter(doc._source.attachments, single => single.id !== data.id); // eslint-disable-line no-underscore-dangle
- const merged = _.merge(doc._source, { attachments }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.projectId,
- body: {
- doc: merged,
- },
- });
- logger.debug('project attachment removed from project document successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error fetching project document from elasticsearch', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-
-module.exports = {
- projectAttachmentAddedHandler,
- projectAttachmentRemovedHandler,
- projectAttachmentUpdatedHandler,
-};
diff --git a/src/events/projectMemberInvites/index.js b/src/events/projectMemberInvites/index.js
deleted file mode 100644
index f67d07ed..00000000
--- a/src/events/projectMemberInvites/index.js
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Event handlers for project member invite create and update
- */
-import _ from 'lodash';
-import Promise from 'bluebird';
-import { updateESPromise } from '../projectMembers';
-
-/**
- * Project member invite careted event handler
- * @param {Object} logger logger
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack / nack
- * @return {undefined}
- */
-const projectMemberInviteCreatedHandler = Promise.coroutine(function* a(logger, msg, channel) {
- try {
- const origRequestId = msg.properties.correlationId;
- const newInvite = JSON.parse(msg.content.toString());
- const projectId = newInvite.projectId;
-
- // handle ES Update
- // add new invite to document invites array
- const updateDocPromise = Promise.coroutine(function* (doc) { // eslint-disable-line
- // now merge the updated changes and reindex the document
- const invites = _.isArray(doc._source.invites) ? doc._source.invites : []; // eslint-disable-line no-underscore-dangle
- invites.push(newInvite);
- return _.merge(doc._source, { invites }); // eslint-disable-line no-underscore-dangle
- });
-
- yield updateESPromise(logger, origRequestId, projectId, updateDocPromise);
- logger.debug('elasticsearch index updated successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling projectMemberInviteCreated Event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Project member invite updated event handler
- * @param {Object} logger logger
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack / nack
- * @return {undefined}
- */
-const projectMemberInviteUpdatedHandler = Promise.coroutine(function* a(logger, msg, channel) {
- try {
- const origRequestId = msg.properties.correlationId;
- const updatedInvite = JSON.parse(msg.content.toString());
- const projectId = updatedInvite.projectId;
-
- // handle ES Update
- // remove invite in document invites array, based on either userId or email
- const updateDocPromise = Promise.coroutine(function* (doc) { // eslint-disable-line
- // now merge the updated changes and reindex the document
- const invites = _.isArray(doc._source.invites) ? doc._source.invites : []; // eslint-disable-line no-underscore-dangle
- _.remove(invites, invite => (!!updatedInvite.email && invite.email === updatedInvite.email) ||
- (!!updatedInvite.userId && invite.userId === updatedInvite.userId));
- return _.merge(doc._source, { invites }); // eslint-disable-line no-underscore-dangle
- });
-
- yield updateESPromise(logger, origRequestId, projectId, updateDocPromise);
- logger.debug('elasticsearch index updated successfully');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling projectMemberInviteCreated Event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-module.exports = {
- projectMemberInviteCreatedHandler,
- projectMemberInviteUpdatedHandler,
-};
diff --git a/src/events/projectMembers/index.js b/src/events/projectMembers/index.js
deleted file mode 100644
index b0dd780a..00000000
--- a/src/events/projectMembers/index.js
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Event handlers for project members create, update and delete
- */
-import _ from 'lodash';
-import Promise from 'bluebird';
-import config from 'config';
-import util from '../../util';
-
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-const eClient = util.getElasticSearchClient();
-
-const updateESPromise = Promise.coroutine(function* a(logger, requestId, projectId, updateDocHandler) {
- try {
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: projectId });
- const updatedDoc = yield updateDocHandler(doc);
- return eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: { doc: updatedDoc },
- })
- .then(() => logger.debug('elasticsearch project document updated successfully'));
- } catch (error) {
- logger.error('Error caught updating ES document', error);
- return Promise.reject(error);
- }
-});
-
-/**
- * Project member added event handler
- * @param {Object} logger logger
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack / nack
- * @return {undefined}
- */
-const projectMemberAddedHandler = Promise.coroutine(function* a(logger, msg, channel) {
- try {
- const origRequestId = msg.properties.correlationId;
- const newMember = JSON.parse(msg.content.toString());
- const projectId = newMember.projectId;
- // handle ES Update
- // fetch the member information
- const updateDocPromise = Promise.coroutine(function* (doc) { // eslint-disable-line func-names
- const memberDetails = yield util.getMemberDetailsByUserIds([newMember.userId], logger, origRequestId);
- const payload = _.merge(newMember, _.pick(memberDetails[0], 'handle', 'firstName', 'lastName', 'email'));
- // now merge the updated changes and reindex the document for members
- const members = _.isArray(doc._source.members) ? doc._source.members : []; // eslint-disable-line no-underscore-dangle
- members.push(payload);
- // now merge the updated changes and reindex the document for invites
- const invites = _.isArray(doc._source.invites) ? doc._source.invites : []; // eslint-disable-line no-underscore-dangle
- // removing any invites for the member just added to the team
- _.remove(invites, invite => invite.email === payload.email || invite.userId === payload.userId);
- return _.merge(doc._source, { members, invites }); // eslint-disable-line no-underscore-dangle
- });
- yield Promise.all([updateESPromise(logger, origRequestId, projectId, updateDocPromise)]);
- logger.debug('elasticsearch index updated successfully and co-pilot/manager updated in direct project');
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling projectMemberAdded Event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Project member removed event handler
- * @param {Object} logger logger
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack / nack
- * @return {undefined}
- */
-const projectMemberRemovedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const origRequestId = msg.properties.correlationId;
- const member = JSON.parse(msg.content.toString());
- const projectId = member.projectId;
- const updateDocPromise = (doc) => {
- const members = _.filter(doc._source.members, single => single.id !== member.id); // eslint-disable-line no-underscore-dangle
- return Promise.resolve(_.set(doc._source, 'members', members)); // eslint-disable-line no-underscore-dangle
- };
- yield Promise.all([
- updateESPromise(logger, origRequestId, projectId, updateDocPromise),
- ]);
- logger.info('elasticsearch index updated successfully and co-pilot/manager removed in direct project');
- channel.ack(msg);
- } catch (error) {
- logger.error('failed to consume message, unexpected error', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Project member updated event handler
- * @param {Object} logger logger
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack / nack
- * @return {undefined}
- */
-const projectMemberUpdatedHandler = Promise.coroutine(function* a(logger, msg, channel) {
- try {
- const data = JSON.parse(msg.content.toString());
- // get member information
- const memberDetails = yield util.getMemberDetailsByUserIds([data.original.userId], logger);
- const payload = _.merge(data.updated, _.pick(memberDetails[0], 'handle', 'firstName', 'lastName', 'email'));
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.projectId });
-
- // merge the changes and update the elasticsearch index
- const members = _.map(doc._source.members, (single) => { // eslint-disable-line no-underscore-dangle
- if (single.id === data.original.id) {
- return _.merge(single, payload);
- }
- return single;
- });
- const merged = _.merge(doc._source, { members }); // eslint-disable-line no-underscore-dangle
- // update the merged document
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.original.projectId,
- body: { doc: merged },
- });
- logger.debug('elasticsearch project document updated, member updated successfully');
- channel.ack(msg);
- } catch (err) {
- logger.error('Unhandled error', err);
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-module.exports = {
- projectMemberAddedHandler,
- projectMemberRemovedHandler,
- projectMemberUpdatedHandler,
- updateESPromise,
-};
diff --git a/src/events/projectPhases/index.js b/src/events/projectPhases/index.js
index bb885f4d..9b126e8b 100644
--- a/src/events/projectPhases/index.js
+++ b/src/events/projectPhases/index.js
@@ -3,19 +3,13 @@
* Current functionality just updates the elasticsearch indexes.
*/
-import config from 'config';
+import Joi from 'joi';
import _ from 'lodash';
import Promise from 'bluebird';
-import util from '../../util';
import { TIMELINE_REFERENCES } from '../../constants';
import messageService from '../../services/messageService';
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
-const eClient = util.getElasticSearchClient();
-
/**
* Build topics data based on route parameter.
*
@@ -49,50 +43,6 @@ const buildTopicsData = (logger, phase, route) => {
}];
};
-/**
- * Indexes the project phase in the elastic search.
- *
- * @param {Object} logger logger to log along with trace id
- * @param {Object} phase event payload
- * @returns {undefined}
- */
-const indexProjectPhase = Promise.coroutine(function* (logger, phase) { // eslint-disable-line func-names
- try {
- // const phase = JSON.parse(msg.content.toString());
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: phase.projectId });
- const phases = _.isArray(doc._source.phases) ? doc._source.phases : []; // eslint-disable-line no-underscore-dangle
- const existingPhaseIndex = _.findIndex(phases, p => p.id === phase.id);
- // if phase does not exists already
- if (existingPhaseIndex === -1) {
- // Increase the order of the other phases in the same project,
- // which have `order` >= this phase order
- _.each(phases, (_phase) => {
- if (!_.isNil(_phase.order) && !_.isNil(phase.order) && _phase.order >= phase.order) {
- _phase.order += 1; // eslint-disable-line no-param-reassign
- }
- });
-
- phases.push(_.omit(phase, ['deletedAt', 'deletedBy']));
- } else { // if phase already exists, ideally we should never land here, but code handles the buggy indexing
- // replaces the old inconsistent index where previously phase was not removed from the index but deleted
- // from the database
- phases.splice(existingPhaseIndex, 1, phase);
- }
- const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: phase.projectId,
- body: { doc: merged },
- });
- logger.debug('project phase added to project document successfully');
- } catch (error) {
- logger.error('Error handling indexing the project phase', error);
- // throw the error back to nack the bus
- throw error;
- }
-});
-
/**
* Creates topics in message api
*
@@ -115,58 +65,6 @@ const createTopics = Promise.coroutine(function* (logger, phase, route) { // esl
}
});
-/**
- * Handler for project phase creation event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectPhaseAddedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- const data = JSON.parse(msg.content.toString());
- const phase = _.get(data, 'added', {});
- const route = _.get(data, 'route', 'PHASE');
- try {
- logger.debug('calling indexProjectPhase', phase);
- yield indexProjectPhase(logger, phase, channel);
- logger.debug('calling createPhaseTopic', phase);
- yield createTopics(logger, phase, route);
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling project.phase.added event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Indexes the project phase in the elastic search.
- *
- * @param {Object} logger logger to log along with trace id
- * @param {Object} data event payload
- * @returns {undefined}
- */
-const updateIndexProjectPhase = Promise.coroutine(function* (logger, data) { // eslint-disable-line func-names
- try {
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.projectId });
- const phases = _.map(data.allPhases, single => _.omit(single, ['deletedAt', 'deletedBy']));
- const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.original.projectId,
- body: {
- doc: merged,
- },
- });
- logger.debug('project phase updated to project document successfully');
- } catch (error) {
- logger.error('Error handling indexing the project phase', error);
- // throw the error back to nack the bus
- throw error;
- }
-});
-
/**
* Update one topic
*
@@ -213,59 +111,6 @@ const updateTopics = Promise.coroutine(function* (logger, phase, route) { // esl
}
});
-/**
- * Handler for project phase updated event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectPhaseUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- const data = JSON.parse(msg.content.toString());
- const route = _.get(data, 'route', 'PHASE');
- logger.debug('calling updateIndexProjectPhase', data);
- yield updateIndexProjectPhase(logger, data, channel);
- logger.debug('calling updateTopics', data.updated);
- yield updateTopics(logger, data.updated, route);
- channel.ack(msg);
- } catch (error) {
- logger.error('Error handling project.phase.updated event', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Removes the project phase from the elastic search.
- *
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @returns {undefined}
- */
-const removePhaseFromIndex = Promise.coroutine(function* (logger, msg) { // eslint-disable-line func-names, no-unused-vars
- try {
- const data = JSON.parse(msg.content.toString());
- const phase = _.get(data, 'deleted', {});
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: phase.projectId });
- const phases = _.filter(doc._source.phases, single => single.id !== phase.id); // eslint-disable-line no-underscore-dangle
- const merged = _.assign(doc._source, { phases }); // eslint-disable-line no-underscore-dangle
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: phase.projectId,
- body: {
- doc: merged,
- },
- });
- logger.debug('project phase removed from project document successfully');
- } catch (error) {
- logger.error('Error in removing project phase from index', error);
- // throw the error back to nack the bus
- throw error;
- }
-});
-
/**
* Removes one topic from the message api.
*
@@ -308,33 +153,92 @@ const removeTopics = Promise.coroutine(function* (logger, phase, route) { // esl
});
/**
- * Handler for project phase deleted event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
+ * Payload for new unified BUS events like `project.action.created` with `resource=phase`
*/
-const projectPhaseRemovedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- try {
- // NOTE We use "project-processor-es" for ES indexing now.
- // yield removePhaseFromIndex(logger, msg, channel);
- const data = JSON.parse(msg.content.toString());
- const phase = _.get(data, 'deleted', {});
- const route = _.get(data, 'route');
- logger.debug('calling removeTopics');
- yield removeTopics(logger, phase, route);
- channel.ack(msg);
- } catch (error) {
- logger.error('Error fetching project document from elasticsearch', error);
- // if the message has been redelivered dont attempt to reprocess it
- channel.nack(msg, false, !msg.fields.redelivered);
+const phasePayloadScheme = Joi.object().keys({
+ id: Joi.number().integer().positive().required(),
+ projectId: Joi.number().integer().positive().required(),
+ name: Joi.string().required(),
+ status: Joi.string().required(),
+ startDate: Joi.date().optional(),
+ endDate: Joi.date().optional(),
+ duration: Joi.number().min(0).optional().allow(null),
+ budget: Joi.number().min(0).optional(),
+ spentBudget: Joi.number().min(0).optional(),
+ progress: Joi.number().min(0).optional(),
+ details: Joi.any().optional(),
+ order: Joi.number().integer().optional().allow(null),
+}).unknown(true).required();
+
+/**
+ * Phase Created BUS API event handler.
+ * - create phase's Topic
+ * - throws exceptions in case of error
+ *
+ * @param {Object} app Application object
+ * @param {String} topic Kafka topic
+ * @param {Object} payload Message payload
+ * @return {Promise} Promise
+ */
+async function projectPhaseAddedKafkaHandler(app, topic, payload) {
+ // Validate payload
+ const result = Joi.validate(payload, phasePayloadScheme);
+ if (result.error) {
+ throw new Error(result.error);
+ }
+
+ const phase = payload;
+ app.logger.debug('calling createPhaseTopic', phase);
+ await createTopics(app.logger, phase, TIMELINE_REFERENCES.PHASE);
+}
+
+/**
+ * Phase Updated BUS API event handler.
+ * - updates phase's Topic
+ * - throws exceptions in case of error
+ *
+ * @param {Object} app Application object
+ * @param {String} topic Kafka topic
+ * @param {Object} payload Message payload
+ * @return {Promise} Promise
+ */
+async function projectPhaseUpdatedKafkaHandler(app, topic, payload) {
+ // Validate payload
+ const result = Joi.validate(payload, phasePayloadScheme);
+ if (result.error) {
+ throw new Error(result.error);
+ }
+
+ const phase = payload;
+ app.logger.debug('calling updateTopics', phase);
+ await updateTopics(app.logger, phase, TIMELINE_REFERENCES.PHASE);
+}
+
+/**
+ * Phase Deleted BUS API event handler.
+ * - removes phase's Topic
+ * - throws exceptions in case of error
+ *
+ * @param {Object} app Application object
+ * @param {String} topic Kafka topic
+ * @param {Object} payload Message payload
+ * @return {Promise} Promise
+ */
+async function projectPhaseRemovedKafkaHandler(app, topic, payload) {
+ // Validate payload
+ const result = Joi.validate(payload, phasePayloadScheme);
+ if (result.error) {
+ throw new Error(result.error);
}
-});
+ const phase = payload;
+ app.logger.debug('calling removeTopics', phase);
+ await removeTopics(app.logger, phase, TIMELINE_REFERENCES.PHASE);
+}
module.exports = {
- projectPhaseAddedHandler,
- projectPhaseRemovedHandler,
- projectPhaseUpdatedHandler,
createPhaseTopic: createTopics,
+ projectPhaseAddedKafkaHandler,
+ projectPhaseUpdatedKafkaHandler,
+ projectPhaseRemovedKafkaHandler,
};
diff --git a/src/events/projectPhases/index.spec.js b/src/events/projectPhases/index.spec.js
new file mode 100644
index 00000000..3afec26b
--- /dev/null
+++ b/src/events/projectPhases/index.spec.js
@@ -0,0 +1,176 @@
+/* eslint-disable no-unused-expressions */
+/* eslint-disable no-unused-vars */
+import _ from 'lodash';
+import sinon from 'sinon';
+import chai, { expect } from 'chai';
+import messageService from '../../services/messageService';
+import {
+ projectPhaseAddedKafkaHandler,
+ projectPhaseUpdatedKafkaHandler,
+ projectPhaseRemovedKafkaHandler,
+} from './index';
+import { BUS_API_EVENT } from '../../constants';
+
+chai.use(require('chai-as-promised'));
+
+chai.should();
+
+describe('project phase Kafka handlers', () => {
+ const mockedApp = {
+ logger: {
+ trace: sinon.stub(),
+ debug: sinon.stub(),
+ log: sinon.stub(),
+ info: sinon.stub(),
+ warn: sinon.stub(),
+ error: sinon.stub(),
+ },
+ };
+ const topic = {
+ id: 1,
+ title: 'test project phase',
+ posts: [{ id: 1, type: 'post', body: 'body' }],
+ };
+ const sandbox = sinon.sandbox.create();
+
+ const phasePayload = {
+ resource: 'phase',
+ createdAt: '2019-06-21T04:42:56.309Z',
+ updatedAt: '2019-06-21T04:42:56.310Z',
+ spentBudget: 0,
+ progress: 0,
+ id: 1,
+ name: 'test project phase',
+ status: 'active',
+ startDate: '2018-05-14T17:00:00.000Z',
+ endDate: '2018-05-15T17:00:00.000Z',
+ budget: 20,
+ details: { aDetails: 'a details' },
+ projectId: 1,
+ createdBy: 40051333,
+ updatedBy: 40051333,
+ duration: null,
+ order: 1,
+ };
+
+ describe('projectPhaseAddedKafkaHandler', () => {
+ let createMessageSpy;
+
+ beforeEach(() => {
+ createMessageSpy = sandbox.spy(messageService, 'createTopic');
+ });
+
+ afterEach(() => {
+ sandbox.restore();
+ });
+
+ it('should throw validation exception when payload is empty', async () => {
+ await expect(
+ projectPhaseAddedKafkaHandler(
+ mockedApp,
+ BUS_API_EVENT.PROJECT_PHASE_CREATED,
+ {},
+ ),
+ ).to.be.rejectedWith(Error);
+ });
+
+ it('should call create topic API with valid payload', async () => {
+ await projectPhaseAddedKafkaHandler(
+ mockedApp,
+ BUS_API_EVENT.PROJECT_PHASE_CREATED,
+ phasePayload,
+ );
+ createMessageSpy.calledOnce.should.be.true;
+ createMessageSpy.calledWith(
+ sinon.match({
+ reference: 'project',
+ referenceId: '1',
+ tag: 'phase#1',
+ title: 'test project phase',
+ }),
+ ).should.be.true;
+ });
+ });
+
+ describe('projectPhaseUpdatedKafkaHandler', () => {
+ let updateMessageSpy;
+
+ beforeEach(() => {
+ updateMessageSpy = sandbox.spy(messageService, 'updateTopic');
+ sandbox.stub(messageService, 'getTopicByTag', () =>
+ Promise.resolve(topic),
+ );
+ });
+
+ afterEach(() => {
+ sandbox.restore();
+ });
+
+ it('should throw validation exception when payload is empty', async () => {
+ await expect(
+ projectPhaseUpdatedKafkaHandler(
+ mockedApp,
+ BUS_API_EVENT.PROJECT_PHASE_UPDATED,
+ {},
+ ),
+ ).to.be.rejectedWith(Error);
+ });
+
+ it('should call update topic API with valid payload', async () => {
+ const updatedPhasePayload = _.cloneDeep(phasePayload);
+ updatedPhasePayload.name = 'test project phase UPDATED';
+ await projectPhaseUpdatedKafkaHandler(
+ mockedApp,
+ BUS_API_EVENT.PROJECT_PHASE_UPDATED,
+ updatedPhasePayload,
+ );
+ updateMessageSpy.calledOnce.should.be.true;
+ updateMessageSpy.calledWith(
+ topic.id,
+ sinon.match({
+ title: updatedPhasePayload.name,
+ postId: topic.posts[0].id,
+ content: topic.posts[0].body,
+ }),
+ ).should.be.true;
+ });
+ });
+
+ describe('projectPhaseRemovedKafkaHandler', () => {
+ let deleteTopicSpy;
+ let deletePostsSpy;
+
+ beforeEach(() => {
+ deleteTopicSpy = sandbox.spy(messageService, 'deleteTopic');
+ deletePostsSpy = sandbox.spy(messageService, 'deletePosts');
+ sandbox.stub(messageService, 'getTopicByTag', () =>
+ Promise.resolve(topic),
+ );
+ });
+
+ afterEach(() => {
+ sandbox.restore();
+ });
+
+ it('should throw validation exception when payload is empty', async () => {
+ await expect(
+ projectPhaseRemovedKafkaHandler(
+ mockedApp,
+        BUS_API_EVENT.PROJECT_PHASE_DELETED,
+ {},
+ ),
+ ).to.be.rejectedWith(Error);
+ });
+
+ it('should call delete topic and posts API with valid payload', async () => {
+ await projectPhaseRemovedKafkaHandler(
+ mockedApp,
+        BUS_API_EVENT.PROJECT_PHASE_DELETED,
+ phasePayload,
+ );
+ deleteTopicSpy.calledOnce.should.be.true;
+ deleteTopicSpy.calledWith(topic.id).should.be.true;
+ deletePostsSpy.calledWith(topic.id).should.be.true;
+ });
+ });
+});
diff --git a/src/events/projects/index.js b/src/events/projects/index.js
index 99d5c845..0f8dce34 100644
--- a/src/events/projects/index.js
+++ b/src/events/projects/index.js
@@ -8,159 +8,16 @@ import config from 'config';
import util from '../../util';
import models from '../../models';
import { createPhaseTopic } from '../projectPhases';
-import { REGEX } from '../../constants';
+import { PROJECT_STATUS, REGEX, TIMELINE_REFERENCES } from '../../constants';
const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
const eClient = util.getElasticSearchClient();
/**
- * Indexes the project in the elastic search.
- *
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload which is essentially a project in JSON format
- * @returns {undefined}
- */
-const indexProject = Promise.coroutine(function* (logger, msg) { // eslint-disable-line func-names
- const data = JSON.parse(msg.content.toString());
- const userIds = data.members ? _.map(data.members, 'userId') : [];
- try {
- // retrieve member details
- const memberDetails = yield util.getMemberDetailsByUserIds(userIds, logger, msg.properties.correlationId);
- // if no members are returned than this should result in nack
- // if (!_.isArray(memberDetails) || memberDetails.length === 0) {
- // logger.error(`Empty member details for userIds ${userIds.join(',')} requeing the message`);
- // throw new Error(`Empty member details for userIds ${userIds.join(',')} requeing the message`);
- // }
- // update project member record with details
- data.members = data.members.map((single) => {
- const detail = _.find(memberDetails, md => md.userId === single.userId);
- return _.merge(single, _.pick(detail, 'handle', 'firstName', 'lastName', 'email'));
- });
- if (data.phases) {
- // removes non required fields from phase objects
- data.phases = data.phases.map(phase => _.omit(phase, ['deletedAt', 'deletedBy']));
- }
- // add the record to the index
- const result = yield eClient.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.id,
- body: data,
- });
- logger.debug(`project indexed successfully (projectId: ${data.id})`, result);
- } catch (error) {
- logger.error(`Error indexing project (projectId: ${data.id})`, error);
- throw error;
- }
-});
-
-/**
- * Handler for project creation event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectCreatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- const project = JSON.parse(msg.content.toString());
- try {
- yield indexProject(logger, msg);
- if (project.phases && project.phases.length > 0) {
- logger.debug('Phases found for the project, trying to create topics for each phase.');
- const topicPromises = _.map(project.phases, phase => createPhaseTopic(logger, phase));
- yield Promise.all(topicPromises);
- }
- channel.ack(msg);
- } catch (error) {
- logger.error(`Error processing event (projectId: ${project.id})`, error);
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Handler for project creation event
- *
- * we call this handle only for the sake of creating topics for the phases
- *
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectCreatedHandlerForPhases = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- const project = JSON.parse(msg.content.toString());
- try {
- if (project.phases && project.phases.length > 0) {
- logger.debug('Phases found for the project, trying to create topics for each phase.');
- const topicPromises = _.map(project.phases, phase => createPhaseTopic(logger, phase));
- yield Promise.all(topicPromises);
- }
- channel.ack(msg);
- } catch (error) {
- logger.error(`Error processing event (projectId: ${project.id})`, error);
- channel.nack(msg, false, !msg.fields.redelivered);
- }
-});
-
-/**
- * Handler for project updated event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectUpdatedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- const data = JSON.parse(msg.content.toString());
- try {
- // first get the existing document and than merge the updated changes and save the new document
- const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.original.id });
- const merged = _.merge(doc._source, data.updated); // eslint-disable-line no-underscore-dangle
- // update the merged document
- yield eClient.update({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: data.original.id,
- body: {
- doc: merged,
- },
- });
- logger.debug(`project updated successfully in elasticsearh index, (projectId: ${data.original.id})`);
- channel.ack(msg);
- return undefined;
- } catch (error) {
- logger.error(`failed to get project document, (projectId: ${data.original.id})`, error);
- channel.nack(msg, false, !msg.fields.redelivered);
- return undefined;
- }
-});
-
-/**
- * Handler for project deleted event
- * @param {Object} logger logger to log along with trace id
- * @param {Object} msg event payload
- * @param {Object} channel channel to ack, nack
- * @returns {undefined}
- */
-const projectDeletedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names
- const data = JSON.parse(msg.content.toString());
- try {
- yield eClient.delete({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.id });
- logger.debug(`project deleted successfully from elasticsearh index (projectId: ${data.id})`);
- channel.ack(msg);
- return undefined;
- } catch (error) {
- logger.error(`failed to delete project document (projectId: ${data.id})`, error);
- channel.nack(msg, false, !msg.fields.redelivered);
- return undefined;
- }
-});
-
-/**
- * Kafka event handlers
+ * Payload for deprecated BUS events like `connect.notification.project.updated`.
*/
-
-const payloadSchema = Joi.object().keys({
+const projectUpdatedPayloadSchema = Joi.object().keys({
projectId: Joi.number().integer().positive().required(),
projectName: Joi.string().optional(),
projectUrl: Joi.string().regex(REGEX.URL).optional(),
@@ -177,7 +34,7 @@ const payloadSchema = Joi.object().keys({
*/
async function projectUpdatedKafkaHandler(app, topic, payload) {
// Validate payload
- const result = Joi.validate(payload, payloadSchema);
+ const result = Joi.validate(payload, projectUpdatedPayloadSchema);
if (result.error) {
throw new Error(result.error);
}
@@ -197,7 +54,7 @@ async function projectUpdatedKafkaHandler(app, topic, payload) {
// first get the existing document and than merge the updated changes and save the new document
try {
const doc = await eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: previousValue.id });
- console.log(doc._source, 'Received project from ES');// eslint-disable-line no-underscore-dangle
+ // console.log(doc._source, 'Received project from ES');// eslint-disable-line no-underscore-dangle
const merged = _.merge(doc._source, project.get({ plain: true })); // eslint-disable-line no-underscore-dangle
console.log(merged, 'Merged project');
// update the merged document
@@ -216,10 +73,100 @@ async function projectUpdatedKafkaHandler(app, topic, payload) {
}
}
+/**
+ * Payload for new unified BUS events like `project.action.created` with `resource=project`
+ */
+const projectPayloadSchema = Joi.object().keys({
+ id: Joi.number().integer().positive().required(),
+ createdAt: Joi.date().required(),
+ updatedAt: Joi.date().required(),
+ terms: Joi.array().items(Joi.number().positive()).optional(),
+ name: Joi.string().required(),
+ description: Joi.string().allow(null).allow('').optional(),
+ type: Joi.string().max(45).required(),
+ createdBy: Joi.number().integer().positive().required(), // userId
+ updatedBy: Joi.number().integer().required(), // userId - can be negative for M2M tokens
+ challengeEligibility: Joi.array().items(Joi.object().keys({
+ role: Joi.string().valid('submitter', 'reviewer', 'copilot'),
+ users: Joi.array().items(Joi.number().positive()),
+ groups: Joi.array().items(Joi.number().positive()),
+ })).allow(null),
+ bookmarks: Joi.array().items(Joi.object().keys({
+ title: Joi.string(),
+ address: Joi.string().regex(REGEX.URL),
+ createdAt: Joi.date(),
+ createdBy: Joi.number().integer().positive(),
+ updatedAt: Joi.date(),
+ updatedBy: Joi.number().integer().positive(),
+ })).optional().allow(null),
+ external: Joi.object().keys({
+ id: Joi.string(),
+ type: Joi.any().valid('github', 'jira', 'asana', 'other'),
+ data: Joi.string().max(300), // TODO - restrict length
+ }).allow(null),
+ status: Joi.string().required(),
+ lastActivityAt: Joi.date().required(),
+ lastActivityUserId: Joi.string().required(), // user handle
+ version: Joi.string(),
+ directProjectId: Joi.number().positive().allow(null),
+ billingAccountId: Joi.number().positive().allow(null),
+ utm: Joi.object().keys({
+ source: Joi.string().allow(null),
+ medium: Joi.string().allow(null),
+ campaign: Joi.string().allow(null),
+ }).allow(null),
+ estimatedPrice: Joi.number().precision(2).positive().optional()
+ .allow(null),
+ details: Joi.any(),
+ templateId: Joi.number().integer().positive().allow(null),
+ estimation: Joi.array().items(Joi.object().keys({
+ conditions: Joi.string().required(),
+ price: Joi.number().required(),
+ quantity: Joi.number().optional(),
+ minTime: Joi.number().integer().required(),
+ maxTime: Joi.number().integer().required(),
+ buildingBlockKey: Joi.string().required(),
+ metadata: Joi.object().optional(),
+ })).optional(),
+ // cancel reason is mandatory when project status is cancelled
+ cancelReason: Joi.when('status', {
+ is: PROJECT_STATUS.CANCELLED,
+ then: Joi.string().required(),
+ otherwise: Joi.string().optional().allow(null),
+ }),
+}).unknown(true).required();
+
+/**
+ * Project Created BUS API event handler.
+ * - creates topics for the phases of the newly created project
+ * - throws exceptions in case of error
+ *
+ * @param {Object} app Application object
+ * @param {String} topic Kafka topic
+ * @param {Object} payload Message payload
+ * @return {Promise} Promise
+ */
+async function projectCreatedKafkaHandler(app, topic, payload) {
+ // Validate payload
+ const result = Joi.validate(payload, projectPayloadSchema);
+ if (result.error) {
+ throw new Error(result.error);
+ }
+
+ const project = payload;
+
+ if (project.phases && project.phases.length > 0) {
+ app.logger.debug('Phases found for the project, trying to create topics for each phase.');
+ const topicPromises = _.map(
+ project.phases,
+ phase => createPhaseTopic(app.logger, phase, TIMELINE_REFERENCES.PHASE),
+ );
+ await Promise.all(topicPromises);
+ app.logger.debug('Topics for phases are successfully created.');
+ }
+}
+
module.exports = {
- projectCreatedHandler,
- projectCreatedHandlerForPhases,
- projectUpdatedHandler,
- projectDeletedHandler,
projectUpdatedKafkaHandler,
+ projectCreatedKafkaHandler,
};
diff --git a/src/events/projects/index.spec.js b/src/events/projects/index.spec.js
index e66754c4..0d75ba82 100644
--- a/src/events/projects/index.spec.js
+++ b/src/events/projects/index.spec.js
@@ -1,13 +1,11 @@
/* eslint-disable no-unused-expressions */
import _ from 'lodash';
-import sinon from 'sinon';
import chai, { expect } from 'chai';
import config from 'config';
import util from '../../util';
import models from '../../models';
import { projectUpdatedKafkaHandler } from './index';
import testUtil from '../../tests/util';
-import server from '../../app';
const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
@@ -27,13 +25,7 @@ describe('projectUpdatedKafkaHandler', () => {
initiatorUserId: 2,
};
- const mockedApp = {
- services: {
- pubsub: {
- publish: sinon.stub(),
- },
- },
- };
+ const mockedApp = {};
it('should throw validation exception when payload is empty', async () => {
await expect(projectUpdatedKafkaHandler(mockedApp, topic, {})).to.be.rejectedWith(Error);
@@ -108,7 +100,7 @@ describe('projectUpdatedKafkaHandler', () => {
lastActivityUserId: '1',
});
// add project to ES index
- await server.services.es.index({
+ await eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: project.id,
diff --git a/src/index-kafka.js b/src/index-kafka.js
index aa0123e5..e4f5b6a5 100644
--- a/src/index-kafka.js
+++ b/src/index-kafka.js
@@ -1,7 +1,7 @@
import _ from 'lodash';
import config from 'config';
import startKafkaConsumer from './services/kafkaConsumer';
-import { kafkaHandlers } from './events';
+import kafkaHandlers from './events/kafkaHandlers';
import models from './models';
const coreLib = require('tc-core-library-js');
@@ -36,16 +36,35 @@ const logger = coreLib.logger({
// =======================
logger.info('Registering models ... ', !!models);
+const app = { logger, models };
+
+const consumerPromise = startKafkaConsumer(kafkaHandlers, app, logger);
+
/**
* Handle server shutdown gracefully
* @returns {undefined}
*/
function gracefulShutdown() {
- // TODO
+ logger.info('Gracefully shutting down Kafka consumer...');
+ consumerPromise.then(async (consumer) => {
+ try {
+ await consumer.end();
+ logger.info('Successfully stopped Kafka consumer.');
+ process.exit(0);
+ } catch (error) {
+ logger.error('Failed to stop Kafka consumer.');
+ process.exit(1);
+ }
+ });
+
+ // if we couldn't shut down gracefully within the timeout, force exit
+ const timeout = 10; // seconds
+ setTimeout(() => {
+ logger.error('Could not close connections in time, forcefully shutting down.');
+ process.exit(1);
+ }, timeout * 1000);
}
process.on('SIGTERM', gracefulShutdown);
process.on('SIGINT', gracefulShutdown);
-const app = { logger, models };
-
-module.exports = startKafkaConsumer(kafkaHandlers, app, logger);
+module.exports = consumerPromise;
diff --git a/src/index.js b/src/index.js
index 4e73d87c..9ab40dab 100644
--- a/src/index.js
+++ b/src/index.js
@@ -4,27 +4,6 @@ const app = require('./app');
const coreLib = require('tc-core-library-js');
const expressListRoutes = require('express-list-routes');
-/**
- * Handle server shutdown gracefully
- * @returns {undefined}
- */
-function gracefulShutdown() {
- app.services.pubsub.disconnect()
- .then(() => {
- app.logger.info('Gracefully shutting down server');
- process.exit();
- }).catch((err) => {
- app.logger.error(err);
- });
- // if after
- setTimeout(() => {
- app.logger.error('Could not close connections in time, forcefully shutting down');
- process.exit();
- }, 10 * 1000);
-}
-process.on('SIGTERM', gracefulShutdown);
-process.on('SIGINT', gracefulShutdown);
-
// =======================
// start the server ======
// =======================
diff --git a/src/models/projectTemplate.js b/src/models/projectTemplate.js
index 75c8fe48..604913af 100644
--- a/src/models/projectTemplate.js
+++ b/src/models/projectTemplate.js
@@ -43,6 +43,11 @@ module.exports = (sequelize, DataTypes) => {
ProjectTemplate.getTemplate = templateId =>
ProjectTemplate.findByPk(templateId, { raw: true })
.then((template) => {
+ // if `template` is not found by `id`, return `template` as-is
+ if (!template) {
+ return template; // it's supposed to be `null` or whatever `findByPk` returns in this case
+ }
+
const formRef = template.form;
return formRef
? models.Form.findAll({ where: formRef, raw: true })
diff --git a/src/permissions/constants.js b/src/permissions/constants.js
index 6c65d0d7..c69cf36f 100644
--- a/src/permissions/constants.js
+++ b/src/permissions/constants.js
@@ -161,8 +161,12 @@ export const PERMISSION = { // eslint-disable-line import/prefer-default-export
meta: {
title: 'Read Project',
group: 'Project',
+ description: 'Read project when user is a member.',
},
- topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS,
+ topcoderRoles: [
+ ...TOPCODER_ROLES_ADMINS,
+ USER_ROLE.MANAGER,
+ ],
projectRoles: ALL,
scopes: SCOPES_PROJECTS_READ,
},
@@ -173,7 +177,10 @@ export const PERMISSION = { // eslint-disable-line import/prefer-default-export
group: 'Project',
description: 'Read any project, even when not a member.',
},
- topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS,
+ topcoderRoles: [
+ ...TOPCODER_ROLES_ADMINS,
+ USER_ROLE.MANAGER,
+ ],
scopes: SCOPES_PROJECTS_READ,
},
@@ -558,10 +565,7 @@ export const PERMISSION = { // eslint-disable-line import/prefer-default-export
},
topcoderRoles: TOPCODER_ROLES_ADMINS,
projectRoles: [
- PROJECT_MEMBER_ROLE.PROGRAM_MANAGER,
- PROJECT_MEMBER_ROLE.SOLUTION_ARCHITECT,
- PROJECT_MEMBER_ROLE.PROJECT_MANAGER,
- PROJECT_MEMBER_ROLE.MANAGER,
+ ...PROJECT_ROLES_MANAGEMENT,
PROJECT_MEMBER_ROLE.COPILOT,
],
},
@@ -577,12 +581,6 @@ export const PROJECT_TO_TOPCODER_ROLES_MATRIX = {
USER_ROLE.TOPCODER_ADMIN,
USER_ROLE.CONNECT_ADMIN,
USER_ROLE.MANAGER,
- ],
- [PROJECT_MEMBER_ROLE.COPILOT]: [
- USER_ROLE.COPILOT,
- ],
- [PROJECT_MEMBER_ROLE.ACCOUNT_MANAGER]: [
- USER_ROLE.MANAGER,
USER_ROLE.TOPCODER_ACCOUNT_MANAGER,
USER_ROLE.BUSINESS_DEVELOPMENT_REPRESENTATIVE,
USER_ROLE.PRESALES,
@@ -590,18 +588,10 @@ export const PROJECT_TO_TOPCODER_ROLES_MATRIX = {
USER_ROLE.PROGRAM_MANAGER,
USER_ROLE.SOLUTION_ARCHITECT,
USER_ROLE.PROJECT_MANAGER,
+ USER_ROLE.COPILOT_MANAGER,
],
- [PROJECT_MEMBER_ROLE.ACCOUNT_EXECUTIVE]: [
- USER_ROLE.ACCOUNT_EXECUTIVE,
- ],
- [PROJECT_MEMBER_ROLE.PROJECT_MANAGER]: [
- USER_ROLE.PROJECT_MANAGER,
- ],
- [PROJECT_MEMBER_ROLE.SOLUTION_ARCHITECT]: [
- USER_ROLE.SOLUTION_ARCHITECT,
- ],
- [PROJECT_MEMBER_ROLE.PROGRAM_MANAGER]: [
- USER_ROLE.PROGRAM_MANAGER,
+ [PROJECT_MEMBER_ROLE.COPILOT]: [
+ USER_ROLE.COPILOT,
],
};
@@ -618,6 +608,9 @@ export const DEFAULT_PROJECT_ROLE = [
{
topcoderRole: USER_ROLE.MANAGER,
projectRole: PROJECT_MEMBER_ROLE.MANAGER,
+ }, {
+ topcoderRole: USER_ROLE.COPILOT_MANAGER,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.CONNECT_ADMIN,
projectRole: PROJECT_MEMBER_ROLE.MANAGER,
@@ -626,28 +619,28 @@ export const DEFAULT_PROJECT_ROLE = [
projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.TOPCODER_ACCOUNT_MANAGER,
- projectRole: PROJECT_MEMBER_ROLE.ACCOUNT_MANAGER,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.BUSINESS_DEVELOPMENT_REPRESENTATIVE,
- projectRole: PROJECT_MEMBER_ROLE.ACCOUNT_MANAGER,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.PRESALES,
- projectRole: PROJECT_MEMBER_ROLE.ACCOUNT_MANAGER,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.COPILOT,
projectRole: PROJECT_MEMBER_ROLE.COPILOT,
}, {
topcoderRole: USER_ROLE.ACCOUNT_EXECUTIVE,
- projectRole: PROJECT_MEMBER_ROLE.ACCOUNT_EXECUTIVE,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.PROGRAM_MANAGER,
- projectRole: PROJECT_MEMBER_ROLE.PROGRAM_MANAGER,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.SOLUTION_ARCHITECT,
- projectRole: PROJECT_MEMBER_ROLE.SOLUTION_ARCHITECT,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.PROJECT_MANAGER,
- projectRole: PROJECT_MEMBER_ROLE.PROJECT_MANAGER,
+ projectRole: PROJECT_MEMBER_ROLE.MANAGER,
}, {
topcoderRole: USER_ROLE.TOPCODER_USER,
projectRole: PROJECT_MEMBER_ROLE.CUSTOMER,
diff --git a/src/routes/attachments/create.js b/src/routes/attachments/create.js
index 81c1a80a..5394a6ad 100644
--- a/src/routes/attachments/create.js
+++ b/src/routes/attachments/create.js
@@ -92,13 +92,6 @@ module.exports = [
const link = _link.get({ plain: true });
req.log.debug('New Link Attachment record: ', link);
- // publish Rabbit MQ event
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_ADDED,
- link,
- { correlationId: req.id },
- );
-
// emit the Kafka event
util.sendResourceToKafkaBus(
req,
@@ -162,12 +155,6 @@ module.exports = [
response = _.omit(response, ['path', 'deletedAt']);
response.downloadUrl = resp.data.result.content.preSignedURL;
- // publish event
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_ADDED,
- newAttachment,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
@@ -184,12 +171,6 @@ module.exports = [
response = _.omit(response, ['path', 'deletedAt']);
// only in development mode
response.downloadUrl = path;
- // publish event
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_ADDED,
- newAttachment,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/attachments/delete.js b/src/routes/attachments/delete.js
index 6685feff..e1ccf885 100644
--- a/src/routes/attachments/delete.js
+++ b/src/routes/attachments/delete.js
@@ -52,11 +52,6 @@ module.exports = [
.then(() => {
// fire event
const pattachment = attachment.get({ plain: true });
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_REMOVED,
- pattachment,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/attachments/update.js b/src/routes/attachments/update.js
index 176ef5b1..5e2bede3 100644
--- a/src/routes/attachments/update.js
+++ b/src/routes/attachments/update.js
@@ -67,12 +67,6 @@ module.exports = [
})).then((updated) => {
req.log.debug('updated project attachment', JSON.stringify(updated, null, 2));
res.json(updated);
- // emit original and updated project information
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_ATTACHMENT_UPDATED,
- { original: previousValue, updated: updated.get({ plain: true }) },
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
diff --git a/src/routes/metadata/list.spec.js b/src/routes/metadata/list.spec.js
index 400034fb..e64c583c 100644
--- a/src/routes/metadata/list.spec.js
+++ b/src/routes/metadata/list.spec.js
@@ -9,12 +9,14 @@ import _ from 'lodash';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
+import util from '../../util';
const should = chai.should();
const expect = chai.expect;
const ES_METADATA_INDEX = config.get('elasticsearchConfig.metadataIndexName');
const ES_METADATA_TYPE = config.get('elasticsearchConfig.metadataDocType');
+const eClient = util.getElasticSearchClient();
const projectTemplates = [
{
@@ -412,7 +414,7 @@ describe('GET all metadata from ES', () => {
})
.then(() => models.BuildingBlock.bulkCreate(buildingBlocks, { returning: true }))
.then((created) => { esData.buildingBlocks = getObjToIndex(created); })
- .then(() => server.services.es.index({
+ .then(() => eClient.index({
index: ES_METADATA_INDEX,
type: ES_METADATA_TYPE,
body: esData,
diff --git a/src/routes/milestones/list.spec.js b/src/routes/milestones/list.spec.js
index 839c9edc..5a72025c 100644
--- a/src/routes/milestones/list.spec.js
+++ b/src/routes/milestones/list.spec.js
@@ -9,9 +9,11 @@ import _ from 'lodash';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
+import util from '../../util';
const ES_TIMELINE_INDEX = config.get('elasticsearchConfig.timelineIndexName');
const ES_TIMELINE_TYPE = config.get('elasticsearchConfig.timelineDocType');
+const eClient = util.getElasticSearchClient();
// eslint-disable-next-line no-unused-vars
const should = chai.should();
@@ -171,7 +173,7 @@ describe('LIST milestones', () => {
// Index to ES
timelines[0].milestones = _.map(createdMilestones, cm => _.omit(cm.toJSON(), 'deletedAt', 'deletedBy'));
timelines[0].projectId = 1;
- return server.services.es.index({
+ return eClient.index({
index: ES_TIMELINE_INDEX,
type: ES_TIMELINE_TYPE,
id: timelines[0].id,
diff --git a/src/routes/phaseProducts/create.js b/src/routes/phaseProducts/create.js
index 2273ec25..0618a4c6 100644
--- a/src/routes/phaseProducts/create.js
+++ b/src/routes/phaseProducts/create.js
@@ -99,12 +99,6 @@ module.exports = [
});
}))
.then(() => {
- // Send events to buses
- req.log.debug('Sending event to RabbitMQ bus for phase product %d', newPhaseProduct.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_ADDED,
- newPhaseProduct,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/phaseProducts/delete.js b/src/routes/phaseProducts/delete.js
index 5894037b..b9620336 100644
--- a/src/routes/phaseProducts/delete.js
+++ b/src/routes/phaseProducts/delete.js
@@ -39,13 +39,6 @@ module.exports = [
.then(entity => entity.destroy()))
.then((deleted) => {
req.log.debug('deleted phase product', JSON.stringify(deleted, null, 2));
-
- // Send events to buses
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_REMOVED,
- deleted,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/phaseProducts/list.spec.js b/src/routes/phaseProducts/list.spec.js
index a2a0f326..8c766df5 100644
--- a/src/routes/phaseProducts/list.spec.js
+++ b/src/routes/phaseProducts/list.spec.js
@@ -6,9 +6,11 @@ import config from 'config';
import server from '../../app';
import models from '../../models';
import testUtil from '../../tests/util';
+import util from '../../util';
const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
+const eClient = util.getElasticSearchClient();
const should = chai.should();
@@ -103,7 +105,7 @@ describe('Phase Products', () => {
// Overwrite lastActivityAt as otherwise ES fill not be able to parse it
project.lastActivityAt = 1;
// Index to ES
- return server.services.es.index({
+ return eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: projectId,
diff --git a/src/routes/phaseProducts/update.js b/src/routes/phaseProducts/update.js
index 0a648519..05b1f836 100644
--- a/src/routes/phaseProducts/update.js
+++ b/src/routes/phaseProducts/update.js
@@ -66,13 +66,6 @@ module.exports = [
const updatedValue = updated.get({ plain: true });
- // emit original and updated project phase information
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_UPDATED,
- { original: previousValue, updated: updatedValue },
- { correlationId: req.id },
- );
-
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/phases/create.js b/src/routes/phases/create.js
index 353a81b5..a6ff8bef 100644
--- a/src/routes/phases/create.js
+++ b/src/routes/phases/create.js
@@ -1,11 +1,10 @@
import validate from 'express-validation';
import _ from 'lodash';
import Joi from 'joi';
-import Sequelize from 'sequelize';
import models from '../../models';
import util from '../../util';
-import { EVENT, RESOURCES, TIMELINE_REFERENCES } from '../../constants';
+import { EVENT, RESOURCES } from '../../constants';
const permissions = require('tc-core-library-js').middleware.permissions;
@@ -45,7 +44,6 @@ module.exports = [
});
let newProjectPhase = null;
- let otherUpdated = null;
models.sequelize.transaction(() => {
req.log.debug('Create Phase - Starting transaction');
return models.Project.findOne({
@@ -73,39 +71,8 @@ module.exports = [
newProjectPhase = _.omit(newProjectPhase, ['deletedAt', 'deletedBy', 'utm']);
});
})
+ // create product if `productTemplateId` is defined
.then(() => {
- req.log.debug('re-ordering the other phases');
-
- if (_.isNil(newProjectPhase.order)) {
- return Promise.resolve();
- }
-
- // Increase the order of the other phases in the same project,
- // which have `order` >= this phase order
- return models.ProjectPhase.update({ order: Sequelize.literal('"order" + 1') }, {
- where: {
- projectId,
- id: { $ne: newProjectPhase.id },
- order: { $gte: newProjectPhase.order },
- },
- });
- })
- .then((updatedCount) => {
- if (updatedCount) {
- return models.ProjectPhase.findAll({
- where: {
- projectId,
- id: { $ne: newProjectPhase.id },
- order: { $gte: newProjectPhase.order },
- },
- order: [['updatedAt', 'DESC']],
- limit: updatedCount[0],
- });
- }
- return Promise.resolve();
- })
- .then((_otherUpdated) => {
- otherUpdated = _otherUpdated || [];
if (_.isNil(data.productTemplateId)) {
return Promise.resolve();
}
@@ -138,40 +105,12 @@ module.exports = [
});
})
.then(() => {
- // Send events to buses
- req.log.debug('Sending event to RabbitMQ bus for project phase %d', newProjectPhase.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_PHASE_ADDED,
- { added: newProjectPhase, route: TIMELINE_REFERENCES.PHASE },
- { correlationId: req.id },
- );
-
- // NOTE So far this logic is implemented in RabbitMQ handler of PROJECT_PHASE_UPDATED
- // Even though we send this event to the Kafka, the "project-processor-es" shouldn't process it.
util.sendResourceToKafkaBus(
req,
EVENT.ROUTING_KEY.PROJECT_PHASE_ADDED,
RESOURCES.PHASE,
newProjectPhase);
- // NOTE So far this logic is implemented in RabbitMQ handler of PROJECT_PHASE_UPDATED
- // Even though we send these events to the Kafka, the "project-processor-es" shouldn't process them.
- //
- // We don't process these event in "project-processor-es"
- // because it will make 'version conflict' error in ES.
- // The order of the other milestones need to be updated in the PROJECT_PHASE_UPDATED event handler
- _.map(otherUpdated, phase =>
- util.sendResourceToKafkaBus(
- req,
- EVENT.ROUTING_KEY.PROJECT_PHASE_UPDATED,
- RESOURCES.PHASE,
- _.assign(_.pick(phase.toJSON(), 'id', 'order', 'updatedBy', 'updatedAt')),
- // Pass the same object as original phase even though, the order has changed.
- // So far we don't use the order so it's ok. But in general, we should pass
- // the original phases. <- TODO
- _.assign(_.pick(phase.toJSON(), 'id', 'order', 'updatedBy', 'updatedAt'))),
- true, // don't send event to Notification Service as the main event here is updating one phase
- );
-
res.status(201).json(newProjectPhase);
})
.catch((err) => {
diff --git a/src/routes/phases/create.spec.js b/src/routes/phases/create.spec.js
index af2cdd61..983ced88 100644
--- a/src/routes/phases/create.spec.js
+++ b/src/routes/phases/create.spec.js
@@ -2,24 +2,17 @@
import _ from 'lodash';
import chai from 'chai';
import sinon from 'sinon';
-import config from 'config';
import request from 'supertest';
import server from '../../app';
import models from '../../models';
import testUtil from '../../tests/util';
import busApi from '../../services/busApi';
-import messageService from '../../services/messageService';
-import RabbitMQService from '../../services/rabbitmq';
-import mockRabbitMQ from '../../tests/mockRabbitMQ';
import {
BUS_API_EVENT, RESOURCES, CONNECT_NOTIFICATION_EVENT,
} from '../../constants';
const should = chai.should();
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
const body = {
name: 'test project phase',
description: 'test project phase description',
@@ -286,49 +279,6 @@ describe('Project Phases', () => {
});
});
- it('should return 201 if payload has order specified', (done) => {
- request(server)
- .post(`/v5/projects/${projectId}/phases/`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.copilot}`,
- })
- .send(_.assign({ order: 1 }, body))
- .expect('Content-Type', /json/)
- .expect(201)
- .end((err, res) => {
- if (err) {
- done(err);
- } else {
- const resJson = res.body;
- validatePhase(resJson, body);
- resJson.order.should.be.eql(1);
-
- const firstPhaseId = resJson.id;
-
- // Create second phase
- request(server)
- .post(`/v5/projects/${projectId}/phases/`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.copilot}`,
- })
- .send(_.assign({ order: 1 }, body))
- .expect('Content-Type', /json/)
- .expect(201)
- .end((err2, res2) => {
- const resJson2 = res2.body;
- validatePhase(resJson2, body);
- resJson2.order.should.be.eql(1);
-
- models.ProjectPhase.findOne({ where: { id: firstPhaseId } })
- .then((firstPhase) => {
- firstPhase.order.should.be.eql(2);
- done();
- });
- });
- }
- });
- });
-
it('should return 201 if payload has productTemplateId specified', (done) => {
request(server)
.post(`/v5/projects/${projectId}/phases/`)
@@ -476,87 +427,5 @@ describe('Project Phases', () => {
});
});
});
-
- describe('RabbitMQ Message topic', () => {
- let createMessageSpy;
- let publishSpy;
- let sandbox;
-
- before((done) => {
- // Wait for 500ms in order to wait for createEvent calls from previous tests to complete
- testUtil.wait(done);
- });
-
- beforeEach(async () => {
- sandbox = sinon.sandbox.create();
- server.services.pubsub = new RabbitMQService(server.logger);
-
- // initialize RabbitMQ
- server.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- // add project to ES index
- await server.services.es.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: {
- doc: project,
- },
- });
-
- return new Promise(resolve => setTimeout(() => {
- publishSpy = sandbox.spy(server.services.pubsub, 'publish');
- createMessageSpy = sandbox.spy(messageService, 'createTopic');
- resolve();
- }, 500));
- });
-
- afterEach(() => {
- sandbox.restore();
- });
-
- after(() => {
- mockRabbitMQ(server);
- });
-
- it('should send message topic when phase added', (done) => {
- const mockHttpClient = _.merge(testUtil.mockHttpClient, {
- post: () => Promise.resolve({
- status: 200,
- data: {},
- }),
- });
- sandbox.stub(messageService, 'getClient', () => mockHttpClient);
- request(server)
- .post(`/v5/projects/${projectId}/phases/`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.copilot}`,
- })
- .send(body)
- .expect('Content-Type', /json/)
- .expect(201)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- testUtil.wait(() => {
- publishSpy.calledOnce.should.be.true;
- publishSpy.calledWith('project.phase.added').should.be.true;
- createMessageSpy.calledOnce.should.be.true;
- createMessageSpy.calledWith(sinon.match({ reference: 'project',
- referenceId: '1',
- tag: 'phase#1',
- title: 'test project phase',
- })).should.be.true;
- done();
- });
- }
- });
- });
- });
});
});
diff --git a/src/routes/phases/delete.js b/src/routes/phases/delete.js
index ab934ef4..3dcaa017 100644
--- a/src/routes/phases/delete.js
+++ b/src/routes/phases/delete.js
@@ -4,7 +4,7 @@ import _ from 'lodash';
import { middleware as tcMiddleware } from 'tc-core-library-js';
import models from '../../models';
import util from '../../util';
-import { EVENT, RESOURCES, TIMELINE_REFERENCES } from '../../constants';
+import { EVENT, RESOURCES } from '../../constants';
const permissions = tcMiddleware.permissions;
@@ -38,13 +38,6 @@ module.exports = [
.then((deleted) => {
req.log.debug('deleted project phase', JSON.stringify(deleted, null, 2));
- // Send events to buses
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_REMOVED,
- { deleted, route: TIMELINE_REFERENCES.PHASE },
- { correlationId: req.id },
- );
-
// emit event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/phases/delete.spec.js b/src/routes/phases/delete.spec.js
index a07842c8..a690fe2c 100644
--- a/src/routes/phases/delete.spec.js
+++ b/src/routes/phases/delete.spec.js
@@ -3,14 +3,10 @@ import _ from 'lodash';
import request from 'supertest';
import sinon from 'sinon';
import chai from 'chai';
-import config from 'config';
import server from '../../app';
import models from '../../models';
import testUtil from '../../tests/util';
import busApi from '../../services/busApi';
-import messageService from '../../services/messageService';
-import RabbitMQService from '../../services/rabbitmq';
-import mockRabbitMQ from '../../tests/mockRabbitMQ';
import {
BUS_API_EVENT,
RESOURCES,
@@ -19,9 +15,6 @@ import {
const should = chai.should(); // eslint-disable-line no-unused-vars
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
const expectAfterDelete = (projectId, id, err, next) => {
if (err) throw err;
setTimeout(() =>
@@ -86,15 +79,6 @@ describe('Project Phases', () => {
lastActivityAt: 1,
lastActivityUserId: '1',
};
- const topic = {
- id: 1,
- title: 'test project phase',
- posts:
- [{ id: 1,
- type: 'post',
- body: 'body',
- }],
- };
beforeEach((done) => {
// mocks
testUtil.clearDb()
@@ -290,82 +274,5 @@ describe('Project Phases', () => {
});
});
});
-
- describe('RabbitMQ Message topic', () => {
- let deleteTopicSpy;
- let deletePostsSpy;
- let publishSpy;
- let sandbox;
-
- before((done) => {
- // Wait for 500ms in order to wait for createEvent calls from previous tests to complete
- testUtil.wait(done);
- });
-
- beforeEach(async () => {
- sandbox = sinon.sandbox.create();
- server.services.pubsub = new RabbitMQService(server.logger);
-
- // initialize RabbitMQ
- server.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- // add project to ES index
- await server.services.es.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: {
- doc: _.assign(project, { phases: [_.assign(body, { id: phaseId, projectId })] }),
- },
- });
-
- return new Promise(resolve => setTimeout(() => {
- publishSpy = sandbox.spy(server.services.pubsub, 'publish');
- deleteTopicSpy = sandbox.spy(messageService, 'deleteTopic');
- deletePostsSpy = sandbox.spy(messageService, 'deletePosts');
- sandbox.stub(messageService, 'getTopicByTag', () => Promise.resolve(topic));
- resolve();
- }, 500));
- });
-
- afterEach(() => {
- sandbox.restore();
- });
-
- after(() => {
- mockRabbitMQ(server);
- });
-
- it('should send message topic when phase deleted', (done) => {
- const mockHttpClient = _.merge(testUtil.mockHttpClient, {
- delete: () => Promise.resolve(true),
- });
- sandbox.stub(messageService, 'getClient', () => mockHttpClient);
- request(server)
- .delete(`/v5/projects/${projectId}/phases/${phaseId}`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.admin}`,
- })
- .expect(204)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- testUtil.wait(() => {
- publishSpy.calledOnce.should.be.true;
- publishSpy.firstCall.calledWith('project.phase.removed').should.be.true;
- deleteTopicSpy.calledOnce.should.be.true;
- deleteTopicSpy.calledWith(topic.id).should.be.true;
- deletePostsSpy.calledWith(topic.id).should.be.true;
- done();
- });
- }
- });
- });
- });
});
});
diff --git a/src/routes/phases/list.spec.js b/src/routes/phases/list.spec.js
index bce8448b..6058643b 100644
--- a/src/routes/phases/list.spec.js
+++ b/src/routes/phases/list.spec.js
@@ -6,9 +6,11 @@ import chai from 'chai';
import server from '../../app';
import models from '../../models';
import testUtil from '../../tests/util';
+import util from '../../util';
const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
+const eClient = util.getElasticSearchClient();
const should = chai.should();
@@ -88,7 +90,7 @@ describe('Project Phases', () => {
// Overwrite lastActivityAt as otherwise ES fill not be able to parse it
project.lastActivityAt = 1;
project.phases = [phase];
- return server.services.es.index({
+ return eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: projectId,
diff --git a/src/routes/phases/update.js b/src/routes/phases/update.js
index 7bafa010..9f4c417a 100644
--- a/src/routes/phases/update.js
+++ b/src/routes/phases/update.js
@@ -2,11 +2,10 @@
import validate from 'express-validation';
import _ from 'lodash';
import Joi from 'joi';
-import Sequelize from 'sequelize';
import { middleware as tcMiddleware } from 'tc-core-library-js';
import models from '../../models';
import util from '../../util';
-import { EVENT, RESOURCES, TIMELINE_REFERENCES, ROUTES } from '../../constants';
+import { EVENT, RESOURCES, ROUTES } from '../../constants';
const permissions = tcMiddleware.permissions;
@@ -88,78 +87,13 @@ module.exports = [
}))
.then((updatedPhase) => {
updated = updatedPhase;
-
- // Ignore re-ordering if there's no order specified for this phase
- if (_.isNil(updated.order)) {
- return Promise.resolve();
- }
-
- // Update order of the other phases only if the order was changed
- if (previousValue.order === updated.order) {
- return Promise.resolve();
- }
-
- return models.ProjectPhase.count({
- where: {
- projectId,
- id: { $ne: updated.id },
- order: updated.order,
- },
- })
- .then((count) => {
- if (count === 0) {
- return Promise.resolve();
- }
-
- // Increase the order from M to K: if there is an item with order K,
- // orders from M+1 to K should be made M to K-1
- if (!_.isNil(previousValue.order) && previousValue.order < updated.order) {
- return models.ProjectPhase.update({ order: Sequelize.literal('"order" - 1') }, {
- where: {
- projectId,
- id: { $ne: updated.id },
- order: { $between: [previousValue.order + 1, updated.order] },
- },
- });
- }
-
- // Decrease the order from M to K: if there is an item with order K,
- // orders from K to M-1 should be made K+1 to M
- return models.ProjectPhase.update({ order: Sequelize.literal('"order" + 1') }, {
- where: {
- projectId,
- id: { $ne: updated.id },
- order: {
- $between: [
- updated.order,
- (previousValue.order ? previousValue.order : Number.MAX_SAFE_INTEGER) - 1,
- ],
- },
- },
- });
- });
- })
- .then(() =>
- // To simpify the logic, reload the phases from DB and send to the message queue
- models.ProjectPhase.findAll({
- where: {
- projectId,
- },
- include: [{ model: models.PhaseProduct, as: 'products' }],
- })),
+ }),
)
- .then((allPhases) => {
+ .then(() => {
req.log.debug('updated project phase', JSON.stringify(updated, null, 2));
const updatedValue = updated.get({ plain: true });
- // emit original and updated project phase information
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_UPDATED,
- { original: previousValue, updated: updatedValue, allPhases, route: TIMELINE_REFERENCES.PHASE },
- { correlationId: req.id },
- );
-
// emit event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/phases/update.spec.js b/src/routes/phases/update.spec.js
index b924f396..cd5720ac 100644
--- a/src/routes/phases/update.spec.js
+++ b/src/routes/phases/update.spec.js
@@ -2,24 +2,17 @@
import _ from 'lodash';
import sinon from 'sinon';
import chai from 'chai';
-import config from 'config';
import request from 'supertest';
import server from '../../app';
import models from '../../models';
import testUtil from '../../tests/util';
import busApi from '../../services/busApi';
-import messageService from '../../services/messageService';
-import RabbitMQService from '../../services/rabbitmq';
-import mockRabbitMQ from '../../tests/mockRabbitMQ';
import {
BUS_API_EVENT,
RESOURCES,
CONNECT_NOTIFICATION_EVENT,
} from '../../constants';
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
const should = chai.should();
const body = {
@@ -67,7 +60,6 @@ describe('Project Phases', () => {
let projectId;
let projectName;
let phaseId;
- let phaseId2;
let phaseId3;
const memberUser = {
handle: testUtil.getDecodedToken(testUtil.jwts.member).handle,
@@ -95,15 +87,6 @@ describe('Project Phases', () => {
lastActivityAt: 1,
lastActivityUserId: '1',
};
- const topic = {
- id: 1,
- title: 'test project phase',
- posts:
- [{ id: 1,
- type: 'post',
- body: 'body',
- }],
- };
beforeEach((done) => {
// mocks
testUtil.clearDb()
@@ -138,7 +121,6 @@ describe('Project Phases', () => {
models.ProjectPhase.bulkCreate(phases, { returning: true })
.then((createdPhases) => {
phaseId = createdPhases[0].id;
- phaseId2 = createdPhases[1].id;
phaseId3 = createdPhases[2].id;
done();
@@ -268,33 +250,6 @@ describe('Project Phases', () => {
});
});
- it('should return updated phase if the order is specified', (done) => {
- request(server)
- .patch(`/v5/projects/${projectId}/phases/${phaseId}`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.copilot}`,
- })
- .send(_.assign({ order: 1 }, updateBody))
- .expect('Content-Type', /json/)
- .expect(200)
- .end((err, res) => {
- if (err) {
- done(err);
- } else {
- const resJson = res.body;
- validatePhase(resJson, updateBody);
- resJson.order.should.be.eql(1);
-
- // Check the order of the other phase
- models.ProjectPhase.findOne({ where: { id: phaseId2 } })
- .then((phase2) => {
- phase2.order.should.be.eql(2);
- done();
- });
- }
- });
- });
-
it('should return 200 if requested by admin', (done) => {
request(server)
.patch(`/v5/projects/${projectId}/phases/${phaseId}`)
@@ -713,87 +668,5 @@ describe('Project Phases', () => {
});
});
});
-
- describe('RabbitMQ Message topic', () => {
- let updateMessageSpy;
- let publishSpy;
- let sandbox;
-
- before((done) => {
- // Wait for 500ms in order to wait for createEvent calls from previous tests to complete
- testUtil.wait(done);
- });
-
- beforeEach(async () => {
- sandbox = sinon.sandbox.create();
- server.services.pubsub = new RabbitMQService(server.logger);
-
- // initialize RabbitMQ
- server.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- // add project to ES index
- await server.services.es.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: {
- doc: _.assign(project, { phases: [_.assign(body, { id: phaseId, projectId })] }),
- },
- });
-
- return new Promise(resolve => setTimeout(() => {
- publishSpy = sandbox.spy(server.services.pubsub, 'publish');
- updateMessageSpy = sandbox.spy(messageService, 'updateTopic');
- sandbox.stub(messageService, 'getTopicByTag', () => Promise.resolve(topic));
- resolve();
- }, 500));
- });
-
- afterEach(() => {
- sandbox.restore();
- });
-
- after(() => {
- mockRabbitMQ(server);
- });
-
- it('should send message topic when phase Updated', (done) => {
- const mockHttpClient = _.merge(testUtil.mockHttpClient, {
- post: () => Promise.resolve({
- status: 200,
- data: {},
- }),
- });
- sandbox.stub(messageService, 'getClient', () => mockHttpClient);
- request(server)
- .patch(`/v5/projects/${projectId}/phases/${phaseId}`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.admin}`,
- })
- .send(_.assign(updateBody, { budget: 123 }))
- .expect('Content-Type', /json/)
- .expect(200)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- testUtil.wait(() => {
- publishSpy.calledOnce.should.be.true;
- publishSpy.calledWith('project.phase.updated').should.be.true;
- updateMessageSpy.calledOnce.should.be.true;
- updateMessageSpy.calledWith(topic.id, sinon.match({
- title: updateBody.name,
- postId: topic.posts[0].id,
- content: topic.posts[0].body })).should.be.true;
- done();
- });
- }
- });
- });
- });
});
});
diff --git a/src/routes/projectMemberInvites/create.js b/src/routes/projectMemberInvites/create.js
index 60b1c570..97e0cd25 100644
--- a/src/routes/projectMemberInvites/create.js
+++ b/src/routes/projectMemberInvites/create.js
@@ -384,18 +384,13 @@ module.exports = [
req, invite.emails, inviteUserIds, invites, data, failed, members, inviteUsers))
.then((values) => {
values.forEach((v) => {
- // emit the event
+ // emit the event
util.sendResourceToKafkaBus(
req,
EVENT.ROUTING_KEY.PROJECT_MEMBER_INVITE_CREATED,
RESOURCES.PROJECT_MEMBER_INVITE,
v.toJSON());
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_MEMBER_INVITE_CREATED,
- v,
- { correlationId: req.id },
- );
// send email invite (async)
if (v.email && !v.userId && v.status === INVITE_STATUS.PENDING) {
sendInviteEmail(req, projectId, v);
diff --git a/src/routes/projectMemberInvites/create.spec.js b/src/routes/projectMemberInvites/create.spec.js
index cef73d72..2ed2f15c 100644
--- a/src/routes/projectMemberInvites/create.spec.js
+++ b/src/routes/projectMemberInvites/create.spec.js
@@ -158,11 +158,6 @@ describe('Project Member Invite create', () => {
let sandbox;
beforeEach(() => {
sandbox = sinon.sandbox.create();
- // restoring the stubs in beforeEach instead of afterEach because these methods are already stubbed
- server.services.pubsub.init.restore();
- server.services.pubsub.publish.restore();
- sinon.stub(server.services.pubsub, 'init', () => {});
- sinon.stub(server.services.pubsub, 'publish', () => {});
// by default mock lookupMultipleUserEmails return nothing so all the cases are not broken
sandbox.stub(util, 'getUserRoles', () => Promise.resolve([]));
sandbox.stub(util, 'lookupMultipleUserEmails', () => Promise.resolve([]));
@@ -239,7 +234,6 @@ describe('Project Member Invite create', () => {
resJson.role.should.equal('customer');
resJson.projectId.should.equal(project1.id);
resJson.email.should.equal('hello@world.com');
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -388,7 +382,6 @@ describe('Project Member Invite create', () => {
resJson.role.should.equal('customer');
resJson.projectId.should.equal(project2.id);
resJson.email.should.equal('hello@world.com');
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -440,7 +433,6 @@ describe('Project Member Invite create', () => {
resJson.projectId.should.equal(project2.id);
should.not.exist(resJson.userId);
resJson.email.should.equal('hello@world.com');
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -488,7 +480,6 @@ describe('Project Member Invite create', () => {
resJson.projectId.should.equal(project2.id);
resJson.userId.should.equal(40051331);
should.not.exist(resJson.email);
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -515,7 +506,6 @@ describe('Project Member Invite create', () => {
resJson[0].handle.should.equal('test_copilot1');
resJson[0].message.should.equal('User with such handle is already a member of the team.');
resJson.length.should.equal(1);
- server.services.pubsub.publish.neverCalledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -559,7 +549,6 @@ describe('Project Member Invite create', () => {
resJson[0].email.should.equal('romit.choudhary@rivigo.com');
resJson[0].message.should.equal('User with such email is already a member of the team.');
resJson.length.should.equal(1);
- server.services.pubsub.publish.neverCalledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -586,7 +575,6 @@ describe('Project Member Invite create', () => {
resJson.length.should.equal(1);
resJson[0].handle.should.equal('test_manager3');
resJson[0].message.should.equal('User with such handle is already invited to this project.');
- server.services.pubsub.publish.neverCalledWith('project.member.invite.created').should.be.true;
done();
}
});
@@ -693,7 +681,6 @@ describe('Project Member Invite create', () => {
resJson.role.should.equal('manager');
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051336);
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
});
});
@@ -718,32 +705,6 @@ describe('Project Member Invite create', () => {
resJson.role.should.equal('manager');
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051333);
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
- done();
- });
- });
-
- it('should return 201 if try to create account_manager with MANAGER_ROLES', (done) => {
- util.getUserRoles.restore();
- sandbox.stub(util, 'getUserRoles', () => Promise.resolve([USER_ROLE.MANAGER]));
- request(server)
- .post(`/v5/projects/${project1.id}/invites`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.manager}`,
- })
- .send({
- handles: ['test_manager4'],
- role: 'account_manager',
- })
- .expect('Content-Type', /json/)
- .expect(201)
- .end((err, res) => {
- const resJson = res.body.success[0];
- should.exist(resJson);
- resJson.role.should.equal('account_manager');
- resJson.projectId.should.equal(project1.id);
- resJson.userId.should.equal(40051336);
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
});
});
@@ -798,7 +759,6 @@ describe('Project Member Invite create', () => {
resJson.role.should.equal('copilot');
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051331);
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
done();
}
});
diff --git a/src/routes/projectMemberInvites/update.js b/src/routes/projectMemberInvites/update.js
index 10668474..ccb7e657 100644
--- a/src/routes/projectMemberInvites/update.js
+++ b/src/routes/projectMemberInvites/update.js
@@ -89,10 +89,6 @@ module.exports = [
RESOURCES.PROJECT_MEMBER_INVITE,
updatedInvite.toJSON());
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_MEMBER_INVITE_UPDATED, updatedInvite, {
- correlationId: req.id,
- });
-
req.log.debug('Adding user to project');
// add user to project if accept invite
if (updatedInvite.status === INVITE_STATUS.ACCEPTED ||
diff --git a/src/routes/projectMembers/create.spec.js b/src/routes/projectMembers/create.spec.js
index 12234fc4..e607131a 100644
--- a/src/routes/projectMembers/create.spec.js
+++ b/src/routes/projectMembers/create.spec.js
@@ -133,7 +133,6 @@ describe('Project Members create', () => {
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051332);
should.exist(resJson.id);
- server.services.pubsub.publish.calledWith('project.member.invite.created').should.be.true;
request(server)
.patch(`/v5/projects/${project1.id}/invites/${resJson.id}`)
.set({
@@ -153,8 +152,6 @@ describe('Project Members create', () => {
resJson2.role.should.equal('copilot');
resJson2.projectId.should.equal(project1.id);
resJson2.userId.should.equal(40051332);
- server.services.pubsub.publish.calledWith('project.member.invite.updated').should.be.true;
- server.services.pubsub.publish.calledWith('project.member.added').should.be.true;
request(server)
.patch(`/v5/projects/${project1.id}/invites/${resJson.id}`)
@@ -228,7 +225,6 @@ describe('Project Members create', () => {
resJson.isPrimary.should.be.truthy;
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051334);
- server.services.pubsub.publish.calledWith('project.member.added').should.be.true;
done();
}
});
@@ -286,7 +282,6 @@ describe('Project Members create', () => {
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051334);
resJson.createdBy.should.equal(config.DEFAULT_M2M_USERID);
- server.services.pubsub.publish.calledWith('project.member.added').should.be.true;
done();
}
});
@@ -327,7 +322,6 @@ describe('Project Members create', () => {
resJson.isPrimary.should.be.truthy;
resJson.projectId.should.equal(project1.id);
resJson.userId.should.equal(40051333);
- server.services.pubsub.publish.calledWith('project.member.added').should.be.true;
done();
}
});
@@ -344,17 +338,6 @@ describe('Project Members create', () => {
.expect(401, done);
});
- it('should return 401 if register admin as role other than manager (project manager) ', (done) => {
- request(server)
- .post(`/v5/projects/${project1.id}/members/`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.admin}`,
- })
- .send({ role: PROJECT_MEMBER_ROLE.PROJECT_MANAGER })
- .expect('Content-Type', /json/)
- .expect(401, done);
- });
-
describe('Bus api', () => {
let createEventSpy;
diff --git a/src/routes/projectMembers/delete.js b/src/routes/projectMembers/delete.js
index 18d43702..8c8e4d55 100644
--- a/src/routes/projectMembers/delete.js
+++ b/src/routes/projectMembers/delete.js
@@ -80,14 +80,8 @@ module.exports = [
}
}))).then((member) => {
// only return the response after transaction is committed
- // fire event
const pmember = member.get({ plain: true });
req.log.debug(pmember);
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_MEMBER_REMOVED,
- pmember,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
diff --git a/src/routes/projectMembers/delete.spec.js b/src/routes/projectMembers/delete.spec.js
index e9989df6..df386745 100644
--- a/src/routes/projectMembers/delete.spec.js
+++ b/src/routes/projectMembers/delete.spec.js
@@ -128,14 +128,6 @@ describe('Project members delete', () => {
.expect(204)
.end((err) => {
expectAfterDelete(project1.id, member1.id, err, () => {
- const removedMember = {
- projectId: project1.id,
- userId: 40051332,
- role: 'copilot',
- isPrimary: true,
- };
- server.services.pubsub.publish.calledWith('project.member.removed',
- sinon.match(removedMember)).should.be.true;
done();
});
@@ -187,14 +179,6 @@ describe('Project members delete', () => {
.expect(204)
.end((err) => {
expectAfterDelete(project1.id, member1.id, err, () => {
- const removedMember = {
- projectId: project1.id,
- userId: 40051332,
- role: 'copilot',
- isPrimary: true,
- };
- server.services.pubsub.publish.calledWith('project.member.removed',
- sinon.match(removedMember)).should.be.true;
// validate the primary copilot
models.ProjectMember.findAll({
paranoid: true,
@@ -245,14 +229,6 @@ describe('Project members delete', () => {
.expect(204)
.end((err) => {
expectAfterDelete(project1.id, member2.id, err, () => {
- const removedMember = {
- projectId: project1.id,
- userId: 40051334,
- role: 'manager',
- isPrimary: true,
- };
- server.services.pubsub.publish.calledWith('project.member.removed',
- sinon.match(removedMember)).should.be.true;
postSpy.should.have.been.calledOnce;
done();
});
@@ -284,14 +260,6 @@ describe('Project members delete', () => {
.expect(204)
.end((err) => {
expectAfterDelete(project1.id, member2.id, err, () => {
- const removedMember = {
- projectId: project1.id,
- userId: 40051334,
- role: 'manager',
- isPrimary: true,
- };
- server.services.pubsub.publish.calledWith('project.member.removed',
- sinon.match(removedMember)).should.be.true;
postSpy.should.have.been.calledOnce;
done();
});
@@ -331,14 +299,6 @@ describe('Project members delete', () => {
.expect(204)
.end((err) => {
expectAfterDelete(project1.id, member2.id, err, () => {
- const removedMember = {
- projectId: project1.id,
- userId: 40051334,
- role: 'manager',
- isPrimary: true,
- };
- server.services.pubsub.publish.calledWith('project.member.removed',
- sinon.match(removedMember)).should.be.true;
postSpy.should.not.have.been.calledOnce;
done();
});
diff --git a/src/routes/projectMembers/update.js b/src/routes/projectMembers/update.js
index ddf318b1..48ba6946 100644
--- a/src/routes/projectMembers/update.js
+++ b/src/routes/projectMembers/update.js
@@ -136,12 +136,6 @@ module.exports = [
})
))
.then((memberWithDetails) => {
- // emit original and updated project information
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_MEMBER_UPDATED,
- { original: previousValue, updated: projectMember },
- { correlationId: req.id },
- );
util.sendResourceToKafkaBus(
req,
EVENT.ROUTING_KEY.PROJECT_MEMBER_UPDATED,
diff --git a/src/routes/projectMembers/update.spec.js b/src/routes/projectMembers/update.spec.js
index 44ce364d..f2ffbb9c 100644
--- a/src/routes/projectMembers/update.spec.js
+++ b/src/routes/projectMembers/update.spec.js
@@ -199,7 +199,6 @@ describe('Project members update', () => {
resJson.role.should.equal('customer');
resJson.isPrimary.should.be.true;
resJson.updatedBy.should.equal(testUtil.userIds.manager);
- server.services.pubsub.publish.calledWith('project.member.updated').should.be.true;
done();
}
});
@@ -246,7 +245,6 @@ describe('Project members update', () => {
resJson.role.should.equal(body.role);
resJson.isPrimary.should.be.false;
resJson.updatedBy.should.equal(testUtil.userIds.manager);
- server.services.pubsub.publish.calledWith('project.member.updated').should.be.true;
done();
}
});
@@ -288,7 +286,6 @@ describe('Project members update', () => {
resJson.isPrimary.should.be.false;
resJson.updatedBy.should.equal(testUtil.userIds.manager);
deleteSpy.should.have.been.calledOnce;
- server.services.pubsub.publish.calledWith('project.member.updated').should.be.true;
done();
}
});
diff --git a/src/routes/projectUpgrade/create.js b/src/routes/projectUpgrade/create.js
index 1037f00d..71fd1a54 100644
--- a/src/routes/projectUpgrade/create.js
+++ b/src/routes/projectUpgrade/create.js
@@ -183,12 +183,6 @@ async function migrateFromV2ToV3(req, project, defaultProductTemplateId, phaseNa
newPhasesAndProducts.forEach(({ phase, products }) => {
const phaseJSON = phase.toJSON();
phaseJSON.products = products;
- // Send events to buses (ProjectPhase)
- req.log.debug('Sending event to RabbitMQ bus for project phase %d', phase.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_PHASE_ADDED,
- phaseJSON,
- { correlationId: req.id },
- );
req.log.debug('Sending event to Kafka bus for project phase %d', phase.id);
req.app.emit(EVENT.ROUTING_KEY.PROJECT_PHASE_ADDED, { req, created: phaseJSON });
});
@@ -196,16 +190,6 @@ async function migrateFromV2ToV3(req, project, defaultProductTemplateId, phaseNa
// Send events to buses (Project)
req.log.debug('updated project', project);
- // publish original and updated project data
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_UPDATED, {
- original: previousValue,
- updated: project,
- }, {
- correlationId: req.id,
- },
- );
-
req.app.emit(EVENT.ROUTING_KEY.PROJECT_UPDATED, {
req,
original: previousValue,
diff --git a/src/routes/projectUpgrade/create.spec.js b/src/routes/projectUpgrade/create.spec.js
index c0a1b680..70529ad9 100644
--- a/src/routes/projectUpgrade/create.spec.js
+++ b/src/routes/projectUpgrade/create.spec.js
@@ -1,7 +1,6 @@
/* eslint-disable no-unused-expressions, no-await-in-loop, no-restricted-syntax */
import { expect } from 'chai';
-import sinon from 'sinon';
import request from 'supertest';
import server from '../../app';
import { PROJECT_STATUS } from '../../constants';
@@ -133,17 +132,9 @@ describe('Project upgrade', () => {
targetVersion: 'v3',
defaultProductTemplateId: defaultProductTemplate.id,
};
- // restoring the stubs in beforeEach instead of afterEach because these methods are already stubbed
- server.services.pubsub.init.restore();
- server.services.pubsub.publish.restore();
- sinon.stub(server.services.pubsub, 'init', () => {});
- sinon.stub(server.services.pubsub, 'publish', () => {});
});
afterEach(async () => {
- // restoring the stubs in beforeEach instead of afterEach because these methods are already stubbed
- // server.services.pubsub.init.restore();
- // server.services.pubsub.publish.restore();
await testUtil.clearDb();
});
@@ -299,12 +290,6 @@ describe('Project upgrade', () => {
});
}
}
-
- expect(server.services.pubsub.publish.calledWith('project.phase.added')).to.be.true;
- // we should not raise product added event as when we are adding a phase, it automatically adds the product
- // product added event should be raised only when a new product is added to an existing phase
- expect(server.services.pubsub.publish.calledWith('project.phase.product.added')).to.be.false;
- expect(server.services.pubsub.publish.calledWith('project.updated')).to.be.true;
};
it('should migrate a non completed project to the expected state', async () => {
diff --git a/src/routes/projects/create.js b/src/routes/projects/create.js
index 8c8b59b2..a7b7cd71 100644
--- a/src/routes/projects/create.js
+++ b/src/routes/projects/create.js
@@ -501,11 +501,6 @@ module.exports = [
newProject.estimations = projectEstimations;
}
- req.log.debug('Sending event to RabbitMQ bus for project %d', newProject.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_DRAFT_CREATED,
- newProject,
- { correlationId: req.id },
- );
req.log.debug('Sending event to Kafka bus for project %d', newProject.id);
// emit event
req.app.emit(EVENT.ROUTING_KEY.PROJECT_DRAFT_CREATED,
diff --git a/src/routes/projects/create.spec.js b/src/routes/projects/create.spec.js
index 6f95d39d..7dc8d551 100644
--- a/src/routes/projects/create.spec.js
+++ b/src/routes/projects/create.spec.js
@@ -9,7 +9,6 @@ import request from 'supertest';
import util from '../../util';
import server from '../../app';
import testUtil from '../../tests/util';
-import RabbitMQService from '../../services/rabbitmq';
import models from '../../models';
import { ATTACHMENT_TYPES } from '../../constants';
@@ -18,8 +17,6 @@ const expect = chai.expect;
describe('Project create', () => {
before((done) => {
- sinon.stub(RabbitMQService.prototype, 'init', () => {});
- sinon.stub(RabbitMQService.prototype, 'publish', () => {});
testUtil.clearDb()
.then(() => testUtil.clearES())
.then(() => models.ProjectType.bulkCreate([
@@ -255,8 +252,6 @@ describe('Project create', () => {
});
after((done) => {
- RabbitMQService.prototype.init.restore();
- RabbitMQService.prototype.publish.restore();
testUtil.clearDb(done);
});
@@ -475,7 +470,6 @@ describe('Project create', () => {
// Check that activity fields are set
resJson.lastActivityUserId.should.be.eql('40051331');
resJson.lastActivityAt.should.be.not.null;
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
done();
}
});
@@ -531,7 +525,6 @@ describe('Project create', () => {
// Check that activity fields are set
resJson.lastActivityUserId.should.be.eql(config.DEFAULT_M2M_USERID.toString());
resJson.lastActivityAt.should.be.not.null;
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
done();
}
});
@@ -583,7 +576,6 @@ describe('Project create', () => {
resJson.bookmarks.should.have.lengthOf(1);
resJson.bookmarks[0].title.should.be.eql('title1');
resJson.bookmarks[0].address.should.be.eql('http://www.address.com');
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
// should not create phases without a template id
resJson.phases.should.have.lengthOf(0);
done();
@@ -664,7 +656,6 @@ describe('Project create', () => {
resJson.attachments[1].type.should.equal(bodyWithAttachments.attachments[1].type);
resJson.attachments[1].tags.should.eql(bodyWithAttachments.attachments[1].tags);
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
// should not create phases without a template id
resJson.phases.should.have.lengthOf(0);
done();
@@ -733,7 +724,6 @@ describe('Project create', () => {
phases[0].products.should.have.lengthOf(1);
phases[0].products[0].name.should.be.eql('product 1');
phases[0].products[0].templateId.should.be.eql(21);
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
done();
}
});
@@ -791,7 +781,6 @@ describe('Project create', () => {
resJson.bookmarks[0].title.should.be.eql('title1');
resJson.bookmarks[0].address.should.be.eql('http://www.address.com');
resJson.phases.should.have.lengthOf(0);
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
// verify that project has been marked to use workstreams
resJson.details.settings.workstreams.should.be.true;
@@ -927,7 +916,6 @@ describe('Project create', () => {
// Check that activity fields are set
resJson.lastActivityUserId.should.be.eql('40051331');
resJson.lastActivityAt.should.be.not.null;
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
// Check new ProjectEstimation records are created.
models.ProjectEstimation.findAll({
@@ -1027,7 +1015,6 @@ describe('Project create', () => {
phases[0].products.should.have.lengthOf(1);
phases[0].products[0].name.should.be.eql('product 1');
phases[0].products[0].templateId.should.be.eql(21);
- server.services.pubsub.publish.calledWith('project.draft-created').should.be.true;
done();
}
});
diff --git a/src/routes/projects/delete.js b/src/routes/projects/delete.js
index 8ae66a92..4eea078a 100644
--- a/src/routes/projects/delete.js
+++ b/src/routes/projects/delete.js
@@ -29,11 +29,6 @@ module.exports = [
})
.then(project => project.destroy({ cascade: true })))
.then((project) => {
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_DELETED,
- { id: projectId },
- { correlationId: req.id },
- );
// emit event
req.app.emit(EVENT.ROUTING_KEY.PROJECT_DELETED,
{ req, project: _.assign({ resource: RESOURCES.PROJECT }, _.pick(project.toJSON(), 'id')),
diff --git a/src/routes/projects/get.spec.js b/src/routes/projects/get.spec.js
index be72238b..7b0c0ff4 100644
--- a/src/routes/projects/get.spec.js
+++ b/src/routes/projects/get.spec.js
@@ -8,10 +8,11 @@ import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
import { ATTACHMENT_TYPES } from '../../constants';
-
+import util from '../../util';
const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
+const eClient = util.getElasticSearchClient();
const should = chai.should();
@@ -180,7 +181,7 @@ describe('GET Project', () => {
});
});
return Promise.all([p1, p2])
- .then(() => server.services.es.index({
+ .then(() => eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: data[0].id,
diff --git a/src/routes/projects/list.spec.js b/src/routes/projects/list.spec.js
index 7e00c2f8..0790b889 100644
--- a/src/routes/projects/list.spec.js
+++ b/src/routes/projects/list.spec.js
@@ -8,10 +8,11 @@ import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
import { ATTACHMENT_TYPES } from '../../constants';
-
+import util from '../../util';
const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
+const eClient = util.getElasticSearchClient();
const should = chai.should();
// test data for 3 projects
@@ -301,7 +302,7 @@ describe('LIST Project', () => {
data[0].id = project1.id;
data[1].id = project2.id;
data[2].id = project3.id;
- const esp1 = server.services.es.index({
+ const esp1 = eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: project1.id,
@@ -309,7 +310,7 @@ describe('LIST Project', () => {
refresh: 'wait_for',
});
- const esp2 = server.services.es.index({
+ const esp2 = eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: project2.id,
@@ -317,7 +318,7 @@ describe('LIST Project', () => {
refresh: 'wait_for',
});
- const esp3 = server.services.es.index({
+ const esp3 = eClient.index({
index: ES_PROJECT_INDEX,
type: ES_PROJECT_TYPE,
id: project3.id,
diff --git a/src/routes/projects/update.js b/src/routes/projects/update.js
index 9c2abc3a..3e44381d 100644
--- a/src/routes/projects/update.js
+++ b/src/routes/projects/update.js
@@ -275,15 +275,6 @@ module.exports = [
project = _.omit(project, ['deletedAt']);
req.log.debug('updated project', project);
previousValue = _.omit(previousValue, ['deletedAt']);
- // publish original and updated project data
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_UPDATED, {
- original: previousValue,
- updated: project,
- }, {
- correlationId: req.id,
- },
- );
req.app.emit(EVENT.ROUTING_KEY.PROJECT_UPDATED, {
req,
original: previousValue,
diff --git a/src/routes/projects/update.spec.js b/src/routes/projects/update.spec.js
index 01b559d9..1f952e09 100644
--- a/src/routes/projects/update.spec.js
+++ b/src/routes/projects/update.spec.js
@@ -181,14 +181,7 @@ describe('Project', () => {
})
.expect('Content-Type', /json/)
.expect(200)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
- done();
- }
- });
+ .end(done);
});
it('should return the project using M2M token with "write:projects" scope', (done) => {
@@ -211,7 +204,6 @@ describe('Project', () => {
resJson.name.should.equal('updateProject name by M2M');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(config.DEFAULT_M2M_USERID);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
done();
}
});
@@ -235,7 +227,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051332);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
done();
}
});
@@ -264,7 +255,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051332);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is updated
models.ProjectHistory.findAll({
limit: 1,
@@ -310,7 +300,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051332);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is not updated
models.ProjectHistory.findAll({
where: {
@@ -385,7 +374,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051332);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is updated
models.ProjectHistory.findAll({
where: {
@@ -436,7 +424,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051334);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is updated
models.ProjectHistory.findAll({
where: {
@@ -488,7 +475,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051333);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is updated
models.ProjectHistory.findAll({
where: {
@@ -564,7 +550,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051332);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is not updated
models.ProjectHistory.findAll({
where: {
@@ -600,7 +585,6 @@ describe('Project', () => {
resJson.billingAccountId.should.equal(123);
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051334);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
done();
}
});
@@ -626,7 +610,6 @@ describe('Project', () => {
should.exist(resJson);
resJson.billingAccountId.should.equal(1);
resJson.billingAccountId.should.equal(1);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
done();
}
});
@@ -652,7 +635,6 @@ describe('Project', () => {
resJson.billingAccountId.should.equal(1);
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051333);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
done();
}
});
@@ -713,7 +695,6 @@ describe('Project', () => {
resJson = resp.body;
should.exist(resJson);
should.not.exist(resJson.bookmarks);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
done();
}
});
@@ -754,7 +735,6 @@ describe('Project', () => {
resJson.name.should.equal('updatedProject name');
resJson.updatedAt.should.not.equal('2016-06-30 00:33:07+00');
resJson.updatedBy.should.equal(40051333);
- server.services.pubsub.publish.calledWith('project.updated').should.be.true;
// validate that project history is updated
models.ProjectHistory.findAll({
where: {
diff --git a/src/routes/scopeChangeRequests/update.js b/src/routes/scopeChangeRequests/update.js
index 6111e248..71c2df10 100644
--- a/src/routes/scopeChangeRequests/update.js
+++ b/src/routes/scopeChangeRequests/update.js
@@ -54,12 +54,6 @@ function updateProjectDetails(req, newScope, projectId) {
const updated = updatedProject.get({ plain: true });
const original = _.omit(previousValue, ['deletedAt', 'deletedBy']);
- // publish original and updated project data
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_UPDATED,
- { original, updated },
- { correlationId: req.id },
- );
req.app.emit(EVENT.ROUTING_KEY.PROJECT_UPDATED, { req, original, updated });
return updatedProject;
diff --git a/src/routes/timelines/create.js b/src/routes/timelines/create.js
index bc523dcc..9645de37 100644
--- a/src/routes/timelines/create.js
+++ b/src/routes/timelines/create.js
@@ -111,13 +111,6 @@ module.exports = [
.catch(next);
})
.then(() => {
- // Send event to bus
- req.log.debug('Sending event to RabbitMQ bus for timeline %d', result.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.TIMELINE_ADDED,
- _.assign({ projectId: req.params.projectId }, result),
- { correlationId: req.id },
- );
-
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/timelines/create.spec.js b/src/routes/timelines/create.spec.js
index 8e94687e..7dc235b4 100644
--- a/src/routes/timelines/create.spec.js
+++ b/src/routes/timelines/create.spec.js
@@ -8,7 +8,7 @@ import _ from 'lodash';
import server from '../../app';
import testUtil from '../../tests/util';
import models from '../../models';
-import { EVENT, MILESTONE_STATUS } from '../../constants';
+import { MILESTONE_STATUS } from '../../constants';
const should = chai.should();
@@ -425,9 +425,6 @@ describe('CREATE timeline', () => {
should.not.exist(resJson.deletedBy);
should.not.exist(resJson.deletedAt);
- // eslint-disable-next-line no-unused-expressions
- server.services.pubsub.publish.calledWith(EVENT.ROUTING_KEY.TIMELINE_ADDED).should.be.true;
-
done();
});
});
@@ -516,9 +513,6 @@ describe('CREATE timeline', () => {
});
});
- // eslint-disable-next-line no-unused-expressions
- server.services.pubsub.publish.calledWith(EVENT.ROUTING_KEY.TIMELINE_ADDED).should.be.true;
-
done();
});
});
diff --git a/src/routes/timelines/delete.js b/src/routes/timelines/delete.js
index 7a8af7eb..a1c2a5c0 100644
--- a/src/routes/timelines/delete.js
+++ b/src/routes/timelines/delete.js
@@ -26,7 +26,6 @@ module.exports = [
permissions('timeline.delete'),
(req, res, next) => {
const timeline = req.timeline;
- const deleted = _.omit(timeline.toJSON(), ['deletedAt', 'deletedBy']);
return models.sequelize.transaction(() =>
// Update the deletedBy, then delete
@@ -46,13 +45,6 @@ module.exports = [
})),
)
.then((milestones) => {
- // Send event to bus
- req.log.debug('Sending event to RabbitMQ bus for timeline %d', deleted.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.TIMELINE_REMOVED,
- deleted,
- { correlationId: req.id },
- );
-
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/timelines/delete.spec.js b/src/routes/timelines/delete.spec.js
index d75b47ad..1614e7fd 100644
--- a/src/routes/timelines/delete.spec.js
+++ b/src/routes/timelines/delete.spec.js
@@ -7,7 +7,6 @@ import chai from 'chai';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
-import { EVENT } from '../../constants';
const should = chai.should(); // eslint-disable-line no-unused-vars
@@ -283,9 +282,6 @@ describe('DELETE timeline', () => {
.expect(204)
.end((err) => {
expectAfterDelete(1, err, () => {
- // eslint-disable-next-line no-unused-expressions
- server.services.pubsub.publish.calledWith(EVENT.ROUTING_KEY.TIMELINE_REMOVED).should.be.true;
-
// Milestones are cascade deleted
setTimeout(() => {
models.Milestone.findAll({ where: { timelineId: 1 } })
diff --git a/src/routes/timelines/get.spec.js b/src/routes/timelines/get.spec.js
index 001c7324..6678c3d1 100644
--- a/src/routes/timelines/get.spec.js
+++ b/src/routes/timelines/get.spec.js
@@ -9,11 +9,13 @@ import _ from 'lodash';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
+import util from '../../util';
const should = chai.should();
const ES_TIMELINE_INDEX = config.get('elasticsearchConfig.timelineIndexName');
const ES_TIMELINE_TYPE = config.get('elasticsearchConfig.timelineDocType');
+const eClient = util.getElasticSearchClient();
const timelines = [
{
@@ -202,7 +204,7 @@ describe('GET timeline', () => {
timelineJson.description = 'from ES';
}
- await server.services.es.index({
+ await eClient.index({
index: ES_TIMELINE_INDEX,
type: ES_TIMELINE_TYPE,
id: timelineJson.id,
diff --git a/src/routes/timelines/list.spec.js b/src/routes/timelines/list.spec.js
index 04a9640e..1d5a1c0c 100644
--- a/src/routes/timelines/list.spec.js
+++ b/src/routes/timelines/list.spec.js
@@ -9,9 +9,11 @@ import _ from 'lodash';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
+import util from '../../util';
const ES_TIMELINE_INDEX = config.get('elasticsearchConfig.timelineIndexName');
const ES_TIMELINE_TYPE = config.get('elasticsearchConfig.timelineDocType');
+const eClient = util.getElasticSearchClient();
const should = chai.should();
@@ -199,7 +201,7 @@ describe('LIST timelines', () => {
);
}
- return server.services.es.index({
+ return eClient.index({
index: ES_TIMELINE_INDEX,
type: ES_TIMELINE_TYPE,
id: timelineJson.id,
diff --git a/src/routes/timelines/update.js b/src/routes/timelines/update.js
index 885d7db4..05a07f3c 100644
--- a/src/routes/timelines/update.js
+++ b/src/routes/timelines/update.js
@@ -96,12 +96,6 @@ module.exports = [
return Promise.resolve();
})
.then(() => {
- // Send event to bus
- req.log.debug('Sending event to RabbitMQ bus for timeline %d', updated.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.TIMELINE_UPDATED,
- { original, updated },
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/timelines/update.spec.js b/src/routes/timelines/update.spec.js
index 73ae6267..87492500 100644
--- a/src/routes/timelines/update.spec.js
+++ b/src/routes/timelines/update.spec.js
@@ -9,7 +9,7 @@ import _ from 'lodash';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
-import { EVENT, BUS_API_EVENT, RESOURCES, CONNECT_NOTIFICATION_EVENT } from '../../constants';
+import { BUS_API_EVENT, RESOURCES, CONNECT_NOTIFICATION_EVENT } from '../../constants';
import busApi from '../../services/busApi';
const should = chai.should();
@@ -483,9 +483,6 @@ describe('UPDATE timeline', () => {
});
});
- // eslint-disable-next-line no-unused-expressions
- server.services.pubsub.publish.calledWith(EVENT.ROUTING_KEY.TIMELINE_UPDATED).should.be.true;
-
done();
});
});
diff --git a/src/routes/workItems/create.js b/src/routes/workItems/create.js
index 0852a26d..d42ec13a 100644
--- a/src/routes/workItems/create.js
+++ b/src/routes/workItems/create.js
@@ -118,12 +118,6 @@ module.exports = [
});
}))
.then(() => {
- // Send events to buses
- req.log.debug('Sending event to RabbitMQ bus for phase product %d', newPhaseProduct.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_ADDED,
- newPhaseProduct,
- { correlationId: req.id },
- );
req.log.debug('Sending event to Kafka bus for phase product %d', newPhaseProduct.id);
// emit the event
util.sendResourceToKafkaBus(
diff --git a/src/routes/workItems/delete.js b/src/routes/workItems/delete.js
index 6c561fd4..6ba799c5 100644
--- a/src/routes/workItems/delete.js
+++ b/src/routes/workItems/delete.js
@@ -77,13 +77,6 @@ module.exports = [
.then(entity => entity.destroy()))
.then((deleted) => {
req.log.debug('deleted work item', JSON.stringify(deleted, null, 2));
-
- // Send events to buses
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_REMOVED,
- deleted,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/workItems/update.js b/src/routes/workItems/update.js
index 77daab17..d7e8aa1b 100644
--- a/src/routes/workItems/update.js
+++ b/src/routes/workItems/update.js
@@ -97,13 +97,6 @@ module.exports = [
req.log.debug('updated work item', JSON.stringify(updated, null, 2));
const updatedValue = updated.get({ plain: true });
-
- // emit original and updated project phase information
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_UPDATED,
- { original: previousValue, updated: updatedValue },
- { correlationId: req.id },
- );
util.sendResourceToKafkaBus(
req,
EVENT.ROUTING_KEY.PROJECT_PHASE_PRODUCT_UPDATED,
diff --git a/src/routes/works/create.js b/src/routes/works/create.js
index 89fa0fe1..d543806c 100644
--- a/src/routes/works/create.js
+++ b/src/routes/works/create.js
@@ -8,7 +8,7 @@ import Sequelize from 'sequelize';
import models from '../../models';
import util from '../../util';
-import { EVENT, RESOURCES, TIMELINE_REFERENCES } from '../../constants';
+import { EVENT, RESOURCES } from '../../constants';
const permissions = require('tc-core-library-js').middleware.permissions;
@@ -135,13 +135,6 @@ module.exports = [
}),
)
.then(() => {
- // Send events to buses
- req.log.debug('Sending event to RabbitMQ bus for project phase %d', newProjectPhase.id);
- req.app.services.pubsub.publish(EVENT.ROUTING_KEY.PROJECT_PHASE_ADDED,
- { added: newProjectPhase, route: TIMELINE_REFERENCES.WORK },
- { correlationId: req.id },
- );
-
req.log.debug('Sending event to Kafka bus for project phase %d', newProjectPhase.id);
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/works/create.spec.js b/src/routes/works/create.spec.js
index 2c85ae01..29d56abe 100644
--- a/src/routes/works/create.spec.js
+++ b/src/routes/works/create.spec.js
@@ -7,19 +7,12 @@ import _ from 'lodash';
import chai from 'chai';
import sinon from 'sinon';
import request from 'supertest';
-import config from 'config';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
import busApi from '../../services/busApi';
-import messageService from '../../services/messageService';
-import RabbitMQService from '../../services/rabbitmq';
-import mockRabbitMQ from '../../tests/mockRabbitMQ';
import { BUS_API_EVENT, CONNECT_NOTIFICATION_EVENT, RESOURCES } from '../../constants';
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
const should = chai.should();
const validatePhase = (resJson, expectedPhase) => {
@@ -377,89 +370,5 @@ describe('CREATE work', () => {
});
});
});
-
- describe('RabbitMQ Message topic', () => {
- let createMessageSpy;
- let publishSpy;
- let sandbox;
-
- before((done) => {
- // Wait for 500ms in order to wait for createEvent calls from previous tests to complete
- testUtil.wait(done);
- });
-
- beforeEach(async () => {
- sandbox = sinon.sandbox.create();
- server.services.pubsub = new RabbitMQService(server.logger);
-
- // initialize RabbitMQ
- server.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- // add project to ES index
- await server.services.es.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: {
- doc: project,
- },
- });
-
- return new Promise(resolve => setTimeout(() => {
- publishSpy = sandbox.spy(server.services.pubsub, 'publish');
- createMessageSpy = sandbox.spy(messageService, 'createTopic');
- resolve();
- }, 500));
- });
-
- afterEach(() => {
- sandbox.restore();
- });
-
- after(() => {
- mockRabbitMQ(server);
- });
-
- it('should send message topic when work added', (done) => {
- const mockHttpClient = _.merge(testUtil.mockHttpClient, {
- post: () => Promise.resolve({
- status: 200,
- data: {
- id: 'requesterId',
- version: 'v3',
- result: {
- success: true,
- status: 200,
- content: {},
- },
- },
- }),
- });
- sandbox.stub(messageService, 'getClient', () => mockHttpClient);
- request(server)
- .post(`/v5/projects/${projectId}/workstreams/${workStreamId}/works`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.connectAdmin}`,
- })
- .send(body)
- .expect(201)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- testUtil.wait(() => {
- publishSpy.calledOnce.should.be.true;
- publishSpy.calledWith('project.phase.added').should.be.true;
- createMessageSpy.calledTwice.should.be.true;
- done();
- });
- }
- });
- });
- });
});
});
diff --git a/src/routes/works/delete.js b/src/routes/works/delete.js
index 9491e5db..dd877a25 100644
--- a/src/routes/works/delete.js
+++ b/src/routes/works/delete.js
@@ -7,7 +7,7 @@ import Joi from 'joi';
import { middleware as tcMiddleware } from 'tc-core-library-js';
import models from '../../models';
import util from '../../util';
-import { EVENT, RESOURCES, TIMELINE_REFERENCES } from '../../constants';
+import { EVENT, RESOURCES } from '../../constants';
const permissions = tcMiddleware.permissions;
@@ -61,13 +61,6 @@ module.exports = [
.then((deleted) => {
req.log.debug('deleted work', JSON.stringify(deleted, null, 2));
- // Send events to buses
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_REMOVED,
- { deleted, route: TIMELINE_REFERENCES.WORK },
- { correlationId: req.id },
- );
-
// emit event
util.sendResourceToKafkaBus(
req,
diff --git a/src/routes/works/delete.spec.js b/src/routes/works/delete.spec.js
index a45644ad..6782ab2f 100644
--- a/src/routes/works/delete.spec.js
+++ b/src/routes/works/delete.spec.js
@@ -6,19 +6,12 @@ import _ from 'lodash';
import request from 'supertest';
import chai from 'chai';
import sinon from 'sinon';
-import config from 'config';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
import busApi from '../../services/busApi';
-import messageService from '../../services/messageService';
-import RabbitMQService from '../../services/rabbitmq';
-import mockRabbitMQ from '../../tests/mockRabbitMQ';
import { BUS_API_EVENT, CONNECT_NOTIFICATION_EVENT, RESOURCES } from '../../constants';
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
chai.should();
const expectAfterDelete = (workId, projectId, workStreamId, err, next) => {
@@ -79,15 +72,6 @@ describe('DELETE work', () => {
lastActivityAt: 1,
lastActivityUserId: '1',
};
- const topic = {
- id: 1,
- title: 'test project phase',
- posts:
- [{ id: 1,
- type: 'post',
- body: 'body',
- }],
- };
beforeEach((done) => {
testUtil.clearDb()
.then(() => {
@@ -315,94 +299,5 @@ describe('DELETE work', () => {
});
});
});
-
- describe('RabbitMQ Message topic', () => {
- let deleteTopicSpy;
- let deletePostsSpy;
- let publishSpy;
- let sandbox;
-
- before((done) => {
- // Wait for 500ms in order to wait for createEvent calls from previous tests to complete
- testUtil.wait(done);
- });
-
- beforeEach(async () => {
- sandbox = sinon.sandbox.create();
- server.services.pubsub = new RabbitMQService(server.logger);
-
- // initialize RabbitMQ
- server.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- // add project to ES index
- await server.services.es.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: {
- doc: _.assign(project, { phases: [_.assign({
- name: 'test project phase',
- status: 'active',
- startDate: '2018-05-15T00:00:00Z',
- endDate: '2018-05-15T12:00:00Z',
- budget: 20.0,
- progress: 1.23456,
- details: {
- message: 'This can be any json',
- },
- createdBy: 1,
- updatedBy: 1,
- projectId,
- }, { id: workId, projectId })] }),
- },
- });
-
- return new Promise(resolve => setTimeout(() => {
- publishSpy = sandbox.spy(server.services.pubsub, 'publish');
- deleteTopicSpy = sandbox.spy(messageService, 'deleteTopic');
- deletePostsSpy = sandbox.spy(messageService, 'deletePosts');
- sandbox.stub(messageService, 'getTopicByTag', () => Promise.resolve(topic));
- resolve();
- }, 500));
- });
-
- afterEach(() => {
- sandbox.restore();
- });
-
- after(() => {
- mockRabbitMQ(server);
- });
-
- it('should send message topic when work deleted', (done) => {
- const mockHttpClient = _.merge(testUtil.mockHttpClient, {
- delete: () => Promise.resolve(true),
- });
- sandbox.stub(messageService, 'getClient', () => mockHttpClient);
- request(server)
- .delete(`/v5/projects/${projectId}/workstreams/${workStreamId}/works/${workId}`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.admin}`,
- })
- .expect(204)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- testUtil.wait(() => {
- publishSpy.calledOnce.should.be.true;
- publishSpy.calledWith('project.phase.removed').should.be.true;
- deleteTopicSpy.calledTwice.should.be.true;
- deletePostsSpy.calledTwice.should.be.true;
- done();
- });
- }
- });
- });
- });
});
});
diff --git a/src/routes/works/update.js b/src/routes/works/update.js
index 8550f821..84e1c52f 100644
--- a/src/routes/works/update.js
+++ b/src/routes/works/update.js
@@ -8,7 +8,7 @@ import Sequelize from 'sequelize';
import { middleware as tcMiddleware } from 'tc-core-library-js';
import models from '../../models';
import util from '../../util';
-import { EVENT, RESOURCES, TIMELINE_REFERENCES, ROUTES } from '../../constants';
+import { EVENT, RESOURCES, ROUTES } from '../../constants';
const permissions = tcMiddleware.permissions;
@@ -139,6 +139,13 @@ module.exports = [
id: { $ne: updated.id },
order: { $between: [previousValue.order + 1, updated.order] },
},
+ include: [{
+ model: models.WorkStream,
+ where: {
+ id: workStreamId,
+ projectId,
+ },
+ }],
});
}
@@ -155,29 +162,49 @@ module.exports = [
],
},
},
+ include: [{
+ model: models.WorkStream,
+ where: {
+ id: workStreamId,
+ projectId,
+ },
+ }],
});
});
- })
- .then(() =>
- // To simpify the logic, reload the phases from DB and send to the message queue
- models.ProjectPhase.findAll({
- where: {
- projectId,
- },
- include: [{ model: models.PhaseProduct, as: 'products' }],
- })),
+ }),
)
- .then((allPhases) => {
+ .then((updatedCount) => {
+ if (updatedCount) {
+ return models.ProjectPhase.findAll({
+ where: {
+ projectId,
+ id: { $ne: updated.id },
+ order: {
+ $between: !_.isNil(previousValue.order) && previousValue.order < updated.order
+ ? [previousValue.order + 1, updated.order]
+ : [
+ updated.order,
+ (previousValue.order ? previousValue.order : Number.MAX_SAFE_INTEGER) - 1,
+ ],
+ },
+ },
+ include: [{
+ model: models.WorkStream,
+ where: {
+ id: workStreamId,
+ projectId,
+ },
+ }],
+ order: [['updatedAt', 'DESC']],
+ limit: updatedCount[0],
+ });
+ }
+ return Promise.resolve([]);
+ })
+ .then((otherUpdated) => {
req.log.debug('updated project phase', JSON.stringify(updated, null, 2));
const updatedValue = updated.get({ plain: true });
-
- // emit original and updated project phase information
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_PHASE_UPDATED,
- { original: previousValue, updated: updatedValue, allPhases, route: TIMELINE_REFERENCES.WORK },
- { correlationId: req.id },
- );
util.sendResourceToKafkaBus(
req,
EVENT.ROUTING_KEY.PROJECT_PHASE_UPDATED,
@@ -187,6 +214,20 @@ module.exports = [
ROUTES.WORKS.UPDATE,
);
+ // send updated event for all other phases which have been cascading updated
+ _.map(otherUpdated, phase =>
+ util.sendResourceToKafkaBus(
+ req,
+ EVENT.ROUTING_KEY.PROJECT_PHASE_UPDATED,
+ RESOURCES.PHASE,
+ _.assign(_.pick(phase.toJSON(), 'id', 'order', 'updatedBy', 'updatedAt')),
+ // Pass the same object as original phase even though, the order has changed.
+ // So far we don't use the order so it's ok. But in general, we should pass
+ // the original phases. <- TODO
+ _.assign(_.pick(phase.toJSON(), 'id', 'order', 'updatedBy', 'updatedAt'))),
+ true, // don't send event to Notification Service as the main event here is updating one phase
+ );
+
res.json(updated);
})
.catch(err => next(err));
diff --git a/src/routes/works/update.spec.js b/src/routes/works/update.spec.js
index cf79077a..df87f44a 100644
--- a/src/routes/works/update.spec.js
+++ b/src/routes/works/update.spec.js
@@ -6,19 +6,12 @@ import _ from 'lodash';
import chai from 'chai';
import request from 'supertest';
import sinon from 'sinon';
-import config from 'config';
import models from '../../models';
import server from '../../app';
import testUtil from '../../tests/util';
import busApi from '../../services/busApi';
-import messageService from '../../services/messageService';
-import RabbitMQService from '../../services/rabbitmq';
-import mockRabbitMQ from '../../tests/mockRabbitMQ';
import { BUS_API_EVENT, RESOURCES, CONNECT_NOTIFICATION_EVENT } from '../../constants';
-const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName');
-const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType');
-
const should = chai.should();
const body = {
@@ -94,15 +87,6 @@ describe('UPDATE work', () => {
lastActivityAt: 1,
lastActivityUserId: '1',
};
- const topic = {
- id: 1,
- title: 'test project phase',
- posts:
- [{ id: 1,
- type: 'post',
- body: 'body',
- }],
- };
beforeEach((done) => {
testUtil.clearDb()
.then(() => {
@@ -706,91 +690,5 @@ describe('UPDATE work', () => {
});
});
});
-
- describe('RabbitMQ Message topic', () => {
- let updateMessageSpy;
- let publishSpy;
- let sandbox;
-
- before((done) => {
- // Wait for 500ms in order to wait for createEvent calls from previous tests to complete
- testUtil.wait(done);
- });
-
- beforeEach(async () => {
- sandbox = sinon.sandbox.create();
- server.services.pubsub = new RabbitMQService(server.logger);
-
- // initialize RabbitMQ
- server.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- );
-
- // add project to ES index
- await server.services.es.index({
- index: ES_PROJECT_INDEX,
- type: ES_PROJECT_TYPE,
- id: projectId,
- body: {
- doc: _.assign(project, { phases: [_.assign(body, { id: workId, projectId })] }),
- },
- });
-
- return new Promise(resolve => setTimeout(() => {
- publishSpy = sandbox.spy(server.services.pubsub, 'publish');
- updateMessageSpy = sandbox.spy(messageService, 'updateTopic');
- sandbox.stub(messageService, 'getTopicByTag', () => Promise.resolve(topic));
- resolve();
- }, 500));
- });
-
- afterEach(() => {
- sandbox.restore();
- });
-
- after(() => {
- mockRabbitMQ(server);
- });
-
- it('should send message topic when work updated', (done) => {
- const mockHttpClient = _.merge(testUtil.mockHttpClient, {
- post: () => Promise.resolve({
- status: 200,
- data: {
- id: 'requesterId',
- version: 'v3',
- result: {
- success: true,
- status: 200,
- content: {},
- },
- },
- }),
- });
- sandbox.stub(messageService, 'getClient', () => mockHttpClient);
- request(server)
- .patch(`/v5/projects/${projectId}/workstreams/${workStreamId}/works/${workId}`)
- .set({
- Authorization: `Bearer ${testUtil.jwts.admin}`,
- })
- .send(_.assign(updateBody, { budget: 123 }))
- .expect('Content-Type', /json/)
- .expect(200)
- .end((err) => {
- if (err) {
- done(err);
- } else {
- testUtil.wait(() => {
- publishSpy.calledOnce.should.be.true;
- publishSpy.calledWith('project.phase.updated').should.be.true;
- updateMessageSpy.calledTwice.should.be.true;
- done();
- });
- }
- });
- });
- });
});
});
diff --git a/src/services/index.js b/src/services/index.js
deleted file mode 100644
index ff13ac32..00000000
--- a/src/services/index.js
+++ /dev/null
@@ -1,45 +0,0 @@
-
-
-import config from 'config';
-import RabbitMQService from './rabbitmq';
-// import startKafkaConsumer from './kafkaConsumer';
-// import { kafkaHandlers } from '../events';
-
-/**
- * Responsible for establishing connections to all external services
- * Also has a hook to load mock services for unit testing.
- *
- * @param {Object} fapp the app object
- * @param {Object} logger the logger to use
- *
- * @return {Void} the function returns void
- */
-module.exports = (fapp, logger) => {
- const app = fapp;
- app.services = app.service || {};
- if (process.env.NODE_ENV.toLowerCase() === 'test') {
- require('../tests/serviceMocks')(app); // eslint-disable-line global-require
- } else {
- logger.info('initializing RabbitMQ service');
- // RabbitMQ Initialization
- app.services.pubsub = new RabbitMQService(logger);
-
- // initialize RabbitMQ
- app.services.pubsub.init(
- config.get('rabbitmqURL'),
- config.get('pubsubExchangeName'),
- config.get('pubsubQueueName'),
- )
- .then(() => {
- logger.info('RabbitMQ service initialized');
- })
- // .then(() => startKafkaConsumer(kafkaHandlers, app, logger))
- // .then(() => {
- // logger.info('Kafka consumer service initialized');
- // })
- .catch((err) => {
- logger.error('Error initializing services', err);
- // gracefulShutdown()
- });
- }
-};
diff --git a/src/services/kafkaConsumer.js b/src/services/kafkaConsumer.js
index b930ab4f..840fe841 100644
--- a/src/services/kafkaConsumer.js
+++ b/src/services/kafkaConsumer.js
@@ -1,5 +1,6 @@
import Kafka from 'no-kafka';
import config from 'config';
+import _ from 'lodash';
/**
* Initializes Kafka consumer and subscribes for the topics
* @param {Object} handlers Object that holds kafka handlers. Where property name is kafka topic and value is handler
@@ -42,22 +43,46 @@ export default async function startKafkaConsumer(handlers, app, logger) {
const onConsume = async (messageSet, topic, partition) => {
for (let messageIndex = 0; messageIndex < messageSet.length; messageIndex += 1) {
const kafkaMessage = messageSet[messageIndex];
- logger.debug(`Consume topic '${topic}' with message: '${kafkaMessage.message.value.toString('utf8')}'.`);
+ // logger.debug(`Consume topic '${topic}' with message: '${kafkaMessage.message.value.toString('utf8')}'.`);
try {
- const handler = handlers[topic];
- if (!handler) {
- logger.info(`No handler configured for topic: ${topic}`);
+ const topicConfig = handlers[topic];
+ if (!topicConfig) {
+ logger.info(`No handler configured for topic "${topic}".`);
return;
}
const busMessage = JSON.parse(kafkaMessage.message.value.toString('utf8'));
- const payload = busMessage.payload;
- // we want message to be processed one by one, so we use `await` inside a loop
- await handler(app, topic, payload); // eslint-disable-line no-await-in-loop
+ const resource = _.get(busMessage, 'payload.resource');
+ // for messages that carry a `resource`, remove it from the `payload`
+ const payload = resource ? _.omit(busMessage.payload, 'resource') : busMessage.payload;
+
+ // Topic config might have a function directly or object where each resource would have its own handler
+ // Function directly:
+ // ```
+ // topicConfig: function() {}
+ // ```
+ // Object with function per resource:
+ // ```
+ // topicConfig: {
+ //   resourceName1: function() {},
+ //   resourceName2: function() {},
+ // }
+ // ```
+ const handler = _.isFunction(topicConfig) ? topicConfig : topicConfig[resource];
+
+ // some topics may have handlers only for some `resource`
+ // if we don't find a handler for a particular resource then we don't process the message
+ if (handler) {
+ // we want message to be processed one by one, so we use `await` inside a loop
+ await handler(app, topic, payload); // eslint-disable-line no-await-in-loop
+ const resourceMessage = resource ? `resource '${resource}' ` : '';
+ logger.info(`Message for topic '${topic}' ${resourceMessage}was successfully processed`);
+ }
+
+ // we commit the offset even if we don't process the message
await consumer.commitOffset({ topic, partition, offset: kafkaMessage.offset }); // eslint-disable-line no-await-in-loop
- logger.info(`Message for topic '${topic}' was successfully processed`);
} catch (error) {
- logger.error(`Message processing failed: ${error}`);
+ logger.error(`Message processing for topic '${topic}' failed: ${error}`);
}
}
};
@@ -65,4 +90,6 @@ export default async function startKafkaConsumer(handlers, app, logger) {
// Subscribe for all topics defined in handlers
const promises = Object.keys(handlers).map(topic => consumer.subscribe(topic, onConsume));
await Promise.all(promises);
+
+ return consumer;
}
diff --git a/src/services/kafkaConsumer.spec.js b/src/services/kafkaConsumer.spec.js
index b38ae452..c71e2b47 100644
--- a/src/services/kafkaConsumer.spec.js
+++ b/src/services/kafkaConsumer.spec.js
@@ -119,7 +119,7 @@ describe('Kafka service', () => {
handlers.topic1.notCalled.should.be.true;
handlers.topic2.notCalled.should.be.true;
mockedLogger.info.calledOnce.should.be.true;
- mockedLogger.info.calledWith('No handler configured for topic: unknown-topic').should.be.true;
+ mockedLogger.info.calledWith('No handler configured for topic "unknown-topic".').should.be.true;
});
});
});
diff --git a/src/services/rabbitmq.js b/src/services/rabbitmq.js
deleted file mode 100644
index 3635a5db..00000000
--- a/src/services/rabbitmq.js
+++ /dev/null
@@ -1,175 +0,0 @@
-import _ from 'lodash';
-import amqplib from 'amqplib';
-import { rabbitHandlers as handlers } from '../events';
-
-module.exports = class RabbitMQService {
- /**
- * constructor
- * @param {Object} logger logger object
- */
- constructor(logger) {
- this.logger = logger;
- this.subscriberCxn = null;
- this.publisherCxn = null;
- this.subscriberQ = null;
- }
-
- /**
- * initialize rabbit mq connections / exchanges/ queues etc
- * @param {String} rabbitmqURL rabbitmq connection url
- * @param {String} exchangeName rabbitmq exchange name
- * @param {String} queueName rabbitmq queue name
- * @return {Promise} Resolved or rejected promise
- */
- init(rabbitmqURL, exchangeName, queueName) {
- const self = this;
- self.rabbitmqURL = rabbitmqURL;
- self.exchangeName = exchangeName;
- self.queueName = queueName;
- return self.createConnection()
- .then((conn) => {
- self.logger.debug('Publisher connection created');
- self.publisherCxn = conn;
- // subscriber connection
- return self.createConnection();
- }).then((conn) => {
- self.logger.debug('Subscriber connection created');
- self.subscriberCxn = conn;
- return self.initSubscriber();
- })
- .catch((err) => {
- self.logger.error(err);
- });
- }
-
- /**
- * helper function to create a connection to rabbitmq
- * @return {promise} promise
- * @private
- */
- createConnection() {
- return amqplib.connect(this.rabbitmqURL);
- }
-
- /**
- * Helper function to handle initializing subscribers
- * @return {promise} resolved promise
- * @private
- */
- initSubscriber() {
- const self = this;
- let channel = null;
- // create channel to setup exchanges + queues + bindings
- // on subscriber connection
- return self.subscriberCxn.createChannel()
- .then((ch) => {
- // assert / create exchanges
- self.logger.debug('Channel created');
- channel = ch;
- return channel.assertExchange(self.exchangeName, 'topic', {
- durable: true,
- });
- }).then(() => {
- // create queue
- // a single queue for project service will suffice
- self.logger.debug('Exchange created');
- // with default params - exclusive:false, durable: true, autoDelete: false
- return channel.assertQueue(self.queueName);
- }).then((qok) => {
- self.logger.debug('Queue %s created', self.queueName);
- self.subscriberQ = qok.queue;
- // bindings for the queue
- // all these keys/bindings should be routed to the same queue
- const bindings = _.keys(handlers);
- self.logger.debug('Adding bindings: ', bindings);
- const bindingPromises = _.map(bindings, rk =>
- channel.bindQueue(self.subscriberQ, self.exchangeName, rk));
- return Promise.all(bindingPromises);
- })
- .then(() =>
- channel.consume(self.subscriberQ, (msg) => {
- const key = msg.fields.routingKey;
- // create a child logger so we can trace with original request id
- const cLogger = self.logger.child({
- requestId: msg.properties.correlationId,
- });
- cLogger.debug('Received Message', key, msg.fields);
- const handler = handlers[key];
- if (!_.isFunction(handler)) {
- cLogger.error(`Unknown message type: ${key}, NACKing... `);
- // channel.nack(msg, false, false)
- } else {
- handler(cLogger, msg, channel);
- }
- }),
- )
- .then(() => {
- self.logger.info('Waiting for messages .... ');
- })
- .catch((err) => {
- // channel.close()
- self.logger.error(err);
- });
- }
-
-
- /**
- * gracefully shutdown any open connections
- * @return {[type]} [description]
- */
- disconnect() {
- // TODO shutdown channel
- // shutdown connections
- const self = this;
- return new Promise((resolve) => {
- const promises = _.map([self.subscriberCxn, self.publisherCxn], (conn) => {
- conn.close();
- });
- Promise.all(promises)
- .then(() => {
- self.logger.info('Disconnected from rabbitmq');
- resolve();
- }).catch((err) => {
- self.logger.error('ERROR Closing connection', err);
- });
- });
- }
-
- /**
- * Publish message to default exchange
- * @param {string} key routing key
- * @param {object} payload message payload
- * @param {Object} props message properties (optional)
- * @returns {Promise} promise
- */
- publish(key, payload, props = {}) {
- let channel = null;
- const self = this;
- // first create a channel - this is a lightweight connection
- return self.publisherCxn.createChannel()
- .then((ch) => {
- channel = ch;
- // make sure the exchance exisits, else create it
- return channel.assertExchange(self.exchangeName, 'topic', {
- durable: true,
- });
- }).then(() => {
- // publish the message
- const updatedProps = _.defaults(props, {
- contentType: 'application/json',
- });
- channel.publish(
- self.exchangeName,
- key,
- new Buffer(JSON.stringify(payload)),
- updatedProps,
- );
- self.logger.debug('Published msg to exchange %s with key: %s', self.exchangeName, key);
- return channel.close();
- })
- .catch((err) => {
- self.logger.error(err);
- return channel.close();
- });
- }
-};
diff --git a/src/tests/index.js b/src/tests/index.js
new file mode 100644
index 00000000..3f798ee5
--- /dev/null
+++ b/src/tests/index.js
@@ -0,0 +1,7 @@
+/**
+ * Tests Initialization
+ */
+import sinon from 'sinon';
+import util from '../util';
+
+sinon.stub(util, 'getM2MToken', () => Promise.resolve('MOCK_TOKEN'));
diff --git a/src/tests/mockRabbitMQ.js b/src/tests/mockRabbitMQ.js
deleted file mode 100644
index 84fa6c84..00000000
--- a/src/tests/mockRabbitMQ.js
+++ /dev/null
@@ -1,16 +0,0 @@
-/**
- * Mock RabbitMQ service
- */
-import sinon from 'sinon';
-import _ from 'lodash';
-
-module.exports = (app) => {
- _.assign(app.services, {
- pubsub: {
- init: () => {},
- publish: () => {},
- },
- });
- sinon.stub(app.services.pubsub, 'init', () => Promise.resolve(true));
- sinon.stub(app.services.pubsub, 'publish', () => Promise.resolve(true));
-};
diff --git a/src/tests/serviceMocks.js b/src/tests/serviceMocks.js
deleted file mode 100644
index ef14eefc..00000000
--- a/src/tests/serviceMocks.js
+++ /dev/null
@@ -1,16 +0,0 @@
-import sinon from 'sinon';
-import _ from 'lodash';
-// we do need to test elasticsearch indexing
-import config from 'config';
-import elasticsearch from 'elasticsearch';
-import util from '../util';
-import mockRabbitMQ from './mockRabbitMQ';
-
-module.exports = (app) => {
- mockRabbitMQ(app);
-
- _.assign(app.services, {
- es: new elasticsearch.Client(_.cloneDeep(config.elasticsearchConfig)),
- });
- sinon.stub(util, 'getM2MToken', () => Promise.resolve('MOCK_TOKEN'));
-};
diff --git a/src/util.js b/src/util.js
index 3ae966e3..2ad0aa4a 100644
--- a/src/util.js
+++ b/src/util.js
@@ -890,14 +890,6 @@ const projectServiceUtils = {
return Promise.resolve();
}).then(() => {
- // TODO Should we also send Kafka event in case we removed some invite above?
-
- // publish event
- req.app.services.pubsub.publish(
- EVENT.ROUTING_KEY.PROJECT_MEMBER_ADDED,
- newMember,
- { correlationId: req.id },
- );
// emit the event
util.sendResourceToKafkaBus(
req,