diff --git a/.circleci/config.yml b/.circleci/config.yml
index e6268124..d634f7e6 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -68,6 +68,7 @@ workflows:
             branches:
               only:
                 - dev
+                - change-validatations-in-job-jc
 
       # Production builds are exectuted only on tagged commits to the
       # master branch.
diff --git a/README.md b/README.md
index aa36c621..287bf7cb 100644
--- a/README.md
+++ b/README.md
@@ -103,10 +103,11 @@
 tc-taas-es-processor | 2021-04-09T21:20:21.469Z DEBUG no-kafka-client Subscribed to taas.workperiodpayment.update:0 offset 0 leader kafka:9093
 tc-taas-es-processor | 2021-04-09T21:20:21.470Z DEBUG no-kafka-client Subscribed to taas.workperiodpayment.delete:0 offset 0 leader kafka:9093
 tc-taas-es-processor | 2021-04-09T21:20:21.471Z DEBUG no-kafka-client Subscribed to taas.workperiodpayment.create:0 offset 0 leader kafka:9093
+tc-taas-es-processor | 2021-04-09T21:20:21.472Z DEBUG no-kafka-client Subscribed to taas.action.retry:0 offset 0 leader kafka:9093
 tc-taas-es-processor | 2021-04-09T21:20:21.473Z DEBUG no-kafka-client Subscribed to taas.job.update:0 offset 0 leader kafka:9093
 tc-taas-es-processor | 2021-04-09T21:20:21.474Z DEBUG no-kafka-client Subscribed to taas.resourcebooking.update:0 offset 0 leader kafka:9093
 tc-taas-es-processor | [2021-04-09T21:20:21.475Z] app INFO : Initialized.......
-tc-taas-es-processor | [2021-04-09T21:20:21.479Z] app INFO : taas.job.create,taas.job.update,taas.job.delete,taas.jobcandidate.create,taas.jobcandidate.update,taas.jobcandidate.delete,taas.resourcebooking.create,taas.resourcebooking.update,taas.resourcebooking.delete,taas.workperiod.create,taas.workperiod.update,taas.workperiod.delete,taas.workperiodpayment.create,taas.workperiodpayment.update,taas.interview.requested,taas.interview.update,taas.interview.bulkUpdate,taas.role.requested,taas.role.update,taas.role.delete
+tc-taas-es-processor | [2021-04-09T21:20:21.479Z] app INFO : common.error.reporting,taas.job.create,taas.job.update,taas.job.delete,taas.jobcandidate.create,taas.jobcandidate.update,taas.jobcandidate.delete,taas.resourcebooking.create,taas.resourcebooking.update,taas.resourcebooking.delete,taas.workperiod.create,taas.workperiod.update,taas.workperiod.delete,taas.workperiodpayment.create,taas.workperiodpayment.update,taas.interview.requested,taas.interview.update,taas.interview.bulkUpdate,taas.role.requested,taas.role.update,taas.role.delete,taas.action.retry
 tc-taas-es-processor | [2021-04-09T21:20:21.480Z] app INFO : Kick Start.......
 tc-taas-es-processor | ********** Topcoder Health Check DropIn listening on port 3001
 tc-taas-es-processor | Topcoder Health Check DropIn started and ready to roll
@@ -176,6 +177,19 @@ To be able to change and test `taas-es-processor` locally you can follow the nex
 2. Run `taas-es-processor` separately from the source code. As `npm run services:up` already run all the dependencies for both `taas-apis` and for `taas-es-processor`. The only thing you need to do for running `taas-es-processor` locally is clone the [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) repository and inside `taas-es-processor` folder run:
    - `nvm use` - to use correct Node version
    - `npm run install`
+   - Create `.env` file with the following environment variables. Values for **Auth0 config** should be shared with you on the forum.
+
+     ```bash
+     # Auth0 config
+     AUTH0_URL=
+     AUTH0_AUDIENCE=
+     AUTH0_CLIENT_ID=
+     AUTH0_CLIENT_SECRET=
+     ```
+
+     - Values from this file will be automatically used by many `npm` commands.
+     - ⚠️ Never commit this file or its copy to the repository!
+
+   - `npm run start`
 
 ## NPM Commands
 
@@ -206,6 +220,7 @@ To be able to change and test `taas-es-processor` locally you can follow the nex
 | `npm run cov` | Code Coverage Report. |
 | `npm run migrate` | Run any migration files which haven't run yet. |
 | `npm run migrate:undo` | Revert most recent migration. |
+| `npm run demo-payment-scheduler` | Create 1000 Work Periods Payment records with status "scheduled" and various "amount" values |
 
 ## Import and Export data
 
diff --git a/app-constants.js b/app-constants.js
index d28f773c..2c232275 100644
--- a/app-constants.js
+++ b/app-constants.js
@@ -86,8 +86,24 @@ const ChallengeStatus = {
 
 const WorkPeriodPaymentStatus = {
   COMPLETED: 'completed',
-  CANCELLED: 'cancelled',
-  SCHEDULED: 'scheduled'
+  SCHEDULED: 'scheduled',
+  IN_PROGRESS: 'in-progress',
+  FAILED: 'failed',
+  CANCELLED: 'cancelled'
+}
+
+const PaymentProcessingSwitch = {
+  ON: 'ON',
+  OFF: 'OFF'
+}
+
+const PaymentSchedulerStatus = {
+  START_PROCESS: 'start-process',
+  CREATE_CHALLENGE: 'create-challenge',
+  ASSIGN_MEMBER: 'assign-member',
+  ACTIVATE_CHALLENGE: 'activate-challenge',
+  GET_USER_ID: 'get-userId',
+  CLOSE_CHALLENGE: 'close-challenge'
 }
 
 module.exports = {
@@ -96,5 +112,7 @@ module.exports = {
   Scopes,
   Interviews,
   ChallengeStatus,
-  WorkPeriodPaymentStatus
+  WorkPeriodPaymentStatus,
+  PaymentSchedulerStatus,
+  PaymentProcessingSwitch
 }
diff --git a/app.js b/app.js
index 7f3d7d85..e6d79c69 100644
--- a/app.js
+++ b/app.js
@@ -13,6 +13,7 @@ const schedule = require('node-schedule')
 const logger = require('./src/common/logger')
 const eventHandlers = require('./src/eventHandlers')
 const interviewService = require('./src/services/InterviewService')
+const { processScheduler } = require('./src/services/PaymentSchedulerService')
 
 // setup express app
 const app = express()
@@ -97,6 +98,9 @@ const server = app.listen(app.get('port'), () => {
   eventHandlers.init()
   // schedule updateCompletedInterviews to run every hour
   schedule.scheduleJob('0 0 * * * *', interviewService.updateCompletedInterviews)
+
+  // schedule payment processing
+  schedule.scheduleJob(config.PAYMENT_PROCESSING.CRON, processScheduler)
 })
 
 if (process.env.NODE_ENV === 'test') {
diff --git a/config/default.js b/config/default.js
index bbb4b722..726c40fa 100644
--- a/config/default.js
+++ b/config/default.js
@@ -140,6 +140,8 @@ module.exports = {
   TAAS_ROLE_UPDATE_TOPIC: process.env.TAAS_ROLE_UPDATE_TOPIC || 'taas.role.update',
   // the delete role entity Kafka message topic
   TAAS_ROLE_DELETE_TOPIC: process.env.TAAS_ROLE_DELETE_TOPIC || 'taas.role.delete',
+  // special kafka topics
+  TAAS_ACTION_RETRY_TOPIC: process.env.TAAS_ACTION_RETRY_TOPIC || 'taas.action.retry',
 
   // the Kafka message topic for sending email
   EMAIL_TOPIC: process.env.EMAIL_TOPIC || 'external.action.email',
@@ -173,5 +175,42 @@ module.exports = {
   // the minimum matching rate when searching roles by skills
   ROLE_MATCHING_RATE: process.env.ROLE_MATCHING_RATE || 0.70,
   // member groups representing Wipro or TopCoder employee
-  INTERNAL_MEMBER_GROUPS: process.env.INTERNAL_MEMBER_GROUPS || ['20000000', '20000001', '20000003', '20000010', '20000015']
+  INTERNAL_MEMBER_GROUPS: process.env.INTERNAL_MEMBER_GROUPS || ['20000000', '20000001', '20000003', '20000010', '20000015'],
+  // Topcoder skills cache time in minutes
+  TOPCODER_SKILLS_CACHE_TIME: process.env.TOPCODER_SKILLS_CACHE_TIME || 60,
+  // payment scheduler config
+  PAYMENT_PROCESSING: {
+    // switch off actual API calls in Payment Scheduler
+    SWITCH: process.env.PAYMENT_PROCESSING_SWITCH || 'OFF',
+    // the payment scheduler cron config
+    CRON: process.env.PAYMENT_PROCESSING_CRON || '0 */5 * * * *',
+    // the number of records processed at one time
+    BATCH_SIZE: parseInt(process.env.PAYMENT_PROCESSING_BATCH_SIZE || 50),
+    // how long before an in-progress record is considered expired (i.e. processed abnormally), moment duration format
+    IN_PROGRESS_EXPIRED: process.env.IN_PROGRESS_EXPIRED || 'PT1H',
+    // the max number of retries
+    MAX_RETRY_COUNT: parseInt(process.env.PAYMENT_PROCESSING_MAX_RETRY_COUNT || 10),
+    // the base delay between retries, unit: ms
+    RETRY_BASE_DELAY: parseInt(process.env.PAYMENT_PROCESSING_RETRY_BASE_DELAY || 100),
+    // the max delay between retries, unit: ms
+    RETRY_MAX_DELAY: parseInt(process.env.PAYMENT_PROCESSING_RETRY_MAX_DELAY || 10000),
+    // the max time of one request, unit: ms
+    PER_REQUEST_MAX_TIME: parseInt(process.env.PAYMENT_PROCESSING_PER_REQUEST_MAX_TIME || 30000),
+    // the max time of one payment record, unit: ms
+    PER_PAYMENT_MAX_TIME: parseInt(process.env.PAYMENT_PROCESSING_PER_PAYMENT_MAX_TIME || 60000),
+    // the max number of payments processed per minute
+    PER_MINUTE_PAYMENT_MAX_COUNT: parseInt(process.env.PAYMENT_PROCESSING_PER_MINUTE_PAYMENT_MAX_COUNT || 12),
+    // the max number of challenge requests per minute
+    PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT: parseInt(process.env.PAYMENT_PROCESSING_PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT || 60),
+    // the max number of resource requests per minute
+    PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT: parseInt(process.env.PAYMENT_PROCESSING_PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT || 20),
+    // the default fixed delay between steps, unit: ms
+    FIX_DELAY_STEP: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500),
+    // the fixed delay after the create challenge step, unit: ms
+    FIX_DELAY_STEP_CREATE_CHALLENGE: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP_CREATE_CHALLENGE || process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500),
+    // the fixed delay after the assign member step, unit: ms
+    FIX_DELAY_STEP_ASSIGN_MEMBER: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP_ASSIGN_MEMBER || process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500),
+    // the fixed delay after the activate challenge step, unit: ms
+    FIX_DELAY_STEP_ACTIVATE_CHALLENGE: parseInt(process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP_ACTIVATE_CHALLENGE || process.env.PAYMENT_PROCESSING_FIX_DELAY_STEP || 500)
+  }
 }
diff --git a/docs/Topcoder-bookings-api.postman_collection.json b/docs/Topcoder-bookings-api.postman_collection.json
index 479ddceb..b37011a5 100644
--- a/docs/Topcoder-bookings-api.postman_collection.json
+++ b/docs/Topcoder-bookings-api.postman_collection.json
@@ -1,6 +1,6 @@
 {
   "info": {
-    "_postman_id": "18310e1b-429d-49db-8555-f4a54404271f",
+    "_postman_id": "d413d21d-272f-454f-b26a-0d7e3bf926d9",
     "name": "Topcoder-bookings-api",
     "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
   },
@@ -21165,6 +21165,221 @@
       }
     ]
   },
+  {
+    "name": "Get Skills by Job Description",
+    "item": [
+      {
+        "name": "get skills successfully",
+        "event": [
+          {
+            "listen": "test",
+            "script": {
+              "exec": [
+                "pm.test('Status code is 200', function () {\r",
+                " pm.response.to.have.status(200);\r",
+                "});"
+              ],
+              "type": "text/javascript"
+            }
+          }
+        ],
+        "request": {
+          "method": "POST",
+          "header": [
+            {
+              "key": "Authorization",
+
"type": "text", + "value": "Bearer {{token_administrator}}" + }, + { + "key": "Content-Type", + "type": "text", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{URL}}/taas-teams/getSkillsByJobDescription", + "host": [ + "{{URL}}" + ], + "path": [ + "taas-teams", + "getSkillsByJobDescription" + ] + } + }, + "response": [] + }, + { + "name": "get skills by invalid token", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test('Status code is 401', function () {\r", + " pm.response.to.have.status(401);\r", + " const response = pm.response.json()\r", + " pm.expect(response.message).to.eq(\"Invalid Token.\")\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "type": "text", + "value": "Bearer invalid_token" + }, + { + "key": "Content-Type", + "type": "text", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. 
Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{URL}}/taas-teams/getSkillsByJobDescription", + "host": [ + "{{URL}}" + ], + "path": [ + "taas-teams", + "getSkillsByJobDescription" + ] + } + }, + "response": [] + }, + { + "name": "get skills by invalid field", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test('Status code is 400', function () {\r", + " pm.response.to.have.status(400);\r", + " const response = pm.response.json()\r", + " pm.expect(response.message).to.eq(\"\\\"data.description\\\" is not allowed to be empty\")\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "type": "text", + "value": "Bearer {{token_administrator}}" + }, + { + "key": "Content-Type", + "type": "text", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{ \"description\": \"\" }", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{URL}}/taas-teams/getSkillsByJobDescription", + "host": [ + "{{URL}}" + ], + "path": [ + "taas-teams", + "getSkillsByJobDescription" + ] + } + }, + "response": [] + }, + { + "name": "get skills by missing field", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test('Status code is 400', function () {\r", + " pm.response.to.have.status(400);\r", + " const response = pm.response.json()\r", + " pm.expect(response.message).to.eq(\"\\\"data.description\\\" is required\")\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "Authorization", + "type": "text", + "value": "Bearer {{token_administrator}}" + }, + { + "key": "Content-Type", + "type": "text", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{URL}}/taas-teams/getSkillsByJobDescription", + "host": [ + "{{URL}}" + ], + "path": [ + "taas-teams", + "getSkillsByJobDescription" + ] + } + }, + "response": [] + } + ] + }, { "name": "GET /taas-teams", "request": { @@ -21648,44 +21863,6 @@ }, "response": [] }, - { - "name": "POST /taas-teams/getSkillsByJobDescription", - "request": { - "method": "POST", - "header": [ - { - "key": "Authorization", - "type": "text", - "value": "Bearer {{token_member}}" - }, - { - "key": "Content-Type", - "type": "text", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"description\": \"nodejs react c++ hello\"\n}", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "{{URL}}/taas-teams/getSkillsByJobDescription", - "host": [ - "{{URL}}" - ], - "path": [ - "taas-teams", - "getSkillsByJobDescription" - ] - } - }, - "response": [] - }, { "name": "GET /taas-teams/:id/members", "request": { diff --git a/docs/stopWords.json b/docs/stopWords.json new file mode 100644 index 00000000..dded6681 --- /dev/null +++ b/docs/stopWords.json @@ -0,0 +1,574 @@ +[ + "dr", + "dra", + "mr", + "ms", + "a", + "a's", + "able", + "about", + "above", + "according", + "accordingly", + "across", + "actually", + "after", + "afterwards", 
+ "again", + "against", + "ain't", + "all", + "allow", + "allows", + "almost", + "alone", + "along", + "already", + "also", + "although", + "always", + "am", + "among", + "amongst", + "an", + "and", + "another", + "any", + "anybody", + "anyhow", + "anyone", + "anything", + "anyway", + "anyways", + "anywhere", + "apart", + "appear", + "appreciate", + "appropriate", + "are", + "aren't", + "around", + "as", + "aside", + "ask", + "asking", + "associated", + "at", + "available", + "away", + "awfully", + "b", + "be", + "became", + "because", + "become", + "becomes", + "becoming", + "been", + "before", + "beforehand", + "behind", + "being", + "believe", + "below", + "beside", + "besides", + "best", + "better", + "between", + "beyond", + "both", + "brief", + "but", + "by", + "c'mon", + "c's", + "came", + "can", + "can't", + "cannot", + "cant", + "cause", + "causes", + "certain", + "certainly", + "changes", + "clearly", + "co", + "come", + "comes", + "concerning", + "consequently", + "consider", + "considering", + "contain", + "containing", + "contains", + "corresponding", + "could", + "couldn't", + "course", + "currently", + "d", + "definitely", + "described", + "despite", + "did", + "didn't", + "different", + "do", + "does", + "doesn't", + "doing", + "don't", + "done", + "down", + "downwards", + "during", + "e", + "each", + "edu", + "eg", + "eight", + "either", + "else", + "elsewhere", + "enough", + "entirely", + "especially", + "et", + "etc", + "even", + "ever", + "every", + "everybody", + "everyone", + "everything", + "everywhere", + "ex", + "exactly", + "example", + "except", + "f", + "far", + "few", + "fifth", + "first", + "five", + "followed", + "following", + "follows", + "for", + "former", + "formerly", + "forth", + "four", + "from", + "further", + "furthermore", + "g", + "get", + "gets", + "getting", + "given", + "gives", + "goes", + "going", + "gone", + "got", + "gotten", + "greetings", + "h", + "had", + "hadn't", + "happens", + "hardly", + "has", + "hasn't", + "have", + "haven't", + "having", + "he", + "he's", + "hello", + "help", + "hence", + "her", + "here", + "here's", + "hereafter", + "hereby", + "herein", + "hereupon", + "hers", + "herself", + "hi", + "him", + "himself", + "his", + "hither", + "hopefully", + "how", + "howbeit", + "however", + "i", + "i'd", + "i'll", + "i'm", + "i've", + "ie", + "if", + "ignored", + "immediate", + "in", + "inasmuch", + "inc", + "indeed", + "indicate", + "indicated", + "indicates", + "inner", + "insofar", + "instead", + "into", + "inward", + "is", + "isn't", + "it", + "it'd", + "it'll", + "it's", + "its", + "itself", + "j", + "just", + "k", + "keep", + "keeps", + "kept", + "know", + "knows", + "known", + "l", + "last", + "lately", + "later", + "latter", + "latterly", + "least", + "lest", + "let", + "let's", + "like", + "liked", + "likely", + "little", + "look", + "looking", + "looks", + "ltd", + "m", + "mainly", + "many", + "may", + "maybe", + "me", + "mean", + "meanwhile", + "merely", + "might", + "more", + "moreover", + "most", + "mostly", + "much", + "must", + "my", + "myself", + "n", + "name", + "namely", + "nd", + "near", + "nearly", + "necessary", + "need", + "needs", + "neither", + "never", + "nevertheless", + "new", + "next", + "nine", + "no", + "nobody", + "non", + "none", + "noone", + "nor", + "normally", + "not", + "nothing", + "novel", + "now", + "nowhere", + "o", + "obviously", + "of", + "off", + "often", + "oh", + "ok", + "okay", + "old", + "on", + "once", + "one", + "ones", + "only", + "onto", + "or", + "other", + "others", + 
"otherwise", + "ought", + "our", + "ours", + "ourselves", + "out", + "outside", + "over", + "overall", + "own", + "p", + "particular", + "particularly", + "per", + "perhaps", + "placed", + "please", + "plus", + "point", + "possible", + "presumably", + "probably", + "provides", + "q", + "que", + "quite", + "qv", + "rather", + "rd", + "re", + "really", + "reasonably", + "regarding", + "regardless", + "regards", + "relatively", + "respectively", + "right", + "s", + "said", + "same", + "saw", + "say", + "saying", + "says", + "second", + "secondly", + "see", + "seeing", + "seem", + "seemed", + "seeming", + "seems", + "seen", + "self", + "selves", + "sensible", + "sent", + "serious", + "seriously", + "seven", + "several", + "shall", + "she", + "should", + "shouldn't", + "since", + "six", + "so", + "some", + "somebody", + "somehow", + "someone", + "something", + "sometime", + "sometimes", + "somewhat", + "somewhere", + "soon", + "sorry", + "specified", + "specify", + "specifying", + "still", + "strong", + "sub", + "such", + "sup", + "sure", + "t", + "t's", + "take", + "taken", + "tell", + "tends", + "th", + "than", + "thank", + "thanks", + "thanx", + "that", + "that's", + "thats", + "the", + "their", + "theirs", + "them", + "themselves", + "then", + "thence", + "there", + "there's", + "thereafter", + "thereby", + "therefore", + "therein", + "theres", + "thereupon", + "these", + "they", + "they'd", + "they'll", + "they're", + "they've", + "think", + "third", + "this", + "thorough", + "thoroughly", + "those", + "though", + "three", + "through", + "throughout", + "thru", + "thus", + "to", + "together", + "too", + "took", + "toward", + "towards", + "tried", + "tries", + "truly", + "try", + "trying", + "twice", + "two", + "u", + "un", + "under", + "unfortunately", + "unless", + "unlikely", + "until", + "unto", + "up", + "upon", + "us", + "use", + "used", + "useful", + "uses", + "using", + "usually", + "uucp", + "v", + "value", + "various", + "very", + "via", + "viz", + "vs", + "w", + "want", + "wants", + "was", + "wasn't", + "way", + "we", + "we'd", + "we'll", + "we're", + "we've", + "welcome", + "well", + "went", + "were", + "weren't", + "what", + "what's", + "whatever", + "when", + "whence", + "whenever", + "where", + "where's", + "whereafter", + "whereas", + "whereby", + "wherein", + "whereupon", + "wherever", + "whether", + "which", + "while", + "whither", + "who", + "who's", + "whoever", + "whole", + "whom", + "whose", + "why", + "will", + "willing", + "wish", + "with", + "within", + "without", + "won't", + "wonder", + "would", + "would", + "wouldn't", + "x", + "y", + "yes", + "yet", + "you", + "you'd", + "you'll", + "you're", + "you've", + "your", + "yours", + "yourself", + "yourselves", + "z", + "zero" +] \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 6e2508c1..e6cb5988 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -174,7 +174,7 @@ paths: required: false schema: type: string - enum: ["hourly", "daily", "weekly", "monthly"] + enum: ["hourly", "daily", "weekly", "monthly","annual"] description: The rate type. - in: query name: status @@ -1505,9 +1505,14 @@ paths: name: workPeriods.paymentStatus required: false schema: - type: string - enum: ["pending", "partially-completed", "completed", "cancelled"] - description: The payment status. 
+ oneOf: + - type: array + items: + type: string + enum: ["pending", "partially-completed", "completed", "cancelled"] + - type: string + enum: ["pending", "partially-completed", "completed", "cancelled"] + description: comma separated payment status. - in: query name: workPeriods.startDate required: false @@ -1956,9 +1961,14 @@ paths: name: paymentStatus required: false schema: - type: string - enum: ["pending", "partially-completed", "completed", "cancelled"] - description: The payment status. + oneOf: + - type: array + items: + type: string + enum: ["pending", "partially-completed", "completed", "cancelled"] + - type: string + enum: ["pending", "partially-completed", "completed", "cancelled"] + description: comma separated payment status. - in: query name: startDate required: false @@ -2403,7 +2413,7 @@ paths: required: false schema: type: string - enum: ["completed", "scheduled", "cancelled"] + enum: ["completed", "scheduled", "in-progress", "failed", "cancelled"] description: The payment status. responses: "200": @@ -2519,7 +2529,7 @@ paths: application/json: schema: $ref: "#/components/schemas/Error" - + /work-period-payments/{id}: get: tags: @@ -3269,12 +3279,6 @@ paths: application/json: schema: $ref: "#/components/schemas/Error" - "403": - description: Forbidden - content: - application/json: - schema: - $ref: "#/components/schemas/Error" "500": description: Internal Server Error content: @@ -3755,10 +3759,13 @@ components: properties: tag: type: string + example: "Java" type: type: string + example: "taas_skill" source: type: string + example: "taas-jd-parser" Job: required: @@ -4830,8 +4837,22 @@ components: description: "The amount to be paid." status: type: string - enum: ["completed", "scheduled", "cancelled"] + enum: ["completed", "scheduled", "in-progress", "failed", "cancelled"] description: "The payment status." + statusDetails: + type: object + properties: + errorMessage: + type: string + errorCode: + type: integer + retry: + type: integer + step: + type: string + challengeId: + type: string + format: uuid billingAccountId: type: integer example: 80000071 @@ -4888,7 +4909,7 @@ components: description: "The amount to be paid." status: type: string - enum: ["completed", "scheduled", "cancelled"] + enum: ["completed", "scheduled", "in-progress", "failed", "cancelled"] description: "The payment status." WorkPeriodPaymentCreateRequestBody: required: @@ -4979,7 +5000,7 @@ components: description: "The amount to be paid." status: type: string - enum: ["completed", "scheduled", "cancelled"] + enum: ["completed", "scheduled", "in-progress", "failed", "cancelled"] description: "The payment status." CheckRun: type: object diff --git a/local/kafka-client/topics.txt b/local/kafka-client/topics.txt index 760c3a82..2611220b 100644 --- a/local/kafka-client/topics.txt +++ b/local/kafka-client/topics.txt @@ -20,3 +20,4 @@ taas.interview.requested taas.interview.update taas.interview.bulkUpdate external.action.email +taas.action.retry diff --git a/migrations/2021-05-29-create-payment-scheduler-table-add-status-details-to-payment.js b/migrations/2021-05-29-create-payment-scheduler-table-add-status-details-to-payment.js new file mode 100644 index 00000000..3931f4bb --- /dev/null +++ b/migrations/2021-05-29-create-payment-scheduler-table-add-status-details-to-payment.js @@ -0,0 +1,111 @@ +'use strict'; + +const config = require('config') +const _ = require('lodash') +const { PaymentSchedulerStatus } = require('../app-constants') + +/** + * Create `payment_schedulers` table & relations. 
+ */ +module.exports = { + up: async (queryInterface, Sequelize) => { + const transaction = await queryInterface.sequelize.transaction() + try { + await queryInterface.createTable('payment_schedulers', { + id: { + type: Sequelize.UUID, + primaryKey: true, + allowNull: false, + defaultValue: Sequelize.UUIDV4 + }, + challengeId: { + field: 'challenge_id', + type: Sequelize.UUID, + allowNull: false + }, + workPeriodPaymentId: { + field: 'work_period_payment_id', + type: Sequelize.UUID, + allowNull: false, + references: { + model: { + tableName: 'work_period_payments', + schema: config.DB_SCHEMA_NAME + }, + key: 'id' + } + }, + step: { + type: Sequelize.ENUM(_.values(PaymentSchedulerStatus)), + allowNull: false + }, + status: { + type: Sequelize.ENUM( + 'in-progress', + 'completed', + 'failed' + ), + allowNull: false + }, + userId: { + field: 'user_id', + type: Sequelize.BIGINT + }, + userHandle: { + field: 'user_handle', + type: Sequelize.STRING, + allowNull: false + }, + createdAt: { + field: 'created_at', + type: Sequelize.DATE + }, + updatedAt: { + field: 'updated_at', + type: Sequelize.DATE + }, + deletedAt: { + field: 'deleted_at', + type: Sequelize.DATE + } + }, { schema: config.DB_SCHEMA_NAME, transaction }) + await queryInterface.addColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'status_details', + { type: Sequelize.JSONB }, + { transaction }) + await queryInterface.changeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'challenge_id', + { type: Sequelize.UUID }, + { transaction }) + await queryInterface.sequelize.query(`ALTER TYPE ${config.DB_SCHEMA_NAME}.enum_work_period_payments_status ADD VALUE 'in-progress'`) + await queryInterface.sequelize.query(`ALTER TYPE ${config.DB_SCHEMA_NAME}.enum_work_period_payments_status ADD VALUE 'failed'`) + await transaction.commit() + } catch (err) { + await transaction.rollback() + throw err + } + }, + + down: async (queryInterface, Sequelize) => { + const table = { schema: config.DB_SCHEMA_NAME, tableName: 'payment_schedulers' } + const statusTypeName = `${table.schema}.enum_${table.tableName}_status` + const stepTypeName = `${table.schema}.enum_${table.tableName}_step` + const transaction = await queryInterface.sequelize.transaction() + try { + await queryInterface.dropTable(table, { transaction }) + // drop enum type for status and step column + await queryInterface.sequelize.query(`DROP TYPE ${statusTypeName}`, { transaction }) + await queryInterface.sequelize.query(`DROP TYPE ${stepTypeName}`, { transaction }) + + await queryInterface.changeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'challenge_id', + { type: Sequelize.UUID, allowNull: false }, + { transaction }) + await queryInterface.removeColumn({ tableName: 'work_period_payments', schema: config.DB_SCHEMA_NAME }, 'status_details', + { transaction }) + await queryInterface.sequelize.query(`DELETE FROM pg_enum WHERE enumlabel in ('in-progress', 'failed') AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'enum_work_period_payments_status')`, + { transaction }) + await transaction.commit() + } catch (err) { + await transaction.rollback() + throw err + } + } +}; diff --git a/package.json b/package.json index 510504f1..3e2e7b18 100644 --- a/package.json +++ b/package.json @@ -27,6 +27,7 @@ "local:init": "npm run local:reset && npm run data:import -- --force", "local:reset": "npm run delete-index -- --force || true && npm run create-index -- --force && npm run init-db force", "cov": "nyc 
--reporter=html --reporter=text npm run test", + "demo-payment-scheduler": "node scripts/demo-payment-scheduler/index.js && npm run index:all -- --force", "demo-payment": "node scripts/demo-payment" }, "keywords": [], diff --git a/scripts/demo-payment-scheduler/data.json b/scripts/demo-payment-scheduler/data.json new file mode 100644 index 00000000..5023842c --- /dev/null +++ b/scripts/demo-payment-scheduler/data.json @@ -0,0 +1,103 @@ +{ + "Job":{ + "id":"43d695d4-e926-41d5-ad42-a899612b5246", + "projectId":17234, + "title":"Dummy title - at most 64 characters", + "numPositions":13, + "skills":[ + "23e00d92-207a-4b5b-b3c9-4c5662644941", + "7d076384-ccf6-4e43-a45d-1b24b1e624aa", + "cbac57a3-7180-4316-8769-73af64893158", + "a2b4bc11-c641-4a19-9eb7-33980378f82e" + ], + "status":"in-review", + "isApplicationPageActive":false, + "createdBy":"57646ff9-1cd3-4d3c-88ba-eb09a395366c", + "updatedBy":"00000000-0000-0000-0000-000000000000", + "createdAt":"2021-05-09T21:21:10.394Z", + "updatedAt":"2021-05-09T21:21:14.010Z" + }, + "ResourceBooking":{ + "id":"41671764-0ded-46fd-b7de-2af5d5e4f3fc", + "projectId":17234, + "userId":"05e988b7-7d54-4c10-ada1-1a04870a88a8", + "jobId":"43d695d4-e926-41d5-ad42-a899612b5246", + "status":"placed", + "startDate":"2020-09-27", + "endDate":"2020-10-27", + "memberRate":13.23, + "customerRate":13, + "rateType":"hourly", + "billingAccountId":80000069, + "createdBy":"57646ff9-1cd3-4d3c-88ba-eb09a395366c", + "updatedBy":null, + "createdAt":"2021-05-09T21:25:46.728Z", + "updatedAt":"2021-05-09T21:25:46.728Z" + }, + "WorkPeriods":[ + { + "id":"4baae2cf-fd70-4ab3-9959-e826257b7e0f", + "resourceBookingId":"41671764-0ded-46fd-b7de-2af5d5e4f3fc", + "userHandle":"pshah_manager", + "projectId":17234, + "startDate":"2020-09-27", + "endDate":"2020-10-03", + "daysWorked":4, + "memberRate":27.06, + "customerRate":13.13, + "paymentStatus":"partially-completed", + "createdBy":"00000000-0000-0000-0000-000000000000", + "updatedBy":"57646ff9-1cd3-4d3c-88ba-eb09a395366c", + "createdAt":"2021-05-09T21:25:47.813Z", + "updatedAt":"2021-05-09T21:45:32.659Z" + }, + { + "id":"9918e1b7-acbc-41ae-baa6-fdcb2386681d", + "resourceBookingId":"41671764-0ded-46fd-b7de-2af5d5e4f3fc", + "userHandle":"Shuchikr", + "projectId":17234, + "startDate":"2020-10-18", + "endDate":"2020-10-24", + "daysWorked":4, + "memberRate":4.08, + "customerRate":3.89, + "paymentStatus":"cancelled", + "createdBy":"00000000-0000-0000-0000-000000000000", + "updatedBy":"57646ff9-1cd3-4d3c-88ba-eb09a395366c", + "createdAt":"2021-05-09T21:25:47.834Z", + "updatedAt":"2021-05-09T21:45:37.647Z" + }, + { + "id":"42e990c9-b14c-4496-9977-c3024aa90024", + "resourceBookingId":"41671764-0ded-46fd-b7de-2af5d5e4f3fc", + "userHandle":"vkumars", + "projectId":17234, + "startDate":"2020-10-25", + "endDate":"2020-10-31", + "daysWorked":3, + "memberRate":15.61, + "customerRate":9.76, + "paymentStatus":"pending", + "createdBy":"00000000-0000-0000-0000-000000000000", + "updatedBy":"57646ff9-1cd3-4d3c-88ba-eb09a395366c", + "createdAt":"2021-05-09T21:25:47.824Z", + "updatedAt":"2021-05-09T21:45:48.727Z" + }, + { + "id":"8bf64481-ae7b-4e51-b48c-000cd90c87d1", + "resourceBookingId":"41671764-0ded-46fd-b7de-2af5d5e4f3fc", + "userHandle":"chandanant", + "projectId":17234, + "startDate":"2020-10-11", + "endDate":"2020-10-17", + "daysWorked":4, + "memberRate":10.82, + "customerRate":30.71, + "paymentStatus":"pending", + "createdBy":"00000000-0000-0000-0000-000000000000", + "updatedBy":"57646ff9-1cd3-4d3c-88ba-eb09a395366c", + 
"createdAt":"2021-05-09T21:25:47.815Z", + "updatedAt":"2021-05-09T21:45:41.810Z" + } + ] + } \ No newline at end of file diff --git a/scripts/demo-payment-scheduler/index.js b/scripts/demo-payment-scheduler/index.js new file mode 100644 index 00000000..8f76a814 --- /dev/null +++ b/scripts/demo-payment-scheduler/index.js @@ -0,0 +1,81 @@ +const { v4: uuid } = require('uuid') +const config = require('config') +const _ = require('lodash') +const data = require('./data.json') +const model = require('../../src/models') +const logger = require('../../src/common/logger') + +const payments = [] +for (let i = 0; i < 1000; i++) { + payments.push({ + id: uuid(), + workPeriodId: data.WorkPeriods[_.random(3)].id, + amount: _.round(_.random(1000, true), 2), + status: 'scheduled', + billingAccountId: data.ResourceBooking.billingAccountId, + createdBy: '57646ff9-1cd3-4d3c-88ba-eb09a395366c', + updatedBy: null, + createdAt: `2021-05-19T21:3${i % 10}:46.507Z`, + updatedAt: '2021-05-19T21:33:46.507Z' + }) +} + +/** + * Clear old demo data + */ +async function clearData () { + const workPeriodIds = _.join(_.map(data.WorkPeriods, w => `'${w.id}'`), ',') + await model.PaymentScheduler.destroy({ + where: { + workPeriodPaymentId: { + [model.Sequelize.Op.in]: [ + model.sequelize.literal(`select id from ${config.DB_SCHEMA_NAME}.work_period_payments where work_period_id in (${workPeriodIds})`) + ] + } + }, + force: true + }) + await model.WorkPeriodPayment.destroy({ + where: { + workPeriodId: _.map(data.WorkPeriods, 'id') + }, + force: true + }) + await model.WorkPeriod.destroy({ + where: { + id: _.map(data.WorkPeriods, 'id') + }, + force: true + }) + await model.ResourceBooking.destroy({ + where: { + id: data.ResourceBooking.id + }, + force: true + }) + await model.Job.destroy({ + where: { + id: data.Job.id + }, + force: true + }) +} + +/** + * Insert payment scheduler demo data + */ +async function insertPaymentSchedulerDemoData () { + logger.info({ component: 'payment-scheduler-demo-data', context: 'insertPaymentSchedulerDemoData', message: 'Starting to remove demo data if exists' }) + await clearData() + logger.info({ component: 'payment-scheduler-demo-data', context: 'insertPaymentSchedulerDemoData', message: 'Data cleared' }) + await model.Job.create(data.Job) + logger.info({ component: 'payment-scheduler-demo-data', context: 'insertPaymentSchedulerDemoData', message: `Job ${data.Job.id} created` }) + await model.ResourceBooking.create(data.ResourceBooking) + logger.info({ component: 'payment-scheduler-demo-data', context: 'insertPaymentSchedulerDemoData', message: `ResourceBooking: ${data.ResourceBooking.id} create` }) + await model.WorkPeriod.bulkCreate(data.WorkPeriods) + logger.info({ component: 'payment-scheduler-demo-data', context: 'insertPaymentSchedulerDemoData', message: `WorkPeriods: ${_.map(data.WorkPeriods, 'id')} created` }) + await model.WorkPeriodPayment.bulkCreate(payments) + logger.info({ component: 'payment-scheduler-demo-data', context: 'insertPaymentSchedulerDemoData', message: `${payments.length} of WorkPeriodPayments scheduled` }) +} + +insertPaymentSchedulerDemoData() diff --git a/src/bootstrap.js b/src/bootstrap.js index 259c4a2c..aebac2c2 100644 --- a/src/bootstrap.js +++ b/src/bootstrap.js @@ -1,8 +1,9 @@ const fs = require('fs') const Joi = require('joi') +const config = require('config') const path = require('path') const _ = require('lodash') -const { Interviews, WorkPeriodPaymentStatus } = require('../app-constants') +const { Interviews, WorkPeriodPaymentStatus, 
PaymentProcessingSwitch } = require('../app-constants') const logger = require('./common/logger') const allowedInterviewStatuses = _.values(Interviews.Status) @@ -10,7 +11,7 @@ const allowedXAITemplate = _.keys(Interviews.XaiTemplate) Joi.page = () => Joi.number().integer().min(1).default(1) Joi.perPage = () => Joi.number().integer().min(1).default(20) -Joi.rateType = () => Joi.string().valid('hourly', 'daily', 'weekly', 'monthly') +Joi.rateType = () => Joi.string().valid('hourly', 'daily', 'weekly', 'monthly', 'annual') Joi.jobStatus = () => Joi.string().valid('sourcing', 'in-review', 'assigned', 'closed', 'cancelled') Joi.resourceBookingStatus = () => Joi.string().valid('placed', 'closed', 'cancelled') Joi.workload = () => Joi.string().valid('full-time', 'fractional') @@ -44,3 +45,14 @@ function buildServices (dir) { } buildServices(path.join(__dirname, 'services')) + +// validate some configurable parameters for the app +const paymentProcessingSwitchSchema = Joi.string().label('PAYMENT_PROCESSING_SWITCH').valid( + ...Object.values(PaymentProcessingSwitch) +) +try { + Joi.attempt(config.PAYMENT_PROCESSING.SWITCH, paymentProcessingSwitchSchema) +} catch (err) { + console.error(err.message) + process.exit(1) +} diff --git a/src/common/helper.js b/src/common/helper.js index e35c661e..f4a1aac2 100644 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -202,6 +202,16 @@ esIndexPropertyMapping[config.get('esConfig.ES_INDEX_RESOURCE_BOOKING')] = { challengeId: { type: 'keyword' }, amount: { type: 'float' }, status: { type: 'keyword' }, + statusDetails: { + type: 'nested', + properties: { + errorMessage: { type: 'text' }, + errorCode: { type: 'integer' }, + retry: { type: 'integer' }, + step: { type: 'keyword' }, + challengeId: { type: 'keyword' } + } + }, billingAccountId: { type: 'integer' }, createdAt: { type: 'date' }, createdBy: { type: 'keyword' }, @@ -286,6 +296,16 @@ async function promptUser (promptQuery, cb) { }) } +/** + * Sleep for a given number of milliseconds. 
+ *
+ * @param {Number} milliseconds the sleep time
+ * @returns {undefined}
+ */
+async function sleep (milliseconds) {
+  return new Promise((resolve) => setTimeout(resolve, milliseconds))
+}
+
 /**
  * Create index in elasticsearch
  * @param {Object} index the index name
@@ -952,6 +972,9 @@ async function postEvent (topic, payload, options = {}) {
     'mime-type': 'application/json',
     payload
   }
+  if (options.key) {
+    message.key = options.key
+  }
   await client.postEvent(message)
   await eventDispatcher.handleEvent(topic, { value: payload, options })
 }
@@ -1615,6 +1638,26 @@ async function createChallenge (data, token) {
   return challenge
 }
 
+/**
+ * Get a challenge
+ *
+ * @param {String} challengeId the challenge id
+ * @returns {Object} the challenge
+ */
+async function getChallenge (challengeId) {
+  const token = await getM2MToken()
+  const url = `${config.TC_API}/challenges/${challengeId}`
+  localLogger.debug({ context: 'getChallenge', message: `EndPoint: GET ${url}` })
+  const { body: challenge, status: httpStatus } = await request
+    .get(url)
+    .set('Authorization', `Bearer ${token}`)
+    .set('Content-Type', 'application/json')
+    .set('Accept', 'application/json')
+  localLogger.debug({ context: 'getChallenge', message: `Status Code: ${httpStatus}` })
+  localLogger.debug({ context: 'getChallenge', message: `Response Body: ${JSON.stringify(challenge)}` })
+  return challenge
+}
+
 /**
  * Update a challenge
  *
@@ -1690,6 +1733,35 @@ async function createChallengeResource (data, token) {
   return resource
 }
 
+/**
+ * Get the resource of a challenge for the given member and role, if it exists
+ * @param {String} challengeId the challenge id
+ * @param {String} memberHandle the member handle
+ * @param {String} roleId the role id
+ * @returns {Object} the resource
+ */
+async function getChallengeResource (challengeId, memberHandle, roleId) {
+  const token = await getM2MToken()
+  const url = `${config.TC_API}/resources?challengeId=${challengeId}&memberHandle=${memberHandle}&roleId=${roleId}`
+  localLogger.debug({ context: 'getChallengeResource', message: `EndPoint: GET ${url}` })
+  try {
+    const { body: resource, status: httpStatus } = await request
+      .get(url)
+      .set('Authorization', `Bearer ${token}`)
+      .set('Content-Type', 'application/json')
+      .set('Accept', 'application/json')
+    localLogger.debug({ context: 'getChallengeResource', message: `Status Code: ${httpStatus}` })
+    localLogger.debug({ context: 'getChallengeResource', message: `Response Body: ${JSON.stringify(resource)}` })
+    return resource[0]
+  } catch (err) {
+    if (err.status === 404) {
+      localLogger.debug({ context: 'getChallengeResource', message: `Status Code: ${err.status}` })
+    } else {
+      throw err
+    }
+  }
+}
+
 /**
  * Populates workPeriods from start and end date of resource booking
  * @param {Date} start start date of the resource booking
@@ -1833,6 +1905,7 @@ async function getMemberGroups (userId) {
 module.exports = {
   getParamFromCliArgs,
   promptUser,
+  sleep,
   createIndex,
   deleteIndex,
   indexBulkDataToES,
@@ -1879,8 +1952,10 @@ module.exports = {
   deleteProjectMember,
   getUserAttributeValue,
   createChallenge,
+  getChallenge,
   updateChallenge,
   createChallengeResource,
+  getChallengeResource,
   extractWorkPeriods,
   getUserByHandle,
   substituteStringByObject,
diff --git a/src/controllers/JobController.js b/src/controllers/JobController.js
index b7cad958..606c690b 100644
--- a/src/controllers/JobController.js
+++ b/src/controllers/JobController.js
@@ -2,6 +2,7 @@
  * Controller for Job endpoints
  */
 const HttpStatus = require('http-status-codes')
+const _ = require('lodash')
 const service = require('../services/JobService')
 const
helper = require('../common/helper') @@ -57,10 +58,8 @@ async function deleteJob (req, res) { * @param res the response */ async function searchJobs (req, res) { - if (req.body && req.body.jobIds) { - req.query.jobIds = req.body.jobIds - } - const result = await service.searchJobs(req.authUser, req.query) + const query = { ...req.query, jobIds: _.get(req, 'body.jobIds', []) } + const result = await service.searchJobs(req.authUser, query) helper.setResHeaders(req, res, result) res.send(result.result) } diff --git a/src/eventHandlers/JobCandidateEventHandler.js b/src/eventHandlers/JobCandidateEventHandler.js index c199cb7d..8780ab08 100644 --- a/src/eventHandlers/JobCandidateEventHandler.js +++ b/src/eventHandlers/JobCandidateEventHandler.js @@ -23,17 +23,25 @@ async function inReviewJob (payload) { }) return } - await JobService.partiallyUpdateJob( - helper.getAuditM2Muser(), - job.id, - { status: 'in-review' } - ).then(result => { - logger.info({ + if (payload.value.status === 'open') { + await JobService.partiallyUpdateJob( + helper.getAuditM2Muser(), + job.id, + { status: 'in-review' } + ).then(result => { + logger.info({ + component: 'JobCandidateEventHandler', + context: 'inReviewJob', + message: `id: ${result.id} job got in-review status.` + }) + }) + } else { + logger.debug({ component: 'JobCandidateEventHandler', context: 'inReviewJob', - message: `id: ${result.id} job got in-review status.` + message: `id: ${payload.value.id} candidate is not in open status` }) - }) + } } /** @@ -46,6 +54,17 @@ async function processCreate (payload) { await inReviewJob(payload) } +/** + * Process job candidate update event. + * + * @param {Object} payload the event payload + * @returns {undefined} + */ +async function processUpdate (payload) { + await inReviewJob(payload) +} + module.exports = { - processCreate + processCreate, + processUpdate } diff --git a/src/eventHandlers/index.js b/src/eventHandlers/index.js index 6e0ec2a8..c88ef929 100644 --- a/src/eventHandlers/index.js +++ b/src/eventHandlers/index.js @@ -14,6 +14,7 @@ const logger = require('../common/logger') const TopicOperationMapping = { [config.TAAS_JOB_UPDATE_TOPIC]: JobEventHandler.processUpdate, [config.TAAS_JOB_CANDIDATE_CREATE_TOPIC]: JobCandidateEventHandler.processCreate, + [config.TAAS_JOB_CANDIDATE_UPDATE_TOPIC]: JobCandidateEventHandler.processUpdate, [config.TAAS_RESOURCE_BOOKING_CREATE_TOPIC]: ResourceBookingEventHandler.processCreate, [config.TAAS_RESOURCE_BOOKING_UPDATE_TOPIC]: ResourceBookingEventHandler.processUpdate, [config.TAAS_RESOURCE_BOOKING_DELETE_TOPIC]: ResourceBookingEventHandler.processDelete, diff --git a/src/models/PaymentScheduler.js b/src/models/PaymentScheduler.js new file mode 100644 index 00000000..f8f64dfa --- /dev/null +++ b/src/models/PaymentScheduler.js @@ -0,0 +1,109 @@ +const { Sequelize, Model } = require('sequelize') +const config = require('config') +const _ = require('lodash') +const errors = require('../common/errors') +const { PaymentSchedulerStatus } = require('../../app-constants') + +module.exports = (sequelize) => { + class PaymentScheduler extends Model { + /** + * Create association between models + * @param {Object} models the database models + */ + static associate (models) { + PaymentScheduler.belongsTo(models.WorkPeriodPayment, { foreignKey: 'workPeriodPaymentId' }) + } + + /** + * Get payment scheduler by id + * @param {String} id the payment scheduler id + * @returns {PaymentScheduler} the payment scheduler instance + */ + static async findById (id) { + const paymentScheduler = 
await PaymentScheduler.findOne({ + where: { + id + } + }) + if (!paymentScheduler) { + throw new errors.NotFoundError(`id: ${id} "paymentScheduler" doesn't exists`) + } + return paymentScheduler + } + } + PaymentScheduler.init( + { + id: { + type: Sequelize.UUID, + primaryKey: true, + allowNull: false, + defaultValue: Sequelize.UUIDV4 + }, + challengeId: { + field: 'challenge_id', + type: Sequelize.UUID, + allowNull: false + }, + workPeriodPaymentId: { + field: 'work_period_payment_id', + type: Sequelize.UUID, + allowNull: false + }, + step: { + type: Sequelize.ENUM(_.values(PaymentSchedulerStatus)), + allowNull: false + }, + status: { + type: Sequelize.ENUM( + 'in-progress', + 'completed', + 'failed' + ), + allowNull: false + }, + userId: { + field: 'user_id', + type: Sequelize.BIGINT + }, + userHandle: { + field: 'user_handle', + type: Sequelize.STRING, + allowNull: false + }, + createdAt: { + field: 'created_at', + type: Sequelize.DATE + }, + updatedAt: { + field: 'updated_at', + type: Sequelize.DATE + }, + deletedAt: { + field: 'deleted_at', + type: Sequelize.DATE + } + }, + { + schema: config.DB_SCHEMA_NAME, + sequelize, + tableName: 'payment_schedulers', + paranoid: true, + deletedAt: 'deletedAt', + createdAt: 'createdAt', + updatedAt: 'updatedAt', + timestamps: true, + defaultScope: { + attributes: { + exclude: ['deletedAt'] + } + }, + hooks: { + afterCreate: (paymentScheduler) => { + delete paymentScheduler.dataValues.deletedAt + } + } + } + ) + + return PaymentScheduler +} diff --git a/src/models/WorkPeriodPayment.js b/src/models/WorkPeriodPayment.js index 7db484ea..8d23487a 100644 --- a/src/models/WorkPeriodPayment.js +++ b/src/models/WorkPeriodPayment.js @@ -55,6 +55,10 @@ module.exports = (sequelize) => { type: Sequelize.ENUM(_.values(WorkPeriodPaymentStatus)), allowNull: false }, + statusDetails: { + field: 'status_details', + type: Sequelize.JSONB + }, billingAccountId: { field: 'billing_account_id', type: Sequelize.BIGINT diff --git a/src/services/JobCandidateService.js b/src/services/JobCandidateService.js index cc059c0c..9a152c49 100644 --- a/src/services/JobCandidateService.js +++ b/src/services/JobCandidateService.js @@ -203,8 +203,8 @@ fullyUpdateJobCandidate.schema = Joi.object().keys({ userId: Joi.string().uuid().required(), status: Joi.jobCandidateStatus().default('open'), externalId: Joi.string().allow(null).default(null), - resume: Joi.string().uri().allow(null).default(null), - remark: Joi.string().allow(null).default(null) + resume: Joi.string().uri().allow('').allow(null).default(null), + remark: Joi.string().allow('').allow(null).default(null) }).required() }).required() diff --git a/src/services/JobService.js b/src/services/JobService.js index 1947fb9e..7291070b 100644 --- a/src/services/JobService.js +++ b/src/services/JobService.js @@ -187,31 +187,35 @@ async function createJob (currentUser, job) { return created.toJSON() } -createJob.schema = Joi.object().keys({ - currentUser: Joi.object().required(), - job: Joi.object().keys({ - status: Joi.jobStatus().default('sourcing'), - projectId: Joi.number().integer().required(), - externalId: Joi.string().allow(null), - description: Joi.stringAllowEmpty().allow(null), - title: Joi.title().required(), - startDate: Joi.date().allow(null), - duration: Joi.number().integer().min(1).allow(null), - numPositions: Joi.number().integer().min(1).required(), - resourceType: Joi.stringAllowEmpty().allow(null), - rateType: Joi.rateType().allow(null), - workload: Joi.workload().allow(null), - skills: 
Joi.array().items(Joi.string().uuid()).required(), - isApplicationPageActive: Joi.boolean(), - minSalary: Joi.number().integer().allow(null), - maxSalary: Joi.number().integer().allow(null), - hoursPerWeek: Joi.number().integer().allow(null), - jobLocation: Joi.string().allow(null), - jobTimezone: Joi.string().allow(null), - currency: Joi.string().allow(null), - roleIds: Joi.array().items(Joi.string().uuid().required()) - }).required() -}).required() +createJob.schema = Joi.object() + .keys({ + currentUser: Joi.object().required(), + job: Joi.object() + .keys({ + status: Joi.jobStatus().default('sourcing'), + projectId: Joi.number().integer().required(), + externalId: Joi.string().allow(null), + description: Joi.stringAllowEmpty().allow(null), + title: Joi.title().required(), + startDate: Joi.date().allow(null), + duration: Joi.number().integer().min(1).allow(null), + numPositions: Joi.number().integer().min(1).required(), + resourceType: Joi.stringAllowEmpty().allow(null), + rateType: Joi.rateType().allow(null), + workload: Joi.workload().allow(null), + skills: Joi.array().items(Joi.string().uuid()).required(), + isApplicationPageActive: Joi.boolean(), + minSalary: Joi.number().integer().allow(null), + maxSalary: Joi.number().integer().allow(null), + hoursPerWeek: Joi.number().integer().allow(null), + jobLocation: Joi.string().allow(null).allow(''), + jobTimezone: Joi.string().allow(null).allow(''), + currency: Joi.string().allow(null).allow(''), + roleIds: Joi.array().items(Joi.string().uuid().required()) + }) + .required() + }) + .required() /** * Update job. Normal user can only update the job he/she created. diff --git a/src/services/PaymentSchedulerService.js b/src/services/PaymentSchedulerService.js new file mode 100644 index 00000000..20ebf249 --- /dev/null +++ b/src/services/PaymentSchedulerService.js @@ -0,0 +1,337 @@ +const _ = require('lodash') +const config = require('config') +const moment = require('moment') +const models = require('../models') +const { getMemberDetailsByHandle, getChallenge, getChallengeResource, sleep, postEvent } = require('../common/helper') +const logger = require('../common/logger') +const { createChallenge, addResourceToChallenge, activateChallenge, closeChallenge } = require('./PaymentService') +const { ChallengeStatus, PaymentSchedulerStatus, PaymentProcessingSwitch } = require('../../app-constants') + +const WorkPeriodPayment = models.WorkPeriodPayment +const WorkPeriod = models.WorkPeriod +const PaymentScheduler = models.PaymentScheduler +const { + SWITCH, BATCH_SIZE, IN_PROGRESS_EXPIRED, MAX_RETRY_COUNT, RETRY_BASE_DELAY, RETRY_MAX_DELAY, PER_REQUEST_MAX_TIME, PER_PAYMENT_MAX_TIME, + PER_MINUTE_PAYMENT_MAX_COUNT, PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT, PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT, + FIX_DELAY_STEP_CREATE_CHALLENGE, FIX_DELAY_STEP_ASSIGN_MEMBER, FIX_DELAY_STEP_ACTIVATE_CHALLENGE +} = config.PAYMENT_PROCESSING +const processStatus = { + perMin: { + minute: '0:0', + paymentsProcessed: 0, + challengeRequested: 0, + resourceRequested: 0 + }, + perMinThreshold: { + paymentsProcessed: PER_MINUTE_PAYMENT_MAX_COUNT, + challengeRequested: PER_MINUTE_CHALLENGE_REQUEST_MAX_COUNT, + resourceRequested: PER_MINUTE_RESOURCE_REQUEST_MAX_COUNT + }, + paymentStartTime: 0, + requestStartTime: 0 +} +const processResult = { + SUCCESS: 'success', + FAIL: 'fail', + SKIP: 'skip' +} + +const localLogger = { + debug: (message, context) => logger.debug({ component: 'PaymentSchedulerService', context, message }), + error: (message, context) => logger.error({ 
component: 'PaymentSchedulerService', context, message }), + info: (message, context) => logger.info({ component: 'PaymentSchedulerService', context, message }) +} + +/** + * Scheduler process entrance + */ +async function processScheduler () { + // Get the oldest Work Periods Payment records in status "scheduled" and "in-progress", + // the in progress state may be caused by an abnormal shutdown, + // or it may be a normal record that is still being processed + const workPeriodPaymentList = await WorkPeriodPayment.findAll({ where: { status: ['in-progress', 'scheduled'] }, order: [['status', 'desc'], ['createdAt']], limit: BATCH_SIZE }) + localLogger.info(`start processing ${workPeriodPaymentList.length} of payments`, 'processScheduler') + const failIds = [] + const skipIds = [] + for (const workPeriodPayment of workPeriodPaymentList) { + const result = await processPayment(workPeriodPayment) + if (result === processResult.FAIL) { + failIds.push(workPeriodPayment.id) + } else if (result === processResult.SKIP) { + skipIds.push(workPeriodPayment.id) + } + } + localLogger.info(`process end. ${workPeriodPaymentList.length - failIds.length - skipIds.length} of payments processed successfully`, 'processScheduler') + if (!_.isEmpty(skipIds)) { + localLogger.info(`payments: ${_.join(skipIds, ',')} are processing by other processor`, 'processScheduler') + } + if (!_.isEmpty(failIds)) { + localLogger.error(`payments: ${_.join(failIds, ',')} are processed failed`, 'processScheduler') + } +} + +/** + * Process a record of payment + * @param {Object} workPeriodPayment the work period payment + * @returns {String} process result + */ +async function processPayment (workPeriodPayment) { + processStatus.paymentStartTime = Date.now() + let paymentScheduler + if (workPeriodPayment.status === 'in-progress') { + paymentScheduler = await PaymentScheduler.findOne({ where: { workPeriodPaymentId: workPeriodPayment.id, status: 'in-progress' } }) + + // If the in-progress record has not expired, it is considered to be being processed by other processes + if (paymentScheduler && moment(paymentScheduler.updatedAt).add(moment.duration(IN_PROGRESS_EXPIRED)).isAfter(moment())) { + localLogger.info(`workPeriodPayment: ${workPeriodPayment.id} is being processed by other processor`, 'processPayment') + return processResult.SKIP + } + } else { + const oldValue = workPeriodPayment.toJSON() + const updated = await workPeriodPayment.update({ status: 'in-progress' }) + // Update the modified status to es + await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue }) + } + // Check whether the number of processed records per minute exceeds the specified number, if it exceeds, wait for the next minute before processing + await checkWait(PaymentSchedulerStatus.START_PROCESS) + localLogger.info(`Processing workPeriodPayment ${workPeriodPayment.id}`, 'processPayment') + + const workPeriod = await WorkPeriod.findById(workPeriodPayment.workPeriodId) + try { + if (!paymentScheduler) { + // 1. 
+      const challengeId = await withRetry(createChallenge, [getCreateChallengeParam(workPeriod, workPeriodPayment)], validateError, PaymentSchedulerStatus.CREATE_CHALLENGE)
+      paymentScheduler = await PaymentScheduler.create({ challengeId, step: PaymentSchedulerStatus.CREATE_CHALLENGE, workPeriodPaymentId: workPeriodPayment.id, userHandle: workPeriod.userHandle, status: 'in-progress' })
+    } else {
+      // If the paymentScheduler already exists, it means this record was left over from an abnormal shutdown
+      await setPaymentSchedulerStep(paymentScheduler)
+    }
+    // Start from the first unprocessed step and perform the remaining steps one by one
+    while (paymentScheduler.step !== PaymentSchedulerStatus.CLOSE_CHALLENGE) {
+      await processStep(paymentScheduler)
+    }
+
+    const oldValue = workPeriodPayment.toJSON()
+    // 5. update the Work Period Payment: it should only update the already existent record with the created "challengeId" and "status=completed".
+    const updated = await workPeriodPayment.update({ challengeId: paymentScheduler.challengeId, status: 'completed' })
+    // Update the modified status to es
+    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue })
+
+    await paymentScheduler.update({ step: PaymentSchedulerStatus.CLOSE_CHALLENGE, userId: paymentScheduler.userId, status: 'completed' })
+
+    localLogger.info(`Processed workPeriodPayment ${workPeriodPayment.id} successfully`, 'processPayment')
+    return processResult.SUCCESS
+  } catch (err) {
+    logger.logFullError(err, { component: 'PaymentSchedulerService', context: 'processPayment' })
+    const statusDetails = { errorMessage: err.message, errorCode: _.get(err, 'status', -1), retry: _.get(err, 'retry', -1), step: _.get(err, 'step'), challengeId: paymentScheduler ? paymentScheduler.challengeId : null }
+    const oldValue = workPeriodPayment.toJSON()
+    // If payment processing failed, the Work Periods Payment "status" should be changed to "failed" and the "statusDetails" field populated with error details in JSON format.
+    const updated = await workPeriodPayment.update({ statusDetails, status: 'failed' })
+    // Update the modified status to es
+    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue })
+
+    if (paymentScheduler) {
+      await paymentScheduler.update({ step: PaymentSchedulerStatus.CLOSE_CHALLENGE, userId: paymentScheduler.userId, status: 'failed' })
+    }
+    localLogger.error(`Failed to process workPeriodPayment ${workPeriodPayment.id}`, 'processPayment')
+    return processResult.FAIL
+  }
+}
+
+/**
+ * Perform a specific step in the process
+ * @param {Object} paymentScheduler the payment scheduler
+ */
+async function processStep (paymentScheduler) {
+  if (paymentScheduler.step === PaymentSchedulerStatus.CREATE_CHALLENGE) {
+    // 2. assign member to the challenge
+    await withRetry(addResourceToChallenge, [paymentScheduler.challengeId, paymentScheduler.userHandle], validateError, PaymentSchedulerStatus.ASSIGN_MEMBER)
+    paymentScheduler.step = PaymentSchedulerStatus.ASSIGN_MEMBER
+  } else if (paymentScheduler.step === PaymentSchedulerStatus.ASSIGN_MEMBER) {
+    // 3. activate the challenge
+    await withRetry(activateChallenge, [paymentScheduler.challengeId], validateError, PaymentSchedulerStatus.ACTIVATE_CHALLENGE)
+    paymentScheduler.step = PaymentSchedulerStatus.ACTIVATE_CHALLENGE
+  } else if (paymentScheduler.step === PaymentSchedulerStatus.ACTIVATE_CHALLENGE) {
+    // 4.1. 
get user id + const { userId } = await withRetry(getMemberDetailsByHandle, [paymentScheduler.userHandle], validateError, PaymentSchedulerStatus.GET_USER_ID) + paymentScheduler.userId = userId + paymentScheduler.step = PaymentSchedulerStatus.GET_USER_ID + } else if (paymentScheduler.step === PaymentSchedulerStatus.GET_USER_ID) { + // 4.2. close the challenge + await withRetry(closeChallenge, [paymentScheduler.challengeId, paymentScheduler.userId, paymentScheduler.userHandle], validateError, PaymentSchedulerStatus.CLOSE_CHALLENGE) + paymentScheduler.step = PaymentSchedulerStatus.CLOSE_CHALLENGE + } +} + +/** + * Set the scheduler actual step + * @param {Object} paymentScheduler the scheduler object + */ +async function setPaymentSchedulerStep (paymentScheduler) { + const challenge = await getChallenge(paymentScheduler.challengeId) + if (SWITCH === PaymentProcessingSwitch.OFF) { + paymentScheduler.step = PaymentSchedulerStatus.CLOSE_CHALLENGE + } else if (challenge.status === ChallengeStatus.COMPLETED) { + paymentScheduler.step = PaymentSchedulerStatus.CLOSE_CHALLENGE + } else if (challenge.status === ChallengeStatus.ACTIVE) { + paymentScheduler.step = PaymentSchedulerStatus.ACTIVATE_CHALLENGE + } else { + const resource = await getChallengeResource(paymentScheduler.challengeId, paymentScheduler.userHandle, config.ROLE_ID_SUBMITTER) + if (resource) { + paymentScheduler.step = PaymentSchedulerStatus.ASSIGN_MEMBER + } else { + paymentScheduler.step = PaymentSchedulerStatus.CREATE_CHALLENGE + } + } + // The main purpose is updating the updatedAt of payment scheduler to avoid simultaneous processing + await paymentScheduler.update({ step: paymentScheduler.step }) +} + +/** + * Generate the create challenge parameter + * @param {Object} workPeriod the work period + * @param {Object} workPeriodPayment the work period payment + * @returns {Object} the create challenge parameter + */ +function getCreateChallengeParam (workPeriod, workPeriodPayment) { + return { + projectId: workPeriod.projectId, + userHandle: workPeriod.userHandle, + amount: workPeriodPayment.amount, + name: `TaaS Payment - ${workPeriod.userHandle} - Week Ending ${moment(workPeriod.endDate).format('D/M/YYYY')}`, + description: `TaaS Payment - ${workPeriod.userHandle} - Week Ending ${moment(workPeriod.endDate).format('D/M/YYYY')}`, + billingAccountId: workPeriodPayment.billingAccountId + } +} + +/** + * Before each step is processed, wait for the corresponding time + * @param {String} step the step name + * @param {Number} tryCount the try count + */ +async function checkWait (step, tryCount) { + // When calculating the retry time later, we need to subtract the time that has been waited before + let lapse = 0 + if (step === PaymentSchedulerStatus.START_PROCESS) { + lapse += await checkPerMinThreshold('paymentsProcessed') + } else if (step === PaymentSchedulerStatus.CREATE_CHALLENGE) { + await checkPerMinThreshold('challengeRequested') + } else if (step === PaymentSchedulerStatus.ASSIGN_MEMBER) { + // Only when tryCount = 0, it comes from the previous step, and it is necessary to wait for a fixed time + if (FIX_DELAY_STEP_CREATE_CHALLENGE > 0 && tryCount === 0) { + await sleep(FIX_DELAY_STEP_CREATE_CHALLENGE) + } + lapse += await checkPerMinThreshold('resourceRequested') + } else if (step === PaymentSchedulerStatus.ACTIVATE_CHALLENGE) { + // Only when tryCount = 0, it comes from the previous step, and it is necessary to wait for a fixed time + if (FIX_DELAY_STEP_ASSIGN_MEMBER > 0 && tryCount === 0) { + await 
sleep(FIX_DELAY_STEP_ASSIGN_MEMBER)
+    }
+    lapse += await checkPerMinThreshold('challengeRequested')
+  } else if (step === PaymentSchedulerStatus.CLOSE_CHALLENGE) {
+    // Only when tryCount = 0, it comes from the previous step, and it is necessary to wait for a fixed time
+    if (FIX_DELAY_STEP_ACTIVATE_CHALLENGE > 0 && tryCount === 0) {
+      await sleep(FIX_DELAY_STEP_ACTIVATE_CHALLENGE)
+    }
+    lapse += await checkPerMinThreshold('challengeRequested')
+  }
+
+  if (tryCount > 0) {
+    // exponential backoff, not exceeding the maximum retry delay
+    const retryDelay = Math.min(RETRY_BASE_DELAY * Math.pow(2, tryCount), RETRY_MAX_DELAY)
+    await sleep(retryDelay - lapse)
+  }
+}
+
+/**
+ * Determine whether the number of records processed per minute exceeds the configured limit; if it does, wait until the next minute
+ * @param {String} key the min threshold key
+ * @returns {Number} wait time
+ */
+async function checkPerMinThreshold (key) {
+  const mt = moment()
+  const min = mt.format('h:m')
+  let waitMs = 0
+  if (processStatus.perMin.minute === min) {
+    if (processStatus.perMin[key] >= processStatus.perMinThreshold[key]) {
+      waitMs = (60 - mt.seconds()) * 1000
+      localLogger.info(`The number of records of ${key} processed per minute reaches ${processStatus.perMinThreshold[key]}, and it needs to wait for ${60 - mt.seconds()} seconds until the next minute`)
+      await sleep(waitMs)
+      processStatus.perMin = {
+        minute: moment().format('h:m'),
+        paymentsProcessed: 0,
+        challengeRequested: 0,
+        resourceRequested: 0
+      }
+    }
+  } else {
+    processStatus.perMin = {
+      minute: min,
+      paymentsProcessed: 0,
+      challengeRequested: 0,
+      resourceRequested: 0
+    }
+  }
+  processStatus.perMin[key]++
+  return waitMs
+}
+
+/**
+ * Determine whether the failed call can be retried
+ * @param {Object} err the process error
+ * @returns {Boolean}
+ */
+function validateError (err) {
+  return !err.status || err.status >= 500
+}
+
+/**
+ * Execute the function; if an exception occurs, retry according to the retry conditions
+ * @param {Function} func the main function
+ * @param {Array} argArr the args of the main function
+ * @param {Function} predictFunc the function that determines whether an error is retryable
+ * @param {String} step the step name
+ * @returns the result of the main function
+ */
+async function withRetry (func, argArr, predictFunc, step) {
+  let tryCount = 0
+  processStatus.requestStartTime = Date.now()
+  while (true) {
+    await checkWait(step, tryCount)
+    tryCount++
+    try {
+      // mock code
+      if (SWITCH === PaymentProcessingSwitch.OFF) {
+        // simulate the actual process without real API calls by adding a delay (for example, 1 second for each step)
+        await sleep(1000)
+        if (step === PaymentSchedulerStatus.CREATE_CHALLENGE) {
+          return '00000000-0000-0000-0000-000000000000'
+        } else if (step === PaymentSchedulerStatus.GET_USER_ID) {
+          return { userId: 100001 }
+        }
+        return
+      } else {
+        // Execute the main function
+        const result = await func(...argArr)
+        return result
+      }
+    } catch (err) {
+      const now = Date.now()
+      // The following are the cases where no retry is attempted:
+      // 1. The number of retries exceeds the configured number
+      // 2. The thrown error does not match the retry conditions
+      // 3. The request execution time exceeds the configured time
+      // 4. 
The processing time of the payment record exceeds the configured time + if (tryCount > MAX_RETRY_COUNT || !predictFunc(err) || now - processStatus.requestStartTime > PER_REQUEST_MAX_TIME || now - processStatus.paymentStartTime > PER_PAYMENT_MAX_TIME) { + err.retry = tryCount + err.step = step + throw err + } + localLogger.info(`execute ${step} with error: ${err.message}, retry...`, 'withRetry') + } + } +} + +module.exports = { + processScheduler +} diff --git a/src/services/PaymentService.js b/src/services/PaymentService.js index e61a6c34..93d04e41 100644 --- a/src/services/PaymentService.js +++ b/src/services/PaymentService.js @@ -39,7 +39,8 @@ async function createPayment (options) { const challengeId = await createChallenge(options, token) await addResourceToChallenge(challengeId, options.userHandle, token) await activateChallenge(challengeId, token) - const completedChallenge = await closeChallenge(challengeId, options.userHandle, token) + const { userId } = await helper.getV3MemberDetailsByHandle(options.userHandle) + const completedChallenge = await closeChallenge(challengeId, userId, options.userHandle, token) return completedChallenge } @@ -117,6 +118,13 @@ async function addResourceToChallenge (id, handle, token) { await helper.createChallengeResource(body, token) localLogger.info({ context: 'addResourceToChallenge', message: `${handle} added to challenge ${id}` }) } catch (err) { + if (err.status === 409) { + const resource = await helper.getChallengeResource(id, handle, config.ROLE_ID_SUBMITTER) + if (resource) { + localLogger.info({ context: 'addResourceToChallenge', message: `${handle} exists in challenge ${id}` }) + return + } + } localLogger.error({ context: 'addResourceToChallenge', message: `Status Code: ${err.status}` }) localLogger.error({ context: 'addResourceToChallenge', message: err.response.text }) throw err @@ -137,6 +145,13 @@ async function activateChallenge (id, token) { await helper.updateChallenge(id, body, token) localLogger.info({ context: 'activateChallenge', message: `Challenge ${id} is activated successfully.` }) } catch (err) { + if (err.status >= 500) { + const challenge = await helper.getChallenge(id) + if (_.includes([constants.ChallengeStatus.ACTIVE, constants.ChallengeStatus.COMPLETED], challenge.status)) { + localLogger.info({ context: 'activateChallenge', message: `the status of Challenge ${id} had been ${challenge.status}.` }) + return + } + } localLogger.error({ context: 'activateChallenge', message: `Status Code: ${err.status}` }) localLogger.error({ context: 'activateChallenge', message: err.response.text }) throw err @@ -146,14 +161,14 @@ async function activateChallenge (id, token) { /** * closes the topcoder challenge * @param {String} id the challenge id + * @param {String} userId the user id * @param {String} userHandle the user handle * @param {String} token m2m token * @returns {Object} the closed challenge */ -async function closeChallenge (id, userHandle, token) { +async function closeChallenge (id, userId, userHandle, token) { localLogger.info({ context: 'closeChallenge', message: `Closing challenge ${id}` }) try { - const { userId } = await helper.getMemberDetailsByHandle(userHandle) const body = { status: constants.ChallengeStatus.COMPLETED, winners: [{ @@ -166,6 +181,13 @@ async function closeChallenge (id, userHandle, token) { localLogger.info({ context: 'closeChallenge', message: `Challenge ${id} is closed successfully.` }) return response } catch (err) { + if (err.status >= 500) { + const challenge = await 
helper.getChallenge(id) + if (constants.ChallengeStatus.COMPLETED === challenge.status) { + localLogger.info({ context: 'activateChallenge', message: `the status of Challenge ${id} had been ${challenge.status}.` }) + return challenge + } + } localLogger.error({ context: 'closeChallenge', message: `Status Code: ${err.status}` }) localLogger.error({ context: 'closeChallenge', message: err.response.text }) throw err @@ -173,5 +195,9 @@ async function closeChallenge (id, userHandle, token) { } module.exports = { - createPayment + createPayment, + createChallenge, + addResourceToChallenge, + activateChallenge, + closeChallenge } diff --git a/src/services/ResourceBookingService.js b/src/services/ResourceBookingService.js index 7be3833e..a23c336e 100644 --- a/src/services/ResourceBookingService.js +++ b/src/services/ResourceBookingService.js @@ -454,6 +454,11 @@ async function searchResourceBookings (currentUser, criteria, options = { return return projectId }) } + // `criteria[workPeriods.paymentStatus]` could be array of paymentStatus, or comma separated string of paymentStatus + // in case it's comma separated string of paymentStatus we have to convert it to an array of paymentStatus + if ((typeof criteria['workPeriods.paymentStatus']) === 'string') { + criteria['workPeriods.paymentStatus'] = criteria['workPeriods.paymentStatus'].trim().split(',').map(ps => Joi.attempt({ paymentStatus: ps.trim() }, Joi.object().keys({ paymentStatus: Joi.paymentStatus() })).paymentStatus) + } const page = criteria.page let perPage if (options.returnAll) { @@ -535,13 +540,21 @@ async function searchResourceBookings (currentUser, criteria, options = { return if (!_.isEmpty(workPeriodFilters)) { const workPeriodsMust = [] _.each(workPeriodFilters, (value, key) => { - workPeriodsMust.push({ - term: { - [key]: { - value + if (key === 'workPeriods.paymentStatus') { + workPeriodsMust.push({ + terms: { + [key]: value } - } - }) + }) + } else { + workPeriodsMust.push({ + term: { + [key]: { + value + } + } + }) + } }) esQuery.body.query.bool.must.push({ @@ -560,7 +573,13 @@ async function searchResourceBookings (currentUser, criteria, options = { return _.each(_.omit(workPeriodFilters, 'workPeriods.userHandle'), (value, key) => { key = key.split('.')[1] _.each(resourceBookings, r => { - r.workPeriods = _.filter(r.workPeriods, { [key]: value }) + r.workPeriods = _.filter(r.workPeriods, wp => { + if (key === 'paymentStatus') { + return _.includes(value, wp[key]) + } else { + return wp[key] === value + } + }) }) }) @@ -664,7 +683,10 @@ searchResourceBookings.schema = Joi.object().keys({ Joi.string(), Joi.array().items(Joi.number().integer()) ), - 'workPeriods.paymentStatus': Joi.paymentStatus(), + 'workPeriods.paymentStatus': Joi.alternatives( + Joi.string(), + Joi.array().items(Joi.paymentStatus()) + ), 'workPeriods.startDate': Joi.date().format('YYYY-MM-DD'), 'workPeriods.endDate': Joi.date().format('YYYY-MM-DD'), 'workPeriods.userHandle': Joi.string() diff --git a/src/services/TeamService.js b/src/services/TeamService.js index c32b2776..af006603 100644 --- a/src/services/TeamService.js +++ b/src/services/TeamService.js @@ -15,8 +15,10 @@ const ResourceBookingService = require('./ResourceBookingService') const HttpStatus = require('http-status-codes') const { Op } = require('sequelize') const models = require('../models') +const stopWords = require('../../docs/stopWords.json') const Role = models.Role const RoleSearchRequest = models.RoleSearchRequest +const topcoderSkills = {} const emailTemplates = 
_.mapValues(emailTemplateConfig, (template) => {
   return {
@@ -60,6 +62,73 @@ async function _getJobsByProjectIds (currentUser, projectIds) {
   return result
 }
 
+/**
+ * Gets topcoder skills and stores their names and compiled
+ * regex patterns according to Levenshtein distance <= 1
+ */
+async function _reloadCachedTopcoderSkills () {
+  // do not reload if the cache time has not expired
+  if (!_.isUndefined(topcoderSkills.time)) {
+    const cacheTime = config.TOPCODER_SKILLS_CACHE_TIME * 60 * 1000
+    if (new Date().getTime() - topcoderSkills.time < cacheTime) {
+      return
+    }
+  }
+  // collect all skills
+  const skills = await helper.getAllTopcoderSkills()
+  // set the last cached time
+  topcoderSkills.time = new Date().getTime()
+  topcoderSkills.skills = []
+  // store skill names and compiled regex patterns
+  _.each(skills, skill => {
+    topcoderSkills.skills.push({
+      name: skill.name,
+      pattern: _compileRegexPatternForSkillName(skill.name)
+    })
+  })
+}
+
+/**
+ * Prepares the regex pattern for the given word
+ * according to Levenshtein distance of 1 (insertions, deletions, substitutions or transpositions)
+ * @param {String} skillName the name of the skill
+ * @returns {RegExp} the compiled regex pattern
+ */
+function _compileRegexPatternForSkillName (skillName) {
+  // split the name into its chars
+  let chars = _.split(skillName, '')
+  // escape characters reserved to regex
+  chars = _.map(chars, _.escapeRegExp)
+  // It's not a good idea to apply tolerance according to
+  // Levenshtein distance for words that have fewer than 3 letters
+  // We expect skill names with 1 or 2 letters to appear in the
+  // job description exactly as they are spelled
+  if (chars.length < 3) {
+    return new RegExp(`^(?:${_.join(chars, '')})$`, 'i')
+  }
+
+  const res = []
+  // include the skill name itself
+  res.push(_.join(chars, ''))
+  // include every transposition combination
+  // E.g. java => ajva, jvaa, jaav
+  for (let i = 0; i < chars.length - 1; i++) {
+    res.push(_.join(_.slice(chars, 0, i), '') + chars[i + 1] + chars[i] + _.join(_.slice(chars, i + 2), ''))
+  }
+  // include every insertion combination
+  // E.g. java => .java, j.ava, ja.va, jav.a, java.
+  for (let i = 0; i <= chars.length; i++) {
+    res.push(_.join(_.slice(chars, 0, i), '') + '.' + _.join(_.slice(chars, i), ''))
+  }
+  // include every deletion/substitution combination
+  // E.g. java => .?ava, j.?va, ja.?a, jav.?
+  for (let i = 0; i < chars.length; i++) {
+    res.push(_.join(_.slice(chars, 0, i), '') + '.?' + _.join(_.slice(chars, i + 1), ''))
+  }
+  // return the regex pattern
+  return new RegExp(`^(?:${_.join(res, '|')})$`, 'i')
+}
+
 /**
  * List teams
  * @param {Object} currentUser the user who perform this operation
@@ -695,9 +764,7 @@ async function roleSearchRequest (currentUser, data) {
   } else {
     // if only job description is provided, collect skill names from description
     const tags = await getSkillsByJobDescription(currentUser, { description: data.jobDescription })
-    // collected tags from description has inconsistency with topcoder skills
-    // we need to filter invalid skills
-    const skills = await getSkillNamesByNames(_.map(tags, 'tag'))
+    const skills = _.map(tags, 'tag')
     // find the best matching role
     role = await getRoleBySkills(skills)
   }
@@ -763,7 +830,40 @@ getRoleBySkills.schema = Joi.object()
  * @returns {Object} the result
  */
 async function getSkillsByJobDescription (currentUser, data) {
-  return helper.getTags(data.description)
+  // load topcoder skills if needed. Using cached skills helps to avoid
+  // unnecessary api calls, which are extremely time consuming.
+  await _reloadCachedTopcoderSkills()
+  // replace markdown tags with spaces
+  let description = _.replace(data.description, /[`|^[\]{}~/,:-]|#{2,}|\n
/gi, ' ') + // replace all whitespace characters with single space + description = _.replace(description, /\s\s+/g, ' ') + // extract words from description + let words = _.split(description, ' ') + // remove stopwords from description + words = _.filter(words, word => stopWords.indexOf(word.toLowerCase()) === -1) + let foundSkills = [] + const result = [] + // try to match each word with skill names + // using pre-compiled regex pattern + _.each(words, word => { + _.each(topcoderSkills.skills, skill => { + // do not stop searching after a match in order to detect more lookalikes + if (skill.pattern.test(word)) { + foundSkills.push(skill.name) + } + }) + }) + foundSkills = _.uniq(foundSkills) + // apply desired template + _.each(foundSkills, skill => { + result.push({ + tag: skill, + type: 'taas_skill', + source: 'taas-jd-parser' + }) + }) + + return result } getSkillsByJobDescription.schema = Joi.object() @@ -828,27 +928,6 @@ getSkillIdsByNames.schema = Joi.object() skills: Joi.array().items(Joi.string().required()).required() }).required() -/** - * Filters invalid skills from given skill names - * - * @param {Array} skills the array of skill names - * @returns {Array} the array of skill names - */ -async function getSkillNamesByNames (skills) { - // remove duplicates, leading and trailing whitespaces, empties. - const cleanedSkills = _.uniq(_.filter(_.map(skills, skill => _.trim(skill)), skill => !_.isEmpty(skill))) - const result = await helper.getAllTopcoderSkills({ name: _.join(cleanedSkills, ',') }) - const skillNames = _.map(result, 'name') - // endpoint returns the partial matched skills - // we need to filter by exact match case insensitive - return _.intersectionBy(skillNames, cleanedSkills, _.toLower) -} - -getSkillNamesByNames.schema = Joi.object() - .keys({ - skills: Joi.array().items(Joi.string().required()).required() - }).required() - /** * Creates the role search request * @@ -1054,7 +1133,6 @@ module.exports = { getSkillsByJobDescription, getSkillNamesByIds, getSkillIdsByNames, - getSkillNamesByNames, createRoleSearchRequest, isExternalMember, createTeam, diff --git a/src/services/WorkPeriodPaymentService.js b/src/services/WorkPeriodPaymentService.js index 59f63720..80299628 100644 --- a/src/services/WorkPeriodPaymentService.js +++ b/src/services/WorkPeriodPaymentService.js @@ -81,7 +81,7 @@ async function _createSingleWorkPeriodPaymentWithWorkPeriodAndResourceBooking (w if (daysWorked === 0) { workPeriodPayment.amount = 0 } else { - workPeriodPayment.amount = _.round(memberRate * 5 / daysWorked, 2) + workPeriodPayment.amount = _.round(memberRate * daysWorked / 5, 2) } } @@ -96,7 +96,7 @@ async function _createSingleWorkPeriodPaymentWithWorkPeriodAndResourceBooking (w } } - await helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_CREATE_TOPIC, created.toJSON()) + await helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_CREATE_TOPIC, created.toJSON(), { key: `workPeriodPayment.billingAccountId:${workPeriodPayment.billingAccountId}` }) return created.dataValues } @@ -183,7 +183,7 @@ async function createWorkPeriodPayment (currentUser, workPeriodPayment) { const successResult = await _createSingleWorkPeriodPayment(wp, createdBy) result.push(successResult) } catch (e) { - result.push(_.extend(wp, { error: { message: e.message, code: e.httpStatus } })) + result.push(_.extend(_.pick(wp, 'workPeriodId'), { error: { message: e.message, code: e.httpStatus } })) } } return result @@ -234,7 +234,7 @@ async function updateWorkPeriodPayment (currentUser, id, data) { } } - await 
helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue: oldValue }) + await helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue: oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` }) return updated.dataValues } diff --git a/src/services/WorkPeriodService.js b/src/services/WorkPeriodService.js index fc750bd7..9e555b90 100644 --- a/src/services/WorkPeriodService.js +++ b/src/services/WorkPeriodService.js @@ -235,7 +235,7 @@ async function createWorkPeriod (currentUser, workPeriod) { } } - await helper.postEvent(config.TAAS_WORK_PERIOD_CREATE_TOPIC, created.toJSON()) + await helper.postEvent(config.TAAS_WORK_PERIOD_CREATE_TOPIC, created.toJSON(), { key: `resourceBooking.id:${workPeriod.resourceBookingId}` }) return created.dataValues } @@ -289,7 +289,7 @@ async function updateWorkPeriod (currentUser, id, data) { } } - await helper.postEvent(config.TAAS_WORK_PERIOD_UPDATE_TOPIC, updated.toJSON(), { oldValue: oldValue }) + await helper.postEvent(config.TAAS_WORK_PERIOD_UPDATE_TOPIC, updated.toJSON(), { oldValue: oldValue, key: `resourceBooking.id:${data.resourceBookingId}` }) return updated.dataValues } @@ -363,9 +363,9 @@ async function deleteWorkPeriod (currentUser, id) { workPeriodId: id } }) - await Promise.all(workPeriod.payments.map(({ id }) => helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_DELETE_TOPIC, { id }))) + await Promise.all(workPeriod.payments.map(({ id, billingAccountId }) => helper.postEvent(config.TAAS_WORK_PERIOD_PAYMENT_DELETE_TOPIC, { id }, { key: `workPeriodPayment.billingAccountId:${billingAccountId}` }))) await workPeriod.destroy() - await helper.postEvent(config.TAAS_WORK_PERIOD_DELETE_TOPIC, { id }) + await helper.postEvent(config.TAAS_WORK_PERIOD_DELETE_TOPIC, { id }, { key: `resourceBooking.id:${workPeriod.resourceBookingId}` }) } deleteWorkPeriod.schema = Joi.object().keys({ @@ -400,6 +400,11 @@ async function searchWorkPeriods (currentUser, criteria, options = { returnAll: return resourceBookingId }) } + // `criteria.paymentStatus` could be array of paymentStatus, or comma separated string of paymentStatus + // in case it's comma separated string of paymentStatus we have to convert it to an array of paymentStatus + if ((typeof criteria.paymentStatus) === 'string') { + criteria.paymentStatus = criteria.paymentStatus.trim().split(',').map(ps => Joi.attempt({ paymentStatus: ps.trim() }, Joi.object().keys({ paymentStatus: Joi.paymentStatus() })).paymentStatus) + } const page = criteria.page const perPage = criteria.perPage if (!criteria.sortBy) { @@ -435,7 +440,7 @@ async function searchWorkPeriods (currentUser, criteria, options = { returnAll: criteria.endDate = moment(criteria.endDate).format('YYYY-MM-DD') } // Apply filters - _.each(_.pick(criteria, ['resourceBookingId', 'userHandle', 'projectId', 'startDate', 'endDate', 'paymentStatus']), (value, key) => { + _.each(_.pick(criteria, ['resourceBookingId', 'userHandle', 'projectId', 'startDate', 'endDate']), (value, key) => { esQuery.body.query.nested.query.bool.must.push({ term: { [`workPeriods.${key}`]: { @@ -444,6 +449,13 @@ async function searchWorkPeriods (currentUser, criteria, options = { returnAll: } }) }) + if (criteria.paymentStatus) { + esQuery.body.query.nested.query.bool.must.push({ + terms: { + 'workPeriods.paymentStatus': criteria.paymentStatus + } + }) + } // if criteria contains resourceBookingIds, filter resourceBookingId with this value if (criteria.resourceBookingIds) { 
esQuery.body.query.nested.query.bool.filter = [{ @@ -458,9 +470,12 @@ async function searchWorkPeriods (currentUser, criteria, options = { returnAll: let workPeriods = _.reduce(body.hits.hits, (acc, resourceBooking) => _.concat(acc, resourceBooking._source.workPeriods), []) // ESClient will return ResourceBookings with it's all nested WorkPeriods // We re-apply WorkPeriod filters - _.each(_.pick(criteria, ['startDate', 'endDate', 'paymentStatus']), (value, key) => { + _.each(_.pick(criteria, ['startDate', 'endDate']), (value, key) => { workPeriods = _.filter(workPeriods, { [key]: value }) }) + if (criteria.paymentStatus) { + workPeriods = _.filter(workPeriods, wp => _.includes(criteria.paymentStatus, wp.paymentStatus)) + } workPeriods = _.sortBy(workPeriods, [criteria.sortBy]) if (criteria.sortOrder === 'desc') { workPeriods = _.reverse(workPeriods) @@ -521,7 +536,10 @@ searchWorkPeriods.schema = Joi.object().keys({ perPage: Joi.number().integer().min(1).max(10000).default(20), sortBy: Joi.string().valid('id', 'resourceBookingId', 'userHandle', 'projectId', 'startDate', 'endDate', 'daysWorked', 'customerRate', 'memberRate', 'paymentStatus'), sortOrder: Joi.string().valid('desc', 'asc'), - paymentStatus: Joi.paymentStatus(), + paymentStatus: Joi.alternatives( + Joi.string(), + Joi.array().items(Joi.paymentStatus()) + ), startDate: Joi.date().format('YYYY-MM-DD'), endDate: Joi.date().format('YYYY-MM-DD'), userHandle: Joi.string(), diff --git a/test/unit/ResourceBookingService.test.js b/test/unit/ResourceBookingService.test.js index 862ef357..7b52bde2 100644 --- a/test/unit/ResourceBookingService.test.js +++ b/test/unit/ResourceBookingService.test.js @@ -447,7 +447,7 @@ describe('resourceBooking service test', () => { expect(esClientSearch.calledOnce).to.be.true expect(result).to.deep.eq(data.result) }) - it('T25:Search resource bookin from DB', async () => { + it('T25:Search resource booking from DB', async () => { const data = testData.T25 const ESClient = commonData.ESClient ESClient.search = () => {} @@ -456,7 +456,7 @@ describe('resourceBooking service test', () => { return data.resourceBookingFindAll }) const stubResourceBookingCount = sinon.stub(ResourceBooking, 'count').callsFake(async () => { - return data.resourceBookingFindAll.length + return data.resourceBookingCount }) const result = await service.searchResourceBookings(commonData.userWithManagePermission, data.criteria) expect(esClientSearch.calledOnce).to.be.true diff --git a/test/unit/common/ResourceBookingData.js b/test/unit/common/ResourceBookingData.js index b296a384..78732e91 100644 --- a/test/unit/common/ResourceBookingData.js +++ b/test/unit/common/ResourceBookingData.js @@ -1229,6 +1229,8 @@ const T25 = { updatedAt: '2021-05-08T18:47:37.268Z' } ], + resourceBookingCount: [{ id: 'fbe133dd-0e36-4d0c-8197-49307b13ce75', count: 1 }, + { id: '60e99790-8da0-4596-badc-29a06feb78a0', count: 1 }], criteria: {}, result: { fromDb: true,