diff --git a/app-routes.js b/app-routes.js index d3987ba2..1c407aee 100644 --- a/app-routes.js +++ b/app-routes.js @@ -59,6 +59,36 @@ module.exports = (app) => { next() } }) + } else { + // public API, but still try to authenticate token if provided, but allow missing/invalid token + actions.push((req, res, next) => { + const interceptRes = {} + interceptRes.status = () => interceptRes + interceptRes.json = () => interceptRes + interceptRes.send = () => next() + authenticator(_.pick(config, ['AUTH_SECRET', 'VALID_ISSUERS']))(req, interceptRes, next) + }) + + actions.push((req, res, next) => { + if (!req.authUser) { + next() + } else if (req.authUser.isMachine) { + if (!def.scopes || !req.authUser.scopes || !helper.checkIfExists(def.scopes, req.authUser.scopes)) { + req.authUser = undefined + } + next() + } else { + req.authUser.jwtToken = req.headers.authorization + // check if user has full manage permission + if (_.intersection(req.authUser.roles, constants.FullManagePermissionRoles).length) { + req.authUser.hasManagePermission = true + } + if (_.includes(req.authUser.roles, constants.UserRoles.ConnectManager)) { + req.authUser.isConnectManager = true + } + next() + } + }) } actions.push(method) diff --git a/data/demo-data.json b/data/demo-data.json index 59b2ba50..aad3fbe7 100644 --- a/data/demo-data.json +++ b/data/demo-data.json @@ -7646,10 +7646,10 @@ "name": "Angular Developer", "description": "* Writes tested and documented JavaScript, HTML and CSS\n* Makes design and technical decisions for AngularJS projects\n* Develops application code and unit test in the AngularJS, Rest Web Services and Java technologies", "listOfSkills": [ - "database", - "winforms", - "user interface (ui)", - "photoshop" + "Database", + "Winforms", + "User Interface (Ui)", + "Photoshop" ], "rates": [ { @@ -7678,10 +7678,10 @@ "name": "Dev Ops Engineer", "description": "* Introduces processes, tools, and methodologies\n* Balances needs throughout the software development life cycle\n* Configures server images, optimizes task performance in correspondence with engineers", "listOfSkills": [ - "dropwizard", - "nginx", - "machine learning", - "force.com" + "Dropwizard", + "NGINX", + "Machine Learning", + "Force.com" ], "rates": [ { @@ -7722,10 +7722,10 @@ "name": "Salesforce Developer", "description": "* Meets with project managers to determine CRM needs\n* Develops customized solutions within the Salesforce platform\n* Designs, codes, and implements Salesforce applications\n* Creates timelines and development goals\n* Tests the stability and functionality of the application\n* Troubleshoots and fixes bugs\n* Writes documents and provides technical training for Salesforce Staff\n* Maintains the security and integrity of the application software", "listOfSkills": [ - "docker", - ".net", + "Docker", + ".NET", "appcelerator", - "flux" + "Flux" ], "rates": [ { diff --git a/docs/Topcoder-bookings-api.postman_collection.json b/docs/Topcoder-bookings-api.postman_collection.json index 845523ab..a3e40abe 100644 --- a/docs/Topcoder-bookings-api.postman_collection.json +++ b/docs/Topcoder-bookings-api.postman_collection.json @@ -1,6 +1,6 @@ { "info": { - "_postman_id": "6f274c86-24a5-412e-95e6-fafa34e2a936", + "_postman_id": "15f10b58-dda5-4aaf-96e5-061a5c901717", "name": "Topcoder-bookings-api", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" }, @@ -18153,16 +18153,14 @@ "response": [] }, { - "name": "send request with invalid token", + "name": "send request with public", "event": [ { 
"listen": "test", "script": { "exec": [ - "pm.test('Status code is 401', function () {\r", - " pm.response.to.have.status(401);\r", - " const response = pm.response.json()\r", - " pm.expect(response.message).to.eq(\"Invalid Token.\")\r", + "pm.test('Status code is 200', function () {\r", + " pm.response.to.have.status(200);\r", "});" ], "type": "text/javascript" @@ -18171,16 +18169,10 @@ ], "request": { "method": "POST", - "header": [ - { - "key": "Authorization", - "value": "Bearer invalid_token", - "type": "text" - } - ], + "header": [], "body": { "mode": "raw", - "raw": "{\r\n \"jobDescription\": \"Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator\"\r\n}", + "raw": "{\r\n \"jobDescription\": \"Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator, C#\"\r\n}", "options": { "raw": { "language": "json" @@ -18210,7 +18202,7 @@ "pm.test('Status code is 400', function () {\r", " pm.response.to.have.status(400);\r", " const response = pm.response.json()\r", - " pm.expect(response.message).to.eq(\"\\\"data\\\" must have at least 1 key\")\r", + " pm.expect(response.message).to.eq(\"\\\"data\\\" must contain at least one of [roleId, jobDescription, skills]\")\r", "});" ], "type": "text/javascript" @@ -19211,71 +19203,7 @@ ], "request": { "method": "POST", - "header": [ - { - "key": "Authorization", - "type": "text", - "value": "Bearer {{token_administrator}}" - }, - { - "key": "Content-Type", - "type": "text", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. 
Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }", - "options": { - "raw": { - "language": "json" - } - } - }, - "url": { - "raw": "{{URL}}/taas-teams/getSkillsByJobDescription", - "host": [ - "{{URL}}" - ], - "path": [ - "taas-teams", - "getSkillsByJobDescription" - ] - } - }, - "response": [] - }, - { - "name": "get skills by invalid token", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test('Status code is 401', function () {\r", - " pm.response.to.have.status(401);\r", - " const response = pm.response.json()\r", - " pm.expect(response.message).to.eq(\"Invalid Token.\")\r", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [ - { - "key": "Authorization", - "type": "text", - "value": "Bearer invalid_token" - }, - { - "key": "Content-Type", - "type": "text", - "value": "application/json" - } - ], + "header": [], "body": { "mode": "raw", "raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }", @@ -19317,18 +19245,7 @@ ], "request": { "method": "POST", - "header": [ - { - "key": "Authorization", - "type": "text", - "value": "Bearer {{token_administrator}}" - }, - { - "key": "Content-Type", - "type": "text", - "value": "application/json" - } - ], + "header": [], "body": { "mode": "raw", "raw": "{ \"description\": \"\" }", @@ -19370,18 +19287,7 @@ ], "request": { "method": "POST", - "header": [ - { - "key": "Authorization", - "type": "text", - "value": "Bearer {{token_administrator}}" - }, - { - "key": "Content-Type", - "type": "text", - "value": "application/json" - } - ], + "header": [], "body": { "mode": "raw", "raw": "{}", diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 6ffdf86d..26b389f4 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -5249,6 +5249,9 @@ components: jobDescription: type: string description: "The description of the job." + jobTitle: + type: string + description: "An optional job title." - type: object required: - skills @@ -5281,6 +5284,10 @@ components: format: float description: "Rate at which searched skills match the given role" example: 0.75 + jobTitle: + type: string + description: "Optional job title." 
+ example: "Lead Application Developer" SubmitTeamRequestBody: properties: teamName: diff --git a/migrations/2021-06-22-role-search-request-add-job-title.js b/migrations/2021-06-22-role-search-request-add-job-title.js new file mode 100644 index 00000000..c1df8fba --- /dev/null +++ b/migrations/2021-06-22-role-search-request-add-job-title.js @@ -0,0 +1,18 @@ +const config = require('config') + +/** + * Add jobTitle field to the RoleSearchRequest model. + */ + +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.addColumn({ tableName: 'role_search_requests', schema: config.DB_SCHEMA_NAME }, 'job_title', + { + type: Sequelize.STRING(100), + allowNull: true + }) + }, + down: async (queryInterface, Sequelize) => { + await queryInterface.removeColumn({ tableName: 'role_search_requests', schema: config.DB_SCHEMA_NAME}, 'job_title') + } +} \ No newline at end of file diff --git a/src/common/helper.js b/src/common/helper.js index 773dde0f..5f9f13db 100644 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -1960,8 +1960,8 @@ function removeTextFormatting (text) { // Remove footnotes text = _.replace(text, /\[\^.+?\](: .*?$)?/g, ' ') text = _.replace(text, /\s{0,2}\[.*?\]: .*?$/g, ' ') - // Remove images - text = _.replace(text, /!\[(.*?)\][[(].*?[\])]/g, ' $1 ') + // Remove images and keep description unless it is default description "image" + text = _.replace(text, /!(\[((?!image).*?)\]|\[.*?\])[[(].*?[\])]/g, ' $2 ') // Remove inline links text = _.replace(text, /\[(.*?)\][[(].*?[\])]/g, ' $1 ') // Remove blockquotes diff --git a/src/controllers/TeamController.js b/src/controllers/TeamController.js index d7992e14..998f9a81 100644 --- a/src/controllers/TeamController.js +++ b/src/controllers/TeamController.js @@ -114,7 +114,7 @@ async function getMe (req, res) { * @param res the response */ async function getSkillsByJobDescription (req, res) { - res.send(await service.getSkillsByJobDescription(req.authUser, req.body)) + res.send(await service.getSkillsByJobDescription(req.body)) } /** diff --git a/src/models/RoleSearchRequest.js b/src/models/RoleSearchRequest.js index 384b74d0..c79ae84f 100644 --- a/src/models/RoleSearchRequest.js +++ b/src/models/RoleSearchRequest.js @@ -62,6 +62,11 @@ module.exports = (sequelize) => { type: Sequelize.UUID }) }, + jobTitle: { + field: 'job_title', + type: Sequelize.STRING(100), + allowNull: true + }, createdBy: { field: 'created_by', type: Sequelize.UUID, diff --git a/src/routes/TeamRoutes.js b/src/routes/TeamRoutes.js index 941be406..b2415e17 100644 --- a/src/routes/TeamRoutes.js +++ b/src/routes/TeamRoutes.js @@ -23,7 +23,7 @@ module.exports = { '/taas-teams/skills': { get: { controller: 'TeamController', - method: 'searchSkills', + method: 'searchSkills' } }, '/taas-teams/me': { @@ -37,9 +37,7 @@ module.exports = { '/taas-teams/getSkillsByJobDescription': { post: { controller: 'TeamController', - method: 'getSkillsByJobDescription', - auth: 'jwt', - scopes: [constants.Scopes.READ_TAAS_TEAM] + method: 'getSkillsByJobDescription' } }, '/taas-teams/:id': { @@ -91,7 +89,7 @@ module.exports = { '/taas-teams/sendRoleSearchRequest': { post: { controller: 'TeamController', - method: 'roleSearchRequest', + method: 'roleSearchRequest' } }, '/taas-teams/submitTeamRequest': { diff --git a/src/services/PaymentSchedulerService.js b/src/services/PaymentSchedulerService.js index b07694e4..f38eab6f 100644 --- a/src/services/PaymentSchedulerService.js +++ b/src/services/PaymentSchedulerService.js @@ -90,7 +90,7 @@ async function 
processPayment (workPeriodPayment) { const oldValue = workPeriodPayment.toJSON() const updated = await workPeriodPayment.update({ status: 'in-progress' }) // Update the modified status to es - await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue }) + await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` }) } // Check whether the number of processed records per minute exceeds the specified number, if it exceeds, wait for the next minute before processing await checkWait(PaymentSchedulerStatus.START_PROCESS) @@ -115,7 +115,7 @@ async function processPayment (workPeriodPayment) { // 5. update wp and save it should only update already existent Work Period Payment record with created "challengeId" and "status=completed". const updated = await workPeriodPayment.update({ challengeId: paymentScheduler.challengeId, status: 'completed' }) // Update the modified status to es - await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue }) + await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` }) await paymentScheduler.update({ step: PaymentSchedulerStatus.CLOSE_CHALLENGE, userId: paymentScheduler.userId, status: 'completed' }) @@ -128,7 +128,7 @@ async function processPayment (workPeriodPayment) { // If payment processing failed Work Periods Payment "status" should be changed to "failed" and populate "statusDetails" field with error details in JSON format. const updated = await workPeriodPayment.update({ statusDetails, status: 'failed' }) // Update the modified status to es - await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue }) + await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` }) if (paymentScheduler) { await paymentScheduler.update({ step: _.get(err, 'step'), userId: paymentScheduler.userId, status: 'failed' }) diff --git a/src/services/TeamService.js b/src/services/TeamService.js index bc236cf1..94865aae 100644 --- a/src/services/TeamService.js +++ b/src/services/TeamService.js @@ -760,7 +760,7 @@ async function roleSearchRequest (currentUser, data) { if (!_.isUndefined(data.roleId)) { role = await Role.findById(data.roleId) role = role.toJSON() - role.skillsMatch = 1; + role.skillsMatch = 1 // if skills is provided then use skills to find role } else if (!_.isUndefined(data.skills)) { // validate given skillIds and convert them into skill names @@ -769,18 +769,19 @@ async function roleSearchRequest (currentUser, data) { role = await getRoleBySkills(skills) } else { // if only job description is provided, collect skill names from description - const tags = await getSkillsByJobDescription(currentUser, { description: data.jobDescription }) + const tags = await getSkillsByJobDescription({ description: data.jobDescription }) const skills = _.map(tags, 'tag') // find the best matching role role = await getRoleBySkills(skills) } data.roleId = role.id // create roleSearchRequest entity with found roleId - const { id: roleSearchRequestId } = await createRoleSearchRequest(currentUser, data) + const { id: roleSearchRequestId, jobTitle } = await createRoleSearchRequest(currentUser, data) + const entity = jobTitle ? 
{ jobTitle, roleSearchRequestId } : { roleSearchRequestId } // clean Role role = await _cleanRoleDTO(currentUser, role) // return Role - return _.assign(role, { roleSearchRequestId }) + return _.assign(role, entity) } roleSearchRequest.schema = Joi.object() @@ -789,8 +790,10 @@ roleSearchRequest.schema = Joi.object() data: Joi.object().keys({ roleId: Joi.string().uuid(), jobDescription: Joi.string().max(255), - skills: Joi.array().items(Joi.string().uuid().required()) - }).required().min(1) + skills: Joi.array().items(Joi.string().uuid().required()), + jobTitle: Joi.string().max(100), + previousRoleSearchRequestId: Joi.string().uuid() + }).required().or('roleId', 'jobDescription', 'skills') }).required() /** @@ -799,17 +802,16 @@ roleSearchRequest.schema = Joi.object() * @returns {Role} the best matching Role */ async function getRoleBySkills (skills) { - const lowerCaseSkills = skills.map(skill => skill.toLowerCase()) // find all roles which includes any of the given skills const queryCriteria = { - where: { listOfSkills: { [Op.overlap]: lowerCaseSkills } }, + where: { listOfSkills: { [Op.overlap]: skills } }, raw: true } const roles = await Role.findAll(queryCriteria) if (roles.length > 0) { let result = _.each(roles, role => { // calculate each found roles matching rate - role.skillsMatch = _.intersection(role.listOfSkills, lowerCaseSkills).length / skills.length + role.skillsMatch = _.intersection(role.listOfSkills, skills).length / skills.length // each role can have multiple rates, get the maximum of global rates role.maxGlobal = _.maxBy(role.rates, 'global').global }) @@ -821,7 +823,7 @@ async function getRoleBySkills (skills) { } } // if no matching role found then return Custom role or empty object - return await Role.findOne({ where: { name: { [Op.iLike]: 'Custom' } } }) || {} + return await Role.findOne({ where: { name: { [Op.iLike]: 'Custom' } }, raw: true }) || {} } getRoleBySkills.schema = Joi.object() @@ -836,7 +838,7 @@ getRoleBySkills.schema = Joi.object() * @param {Object} data the search criteria * @returns {Object} the result */ -async function getSkillsByJobDescription (currentUser, data) { +async function getSkillsByJobDescription (data) { // load topcoder skills if needed. Using cached skills helps to avoid // unnecessary api calls which is extremely time comsuming. await _reloadCachedTopcoderSkills() @@ -879,7 +881,6 @@ async function getSkillsByJobDescription (currentUser, data) { getSkillsByJobDescription.schema = Joi.object() .keys({ - currentUser: Joi.object().required(), data: Joi.object().keys({ description: Joi.string().required() }).required() @@ -984,7 +985,7 @@ createRoleSearchRequest.schema = Joi.object() */ async function _cleanRoleDTO (currentUser, role) { // if current user is machine, it means user is not logged in - if (currentUser.isMachine || await isExternalMember(currentUser.userId)) { + if (_.isNil(currentUser) || currentUser.isMachine || await isExternalMember(currentUser.userId)) { role.isExternalMember = true if (role.rates) { role.rates = _.map(role.rates, rate =>
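
Note on the new public-route branch in app-routes.js: the authenticator is run against a throwaway response object, so a valid token still populates req.authUser while a missing or invalid one is silently ignored. Below is a minimal, self-contained sketch of that pattern; toyAuthenticator is hypothetical and only mimics the assumed contract of the real tc-core-library-js authenticator (call next() on success, otherwise reply via res.status(401).json(...).send()).

// Sketch only, not part of the diff: the "optional authentication" wrapper.
function toyAuthenticator (req, res, next) {
  // Hypothetical stand-in for authenticator(_.pick(config, ['AUTH_SECRET', 'VALID_ISSUERS']))
  if (req.headers.authorization === 'Bearer good-token') {
    req.authUser = { userId: 123, roles: [] }
    return next()
  }
  return res.status(401).json({ message: 'Invalid Token.' }).send()
}

function optionalAuth (authenticator) {
  return (req, res, next) => {
    // Stub response: any 401 the authenticator writes lands here instead of the real
    // response, and .send() simply continues the middleware chain unauthenticated.
    const interceptRes = {}
    interceptRes.status = () => interceptRes
    interceptRes.json = () => interceptRes
    interceptRes.send = () => next()
    authenticator(req, interceptRes, next)
  }
}

// An anonymous request reaches the handler with req.authUser undefined;
// a request carrying a valid token reaches it with req.authUser populated.
const anonReq = { headers: {} }
optionalAuth(toyAuthenticator)(anonReq, {}, () => console.log('handler, authUser =', anonReq.authUser))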
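
Note on the roleSearchRequest schema: .or('roleId', 'jobDescription', 'skills') replaces .min(1), which is what the updated Postman assertion ("\"data\" must contain at least one of [roleId, jobDescription, skills]") expects. With the new optional fields (jobTitle, previousRoleSearchRequestId), .min(1) alone would accept a body that carries only those, while .or() still rejects it. A small sketch, assuming Joi v17-style validation (the error label is "value" here because the object is validated standalone rather than as the nested "data" key):

const Joi = require('joi')

const schema = Joi.object().keys({
  roleId: Joi.string().uuid(),
  jobDescription: Joi.string().max(255),
  skills: Joi.array().items(Joi.string().uuid().required()),
  jobTitle: Joi.string().max(100),
  previousRoleSearchRequestId: Joi.string().uuid()
}).required().or('roleId', 'jobDescription', 'skills')

// Only optional fields present: rejected.
console.log(schema.validate({ jobTitle: 'Lead Application Developer' }).error.message)
// => '"value" must contain at least one of [roleId, jobDescription, skills]'

// Any one of the three driver fields present: accepted.
console.log(schema.validate({ jobDescription: 'Needs NGINX and Machine Learning' }).error)
// => undefined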
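
Note on the regex change in removeTextFormatting(): the updated pattern keeps the alt text of Markdown images unless it begins with the default placeholder "image", in which case the whole image reference is dropped. A quick illustration of the assumed behavior, using the same pattern as the diff:

const _ = require('lodash')

// Keeps capture group 2 (the alt text) when it does not start with "image".
function stripImages (text) {
  return _.replace(text, /!(\[((?!image).*?)\]|\[.*?\])[[(].*?[\])]/g, ' $2 ')
}

console.log(stripImages('See ![architecture diagram](diagram.png) for details'))
// => 'See  architecture diagram  for details' (alt text kept, extra whitespace remains)
console.log(stripImages('See ![image](screenshot.png) for details'))
// => 'See    for details' (default alt text dropped)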