Skip to content

Create script for migrating isApplicationPageActive from CSV file #174

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Mar 10, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
84 changes: 84 additions & 0 deletions scripts/common/helper.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Provide some commonly used functions for scripts.
*/
const csv = require('csv-parser')
const fs = require('fs')
const request = require('superagent')

/**
 * Load CSV data from file.
 *
 * Headers are renamed through `fieldNameMap` (unmapped headers are kept
 * as-is) and every record is tagged with `_lnum`, its 1-based data row
 * number, for later error reporting.
 *
 * @param {String} pathname the pathname for the file
 * @param {Object} fieldNameMap mapping values of headers
 * @returns {Promise<Array>} the parsed records
 */
async function loadCSVFromFile (pathname, fieldNameMap = {}) {
  return new Promise((resolve, reject) => {
    const records = []
    let rowNumber = 1
    const parser = csv({
      mapHeaders: ({ header }) => fieldNameMap[header] || header
    })
    fs.createReadStream(pathname)
      .pipe(parser)
      .on('data', (row) => {
        records.push({ ...row, _lnum: rowNumber })
        rowNumber += 1
      })
      .on('error', reject)
      .on('end', () => resolve(records))
  })
}

/**
 * Get pathname from command line arguments.
 *
 * @returns {String} the pathname
 * @throws {Error} when no pathname was given, the path does not exist,
 * or the path is not a regular file
 */
function getPathnameFromCommandline () {
  const [, , pathname] = process.argv
  if (pathname === undefined) {
    throw new Error('pathname for the csv file is required')
  }
  if (!fs.existsSync(pathname)) {
    throw new Error(`pathname: ${pathname} path not exist`)
  }
  if (!fs.lstatSync(pathname).isFile()) {
    throw new Error(`pathname: ${pathname} path is not a regular file`)
  }
  return pathname
}

/**
 * Sleep for a given number of milliseconds.
 *
 * @param {Number} milliseconds the sleep time
 * @returns {Promise<undefined>} resolves once the delay has elapsed
 */
async function sleep (milliseconds) {
  await new Promise((resolve) => {
    setTimeout(resolve, milliseconds)
  })
}

/**
 * Find taas job by external id.
 *
 * @param {String} token the auth token
 * @param {String} taasApiUrl url for TaaS API
 * @param {String} externalId the external id
 * @returns {Object} the first job matching the external id
 * @throws {Error} when no job matches the external id
 */
async function getJobByExternalId (token, taasApiUrl, externalId) {
  const response = await request
    .get(`${taasApiUrl}/jobs`)
    .query({ externalId })
    .set('Authorization', `Bearer ${token}`)
  const jobs = response.body
  if (jobs.length === 0) {
    throw new Error(`externalId: ${externalId} job not found`)
  }
  return jobs[0]
}

// Public API of the common scripts helper.
module.exports = {
  loadCSVFromFile,
  getPathnameFromCommandline,
  sleep,
  getJobByExternalId
}
10 changes: 10 additions & 0 deletions scripts/common/logger.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
/*
* Logger for scripts.
*/

module.exports = {
info: (message) => console.log(`INFO: ${message}`),
debug: (message) => console.log(`DEBUG: ${message}`),
warn: (message) => console.log(`WARN: ${message}`),
error: (message) => console.log(`ERROR: ${message}`)
}
39 changes: 22 additions & 17 deletions scripts/recruit-crm-job-import/helper.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,27 @@
* Provide some commonly used functions for the RCRM import script.
*/
const config = require('./config')
const _ = require('lodash')
const request = require('superagent')
const { getM2MToken } = require('../../src/common/helper')
const commonHelper = require('../common/helper')

/**
* Sleep for a given number of milliseconds.
*
* @param {Number} milliseconds the sleep time
* @returns {undefined}
/*
* Function to get M2M token
* @returns {Promise}
*/
async function sleep (milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds))
}
// Machine-to-machine token factory.
// The IIFE configures the tc-core-library-js m2m client once from the
// script's Auth0 config values; the returned async function requests a
// machine token on each call.
const getM2MToken = (() => {
  const m2mAuth = require('tc-core-library-js').auth.m2m
  const m2m = m2mAuth(_.pick(config, [
    'AUTH0_URL',
    'AUTH0_AUDIENCE',
    'AUTH0_CLIENT_ID',
    'AUTH0_CLIENT_SECRET',
    'AUTH0_PROXY_SERVER_URL'
  ]))
  return async () => {
    return await m2m.getMachineToken(config.AUTH0_CLIENT_ID, config.AUTH0_CLIENT_SECRET)
  }
})()

/**
* Create a new job via taas api.
Expand All @@ -38,13 +47,7 @@ async function createJob (data) {
*/
async function getJobByExternalId (externalId) {
const token = await getM2MToken()
const { body: jobs } = await request.get(`${config.TAAS_API_URL}/jobs`)
.query({ externalId })
.set('Authorization', `Bearer ${token}`)
if (!jobs.length) {
throw new Error(`externalId: ${externalId} job not found`)
}
return jobs[0]
return commonHelper.getJobByExternalId(token, config.TAAS_API_URL, externalId)
}

/**
Expand Down Expand Up @@ -131,7 +134,9 @@ async function getProjectByDirectProjectId (directProjectId) {
}

module.exports = {
sleep,
sleep: commonHelper.sleep,
loadCSVFromFile: commonHelper.loadCSVFromFile,
getPathnameFromCommandline: commonHelper.getPathnameFromCommandline,
createJob,
getJobByExternalId,
updateResourceBookingStatus,
Expand Down
48 changes: 2 additions & 46 deletions scripts/recruit-crm-job-import/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
* Script to import Jobs data from Recruit CRM to Taas API.
*/

const csv = require('csv-parser')
const fs = require('fs')
const Joi = require('joi')
.extend(require('@joi/date'))
const _ = require('lodash')
Expand Down Expand Up @@ -38,48 +36,6 @@ function validateJob (job) {
return jobSchema.validate(job)
}

/**
 * Load Recruit CRM jobs data from file.
 *
 * Parses the CSV at `pathname`, renaming headers through
 * `constants.fieldNameMap` (unmapped headers are kept as-is) and tagging
 * each record with `_lnum`, its 1-based data row number.
 *
 * @param {String} pathname the pathname for the file
 * @returns {Promise<Array>} the result jobs data
 */
async function loadRcrmJobsFromFile (pathname) {
  let lnum = 1
  const result = []
  return new Promise((resolve, reject) => {
    fs.createReadStream(pathname)
      .pipe(csv({
        mapHeaders: ({ header }) => constants.fieldNameMap[header] || header
      }))
      .on('data', (data) => {
        // keep the row number alongside the parsed data for error reporting
        result.push({ ...data, _lnum: lnum })
        lnum += 1
      })
      .on('error', err => reject(err))
      .on('end', () => resolve(result))
  })
}

/**
 * Get pathname for a csv file from command line arguments.
 *
 * @returns {String} the validated pathname taken from argv[2]
 * @throws {Error} when the argument is missing, the path does not exist,
 * or the path is not a regular file
 */
function getPathname () {
  if (process.argv.length < 3) {
    throw new Error('pathname for the csv file is required')
  }
  const pathname = process.argv[2]
  if (!fs.existsSync(pathname)) {
    throw new Error(`pathname: ${pathname} path not exist`)
  }
  if (!fs.lstatSync(pathname).isFile()) {
    throw new Error(`pathname: ${pathname} path is not a regular file`)
  }
  return pathname
}

/**
* Process single job data. The processing consists of:
* - Validate the data.
Expand Down Expand Up @@ -146,8 +102,8 @@ async function processJob (job, info = []) {
* @returns {undefined}
*/
async function main () {
const pathname = getPathname()
const jobs = await loadRcrmJobsFromFile(pathname)
const pathname = helper.getPathnameFromCommandline()
const jobs = await helper.loadCSVFromFile(pathname, constants.fieldNameMap)
const report = new Report()
for (const job of jobs) {
logger.debug(`processing line #${job._lnum} - ${JSON.stringify(job)}`)
Expand Down
8 changes: 2 additions & 6 deletions scripts/recruit-crm-job-import/logger.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,6 @@
/*
* Logger for the RCRM import script.
*/
const logger = require('../common/logger')

module.exports = {
info: (message) => console.log(`INFO: ${message}`),
debug: (message) => console.log(`DEBUG: ${message}`),
warn: (message) => console.log(`WARN: ${message}`),
error: (message) => console.log(`ERROR: ${message}`)
}
module.exports = logger
80 changes: 80 additions & 0 deletions scripts/recruit-crm-job-sync/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
Recruit CRM Job Data Sync Script
===

# Configuration
Configuration file is at `./scripts/recruit-crm-job-sync/config.js`.


# Usage
``` bash
node scripts/recruit-crm-job-sync <pathname-to-a-csv-file>
```

By default the script updates jobs via `TC_API`.

# Example

1. Follow the README for `taas-apis` to deploy Taas API locally
2. Create two jobs via `Jobs > create job with booking manager` in Postman, with external ids `51913016` and `51902826` for each of the jobs.

**NOTE**: The external ids `51913016` and `51902826` could be found at `scripts/recruit-crm-job-sync/example_data.csv` under the Slug column.

3. Configure env variable `RCRM_SYNC_TAAS_API_URL` so that the script could make use of the local API:

``` bash
export RCRM_SYNC_TAAS_API_URL=http://localhost:3000/api/v5
```

4. Run the script against the sample CSV file and pipe the output from the script to a temporary file:

``` bash
node scripts/recruit-crm-job-sync scripts/recruit-crm-job-sync/example_data.csv | tee /tmp/report.txt
```

The output should be like this:

``` bash
DEBUG: processing line #1 - {"ID":"1","Name":"Data job Engineer","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"1","Maximum Experience In Years":"3","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"2","Job Status":"Closed","Company":"company 1","Contact":" ","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123456","Createdby":"abc","Created On":"02-Jun-20","Updated By":"abc","Updated On":"17-Feb-21","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51892637","_lnum":1}
ERROR: #1 - [EXTERNAL_ID_NOT_FOUND] externalId: 51892637 job not found
DEBUG: processed line #1
DEBUG: processing line #2 - {"ID":"2","Name":"JAVA coffee engineer","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"2","Maximum Experience In Years":"5","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"10","Job Status":"Closed","Company":"company 2","Contact":"abc","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123457","Createdby":"abc","Created On":"02-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51913016","_lnum":2}
DEBUG: jobId: 34cee9aa-e45f-47ed-9555-ffd3f7196fec isApplicationPageActive(current): false - isApplicationPageActive(to be synced): true
INFO: #2 - id: 34cee9aa-e45f-47ed-9555-ffd3f7196fec isApplicationPageActive: true "job" updated
DEBUG: processed line #2
DEBUG: processing line #3 - {"ID":"3","Name":"QA Seleinium","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"3","Maximum Experience In Years":"7","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"4","Job Status":"Canceled","Company":"company 3","Contact":" ","Currency":"$","allowApply":"No","Collaborator":"","Locality":"","City":"","Job Code":"J123458","Createdby":"abc","Created On":"04-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51902826","_lnum":3}
DEBUG: jobId: 4acde317-c364-4b79-aa77-295b98143c8b isApplicationPageActive(current): false - isApplicationPageActive(to be synced): false
WARN: #3 - isApplicationPageActive is already set
DEBUG: processed line #3
DEBUG: processing line #4 - {"ID":"5","Name":"Data Engineers and Data Architects","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"4","Maximum Experience In Years":"9","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"8","Job Status":"Closed","Company":"company 4","Contact":" ","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123459","Createdby":"abc","Created On":"09-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51811161","_lnum":4}
ERROR: #4 - [EXTERNAL_ID_NOT_FOUND] externalId: 51811161 job not found
DEBUG: processed line #4
DEBUG: processing line #5 - {"ID":"6","Name":"Docker Engineer","Description":"Java & J2EE or Python, Docker, Kubernetes, AWS or GCP","Qualification":"","Specialization":"","Minimum Experience In Years":"5","Maximum Experience In Years":"10","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"5","Job Status":"Closed","Company":"company 5","Contact":" ","Currency":"$","allowApply":"No","Collaborator":"","Locality":"","City":"","Job Code":"J123460","Createdby":"abc","Created On":"12-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51821342","_lnum":5}
ERROR: #5 - [EXTERNAL_ID_NOT_FOUND] externalId: 51821342 job not found
DEBUG: processed line #5
DEBUG: processing line #6 - {"ID":"7","Name":"lambda Developers","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"0","Maximum Experience In Years":"0","Minimum Annual Salary":"10","Maximum Annual Salary":"20","Number Of Openings":"2","Job Status":"Closed","Company":"company 6","Contact":"abc","Currency":"$","allowApply":"Yes","Collaborator":"","Locality":"","City":"","Job Code":"J123461","Createdby":"abc","Created On":"12-Jun-20","Updated By":"abc","Updated On":"12-Nov-20","Owner":"abc","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"51831524","_lnum":6}
ERROR: #6 - [EXTERNAL_ID_NOT_FOUND] externalId: 51831524 job not found
DEBUG: processed line #6
DEBUG: processing line #7 - {"ID":"","Name":"","Description":"","Qualification":"","Specialization":"","Minimum Experience In Years":"","Maximum Experience In Years":"","Minimum Annual Salary":"","Maximum Annual Salary":"","Number Of Openings":"","Job Status":"","Company":"","Contact":"","Currency":"","allowApply":"","Collaborator":"","Locality":"","City":"","Job Code":"","Createdby":"","Created On":"","Updated By":"","Updated On":"","Owner":"","Custom Column 1":"","Custom Column 2":"","Custom Column 3":"","Custom Column 4":"","Custom Column 5":"","Custom Column 6":"","Custom Column 7":"","Custom Column 8":"","Custom Column 9":"","Custom Column 10":"","Custom Column 11":"","Custom Column 12":"","Custom Column 13":"","Custom Column 14":"","Custom Column 15":"","externalId":"","_lnum":7}
ERROR: #7 - "allowApply" must be one of [Yes, No]
DEBUG: processed line #7
INFO: === summary ===
INFO: No. of records read = 7
INFO: No. of records updated for field isApplicationPageActive = true = 1
INFO: No. of records updated for field isApplicationPageActive = false = 0
INFO: No. of records : externalId not found = 4
INFO: No. of records failed(all) = 5
INFO: No. of records failed(excluding "externalId not found") = 1
INFO: No. of records skipped = 1
INFO: done!
```

The following command could be used to extract the summary from the output:

``` bash
cat /tmp/report.txt | grep 'No. of records' | cut -d' ' -f2-
```

To list all skipped lines:

``` bash
cat /tmp/report.txt | grep 'WARN' -B 3
```
20 changes: 20 additions & 0 deletions scripts/recruit-crm-job-sync/config.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/*
* Configuration for the RCRM sync script.
* Namespace is created to allow to configure the env variables for this script independently.
*/

const config = require('config')

const namespace = process.env.RCRM_SYNC_CONFIG_NAMESAPCE || 'RCRM_SYNC_'

module.exports = {
SLEEP_TIME: process.env[`${namespace}SLEEP_TIME`] || 500,
TAAS_API_URL: process.env[`${namespace}TAAS_API_URL`] || config.TC_API,

AUTH0_URL: process.env[`${namespace}AUTH0_URL`] || config.AUTH0_URL,
AUTH0_AUDIENCE: process.env[`${namespace}AUTH0_AUDIENCE`] || config.AUTH0_AUDIENCE,
TOKEN_CACHE_TIME: process.env[`${namespace}TOKEN_CACHE_TIME`] || config.TOKEN_CACHE_TIME,
AUTH0_CLIENT_ID: process.env[`${namespace}AUTH0_CLIENT_ID`] || config.AUTH0_CLIENT_ID,
AUTH0_CLIENT_SECRET: process.env[`${namespace}AUTH0_CLIENT_SECRET`] || config.AUTH0_CLIENT_SECRET,
AUTH0_PROXY_SERVER_URL: process.env[`${namespace}AUTH0_PROXY_SERVER_URL`] || config.AUTH0_PROXY_SERVER_URL
}
15 changes: 15 additions & 0 deletions scripts/recruit-crm-job-sync/constants.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
/*
* Constants for the RCRM sync script.
*/

module.exports = {
  // possible outcomes for a single processed CSV record
  ProcessingStatus: {
    Successful: 'successful',
    Failed: 'failed',
    Skipped: 'skipped'
  },
  // maps CSV header names to the job field names used by the sync script
  fieldNameMap: {
    'Allow Apply': 'allowApply',
    Slug: 'externalId'
  }
}
8 changes: 8 additions & 0 deletions scripts/recruit-crm-job-sync/example_data.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
ID,Name,Description,Qualification,Specialization,Minimum Experience In Years,Maximum Experience In Years,Minimum Annual Salary,Maximum Annual Salary,Number Of Openings,Job Status,Company,Contact,Currency,Allow Apply,Collaborator,Locality,City,Job Code,Createdby,Created On,Updated By,Updated On,Owner,Custom Column 1,Custom Column 2,Custom Column 3,Custom Column 4,Custom Column 5,Custom Column 6,Custom Column 7,Custom Column 8,Custom Column 9,Custom Column 10,Custom Column 11,Custom Column 12,Custom Column 13,Custom Column 14,Custom Column 15,Slug
1,Data job Engineer,,,,1,3,10,20,2,Closed,company 1, ,$,Yes,,,,J123456,abc,02-Jun-20,abc,17-Feb-21,abc,,,,,,,,,,,,,,,,51892637
2,JAVA coffee engineer,,,,2,5,10,20,10,Closed,company 2,abc,$,Yes,,,,J123457,abc,02-Jun-20,abc,12-Nov-20,abc,,,,,,,,,,,,,,,,51913016
3,QA Seleinium,,,,3,7,10,20,4,Canceled,company 3, ,$,No,,,,J123458,abc,04-Jun-20,abc,12-Nov-20,abc,,,,,,,,,,,,,,,,51902826
5,Data Engineers and Data Architects,,,,4,9,10,20,8,Closed,company 4, ,$,Yes,,,,J123459,abc,09-Jun-20,abc,12-Nov-20,abc,,,,,,,,,,,,,,,,51811161
6,Docker Engineer,"Java & J2EE or Python, Docker, Kubernetes, AWS or GCP",,,5,10,10,20,5,Closed,company 5, ,$,No,,,,J123460,abc,12-Jun-20,abc,12-Nov-20,abc,,,,,,,,,,,,,,,,51821342
7,lambda Developers,,,,0,0,10,20,2,Closed,company 6,abc,$,Yes,,,,J123461,abc,12-Jun-20,abc,12-Nov-20,abc,,,,,,,,,,,,,,,,51831524
,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
Loading