
Commit 8f48ffc

Merge pull request #554 from topcoder-platform/revert-552-revert-546-feature/export-import
Revert "Revert "Import and Export Data Commands""
2 parents e8ac9f7 + 85b4c0f

File tree

13 files changed: +720 -928 lines

README.md

Lines changed: 129 additions & 94 deletions
Large diffs are not rendered by default.

data/demo-data.json

Lines changed: 1 addition & 0 deletions
Large diffs are not rendered by default.

local/seed/seedMetadata.js

Lines changed: 23 additions & 0 deletions
@@ -7,6 +7,28 @@ if (!process.env.CONNECT_USER_TOKEN) {
  process.exit(1);
}

+/**
+ * Recursively walks the object and replaces prices with random values.
+ *
+ * This method MUTATES the object.
+ *
+ * @param {Object} o object
+ */
+function dummifyPrices(o) {
+  Object.keys(o).forEach(function (k) {
+    if (o[k] !== null && typeof o[k] === 'object') {
+      dummifyPrices(o[k]);
+      return;
+    }
+    if (k === 'price' && typeof o[k] === 'number') {
+      o[k] = 100 + Math.round(Math.random() * 10000);
+    }
+    if (k === 'price' && typeof o[k] === 'string') {
+      o[k] = (100 + Math.round(Math.random() * 10000)).toFixed(0);
+    }
+  });
+}
+
// we need to know any logged in Connect user token to retrieve data from DEV
const CONNECT_USER_TOKEN = process.env.CONNECT_USER_TOKEN;

@@ -29,6 +51,7 @@ module.exports = (targetUrl, token) => {
  })
  .then(async function (response) {
    let data = response.data;
+    dummifyPrices(data);

    console.log('Creating metadata objects locally...');
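
To make the change concrete, here is a minimal standalone sketch of what dummifyPrices does to a nested metadata object (the object shape below is illustrative, not taken from the actual DEV data):

// Hypothetical input; only the `price` keys matter to dummifyPrices.
const metadata = {
  productTemplates: [
    { name: 'Design', price: 1000 },        // numeric price -> random number
    { name: 'Development', price: '2500' }, // string price  -> random numeric string
  ],
};
dummifyPrices(metadata);
// Every `price` is now a random value in [100, 10100], so real DEV prices
// never end up in locally seeded demo data.
console.log(metadata.productTemplates[0].price);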

package.json

Lines changed: 7 additions & 3 deletions
@@ -10,6 +10,7 @@
    "lint": "./node_modules/.bin/eslint .",
    "lint:fix": "./node_modules/.bin/eslint . --fix || true",
    "build": "babel src -d dist --presets es2015 --copy-files",
+    "sync:all": "NODE_ENV=development npm run sync:db && NODE_ENV=development npm run sync:es",
    "sync:db": "./node_modules/.bin/babel-node migrations/sync.js",
    "sync:es": "./node_modules/.bin/babel-node migrations/elasticsearch_sync.js",
    "sync:es:metadata": "./node_modules/.bin/babel-node migrations/elasticsearch_sync.js --index-name metadata",
@@ -24,9 +25,12 @@
    "startKafkaConsumers:dev": "NODE_ENV=development nodemon -w src --exec \"babel-node src/index-kafka.js --presets es2015\" | ./node_modules/.bin/bunyan",
    "test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --require babel-core/register $(find src -path '*spec.js*') --exit",
    "test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --require babel-core/register $(find src -path '*spec.js*')",
-    "seed": "babel-node src/tests/seed.js --presets es2015",
    "demo-data": "babel-node local/seed",
-    "es-db-compare": "babel-node scripts/es-db-compare"
+    "es-db-compare": "babel-node scripts/es-db-compare",
+    "data:export": "NODE_ENV=development LOG_LEVEL=info node --require dotenv/config --require babel-core/register scripts/data/export",
+    "data:import": "NODE_ENV=development LOG_LEVEL=info node --require dotenv/config --require babel-core/register scripts/data/import",
+    "local:run-docker": "docker-compose -f ./local/full/docker-compose.yml up -d",
+    "local:init": "npm run sync:all && npm run data:import"
  },
  "repository": {
    "type": "git",
@@ -72,7 +76,7 @@
    "pg-native": "^3.0.0",
    "sequelize": "^5.8.7",
    "swagger-ui-express": "^4.0.6",
-    "tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.6.3",
+    "tc-core-library-js": "github:appirio-tech/tc-core-library-js#v2.6.3",
    "traverse": "^0.6.6",
    "urlencode": "^1.1.0",
    "yamljs": "^0.3.0"

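Taken together, the new scripts suggest a simple local workflow: npm run local:run-docker to start the local services, then npm run local:init to sync the database and Elasticsearch and import data/demo-data.json. The export and import scripts should also accept a custom file path via npm's argument pass-through, e.g. npm run data:export -- --file data/my-export.json (see the --file handling in scripts/data/export/index.js below).
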
scripts/data/dataModels.js

Lines changed: 35 additions & 0 deletions
@@ -0,0 +1,35 @@
+// For the import operation to succeed, models are listed so that each model comes after its dependencies.
+// So, when you add a new model to the list, make sure that its dependencies exist and come before it.
+import models from '../../src/models';
+
+const dataModels = [
+  'ProjectTemplate',
+  'ProductTemplate',
+  'ProjectType',
+  'ProductCategory',
+  'MilestoneTemplate',
+  'OrgConfig',
+  'Form',
+  'PlanConfig',
+  'PriceConfig',
+  'BuildingBlock',
+  'Project',
+  'ProjectPhase',
+  'PhaseProduct',
+  'ProjectAttachment',
+  'ProjectMember',
+  'ProjectMemberInvite',
+];
+/**
+ * Validate that the data models to be imported/exported are defined in the model scope
+ * @return {void} Returns void
+ */
+function validateDataModels() {
+  // Validate model names
+  dataModels.forEach((modelName) => {
+    if (!models[modelName]) {
+      throw new Error(`Invalid model: ${modelName}`);
+    }
+  });
+}
+module.exports = { dataModels, validateDataModels };
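
A minimal usage sketch of this module (module path as in the repository; the error message matches the throw above):

// Hypothetical consumer of scripts/data/dataModels.js:
const { dataModels, validateDataModels } = require('./scripts/data/dataModels');

validateDataModels();        // throws `Invalid model: <name>` if a listed model
                             // is not defined in src/models
console.log(dataModels[0]);  // 'ProjectTemplate' -- parent models precede
                             // dependents, e.g. Project before ProjectPhase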

scripts/data/export/exportData.js

Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
+import * as fs from 'fs';
+import models from '../../../src/models';
+import { dataModels, validateDataModels } from '../dataModels';
+
+/**
+ * saves data to a file
+ * @param {string} filePath path of the file where to save data
+ * @param {object} data object containing the loaded data for the specified models
+ * @param {object} logger logger instance
+ * @return {void} Returns void
+ */
+function saveExportedData(filePath, data, logger) {
+  logger.info('Start saving data to file....');
+  fs.writeFileSync(filePath, JSON.stringify(data));
+  logger.info('End saving data to file....');
+}
+/**
+ * loads data from the database and exports it to the specified file path
+ * @param {string} filePath path of the file where to save data
+ * @param {object} logger logger instance
+ * @return {Promise} Returns a promise
+ */
+async function exportDatabaseToJson(filePath, logger) {
+  const queries = [];
+  for (let index = 0; index < dataModels.length; index += 1) {
+    const modelName = dataModels[index];
+    // queries.push(models[modelName].findAll({ raw: true }));
+    // We use a direct select to ignore hooks, as we want to export the database as it is, including soft-deleted records
+    queries.push(
+      models.sequelize.query(
+        `SELECT * from ${models[modelName].getTableName()}`,
+      ),
+    );
+  }
+  const results = await Promise.all(queries);
+  const allModelsRecords = {};
+  for (let index = 0; index < dataModels.length; index += 1) {
+    const modelName = dataModels[index];
+    const modelRecords = results[index][0];
+    allModelsRecords[modelName] = modelRecords;
+    logger.info(
+      `Records loaded for model: ${modelName} = ${modelRecords.length}`,
+    );
+  }
+
+  saveExportedData(filePath, allModelsRecords, logger);
+}
+/**
+ * validates that the data models exist, then loads their data from the database and exports it to the specified file path
+ * @param {string} filePath path of the file where to save data
+ * @param {object} logger logger instance
+ * @return {Promise} Returns a promise
+ */
+async function exportData(filePath, logger) {
+  validateDataModels();
+  await exportDatabaseToJson(filePath, logger);
+}
+module.exports = {
+  exportData,
+};
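
For orientation, a sketch of calling exportData directly rather than through the CLI entry point below (using console as the logger is a shortcut for illustration; the real scripts use util.getScriptsLogger()):

// Hypothetical direct usage; scripts/data/export/index.js is the real entry point.
import { exportData } from './scripts/data/export/exportData';

const logger = console; // any object with info/error/debug methods works here
exportData('data/demo-data.json', logger)
  .then(() => logger.info('export finished'))
  .catch(err => logger.error('export failed:', err.message || err));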

scripts/data/export/index.js

Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
+import * as fs from 'fs';
+import * as path from 'path';
+import * as readline from 'readline';
+import Promise from 'bluebird';
+import util from '../../../src/util';
+import { exportData } from './exportData';
+/**
+ * executes the export data function and handles errors
+ * @param {string} filePath path of the file where to save data
+ * @param {object} logger logger instance
+ * @return {Promise} Returns a promise
+ */
+function runExportData(filePath, logger) {
+  exportData(filePath, logger)
+    .then(() => {
+      logger.info('Successfully exported data');
+      process.exit(0);
+    })
+    .catch((err) => {
+      logger.error('Failed to export data, ERROR:', err.message || err);
+      process.exit(1);
+    });
+}
+
+const logger = util.getScriptsLogger();
+const filePath =
+  process.argv[2] === '--file' && process.argv[3]
+    ? process.argv[3]
+    : 'data/demo-data.json';
+logger.info('Script will export data to file:', filePath);
+// check if the file exists
+if (fs.existsSync(filePath)) {
+  // We delay the overwrite question because it overlaps with a warning message from the sequelize module
+  Promise.delay(1).then(() => {
+    const rl = readline.createInterface({
+      input: process.stdin,
+      output: process.stdout,
+    });
+    // confirm overwriting the file
+    rl.question(
+      'File already exists. Are you sure you want to overwrite it? [Y] to overwrite: ',
+      (answer) => {
+        rl.close();
+        if (answer.toLowerCase() === 'y') {
+          logger.info('File will be overwritten.');
+          runExportData(filePath, logger);
+        } else {
+          logger.info('Exit without exporting any data');
+          process.exit(0);
+        }
+      },
+    ); // question()
+  });
+} else {
+  // get the base directory of the file
+  const baseDir = path.resolve(filePath, '..');
+  // create the directory recursively if it does not exist
+  util.mkdirSyncRecursive(baseDir);
+  runExportData(filePath, logger);
+}
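
In practice this entry point is presumably driven through npm: npm run data:export -- --file <path> exports to the given file, while a bare npm run data:export falls back to data/demo-data.json and prompts before overwriting an existing file.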

scripts/data/import/importData.js

Lines changed: 135 additions & 0 deletions
@@ -0,0 +1,135 @@
+import * as fs from 'fs';
+import models from '../../../src/models';
+import { dataModels, validateDataModels } from '../dataModels';
+import { indexMetadata, indexProjectsRange } from '../../../src/utils/es';
+
+/**
+ * imports data from a json file into the database
+ * @param {string} filePath path of the file to import data from
+ * @param {object} logger logger instance
+ * @return {Promise} Returns a promise
+ */
+async function writeDataToDatabase(filePath, logger) {
+  let transaction = null;
+  let currentModelName = null;
+  try {
+    // Start a transaction
+    transaction = await models.sequelize.transaction();
+    const jsonData = JSON.parse(fs.readFileSync(filePath).toString());
+    // we disable no-await-in-loop because we need to run the insert operations sequentially to avoid FK constraint errors
+    /* eslint-disable no-await-in-loop */
+    for (let index = 0; index < dataModels.length; index += 1) {
+      const modelName = dataModels[index];
+      currentModelName = modelName;
+      const model = models[modelName];
+      const modelRecords = jsonData[modelName];
+      if (modelRecords && modelRecords.length > 0) {
+        logger.info(`Importing data for model: ${modelName}`);
+        await model.bulkCreate(modelRecords, {
+          transaction,
+        });
+        logger.info(
+          `Records imported for model: ${modelName} = ${modelRecords.length}`,
+        );
+
+        // Set the autoincrement sequences in the database to the max value of the autoincrement column,
+        // so that when subsequent insertions don't provide a value for the autoincrement column (as when using the APIs),
+        // it is set automatically based on the last value of the sequence.
+        const modelAttributes = Object.keys(model.rawAttributes);
+        const tableName = model.getTableName();
+        /* eslint-disable no-await-in-loop */
+        for (
+          let attributeIndex = 0;
+          attributeIndex < modelAttributes.length;
+          attributeIndex += 1
+        ) {
+          const field = modelAttributes[attributeIndex];
+          const fieldInfo = model.rawAttributes[field];
+          if (fieldInfo.autoIncrement) {
+            // Get the sequence name corresponding to the autoincrement column in the table
+            const selectSequenceQuery = `SELECT pg_get_serial_sequence('${tableName}', '${field}')`;
+            const sequenceName = (
+              await models.sequelize.query(selectSequenceQuery, {
+                transaction,
+              })
+            )[0][0].pg_get_serial_sequence;
+
+            // update the sequence value to the max value of the autoincrement column in the table
+            const query = `SELECT setval('${sequenceName}', (SELECT MAX(${field}) FROM ${tableName}))`;
+            const setValue = (
+              await models.sequelize.query(query, {
+                transaction,
+              })
+            )[0][0].setval;
+            logger.debug(
+              `Updated autoIncrement for ${modelName}.${field} with max value = ${setValue}`,
+            );
+          }
+        }
+      } else {
+        logger.info(`No records to import for model: ${modelName}`);
+      }
+    }
+    // commit the transaction only if everything went ok
+    logger.info('committing transaction to database...');
+    await transaction.commit();
+  } catch (error) {
+    logger.error('Error while writing data of model:', currentModelName);
+    // rollback all insert operations
+    if (transaction) {
+      logger.info('rollback database transaction...');
+      transaction.rollback();
+    }
+    if (error.name && error.errors && error.fields) {
+      // For sequelize validation errors, we throw only the fields that help in debugging,
+      // because the full error object contains the very big sql query of the bulk insert operation
+      throw new Error(
+        JSON.stringify({
+          modelName: currentModelName,
+          name: error.name,
+          errors: error.errors,
+          fields: error.fields,
+        }),
+      );
+    } else {
+      throw error;
+    }
+  }
+}
+
+/**
+ * indexes the imported data to Elasticsearch
+ * @param {object} logger logger instance
+ * @return {Promise} Returns a promise
+ */
+async function indexDataToES(logger) {
+  logger.info('Indexing metadata...');
+  await indexMetadata();
+
+  logger.info('Indexing projects data...');
+  const req = {
+    logger,
+    projectIdStart: 1,
+    projectIdEnd: Number.MAX_SAFE_INTEGER,
+    indexName: null,
+    docType: null,
+    fields: null,
+    id: 0,
+  };
+  await indexProjectsRange(req);
+}
+
+/**
+ * imports data from a json file into the database and indexes it to Elasticsearch
+ * @param {string} filePath path of the file to import data from
+ * @param {object} logger logger instance
+ * @return {Promise} Returns a promise
+ */
+async function importData(filePath, logger) {
+  validateDataModels();
+  await writeDataToDatabase(filePath, logger);
+  await indexDataToES(logger);
+}
+module.exports = {
+  importData,
+};
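
The sequence-reset step is the subtle part of the import: bulkCreate inserts rows with explicit ids, which leaves each table's serial sequence behind its data. A minimal standalone sketch of the same technique against a hypothetical projects table with an autoincrement id column (both names are illustrative):

// Hypothetical illustration of the Postgres sequence reset used above
// (assumes an initialized sequelize instance and a `projects` table with a serial `id`):
async function resetProjectsIdSequence(sequelize) {
  // sequelize.query() resolves to [results, metadata]; the first row holds the sequence name
  const [[{ pg_get_serial_sequence: seq }]] = await sequelize.query(
    "SELECT pg_get_serial_sequence('projects', 'id')",
  );
  // setval moves the sequence to MAX(id); the next nextval() yields MAX(id) + 1,
  // so API inserts that omit the id no longer collide with imported rows.
  await sequelize.query(`SELECT setval('${seq}', (SELECT MAX(id) FROM projects))`);
}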

scripts/data/import/index.js

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+import * as fs from 'fs';
+import util from '../../../src/util';
+import { importData } from './importData';
+
+const logger = util.getScriptsLogger();
+const filePath = (process.argv[2] === '--file' && process.argv[3]) ? process.argv[3] : 'data/demo-data.json';
+// check if the file exists
+if (!fs.existsSync(filePath)) {
+  logger.error('File does not exist:', filePath);
+  process.exit(1);
+} else {
+  logger.info('Script will import data from file:', filePath);
+  importData(filePath, logger)
+    .then(() => {
+      logger.info('Successfully imported data');
+      process.exit(0);
+    })
+    .catch((err) => {
+      logger.error('Failed to import data, ERROR:', err.message || err);
+      process.exit(1);
+    });
+}
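
As with export, invocation presumably goes through npm: npm run data:import -- --file <path>, with the same data/demo-data.json default; npm run local:init chains sync:all and data:import to bootstrap a fresh local environment in one step.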
