Skip to content

Revert "Revert "Import and Export Data Commands"" #554

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Apr 22, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
223 changes: 129 additions & 94 deletions README.md

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions data/demo-data.json

Large diffs are not rendered by default.

23 changes: 23 additions & 0 deletions local/seed/seedMetadata.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,28 @@ if (!process.env.CONNECT_USER_TOKEN) {
process.exit(1);
}

/**
 * Recursively walks an object tree and replaces every `price` field with a
 * random value in the range [100, 10100]. Numeric prices stay numbers;
 * string prices are replaced with an integer string.
 *
 * NOTE: the object passed in is modified in place.
 *
 * @param {Object} o object whose prices are replaced
 */
function dummifyPrices(o) {
  for (const key of Object.keys(o)) {
    const value = o[key];
    if (value !== null && typeof value === 'object') {
      // nested object or array: descend and leave this key untouched
      dummifyPrices(value);
    } else if (key === 'price' && typeof value === 'number') {
      o[key] = 100 + Math.round(Math.random() * 10000);
    } else if (key === 'price' && typeof value === 'string') {
      o[key] = (100 + Math.round(Math.random() * 10000)).toFixed(0);
    }
  }
}

// we need to know any logged in Connect user token to retrieve data from DEV
const CONNECT_USER_TOKEN = process.env.CONNECT_USER_TOKEN;

Expand All @@ -29,6 +51,7 @@ module.exports = (targetUrl, token) => {
})
.then(async function (response) {
let data = response.data;
dummifyPrices(data)

console.log('Creating metadata objects locally...');

Expand Down
10 changes: 7 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
"lint": "./node_modules/.bin/eslint .",
"lint:fix": "./node_modules/.bin/eslint . --fix || true",
"build": "babel src -d dist --presets es2015 --copy-files",
"sync:all": "NODE_ENV=development npm run sync:db && NODE_ENV=development npm run sync:es",
"sync:db": "./node_modules/.bin/babel-node migrations/sync.js",
"sync:es": "./node_modules/.bin/babel-node migrations/elasticsearch_sync.js",
"sync:es:metadata": "./node_modules/.bin/babel-node migrations/elasticsearch_sync.js --index-name metadata",
Expand All @@ -24,9 +25,12 @@
"startKafkaConsumers:dev": "NODE_ENV=development nodemon -w src --exec \"babel-node src/index-kafka.js --presets es2015\" | ./node_modules/.bin/bunyan",
"test": "NODE_ENV=test npm run lint && NODE_ENV=test npm run sync:es && NODE_ENV=test npm run sync:db && NODE_ENV=test ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- --timeout 10000 --require babel-core/register $(find src -path '*spec.js*') --exit",
"test:watch": "NODE_ENV=test ./node_modules/.bin/mocha -w --require babel-core/register $(find src -path '*spec.js*')",
"seed": "babel-node src/tests/seed.js --presets es2015",
"demo-data": "babel-node local/seed",
"es-db-compare": "babel-node scripts/es-db-compare"
"es-db-compare": "babel-node scripts/es-db-compare",
"data:export": "NODE_ENV=development LOG_LEVEL=info node --require dotenv/config --require babel-core/register scripts/data/export",
"data:import": "NODE_ENV=development LOG_LEVEL=info node --require dotenv/config --require babel-core/register scripts/data/import",
"local:run-docker": "docker-compose -f ./local/full/docker-compose.yml up -d",
"local:init": "npm run sync:all && npm run data:import"
},
"repository": {
"type": "git",
Expand Down Expand Up @@ -72,7 +76,7 @@
"pg-native": "^3.0.0",
"sequelize": "^5.8.7",
"swagger-ui-express": "^4.0.6",
"tc-core-library-js": "appirio-tech/tc-core-library-js.git#v2.6.3",
"tc-core-library-js": "github:appirio-tech/tc-core-library-js#v2.6.3",
"traverse": "^0.6.6",
"urlencode": "^1.1.0",
"yamljs": "^0.3.0"
Expand Down
35 changes: 35 additions & 0 deletions scripts/data/dataModels.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
// Models must be listed so that each model appears AFTER all of its
// dependencies — the import operation replays this list in order, so a
// newly added model has to be inserted after everything it references.
import models from '../../src/models';

const dataModels = [
  'ProjectTemplate',
  'ProductTemplate',
  'ProjectType',
  'ProductCategory',
  'MilestoneTemplate',
  'OrgConfig',
  'Form',
  'PlanConfig',
  'PriceConfig',
  'BuildingBlock',
  'Project',
  'ProjectPhase',
  'PhaseProduct',
  'ProjectAttachment',
  'ProjectMember',
  'ProjectMemberInvite',
];

/**
 * Validates that every name in `dataModels` is defined in the
 * application's model scope.
 *
 * @throws {Error} when a listed model name is not defined
 * @return {void} Returns void
 */
function validateDataModels() {
  for (const modelName of dataModels) {
    if (!models[modelName]) {
      throw new Error(`Invalid model: ${modelName}`);
    }
  }
}
module.exports = { dataModels, validateDataModels };
60 changes: 60 additions & 0 deletions scripts/data/export/exportData.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import * as fs from 'fs';
import models from '../../../src/models';
import { dataModels, validateDataModels } from '../dataModels';

/**
* saves data to file
* @param {string} filePath path of file where to save data
* @param {object} data object contains loaded data for specified models
* @param {object} logger logger instance
* @return {Promise} Returns a promise
*/
function saveExportedData(filePath, data, logger) {
logger.info('Start Saving data to file....');
fs.writeFileSync(filePath, JSON.stringify(data));
logger.info('End Saving data to file....');
}
/**
* loads data from database and export it to specified file path
* @param {string} filePath path of file where to save data
* @param {object} logger logger instance
* @return {Promise} Returns a promise
*/
async function exportDatabaseToJson(filePath, logger) {
const queries = [];
for (let index = 0; index < dataModels.length; index += 1) {
const modelName = dataModels[index];
// queries.push(models[modelName].findAll({ raw: true }));
// We use direct select to ignore hooks as we want to export database as it including soft-deleted records
queries.push(
models.sequelize.query(
`SELECT * from ${models[modelName].getTableName()}`,
),
);
}
const results = await Promise.all(queries);
const allModelsRecords = {};
for (let index = 0; index < dataModels.length; index += 1) {
const modelName = dataModels[index];
const modelRecords = results[index][0];
allModelsRecords[modelName] = modelRecords;
logger.info(
`Records loaded for model: ${modelName} = ${modelRecords.length}`,
);
}

saveExportedData(filePath, allModelsRecords, logger);
}
/**
* validates data models existence, then loads their data from database, and export it to specified file path
* @param {string} filePath path of file where to save data
* @param {object} logger logger instance
* @return {Promise} Returns a promise
*/
async function exportData(filePath, logger) {
validateDataModels();
await exportDatabaseToJson(filePath, logger);
}
module.exports = {
exportData,
};
60 changes: 60 additions & 0 deletions scripts/data/export/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import * as fs from 'fs';
import * as path from 'path';
import * as readline from 'readline';
import Promise from 'bluebird';
import util from '../../../src/util';
import { exportData } from './exportData';

/**
 * Runs the export and terminates the process with a matching exit code.
 * @param {string} filePath path of file where to save data
 * @param {object} logger logger instance
 * @return {void} Returns void (the process exits when the export settles)
 */
function runExportData(filePath, logger) {
  exportData(filePath, logger)
    .then(() => {
      logger.info('Successfully exported data');
      process.exit(0);
    })
    .catch((err) => {
      logger.error('Failed to export data, ERROR:', err.message || err);
      process.exit(1);
    });
}

const logger = util.getScriptsLogger();
// optional "--file <path>" CLI argument; falls back to the demo data file
const hasFileArg = process.argv[2] === '--file' && process.argv[3];
const filePath = hasFileArg ? process.argv[3] : 'data/demo-data.json';
logger.info('Script will export data to file:', filePath);
if (!fs.existsSync(filePath)) {
  // the target file does not exist yet: make sure its directory does
  const baseDir = path.resolve(filePath, '..');
  util.mkdirSyncRecursive(baseDir);
  runExportData(filePath, logger);
} else {
  // The prompt is delayed slightly so it does not interleave with a
  // warning message the sequelize module prints on startup.
  Promise.delay(1).then(() => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    // ask for explicit confirmation before overwriting the existing file
    rl.question(
      'File already exists, Are you sure to overwrite it? [Y] to overwrite: ',
      (answer) => {
        rl.close();
        if (answer.toLowerCase() !== 'y') {
          logger.info('Exit without exporting any data');
          process.exit(0);
        } else {
          logger.info('File will be overwritten.');
          runExportData(filePath, logger);
        }
      },
    );
  });
}
135 changes: 135 additions & 0 deletions scripts/data/import/importData.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
import * as fs from 'fs';
import models from '../../../src/models';
import { dataModels, validateDataModels } from '../dataModels';
import { indexMetadata, indexProjectsRange } from '../../../src/utils/es';

/**
 * Imports data from a JSON file into the database inside a single
 * transaction; every insert is rolled back if any model fails.
 *
 * Models are imported strictly in `dataModels` order (dependencies
 * first) so foreign-key constraints are satisfied. After a model's
 * records are inserted, each autoincrement column's sequence is advanced
 * to the max value present in the table, so later API inserts that omit
 * the id keep working.
 *
 * @param {string} filePath path of the JSON file to read data from
 * @param {object} logger logger instance
 * @return {Promise} Returns a promise
 * @throws {Error} with model name and validation details on sequelize
 *                 validation errors; otherwise rethrows the original error
 */
async function writeDataToDatabase(filePath, logger) {
  let transaction = null;
  let currentModelName = null;
  try {
    // Start a transaction
    transaction = await models.sequelize.transaction();
    const jsonData = JSON.parse(fs.readFileSync(filePath).toString());
    // we disable no-await-in-loop because we need to run insert operations sequentially to avoid FK constraints errors
    /* eslint-disable no-await-in-loop */
    for (let index = 0; index < dataModels.length; index += 1) {
      const modelName = dataModels[index];
      currentModelName = modelName;
      const model = models[modelName];
      const modelRecords = jsonData[modelName];
      if (modelRecords && modelRecords.length > 0) {
        logger.info(`Importing data for model: ${modelName}`);
        await model.bulkCreate(modelRecords, {
          transaction,
        });
        logger.info(
          `Records imported for model: ${modelName} = ${modelRecords.length}`,
        );

        // Set autoincrement sequencers in the database to be set to max of the autoincrement column,
        // so that, when next insertions don't provide value of autoincrement column, as in case of using APIs,
        // it should be set automatically based on last value of sequencers.
        const modelAttributes = Object.keys(model.rawAttributes);
        const tableName = model.getTableName();
        for (
          let attributeIndex = 0;
          attributeIndex < modelAttributes.length;
          attributeIndex += 1
        ) {
          const field = modelAttributes[attributeIndex];
          const fieldInfo = model.rawAttributes[field];
          if (fieldInfo.autoIncrement) {
            // Get sequence name corresponding to the autoincrement column in the table
            const selectSequenceQuery = `SELECT pg_get_serial_sequence('${tableName}', '${field}')`;
            const sequenceName = (
              await models.sequelize.query(selectSequenceQuery, {
                transaction,
              })
            )[0][0].pg_get_serial_sequence;

            // update sequence value to be set to max value of the autoincrement column in the table
            const query = `SELECT setval('${sequenceName}', (SELECT MAX(${field}) FROM ${tableName}))`;
            const setValue = (
              await models.sequelize.query(query, {
                transaction,
              })
            )[0][0].setval;
            logger.debug(
              `Updated autoIncrement for ${modelName}.${field} with max value = ${setValue}`,
            );
          }
        }
      } else {
        logger.info(`No records to import for model: ${modelName}`);
      }
    }
    // commit transaction only if all things went ok
    logger.info('committing transaction to database...');
    await transaction.commit();
  } catch (error) {
    logger.error('Error while writing data of model:', currentModelName);
    // rollback all insert operations
    if (transaction) {
      logger.info('rollback database transaction...');
      try {
        // FIX: rollback was previously fire-and-forget; a rejected rollback
        // promise would surface as an unhandled rejection. Await it, but
        // never let a rollback failure mask the original import error.
        await transaction.rollback();
      } catch (rollbackError) {
        logger.error(
          'Failed to rollback transaction:',
          rollbackError.message || rollbackError,
        );
      }
    }
    if (error.name && error.errors && error.fields) {
      // For sequelize validation errors, we throw only fields with data that helps in debugging error,
      // because the error object has many fields that contains very big sql query for the insert bulk operation
      throw new Error(
        JSON.stringify({
          modelName: currentModelName,
          name: error.name,
          errors: error.errors,
          fields: error.fields,
        }),
      );
    } else {
      throw error;
    }
  }
}

/**
 * Indexes the imported data into Elasticsearch: first the metadata
 * index, then all projects. The full possible project-id range is
 * requested so every imported project is picked up.
 *
 * @param {object} logger logger instance
 * @return {Promise} Returns a promise
 */
async function indexDataToES(logger) {
  // FIX: log message typo — was 'Indexing metatdata...'
  logger.info('Indexing metadata...');
  await indexMetadata();

  logger.info('Indexing projects data...');
  // fake request object in the shape indexProjectsRange expects;
  // the id range covers every possible project id
  const req = {
    logger,
    projectIdStart: 1,
    projectIdEnd: Number.MAX_SAFE_INTEGER,
    indexName: null,
    docType: null,
    fields: null,
    id: 0,
  };
  await indexProjectsRange(req);
}

/**
* import data from json file to database and index it to Elasticsearch
* @param {string} filePath path of file where to save data
* @param {object} logger logger instance
* @return {Promise} Returns a promise
*/
async function importData(filePath, logger) {
validateDataModels(logger);
await writeDataToDatabase(filePath, logger);
await indexDataToES(logger);
}
module.exports = {
importData,
};
22 changes: 22 additions & 0 deletions scripts/data/import/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import * as fs from 'fs';
import util from '../../../src/util';
import { importData } from './importData';

// CLI entry point: imports demo/exported data from a JSON file into the
// database and Elasticsearch. Accepts an optional "--file <path>"
// argument; defaults to the bundled demo data file.
const logger = util.getScriptsLogger();
const filePath = (process.argv[2] === '--file' && process.argv[3]) ? process.argv[3] : 'data/demo-data.json';
// check if file exists
if (!fs.existsSync(filePath)) {
  // FIX: error message grammar — was 'File is not existing:'
  logger.error('File does not exist:', filePath);
  process.exit(1);
} else {
  logger.info('Script will import data from file:', filePath);
  importData(filePath, logger)
    .then(() => {
      logger.info('Successfully imported data');
      process.exit(0);
    })
    .catch((err) => {
      logger.error('Failed to import data, ERROR:', err.message || err);
      process.exit(1);
    });
}
Loading