From 06d13414ee467688cc58ea5ae951b8ad1ef356e7 Mon Sep 17 00:00:00 2001
From: xxcxy
Date: Wed, 14 Apr 2021 19:26:31 +0800
Subject: [PATCH 1/2] Recollect dumpDbToEs.js script

---
 scripts/db/dumpDbToEs.js | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/scripts/db/dumpDbToEs.js b/scripts/db/dumpDbToEs.js
index 4f6147c..2f48c5b 100644
--- a/scripts/db/dumpDbToEs.js
+++ b/scripts/db/dumpDbToEs.js
@@ -391,14 +391,12 @@ async function main () {
 
   for (let i = 0; i < keys.length; i++) {
     const key = keys[i]
+    const queryPage = { perPage: parseInt(config.get('ES.MAX_BATCH_SIZE')), page: 1 }
     try {
-      const allData = await dbHelper.find(models[key], {})
-      let j = 0
-      const dataset = _.chunk(allData, config.get('ES.MAX_BATCH_SIZE'))
-      for (const data of dataset) {
+      while (true) {
+        const data = await dbHelper.find(models[key], { ...queryPage })
         for (let i = 0; i < data.length; i++) {
-          j++
-          logger.info(`Inserting data ${j} of ${allData.length}`)
+          logger.info(`Inserting data ${i + 1} of ${data.length}`)
           logger.info(JSON.stringify(data[i]))
           if (!_.isString(data[i].created)) {
             data[i].created = new Date()
@@ -414,14 +412,18 @@
           }
         }
         await insertIntoES(key, data)
+        logger.info('import data for ' + key + ' done')
+        if (data.length < queryPage.perPage) {
+          break
+        } else {
+          queryPage.page = queryPage.page + 1
+        }
       }
-      logger.info('import data for ' + key + ' done')
     } catch (e) {
       logger.error(e)
       logger.warn('import data for ' + key + ' failed')
       continue
     }
-
     try {
       await createAndExecuteEnrichPolicy(key)
       logger.info('create and execute enrich policy for ' + key + ' done')

From 1d01022a19ce11f256ccd266564a2959a92408ef Mon Sep 17 00:00:00 2001
From: Mithun Kamath
Date: Wed, 14 Apr 2021 18:39:07 +0530
Subject: [PATCH 2/2] #90 - misc

---
 config/default.js        | 2 +-
 scripts/db/dumpDbToEs.js | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/config/default.js b/config/default.js
index 0766f4d..1df223b 100755
--- a/config/default.js
+++ b/config/default.js
@@ -126,7 +126,7 @@ module.exports = {
         orgField: process.env.ORGANIZATION_SKILLPROVIDER_PROPERTY_NAME || 'skillProviders'
       }
     },
-    MAX_BATCH_SIZE: parseInt(process.env.MAX_RESULT_SIZE, 10) || 10000,
+    MAX_BATCH_SIZE: parseInt(process.env.MAX_BATCH_SIZE, 10) || 10000,
     MAX_RESULT_SIZE: parseInt(process.env.MAX_RESULT_SIZE, 10) || 1000,
     MAX_BULK_SIZE: parseInt(process.env.MAX_BULK_SIZE, 10) || 100
   }

diff --git a/scripts/db/dumpDbToEs.js b/scripts/db/dumpDbToEs.js
index 2f48c5b..6f3d85d 100644
--- a/scripts/db/dumpDbToEs.js
+++ b/scripts/db/dumpDbToEs.js
@@ -391,12 +391,12 @@ async function main () {
 
   for (let i = 0; i < keys.length; i++) {
     const key = keys[i]
-    const queryPage = { perPage: parseInt(config.get('ES.MAX_BATCH_SIZE')), page: 1 }
+    const queryPage = { perPage: parseInt(config.get('ES.MAX_BATCH_SIZE'), 10), page: 1 }
     try {
       while (true) {
         const data = await dbHelper.find(models[key], { ...queryPage })
         for (let i = 0; i < data.length; i++) {
-          logger.info(`Inserting data ${i + 1} of ${data.length}`)
+          logger.info(`Inserting data ${(i + 1) + (queryPage.perPage * (queryPage.page - 1))}`)
           logger.info(JSON.stringify(data[i]))
           if (!_.isString(data[i].created)) {
             data[i].created = new Date()
@@ -412,8 +412,8 @@
           }
         }
         await insertIntoES(key, data)
-        logger.info('import data for ' + key + ' done')
         if (data.length < queryPage.perPage) {
+          logger.info('import data for ' + key + ' done')
           break
         } else {
           queryPage.page = queryPage.page + 1
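
Taken together, the two patches replace the previous load-everything-then-_.chunk approach with page-by-page reads, so the script's memory use is bounded by ES.MAX_BATCH_SIZE rather than by the full table size. Below is a minimal sketch of the resulting loop, pulled out of main () for readability: config, logger, dbHelper, models, and insertIntoES are the script's own helpers, the importModel wrapper name is hypothetical, and dbHelper.find is assumed to return at most perPage records for the given 1-based page.

  // Sketch of the paging pattern introduced by PATCH 1/2 and refined by
  // PATCH 2/2; importModel is a hypothetical wrapper, not in the script.
  async function importModel (key) {
    const queryPage = { perPage: parseInt(config.get('ES.MAX_BATCH_SIZE'), 10), page: 1 }
    while (true) {
      // Assumed contract: at most perPage records per 1-based page
      const data = await dbHelper.find(models[key], { ...queryPage })
      for (let i = 0; i < data.length; i++) {
        // Running record index across pages, as logged after PATCH 2/2
        logger.info(`Inserting data ${(i + 1) + (queryPage.perPage * (queryPage.page - 1))}`)
      }
      await insertIntoES(key, data)
      if (data.length < queryPage.perPage) {
        // A short page means this model is exhausted
        logger.info('import data for ' + key + ' done')
        break
      }
      queryPage.page = queryPage.page + 1
    }
  }

One reviewable consequence of this shape: when a model's record count is an exact multiple of perPage, the loop issues one extra query that returns an empty page (and one empty insertIntoES call) before breaking, which should be harmless assuming insertIntoES tolerates an empty batch, but it is visible in the logs.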
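
The one-line config/default.js change in PATCH 2/2 is easy to miss but is the functional fix here: before it, ES.MAX_BATCH_SIZE was populated from the MAX_RESULT_SIZE environment variable, so setting MAX_BATCH_SIZE in the environment had no effect. A quick hypothetical check, assuming the repo's usual node-config setup (config/default.js reads process.env when the config module is first required):

  // Hypothetical verification snippet, not part of the patches.
  process.env.MAX_BATCH_SIZE = '500'
  const config = require('config')
  // With PATCH 2/2 applied this prints 500; without it, the env var is
  // ignored and the value falls back to MAX_RESULT_SIZE or the 10000 default.
  console.log(config.get('ES.MAX_BATCH_SIZE'))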