@@ -391,14 +391,12 @@ async function main () {
   for (let i = 0; i < keys.length; i++) {
     const key = keys[i]
+    const queryPage = { perPage: parseInt(config.get('ES.MAX_BATCH_SIZE'), 10), page: 1 }
     try {
-      const allData = await dbHelper.find(models[key], {})
-      let j = 0
-      const dataset = _.chunk(allData, config.get('ES.MAX_BATCH_SIZE'))
-      for (const data of dataset) {
+      while (true) {
+        const data = await dbHelper.find(models[key], { ...queryPage })
         for (let i = 0; i < data.length; i++) {
-          j++
-          logger.info(`Inserting data ${j} of ${allData.length}`)
+          logger.info(`Inserting data ${(i + 1) + (queryPage.perPage * (queryPage.page - 1))}`)
           logger.info(JSON.stringify(data[i]))
           if (!_.isString(data[i].created)) {
             data[i].created = new Date()
@@ -414,14 +412,18 @@ async function main () {
           }
         }
         await insertIntoES(key, data)
+        if (data.length < queryPage.perPage) {
+          logger.info('import data for ' + key + ' done')
+          break
+        } else {
+          queryPage.page = queryPage.page + 1
+        }
       }
-      logger.info('import data for ' + key + ' done')
     } catch (e) {
       logger.error(e)
       logger.warn('import data for ' + key + ' failed')
       continue
     }
-
     try {
       await createAndExecuteEnrichPolicy(key)
       logger.info('create and execute enrich policy for ' + key + ' done')
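The change above replaces a single full-table dbHelper.find(models[key], {}) call (plus _.chunk batching) with page-by-page fetching: each page is indexed, and the loop stops once a page returns fewer rows than perPage. A minimal, self-contained sketch of that pattern follows; fetchPage and indexBatch are hypothetical stand-ins for the project's dbHelper.find and insertIntoES helpers, not the actual API.

// Sketch of the paginated import loop, under the assumptions noted above.
async function importAll (fetchPage, indexBatch, perPage) {
  const queryPage = { perPage, page: 1 }
  while (true) {
    const data = await fetchPage({ ...queryPage })  // fetch one page of records
    await indexBatch(data)                          // index this page
    if (data.length < queryPage.perPage) break      // short page means no more data
    queryPage.page += 1                             // otherwise move to the next page
  }
}

// Example usage with an in-memory source so the sketch runs on its own.
const rows = Array.from({ length: 7 }, (_, i) => ({ id: i + 1 }))
const fetchPage = async ({ perPage, page }) => rows.slice((page - 1) * perPage, page * perPage)
const indexBatch = async (batch) => console.log('indexed ids:', batch.map(r => r.id))
importAll(fetchPage, indexBatch, 3)

One consequence of checking data.length after indexing, as in the diff, is that when the row count is an exact multiple of perPage the final iteration still issues one call with an empty page before breaking.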