@@ -266,7 +266,7 @@ internal async Task<object[]> GetEntityBatchAsync(IEntityPersister persister, ob
 			foreach (var key in set)
 			{
 				cancellationToken.ThrowIfCancellationRequested();
-				if (await (ProcessKeyAsync(key)).ConfigureAwait(false))
+				if (ProcessKey(key) ?? await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false))
 				{
 					return ids;
 				}
@@ -299,7 +299,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				{
 					for (var j = 0; j < entityKeys.Count; j++)
 					{
-						if (await (ProcessKeyAsync(entityKeys[indexes[j]].Key)).ConfigureAwait(false))
+						if (ProcessKey(entityKeys[indexes[j]].Key) == true)
 						{
 							return true;
 						}
@@ -310,7 +310,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 					var results = await (AreCachedAsync(entityKeys, indexes, persister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false);
 					for (var j = 0; j < results.Length; j++)
 					{
-						if (!results[j] && ProcessKey(entityKeys[indexes[j]].Key, true) == true)
+						if (!results[j] && ProcessKey(entityKeys[indexes[j]].Key, true) == true)
 						{
 							return true;
 						}
@@ -324,62 +324,55 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				return false;
 			}
 
-			Task<bool> ProcessKeyAsync(EntityKey key, bool ignoreCache = false)
+			bool? ProcessKey(EntityKey key, bool ignoreCache = false)
 			{
-				try
+				//TODO: this needn't exclude subclasses...
+				if (checkForEnd && (index == set.Count || index >= idIndex.Value + batchSize))
 				{
-					//TODO: this needn't exclude subclasses...
-					if (checkForEnd && (index == set.Count || index >= idIndex.Value + batchSize))
-					{
-						return Task.FromResult<bool>(true);
-					}
-					if (persister.IdentifierType.IsEqual(id, key.Identifier))
-					{
-						idIndex = index;
-					}
-					else if (!checkCache || batchableCache == null)
-					{
-						if (index < set.Count && (!idIndex.HasValue || index < idIndex.Value))
-						{
-							entityKeys.Add(new KeyValuePair<EntityKey, int>(key, index));
-							return Task.FromResult<bool>(false);
-						}
-
-						// No need to check "!checkCache || !IsCached(key, persister)": "batchableCache == null"
-						// already means there is no cache, so IsCached can only yield false. (This method is now
-						// removed.)
-						ids[i++] = key.Identifier;
-					}
-					else if (ignoreCache)
-					{
-						ids[i++] = key.Identifier;
-					}
-					else
+					return true;
+				}
+				if (persister.IdentifierType.IsEqual(id, key.Identifier))
+				{
+					idIndex = index;
+				}
+				else if (!checkCache || batchableCache == null)
+				{
+					if (index < set.Count && (!idIndex.HasValue || index < idIndex.Value))
 					{
 						entityKeys.Add(new KeyValuePair<EntityKey, int>(key, index));
-						// Check the cache only when we have collected as many keys as are needed to fill the batch,
-						// that are after the demanded key.
-						if (!idIndex.HasValue || index < idIndex.Value + batchSize)
-						{
-							return Task.FromResult<bool>(false);
-						}
-						return CheckCacheAndProcessResultAsync();
+						return false;
 					}
-					if (i == batchSize)
+
+					// No need to check "!checkCache || !IsCached(key, persister)": "batchableCache == null"
+					// already means there is no cache, so IsCached can only yield false. (This method is now
+					// removed.)
+					ids[i++] = key.Identifier;
+				}
+				else if (ignoreCache)
+				{
+					ids[i++] = key.Identifier;
+				}
+				else
+				{
+					entityKeys.Add(new KeyValuePair<EntityKey, int>(key, index));
+					// Check the cache only when we have collected as many keys as are needed to fill the batch,
+					// that are after the demanded key.
+					if (!idIndex.HasValue || index < idIndex.Value + batchSize)
 					{
-						i = 1; // End of array, start filling again from start
-						if (index == set.Count || idIndex.HasValue)
-						{
-							checkForEnd = true;
-							return Task.FromResult<bool>(index == set.Count || index >= idIndex.Value + batchSize);
-						}
+						return false;
 					}
-					return Task.FromResult<bool>(false);
+					return null;
 				}
-				catch (Exception ex)
+				if (i == batchSize)
 				{
-					return Task.FromException<bool>(ex);
+					i = 1; // End of array, start filling again from start
+					if (index == set.Count || idIndex.HasValue)
+					{
+						checkForEnd = true;
+						return index == set.Count || index >= idIndex.Value + batchSize;
+					}
 				}
+				return false;
 			}
 		}
 
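Note on the shape of this change: the old `Task<bool> ProcessKeyAsync` only ever completed synchronously (wrapping results in `Task.FromResult` and exceptions in `Task.FromException`), so it becomes a plain synchronous `bool? ProcessKey` where `null` means "a cache check is still needed", and the call site reaches the async `CheckCacheAndProcessResultAsync` only in that case via the null-coalescing operator. The snippet below is a minimal, self-contained sketch of that pattern with hypothetical stand-in methods and data, not the NHibernate implementation itself.

```csharp
// Minimal sketch of the bool?-plus-?? pattern used in this diff.
// All names and data here are hypothetical stand-ins, not the NHibernate API.
using System;
using System.Threading.Tasks;

static class NullableBoolFastPathSketch
{
	static async Task Main()
	{
		foreach (var key in new[] { 1, 2, 3, 4 })
		{
			// Same shape as the changed call site: ProcessKey answers the cheap,
			// synchronous cases itself; the async check only runs when it
			// returns null, thanks to the null-coalescing operator.
			if (ProcessKey(key) ?? await CheckCacheAndProcessResultAsync().ConfigureAwait(false))
			{
				Console.WriteLine($"Batch complete at key {key}.");
				return;
			}
		}
	}

	// true  -> the batch is complete, stop iterating
	// false -> keep collecting keys
	// null  -> cannot decide synchronously, a cache lookup is required
	static bool? ProcessKey(int key, bool ignoreCache = false)
	{
		if (key % 2 == 0)
			return false;   // keep going
		if (ignoreCache)
			return true;    // pretend the batch just filled up
		return null;        // defer to the asynchronous check
	}

	static Task<bool> CheckCacheAndProcessResultAsync()
	{
		// Stand-in for the real second-level cache probe.
		return Task.FromResult(false);
	}
}
```

In this sketch the synchronous method carries the three-way answer, and `?? await ...` confines the asynchronous work to the single branch that genuinely needs it.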