From 542245c7858cc9b89308d7681a7f6cd3d4ea7f46 Mon Sep 17 00:00:00 2001 From: maca88 Date: Thu, 29 Mar 2018 18:47:04 +0200 Subject: [PATCH 1/8] Added support for batching cache get operation --- src/AsyncGenerator.yml | 7 +- .../Async/CacheTest/BatchableCacheFixture.cs | 519 ++++++++++++++++++ .../Async/CacheTest/Caches/BatchableCache.cs | 118 ++++ .../CacheTest/BatchableCacheFixture.cs | 507 +++++++++++++++++ .../CacheTest/Caches/BatchableCache.cs | 103 ++++ .../Caches/BatchableCacheProvider.cs | 34 ++ src/NHibernate.Test/CacheTest/ReadOnly.cs | 25 + .../CacheTest/ReadOnly.hbm.xml | 30 + src/NHibernate.Test/CacheTest/ReadWrite.cs | 20 + .../CacheTest/ReadWrite.hbm.xml | 30 + .../Async/Cache/IBatchableReadCache.cs | 29 + .../Async/Cache/ICacheConcurrencyStrategy.cs | 38 +- .../Async/Cache/NonstrictReadWriteCache.cs | 31 ++ src/NHibernate/Async/Cache/ReadOnlyCache.cs | 31 ++ src/NHibernate/Async/Cache/ReadWriteCache.cs | 48 ++ .../Async/Cache/UpdateTimestampsCache.cs | 40 +- .../Async/Engine/BatchFetchQueue.cs | 434 ++++++++++++--- ...efaultInitializeCollectionEventListener.cs | 86 ++- .../Event/Default/DefaultLoadEventListener.cs | 91 ++- src/NHibernate/Cache/IBatchableReadCache.cs | 27 + .../Cache/ICacheConcurrencyStrategy.cs | 42 +- .../Cache/NonstrictReadWriteCache.cs | 31 +- src/NHibernate/Cache/ReadOnlyCache.cs | 31 +- src/NHibernate/Cache/ReadWriteCache.cs | 48 +- src/NHibernate/Cache/UpdateTimestampsCache.cs | 75 ++- src/NHibernate/Engine/BatchFetchQueue.cs | 464 ++++++++++++++-- ...efaultInitializeCollectionEventListener.cs | 85 ++- .../Event/Default/DefaultLoadEventListener.cs | 53 +- .../Entity/AbstractEntityPersister.cs | 2 + .../Persister/Entity/IEntityPersister.cs | 21 + 30 files changed, 2842 insertions(+), 258 deletions(-) create mode 100644 src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs create mode 100644 src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs create mode 100644 
src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs create mode 100644 src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs create mode 100644 src/NHibernate.Test/CacheTest/Caches/BatchableCacheProvider.cs create mode 100644 src/NHibernate.Test/CacheTest/ReadOnly.cs create mode 100644 src/NHibernate.Test/CacheTest/ReadOnly.hbm.xml create mode 100644 src/NHibernate.Test/CacheTest/ReadWrite.cs create mode 100644 src/NHibernate.Test/CacheTest/ReadWrite.hbm.xml create mode 100644 src/NHibernate/Async/Cache/IBatchableReadCache.cs create mode 100644 src/NHibernate/Cache/IBatchableReadCache.cs diff --git a/src/AsyncGenerator.yml b/src/AsyncGenerator.yml index 15df51734d0..55d84dbc7e2 100644 --- a/src/AsyncGenerator.yml +++ b/src/AsyncGenerator.yml @@ -107,7 +107,7 @@ - conversion: ToAsync rule: EventListener - conversion: ToAsync - rule: ICache + rule: Cache typeConversion: - conversion: Ignore name: EnumerableImpl @@ -134,7 +134,7 @@ - parameter: Required requiresCancellationToken: - rule: EventListener - - rule: ICache + - rule: Cache scanMethodBody: true scanForMissingAsyncMembers: - all: true @@ -258,7 +258,8 @@ methodRules: name: Lock - containingType: NHibernate.Cache.ICache name: Unlock - name: ICache + - containingType: NHibernate.Cache.IBatchableReadCache + name: Cache - filters: - containingNamespace: NHibernate - containingType: NHibernate.Tool.hbm2ddl.SchemaUpdate diff --git a/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs b/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs new file mode 100644 index 00000000000..a4efe7439d3 --- /dev/null +++ b/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs @@ -0,0 +1,519 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; +using NHibernate.Cache; +using NHibernate.Cfg; +using NHibernate.DomainModel; +using NHibernate.Test.CacheTest.Caches; +using NUnit.Framework; +using Environment = NHibernate.Cfg.Environment; +using NHibernate.Linq; + +namespace NHibernate.Test.CacheTest +{ + using System.Threading; + [TestFixture] + public class BatchableCacheFixtureAsync : TestCase + { + protected override IList Mappings => new[] + { + "CacheTest.ReadOnly.hbm.xml", + "CacheTest.ReadWrite.hbm.xml" + }; + + protected override string MappingsAssembly => "NHibernate.Test"; + + protected override void Configure(Configuration configuration) + { + configuration.SetProperty(Environment.UseSecondLevelCache, "true"); + configuration.SetProperty(Environment.UseQueryCache, "true"); + configuration.SetProperty(Environment.CacheProvider, typeof(BatchableCacheProvider).AssemblyQualifiedName); + } + + protected override void OnSetUp() + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var totalItems = 6; + for (var i = 1; i <= totalItems; i++) + { + var parent = new ReadOnly + { + Name = $"Name{i}" + }; + for (var j = 1; j <= totalItems; j++) + { + var child = new ReadOnlyItem + { + Parent = parent + }; + parent.Items.Add(child); + } + s.Save(parent); + } + tx.Commit(); + } + } + + protected override void OnTearDown() + { + using (var s = OpenSession()) + using (var tx = s.BeginTransaction()) + { + s.Delete("from ReadOnly"); + s.Flush(); + tx.Commit(); + } + } + + [Test] + public async Task MultipleGetReadOnlyCollectionTestAsync() + { + var persister = Sfi.GetCollectionPersister($"{typeof(ReadOnly).FullName}.Items"); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); 
+ var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = await (s.Query().ToListAsync()); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + await (tx.CommitAsync()); + } + + // Batch size 5 + var testCases = new List>> + { + // When the cache is empty, GetMultiple will be called two times. One time in type + // DefaultInitializeCollectionEventListener and the other time in BatchingCollectionInitializer. + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3, 4}, // triggered by InitializeCollectionFromCache method of DefaultInitializeCollectionEventListener type + new[] {1, 2, 3, 4, 5}, // triggered by Initialize method of BatchingCollectionInitializer type + }, + null + ), + // When there are not enough uninitialized collections after the demanded one to fill the batch, + // the nearest before the demanded collection are added. + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2, 1}, + new[] {5, 3, 2, 1, 0}, + }, + null + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2, 1}, + new[] {4, 3, 2, 1, 0}, + }, + null + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3, 4} // 0 get assembled and no further processing is done + }, + (i) => i % 2 == 0 // Cache all even indexes before loading + ), + new Tuple>( + 1, + new int[][] + { + new[] {1, 2, 3, 4, 5}, // 2 and 4 get assembled inside InitializeCollectionFromCache + new[] {3, 5, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2, 1}, // 4 and 2 get assembled inside InitializeCollectionFromCache + new[] {3, 1, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3, 4}, // 1 and 3 get assembled inside InitializeCollectionFromCache + new[] {2, 4, 5} + }, + (i) => i % 2 != 0 + ), + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2, 1}, // 5, 3 and 1 get assembled inside 
InitializeCollectionFromCache + new[] {2, 0} + }, + (i) => i % 2 != 0 + ) + }; + + foreach (var tuple in testCases) + { + await (AssertMultipleGetCollectionCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3)); + } + } + + [Test] + public async Task MultipleGetReadOnlyTestAsync() + { + var persister = Sfi.GetEntityPersister(typeof(ReadOnly).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = await (s.Query().ToListAsync()); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + await (tx.CommitAsync()); + } + // Batch size 3 + var parentTestCases = new List>> + { + // When the cache is empty, GetMultiple will be called two times. One time in type + // DefaultLoadEventListener and the other time in BatchingEntityLoader. + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type + new[] {1, 2, 3}, // triggered by Load method of BatchingEntityLoader type + }, + null + ), + // When there are not enough uninitialized entities after the demanded one to fill the batch, + // the nearest before the demanded entity are added. 
+ new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3}, + new[] {5, 3, 2}, + }, + null + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3}, + new[] {4, 3, 2}, + }, + null + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2} // 0 get assembled and no further processing is done + }, + (i) => i % 2 == 0 // Cache all even indexes before loading + ), + new Tuple>( + 1, + new int[][] + { + new[] {1, 2, 3}, // 2 gets assembled inside LoadFromSecondLevelCache + new[] {3, 4, 5} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3}, // 4 gets assembled inside LoadFromSecondLevelCache + new[] {3, 2, 1} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2}, // 1 gets assembled inside LoadFromSecondLevelCache + new[] {2, 3, 4} + }, + (i) => i % 2 != 0 + ), + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3}, // 5 and 3 get assembled inside LoadFromSecondLevelCache + new[] {2, 1, 0} + }, + (i) => i % 2 != 0 + ) + }; + + foreach (var tuple in parentTestCases) + { + await (AssertMultipleGetCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3)); + } + } + + [Test] + public async Task MultipleGetReadOnlyItemTestAsync() + { + var persister = Sfi.GetEntityPersister(typeof(ReadOnlyItem).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = await (s.Query().Take(6).ToListAsync()); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + await (tx.CommitAsync()); + } + // Batch size 4 + var parentTestCases = new List>> + { + // When the cache is empty, GetMultiple will be called two times. One time in type + // DefaultLoadEventListener and the other time in BatchingEntityLoader. 
+ new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type + new[] {1, 2, 3, 4}, // triggered by Load method of BatchingEntityLoader type + }, + null + ), + // When there are not enough uninitialized entities after the demanded one to fill the batch, + // the nearest before the demanded entity are added. + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2}, + new[] {5, 3, 2, 1}, + }, + null + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2}, + new[] {4, 3, 2, 1}, + }, + null + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3} // 0 get assembled and no further processing is done + }, + (i) => i % 2 == 0 // Cache all even indexes before loading + ), + new Tuple>( + 1, + new int[][] + { + new[] {1, 2, 3, 4}, // 2 and 4 get assembled inside LoadFromSecondLevelCache + new[] {3, 5, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2}, // 4 and 2 get assembled inside LoadFromSecondLevelCache + new[] {3, 1, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3}, // 1 and 3 get assembled inside LoadFromSecondLevelCache + new[] {2, 4, 5} + }, + (i) => i % 2 != 0 + ), + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2}, // 5 and 3 get assembled inside LoadFromSecondLevelCache + new[] {2, 1, 0} + }, + (i) => i % 2 != 0 + ) + }; + + foreach (var tuple in parentTestCases) + { + await (AssertMultipleGetCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3)); + } + } + + [Test] + public async Task UpdateTimestampsCacheTestAsync() + { + var timestamp = Sfi.UpdateTimestampsCache; + var field = typeof(UpdateTimestampsCache).GetField( + "_batchUpdateTimestamps", + BindingFlags.NonPublic | BindingFlags.Instance); + Assert.That(field, Is.Not.Null); + var cache = (BatchableCache) field.GetValue(timestamp); + Assert.That(cache, Is.Not.Null); + + using (var s = OpenSession()) + { + const string query = 
"from ReadOnly e where e.Name = :name"; + const string name = "Name1"; + await (s + .CreateQuery(query) + .SetString("name", name) + .SetCacheable(true) + .UniqueResultAsync()); + + // Run a second time, just to test the query cache + var result = await (s + .CreateQuery(query) + .SetString("name", name) + .SetCacheable(true) + .UniqueResultAsync()); + + Assert.That(result, Is.Not.Null); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(1)); + Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + } + } + + private async Task AssertMultipleGetCallsAsync(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken)) + where TEntity : CacheEntity + { + var persister = Sfi.GetEntityPersister(typeof(TEntity).FullName); + var cache = (BatchableCache) persister.Cache.Cache; + await (cache.ClearAsync(cancellationToken)); + + if (cacheBeforeLoadFn != null) + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + foreach (var id in ids.Where((o, i) => cacheBeforeLoadFn(i))) + { + await (s.GetAsync(id, cancellationToken)); + } + await (tx.CommitAsync(cancellationToken)); + } + } + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + cache.GetCalls.Clear(); + cache.GetMultipleCalls.Clear(); + + foreach (var id in ids) + { + await (s.LoadAsync(id, cancellationToken)); + } + var item = await (s.GetAsync(ids[idIndex], cancellationToken)); + Assert.That(item, Is.Not.Null); + Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + + for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) + { + Assert.That( + cache.GetMultipleCalls[i].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(fetchedIdIndexes[i].Select(o => ids[o]))); + } + + await (tx.CommitAsync(cancellationToken)); + } + } + + private async Task AssertMultipleGetCollectionCallsAsync(List 
ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken)) + { + var persister = Sfi.GetCollectionPersister($"{typeof(ReadOnly).FullName}.Items"); + var cache = (BatchableCache) persister.Cache.Cache; + await (cache.ClearAsync(cancellationToken)); + + if (cacheBeforeLoadFn != null) + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + foreach (var id in ids.Where((o, i) => cacheBeforeLoadFn(i))) + { + var item = await (s.GetAsync(id, cancellationToken)); + await (NHibernateUtil.InitializeAsync(item.Items, cancellationToken)); + } + await (tx.CommitAsync(cancellationToken)); + } + } + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + cache.GetCalls.Clear(); + cache.GetMultipleCalls.Clear(); + + foreach (var id in ids) + { + await (s.GetAsync(id, cancellationToken)); + } + var item = await (s.GetAsync(ids[idIndex], cancellationToken)); + Assert.That(item, Is.Not.Null); + await (NHibernateUtil.InitializeAsync(item.Items, cancellationToken)); + Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + + for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) + { + Assert.That( + cache.GetMultipleCalls[i].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(fetchedIdIndexes[i].Select(o => ids[o]))); + } + + await (tx.CommitAsync(cancellationToken)); + } + } + + } +} diff --git a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs new file mode 100644 index 00000000000..51b4d300e5a --- /dev/null +++ b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs @@ -0,0 +1,118 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. 
+// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using NHibernate.Cache; + +namespace NHibernate.Test.CacheTest.Caches +{ + public partial class BatchableCache : ICache, IBatchableReadCache + { + + #region ICache Members + + /// + public Task GetAsync(object key, CancellationToken cancellationToken) + { + try + { + GetCalls.Add(key); + return Task.FromResult(_hashtable[key]); + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + public Task GetMultipleAsync(object[] keys, CancellationToken cancellationToken) + { + try + { + GetMultipleCalls.Add(keys); + var result = new object[keys.Length]; + for (var i = 0; i < keys.Length; i++) + { + result[i] = _hashtable[keys[i]]; + } + return Task.FromResult(result); + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + /// + public Task PutAsync(object key, object value, CancellationToken cancellationToken) + { + try + { + _hashtable[key] = value; + return Task.CompletedTask; + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + /// + public Task RemoveAsync(object key, CancellationToken cancellationToken) + { + try + { + _hashtable.Remove(key); + return Task.CompletedTask; + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + /// + /// A cancellation token that can be used to cancel the work + public Task ClearAsync(CancellationToken cancellationToken) + { + try + { + _hashtable.Clear(); + return Task.CompletedTask; + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + /// + public Task LockAsync(object key, CancellationToken cancellationToken) + { + return Task.CompletedTask; + // local cache, so we 
use synchronization + } + + /// + public Task UnlockAsync(object key, CancellationToken cancellationToken) + { + return Task.CompletedTask; + // local cache, so we use synchronization + } + + #endregion + } +} diff --git a/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs b/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs new file mode 100644 index 00000000000..7cb567dd200 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs @@ -0,0 +1,507 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; +using NHibernate.Cache; +using NHibernate.Cfg; +using NHibernate.DomainModel; +using NHibernate.Test.CacheTest.Caches; +using NUnit.Framework; +using Environment = NHibernate.Cfg.Environment; + +namespace NHibernate.Test.CacheTest +{ + [TestFixture] + public class BatchableCacheFixture : TestCase + { + protected override IList Mappings => new[] + { + "CacheTest.ReadOnly.hbm.xml", + "CacheTest.ReadWrite.hbm.xml" + }; + + protected override string MappingsAssembly => "NHibernate.Test"; + + protected override void Configure(Configuration configuration) + { + configuration.SetProperty(Environment.UseSecondLevelCache, "true"); + configuration.SetProperty(Environment.UseQueryCache, "true"); + configuration.SetProperty(Environment.CacheProvider, typeof(BatchableCacheProvider).AssemblyQualifiedName); + } + + protected override void OnSetUp() + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var totalItems = 6; + for (var i = 1; i <= totalItems; i++) + { + var parent = new ReadOnly + { + Name = $"Name{i}" + }; + for (var j = 1; j <= totalItems; j++) + { + var child = new ReadOnlyItem + { + Parent = parent + }; + parent.Items.Add(child); + } + s.Save(parent); + } + tx.Commit(); + } + } + + protected override void OnTearDown() + { + using (var s = OpenSession()) + using (var tx = 
s.BeginTransaction()) + { + s.Delete("from ReadOnly"); + s.Flush(); + tx.Commit(); + } + } + + [Test] + public void MultipleGetReadOnlyCollectionTest() + { + var persister = Sfi.GetCollectionPersister($"{typeof(ReadOnly).FullName}.Items"); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = s.Query().ToList(); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + tx.Commit(); + } + + // Batch size 5 + var testCases = new List>> + { + // When the cache is empty, GetMultiple will be called two times. One time in type + // DefaultInitializeCollectionEventListener and the other time in BatchingCollectionInitializer. + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3, 4}, // triggered by InitializeCollectionFromCache method of DefaultInitializeCollectionEventListener type + new[] {1, 2, 3, 4, 5}, // triggered by Initialize method of BatchingCollectionInitializer type + }, + null + ), + // When there are not enough uninitialized collections after the demanded one to fill the batch, + // the nearest before the demanded collection are added. 
+ new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2, 1}, + new[] {5, 3, 2, 1, 0}, + }, + null + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2, 1}, + new[] {4, 3, 2, 1, 0}, + }, + null + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3, 4} // 0 get assembled and no further processing is done + }, + (i) => i % 2 == 0 // Cache all even indexes before loading + ), + new Tuple>( + 1, + new int[][] + { + new[] {1, 2, 3, 4, 5}, // 2 and 4 get assembled inside InitializeCollectionFromCache + new[] {3, 5, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2, 1}, // 4 and 2 get assembled inside InitializeCollectionFromCache + new[] {3, 1, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3, 4}, // 1 and 3 get assembled inside InitializeCollectionFromCache + new[] {2, 4, 5} + }, + (i) => i % 2 != 0 + ), + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2, 1}, // 5, 3 and 1 get assembled inside InitializeCollectionFromCache + new[] {2, 0} + }, + (i) => i % 2 != 0 + ) + }; + + foreach (var tuple in testCases) + { + AssertMultipleGetCollectionCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3); + } + } + + [Test] + public void MultipleGetReadOnlyTest() + { + var persister = Sfi.GetEntityPersister(typeof(ReadOnly).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = s.Query().ToList(); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + tx.Commit(); + } + // Batch size 3 + var parentTestCases = new List>> + { + // When the cache is empty, GetMultiple will be called two times. One time in type + // DefaultLoadEventListener and the other time in BatchingEntityLoader. 
+ new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type + new[] {1, 2, 3}, // triggered by Load method of BatchingEntityLoader type + }, + null + ), + // When there are not enough uninitialized entities after the demanded one to fill the batch, + // the nearest before the demanded entity are added. + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3}, + new[] {5, 3, 2}, + }, + null + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3}, + new[] {4, 3, 2}, + }, + null + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2} // 0 get assembled and no further processing is done + }, + (i) => i % 2 == 0 // Cache all even indexes before loading + ), + new Tuple>( + 1, + new int[][] + { + new[] {1, 2, 3}, // 2 gets assembled inside LoadFromSecondLevelCache + new[] {3, 4, 5} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3}, // 4 gets assembled inside LoadFromSecondLevelCache + new[] {3, 2, 1} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2}, // 1 gets assembled inside LoadFromSecondLevelCache + new[] {2, 3, 4} + }, + (i) => i % 2 != 0 + ), + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3}, // 5 and 3 get assembled inside LoadFromSecondLevelCache + new[] {2, 1, 0} + }, + (i) => i % 2 != 0 + ) + }; + + foreach (var tuple in parentTestCases) + { + AssertMultipleGetCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3); + } + } + + [Test] + public void MultipleGetReadOnlyItemTest() + { + var persister = Sfi.GetEntityPersister(typeof(ReadOnlyItem).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = s.Query().Take(6).ToList(); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + 
tx.Commit(); + } + // Batch size 4 + var parentTestCases = new List>> + { + // When the cache is empty, GetMultiple will be called two times. One time in type + // DefaultLoadEventListener and the other time in BatchingEntityLoader. + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type + new[] {1, 2, 3, 4}, // triggered by Load method of BatchingEntityLoader type + }, + null + ), + // When there are not enough uninitialized entities after the demanded one to fill the batch, + // the nearest before the demanded entity are added. + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2}, + new[] {5, 3, 2, 1}, + }, + null + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2}, + new[] {4, 3, 2, 1}, + }, + null + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3} // 0 get assembled and no further processing is done + }, + (i) => i % 2 == 0 // Cache all even indexes before loading + ), + new Tuple>( + 1, + new int[][] + { + new[] {1, 2, 3, 4}, // 2 and 4 get assembled inside LoadFromSecondLevelCache + new[] {3, 5, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 5, + new int[][] + { + new[] {5, 4, 3, 2}, // 4 and 2 get assembled inside LoadFromSecondLevelCache + new[] {3, 1, 0} + }, + (i) => i % 2 == 0 + ), + new Tuple>( + 0, + new int[][] + { + new[] {0, 1, 2, 3}, // 1 and 3 get assembled inside LoadFromSecondLevelCache + new[] {2, 4, 5} + }, + (i) => i % 2 != 0 + ), + new Tuple>( + 4, + new int[][] + { + new[] {4, 5, 3, 2}, // 5 and 3 get assembled inside LoadFromSecondLevelCache + new[] {2, 1, 0} + }, + (i) => i % 2 != 0 + ) + }; + + foreach (var tuple in parentTestCases) + { + AssertMultipleGetCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3); + } + } + + [Test] + public void UpdateTimestampsCacheTest() + { + var timestamp = Sfi.UpdateTimestampsCache; + var field = typeof(UpdateTimestampsCache).GetField( + "_batchUpdateTimestamps", + BindingFlags.NonPublic | 
BindingFlags.Instance); + Assert.That(field, Is.Not.Null); + var cache = (BatchableCache) field.GetValue(timestamp); + Assert.That(cache, Is.Not.Null); + + using (var s = OpenSession()) + { + const string query = "from ReadOnly e where e.Name = :name"; + const string name = "Name1"; + s + .CreateQuery(query) + .SetString("name", name) + .SetCacheable(true) + .UniqueResult(); + + // Run a second time, just to test the query cache + var result = s + .CreateQuery(query) + .SetString("name", name) + .SetCacheable(true) + .UniqueResult(); + + Assert.That(result, Is.Not.Null); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(1)); + Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + } + } + + private void AssertMultipleGetCalls(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null) + where TEntity : CacheEntity + { + var persister = Sfi.GetEntityPersister(typeof(TEntity).FullName); + var cache = (BatchableCache) persister.Cache.Cache; + cache.Clear(); + + if (cacheBeforeLoadFn != null) + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + foreach (var id in ids.Where((o, i) => cacheBeforeLoadFn(i))) + { + s.Get(id); + } + tx.Commit(); + } + } + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + cache.GetCalls.Clear(); + cache.GetMultipleCalls.Clear(); + + foreach (var id in ids) + { + s.Load(id); + } + var item = s.Get(ids[idIndex]); + Assert.That(item, Is.Not.Null); + Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + + for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) + { + Assert.That( + cache.GetMultipleCalls[i].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(fetchedIdIndexes[i].Select(o => ids[o]))); + } + + tx.Commit(); + } + } + + private void AssertMultipleGetCollectionCalls(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null) + { + var 
persister = Sfi.GetCollectionPersister($"{typeof(ReadOnly).FullName}.Items"); + var cache = (BatchableCache) persister.Cache.Cache; + cache.Clear(); + + if (cacheBeforeLoadFn != null) + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + foreach (var id in ids.Where((o, i) => cacheBeforeLoadFn(i))) + { + var item = s.Get(id); + NHibernateUtil.Initialize(item.Items); + } + tx.Commit(); + } + } + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + cache.GetCalls.Clear(); + cache.GetMultipleCalls.Clear(); + + foreach (var id in ids) + { + s.Get(id); + } + var item = s.Get(ids[idIndex]); + Assert.That(item, Is.Not.Null); + NHibernateUtil.Initialize(item.Items); + Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + + for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) + { + Assert.That( + cache.GetMultipleCalls[i].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(fetchedIdIndexes[i].Select(o => ids[o]))); + } + + tx.Commit(); + } + } + + } +} diff --git a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs new file mode 100644 index 00000000000..cb202520c64 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs @@ -0,0 +1,103 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using NHibernate.Cache; + +namespace NHibernate.Test.CacheTest.Caches +{ + public partial class BatchableCache : ICache, IBatchableReadCache + { + private readonly IDictionary _hashtable = new Hashtable(); + private readonly string _regionName; + + public List GetMultipleCalls { get; } = new List(); + + public List GetCalls { get; } = new List(); + + #region ICache Members + + public BatchableCache(string regionName) + { + _regionName = 
regionName; + } + + /// + public object Get(object key) + { + GetCalls.Add(key); + return _hashtable[key]; + } + + public object[] GetMultiple(object[] keys) + { + GetMultipleCalls.Add(keys); + var result = new object[keys.Length]; + for (var i = 0; i < keys.Length; i++) + { + result[i] = _hashtable[keys[i]]; + } + return result; + } + + /// + public void Put(object key, object value) + { + _hashtable[key] = value; + } + + /// + public void Remove(object key) + { + _hashtable.Remove(key); + } + + /// + public void Clear() + { + _hashtable.Clear(); + } + + /// + public void Destroy() + { + } + + /// + public void Lock(object key) + { + // local cache, so we use synchronization + } + + /// + public void Unlock(object key) + { + // local cache, so we use synchronization + } + + /// + public long NextTimestamp() + { + return Timestamper.Next(); + } + + /// + public int Timeout + { + get + { + return Timestamper.OneMs * 60000; // ie. 60 seconds + } + } + + public string RegionName + { + get { return _regionName; } + } + + #endregion + } +} diff --git a/src/NHibernate.Test/CacheTest/Caches/BatchableCacheProvider.cs b/src/NHibernate.Test/CacheTest/Caches/BatchableCacheProvider.cs new file mode 100644 index 00000000000..aef5bd5a7c6 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/Caches/BatchableCacheProvider.cs @@ -0,0 +1,34 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using NHibernate.Cache; + +namespace NHibernate.Test.CacheTest.Caches +{ + public class BatchableCacheProvider : ICacheProvider + { + #region ICacheProvider Members + + public ICache BuildCache(string regionName, IDictionary properties) + { + return new BatchableCache(regionName); + } + + public long NextTimestamp() + { + return Timestamper.Next(); + } + + public void Start(IDictionary properties) + { + } + + public void Stop() + { + } + + #endregion + } +} diff --git a/src/NHibernate.Test/CacheTest/ReadOnly.cs 
b/src/NHibernate.Test/CacheTest/ReadOnly.cs new file mode 100644 index 00000000000..c509e0cc2ec --- /dev/null +++ b/src/NHibernate.Test/CacheTest/ReadOnly.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NHibernate.Test.CacheTest +{ + public class ReadOnly : CacheEntity + { + public virtual string Name { get; set; } + + public virtual ISet Items { get; set; } = new HashSet(); + } + + public class ReadOnlyItem : CacheEntity + { + public virtual ReadOnly Parent { get; set; } + } + + public abstract class CacheEntity + { + public virtual int Id { get; protected set; } + } +} diff --git a/src/NHibernate.Test/CacheTest/ReadOnly.hbm.xml b/src/NHibernate.Test/CacheTest/ReadOnly.hbm.xml new file mode 100644 index 00000000000..10f70f3a7a4 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/ReadOnly.hbm.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/NHibernate.Test/CacheTest/ReadWrite.cs b/src/NHibernate.Test/CacheTest/ReadWrite.cs new file mode 100644 index 00000000000..f08added0e2 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/ReadWrite.cs @@ -0,0 +1,20 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace NHibernate.Test.CacheTest +{ + public class ReadWrite : CacheEntity + { + public virtual string Name { get; set; } + + public virtual ISet Items { get; set; } = new HashSet(); + } + + public class ReadWriteItem : CacheEntity + { + public virtual ReadWrite Parent { get; set; } + } +} diff --git a/src/NHibernate.Test/CacheTest/ReadWrite.hbm.xml b/src/NHibernate.Test/CacheTest/ReadWrite.hbm.xml new file mode 100644 index 00000000000..c035e40f915 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/ReadWrite.hbm.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/src/NHibernate/Async/Cache/IBatchableReadCache.cs b/src/NHibernate/Async/Cache/IBatchableReadCache.cs new file mode 100644 index 00000000000..40c2460a7cd --- /dev/null +++ b/src/NHibernate/Async/Cache/IBatchableReadCache.cs @@ -0,0 +1,29 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections.Generic; +using System.Text; + +namespace NHibernate.Cache +{ + using System.Threading.Tasks; + using System.Threading; + public partial interface IBatchableReadCache + { + /// + /// Get multiple objects from the cache. + /// + /// The keys to be retrieved from the cache. + /// A cancellation token that can be used to cancel the work + /// + Task GetMultipleAsync(object[] keys, CancellationToken cancellationToken); + } +} diff --git a/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs index 0d843204aad..08eab473bc9 100644 --- a/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs @@ -8,6 +8,7 @@ //------------------------------------------------------------------------------ +using System; using System.Collections; using NHibernate.Cache.Access; using NHibernate.Cache.Entry; @@ -121,4 +122,39 @@ public partial interface ICacheConcurrencyStrategy /// Task ClearAsync(CancellationToken cancellationToken); } -} \ No newline at end of file + + internal static partial class CacheConcurrencyStrategyExtensions + { + /// + /// Attempt to retrieve multiple objects from the Cache + /// + /// The cache concurrency strategy. + /// The keys (id) of the objects to get out of the Cache. 
+ /// A timestamp prior to the transaction start time + /// A cancellation token that can be used to cancel the work + /// An array of cached objects or + /// + //6.0 TODO: Merge into ICacheConcurrencyStrategy. + public static async Task GetMultipleAsync(this ICacheConcurrencyStrategy cache, CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + switch (cache) + { + case ReadOnlyCache readOnly: + return await (readOnly.GetMultipleAsync(keys, txTimestamp, cancellationToken)).ConfigureAwait(false); + case ReadWriteCache readWrite: + return await (readWrite.GetMultipleAsync(keys, txTimestamp, cancellationToken)).ConfigureAwait(false); + case NonstrictReadWriteCache nonstrictReadWrite: + return await (nonstrictReadWrite.GetMultipleAsync(keys, txTimestamp, cancellationToken)).ConfigureAwait(false); + } + + // Fallback to Get + var objects = new object[keys.Length]; + for (var i = 0; i < keys.Length; i++) + { + objects[i] = await (cache.GetAsync(keys[i], txTimestamp, cancellationToken)).ConfigureAwait(false); + } + return objects; + } + } +} diff --git a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs index 0fad6274303..d22ea3b3157 100644 --- a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs @@ -10,6 +10,7 @@ using System; using System.Collections; +using System.Linq; using NHibernate.Cache.Access; namespace NHibernate.Cache @@ -42,6 +43,36 @@ public async Task GetAsync(CacheKey key, long txTimestamp, CancellationT return result; } + public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + { + if (_batchableReadCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); + } + if (cancellationToken.IsCancellationRequested) + { + return 
Task.FromCanceled(cancellationToken); + } + return InternalGetMultipleAsync(); + async Task InternalGetMultipleAsync() + { + if (log.IsDebugEnabled()) + { + log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); + } + var results = await (_batchableReadCache.GetMultipleAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + if (!log.IsDebugEnabled()) + { + return results; + } + for (var i = 0; i < keys.Length; i++) + { + log.Debug(results[i] != null ? $"Cache hit: {keys[i]}" : $"Cache miss: {keys[i]}"); + } + return results; + } + } + /// /// Add an item to the cache /// diff --git a/src/NHibernate/Async/Cache/ReadOnlyCache.cs b/src/NHibernate/Async/Cache/ReadOnlyCache.cs index c7692bc6ca5..aa2cff7e871 100644 --- a/src/NHibernate/Async/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Async/Cache/ReadOnlyCache.cs @@ -10,6 +10,7 @@ using System; using System.Collections; +using System.Linq; using NHibernate.Cache.Access; namespace NHibernate.Cache @@ -30,6 +31,36 @@ public async Task GetAsync(CacheKey key, long timestamp, CancellationTok return result; } + public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + { + if (_batchableReadCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); + } + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return InternalGetMultipleAsync(); + async Task InternalGetMultipleAsync() + { + if (log.IsDebugEnabled()) + { + log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); + } + var results = await (_batchableReadCache.GetMultipleAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + if (!log.IsDebugEnabled()) + { + return results; + } + for (var i = 0; i < keys.Length; i++) + { + log.Debug(results[i] != null ? 
$"Cache hit: {keys[i]}" : $"Cache miss: {keys[i]}"); + } + return results; + } + } + /// /// Unsupported! /// diff --git a/src/NHibernate/Async/Cache/ReadWriteCache.cs b/src/NHibernate/Async/Cache/ReadWriteCache.cs index 217ac12cada..286304c3558 100644 --- a/src/NHibernate/Async/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/ReadWriteCache.cs @@ -10,6 +10,7 @@ using System; using System.Collections; +using System.Linq; using NHibernate.Cache.Access; namespace NHibernate.Cache @@ -88,6 +89,53 @@ public async Task GetAsync(CacheKey key, long txTimestamp, CancellationT } } + public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + { + if (_batchableReadCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); + } + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return InternalGetMultipleAsync(); + async Task InternalGetMultipleAsync() + { + if (log.IsDebugEnabled()) + { + log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); + } + var result = new object[keys.Length]; + using (await _lockObjectAsync.LockAsync()) + { + var lockables = await (_batchableReadCache.GetMultipleAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + for (var i = 0; i < lockables.Length; i++) + { + var lockable = (ILockable) lockables[i]; + var gettable = lockable != null && lockable.IsGettable(txTimestamp); + + if (gettable) + { + if (log.IsDebugEnabled()) + { + log.Debug("Cache hit: {0}", keys[i]); + } + result[i] = ((CachedItem) lockable).Value; + } + + if (log.IsDebugEnabled()) + { + log.Debug(lockable == null ? "Cache miss: {0}" : "Cached item was locked: {0}", keys[i]); + } + + result[i] = null; + } + } + return result; + } + } + /// /// Stop any other transactions reading or writing this item to/from /// the cache. 
Send them straight to the database instead. (The lock diff --git a/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs b/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs index 9a086f16333..5a5f5aa3725 100644 --- a/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs +++ b/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs @@ -114,35 +114,33 @@ public virtual async Task IsUpToDateAsync(ISet spaces, long timest cancellationToken.ThrowIfCancellationRequested(); using (await _isUpToDate.LockAsync()) { - foreach (string space in spaces) + if (_batchUpdateTimestamps != null) { - object lastUpdate = await (updateTimestamps.GetAsync(space, cancellationToken)).ConfigureAwait(false); - if (lastUpdate == null) + var keys = new object[spaces.Count]; + var index = 0; + foreach (var space in spaces) { - //the last update timestamp was lost from the cache - //(or there were no updates since startup!) - - //NOTE: commented out, since users found the "safe" behavior - // counter-intuitive when testing, and we couldn't deal - // with all the forum posts :-( - //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) ); - //result = false; // safer - - //OR: put a timestamp there, to avoid subsequent expensive - // lookups to a distributed cache - this is no good, since - // it is non-threadsafe (could hammer effect of an actual - // invalidation), and because this is not the way our - // preferred distributed caches work (they work by - // replication) - //updateTimestamps.put( space, new Long(Long.MIN_VALUE) ); + keys[index++] = space; } - else + var lastUpdates = await (_batchUpdateTimestamps.GetMultipleAsync(keys, cancellationToken)).ConfigureAwait(false); + foreach (var lastUpdate in lastUpdates) { - if ((long) lastUpdate >= timestamp) + if (IsOutdated(lastUpdate, timestamp)) { return false; } } + return true; + } + + foreach (string space in spaces) + { + object lastUpdate = await (updateTimestamps.GetAsync(space, cancellationToken)).ConfigureAwait(false); 
+ if (IsOutdated(lastUpdate, timestamp)) + { + return false; + } + } return true; } diff --git a/src/NHibernate/Async/Engine/BatchFetchQueue.cs b/src/NHibernate/Async/Engine/BatchFetchQueue.cs index 470619da3d5..e66d2cb18e4 100644 --- a/src/NHibernate/Async/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Async/Engine/BatchFetchQueue.cs @@ -8,6 +8,7 @@ //------------------------------------------------------------------------------ +using System; using System.Collections; using NHibernate.Cache; using NHibernate.Collection; @@ -15,6 +16,7 @@ using NHibernate.Persister.Entity; using NHibernate.Util; using System.Collections.Generic; +using System.Linq; using Iesi.Collections.Generic; namespace NHibernate.Engine @@ -32,67 +34,186 @@ public partial class BatchFetchQueue /// the maximum number of keys to return /// A cancellation token that can be used to cancel the work /// an array of collection keys, of length batchSize (padded with nulls) - public async Task GetCollectionBatchAsync(ICollectionPersister collectionPersister, object id, int batchSize, CancellationToken cancellationToken) + public Task GetCollectionBatchAsync(ICollectionPersister collectionPersister, object id, int batchSize, CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return GetCollectionBatchAsync(collectionPersister, id, batchSize, true, null, cancellationToken); + } + + /// + /// Get a batch of uninitialized collection keys for a given role + /// + /// The persister for the collection role. + /// A key that must be included in the batch fetch + /// the maximum number of keys to return + /// Whether to check the cache for uninitialized collection keys. + /// An array that will be filled with collection entries if set. 
+ /// A cancellation token that can be used to cancel the work + /// An array of collection keys, of length (padded with nulls) + internal async Task GetCollectionBatchAsync(ICollectionPersister collectionPersister, object id, int batchSize, bool checkCache, + CollectionEntry[] collectionEntries, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); - object[] keys = new object[batchSize]; - keys[0] = id; - int i = 1; - int end = -1; - bool checkForEnd = false; + var keys = new object[batchSize]; + keys[0] = id; // The first element of array is reserved for the actual instance we are loading + var i = 1; // The current index of keys array + int? keyIndex = null; // The index of the demanding key in the linked hash set + var checkForEnd = false; // Stores whether we found the demanded collection and reached the batchSize + var index = 0; // The current index of the linked hash set iteration + // List of collection entries that haven't been checked for their existance in the cache. Besides the collection entry, + // the index where the entry was found is also stored in order to correctly order the returning keys. 
+ var collectionKeys = new List, int>>(batchSize); + var batchableCache = collectionPersister.Cache?.Cache as IBatchableReadCache; + + if (!batchLoadableCollections.TryGetValue(collectionPersister.Role, out var map)) + { + return keys; + } - if (batchLoadableCollections.TryGetValue(collectionPersister.Role, out var map)) + foreach (KeyValuePair me in map) { - foreach (KeyValuePair me in map) + cancellationToken.ThrowIfCancellationRequested(); + if (await (ProcessKeyAsync(me)).ConfigureAwait(false)) { - var ce = me.Key; - var collection = me.Value; - if (ce.LoadedKey == null) - { - // the LoadedKey of the CollectionEntry might be null as it might have been reset to null - // (see for example Collections.ProcessDereferencedCollection() - // and CollectionEntry.AfterAction()) - // though we clear the queue on flush, it seems like a good idea to guard - // against potentially null LoadedKey:s - continue; - } + return keys; + } + index++; + } - if (collection.WasInitialized) + // If by the end of the iteration we haven't filled the whole array of keys to fetch, + // we have to check the remaining collection keys. + while (i != batchSize && collectionKeys.Count > 0) + { + cancellationToken.ThrowIfCancellationRequested(); + if (await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false)) + { + return keys; + } + } + + return keys; //we ran out of keys to try + + // Calls the cache to check if any of the keys is cached and continues the key processing for those + // that are not stored in the cache. + async Task CheckCacheAndProcessResultAsync() + { + var fromIndex = batchableCache != null + ? 
collectionKeys.Count - Math.Min(batchSize, collectionKeys.Count) + : 0; + var toIndex = collectionKeys.Count - 1; + var indexes = GetSortedKeyIndexes(collectionKeys, keyIndex.Value, fromIndex, toIndex); + if (batchableCache == null) + { + for (var j = 0; j < collectionKeys.Count; j++) { - log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen."); - continue; + if (await (ProcessKeyAsync(collectionKeys[indexes[j]].Key)).ConfigureAwait(false)) + { + return true; + } } - - if (checkForEnd && i == end) + } + else + { + var results = await (AreCachedAsync(collectionKeys, indexes, collectionPersister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false); + var k = toIndex; + for (var j = 0; j < results.Length; j++) { - return keys; //the first key found after the given key + if (!results[j] && await (ProcessKeyAsync(collectionKeys[indexes[j]].Key, true)).ConfigureAwait(false)) + { + return true; + } } + } - bool isEqual = collectionPersister.KeyType.IsEqual(id, ce.LoadedKey, collectionPersister.Factory); + for (var j = toIndex; j >= fromIndex; j--) + { + collectionKeys.RemoveAt(j); + } + return false; + } - if (isEqual) + async Task ProcessKeyAsync(KeyValuePair me, bool ignoreCache = false) + { + var ce = me.Key; + var collection = me.Value; + if (ce.LoadedKey == null) + { + // the LoadedKey of the CollectionEntry might be null as it might have been reset to null + // (see for example Collections.ProcessDereferencedCollection() + // and CollectionEntry.AfterAction()) + // though we clear the queue on flush, it seems like a good idea to guard + // against potentially null LoadedKey:s + return false; + } + + if (collection.WasInitialized) + { + log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen."); + return false; + } + + if (checkForEnd && (index >= keyIndex.Value + batchSize || index == map.Count)) + { + return true; + } + if (collectionPersister.KeyType.IsEqual(id, ce.LoadedKey, 
collectionPersister.Factory)) + { + if (collectionEntries != null) { - end = i; - //checkForEnd = false; + collectionEntries[0] = ce; } - else if (!await (IsCachedAsync(ce.LoadedKey, collectionPersister, cancellationToken)).ConfigureAwait(false)) + keyIndex = index; + } + else if (!checkCache || batchableCache == null) + { + if (!keyIndex.HasValue || index < keyIndex.Value) { - keys[i++] = ce.LoadedKey; - //count++; + collectionKeys.Add(new KeyValuePair, int>(me, index)); + return false; } - if (i == batchSize) + if (!checkCache || !await (IsCachedAsync(ce.LoadedKey, collectionPersister, cancellationToken)).ConfigureAwait(false)) { - i = 1; //end of array, start filling again from start - if (end != -1) + if (collectionEntries != null) { - checkForEnd = true; + collectionEntries[i] = ce; } + keys[i++] = ce.LoadedKey; + } + } + else if (ignoreCache) + { + if (collectionEntries != null) + { + collectionEntries[i] = ce; + } + keys[i++] = ce.LoadedKey; + } + else + { + collectionKeys.Add(new KeyValuePair, int>(me, index)); + // Check the cache only when we have collected as many keys as are needed to fill the batch, + // that are after the demanded key. 
+ if (!keyIndex.HasValue || index < keyIndex.Value + batchSize) + { + return false; + } + return await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false); + } + if (i == batchSize) + { + i = 1; // End of array, start filling again from start + if (keyIndex.HasValue) + { + checkForEnd = true; + return index >= keyIndex.Value + batchSize || index == map.Count; } } + return false; } - - return keys; //we ran out of keys to try } /// @@ -105,45 +226,157 @@ public async Task GetCollectionBatchAsync(ICollectionPersister collect /// The maximum number of keys to return /// A cancellation token that can be used to cancel the work /// an array of identifiers, of length batchSize (possibly padded with nulls) - public async Task GetEntityBatchAsync(IEntityPersister persister, object id, int batchSize, CancellationToken cancellationToken) + public Task GetEntityBatchAsync(IEntityPersister persister, object id, int batchSize, CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return GetEntityBatchAsync(persister, id, batchSize, true, cancellationToken); + } + + /// + /// Get a batch of unloaded identifiers for this class, using a slightly + /// complex algorithm that tries to grab keys registered immediately after + /// the given key. + /// + /// The persister for the entities being loaded. + /// The identifier of the entity currently demanding load. + /// The maximum number of keys to return + /// Whether to check the cache for uninitialized keys. 
+ /// A cancellation token that can be used to cancel the work + /// An array of identifiers, of length (possibly padded with nulls) + internal async Task GetEntityBatchAsync(IEntityPersister persister, object id, int batchSize, bool checkCache, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); - object[] ids = new object[batchSize]; - ids[0] = id; //first element of array is reserved for the actual instance we are loading! - int i = 1; - int end = -1; - bool checkForEnd = false; + var ids = new object[batchSize]; + ids[0] = id; // The first element of array is reserved for the actual instance we are loading + var i = 1; // The current index of ids array + int? idIndex = null; // The index of the demanding id in the linked hash set + var checkForEnd = false; // Stores whether we found the demanded id and reached the batchSize + var index = 0; // The current index of the linked hash set iteration + // List of entity keys that haven't been checked for their existance in the cache. Besides the entity key, + // the index where the key was found is also stored in order to correctly order the returning keys. + var entityKeys = new List>(batchSize); + var batchableCache = persister.Cache?.Cache as IBatchableReadCache; + + if (!batchLoadableEntityKeys.TryGetValue(persister.EntityName, out var set)) + { + return ids; + } - if (batchLoadableEntityKeys.TryGetValue(persister.EntityName, out var set)) + foreach (var key in set) { - foreach (var key in set) + cancellationToken.ThrowIfCancellationRequested(); + if (await (ProcessKeyAsync(key)).ConfigureAwait(false)) { - //TODO: this needn't exclude subclasses... - if (checkForEnd && i == end) - { - //the first id found after the given id - return ids; - } - if (persister.IdentifierType.IsEqual(id, key.Identifier)) + return ids; + } + index++; + } + + // If by the end of the iteration we haven't filled the whole array of ids to fetch, + // we have to check the remaining entity keys. 
+ while (i != batchSize && entityKeys.Count > 0) + { + cancellationToken.ThrowIfCancellationRequested(); + if (await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false)) + { + return ids; + } + } + + return ids; + + // Calls the cache to check if any of the keys is cached and continues the key processing for those + // that are not stored in the cache. + async Task CheckCacheAndProcessResultAsync() + { + var fromIndex = batchableCache != null + ? entityKeys.Count - Math.Min(batchSize, entityKeys.Count) + : 0; + var toIndex = entityKeys.Count - 1; + var indexes = GetSortedKeyIndexes(entityKeys, idIndex.Value, fromIndex, toIndex); + if (batchableCache == null) + { + for (var j = 0; j < entityKeys.Count; j++) { - end = i; + if (await (ProcessKeyAsync(entityKeys[indexes[j]].Key)).ConfigureAwait(false)) + { + return true; + } } - else + } + else + { + var results = await (AreCachedAsync(entityKeys, indexes, persister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false); + var k = toIndex; + for (var j = 0; j < results.Length; j++) { - if (!await (IsCachedAsync(key, persister, cancellationToken)).ConfigureAwait(false)) + if (!results[j] && await (ProcessKeyAsync(entityKeys[indexes[j]].Key, true)).ConfigureAwait(false)) { - ids[i++] = key.Identifier; + return true; } } - if (i == batchSize) + } + + for (var j = toIndex; j >= fromIndex; j--) + { + entityKeys.RemoveAt(j); + } + return false; + } + + async Task ProcessKeyAsync(EntityKey key, bool ignoreCache = false) + { + //TODO: this needn't exclude subclasses... 
+ if (checkForEnd && (index >= idIndex.Value + batchSize || index == set.Count)) + { + return true; + } + if (persister.IdentifierType.IsEqual(id, key.Identifier)) + { + idIndex = index; + } + else if (!checkCache || batchableCache == null) + { + if (!idIndex.HasValue || index < idIndex.Value) + { + entityKeys.Add(new KeyValuePair(key, index)); + return false; + } + + if (!checkCache || !await (IsCachedAsync(key, persister, cancellationToken)).ConfigureAwait(false)) { - i = 1; //end of array, start filling again from start - if (end != -1) - checkForEnd = true; + ids[i++] = key.Identifier; } } + else if (ignoreCache) + { + ids[i++] = key.Identifier; + } + else + { + entityKeys.Add(new KeyValuePair(key, index)); + // Check the cache only when we have collected as many keys as are needed to fill the batch, + // that are after the demanded key. + if (!idIndex.HasValue || index < idIndex.Value + batchSize) + { + return false; + } + return await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false); + } + if (i == batchSize) + { + i = 1; // End of array, start filling again from start + if (idIndex.HasValue) + { + checkForEnd = true; + return index >= idIndex.Value + batchSize || index == set.Count; + } + } + return false; } - return ids; //we ran out of ids to try } private async Task IsCachedAsync(EntityKey entityKey, IEntityPersister persister, CancellationToken cancellationToken) @@ -167,5 +400,82 @@ private async Task IsCachedAsync(object collectionKey, ICollectionPersiste } return false; } + + /// + /// Checks whether the given entity key indexes are cached. + /// + /// The list of pairs of entity keys and thier indexes. + /// The array of indexes of that have to be checked. + /// The entity persister. + /// The batchable cache. + /// Whether to check the cache or just return for all keys. + /// A cancellation token that can be used to cancel the work + /// An array of booleans that contains the result for each key. 
+ private async Task AreCachedAsync(List> entityKeys, int[] keyIndexes, IEntityPersister persister, + IBatchableReadCache batchableCache, bool checkCache, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + var result = new bool[keyIndexes.Length]; + if (!checkCache || !persister.HasCache || !context.Session.CacheMode.HasFlag(CacheMode.Get)) + { + return result; + } + var cacheKeys = new object[keyIndexes.Length]; + var i = 0; + foreach (var index in keyIndexes) + { + var entityKey = entityKeys[index].Key; + cacheKeys[i++] = context.Session.GenerateCacheKey( + entityKey.Identifier, + persister.IdentifierType, + entityKey.EntityName); + } + var cacheResult = await (batchableCache.GetMultipleAsync(cacheKeys, cancellationToken)).ConfigureAwait(false); + for (var j = 0; j < result.Length; j++) + { + result[j] = cacheResult[j] != null; + } + + return result; + } + + /// + /// Checks whether the given collection key indexes are cached. + /// + /// The list of pairs of collection entries and thier indexes. + /// The array of indexes of that have to be checked. + /// The collection persister. + /// The batchable cache. + /// Whether to check the cache or just return for all keys. + /// A cancellation token that can be used to cancel the work + /// An array of booleans that contains the result for each key. 
+ private async Task AreCachedAsync(List, int>> collectionKeys, + int[] keyIndexes, ICollectionPersister persister, IBatchableReadCache batchableCache, + bool checkCache, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + var result = new bool[keyIndexes.Length]; + if (!checkCache || !persister.HasCache || !context.Session.CacheMode.HasFlag(CacheMode.Get)) + { + return result; + } + var cacheKeys = new object[keyIndexes.Length]; + var i = 0; + foreach (var index in keyIndexes) + { + var collectionKey = collectionKeys[index].Key; + cacheKeys[i++] = context.Session.GenerateCacheKey( + collectionKey.Key.LoadedKey, + persister.KeyType, + persister.Role); + } + var cacheResult = await (batchableCache.GetMultipleAsync(cacheKeys, cancellationToken)).ConfigureAwait(false); + for (var j = 0; j < result.Length; j++) + { + result[j] = cacheResult[j] != null; + } + + return result; + } } } diff --git a/src/NHibernate/Async/Event/Default/DefaultInitializeCollectionEventListener.cs b/src/NHibernate/Async/Event/Default/DefaultInitializeCollectionEventListener.cs index 1f0d706bed1..9ec4144cb7a 100644 --- a/src/NHibernate/Async/Event/Default/DefaultInitializeCollectionEventListener.cs +++ b/src/NHibernate/Async/Event/Default/DefaultInitializeCollectionEventListener.cs @@ -9,6 +9,7 @@ using System; +using System.Collections.Generic; using System.Diagnostics; using NHibernate.Cache; @@ -88,48 +89,79 @@ private async Task InitializeCollectionFromCacheAsync(object id, ICollecti { return false; } - else - { - ISessionFactoryImplementor factory = source.Factory; - - CacheKey ck = source.GenerateCacheKey(id, persister.KeyType, persister.Role); - object ce = await (persister.Cache.GetAsync(ck, source.Timestamp, cancellationToken)).ConfigureAwait(false); - if (factory.Statistics.IsStatisticsEnabled) + var batchSize = persister.GetBatchSize(); + if (batchSize > 1 && persister.Cache.IsBatchingGetSupported()) + { + var collectionEntries = new 
CollectionEntry[batchSize]; + // The first item in the array is the item that we want to load + var collectionBatch = await (source.PersistenceContext.BatchFetchQueue + .GetCollectionBatchAsync(persister, id, batchSize, false, collectionEntries, cancellationToken)).ConfigureAwait(false); + // Ignore null values as the retrieved batch may contains them when there are not enough + // uninitialized collection in the queue + var keys = new List(batchSize); + for (var i = 0; i < collectionBatch.Length; i++) { - if (ce == null) + var key = collectionBatch[i]; + if (key == null) { - factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); - } - else - { - factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); + break; } + keys.Add(source.GenerateCacheKey(key, persister.KeyType, persister.Role)); } - - if (ce == null) + var cachedObjects = await (persister.Cache.GetMultipleAsync(keys.ToArray(), source.Timestamp, cancellationToken)).ConfigureAwait(false); + for (var i = 1; i < cachedObjects.Length; i++) { - log.Debug("Collection cache miss: {0}", ck); - } - else - { - log.Debug("Collection cache hit: {0}", ck); + var coll = source.PersistenceContext.BatchFetchQueue.GetBatchLoadableCollection(persister, collectionEntries[i]); + await (AssembleAsync(keys[i], cachedObjects[i], persister, source, coll, collectionBatch[i], false, cancellationToken)).ConfigureAwait(false); } + return await (AssembleAsync(keys[0], cachedObjects[0], persister, source, collection, id, true, cancellationToken)).ConfigureAwait(false); + } + + var cacheKey = source.GenerateCacheKey(id, persister.KeyType, persister.Role); + var cachedObject = await (persister.Cache.GetAsync(cacheKey, source.Timestamp, cancellationToken)).ConfigureAwait(false); + return await (AssembleAsync(cacheKey, cachedObject, persister, source, collection, id, true, cancellationToken)).ConfigureAwait(false); + } + private async Task AssembleAsync(CacheKey ck, object ce, 
ICollectionPersister persister, ISessionImplementor source, + IPersistentCollection collection, object id, bool alterStatistics, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + ISessionFactoryImplementor factory = source.Factory; + if (factory.Statistics.IsStatisticsEnabled && alterStatistics) + { if (ce == null) { - return false; + factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); } else { - IPersistenceContext persistenceContext = source.PersistenceContext; + factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); + } + } - CollectionCacheEntry cacheEntry = (CollectionCacheEntry)persister.CacheEntryStructure.Destructure(ce, factory); - await (cacheEntry.AssembleAsync(collection, persister, persistenceContext.GetCollectionOwner(id, persister), cancellationToken)).ConfigureAwait(false); + if (ce == null) + { + log.Debug("Collection cache miss: {0}", ck); + } + else + { + log.Debug("Collection cache hit: {0}", ck); + } - persistenceContext.GetCollectionEntry(collection).PostInitialize(collection, persistenceContext); - return true; - } + if (ce == null) + { + return false; + } + else + { + IPersistenceContext persistenceContext = source.PersistenceContext; + + CollectionCacheEntry cacheEntry = (CollectionCacheEntry) persister.CacheEntryStructure.Destructure(ce, factory); + await (cacheEntry.AssembleAsync(collection, persister, persistenceContext.GetCollectionOwner(id, persister), cancellationToken)).ConfigureAwait(false); + + persistenceContext.GetCollectionEntry(collection).PostInitialize(collection, persistenceContext); + return true; } } } diff --git a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs index 8247333e98c..7dd1897fa65 100644 --- a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs +++ b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs @@ -9,6 +9,7 
@@ using System; +using System.Collections.Generic; using System.Diagnostics; using System.Text; using NHibernate.Cache; @@ -405,41 +406,85 @@ protected virtual async Task LoadFromSecondLevelCacheAsync(LoadEvent @ev bool useCache = persister.HasCache && source.CacheMode .HasFlag(CacheMode.Get) && @event.LockMode.LessThan(LockMode.Read); - if (useCache) + if (!useCache) { - ISessionFactoryImplementor factory = source.Factory; - - CacheKey ck = source.GenerateCacheKey(@event.EntityId, persister.IdentifierType, persister.RootEntityName); - object ce = await (persister.Cache.GetAsync(ck, source.Timestamp, cancellationToken)).ConfigureAwait(false); - - if (factory.Statistics.IsStatisticsEnabled) + return null; + } + ISessionFactoryImplementor factory = source.Factory; + var batchSize = persister.GetBatchSize(); + // TODO: check for subclass support + if (batchSize > 1 && persister.Cache.IsBatchingGetSupported() && !persister.EntityMetamodel.HasSubclasses) + { + // The first item in the array is the item that we want to load + var entityBatch = + await (source.PersistenceContext.BatchFetchQueue.GetEntityBatchAsync(persister, @event.EntityId, batchSize, false, cancellationToken)).ConfigureAwait(false); + // Ignore null values as the retrieved batch may contains them when there are not enough + // uninitialized entities in the queue + var keys = new List(batchSize); + for (var i = 0; i < entityBatch.Length; i++) { - if (ce == null) - { - factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); - log.Debug("Entity cache miss: {0}", ck); - } - else + var key = entityBatch[i]; + if (key == null) { - factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); - log.Debug("Entity cache hit: {0}", ck); + break; } + keys.Add(source.GenerateCacheKey(key, persister.IdentifierType, persister.RootEntityName)); } + var cachedObjects = await (persister.Cache.GetMultipleAsync(keys.ToArray(), source.Timestamp, 
cancellationToken)).ConfigureAwait(false); + for (var i = 1; i < cachedObjects.Length; i++) + { + cancellationToken.ThrowIfCancellationRequested(); + await (AssembleAsync( + keys[i], + cachedObjects[i], + new LoadEvent(entityBatch[i], @event.EntityClassName, @event.LockMode, @event.Session), + false)).ConfigureAwait(false); + } + cancellationToken.ThrowIfCancellationRequested(); + return await (AssembleAsync(keys[0], cachedObjects[0], @event, true)).ConfigureAwait(false); + } + var cacheKey = source.GenerateCacheKey(@event.EntityId, persister.IdentifierType, persister.RootEntityName); + var cachedObject = await (persister.Cache.GetAsync(cacheKey, source.Timestamp, cancellationToken)).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); + return await (AssembleAsync(cacheKey, cachedObject, @event, true)).ConfigureAwait(false); - if (ce != null) + Task AssembleAsync(CacheKey ck, object ce, LoadEvent evt, bool alterStatistics) + { + try { - CacheEntry entry = (CacheEntry) persister.CacheEntryStructure.Destructure(ce, factory); + if (factory.Statistics.IsStatisticsEnabled && alterStatistics) + { + if (ce == null) + { + factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); + log.Debug("Entity cache miss: {0}", ck); + } + else + { + factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); + log.Debug("Entity cache hit: {0}", ck); + } + } - // Entity was found in second-level cache... - // NH: Different behavior (take a look to options.ExactPersister (NH-295)) - if (!options.ExactPersister || persister.EntityMetamodel.SubclassEntityNames.Contains(entry.Subclass)) + if (ce != null) { - return await (AssembleCacheEntryAsync(entry, @event.EntityId, persister, @event, cancellationToken)).ConfigureAwait(false); + CacheEntry entry = (CacheEntry) persister.CacheEntryStructure.Destructure(ce, factory); + + // Entity was found in second-level cache... 
+ // NH: Different behavior (take a look to options.ExactPersister (NH-295)) + if (!options.ExactPersister || persister.EntityMetamodel.SubclassEntityNames.Contains(entry.Subclass)) + { + return AssembleCacheEntryAsync(entry, evt.EntityId, persister, evt, cancellationToken); + } } + + return Task.FromResult(null); + } + catch (Exception ex) + { + return Task.FromException(ex); } } - - return null; } private async Task AssembleCacheEntryAsync(CacheEntry entry, object id, IEntityPersister persister, LoadEvent @event, CancellationToken cancellationToken) diff --git a/src/NHibernate/Cache/IBatchableReadCache.cs b/src/NHibernate/Cache/IBatchableReadCache.cs new file mode 100644 index 00000000000..e2b94691d58 --- /dev/null +++ b/src/NHibernate/Cache/IBatchableReadCache.cs @@ -0,0 +1,27 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NHibernate.Cache +{ + /// + /// Defines a method for retrieving multiple keys from the cache at once. The implementor + /// should use this interface along with when the cache supports + /// a multiple get operation. + /// + /// + /// + /// + /// All implementations must be threadsafe. + /// + /// + public partial interface IBatchableReadCache + { + /// + /// Get multiple objects from the cache. + /// + /// The keys to be retrieved from the cache. + /// + object[] GetMultiple(object[] keys); + } +} diff --git a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs index 624838f5d38..9dd1f00f2d5 100644 --- a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs @@ -1,3 +1,4 @@ +using System; using System.Collections; using NHibernate.Cache.Access; using NHibernate.Cache.Entry; @@ -139,4 +140,43 @@ public partial interface ICacheConcurrencyStrategy /// The for this strategy to use. 
ICache Cache { get; set; } } -} \ No newline at end of file + + internal static partial class CacheConcurrencyStrategyExtensions + { + /// + /// Attempt to retrieve multiple objects from the Cache + /// + /// The cache concurrency strategy. + /// The keys (id) of the objects to get out of the Cache. + /// A timestamp prior to the transaction start time + /// An array of cached objects or + /// + //6.0 TODO: Merge into ICacheConcurrencyStrategy. + public static object[] GetMultiple(this ICacheConcurrencyStrategy cache, CacheKey[] keys, long txTimestamp) + { + switch (cache) + { + case ReadOnlyCache readOnly: + return readOnly.GetMultiple(keys, txTimestamp); + case ReadWriteCache readWrite: + return readWrite.GetMultiple(keys, txTimestamp); + case NonstrictReadWriteCache nonstrictReadWrite: + return nonstrictReadWrite.GetMultiple(keys, txTimestamp); + } + + // Fallback to Get + var objects = new object[keys.Length]; + for (var i = 0; i < keys.Length; i++) + { + objects[i] = cache.Get(keys[i], txTimestamp); + } + return objects; + } + + public static bool IsBatchingGetSupported(this ICacheConcurrencyStrategy cache) + { + // ReSharper disable once SuspiciousTypeConversion.Global + return cache.Cache is IBatchableReadCache; + } + } +} diff --git a/src/NHibernate/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Cache/NonstrictReadWriteCache.cs index f131ac41902..adf0fd0e985 100644 --- a/src/NHibernate/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Cache/NonstrictReadWriteCache.cs @@ -1,5 +1,6 @@ using System; using System.Collections; +using System.Linq; using NHibernate.Cache.Access; namespace NHibernate.Cache @@ -15,6 +16,7 @@ namespace NHibernate.Cache public partial class NonstrictReadWriteCache : ICacheConcurrencyStrategy { private ICache cache; + private IBatchableReadCache _batchableReadCache; private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(NonstrictReadWriteCache)); @@ -29,7 +31,12 @@ public string RegionName public ICache 
Cache { get { return cache; } - set { cache = value; } + set + { + cache = value; + // ReSharper disable once SuspiciousTypeConversion.Global + _batchableReadCache = value as IBatchableReadCache; + } } /// @@ -54,6 +61,28 @@ public object Get(CacheKey key, long txTimestamp) return result; } + public object[] GetMultiple(CacheKey[] keys, long txTimestamp) + { + if (_batchableReadCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); + } + if (log.IsDebugEnabled()) + { + log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); + } + var results = _batchableReadCache.GetMultiple(keys.Select(o => (object) o).ToArray()); + if (!log.IsDebugEnabled()) + { + return results; + } + for (var i = 0; i < keys.Length; i++) + { + log.Debug(results[i] != null ? $"Cache hit: {keys[i]}" : $"Cache miss: {keys[i]}"); + } + return results; + } + /// /// Add an item to the cache /// diff --git a/src/NHibernate/Cache/ReadOnlyCache.cs b/src/NHibernate/Cache/ReadOnlyCache.cs index 5bce536d4b9..87b04296559 100644 --- a/src/NHibernate/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Cache/ReadOnlyCache.cs @@ -1,5 +1,6 @@ using System; using System.Collections; +using System.Linq; using NHibernate.Cache.Access; namespace NHibernate.Cache @@ -10,6 +11,7 @@ namespace NHibernate.Cache public partial class ReadOnlyCache : ICacheConcurrencyStrategy { private ICache cache; + private IBatchableReadCache _batchableReadCache; private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(ReadOnlyCache)); /// @@ -23,7 +25,12 @@ public string RegionName public ICache Cache { get { return cache; } - set { cache = value; } + set + { + cache = value; + // ReSharper disable once SuspiciousTypeConversion.Global + _batchableReadCache = value as IBatchableReadCache; + } } public object Get(CacheKey key, long timestamp) @@ -36,6 +43,28 @@ public object Get(CacheKey key, long timestamp) return result; } + public object[] 
GetMultiple(CacheKey[] keys, long txTimestamp) + { + if (_batchableReadCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); + } + if (log.IsDebugEnabled()) + { + log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); + } + var results = _batchableReadCache.GetMultiple(keys.Select(o => (object) o).ToArray()); + if (!log.IsDebugEnabled()) + { + return results; + } + for (var i = 0; i < keys.Length; i++) + { + log.Debug(results[i] != null ? $"Cache hit: {keys[i]}" : $"Cache miss: {keys[i]}"); + } + return results; + } + /// /// Unsupported! /// diff --git a/src/NHibernate/Cache/ReadWriteCache.cs b/src/NHibernate/Cache/ReadWriteCache.cs index 98da04caa17..6f92b556f5a 100644 --- a/src/NHibernate/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Cache/ReadWriteCache.cs @@ -1,5 +1,6 @@ using System; using System.Collections; +using System.Linq; using NHibernate.Cache.Access; namespace NHibernate.Cache @@ -34,6 +35,7 @@ public interface ILockable private readonly object _lockObject = new object(); private ICache cache; + private IBatchableReadCache _batchableReadCache; private int _nextLockId; public ReadWriteCache() @@ -51,7 +53,12 @@ public string RegionName public ICache Cache { get { return cache; } - set { cache = value; } + set + { + cache = value; + // ReSharper disable once SuspiciousTypeConversion.Global + _batchableReadCache = value as IBatchableReadCache; + } } /// @@ -136,6 +143,45 @@ public object Get(CacheKey key, long txTimestamp) } } + public object[] GetMultiple(CacheKey[] keys, long txTimestamp) + { + if (_batchableReadCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); + } + if (log.IsDebugEnabled()) + { + log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); + } + var result = new object[keys.Length]; + lock (_lockObject) + { + var lockables = 
_batchableReadCache.GetMultiple(keys.Select(o => (object) o).ToArray()); + for (var i = 0; i < lockables.Length; i++) + { + var lockable = (ILockable) lockables[i]; + var gettable = lockable != null && lockable.IsGettable(txTimestamp); + + if (gettable) + { + if (log.IsDebugEnabled()) + { + log.Debug("Cache hit: {0}", keys[i]); + } + result[i] = ((CachedItem) lockable).Value; + } + + if (log.IsDebugEnabled()) + { + log.Debug(lockable == null ? "Cache miss: {0}" : "Cached item was locked: {0}", keys[i]); + } + + result[i] = null; + } + } + return result; + } + /// /// Stop any other transactions reading or writing this item to/from /// the cache. Send them straight to the database instead. (The lock diff --git a/src/NHibernate/Cache/UpdateTimestampsCache.cs b/src/NHibernate/Cache/UpdateTimestampsCache.cs index 450ea3ce469..5d77a1b9c16 100644 --- a/src/NHibernate/Cache/UpdateTimestampsCache.cs +++ b/src/NHibernate/Cache/UpdateTimestampsCache.cs @@ -18,6 +18,7 @@ public partial class UpdateTimestampsCache { private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(UpdateTimestampsCache)); private ICache updateTimestamps; + private readonly IBatchableReadCache _batchUpdateTimestamps; private readonly string regionName = typeof(UpdateTimestampsCache).Name; @@ -32,6 +33,8 @@ public UpdateTimestampsCache(Settings settings, IDictionary prop regionName = prefix == null ? regionName : prefix + '.' 
+ regionName; log.Info("starting update timestamps cache at region: {0}", regionName); updateTimestamps = settings.CacheProvider.BuildCache(regionName, props); + // ReSharper disable once SuspiciousTypeConversion.Global + _batchUpdateTimestamps = updateTimestamps as IBatchableReadCache; } //Since v5.1 @@ -79,35 +82,33 @@ public virtual void Invalidate(IReadOnlyCollection spaces) [MethodImpl(MethodImplOptions.Synchronized)] public virtual bool IsUpToDate(ISet spaces, long timestamp /* H2.1 has Long here */) { - foreach (string space in spaces) + if (_batchUpdateTimestamps != null) { - object lastUpdate = updateTimestamps.Get(space); - if (lastUpdate == null) + var keys = new object[spaces.Count]; + var index = 0; + foreach (var space in spaces) { - //the last update timestamp was lost from the cache - //(or there were no updates since startup!) - - //NOTE: commented out, since users found the "safe" behavior - // counter-intuitive when testing, and we couldn't deal - // with all the forum posts :-( - //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) ); - //result = false; // safer - - //OR: put a timestamp there, to avoid subsequent expensive - // lookups to a distributed cache - this is no good, since - // it is non-threadsafe (could hammer effect of an actual - // invalidation), and because this is not the way our - // preferred distributed caches work (they work by - // replication) - //updateTimestamps.put( space, new Long(Long.MIN_VALUE) ); + keys[index++] = space; } - else + var lastUpdates = _batchUpdateTimestamps.GetMultiple(keys); + foreach (var lastUpdate in lastUpdates) { - if ((long) lastUpdate >= timestamp) + if (IsOutdated(lastUpdate, timestamp)) { return false; } } + return true; + } + + foreach (string space in spaces) + { + object lastUpdate = updateTimestamps.Get(space); + if (IsOutdated(lastUpdate, timestamp)) + { + return false; + } + } return true; } @@ -123,5 +124,37 @@ public virtual void Destroy() log.Warn(e, "could 
not destroy UpdateTimestamps cache"); } } + + private bool IsOutdated(object lastUpdate, long timestamp) + { + if (lastUpdate == null) + { + //the last update timestamp was lost from the cache + //(or there were no updates since startup!) + + //NOTE: commented out, since users found the "safe" behavior + // counter-intuitive when testing, and we couldn't deal + // with all the forum posts :-( + //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) ); + //result = false; // safer + + //OR: put a timestamp there, to avoid subsequent expensive + // lookups to a distributed cache - this is no good, since + // it is non-threadsafe (could hammer effect of an actual + // invalidation), and because this is not the way our + // preferred distributed caches work (they work by + // replication) + //updateTimestamps.put( space, new Long(Long.MIN_VALUE) ); + } + else + { + if ((long) lastUpdate >= timestamp) + { + return true; + } + } + + return false; + } } } diff --git a/src/NHibernate/Engine/BatchFetchQueue.cs b/src/NHibernate/Engine/BatchFetchQueue.cs index dd668788379..99bf8696408 100644 --- a/src/NHibernate/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Engine/BatchFetchQueue.cs @@ -1,3 +1,4 @@ +using System; using System.Collections; using NHibernate.Cache; using NHibernate.Collection; @@ -5,6 +6,7 @@ using NHibernate.Persister.Entity; using NHibernate.Util; using System.Collections.Generic; +using System.Linq; using Iesi.Collections.Generic; namespace NHibernate.Engine @@ -158,6 +160,25 @@ public void AddBatchLoadableCollection(IPersistentCollection collection, Collect map[ce] = collection; } + /// + /// Retrives the uninitialized persistent collection from the queue. + /// + /// The collection persister. + /// The collection entry. + /// A persistent collection if found, otherwise. 
+ internal IPersistentCollection GetBatchLoadableCollection(ICollectionPersister persister, CollectionEntry ce) + { + if (!batchLoadableCollections.TryGetValue(persister.Role, out var map)) + { + return null; + } + if (!map.TryGetValue(ce, out var collection)) + { + return null; + } + return collection; + } + /// /// After a collection was initialized or evicted, we don't /// need to batch fetch it anymore, remove it from the queue @@ -181,64 +202,176 @@ public void RemoveBatchLoadableCollection(CollectionEntry ce) /// an array of collection keys, of length batchSize (padded with nulls) public object[] GetCollectionBatch(ICollectionPersister collectionPersister, object id, int batchSize) { - object[] keys = new object[batchSize]; - keys[0] = id; - int i = 1; - int end = -1; - bool checkForEnd = false; + return GetCollectionBatch(collectionPersister, id, batchSize, true, null); + } + + /// + /// Get a batch of uninitialized collection keys for a given role + /// + /// The persister for the collection role. + /// A key that must be included in the batch fetch + /// the maximum number of keys to return + /// Whether to check the cache for uninitialized collection keys. + /// An array that will be filled with collection entries if set. + /// An array of collection keys, of length (padded with nulls) + internal object[] GetCollectionBatch(ICollectionPersister collectionPersister, object id, int batchSize, bool checkCache, + CollectionEntry[] collectionEntries) + { + var keys = new object[batchSize]; + keys[0] = id; // The first element of array is reserved for the actual instance we are loading + var i = 1; // The current index of keys array + int? 
keyIndex = null; // The index of the demanding key in the linked hash set + var checkForEnd = false; // Stores whether we found the demanded collection and reached the batchSize + var index = 0; // The current index of the linked hash set iteration + // List of collection entries that haven't been checked for their existance in the cache. Besides the collection entry, + // the index where the entry was found is also stored in order to correctly order the returning keys. + var collectionKeys = new List, int>>(batchSize); + var batchableCache = collectionPersister.Cache?.Cache as IBatchableReadCache; + + if (!batchLoadableCollections.TryGetValue(collectionPersister.Role, out var map)) + { + return keys; + } - if (batchLoadableCollections.TryGetValue(collectionPersister.Role, out var map)) + foreach (KeyValuePair me in map) { - foreach (KeyValuePair me in map) + if (ProcessKey(me)) { - var ce = me.Key; - var collection = me.Value; - if (ce.LoadedKey == null) - { - // the LoadedKey of the CollectionEntry might be null as it might have been reset to null - // (see for example Collections.ProcessDereferencedCollection() - // and CollectionEntry.AfterAction()) - // though we clear the queue on flush, it seems like a good idea to guard - // against potentially null LoadedKey:s - continue; - } + return keys; + } + index++; + } + + // If by the end of the iteration we haven't filled the whole array of keys to fetch, + // we have to check the remaining collection keys. + while (i != batchSize && collectionKeys.Count > 0) + { + if (CheckCacheAndProcessResult()) + { + return keys; + } + } + + return keys; //we ran out of keys to try - if (collection.WasInitialized) + // Calls the cache to check if any of the keys is cached and continues the key processing for those + // that are not stored in the cache. + bool CheckCacheAndProcessResult() + { + var fromIndex = batchableCache != null + ? 
collectionKeys.Count - Math.Min(batchSize, collectionKeys.Count) + : 0; + var toIndex = collectionKeys.Count - 1; + var indexes = GetSortedKeyIndexes(collectionKeys, keyIndex.Value, fromIndex, toIndex); + if (batchableCache == null) + { + for (var j = 0; j < collectionKeys.Count; j++) { - log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen."); - continue; + if (ProcessKey(collectionKeys[indexes[j]].Key)) + { + return true; + } } - - if (checkForEnd && i == end) + } + else + { + var results = AreCached(collectionKeys, indexes, collectionPersister, batchableCache, checkCache); + var k = toIndex; + for (var j = 0; j < results.Length; j++) { - return keys; //the first key found after the given key + if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true)) + { + return true; + } } + } - bool isEqual = collectionPersister.KeyType.IsEqual(id, ce.LoadedKey, collectionPersister.Factory); + for (var j = toIndex; j >= fromIndex; j--) + { + collectionKeys.RemoveAt(j); + } + return false; + } - if (isEqual) + bool ProcessKey(KeyValuePair me, bool ignoreCache = false) + { + var ce = me.Key; + var collection = me.Value; + if (ce.LoadedKey == null) + { + // the LoadedKey of the CollectionEntry might be null as it might have been reset to null + // (see for example Collections.ProcessDereferencedCollection() + // and CollectionEntry.AfterAction()) + // though we clear the queue on flush, it seems like a good idea to guard + // against potentially null LoadedKey:s + return false; + } + + if (collection.WasInitialized) + { + log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen."); + return false; + } + + if (checkForEnd && (index >= keyIndex.Value + batchSize || index == map.Count)) + { + return true; + } + if (collectionPersister.KeyType.IsEqual(id, ce.LoadedKey, collectionPersister.Factory)) + { + if (collectionEntries != null) { - end = i; - //checkForEnd = false; + collectionEntries[0] = ce; } - 
else if (!IsCached(ce.LoadedKey, collectionPersister)) + keyIndex = index; + } + else if (!checkCache || batchableCache == null) + { + if (!keyIndex.HasValue || index < keyIndex.Value) { - keys[i++] = ce.LoadedKey; - //count++; + collectionKeys.Add(new KeyValuePair, int>(me, index)); + return false; } - if (i == batchSize) + if (!checkCache || !IsCached(ce.LoadedKey, collectionPersister)) { - i = 1; //end of array, start filling again from start - if (end != -1) + if (collectionEntries != null) { - checkForEnd = true; + collectionEntries[i] = ce; } + keys[i++] = ce.LoadedKey; + } + } + else if (ignoreCache) + { + if (collectionEntries != null) + { + collectionEntries[i] = ce; } + keys[i++] = ce.LoadedKey; } + else + { + collectionKeys.Add(new KeyValuePair, int>(me, index)); + // Check the cache only when we have collected as many keys as are needed to fill the batch, + // that are after the demanded key. + if (!keyIndex.HasValue || index < keyIndex.Value + batchSize) + { + return false; + } + return CheckCacheAndProcessResult(); + } + if (i == batchSize) + { + i = 1; // End of array, start filling again from start + if (keyIndex.HasValue) + { + checkForEnd = true; + return index >= keyIndex.Value + batchSize || index == map.Count; + } + } + return false; } - - return keys; //we ran out of keys to try } /// @@ -252,42 +385,147 @@ public object[] GetCollectionBatch(ICollectionPersister collectionPersister, obj /// an array of identifiers, of length batchSize (possibly padded with nulls) public object[] GetEntityBatch(IEntityPersister persister, object id, int batchSize) { - object[] ids = new object[batchSize]; - ids[0] = id; //first element of array is reserved for the actual instance we are loading! 
- int i = 1; - int end = -1; - bool checkForEnd = false; + return GetEntityBatch(persister, id, batchSize, true); + } + + /// + /// Get a batch of unloaded identifiers for this class, using a slightly + /// complex algorithm that tries to grab keys registered immediately after + /// the given key. + /// + /// The persister for the entities being loaded. + /// The identifier of the entity currently demanding load. + /// The maximum number of keys to return + /// Whether to check the cache for uninitialized keys. + /// An array of identifiers, of length (possibly padded with nulls) + internal object[] GetEntityBatch(IEntityPersister persister, object id, int batchSize, bool checkCache) + { + var ids = new object[batchSize]; + ids[0] = id; // The first element of array is reserved for the actual instance we are loading + var i = 1; // The current index of ids array + int? idIndex = null; // The index of the demanding id in the linked hash set + var checkForEnd = false; // Stores whether we found the demanded id and reached the batchSize + var index = 0; // The current index of the linked hash set iteration + // List of entity keys that haven't been checked for their existance in the cache. Besides the entity key, + // the index where the key was found is also stored in order to correctly order the returning keys. + var entityKeys = new List>(batchSize); + var batchableCache = persister.Cache?.Cache as IBatchableReadCache; - if (batchLoadableEntityKeys.TryGetValue(persister.EntityName, out var set)) + if (!batchLoadableEntityKeys.TryGetValue(persister.EntityName, out var set)) { - foreach (var key in set) + return ids; + } + + foreach (var key in set) + { + if (ProcessKey(key)) { - //TODO: this needn't exclude subclasses... 
- if (checkForEnd && i == end) - { - //the first id found after the given id - return ids; - } - if (persister.IdentifierType.IsEqual(id, key.Identifier)) + return ids; + } + index++; + } + + // If by the end of the iteration we haven't filled the whole array of ids to fetch, + // we have to check the remaining entity keys. + while (i != batchSize && entityKeys.Count > 0) + { + if (CheckCacheAndProcessResult()) + { + return ids; + } + } + + return ids; + + // Calls the cache to check if any of the keys is cached and continues the key processing for those + // that are not stored in the cache. + bool CheckCacheAndProcessResult() + { + var fromIndex = batchableCache != null + ? entityKeys.Count - Math.Min(batchSize, entityKeys.Count) + : 0; + var toIndex = entityKeys.Count - 1; + var indexes = GetSortedKeyIndexes(entityKeys, idIndex.Value, fromIndex, toIndex); + if (batchableCache == null) + { + for (var j = 0; j < entityKeys.Count; j++) { - end = i; + if (ProcessKey(entityKeys[indexes[j]].Key)) + { + return true; + } } - else + } + else + { + var results = AreCached(entityKeys, indexes, persister, batchableCache, checkCache); + var k = toIndex; + for (var j = 0; j < results.Length; j++) { - if (!IsCached(key, persister)) + if (!results[j] && ProcessKey(entityKeys[indexes[j]].Key, true)) { - ids[i++] = key.Identifier; + return true; } } - if (i == batchSize) + } + + for (var j = toIndex; j >= fromIndex; j--) + { + entityKeys.RemoveAt(j); + } + return false; + } + + bool ProcessKey(EntityKey key, bool ignoreCache = false) + { + //TODO: this needn't exclude subclasses... 
+ if (checkForEnd && (index >= idIndex.Value + batchSize || index == set.Count)) + { + return true; + } + if (persister.IdentifierType.IsEqual(id, key.Identifier)) + { + idIndex = index; + } + else if (!checkCache || batchableCache == null) + { + if (!idIndex.HasValue || index < idIndex.Value) { - i = 1; //end of array, start filling again from start - if (end != -1) - checkForEnd = true; + entityKeys.Add(new KeyValuePair(key, index)); + return false; } + + if (!checkCache || !IsCached(key, persister)) + { + ids[i++] = key.Identifier; + } + } + else if (ignoreCache) + { + ids[i++] = key.Identifier; + } + else + { + entityKeys.Add(new KeyValuePair(key, index)); + // Check the cache only when we have collected as many keys as are needed to fill the batch, + // that are after the demanded key. + if (!idIndex.HasValue || index < idIndex.Value + batchSize) + { + return false; + } + return CheckCacheAndProcessResult(); } + if (i == batchSize) + { + i = 1; // End of array, start filling again from start + if (idIndex.HasValue) + { + checkForEnd = true; + return index >= idIndex.Value + batchSize || index == set.Count; + } + } + return false; } - return ids; //we ran out of ids to try } private bool IsCached(EntityKey entityKey, IEntityPersister persister) @@ -309,5 +547,111 @@ private bool IsCached(object collectionKey, ICollectionPersister persister) } return false; } + + /// + /// Checks whether the given entity key indexes are cached. + /// + /// The list of pairs of entity keys and thier indexes. + /// The array of indexes of that have to be checked. + /// The entity persister. + /// The batchable cache. + /// Whether to check the cache or just return for all keys. + /// An array of booleans that contains the result for each key. 
+ private bool[] AreCached(List> entityKeys, int[] keyIndexes, IEntityPersister persister, + IBatchableReadCache batchableCache, bool checkCache) + { + var result = new bool[keyIndexes.Length]; + if (!checkCache || !persister.HasCache || !context.Session.CacheMode.HasFlag(CacheMode.Get)) + { + return result; + } + var cacheKeys = new object[keyIndexes.Length]; + var i = 0; + foreach (var index in keyIndexes) + { + var entityKey = entityKeys[index].Key; + cacheKeys[i++] = context.Session.GenerateCacheKey( + entityKey.Identifier, + persister.IdentifierType, + entityKey.EntityName); + } + var cacheResult = batchableCache.GetMultiple(cacheKeys); + for (var j = 0; j < result.Length; j++) + { + result[j] = cacheResult[j] != null; + } + + return result; + } + + /// + /// Checks whether the given collection key indexes are cached. + /// + /// The list of pairs of collection entries and their indexes. + /// The array of indexes of that have to be checked. + /// The collection persister. + /// The batchable cache. + /// Whether to check the cache or just return for all keys. + /// An array of booleans that contains the result for each key.
+ private bool[] AreCached(List, int>> collectionKeys, + int[] keyIndexes, ICollectionPersister persister, IBatchableReadCache batchableCache, + bool checkCache) + { + var result = new bool[keyIndexes.Length]; + if (!checkCache || !persister.HasCache || !context.Session.CacheMode.HasFlag(CacheMode.Get)) + { + return result; + } + var cacheKeys = new object[keyIndexes.Length]; + var i = 0; + foreach (var index in keyIndexes) + { + var collectionKey = collectionKeys[index].Key; + cacheKeys[i++] = context.Session.GenerateCacheKey( + collectionKey.Key.LoadedKey, + persister.KeyType, + persister.Role); + } + var cacheResult = batchableCache.GetMultiple(cacheKeys); + for (var j = 0; j < result.Length; j++) + { + result[j] = cacheResult[j] != null; + } + + return result; + } + + /// + /// Sorts the given keys by their indexes, where the keys that are after the demanded key will be located + /// at the start and the remaining indexes at the end of the returned array. + /// + /// The type of the key + /// The list of pairs of keys and their indexes. + /// The index of the demanded key + /// The index where the sorting will begin. + /// The index where the sorting will end. + /// An array of sorted key indexes.
+ private static int[] GetSortedKeyIndexes(List> keys, int keyIndex, int fromIndex, int toIndex) + { + var result = new int[Math.Abs(toIndex - fromIndex) + 1]; + var lowerIndexes = new List(); + var i = 0; + for (var j = fromIndex; j <= toIndex; j++) + { + if (keys[j].Value < keyIndex) + { + lowerIndexes.Add(j); + } + else + { + result[i++] = j; + } + } + for (var j = lowerIndexes.Count - 1; j >= 0; j--) + { + result[i++] = lowerIndexes[j]; + } + return result; + } } } diff --git a/src/NHibernate/Event/Default/DefaultInitializeCollectionEventListener.cs b/src/NHibernate/Event/Default/DefaultInitializeCollectionEventListener.cs index 2f37f28d1ea..8ac0cc422ab 100644 --- a/src/NHibernate/Event/Default/DefaultInitializeCollectionEventListener.cs +++ b/src/NHibernate/Event/Default/DefaultInitializeCollectionEventListener.cs @@ -1,4 +1,5 @@ using System; +using System.Collections.Generic; using System.Diagnostics; using NHibernate.Cache; @@ -76,48 +77,78 @@ private bool InitializeCollectionFromCache(object id, ICollectionPersister persi { return false; } - else - { - ISessionFactoryImplementor factory = source.Factory; - - CacheKey ck = source.GenerateCacheKey(id, persister.KeyType, persister.Role); - object ce = persister.Cache.Get(ck, source.Timestamp); - if (factory.Statistics.IsStatisticsEnabled) + var batchSize = persister.GetBatchSize(); + if (batchSize > 1 && persister.Cache.IsBatchingGetSupported()) + { + var collectionEntries = new CollectionEntry[batchSize]; + // The first item in the array is the item that we want to load + var collectionBatch = source.PersistenceContext.BatchFetchQueue + .GetCollectionBatch(persister, id, batchSize, false, collectionEntries); + // Ignore null values as the retrieved batch may contains them when there are not enough + // uninitialized collection in the queue + var keys = new List(batchSize); + for (var i = 0; i < collectionBatch.Length; i++) { - if (ce == null) + var key = collectionBatch[i]; + if (key == null) { - 
factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); - } - else - { - factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); + break; } + keys.Add(source.GenerateCacheKey(key, persister.KeyType, persister.Role)); } - - if (ce == null) + var cachedObjects = persister.Cache.GetMultiple(keys.ToArray(), source.Timestamp); + for (var i = 1; i < cachedObjects.Length; i++) { - log.Debug("Collection cache miss: {0}", ck); - } - else - { - log.Debug("Collection cache hit: {0}", ck); + var coll = source.PersistenceContext.BatchFetchQueue.GetBatchLoadableCollection(persister, collectionEntries[i]); + Assemble(keys[i], cachedObjects[i], persister, source, coll, collectionBatch[i], false); } + return Assemble(keys[0], cachedObjects[0], persister, source, collection, id, true); + } + + var cacheKey = source.GenerateCacheKey(id, persister.KeyType, persister.Role); + var cachedObject = persister.Cache.Get(cacheKey, source.Timestamp); + return Assemble(cacheKey, cachedObject, persister, source, collection, id, true); + } + private bool Assemble(CacheKey ck, object ce, ICollectionPersister persister, ISessionImplementor source, + IPersistentCollection collection, object id, bool alterStatistics) + { + ISessionFactoryImplementor factory = source.Factory; + if (factory.Statistics.IsStatisticsEnabled && alterStatistics) + { if (ce == null) { - return false; + factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); } else { - IPersistenceContext persistenceContext = source.PersistenceContext; + factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); + } + } - CollectionCacheEntry cacheEntry = (CollectionCacheEntry)persister.CacheEntryStructure.Destructure(ce, factory); - cacheEntry.Assemble(collection, persister, persistenceContext.GetCollectionOwner(id, persister)); + if (ce == null) + { + log.Debug("Collection cache miss: {0}", ck); + } + else + { + log.Debug("Collection cache hit: {0}", 
ck); + } - persistenceContext.GetCollectionEntry(collection).PostInitialize(collection, persistenceContext); - return true; - } + if (ce == null) + { + return false; + } + else + { + IPersistenceContext persistenceContext = source.PersistenceContext; + + CollectionCacheEntry cacheEntry = (CollectionCacheEntry) persister.CacheEntryStructure.Destructure(ce, factory); + cacheEntry.Assemble(collection, persister, persistenceContext.GetCollectionOwner(id, persister)); + + persistenceContext.GetCollectionEntry(collection).PostInitialize(collection, persistenceContext); + return true; } } } diff --git a/src/NHibernate/Event/Default/DefaultLoadEventListener.cs b/src/NHibernate/Event/Default/DefaultLoadEventListener.cs index b919263755d..6fdc1666911 100644 --- a/src/NHibernate/Event/Default/DefaultLoadEventListener.cs +++ b/src/NHibernate/Event/Default/DefaultLoadEventListener.cs @@ -1,4 +1,5 @@ using System; +using System.Collections.Generic; using System.Diagnostics; using System.Text; using NHibernate.Cache; @@ -416,14 +417,48 @@ protected virtual object LoadFromSecondLevelCache(LoadEvent @event, IEntityPersi bool useCache = persister.HasCache && source.CacheMode .HasFlag(CacheMode.Get) && @event.LockMode.LessThan(LockMode.Read); - if (useCache) + if (!useCache) { - ISessionFactoryImplementor factory = source.Factory; - - CacheKey ck = source.GenerateCacheKey(@event.EntityId, persister.IdentifierType, persister.RootEntityName); - object ce = persister.Cache.Get(ck, source.Timestamp); + return null; + } + ISessionFactoryImplementor factory = source.Factory; + var batchSize = persister.GetBatchSize(); + // TODO: check for subclass support + if (batchSize > 1 && persister.Cache.IsBatchingGetSupported() && !persister.EntityMetamodel.HasSubclasses) + { + // The first item in the array is the item that we want to load + var entityBatch = + source.PersistenceContext.BatchFetchQueue.GetEntityBatch(persister, @event.EntityId, batchSize, false); + // Ignore null values as the 
retrieved batch may contains them when there are not enough + // uninitialized entities in the queue + var keys = new List(batchSize); + for (var i = 0; i < entityBatch.Length; i++) + { + var key = entityBatch[i]; + if (key == null) + { + break; + } + keys.Add(source.GenerateCacheKey(key, persister.IdentifierType, persister.RootEntityName)); + } + var cachedObjects = persister.Cache.GetMultiple(keys.ToArray(), source.Timestamp); + for (var i = 1; i < cachedObjects.Length; i++) + { + Assemble( + keys[i], + cachedObjects[i], + new LoadEvent(entityBatch[i], @event.EntityClassName, @event.LockMode, @event.Session), + false); + } + return Assemble(keys[0], cachedObjects[0], @event, true); + } + var cacheKey = source.GenerateCacheKey(@event.EntityId, persister.IdentifierType, persister.RootEntityName); + var cachedObject = persister.Cache.Get(cacheKey, source.Timestamp); + return Assemble(cacheKey, cachedObject, @event, true); - if (factory.Statistics.IsStatisticsEnabled) + object Assemble(CacheKey ck, object ce, LoadEvent evt, bool alterStatistics) + { + if (factory.Statistics.IsStatisticsEnabled && alterStatistics) { if (ce == null) { @@ -445,12 +480,12 @@ protected virtual object LoadFromSecondLevelCache(LoadEvent @event, IEntityPersi // NH: Different behavior (take a look to options.ExactPersister (NH-295)) if (!options.ExactPersister || persister.EntityMetamodel.SubclassEntityNames.Contains(entry.Subclass)) { - return AssembleCacheEntry(entry, @event.EntityId, persister, @event); + return AssembleCacheEntry(entry, evt.EntityId, persister, evt); } } - } - return null; + return null; + } } private object AssembleCacheEntry(CacheEntry entry, object id, IEntityPersister persister, LoadEvent @event) diff --git a/src/NHibernate/Persister/Entity/AbstractEntityPersister.cs b/src/NHibernate/Persister/Entity/AbstractEntityPersister.cs index 403dc578d8a..dd94d81e84a 100644 --- a/src/NHibernate/Persister/Entity/AbstractEntityPersister.cs +++ 
b/src/NHibernate/Persister/Entity/AbstractEntityPersister.cs @@ -635,6 +635,8 @@ public bool IsBatchLoadable get { return batchSize > 1; } } + public int BatchSize => batchSize; + public virtual string[] IdentifierColumnNames { get { return rootTableKeyColumnNames; } diff --git a/src/NHibernate/Persister/Entity/IEntityPersister.cs b/src/NHibernate/Persister/Entity/IEntityPersister.cs index 88f1c65b380..585222d4be4 100644 --- a/src/NHibernate/Persister/Entity/IEntityPersister.cs +++ b/src/NHibernate/Persister/Entity/IEntityPersister.cs @@ -599,4 +599,25 @@ void Update( IEntityTuplizer EntityTuplizer { get; } } + + internal static class EntityPersisterExtensions + { + /// + /// Get the batch size of a entity persister. + /// + //6.0 TODO: Merge into IEntityPersister. + public static int GetBatchSize(this IEntityPersister persister) + { + if (persister is AbstractEntityPersister acp) + { + return acp.BatchSize; + } + + NHibernateLogger + .For(typeof(EntityPersisterExtensions)) + .Warn("Entity persister of {0} type is not supported, returning 1 as a batch size.", persister?.GetType()); + + return 1; + } + } } From fb258015bc2a5a8e807d436391b18bef64742bcc Mon Sep 17 00:00:00 2001 From: maca88 Date: Sat, 7 Apr 2018 21:14:17 +0200 Subject: [PATCH 2/8] Added support for batching cache put operation --- src/AsyncGenerator.yml | 1 + .../Async/CacheTest/BatchableCacheFixture.cs | 259 +++++++++++++++--- .../Async/CacheTest/Caches/BatchableCache.cs | 46 +++- .../CacheTest/BatchableCacheFixture.cs | 259 +++++++++++++++--- .../CacheTest/Caches/BatchableCache.cs | 40 ++- .../Async/Cache/AbstractCacheBatch.cs | 47 ++++ src/NHibernate/Async/Cache/CacheBatcher.cs | 104 +++++++ src/NHibernate/Async/Cache/CachePutBatch.cs | 55 ++++ .../Async/Cache/IBatchableWriteCache.cs | 43 +++ .../Async/Cache/ICacheConcurrencyStrategy.cs | 37 +++ .../Async/Cache/NonstrictReadWriteCache.cs | 75 +++++ src/NHibernate/Async/Cache/ReadOnlyCache.cs | 72 +++++ src/NHibernate/Async/Cache/ReadWriteCache.cs | 
89 ++++++ .../Engine/Loading/CollectionLoadContext.cs | 39 ++- src/NHibernate/Async/Engine/TwoPhaseLoad.cs | 46 +++- src/NHibernate/Async/Loader/Loader.cs | 5 +- src/NHibernate/Cache/AbstractCacheBatch.cs | 54 ++++ src/NHibernate/Cache/CacheBatcher.cs | 125 +++++++++ src/NHibernate/Cache/CachePutBatch.cs | 48 ++++ src/NHibernate/Cache/CachePutData.cs | 34 +++ src/NHibernate/Cache/IBatchableReadCache.cs | 2 +- src/NHibernate/Cache/IBatchableWriteCache.cs | 39 +++ .../Cache/ICacheConcurrencyStrategy.cs | 41 +++ .../Cache/NonstrictReadWriteCache.cs | 69 +++++ src/NHibernate/Cache/ReadOnlyCache.cs | 66 +++++ src/NHibernate/Cache/ReadWriteCache.cs | 83 ++++++ .../Engine/Loading/CollectionLoadContext.cs | 39 ++- src/NHibernate/Engine/TwoPhaseLoad.cs | 40 ++- src/NHibernate/Loader/Loader.cs | 5 +- 29 files changed, 1750 insertions(+), 112 deletions(-) create mode 100644 src/NHibernate/Async/Cache/AbstractCacheBatch.cs create mode 100644 src/NHibernate/Async/Cache/CacheBatcher.cs create mode 100644 src/NHibernate/Async/Cache/CachePutBatch.cs create mode 100644 src/NHibernate/Async/Cache/IBatchableWriteCache.cs create mode 100644 src/NHibernate/Cache/AbstractCacheBatch.cs create mode 100644 src/NHibernate/Cache/CacheBatcher.cs create mode 100644 src/NHibernate/Cache/CachePutBatch.cs create mode 100644 src/NHibernate/Cache/CachePutData.cs create mode 100644 src/NHibernate/Cache/IBatchableWriteCache.cs diff --git a/src/AsyncGenerator.yml b/src/AsyncGenerator.yml index 55d84dbc7e2..05062816966 100644 --- a/src/AsyncGenerator.yml +++ b/src/AsyncGenerator.yml @@ -259,6 +259,7 @@ methodRules: - containingType: NHibernate.Cache.ICache name: Unlock - containingType: NHibernate.Cache.IBatchableReadCache + - containingType: NHibernate.Cache.IBatchableReadWriteCache name: Cache - filters: - containingNamespace: NHibernate diff --git a/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs b/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs index 
a4efe7439d3..b8f96393a28 100644 --- a/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs +++ b/src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs @@ -37,6 +37,8 @@ public class BatchableCacheFixtureAsync : TestCase protected override string MappingsAssembly => "NHibernate.Test"; + protected override string CacheConcurrencyStrategy => null; + protected override void Configure(Configuration configuration) { configuration.SetProperty(Environment.UseSecondLevelCache, "true"); @@ -44,6 +46,12 @@ protected override void Configure(Configuration configuration) configuration.SetProperty(Environment.CacheProvider, typeof(BatchableCacheProvider).AssemblyQualifiedName); } + protected override bool CheckDatabaseWasCleaned() + { + base.CheckDatabaseWasCleaned(); + return true; // We are unable to delete read-only items. + } + protected override void OnSetUp() { using (var s = Sfi.OpenSession()) @@ -66,6 +74,22 @@ protected override void OnSetUp() } s.Save(parent); } + for (var i = 1; i <= totalItems; i++) + { + var parent = new ReadWrite + { + Name = $"Name{i}" + }; + for (var j = 1; j <= totalItems; j++) + { + var child = new ReadWriteItem + { + Parent = parent + }; + parent.Items.Add(child); + } + s.Save(parent); + } tx.Commit(); } } @@ -75,8 +99,7 @@ protected override void OnTearDown() using (var s = OpenSession()) using (var tx = s.BeginTransaction()) { - s.Delete("from ReadOnly"); - s.Flush(); + s.Delete("from ReadWrite"); tx.Commit(); } } @@ -99,88 +122,96 @@ public async Task MultipleGetReadOnlyCollectionTestAsync() } // Batch size 5 - var testCases = new List>> + var testCases = new List>> { // When the cache is empty, GetMultiple will be called two times. One time in type // DefaultInitializeCollectionEventListener and the other time in BatchingCollectionInitializer. 
- new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3, 4}, // triggered by InitializeCollectionFromCache method of DefaultInitializeCollectionEventListener type new[] {1, 2, 3, 4, 5}, // triggered by Initialize method of BatchingCollectionInitializer type }, + new[] {0, 1, 2, 3, 4}, null ), // When there are not enough uninitialized collections after the demanded one to fill the batch, // the nearest before the demanded collection are added. - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2, 1}, new[] {5, 3, 2, 1, 0}, }, + new[] {1, 2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2, 1}, new[] {4, 3, 2, 1, 0}, }, + new[] {1, 2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3, 4} // 0 get assembled and no further processing is done }, + null, (i) => i % 2 == 0 // Cache all even indexes before loading ), - new Tuple>( + new Tuple>( 1, new int[][] { new[] {1, 2, 3, 4, 5}, // 2 and 4 get assembled inside InitializeCollectionFromCache new[] {3, 5, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2, 1}, // 4 and 2 get assembled inside InitializeCollectionFromCache new[] {3, 1, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3, 4}, // 1 and 3 get assembled inside InitializeCollectionFromCache new[] {2, 4, 5} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ), - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2, 1}, // 5, 3 and 1 get assembled inside InitializeCollectionFromCache new[] {2, 0} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ) }; foreach (var tuple in testCases) { - await (AssertMultipleGetCollectionCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3)); + await (AssertMultipleCacheCollectionCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4)); } } @@ -201,88 +232,96 @@ public async Task MultipleGetReadOnlyTestAsync() await (tx.CommitAsync()); } 
// Batch size 3 - var parentTestCases = new List>> + var parentTestCases = new List>> { // When the cache is empty, GetMultiple will be called two times. One time in type // DefaultLoadEventListener and the other time in BatchingEntityLoader. - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type new[] {1, 2, 3}, // triggered by Load method of BatchingEntityLoader type }, + new[] {0, 1, 2}, null ), // When there are not enough uninitialized entities after the demanded one to fill the batch, // the nearest before the demanded entity are added. - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3}, new[] {5, 3, 2}, }, + new[] {3, 4, 5}, null ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3}, new[] {4, 3, 2}, }, + new[] {3, 4, 5}, null ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2} // 0 get assembled and no further processing is done }, + null, (i) => i % 2 == 0 // Cache all even indexes before loading ), - new Tuple>( + new Tuple>( 1, new int[][] { new[] {1, 2, 3}, // 2 gets assembled inside LoadFromSecondLevelCache new[] {3, 4, 5} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3}, // 4 gets assembled inside LoadFromSecondLevelCache new[] {3, 2, 1} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2}, // 1 gets assembled inside LoadFromSecondLevelCache new[] {2, 3, 4} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ), - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3}, // 5 and 3 get assembled inside LoadFromSecondLevelCache new[] {2, 1, 0} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ) }; foreach (var tuple in parentTestCases) { - await (AssertMultipleGetCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3)); + await (AssertMultipleCacheCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4)); } } @@ -303,89 +342,200 @@ public 
async Task MultipleGetReadOnlyItemTestAsync() await (tx.CommitAsync()); } // Batch size 4 - var parentTestCases = new List>> + var parentTestCases = new List>> { // When the cache is empty, GetMultiple will be called two times. One time in type // DefaultLoadEventListener and the other time in BatchingEntityLoader. - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type new[] {1, 2, 3, 4}, // triggered by Load method of BatchingEntityLoader type }, + new[] {0, 1, 2, 3}, null ), // When there are not enough uninitialized entities after the demanded one to fill the batch, // the nearest before the demanded entity are added. - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2}, new[] {5, 3, 2, 1}, }, + new[] {2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2}, new[] {4, 3, 2, 1}, }, + new[] {2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3} // 0 get assembled and no further processing is done }, + null, (i) => i % 2 == 0 // Cache all even indexes before loading ), - new Tuple>( + new Tuple>( 1, new int[][] { new[] {1, 2, 3, 4}, // 2 and 4 get assembled inside LoadFromSecondLevelCache new[] {3, 5, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2}, // 4 and 2 get assembled inside LoadFromSecondLevelCache new[] {3, 1, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3}, // 1 and 3 get assembled inside LoadFromSecondLevelCache new[] {2, 4, 5} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ), - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2}, // 5 and 3 get assembled inside LoadFromSecondLevelCache new[] {2, 1, 0} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ) }; foreach (var tuple in parentTestCases) { - await (AssertMultipleGetCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3)); + 
await (AssertMultipleCacheCallsAsync(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4)); + } + } + + [Test] + public async Task MultiplePutReadWriteTestAsync() + { + var persister = Sfi.GetEntityPersister(typeof(ReadWrite).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + await (cache.ClearAsync(CancellationToken.None)); + cache.ClearStatistics(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = await (s.Query().ToListAsync()); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + await (tx.CommitAsync()); + } + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(2)); + + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2}, + new[] {3, 4, 5} + }, + cache.PutMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2}, + new[] {3, 4, 5} + }, + cache.LockMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2}, + new[] {3, 4, 5} + }, + cache.UnlockMultipleCalls + ); + } + + [Test] + public async Task MultiplePutReadWriteItemTestAsync() + { + var persister = Sfi.GetCollectionPersister($"{typeof(ReadWrite).FullName}.Items"); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + await (cache.ClearAsync(CancellationToken.None)); + cache.ClearStatistics(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = await (s.Query().ToListAsync()); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + + // Initialize the first item collection + await (NHibernateUtil.InitializeAsync(items.First(o => o.Id == ids[0]).Items)); + await (tx.CommitAsync()); } + Assert.That(cache.PutCalls, 
Has.Count.EqualTo(0)); + // Called in: DefaultInitializeCollectionEventListener, BatchingCollectionInitializer and ReadWriteCache + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(3)); + + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2, 3, 4} + }, + cache.PutMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2, 3, 4} + }, + cache.LockMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2, 3, 4} + }, + cache.UnlockMultipleCalls + ); } [Test] @@ -422,7 +572,7 @@ public async Task UpdateTimestampsCacheTestAsync() } } - private async Task AssertMultipleGetCallsAsync(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken)) + private async Task AssertMultipleCacheCallsAsync(List ids, int idIndex, int[][] fetchedIdIndexes, int[] putIdIndexes, Func cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken)) where TEntity : CacheEntity { var persister = Sfi.GetEntityPersister(typeof(TEntity).FullName); @@ -445,8 +595,7 @@ public async Task UpdateTimestampsCacheTestAsync() using (var s = Sfi.OpenSession()) using (var tx = s.BeginTransaction()) { - cache.GetCalls.Clear(); - cache.GetMultipleCalls.Clear(); + cache.ClearStatistics(); foreach (var id in ids) { @@ -455,7 +604,19 @@ public async Task UpdateTimestampsCacheTestAsync() var item = await (s.GetAsync(ids[idIndex], cancellationToken)); Assert.That(item, Is.Not.Null); Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + if (putIdIndexes == null) + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(0)); + } + else + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(1)); + Assert.That( + cache.PutMultipleCalls[0].OfType().Select(o => (int) o.Key), + 
Is.EquivalentTo(putIdIndexes.Select(o => ids[o]))); + } for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) { @@ -468,7 +629,18 @@ public async Task UpdateTimestampsCacheTestAsync() } } - private async Task AssertMultipleGetCollectionCallsAsync(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken)) + private void AssertEquivalent(List ids, int[][] expectedIdIndexes, List result) + { + Assert.That(result, Has.Count.EqualTo(expectedIdIndexes.GetLength(0))); + for (int i = 0; i < expectedIdIndexes.GetLength(0); i++) + { + Assert.That( + result[i].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(expectedIdIndexes[i].Select(o => ids[o]))); + } + } + + private async Task AssertMultipleCacheCollectionCallsAsync(List ids, int idIndex, int[][] fetchedIdIndexes, int[] putIdIndexes, Func cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken)) { var persister = Sfi.GetCollectionPersister($"{typeof(ReadOnly).FullName}.Items"); var cache = (BatchableCache) persister.Cache.Cache; @@ -491,8 +663,7 @@ public async Task UpdateTimestampsCacheTestAsync() using (var s = Sfi.OpenSession()) using (var tx = s.BeginTransaction()) { - cache.GetCalls.Clear(); - cache.GetMultipleCalls.Clear(); + cache.ClearStatistics(); foreach (var id in ids) { @@ -502,7 +673,19 @@ public async Task UpdateTimestampsCacheTestAsync() Assert.That(item, Is.Not.Null); await (NHibernateUtil.InitializeAsync(item.Items, cancellationToken)); Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + if (putIdIndexes == null) + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(0)); + } + else + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(1)); + Assert.That( + cache.PutMultipleCalls[0].OfType().Select(o => (int) o.Key), + 
Is.EquivalentTo(putIdIndexes.Select(o => ids[o]))); + } for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) { diff --git a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs index 51b4d300e5a..055c383d15b 100644 --- a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs @@ -19,9 +19,52 @@ namespace NHibernate.Test.CacheTest.Caches { - public partial class BatchableCache : ICache, IBatchableReadCache + public partial class BatchableCache : ICache, IBatchableReadWriteCache { + public Task PutMultipleAsync(object[] keys, object[] values, CancellationToken cancellationToken) + { + try + { + PutMultipleCalls.Add(keys); + for (int i = 0; i < keys.Length; i++) + { + _hashtable[keys[i]] = values[i]; + } + return Task.CompletedTask; + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + public Task LockMultipleAsync(object[] keys, CancellationToken cancellationToken) + { + try + { + LockMultipleCalls.Add(keys); + return Task.CompletedTask; + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + public Task UnlockMultipleAsync(object[] keys, CancellationToken cancellationToken) + { + try + { + UnlockMultipleCalls.Add(keys); + return Task.CompletedTask; + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + #region ICache Members /// @@ -61,6 +104,7 @@ public Task PutAsync(object key, object value, CancellationToken cancellationTok { try { + PutCalls.Add(key); _hashtable[key] = value; return Task.CompletedTask; } diff --git a/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs b/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs index 7cb567dd200..84a679aa8d3 100644 --- a/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs +++ b/src/NHibernate.Test/CacheTest/BatchableCacheFixture.cs @@ -25,6 +25,8 @@ public class BatchableCacheFixture : TestCase protected 
override string MappingsAssembly => "NHibernate.Test"; + protected override string CacheConcurrencyStrategy => null; + protected override void Configure(Configuration configuration) { configuration.SetProperty(Environment.UseSecondLevelCache, "true"); @@ -32,6 +34,12 @@ protected override void Configure(Configuration configuration) configuration.SetProperty(Environment.CacheProvider, typeof(BatchableCacheProvider).AssemblyQualifiedName); } + protected override bool CheckDatabaseWasCleaned() + { + base.CheckDatabaseWasCleaned(); + return true; // We are unable to delete read-only items. + } + protected override void OnSetUp() { using (var s = Sfi.OpenSession()) @@ -54,6 +62,22 @@ protected override void OnSetUp() } s.Save(parent); } + for (var i = 1; i <= totalItems; i++) + { + var parent = new ReadWrite + { + Name = $"Name{i}" + }; + for (var j = 1; j <= totalItems; j++) + { + var child = new ReadWriteItem + { + Parent = parent + }; + parent.Items.Add(child); + } + s.Save(parent); + } tx.Commit(); } } @@ -63,8 +87,7 @@ protected override void OnTearDown() using (var s = OpenSession()) using (var tx = s.BeginTransaction()) { - s.Delete("from ReadOnly"); - s.Flush(); + s.Delete("from ReadWrite"); tx.Commit(); } } @@ -87,88 +110,96 @@ public void MultipleGetReadOnlyCollectionTest() } // Batch size 5 - var testCases = new List>> + var testCases = new List>> { // When the cache is empty, GetMultiple will be called two times. One time in type // DefaultInitializeCollectionEventListener and the other time in BatchingCollectionInitializer. 
- new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3, 4}, // triggered by InitializeCollectionFromCache method of DefaultInitializeCollectionEventListener type new[] {1, 2, 3, 4, 5}, // triggered by Initialize method of BatchingCollectionInitializer type }, + new[] {0, 1, 2, 3, 4}, null ), // When there are not enough uninitialized collections after the demanded one to fill the batch, // the nearest before the demanded collection are added. - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2, 1}, new[] {5, 3, 2, 1, 0}, }, + new[] {1, 2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2, 1}, new[] {4, 3, 2, 1, 0}, }, + new[] {1, 2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3, 4} // 0 get assembled and no further processing is done }, + null, (i) => i % 2 == 0 // Cache all even indexes before loading ), - new Tuple>( + new Tuple>( 1, new int[][] { new[] {1, 2, 3, 4, 5}, // 2 and 4 get assembled inside InitializeCollectionFromCache new[] {3, 5, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2, 1}, // 4 and 2 get assembled inside InitializeCollectionFromCache new[] {3, 1, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3, 4}, // 1 and 3 get assembled inside InitializeCollectionFromCache new[] {2, 4, 5} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ), - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2, 1}, // 5, 3 and 1 get assembled inside InitializeCollectionFromCache new[] {2, 0} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ) }; foreach (var tuple in testCases) { - AssertMultipleGetCollectionCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3); + AssertMultipleCacheCollectionCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); } } @@ -189,88 +220,96 @@ public void MultipleGetReadOnlyTest() tx.Commit(); } // Batch size 3 - var parentTestCases = new List>> 
+ var parentTestCases = new List>> { // When the cache is empty, GetMultiple will be called two times. One time in type // DefaultLoadEventListener and the other time in BatchingEntityLoader. - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type new[] {1, 2, 3}, // triggered by Load method of BatchingEntityLoader type }, + new[] {0, 1, 2}, null ), // When there are not enough uninitialized entities after the demanded one to fill the batch, // the nearest before the demanded entity are added. - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3}, new[] {5, 3, 2}, }, + new[] {3, 4, 5}, null ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3}, new[] {4, 3, 2}, }, + new[] {3, 4, 5}, null ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2} // 0 get assembled and no further processing is done }, + null, (i) => i % 2 == 0 // Cache all even indexes before loading ), - new Tuple>( + new Tuple>( 1, new int[][] { new[] {1, 2, 3}, // 2 gets assembled inside LoadFromSecondLevelCache new[] {3, 4, 5} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3}, // 4 gets assembled inside LoadFromSecondLevelCache new[] {3, 2, 1} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2}, // 1 gets assembled inside LoadFromSecondLevelCache new[] {2, 3, 4} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ), - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3}, // 5 and 3 get assembled inside LoadFromSecondLevelCache new[] {2, 1, 0} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ) }; foreach (var tuple in parentTestCases) { - AssertMultipleGetCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3); + AssertMultipleCacheCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); } } @@ -291,89 +330,200 @@ public void MultipleGetReadOnlyItemTest() tx.Commit(); } // Batch size 4 - var 
parentTestCases = new List>> + var parentTestCases = new List>> { // When the cache is empty, GetMultiple will be called two times. One time in type // DefaultLoadEventListener and the other time in BatchingEntityLoader. - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type new[] {1, 2, 3, 4}, // triggered by Load method of BatchingEntityLoader type }, + new[] {0, 1, 2, 3}, null ), // When there are not enough uninitialized entities after the demanded one to fill the batch, // the nearest before the demanded entity are added. - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2}, new[] {5, 3, 2, 1}, }, + new[] {2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2}, new[] {4, 3, 2, 1}, }, + new[] {2, 3, 4, 5}, null ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3} // 0 get assembled and no further processing is done }, + null, (i) => i % 2 == 0 // Cache all even indexes before loading ), - new Tuple>( + new Tuple>( 1, new int[][] { new[] {1, 2, 3, 4}, // 2 and 4 get assembled inside LoadFromSecondLevelCache new[] {3, 5, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 5, new int[][] { new[] {5, 4, 3, 2}, // 4 and 2 get assembled inside LoadFromSecondLevelCache new[] {3, 1, 0} }, + new[] {1, 3, 5}, (i) => i % 2 == 0 ), - new Tuple>( + new Tuple>( 0, new int[][] { new[] {0, 1, 2, 3}, // 1 and 3 get assembled inside LoadFromSecondLevelCache new[] {2, 4, 5} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ), - new Tuple>( + new Tuple>( 4, new int[][] { new[] {4, 5, 3, 2}, // 5 and 3 get assembled inside LoadFromSecondLevelCache new[] {2, 1, 0} }, + new[] {0, 2, 4}, (i) => i % 2 != 0 ) }; foreach (var tuple in parentTestCases) { - AssertMultipleGetCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3); + AssertMultipleCacheCalls(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4); + } + } + + [Test] + public 
void MultiplePutReadWriteTest() + { + var persister = Sfi.GetEntityPersister(typeof(ReadWrite).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + cache.Clear(); + cache.ClearStatistics(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = s.Query().ToList(); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + tx.Commit(); + } + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(2)); + + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2}, + new[] {3, 4, 5} + }, + cache.PutMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2}, + new[] {3, 4, 5} + }, + cache.LockMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2}, + new[] {3, 4, 5} + }, + cache.UnlockMultipleCalls + ); + } + + [Test] + public void MultiplePutReadWriteItemTest() + { + var persister = Sfi.GetCollectionPersister($"{typeof(ReadWrite).FullName}.Items"); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var cache = (BatchableCache) persister.Cache.Cache; + var ids = new List(); + + cache.Clear(); + cache.ClearStatistics(); + + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var items = s.Query().ToList(); + ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id)); + + // Initialize the first item collection + NHibernateUtil.Initialize(items.First(o => o.Id == ids[0]).Items); + tx.Commit(); } + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); + // Called in: DefaultInitializeCollectionEventListener, BatchingCollectionInitializer and ReadWriteCache + Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(3)); + + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2, 3, 4} + }, + 
cache.PutMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2, 3, 4} + }, + cache.LockMultipleCalls + ); + AssertEquivalent( + ids, + new int[][] + { + new[] {0, 1, 2, 3, 4} + }, + cache.UnlockMultipleCalls + ); } [Test] @@ -410,7 +560,7 @@ public void UpdateTimestampsCacheTest() } } - private void AssertMultipleGetCalls(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null) + private void AssertMultipleCacheCalls(List ids, int idIndex, int[][] fetchedIdIndexes, int[] putIdIndexes, Func cacheBeforeLoadFn = null) where TEntity : CacheEntity { var persister = Sfi.GetEntityPersister(typeof(TEntity).FullName); @@ -433,8 +583,7 @@ private void AssertMultipleGetCalls(List ids, int idIndex, int[][] using (var s = Sfi.OpenSession()) using (var tx = s.BeginTransaction()) { - cache.GetCalls.Clear(); - cache.GetMultipleCalls.Clear(); + cache.ClearStatistics(); foreach (var id in ids) { @@ -443,7 +592,19 @@ private void AssertMultipleGetCalls(List ids, int idIndex, int[][] var item = s.Get(ids[idIndex]); Assert.That(item, Is.Not.Null); Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + if (putIdIndexes == null) + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(0)); + } + else + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(1)); + Assert.That( + cache.PutMultipleCalls[0].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(putIdIndexes.Select(o => ids[o]))); + } for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) { @@ -456,7 +617,18 @@ private void AssertMultipleGetCalls(List ids, int idIndex, int[][] } } - private void AssertMultipleGetCollectionCalls(List ids, int idIndex, int[][] fetchedIdIndexes, Func cacheBeforeLoadFn = null) + private void AssertEquivalent(List ids, int[][] expectedIdIndexes, List result) + { + Assert.That(result, 
Has.Count.EqualTo(expectedIdIndexes.GetLength(0))); + for (int i = 0; i < expectedIdIndexes.GetLength(0); i++) + { + Assert.That( + result[i].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(expectedIdIndexes[i].Select(o => ids[o]))); + } + } + + private void AssertMultipleCacheCollectionCalls(List ids, int idIndex, int[][] fetchedIdIndexes, int[] putIdIndexes, Func cacheBeforeLoadFn = null) { var persister = Sfi.GetCollectionPersister($"{typeof(ReadOnly).FullName}.Items"); var cache = (BatchableCache) persister.Cache.Cache; @@ -479,8 +651,7 @@ private void AssertMultipleGetCollectionCalls(List ids, int idIndex, int[][ using (var s = Sfi.OpenSession()) using (var tx = s.BeginTransaction()) { - cache.GetCalls.Clear(); - cache.GetMultipleCalls.Clear(); + cache.ClearStatistics(); foreach (var id in ids) { @@ -490,7 +661,19 @@ private void AssertMultipleGetCollectionCalls(List ids, int idIndex, int[][ Assert.That(item, Is.Not.Null); NHibernateUtil.Initialize(item.Items); Assert.That(cache.GetCalls, Has.Count.EqualTo(0)); + Assert.That(cache.PutCalls, Has.Count.EqualTo(0)); Assert.That(cache.GetMultipleCalls, Has.Count.EqualTo(fetchedIdIndexes.GetLength(0))); + if (putIdIndexes == null) + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(0)); + } + else + { + Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(1)); + Assert.That( + cache.PutMultipleCalls[0].OfType().Select(o => (int) o.Key), + Is.EquivalentTo(putIdIndexes.Select(o => ids[o]))); + } for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++) { diff --git a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs index cb202520c64..de2c7edcca6 100644 --- a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs @@ -9,15 +9,42 @@ namespace NHibernate.Test.CacheTest.Caches { - public partial class BatchableCache : ICache, IBatchableReadCache + public partial class BatchableCache : 
ICache, IBatchableReadWriteCache { private readonly IDictionary _hashtable = new Hashtable(); private readonly string _regionName; public List GetMultipleCalls { get; } = new List(); + public List PutMultipleCalls { get; } = new List(); + + public List LockMultipleCalls { get; } = new List(); + + public List UnlockMultipleCalls { get; } = new List(); + public List GetCalls { get; } = new List(); + public List PutCalls { get; } = new List(); + + public void PutMultiple(object[] keys, object[] values) + { + PutMultipleCalls.Add(keys); + for (int i = 0; i < keys.Length; i++) + { + _hashtable[keys[i]] = values[i]; + } + } + + public void LockMultiple(object[] keys) + { + LockMultipleCalls.Add(keys); + } + + public void UnlockMultiple(object[] keys) + { + UnlockMultipleCalls.Add(keys); + } + #region ICache Members public BatchableCache(string regionName) @@ -46,6 +73,7 @@ public object[] GetMultiple(object[] keys) /// public void Put(object key, object value) { + PutCalls.Add(key); _hashtable[key] = value; } @@ -61,6 +89,16 @@ public void Clear() _hashtable.Clear(); } + public void ClearStatistics() + { + GetCalls.Clear(); + GetMultipleCalls.Clear(); + PutMultipleCalls.Clear(); + PutCalls.Clear(); + UnlockMultipleCalls.Clear(); + LockMultipleCalls.Clear(); + } + /// public void Destroy() { diff --git a/src/NHibernate/Async/Cache/AbstractCacheBatch.cs b/src/NHibernate/Async/Cache/AbstractCacheBatch.cs new file mode 100644 index 00000000000..c019130ef65 --- /dev/null +++ b/src/NHibernate/Async/Cache/AbstractCacheBatch.cs @@ -0,0 +1,47 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections.Generic; +using System.Text; +using NHibernate.Engine; + +namespace NHibernate.Cache +{ + using System.Threading.Tasks; + using System.Threading; + internal abstract partial class AbstractCacheBatch + { + + public abstract Task ExecuteAsync(CancellationToken cancellationToken); + } + + internal abstract partial class AbstractCacheBatch : AbstractCacheBatch + { + + public override sealed Task ExecuteAsync(CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + try + { + return ExecuteAsync(_batch.ToArray(), cancellationToken); + } + catch (Exception ex) + { + return Task.FromException(ex); + } + } + + protected abstract Task ExecuteAsync(TData[] data, CancellationToken cancellationToken); + } +} diff --git a/src/NHibernate/Async/Cache/CacheBatcher.cs b/src/NHibernate/Async/Cache/CacheBatcher.cs new file mode 100644 index 00000000000..bce22267614 --- /dev/null +++ b/src/NHibernate/Async/Cache/CacheBatcher.cs @@ -0,0 +1,104 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; +using NHibernate.Cache.Access; +using NHibernate.Engine; +using NHibernate.Persister.Collection; +using NHibernate.Persister.Entity; + +namespace NHibernate.Cache +{ + using System.Threading.Tasks; + using System.Threading; + internal partial class CacheBatcher + { + + /// + /// Adds a put operation to the batch. If the batch size reached the persister batch + /// size, the batch will be executed. 
+ /// + /// The entity persister. + /// The data to put in the cache. + /// A cancellation token that can be used to cancel the work + public async Task AddToBatchAsync(IEntityPersister persister, CachePutData data, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (ShouldExecuteBatch(persister, _putBatch)) + { + await (ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false); + _currentPersister = persister; + _currentBatch = _putBatch = new CachePutBatch(_session, persister.Cache); + } + if (Log.IsDebugEnabled()) + { + Log.Debug("Adding a put operation to batch for entity {0} and key {1}", persister.EntityName, data.Key); + } + _putBatch.Add(data); + } + + /// + /// Adds a put operation to the batch. If the batch size reached the persister batch + /// size, the batch will be executed. + /// + /// The collection persister. + /// The data to put in the cache. + /// A cancellation token that can be used to cancel the work + public async Task AddToBatchAsync(ICollectionPersister persister, CachePutData data, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (ShouldExecuteBatch(persister, _putBatch)) + { + await (ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false); + _currentPersister = persister; + _currentBatch = _putBatch = new CachePutBatch(_session, persister.Cache); + } + if (Log.IsDebugEnabled()) + { + Log.Debug("Adding a put operation to batch for collection role {0} and key {1}", persister.Role, data.Key); + } + _putBatch.Add(data); + } + + /// + /// Executes the current batch. 
+ /// + /// A cancellation token that can be used to cancel the work + public async Task ExecuteBatchAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (_currentBatch == null || _currentBatch.BatchSize == 0) + { + return; + } + + try + { + Stopwatch duration = null; + if (Log.IsDebugEnabled()) + { + duration = Stopwatch.StartNew(); + } + await (_currentBatch.ExecuteAsync(cancellationToken)).ConfigureAwait(false); + if (Log.IsDebugEnabled() && duration != null) + { + Log.Debug("ExecuteBatch for {0} keys took {1} ms", _currentBatch.BatchSize, duration.ElapsedMilliseconds); + } + } + finally + { + Cleanup(); + } + } + } +} diff --git a/src/NHibernate/Async/Cache/CachePutBatch.cs b/src/NHibernate/Async/Cache/CachePutBatch.cs new file mode 100644 index 00000000000..78bee14f440 --- /dev/null +++ b/src/NHibernate/Async/Cache/CachePutBatch.cs @@ -0,0 +1,55 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + + +using System; +using System.Linq; +using System.Collections; +using System.Collections.Generic; +using System.Text; +using NHibernate.Engine; + +namespace NHibernate.Cache +{ + using System.Threading.Tasks; + using System.Threading; + internal partial class CachePutBatch : AbstractCacheBatch + { + + protected override async Task ExecuteAsync(CachePutData[] data, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + var length = data.Length; + var keys = new CacheKey[length]; + var values = new object[length]; + var versions = new object[length]; + var versionComparers = new IComparer[length]; + var minimalPuts = new bool[length]; + + for (int i = 0; i < length; i++) + { + var item = data[i]; + keys[i] = item.Key; + values[i] = item.Value; + versions[i] = item.Version; + versionComparers[i] = item.VersionComparer; + minimalPuts[i] = item.MinimalPut; + } + + var factory = Session.Factory; + var cacheStrategy = CacheConcurrencyStrategy; + var puts = await (cacheStrategy.PutMultipleAsync(keys, values, Session.Timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); + + if (factory.Statistics.IsStatisticsEnabled && puts.Any(o => o)) + { + factory.StatisticsImplementor.SecondLevelCachePut(cacheStrategy.RegionName); + } + } + } +} diff --git a/src/NHibernate/Async/Cache/IBatchableWriteCache.cs b/src/NHibernate/Async/Cache/IBatchableWriteCache.cs new file mode 100644 index 00000000000..271eeff0059 --- /dev/null +++ b/src/NHibernate/Async/Cache/IBatchableWriteCache.cs @@ -0,0 +1,43 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections.Generic; +using System.Text; + +namespace NHibernate.Cache +{ + using System.Threading.Tasks; + using System.Threading; + public partial interface IBatchableReadWriteCache : IBatchableReadCache + { + /// + /// Add multiple objects to the cache. + /// + /// The keys to cache. + /// The objects to cache. + /// A cancellation token that can be used to cancel the work + Task PutMultipleAsync(object[] keys, object[] values, CancellationToken cancellationToken); + + /// + /// Lock the objects from being changed by another thread. + /// + /// The keys to lock. + /// A cancellation token that can be used to cancel the work + Task LockMultipleAsync(object[] keys, CancellationToken cancellationToken); + + /// + /// Unlock the objects that were previously locked. + /// + /// The keys to unlock. + /// A cancellation token that can be used to cancel the work + Task UnlockMultipleAsync(object[] keys, CancellationToken cancellationToken); + } +} diff --git a/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs index 08eab473bc9..89c44f6eb6c 100644 --- a/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs @@ -156,5 +156,42 @@ public static async Task GetMultipleAsync(this ICacheConcurrencyStrate } return objects; } + + /// + /// Attempt to cache objects, after loading them from the database. + /// + /// The cache concurrency strategy. + /// The keys (id) of the objects to put in the Cache. + /// The objects to put in the cache. + /// A timestamp prior to the transaction start time. + /// The version numbers of the objects we are putting. + /// The comparers to be used to compare version numbers + /// Indicates that the cache should avoid a put if the item is already cached. 
+ /// A cancellation token that can be used to cancel the work + /// if the objects were successfully cached. + /// + //6.0 TODO: Merge into ICacheConcurrencyStrategy. + public static async Task PutMultipleAsync(this ICacheConcurrencyStrategy cache, CacheKey[] keys, object[] values, long timestamp, + object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + switch (cache) + { + case ReadOnlyCache readOnly: + return await (readOnly.PutMultipleAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); + case ReadWriteCache readWrite: + return await (readWrite.PutMultipleAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); + case NonstrictReadWriteCache nonstrictReadWrite: + return await (nonstrictReadWrite.PutMultipleAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); + } + + // Fallback to Put + var result = new bool[keys.Length]; + for (var i = 0; i < keys.Length; i++) + { + result[i] = await (cache.PutAsync(keys[i], values[i], timestamp, versions[i], versionComparers[i], minimalPuts[i], cancellationToken)).ConfigureAwait(false); + } + return result; + } } } diff --git a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs index d22ea3b3157..3fa4dfe789a 100644 --- a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs @@ -10,6 +10,7 @@ using System; using System.Collections; +using System.Collections.Generic; using System.Linq; using NHibernate.Cache.Access; @@ -73,6 +74,80 @@ async Task InternalGetMultipleAsync() } } + /// + /// Add multiple items to the cache + /// + public Task PutMultipleAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, 
IComparer[] versionComparers, + bool[] minimalPuts, CancellationToken cancellationToken) + { + if (_batchableReadWriteCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); + } + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return InternalPutMultipleAsync(); + async Task InternalPutMultipleAsync() + { + var result = new bool[keys.Length]; + if (timestamp == long.MinValue) + { + // MinValue means cache is disabled + return result; + } + + var checkKeys = new List(); + var checkKeyIndexes = new List(); + for (var i = 0; i < minimalPuts.Length; i++) + { + if (minimalPuts[i]) + { + checkKeys.Add(keys[i]); + checkKeyIndexes.Add(i); + } + } + var skipKeyIndexes = new HashSet(); + if (checkKeys.Any()) + { + var objects = await (_batchableReadWriteCache.GetMultipleAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); + for (var i = 0; i < objects.Length; i++) + { + if (objects[i] != null) + { + if (log.IsDebugEnabled()) + { + log.Debug("item already cached: {0}", checkKeys[i]); + } + skipKeyIndexes.Add(checkKeyIndexes[i]); + } + } + } + + if (skipKeyIndexes.Count == keys.Length) + { + return result; + } + + var putKeys = new object[keys.Length - skipKeyIndexes.Count]; + var putValues = new object[putKeys.Length]; + var j = 0; + for (var i = 0; i < keys.Length; i++) + { + if (skipKeyIndexes.Contains(i)) + { + continue; + } + putKeys[j] = keys[i]; + putValues[j++] = values[i]; + result[i] = true; + } + await (_batchableReadWriteCache.PutMultipleAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); + return result; + } + } + /// /// Add an item to the cache /// diff --git a/src/NHibernate/Async/Cache/ReadOnlyCache.cs b/src/NHibernate/Async/Cache/ReadOnlyCache.cs index aa2cff7e871..91f22d778a4 100644 --- a/src/NHibernate/Async/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Async/Cache/ReadOnlyCache.cs @@ -10,6 +10,7 @@ using 
System; using System.Collections; +using System.Collections.Generic; using System.Linq; using NHibernate.Cache.Access; @@ -80,6 +81,77 @@ public Task LockAsync(CacheKey key, object version, CancellationToken } } + public Task PutMultipleAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts, CancellationToken cancellationToken) + { + if (_batchableReadWriteCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); + } + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return InternalPutMultipleAsync(); + async Task InternalPutMultipleAsync() + { + var result = new bool[keys.Length]; + if (timestamp == long.MinValue) + { + // MinValue means cache is disabled + return result; + } + + var checkKeys = new List(); + var checkKeyIndexes = new List(); + for (var i = 0; i < minimalPuts.Length; i++) + { + if (minimalPuts[i]) + { + checkKeys.Add(keys[i]); + checkKeyIndexes.Add(i); + } + } + var skipKeyIndexes = new HashSet(); + if (checkKeys.Any()) + { + var objects = await (_batchableReadWriteCache.GetMultipleAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); + for (var i = 0; i < objects.Length; i++) + { + if (objects[i] != null) + { + if (log.IsDebugEnabled()) + { + log.Debug("item already cached: {0}", checkKeys[i]); + } + skipKeyIndexes.Add(checkKeyIndexes[i]); + } + } + } + + if (skipKeyIndexes.Count == keys.Length) + { + return result; + } + + var putKeys = new object[keys.Length - skipKeyIndexes.Count]; + var putValues = new object[putKeys.Length]; + var j = 0; + for (var i = 0; i < keys.Length; i++) + { + if (skipKeyIndexes.Contains(i)) + { + continue; + } + putKeys[j] = keys[i]; + putValues[j++] = values[i]; + result[i] = true; + } + await (_batchableReadWriteCache.PutMultipleAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); + return result; 
+ } + } + public async Task PutAsync(CacheKey key, object value, long timestamp, object version, IComparer versionComparator, bool minimalPut, CancellationToken cancellationToken) { diff --git a/src/NHibernate/Async/Cache/ReadWriteCache.cs b/src/NHibernate/Async/Cache/ReadWriteCache.cs index 286304c3558..f8fcc4bbeb1 100644 --- a/src/NHibernate/Async/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/ReadWriteCache.cs @@ -10,6 +10,7 @@ using System; using System.Collections; +using System.Collections.Generic; using System.Linq; using NHibernate.Cache.Access; @@ -172,6 +173,94 @@ public async Task LockAsync(CacheKey key, object version, Cancellatio } } + /// + /// Do not add an item to the cache unless the current transaction + /// timestamp is later than the timestamp at which the item was + /// invalidated. (Otherwise, a stale item might be re-added if the + /// database is operating in repeatable read isolation mode.) + /// + /// Whether the items were actually put into the cache + public Task PutMultipleAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts, CancellationToken cancellationToken) + { + if (_batchableReadWriteCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); + } + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return InternalPutMultipleAsync(); + async Task InternalPutMultipleAsync() + { + + var result = new bool[keys.Length]; + if (timestamp == long.MinValue) + { + // MinValue means cache is disabled + return result; + } + + using (await _lockObjectAsync.LockAsync()) + { + if (log.IsDebugEnabled()) + { + log.Debug("Caching: {0}", string.Join(",", keys.AsEnumerable())); + } + var keysArr = keys.Cast().ToArray(); + try + { + await (_batchableReadWriteCache.LockMultipleAsync(keysArr, cancellationToken)).ConfigureAwait(false); + var putBatch = new 
Dictionary(); + var lockables = await (_batchableReadWriteCache.GetMultipleAsync(keys, cancellationToken)).ConfigureAwait(false); + for (var i = 0; i < keys.Length; i++) + { + var key = keys[i]; + var version = versions[i]; + var lockable = (ILockable) lockables[i]; + bool puttable = lockable == null || + lockable.IsPuttable(timestamp, version, versionComparers[i]); + if (puttable) + { + putBatch.Add(key, new CachedItem(values[i], cache.NextTimestamp(), version)); + if (log.IsDebugEnabled()) + { + log.Debug("Cached: {0}", key); + } + result[i] = true; + } + else + { + if (log.IsDebugEnabled()) + { + if (lockable.IsLock) + { + log.Debug("Item was locked: {0}", key); + } + else + { + log.Debug("Item was already cached: {0}", key); + } + } + result[i] = false; + } + } + + if (putBatch.Count > 0) + { + await (_batchableReadWriteCache.PutMultipleAsync(putBatch.Keys.ToArray(), putBatch.Values.ToArray(), cancellationToken)).ConfigureAwait(false); + } + } + finally + { + await (_batchableReadWriteCache.UnlockMultipleAsync(keysArr, cancellationToken)).ConfigureAwait(false); + } + } + return result; + } + } + /// /// Do not add an item to the cache unless the current transaction /// timestamp is later than the timestamp at which the item was diff --git a/src/NHibernate/Async/Engine/Loading/CollectionLoadContext.cs b/src/NHibernate/Async/Engine/Loading/CollectionLoadContext.cs index 0ab01e61d22..fa14c601daf 100644 --- a/src/NHibernate/Async/Engine/Loading/CollectionLoadContext.cs +++ b/src/NHibernate/Async/Engine/Loading/CollectionLoadContext.cs @@ -8,6 +8,7 @@ //------------------------------------------------------------------------------ +using System; using System.Collections; using System.Collections.Generic; using System.Data.Common; @@ -110,10 +111,13 @@ private async Task EndLoadingCollectionsAsync(ICollectionPersister persister, IL log.Debug("{0} collections were found in result set for role: {1}", count, persister.Role); } + var cacheBatcher = new 
CacheBatcher(LoadContext.PersistenceContext.Session); for (int i = 0; i < count; i++) { - await (EndLoadingCollectionAsync(matchedCollectionEntries[i], persister, cancellationToken)).ConfigureAwait(false); + await (EndLoadingCollectionAsync(matchedCollectionEntries[i], persister, + data => cacheBatcher.AddToBatch(persister, data), cancellationToken)).ConfigureAwait(false); } + await (cacheBatcher.ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false); if (log.IsDebugEnabled()) { @@ -121,7 +125,8 @@ private async Task EndLoadingCollectionsAsync(ICollectionPersister persister, IL } } - private async Task EndLoadingCollectionAsync(LoadingCollectionEntry lce, ICollectionPersister persister, CancellationToken cancellationToken) + private async Task EndLoadingCollectionAsync(LoadingCollectionEntry lce, ICollectionPersister persister, + Action cacheBatchingHandler, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); if (log.IsDebugEnabled()) @@ -161,7 +166,7 @@ private async Task EndLoadingCollectionAsync(LoadingCollectionEntry lce, ICollec if (addToCache) { - await (AddCollectionToCacheAsync(lce, persister, cancellationToken)).ConfigureAwait(false); + await (AddCollectionToCacheAsync(lce, persister, cacheBatchingHandler, cancellationToken)).ConfigureAwait(false); } if (log.IsDebugEnabled()) @@ -179,8 +184,10 @@ private async Task EndLoadingCollectionAsync(LoadingCollectionEntry lce, ICollec /// Add the collection to the second-level cache /// The entry representing the collection to add /// The persister + /// The action for handling cache batching /// A cancellation token that can be used to cancel the work - private async Task AddCollectionToCacheAsync(LoadingCollectionEntry lce, ICollectionPersister persister, CancellationToken cancellationToken) + private async Task AddCollectionToCacheAsync(LoadingCollectionEntry lce, ICollectionPersister persister, + Action cacheBatchingHandler, CancellationToken cancellationToken) { 
cancellationToken.ThrowIfCancellationRequested(); ISessionImplementor session = LoadContext.PersistenceContext.Session; @@ -219,13 +226,27 @@ private async Task AddCollectionToCacheAsync(LoadingCollectionEntry lce, ICollec CollectionCacheEntry entry = new CollectionCacheEntry(lce.Collection, persister); CacheKey cacheKey = session.GenerateCacheKey(lce.Key, persister.KeyType, persister.Role); - bool put = await (persister.Cache.PutAsync(cacheKey, persister.CacheEntryStructure.Structure(entry), - session.Timestamp, version, versionComparator, - factory.Settings.IsMinimalPutsEnabled && session.CacheMode != CacheMode.Refresh, cancellationToken)).ConfigureAwait(false); - if (put && factory.Statistics.IsStatisticsEnabled) + if (persister.GetBatchSize() > 1 && persister.Cache.IsBatchingPutSupported()) { - factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + cacheBatchingHandler( + new CachePutData( + cacheKey, + persister.CacheEntryStructure.Structure(entry), + version, + versionComparator, + factory.Settings.IsMinimalPutsEnabled && session.CacheMode != CacheMode.Refresh)); + } + else + { + bool put = await (persister.Cache.PutAsync(cacheKey, persister.CacheEntryStructure.Structure(entry), + session.Timestamp, version, versionComparator, + factory.Settings.IsMinimalPutsEnabled && session.CacheMode != CacheMode.Refresh, cancellationToken)).ConfigureAwait(false); + + if (put && factory.Statistics.IsStatisticsEnabled) + { + factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + } } } } diff --git a/src/NHibernate/Async/Engine/TwoPhaseLoad.cs b/src/NHibernate/Async/Engine/TwoPhaseLoad.cs index 6579b270c6f..dda0ea12e0e 100644 --- a/src/NHibernate/Async/Engine/TwoPhaseLoad.cs +++ b/src/NHibernate/Async/Engine/TwoPhaseLoad.cs @@ -19,6 +19,7 @@ using NHibernate.Proxy; using NHibernate.Type; using NHibernate.Properties; +using System; namespace NHibernate.Engine { @@ -33,7 +34,23 @@ public static partial class TwoPhaseLoad /// 
between the entities which were instantiated and had their state /// "hydrated" into an array /// - public static async Task InitializeEntityAsync(object entity, bool readOnly, ISessionImplementor session, PreLoadEvent preLoadEvent, PostLoadEvent postLoadEvent, CancellationToken cancellationToken) + public static Task InitializeEntityAsync(object entity, bool readOnly, ISessionImplementor session, PreLoadEvent preLoadEvent, PostLoadEvent postLoadEvent, CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) + { + return Task.FromCanceled(cancellationToken); + } + return InitializeEntityAsync(entity, readOnly, session, preLoadEvent, postLoadEvent, null, cancellationToken); + } + + /// + /// Perform the second step of 2-phase load. Fully initialize the entity instance. + /// After processing a JDBC result set, we "resolve" all the associations + /// between the entities which were instantiated and had their state + /// "hydrated" into an array + /// + internal static async Task InitializeEntityAsync(object entity, bool readOnly, ISessionImplementor session, PreLoadEvent preLoadEvent, PostLoadEvent postLoadEvent, + Action cacheBatchingHandler, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); //TODO: Should this be an InitializeEntityEventListener??? (watch out for performance!) @@ -95,14 +112,29 @@ public static async Task InitializeEntityAsync(object entity, bool readOnly, ISe CacheEntry entry = new CacheEntry(hydratedState, persister, entityEntry.LoadedWithLazyPropertiesUnfetched, version, session, entity); CacheKey cacheKey = session.GenerateCacheKey(id, persister.IdentifierType, persister.RootEntityName); - bool put = - await (persister.Cache.PutAsync(cacheKey, persister.CacheEntryStructure.Structure(entry), session.Timestamp, version, - persister.IsVersioned ? 
persister.VersionType.Comparator : null, - UseMinimalPuts(session, entityEntry), cancellationToken)).ConfigureAwait(false); - if (put && factory.Statistics.IsStatisticsEnabled) + if (cacheBatchingHandler != null && persister.IsBatchLoadable && persister.Cache.IsBatchingPutSupported()) + { + cacheBatchingHandler( + persister, + new CachePutData( + cacheKey, + persister.CacheEntryStructure.Structure(entry), + version, + persister.IsVersioned ? persister.VersionType.Comparator : null, + UseMinimalPuts(session, entityEntry))); + } + else { - factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + bool put = + await (persister.Cache.PutAsync(cacheKey, persister.CacheEntryStructure.Structure(entry), session.Timestamp, version, + persister.IsVersioned ? persister.VersionType.Comparator : null, + UseMinimalPuts(session, entityEntry), cancellationToken)).ConfigureAwait(false); + + if (put && factory.Statistics.IsStatisticsEnabled) + { + factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + } } } diff --git a/src/NHibernate/Async/Loader/Loader.cs b/src/NHibernate/Async/Loader/Loader.cs index 491e8153aa9..e4f360a2faf 100644 --- a/src/NHibernate/Async/Loader/Loader.cs +++ b/src/NHibernate/Async/Loader/Loader.cs @@ -362,10 +362,13 @@ internal async Task InitializeEntitiesAndCollectionsAsync(IList hydratedObjects, Log.Debug("total objects hydrated: {0}", hydratedObjectsSize); } + var cacheBatcher = new CacheBatcher(session); for (int i = 0; i < hydratedObjectsSize; i++) { - await (TwoPhaseLoad.InitializeEntityAsync(hydratedObjects[i], readOnly, session, pre, post, cancellationToken)).ConfigureAwait(false); + await (TwoPhaseLoad.InitializeEntityAsync(hydratedObjects[i], readOnly, session, pre, post, + (persister, data) => cacheBatcher.AddToBatch(persister, data), cancellationToken)).ConfigureAwait(false); } + await (cacheBatcher.ExecuteBatchAsync(cancellationToken)).ConfigureAwait(false); } if (collectionPersisters != null) diff 
--git a/src/NHibernate/Cache/AbstractCacheBatch.cs b/src/NHibernate/Cache/AbstractCacheBatch.cs new file mode 100644 index 00000000000..74f33932670 --- /dev/null +++ b/src/NHibernate/Cache/AbstractCacheBatch.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Text; +using NHibernate.Engine; + +namespace NHibernate.Cache +{ + /// + /// An abstract batch used for implementing a batch operation of . + /// + internal abstract partial class AbstractCacheBatch + { + public AbstractCacheBatch(ISessionImplementor session, ICacheConcurrencyStrategy cacheConcurrencyStrategy) + { + Session = session; + CacheConcurrencyStrategy = cacheConcurrencyStrategy; + } + + protected ISessionImplementor Session { get; } + + public ICacheConcurrencyStrategy CacheConcurrencyStrategy { get; } + + public abstract int BatchSize { get; } + + public abstract void Execute(); + } + + /// + /// An abstract batch used for implementing a batch operation of . + /// + internal abstract partial class AbstractCacheBatch : AbstractCacheBatch + { + private List _batch = new List(); + + public AbstractCacheBatch(ISessionImplementor session, ICacheConcurrencyStrategy cacheConcurrencyStrategy) + : base(session, cacheConcurrencyStrategy) + { + } + + public void Add(TData data) + { + _batch.Add(data); + } + + public override int BatchSize => _batch.Count; + + public override sealed void Execute() + { + Execute(_batch.ToArray()); + } + + protected abstract void Execute(TData[] data); + } +} diff --git a/src/NHibernate/Cache/CacheBatcher.cs b/src/NHibernate/Cache/CacheBatcher.cs new file mode 100644 index 00000000000..88a6b548636 --- /dev/null +++ b/src/NHibernate/Cache/CacheBatcher.cs @@ -0,0 +1,125 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; +using NHibernate.Cache.Access; +using NHibernate.Engine; +using NHibernate.Persister.Collection; +using NHibernate.Persister.Entity; + +namespace NHibernate.Cache +{ + /// + /// A 
batcher for batching operations of , where the batch size is retrived + /// from an or . + /// When a different persister or a different operation is added to the batch, the current batch will be executed. + /// + internal partial class CacheBatcher + { + private CachePutBatch _putBatch; + private ISessionImplementor _session; + private AbstractCacheBatch _currentBatch; + private object _currentPersister; + + protected static readonly INHibernateLogger Log = NHibernateLogger.For(typeof(CacheBatcher)); + + public CacheBatcher(ISessionImplementor session) + { + _session = session; + } + + /// + /// Adds a put operation to the batch. If the batch size reached the persister batch + /// size, the batch will be executed. + /// + /// The entity persister. + /// The data to put in the cache. + public void AddToBatch(IEntityPersister persister, CachePutData data) + { + if (ShouldExecuteBatch(persister, _putBatch)) + { + ExecuteBatch(); + _currentPersister = persister; + _currentBatch = _putBatch = new CachePutBatch(_session, persister.Cache); + } + if (Log.IsDebugEnabled()) + { + Log.Debug("Adding a put operation to batch for entity {0} and key {1}", persister.EntityName, data.Key); + } + _putBatch.Add(data); + } + + /// + /// Adds a put operation to the batch. If the batch size reached the persister batch + /// size, the batch will be executed. + /// + /// The collection persister. + /// The data to put in the cache. + public void AddToBatch(ICollectionPersister persister, CachePutData data) + { + if (ShouldExecuteBatch(persister, _putBatch)) + { + ExecuteBatch(); + _currentPersister = persister; + _currentBatch = _putBatch = new CachePutBatch(_session, persister.Cache); + } + if (Log.IsDebugEnabled()) + { + Log.Debug("Adding a put operation to batch for collection role {0} and key {1}", persister.Role, data.Key); + } + _putBatch.Add(data); + } + + /// + /// Executes the current batch. 
+ /// + public void ExecuteBatch() + { + if (_currentBatch == null || _currentBatch.BatchSize == 0) + { + return; + } + + try + { + Stopwatch duration = null; + if (Log.IsDebugEnabled()) + { + duration = Stopwatch.StartNew(); + } + _currentBatch.Execute(); + if (Log.IsDebugEnabled() && duration != null) + { + Log.Debug("ExecuteBatch for {0} keys took {1} ms", _currentBatch.BatchSize, duration.ElapsedMilliseconds); + } + } + finally + { + Cleanup(); + } + } + + /// + /// Cleans up the current batch. + /// + public void Cleanup() + { + _putBatch = null; + + _currentBatch = null; + _currentPersister = null; + } + + private bool ShouldExecuteBatch(IEntityPersister persister, AbstractCacheBatch batch) + { + return batch != _currentBatch || _currentPersister != persister || + _currentBatch.BatchSize >= persister.GetBatchSize(); + } + + private bool ShouldExecuteBatch(ICollectionPersister persister, AbstractCacheBatch batch) + { + return batch != _currentBatch || _currentPersister != persister || + _currentBatch.BatchSize >= persister.GetBatchSize(); + } + } +} diff --git a/src/NHibernate/Cache/CachePutBatch.cs b/src/NHibernate/Cache/CachePutBatch.cs new file mode 100644 index 00000000000..ed2a3d48d74 --- /dev/null +++ b/src/NHibernate/Cache/CachePutBatch.cs @@ -0,0 +1,48 @@ +using System; +using System.Linq; +using System.Collections; +using System.Collections.Generic; +using System.Text; +using NHibernate.Engine; + +namespace NHibernate.Cache +{ + /// + /// A batch for batching the operation. 
+ /// + internal partial class CachePutBatch : AbstractCacheBatch + { + public CachePutBatch(ISessionImplementor session, ICacheConcurrencyStrategy cacheConcurrencyStrategy) : base(session, cacheConcurrencyStrategy) + { + } + + protected override void Execute(CachePutData[] data) + { + var length = data.Length; + var keys = new CacheKey[length]; + var values = new object[length]; + var versions = new object[length]; + var versionComparers = new IComparer[length]; + var minimalPuts = new bool[length]; + + for (int i = 0; i < length; i++) + { + var item = data[i]; + keys[i] = item.Key; + values[i] = item.Value; + versions[i] = item.Version; + versionComparers[i] = item.VersionComparer; + minimalPuts[i] = item.MinimalPut; + } + + var factory = Session.Factory; + var cacheStrategy = CacheConcurrencyStrategy; + var puts = cacheStrategy.PutMultiple(keys, values, Session.Timestamp, versions, versionComparers, minimalPuts); + + if (factory.Statistics.IsStatisticsEnabled && puts.Any(o => o)) + { + factory.StatisticsImplementor.SecondLevelCachePut(cacheStrategy.RegionName); + } + } + } +} diff --git a/src/NHibernate/Cache/CachePutData.cs b/src/NHibernate/Cache/CachePutData.cs new file mode 100644 index 00000000000..463a3405862 --- /dev/null +++ b/src/NHibernate/Cache/CachePutData.cs @@ -0,0 +1,34 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Text; +using NHibernate.Persister.Collection; +using NHibernate.Persister.Entity; + +namespace NHibernate.Cache +{ + /// + /// The data used to put a value to the 2nd level cache. 
+ /// + internal class CachePutData + { + public CachePutData(CacheKey key, object value, object version, IComparer versionComparer, bool minimalPut) + { + Key = key; + Value = value; + Version = version; + VersionComparer = versionComparer; + MinimalPut = minimalPut; + } + + public CacheKey Key { get; } + + public object Value { get; } + + public object Version { get; } + + public IComparer VersionComparer { get; } + + public bool MinimalPut { get; } + } +} diff --git a/src/NHibernate/Cache/IBatchableReadCache.cs b/src/NHibernate/Cache/IBatchableReadCache.cs index e2b94691d58..79b6bf388a6 100644 --- a/src/NHibernate/Cache/IBatchableReadCache.cs +++ b/src/NHibernate/Cache/IBatchableReadCache.cs @@ -5,7 +5,7 @@ namespace NHibernate.Cache { /// - /// Defines a method for retrieving multiple keys from the cache at once. The implementor + /// Defines a method for retrieving multiple objects from the cache at once. The implementor /// should use this interface along with when the cache supports /// a multiple get operation. /// diff --git a/src/NHibernate/Cache/IBatchableWriteCache.cs b/src/NHibernate/Cache/IBatchableWriteCache.cs new file mode 100644 index 00000000000..f7712049687 --- /dev/null +++ b/src/NHibernate/Cache/IBatchableWriteCache.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace NHibernate.Cache +{ + /// + /// Defines methods for retrieving and adding multiple objects from/to the cache at once. + /// The implementor should use this interface along with when the + /// cache supports a multiple get and put operation. + /// + /// + /// + /// + /// All implementations must be threadsafe. + /// + /// + public partial interface IBatchableReadWriteCache : IBatchableReadCache + { + /// + /// Add multiple objects to the cache. + /// + /// The keys to cache. + /// The objects to cache. + void PutMultiple(object[] keys, object[] values); + + /// + /// Lock the objects from being changed by another thread. 
+ /// + /// The keys to lock. + void LockMultiple(object[] keys); + + /// + /// Unlock the objects that were previously locked. + /// + /// The keys to unlock. + void UnlockMultiple(object[] keys); + } +} diff --git a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs index 9dd1f00f2d5..ce1a6d536f3 100644 --- a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs @@ -173,10 +173,51 @@ public static object[] GetMultiple(this ICacheConcurrencyStrategy cache, CacheKe return objects; } + /// + /// Attempt to cache objects, after loading them from the database. + /// + /// The cache concurrency strategy. + /// The keys (id) of the objects to put in the Cache. + /// The objects to put in the cache. + /// A timestamp prior to the transaction start time. + /// The version numbers of the objects we are putting. + /// The comparers to be used to compare version numbers + /// Indicates that the cache should avoid a put if the item is already cached. + /// if the objects were successfully cached. + /// + //6.0 TODO: Merge into ICacheConcurrencyStrategy. 
+ public static bool[] PutMultiple(this ICacheConcurrencyStrategy cache, CacheKey[] keys, object[] values, long timestamp, + object[] versions, IComparer[] versionComparers, bool[] minimalPuts) + { + switch (cache) + { + case ReadOnlyCache readOnly: + return readOnly.PutMultiple(keys, values, timestamp, versions, versionComparers, minimalPuts); + case ReadWriteCache readWrite: + return readWrite.PutMultiple(keys, values, timestamp, versions, versionComparers, minimalPuts); + case NonstrictReadWriteCache nonstrictReadWrite: + return nonstrictReadWrite.PutMultiple(keys, values, timestamp, versions, versionComparers, minimalPuts); + } + + // Fallback to Put + var result = new bool[keys.Length]; + for (var i = 0; i < keys.Length; i++) + { + result[i] = cache.Put(keys[i], values[i], timestamp, versions[i], versionComparers[i], minimalPuts[i]); + } + return result; + } + public static bool IsBatchingGetSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global return cache.Cache is IBatchableReadCache; } + + public static bool IsBatchingPutSupported(this ICacheConcurrencyStrategy cache) + { + // ReSharper disable once SuspiciousTypeConversion.Global + return cache.Cache is IBatchableReadWriteCache; + } } } diff --git a/src/NHibernate/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Cache/NonstrictReadWriteCache.cs index adf0fd0e985..786506c05dd 100644 --- a/src/NHibernate/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Cache/NonstrictReadWriteCache.cs @@ -1,5 +1,6 @@ using System; using System.Collections; +using System.Collections.Generic; using System.Linq; using NHibernate.Cache.Access; @@ -17,6 +18,7 @@ public partial class NonstrictReadWriteCache : ICacheConcurrencyStrategy { private ICache cache; private IBatchableReadCache _batchableReadCache; + private IBatchableReadWriteCache _batchableReadWriteCache; private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(NonstrictReadWriteCache)); @@ 
-36,6 +38,7 @@ public ICache Cache cache = value; // ReSharper disable once SuspiciousTypeConversion.Global _batchableReadCache = value as IBatchableReadCache; + _batchableReadWriteCache = value as IBatchableReadWriteCache; } } @@ -83,6 +86,72 @@ public object[] GetMultiple(CacheKey[] keys, long txTimestamp) return results; } + /// + /// Add multiple items to the cache + /// + public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts) + { + if (_batchableReadWriteCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); + } + var result = new bool[keys.Length]; + if (timestamp == long.MinValue) + { + // MinValue means cache is disabled + return result; + } + + var checkKeys = new List(); + var checkKeyIndexes = new List(); + for (var i = 0; i < minimalPuts.Length; i++) + { + if (minimalPuts[i]) + { + checkKeys.Add(keys[i]); + checkKeyIndexes.Add(i); + } + } + var skipKeyIndexes = new HashSet(); + if (checkKeys.Any()) + { + var objects = _batchableReadWriteCache.GetMultiple(checkKeys.ToArray()); + for (var i = 0; i < objects.Length; i++) + { + if (objects[i] != null) + { + if (log.IsDebugEnabled()) + { + log.Debug("item already cached: {0}", checkKeys[i]); + } + skipKeyIndexes.Add(checkKeyIndexes[i]); + } + } + } + + if (skipKeyIndexes.Count == keys.Length) + { + return result; + } + + var putKeys = new object[keys.Length - skipKeyIndexes.Count]; + var putValues = new object[putKeys.Length]; + var j = 0; + for (var i = 0; i < keys.Length; i++) + { + if (skipKeyIndexes.Contains(i)) + { + continue; + } + putKeys[j] = keys[i]; + putValues[j++] = values[i]; + result[i] = true; + } + _batchableReadWriteCache.PutMultiple(putKeys, putValues); + return result; + } + /// /// Add an item to the cache /// diff --git a/src/NHibernate/Cache/ReadOnlyCache.cs b/src/NHibernate/Cache/ReadOnlyCache.cs index 87b04296559..d1cc9c73aeb 
100644 --- a/src/NHibernate/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Cache/ReadOnlyCache.cs @@ -1,5 +1,6 @@ using System; using System.Collections; +using System.Collections.Generic; using System.Linq; using NHibernate.Cache.Access; @@ -12,6 +13,7 @@ public partial class ReadOnlyCache : ICacheConcurrencyStrategy { private ICache cache; private IBatchableReadCache _batchableReadCache; + private IBatchableReadWriteCache _batchableReadWriteCache; private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(ReadOnlyCache)); /// @@ -30,6 +32,7 @@ public ICache Cache cache = value; // ReSharper disable once SuspiciousTypeConversion.Global _batchableReadCache = value as IBatchableReadCache; + _batchableReadWriteCache = value as IBatchableReadWriteCache; } } @@ -74,6 +77,69 @@ public ISoftLock Lock(CacheKey key, object version) throw new InvalidOperationException("ReadOnlyCache: Can't write to a readonly object " + key.EntityOrRoleName); } + public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts) + { + if (_batchableReadWriteCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); + } + var result = new bool[keys.Length]; + if (timestamp == long.MinValue) + { + // MinValue means cache is disabled + return result; + } + + var checkKeys = new List(); + var checkKeyIndexes = new List(); + for (var i = 0; i < minimalPuts.Length; i++) + { + if (minimalPuts[i]) + { + checkKeys.Add(keys[i]); + checkKeyIndexes.Add(i); + } + } + var skipKeyIndexes = new HashSet(); + if (checkKeys.Any()) + { + var objects = _batchableReadWriteCache.GetMultiple(checkKeys.ToArray()); + for (var i = 0; i < objects.Length; i++) + { + if (objects[i] != null) + { + if (log.IsDebugEnabled()) + { + log.Debug("item already cached: {0}", checkKeys[i]); + } + skipKeyIndexes.Add(checkKeyIndexes[i]); + } + } + } + + if (skipKeyIndexes.Count 
== keys.Length) + { + return result; + } + + var putKeys = new object[keys.Length - skipKeyIndexes.Count]; + var putValues = new object[putKeys.Length]; + var j = 0; + for (var i = 0; i < keys.Length; i++) + { + if (skipKeyIndexes.Contains(i)) + { + continue; + } + putKeys[j] = keys[i]; + putValues[j++] = values[i]; + result[i] = true; + } + _batchableReadWriteCache.PutMultiple(putKeys, putValues); + return result; + } + public bool Put(CacheKey key, object value, long timestamp, object version, IComparer versionComparator, bool minimalPut) { diff --git a/src/NHibernate/Cache/ReadWriteCache.cs b/src/NHibernate/Cache/ReadWriteCache.cs index 6f92b556f5a..8c1879b64ec 100644 --- a/src/NHibernate/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Cache/ReadWriteCache.cs @@ -1,5 +1,6 @@ using System; using System.Collections; +using System.Collections.Generic; using System.Linq; using NHibernate.Cache.Access; @@ -36,6 +37,7 @@ public interface ILockable private readonly object _lockObject = new object(); private ICache cache; private IBatchableReadCache _batchableReadCache; + private IBatchableReadWriteCache _batchableReadWriteCache; private int _nextLockId; public ReadWriteCache() @@ -58,6 +60,7 @@ public ICache Cache cache = value; // ReSharper disable once SuspiciousTypeConversion.Global _batchableReadCache = value as IBatchableReadCache; + _batchableReadWriteCache = value as IBatchableReadWriteCache; } } @@ -217,6 +220,86 @@ public ISoftLock Lock(CacheKey key, object version) } } + /// + /// Do not add an item to the cache unless the current transaction + /// timestamp is later than the timestamp at which the item was + /// invalidated. (Otherwise, a stale item might be re-added if the + /// database is operating in repeatable read isolation mode.) 
+ /// + /// Whether the items were actually put into the cache + public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts) + { + if (_batchableReadWriteCache == null) + { + throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); + } + + var result = new bool[keys.Length]; + if (timestamp == long.MinValue) + { + // MinValue means cache is disabled + return result; + } + + lock (_lockObject) + { + if (log.IsDebugEnabled()) + { + log.Debug("Caching: {0}", string.Join(",", keys.AsEnumerable())); + } + var keysArr = keys.Cast().ToArray(); + try + { + _batchableReadWriteCache.LockMultiple(keysArr); + var putBatch = new Dictionary(); + var lockables = _batchableReadWriteCache.GetMultiple(keys); + for (var i = 0; i < keys.Length; i++) + { + var key = keys[i]; + var version = versions[i]; + var lockable = (ILockable) lockables[i]; + bool puttable = lockable == null || + lockable.IsPuttable(timestamp, version, versionComparers[i]); + if (puttable) + { + putBatch.Add(key, new CachedItem(values[i], cache.NextTimestamp(), version)); + if (log.IsDebugEnabled()) + { + log.Debug("Cached: {0}", key); + } + result[i] = true; + } + else + { + if (log.IsDebugEnabled()) + { + if (lockable.IsLock) + { + log.Debug("Item was locked: {0}", key); + } + else + { + log.Debug("Item was already cached: {0}", key); + } + } + result[i] = false; + } + } + + if (putBatch.Count > 0) + { + _batchableReadWriteCache.PutMultiple(putBatch.Keys.ToArray(), putBatch.Values.ToArray()); + } + } + finally + { + _batchableReadWriteCache.UnlockMultiple(keysArr); + } + } + return result; + } + /// /// Do not add an item to the cache unless the current transaction /// timestamp is later than the timestamp at which the item was diff --git a/src/NHibernate/Engine/Loading/CollectionLoadContext.cs b/src/NHibernate/Engine/Loading/CollectionLoadContext.cs index 9dd97e4351e..5adb6e4899f 
100644 --- a/src/NHibernate/Engine/Loading/CollectionLoadContext.cs +++ b/src/NHibernate/Engine/Loading/CollectionLoadContext.cs @@ -1,3 +1,4 @@ +using System; using System.Collections; using System.Collections.Generic; using System.Data.Common; @@ -217,10 +218,13 @@ private void EndLoadingCollections(ICollectionPersister persister, IList cacheBatcher.AddToBatch(persister, data)); } + cacheBatcher.ExecuteBatch(); if (log.IsDebugEnabled()) { @@ -228,7 +232,8 @@ private void EndLoadingCollections(ICollectionPersister persister, IList cacheBatchingHandler) { if (log.IsDebugEnabled()) { @@ -267,7 +272,7 @@ private void EndLoadingCollection(LoadingCollectionEntry lce, ICollectionPersist if (addToCache) { - AddCollectionToCache(lce, persister); + AddCollectionToCache(lce, persister, cacheBatchingHandler); } if (log.IsDebugEnabled()) @@ -285,7 +290,9 @@ private void EndLoadingCollection(LoadingCollectionEntry lce, ICollectionPersist /// Add the collection to the second-level cache /// The entry representing the collection to add /// The persister - private void AddCollectionToCache(LoadingCollectionEntry lce, ICollectionPersister persister) + /// The action for handling cache batching + private void AddCollectionToCache(LoadingCollectionEntry lce, ICollectionPersister persister, + Action cacheBatchingHandler) { ISessionImplementor session = LoadContext.PersistenceContext.Session; ISessionFactoryImplementor factory = session.Factory; @@ -323,13 +330,27 @@ private void AddCollectionToCache(LoadingCollectionEntry lce, ICollectionPersist CollectionCacheEntry entry = new CollectionCacheEntry(lce.Collection, persister); CacheKey cacheKey = session.GenerateCacheKey(lce.Key, persister.KeyType, persister.Role); - bool put = persister.Cache.Put(cacheKey, persister.CacheEntryStructure.Structure(entry), - session.Timestamp, version, versionComparator, - factory.Settings.IsMinimalPutsEnabled && session.CacheMode != CacheMode.Refresh); - if (put && 
factory.Statistics.IsStatisticsEnabled) + if (persister.GetBatchSize() > 1 && persister.Cache.IsBatchingPutSupported()) { - factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + cacheBatchingHandler( + new CachePutData( + cacheKey, + persister.CacheEntryStructure.Structure(entry), + version, + versionComparator, + factory.Settings.IsMinimalPutsEnabled && session.CacheMode != CacheMode.Refresh)); + } + else + { + bool put = persister.Cache.Put(cacheKey, persister.CacheEntryStructure.Structure(entry), + session.Timestamp, version, versionComparator, + factory.Settings.IsMinimalPutsEnabled && session.CacheMode != CacheMode.Refresh); + + if (put && factory.Statistics.IsStatisticsEnabled) + { + factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + } } } diff --git a/src/NHibernate/Engine/TwoPhaseLoad.cs b/src/NHibernate/Engine/TwoPhaseLoad.cs index 54786a29d4e..c2df76ce4dc 100644 --- a/src/NHibernate/Engine/TwoPhaseLoad.cs +++ b/src/NHibernate/Engine/TwoPhaseLoad.cs @@ -9,6 +9,7 @@ using NHibernate.Proxy; using NHibernate.Type; using NHibernate.Properties; +using System; namespace NHibernate.Engine { @@ -47,6 +48,18 @@ public static void PostHydrate(IEntityPersister persister, object id, object[] v /// "hydrated" into an array /// public static void InitializeEntity(object entity, bool readOnly, ISessionImplementor session, PreLoadEvent preLoadEvent, PostLoadEvent postLoadEvent) + { + InitializeEntity(entity, readOnly, session, preLoadEvent, postLoadEvent, null); + } + + /// + /// Perform the second step of 2-phase load. Fully initialize the entity instance. 
+ /// After processing a JDBC result set, we "resolve" all the associations + /// between the entities which were instantiated and had their state + /// "hydrated" into an array + /// + internal static void InitializeEntity(object entity, bool readOnly, ISessionImplementor session, PreLoadEvent preLoadEvent, PostLoadEvent postLoadEvent, + Action cacheBatchingHandler) { //TODO: Should this be an InitializeEntityEventListener??? (watch out for performance!) @@ -107,14 +120,29 @@ public static void InitializeEntity(object entity, bool readOnly, ISessionImplem CacheEntry entry = new CacheEntry(hydratedState, persister, entityEntry.LoadedWithLazyPropertiesUnfetched, version, session, entity); CacheKey cacheKey = session.GenerateCacheKey(id, persister.IdentifierType, persister.RootEntityName); - bool put = - persister.Cache.Put(cacheKey, persister.CacheEntryStructure.Structure(entry), session.Timestamp, version, - persister.IsVersioned ? persister.VersionType.Comparator : null, - UseMinimalPuts(session, entityEntry)); - if (put && factory.Statistics.IsStatisticsEnabled) + if (cacheBatchingHandler != null && persister.IsBatchLoadable && persister.Cache.IsBatchingPutSupported()) + { + cacheBatchingHandler( + persister, + new CachePutData( + cacheKey, + persister.CacheEntryStructure.Structure(entry), + version, + persister.IsVersioned ? persister.VersionType.Comparator : null, + UseMinimalPuts(session, entityEntry))); + } + else { - factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + bool put = + persister.Cache.Put(cacheKey, persister.CacheEntryStructure.Structure(entry), session.Timestamp, version, + persister.IsVersioned ? 
persister.VersionType.Comparator : null, + UseMinimalPuts(session, entityEntry)); + + if (put && factory.Statistics.IsStatisticsEnabled) + { + factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName); + } } } diff --git a/src/NHibernate/Loader/Loader.cs b/src/NHibernate/Loader/Loader.cs index fb069e6eb98..9f1ef6a7cc0 100644 --- a/src/NHibernate/Loader/Loader.cs +++ b/src/NHibernate/Loader/Loader.cs @@ -626,10 +626,13 @@ internal void InitializeEntitiesAndCollections(IList hydratedObjects, object res Log.Debug("total objects hydrated: {0}", hydratedObjectsSize); } + var cacheBatcher = new CacheBatcher(session); for (int i = 0; i < hydratedObjectsSize; i++) { - TwoPhaseLoad.InitializeEntity(hydratedObjects[i], readOnly, session, pre, post); + TwoPhaseLoad.InitializeEntity(hydratedObjects[i], readOnly, session, pre, post, + (persister, data) => cacheBatcher.AddToBatch(persister, data)); } + cacheBatcher.ExecuteBatch(); } if (collectionPersisters != null) From efdeac82ca8e8d68930b43e776c56aec55ea591c Mon Sep 17 00:00:00 2001 From: maca88 Date: Tue, 10 Apr 2018 00:00:32 +0200 Subject: [PATCH 3/8] Added batching cache operations support for subclasses --- .../BatchableCacheSubclassFixture.cs | 151 ++++++++++++++++++ .../BatchableCacheSubclassFixture.cs | 140 ++++++++++++++++ .../Event/Default/DefaultLoadEventListener.cs | 3 +- src/NHibernate/Engine/BatchFetchQueue.cs | 11 ++ src/NHibernate/Engine/EntityKey.cs | 5 + .../Event/Default/DefaultLoadEventListener.cs | 3 +- 6 files changed, 309 insertions(+), 4 deletions(-) create mode 100644 src/NHibernate.Test/Async/CacheTest/BatchableCacheSubclassFixture.cs create mode 100644 src/NHibernate.Test/CacheTest/BatchableCacheSubclassFixture.cs diff --git a/src/NHibernate.Test/Async/CacheTest/BatchableCacheSubclassFixture.cs b/src/NHibernate.Test/Async/CacheTest/BatchableCacheSubclassFixture.cs new file mode 100644 index 00000000000..0c916dfa495 --- /dev/null +++ 
b/src/NHibernate.Test/Async/CacheTest/BatchableCacheSubclassFixture.cs @@ -0,0 +1,151 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using NHibernate.Cache; +using NHibernate.Cfg; +using NHibernate.DomainModel; +using NHibernate.Test.CacheTest.Caches; +using NUnit.Framework; + +namespace NHibernate.Test.CacheTest +{ + using System.Threading.Tasks; + [TestFixture] + public class BatchableCacheSubclassFixtureAsync : TestCase + { + protected override IList Mappings + { + get + { + return new string[] + { + "FooBar.hbm.xml", + "Baz.hbm.xml", + "Qux.hbm.xml", + "Glarch.hbm.xml", + "Fum.hbm.xml", + "Fumm.hbm.xml", + "Fo.hbm.xml", + "One.hbm.xml", + "Many.hbm.xml", + "Immutable.hbm.xml", + "Fee.hbm.xml", + "Vetoer.hbm.xml", + "Holder.hbm.xml", + "Location.hbm.xml", + "Stuff.hbm.xml", + "Container.hbm.xml", + "Simple.hbm.xml" + }; + } + } + + protected override void Configure(Configuration configuration) + { + configuration.SetProperty(Cfg.Environment.UseSecondLevelCache, "true"); + configuration.SetProperty(Cfg.Environment.UseQueryCache, "true"); + configuration.SetProperty(Cfg.Environment.CacheProvider, typeof(BatchableCacheProvider).AssemblyQualifiedName); + } + + protected override void OnSetUp() + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + FooProxy flast = new Bar(); + s.Save(flast); + for (int i = 0; i < 5; i++) + { + FooProxy foo = new Bar(); + s.Save(foo); + flast.TheFoo = foo; + flast = flast.TheFoo; + flast.String = "foo" + (i + 1); + } + tx.Commit(); + } + } + + protected override void OnTearDown() + { + using (var s = Sfi.OpenSession()) + using 
(var tx = s.BeginTransaction()) + { + s.Delete("from NHibernate.DomainModel.Foo as foo"); + tx.Commit(); + } + } + + [Test] + public async Task BatchableRootEntityTestAsync() + { + var persister = Sfi.GetEntityPersister(typeof(Foo).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var fooCache = (BatchableCache) persister.Cache.Cache; + + persister = Sfi.GetEntityPersister(typeof(Bar).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var barCache = (BatchableCache) persister.Cache.Cache; + + Assert.That(barCache, Is.EqualTo(fooCache)); + + // Add Bar to cache + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var list = await (s.CreateQuery("from foo in class NHibernate.DomainModel.Foo").ListAsync()); + Assert.AreEqual(6, list.Count); + await (tx.CommitAsync()); + } + + Assert.That(fooCache.PutCalls, Has.Count.EqualTo(6)); // Bar is not batchable + Assert.That(fooCache.PutMultipleCalls, Has.Count.EqualTo(0)); + + // Batch fetch by two from cache + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var enumerator = + (await (s.CreateQuery("from foo in class NHibernate.DomainModel.Foo order by foo.String").EnumerableAsync())).GetEnumerator(); + var i = 1; + while (enumerator.MoveNext()) + { + BarProxy bar = (BarProxy) enumerator.Current; + if (i % 2 == 0) + { + string theString = bar.String; // Load the entity + } + i++; + } + await (tx.CommitAsync()); + } + + Assert.That(fooCache.GetMultipleCalls, Has.Count.EqualTo(3)); + + // Check that each key was used only once when retriving objects from the cache + var uniqueKeys = new HashSet(); + foreach (var keys in fooCache.GetMultipleCalls) + { + Assert.That(keys, Has.Length.EqualTo(2)); + foreach (var key in keys.OfType().Select(o => (string) o.Key)) + { + Assert.That(uniqueKeys, Does.Not.Contains(key)); + uniqueKeys.Add(key); + } + } 
+ } + } +} diff --git a/src/NHibernate.Test/CacheTest/BatchableCacheSubclassFixture.cs b/src/NHibernate.Test/CacheTest/BatchableCacheSubclassFixture.cs new file mode 100644 index 00000000000..5f2024049a5 --- /dev/null +++ b/src/NHibernate.Test/CacheTest/BatchableCacheSubclassFixture.cs @@ -0,0 +1,140 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using NHibernate.Cache; +using NHibernate.Cfg; +using NHibernate.DomainModel; +using NHibernate.Test.CacheTest.Caches; +using NUnit.Framework; + +namespace NHibernate.Test.CacheTest +{ + [TestFixture] + public class BatchableCacheSubclassFixture : TestCase + { + protected override IList Mappings + { + get + { + return new string[] + { + "FooBar.hbm.xml", + "Baz.hbm.xml", + "Qux.hbm.xml", + "Glarch.hbm.xml", + "Fum.hbm.xml", + "Fumm.hbm.xml", + "Fo.hbm.xml", + "One.hbm.xml", + "Many.hbm.xml", + "Immutable.hbm.xml", + "Fee.hbm.xml", + "Vetoer.hbm.xml", + "Holder.hbm.xml", + "Location.hbm.xml", + "Stuff.hbm.xml", + "Container.hbm.xml", + "Simple.hbm.xml" + }; + } + } + + protected override void Configure(Configuration configuration) + { + configuration.SetProperty(Cfg.Environment.UseSecondLevelCache, "true"); + configuration.SetProperty(Cfg.Environment.UseQueryCache, "true"); + configuration.SetProperty(Cfg.Environment.CacheProvider, typeof(BatchableCacheProvider).AssemblyQualifiedName); + } + + protected override void OnSetUp() + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + FooProxy flast = new Bar(); + s.Save(flast); + for (int i = 0; i < 5; i++) + { + FooProxy foo = new Bar(); + s.Save(foo); + flast.TheFoo = foo; + flast = flast.TheFoo; + flast.String = "foo" + (i + 1); + } + tx.Commit(); + } + } + + protected override void OnTearDown() + { + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + s.Delete("from NHibernate.DomainModel.Foo as foo"); + tx.Commit(); + } + } + + [Test] + public void 
BatchableRootEntityTest() + { + var persister = Sfi.GetEntityPersister(typeof(Foo).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var fooCache = (BatchableCache) persister.Cache.Cache; + + persister = Sfi.GetEntityPersister(typeof(Bar).FullName); + Assert.That(persister.Cache.Cache, Is.Not.Null); + Assert.That(persister.Cache.Cache, Is.TypeOf()); + var barCache = (BatchableCache) persister.Cache.Cache; + + Assert.That(barCache, Is.EqualTo(fooCache)); + + // Add Bar to cache + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var list = s.CreateQuery("from foo in class NHibernate.DomainModel.Foo").List(); + Assert.AreEqual(6, list.Count); + tx.Commit(); + } + + Assert.That(fooCache.PutCalls, Has.Count.EqualTo(6)); // Bar is not batchable + Assert.That(fooCache.PutMultipleCalls, Has.Count.EqualTo(0)); + + // Batch fetch by two from cache + using (var s = Sfi.OpenSession()) + using (var tx = s.BeginTransaction()) + { + var enumerator = + s.CreateQuery("from foo in class NHibernate.DomainModel.Foo order by foo.String").Enumerable().GetEnumerator(); + var i = 1; + while (enumerator.MoveNext()) + { + BarProxy bar = (BarProxy) enumerator.Current; + if (i % 2 == 0) + { + string theString = bar.String; // Load the entity + } + i++; + } + tx.Commit(); + } + + Assert.That(fooCache.GetMultipleCalls, Has.Count.EqualTo(3)); + + // Check that each key was used only once when retriving objects from the cache + var uniqueKeys = new HashSet(); + foreach (var keys in fooCache.GetMultipleCalls) + { + Assert.That(keys, Has.Length.EqualTo(2)); + foreach (var key in keys.OfType().Select(o => (string) o.Key)) + { + Assert.That(uniqueKeys, Does.Not.Contains(key)); + uniqueKeys.Add(key); + } + } + } + } +} diff --git a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs index 7dd1897fa65..fa1b61c84de 100644 --- 
a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs +++ b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs @@ -412,8 +412,7 @@ protected virtual async Task LoadFromSecondLevelCacheAsync(LoadEvent @ev } ISessionFactoryImplementor factory = source.Factory; var batchSize = persister.GetBatchSize(); - // TODO: check for subclass support - if (batchSize > 1 && persister.Cache.IsBatchingGetSupported() && !persister.EntityMetamodel.HasSubclasses) + if (batchSize > 1 && persister.Cache.IsBatchingGetSupported()) { // The first item in the array is the item that we want to load var entityBatch = diff --git a/src/NHibernate/Engine/BatchFetchQueue.cs b/src/NHibernate/Engine/BatchFetchQueue.cs index 99bf8696408..a53b39fda8f 100644 --- a/src/NHibernate/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Engine/BatchFetchQueue.cs @@ -140,6 +140,17 @@ public void RemoveBatchLoadableEntityKey(EntityKey key) set.Remove(key); } } + // A subclass will be added to the batch by the root entity name, when querying by the root entity. + // When removing a subclass key, we need to consider that the subclass may not be batchable but + // its root class may be. In order to prevent having in batch entity keys that are already loaded, + // we have to try to remove the key by the root entity, even if the subclass is not batchable. 
+ if (key.RootEntityName != key.EntityName) + { + if (batchLoadableEntityKeys.TryGetValue(key.RootEntityName, out var set)) + { + set.Remove(key); + } + } } /// diff --git a/src/NHibernate/Engine/EntityKey.cs b/src/NHibernate/Engine/EntityKey.cs index 0db22d945cf..041ee9c80dc 100644 --- a/src/NHibernate/Engine/EntityKey.cs +++ b/src/NHibernate/Engine/EntityKey.cs @@ -62,6 +62,11 @@ public string EntityName get { return entityName; } } + internal string RootEntityName + { + get { return rootEntityName; } + } + public override bool Equals(object other) { var otherKey = other as EntityKey; diff --git a/src/NHibernate/Event/Default/DefaultLoadEventListener.cs b/src/NHibernate/Event/Default/DefaultLoadEventListener.cs index 6fdc1666911..cec1068d800 100644 --- a/src/NHibernate/Event/Default/DefaultLoadEventListener.cs +++ b/src/NHibernate/Event/Default/DefaultLoadEventListener.cs @@ -423,8 +423,7 @@ protected virtual object LoadFromSecondLevelCache(LoadEvent @event, IEntityPersi } ISessionFactoryImplementor factory = source.Factory; var batchSize = persister.GetBatchSize(); - // TODO: check for subclass support - if (batchSize > 1 && persister.Cache.IsBatchingGetSupported() && !persister.EntityMetamodel.HasSubclasses) + if (batchSize > 1 && persister.Cache.IsBatchingGetSupported()) { // The first item in the array is the item that we want to load var entityBatch = From a6955fc9e4bad1674e9e20738f4c94e41603acef Mon Sep 17 00:00:00 2001 From: maca88 Date: Sat, 12 May 2018 20:04:32 +0200 Subject: [PATCH 4/8] Added IBatchableCacheConcurrencyStrategy and replaced XXXMultiple with XXXMany. 
--- .../Async/CacheTest/Caches/BatchableCache.cs | 8 ++-- .../CacheTest/Caches/BatchableCache.cs | 24 +++------- src/NHibernate/Async/Cache/CachePutBatch.cs | 2 +- .../IBatchableCacheConcurrencyStrategy.cs | 47 ++++++++++++++++++ .../Async/Cache/IBatchableReadCache.cs | 2 +- .../Async/Cache/IBatchableWriteCache.cs | 6 +-- .../Async/Cache/ICacheConcurrencyStrategy.cs | 46 +++++------------- .../Async/Cache/NonstrictReadWriteCache.cs | 20 ++++---- src/NHibernate/Async/Cache/ReadOnlyCache.cs | 20 ++++---- src/NHibernate/Async/Cache/ReadWriteCache.cs | 26 +++++----- .../Async/Cache/UpdateTimestampsCache.cs | 2 +- .../Async/Engine/BatchFetchQueue.cs | 4 +- ...efaultInitializeCollectionEventListener.cs | 2 +- .../Event/Default/DefaultLoadEventListener.cs | 2 +- src/NHibernate/Cache/CachePutBatch.cs | 2 +- .../IBatchableCacheConcurrencyStrategy.cs | 47 ++++++++++++++++++ src/NHibernate/Cache/IBatchableReadCache.cs | 2 +- src/NHibernate/Cache/IBatchableWriteCache.cs | 6 +-- .../Cache/ICacheConcurrencyStrategy.cs | 48 +++++-------------- .../Cache/NonstrictReadWriteCache.cs | 12 ++--- src/NHibernate/Cache/ReadOnlyCache.cs | 12 ++--- src/NHibernate/Cache/ReadWriteCache.cs | 18 +++---- src/NHibernate/Cache/UpdateTimestampsCache.cs | 2 +- src/NHibernate/Engine/BatchFetchQueue.cs | 4 +- ...efaultInitializeCollectionEventListener.cs | 2 +- .../Event/Default/DefaultLoadEventListener.cs | 2 +- 26 files changed, 203 insertions(+), 165 deletions(-) create mode 100644 src/NHibernate/Async/Cache/IBatchableCacheConcurrencyStrategy.cs create mode 100644 src/NHibernate/Cache/IBatchableCacheConcurrencyStrategy.cs diff --git a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs index 055c383d15b..d7b77008a19 100644 --- a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs @@ -22,7 +22,7 @@ namespace NHibernate.Test.CacheTest.Caches public 
partial class BatchableCache : ICache, IBatchableReadWriteCache { - public Task PutMultipleAsync(object[] keys, object[] values, CancellationToken cancellationToken) + public Task PutManyAsync(object[] keys, object[] values, CancellationToken cancellationToken) { try { @@ -39,7 +39,7 @@ public Task PutMultipleAsync(object[] keys, object[] values, CancellationToken c } } - public Task LockMultipleAsync(object[] keys, CancellationToken cancellationToken) + public Task LockManyAsync(object[] keys, CancellationToken cancellationToken) { try { @@ -52,7 +52,7 @@ public Task LockMultipleAsync(object[] keys, CancellationToken cancellationToken } } - public Task UnlockMultipleAsync(object[] keys, CancellationToken cancellationToken) + public Task UnlockManyAsync(object[] keys, CancellationToken cancellationToken) { try { @@ -81,7 +81,7 @@ public Task GetAsync(object key, CancellationToken cancellationToken) } } - public Task GetMultipleAsync(object[] keys, CancellationToken cancellationToken) + public Task GetManyAsync(object[] keys, CancellationToken cancellationToken) { try { diff --git a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs index de2c7edcca6..c4684f4e135 100644 --- a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs @@ -12,7 +12,6 @@ namespace NHibernate.Test.CacheTest.Caches public partial class BatchableCache : ICache, IBatchableReadWriteCache { private readonly IDictionary _hashtable = new Hashtable(); - private readonly string _regionName; public List GetMultipleCalls { get; } = new List(); @@ -26,7 +25,7 @@ public partial class BatchableCache : ICache, IBatchableReadWriteCache public List PutCalls { get; } = new List(); - public void PutMultiple(object[] keys, object[] values) + public void PutMany(object[] keys, object[] values) { PutMultipleCalls.Add(keys); for (int i = 0; i < keys.Length; i++) @@ -35,12 +34,12 @@ public void 
PutMultiple(object[] keys, object[] values) } } - public void LockMultiple(object[] keys) + public void LockMany(object[] keys) { LockMultipleCalls.Add(keys); } - public void UnlockMultiple(object[] keys) + public void UnlockMany(object[] keys) { UnlockMultipleCalls.Add(keys); } @@ -49,7 +48,7 @@ public void UnlockMultiple(object[] keys) public BatchableCache(string regionName) { - _regionName = regionName; + RegionName = regionName; } /// @@ -59,7 +58,7 @@ public object Get(object key) return _hashtable[key]; } - public object[] GetMultiple(object[] keys) + public object[] GetMany(object[] keys) { GetMultipleCalls.Add(keys); var result = new object[keys.Length]; @@ -123,18 +122,9 @@ public long NextTimestamp() } /// - public int Timeout - { - get - { - return Timestamper.OneMs * 60000; // ie. 60 seconds - } - } + public int Timeout => Timestamper.OneMs * 60000; - public string RegionName - { - get { return _regionName; } - } + public string RegionName { get; } #endregion } diff --git a/src/NHibernate/Async/Cache/CachePutBatch.cs b/src/NHibernate/Async/Cache/CachePutBatch.cs index 78bee14f440..1d6705cfe95 100644 --- a/src/NHibernate/Async/Cache/CachePutBatch.cs +++ b/src/NHibernate/Async/Cache/CachePutBatch.cs @@ -44,7 +44,7 @@ protected override async Task ExecuteAsync(CachePutData[] data, CancellationToke var factory = Session.Factory; var cacheStrategy = CacheConcurrencyStrategy; - var puts = await (cacheStrategy.PutMultipleAsync(keys, values, Session.Timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); + var puts = await (cacheStrategy.PutManyAsync(keys, values, Session.Timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); if (factory.Statistics.IsStatisticsEnabled && puts.Any(o => o)) { diff --git a/src/NHibernate/Async/Cache/IBatchableCacheConcurrencyStrategy.cs b/src/NHibernate/Async/Cache/IBatchableCacheConcurrencyStrategy.cs new file mode 100644 index 
00000000000..78a38e73d22 --- /dev/null +++ b/src/NHibernate/Async/Cache/IBatchableCacheConcurrencyStrategy.cs @@ -0,0 +1,47 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by AsyncGenerator. +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + + +using System; +using System.Collections; +using System.Text; +using NHibernate.Cache.Entry; + +namespace NHibernate.Cache +{ + using System.Threading.Tasks; + using System.Threading; + public partial interface IBatchableCacheConcurrencyStrategy : ICacheConcurrencyStrategy + { + /// + /// Attempt to retrieve multiple objects from the Cache + /// + /// The keys (id) of the objects to get out of the Cache. + /// A timestamp prior to the transaction start time + /// A cancellation token that can be used to cancel the work + /// An array of cached objects or + /// + Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken); + + /// + /// Attempt to cache objects, after loading them from the database. + /// + /// The keys (id) of the objects to put in the Cache. + /// The objects to put in the cache. + /// A timestamp prior to the transaction start time. + /// The version numbers of the objects we are putting. + /// The comparers to be used to compare version numbers + /// Indicates that the cache should avoid a put if the item is already cached. + /// A cancellation token that can be used to cancel the work + /// if the objects were successfully cached. 
+ /// + Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts, CancellationToken cancellationToken); + } +} diff --git a/src/NHibernate/Async/Cache/IBatchableReadCache.cs b/src/NHibernate/Async/Cache/IBatchableReadCache.cs index 40c2460a7cd..045d4639bdf 100644 --- a/src/NHibernate/Async/Cache/IBatchableReadCache.cs +++ b/src/NHibernate/Async/Cache/IBatchableReadCache.cs @@ -24,6 +24,6 @@ public partial interface IBatchableReadCache /// The keys to be retrieved from the cache. /// A cancellation token that can be used to cancel the work /// - Task GetMultipleAsync(object[] keys, CancellationToken cancellationToken); + Task GetManyAsync(object[] keys, CancellationToken cancellationToken); } } diff --git a/src/NHibernate/Async/Cache/IBatchableWriteCache.cs b/src/NHibernate/Async/Cache/IBatchableWriteCache.cs index 271eeff0059..9b810b8ff09 100644 --- a/src/NHibernate/Async/Cache/IBatchableWriteCache.cs +++ b/src/NHibernate/Async/Cache/IBatchableWriteCache.cs @@ -24,20 +24,20 @@ public partial interface IBatchableReadWriteCache : IBatchableReadCache /// The keys to cache. /// The objects to cache. /// A cancellation token that can be used to cancel the work - Task PutMultipleAsync(object[] keys, object[] values, CancellationToken cancellationToken); + Task PutManyAsync(object[] keys, object[] values, CancellationToken cancellationToken); /// /// Lock the objects from being changed by another thread. /// /// The keys to lock. /// A cancellation token that can be used to cancel the work - Task LockMultipleAsync(object[] keys, CancellationToken cancellationToken); + Task LockManyAsync(object[] keys, CancellationToken cancellationToken); /// /// Unlock the objects that were previously locked. /// /// The keys to unlock. 
/// A cancellation token that can be used to cancel the work - Task UnlockMultipleAsync(object[] keys, CancellationToken cancellationToken); + Task UnlockManyAsync(object[] keys, CancellationToken cancellationToken); } } diff --git a/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs index 89c44f6eb6c..5474e15a68b 100644 --- a/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Async/Cache/ICacheConcurrencyStrategy.cs @@ -130,31 +130,21 @@ internal static partial class CacheConcurrencyStrategyExtensions /// /// The cache concurrency strategy. /// The keys (id) of the objects to get out of the Cache. - /// A timestamp prior to the transaction start time + /// A timestamp prior to the transaction start time /// A cancellation token that can be used to cancel the work /// An array of cached objects or /// - //6.0 TODO: Merge into ICacheConcurrencyStrategy. - public static async Task GetMultipleAsync(this ICacheConcurrencyStrategy cache, CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + public static Task GetManyAsync(this ICacheConcurrencyStrategy cache, CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { - cancellationToken.ThrowIfCancellationRequested(); - switch (cache) + if (!(cache is IBatchableCacheConcurrencyStrategy batchableCache)) { - case ReadOnlyCache readOnly: - return await (readOnly.GetMultipleAsync(keys, txTimestamp, cancellationToken)).ConfigureAwait(false); - case ReadWriteCache readWrite: - return await (readWrite.GetMultipleAsync(keys, txTimestamp, cancellationToken)).ConfigureAwait(false); - case NonstrictReadWriteCache nonstrictReadWrite: - return await (nonstrictReadWrite.GetMultipleAsync(keys, txTimestamp, cancellationToken)).ConfigureAwait(false); + throw new InvalidOperationException($"Cache concurrency strategy {cache.GetType()} does not support batching"); } - - // Fallback to Get - var objects = new 
object[keys.Length]; - for (var i = 0; i < keys.Length; i++) + if (cancellationToken.IsCancellationRequested) { - objects[i] = await (cache.GetAsync(keys[i], txTimestamp, cancellationToken)).ConfigureAwait(false); + return Task.FromCanceled(cancellationToken); } - return objects; + return batchableCache.GetManyAsync(keys, timestamp, cancellationToken); } /// @@ -170,28 +160,18 @@ public static async Task GetMultipleAsync(this ICacheConcurrencyStrate /// A cancellation token that can be used to cancel the work /// if the objects were successfully cached. /// - //6.0 TODO: Merge into ICacheConcurrencyStrategy. - public static async Task PutMultipleAsync(this ICacheConcurrencyStrategy cache, CacheKey[] keys, object[] values, long timestamp, + public static Task PutManyAsync(this ICacheConcurrencyStrategy cache, CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { - cancellationToken.ThrowIfCancellationRequested(); - switch (cache) + if (!(cache is IBatchableCacheConcurrencyStrategy batchableCache)) { - case ReadOnlyCache readOnly: - return await (readOnly.PutMultipleAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); - case ReadWriteCache readWrite: - return await (readWrite.PutMultipleAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); - case NonstrictReadWriteCache nonstrictReadWrite: - return await (nonstrictReadWrite.PutMultipleAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken)).ConfigureAwait(false); + throw new InvalidOperationException($"Cache concurrency strategy {cache.GetType()} does not support batching"); } - - // Fallback to Put - var result = new bool[keys.Length]; - for (var i = 0; i < keys.Length; i++) + if (cancellationToken.IsCancellationRequested) { - result[i] = await 
(cache.PutAsync(keys[i], values[i], timestamp, versions[i], versionComparers[i], minimalPuts[i], cancellationToken)).ConfigureAwait(false); + return Task.FromCanceled(cancellationToken); } - return result; + return batchableCache.PutManyAsync(keys, values, timestamp, versions, versionComparers, minimalPuts, cancellationToken); } } } diff --git a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs index 3fa4dfe789a..c6cd692bb43 100644 --- a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs @@ -18,7 +18,7 @@ namespace NHibernate.Cache { using System.Threading.Tasks; using System.Threading; - public partial class NonstrictReadWriteCache : ICacheConcurrencyStrategy + public partial class NonstrictReadWriteCache : IBatchableCacheConcurrencyStrategy { /// @@ -44,7 +44,7 @@ public async Task GetAsync(CacheKey key, long txTimestamp, CancellationT return result; } - public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + public Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { if (_batchableReadCache == null) { @@ -54,14 +54,14 @@ public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, Cancel { return Task.FromCanceled(cancellationToken); } - return InternalGetMultipleAsync(); - async Task InternalGetMultipleAsync() + return InternalGetManyAsync(); + async Task InternalGetManyAsync() { if (log.IsDebugEnabled()) { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = await (_batchableReadCache.GetMultipleAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + var results = await (_batchableReadCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); if (!log.IsDebugEnabled()) { return results; @@ -77,7 +77,7 @@ async Task InternalGetMultipleAsync() 
/// /// Add multiple items to the cache /// - public Task PutMultipleAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + public Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { if (_batchableReadWriteCache == null) @@ -88,8 +88,8 @@ public Task PutMultipleAsync(CacheKey[] keys, object[] values, long time { return Task.FromCanceled(cancellationToken); } - return InternalPutMultipleAsync(); - async Task InternalPutMultipleAsync() + return InternalPutManyAsync(); + async Task InternalPutManyAsync() { var result = new bool[keys.Length]; if (timestamp == long.MinValue) @@ -111,7 +111,7 @@ async Task InternalPutMultipleAsync() var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = await (_batchableReadWriteCache.GetMultipleAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); + var objects = await (_batchableReadWriteCache.GetManyAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -143,7 +143,7 @@ async Task InternalPutMultipleAsync() putValues[j++] = values[i]; result[i] = true; } - await (_batchableReadWriteCache.PutMultipleAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); + await (_batchableReadWriteCache.PutManyAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); return result; } } diff --git a/src/NHibernate/Async/Cache/ReadOnlyCache.cs b/src/NHibernate/Async/Cache/ReadOnlyCache.cs index 91f22d778a4..97254fe7183 100644 --- a/src/NHibernate/Async/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Async/Cache/ReadOnlyCache.cs @@ -18,7 +18,7 @@ namespace NHibernate.Cache { using System.Threading.Tasks; using System.Threading; - public partial class ReadOnlyCache : ICacheConcurrencyStrategy + public partial class ReadOnlyCache : 
IBatchableCacheConcurrencyStrategy { public async Task GetAsync(CacheKey key, long timestamp, CancellationToken cancellationToken) @@ -32,7 +32,7 @@ public async Task GetAsync(CacheKey key, long timestamp, CancellationTok return result; } - public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + public Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { if (_batchableReadCache == null) { @@ -42,14 +42,14 @@ public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, Cancel { return Task.FromCanceled(cancellationToken); } - return InternalGetMultipleAsync(); - async Task InternalGetMultipleAsync() + return InternalGetManyAsync(); + async Task InternalGetManyAsync() { if (log.IsDebugEnabled()) { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = await (_batchableReadCache.GetMultipleAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + var results = await (_batchableReadCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); if (!log.IsDebugEnabled()) { return results; @@ -81,7 +81,7 @@ public Task LockAsync(CacheKey key, object version, CancellationToken } } - public Task PutMultipleAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + public Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { if (_batchableReadWriteCache == null) @@ -92,8 +92,8 @@ public Task PutMultipleAsync(CacheKey[] keys, object[] values, long time { return Task.FromCanceled(cancellationToken); } - return InternalPutMultipleAsync(); - async Task InternalPutMultipleAsync() + return InternalPutManyAsync(); + async Task InternalPutManyAsync() { var result = new bool[keys.Length]; if (timestamp == long.MinValue) @@ 
-115,7 +115,7 @@ async Task InternalPutMultipleAsync() var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = await (_batchableReadWriteCache.GetMultipleAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); + var objects = await (_batchableReadWriteCache.GetManyAsync(checkKeys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -147,7 +147,7 @@ async Task InternalPutMultipleAsync() putValues[j++] = values[i]; result[i] = true; } - await (_batchableReadWriteCache.PutMultipleAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); + await (_batchableReadWriteCache.PutManyAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); return result; } } diff --git a/src/NHibernate/Async/Cache/ReadWriteCache.cs b/src/NHibernate/Async/Cache/ReadWriteCache.cs index f8fcc4bbeb1..1708276456f 100644 --- a/src/NHibernate/Async/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/ReadWriteCache.cs @@ -18,7 +18,7 @@ namespace NHibernate.Cache { using System.Threading.Tasks; using System.Threading; - public partial class ReadWriteCache : ICacheConcurrencyStrategy + public partial class ReadWriteCache : IBatchableCacheConcurrencyStrategy { private readonly NHibernate.Util.AsyncLock _lockObjectAsync = new NHibernate.Util.AsyncLock(); @@ -90,7 +90,7 @@ public async Task GetAsync(CacheKey key, long txTimestamp, CancellationT } } - public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, CancellationToken cancellationToken) + public Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { if (_batchableReadCache == null) { @@ -100,8 +100,8 @@ public Task GetMultipleAsync(CacheKey[] keys, long txTimestamp, Cancel { return Task.FromCanceled(cancellationToken); } - return InternalGetMultipleAsync(); - async Task InternalGetMultipleAsync() + return InternalGetManyAsync(); + async Task 
InternalGetManyAsync() { if (log.IsDebugEnabled()) { @@ -110,11 +110,11 @@ async Task InternalGetMultipleAsync() var result = new object[keys.Length]; using (await _lockObjectAsync.LockAsync()) { - var lockables = await (_batchableReadCache.GetMultipleAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + var lockables = await (_batchableReadCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); for (var i = 0; i < lockables.Length; i++) { var lockable = (ILockable) lockables[i]; - var gettable = lockable != null && lockable.IsGettable(txTimestamp); + var gettable = lockable != null && lockable.IsGettable(timestamp); if (gettable) { @@ -180,7 +180,7 @@ public async Task LockAsync(CacheKey key, object version, Cancellatio /// database is operating in repeatable read isolation mode.) /// /// Whether the items were actually put into the cache - public Task PutMultipleAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + public Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { if (_batchableReadWriteCache == null) @@ -191,8 +191,8 @@ public Task PutMultipleAsync(CacheKey[] keys, object[] values, long time { return Task.FromCanceled(cancellationToken); } - return InternalPutMultipleAsync(); - async Task InternalPutMultipleAsync() + return InternalPutManyAsync(); + async Task InternalPutManyAsync() { var result = new bool[keys.Length]; @@ -211,9 +211,9 @@ async Task InternalPutMultipleAsync() var keysArr = keys.Cast().ToArray(); try { - await (_batchableReadWriteCache.LockMultipleAsync(keysArr, cancellationToken)).ConfigureAwait(false); + await (_batchableReadWriteCache.LockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); var putBatch = new Dictionary(); - var lockables = await 
(_batchableReadWriteCache.GetMultipleAsync(keys, cancellationToken)).ConfigureAwait(false); + var lockables = await (_batchableReadWriteCache.GetManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); for (var i = 0; i < keys.Length; i++) { var key = keys[i]; @@ -249,12 +249,12 @@ async Task InternalPutMultipleAsync() if (putBatch.Count > 0) { - await (_batchableReadWriteCache.PutMultipleAsync(putBatch.Keys.ToArray(), putBatch.Values.ToArray(), cancellationToken)).ConfigureAwait(false); + await (_batchableReadWriteCache.PutManyAsync(putBatch.Keys.ToArray(), putBatch.Values.ToArray(), cancellationToken)).ConfigureAwait(false); } } finally { - await (_batchableReadWriteCache.UnlockMultipleAsync(keysArr, cancellationToken)).ConfigureAwait(false); + await (_batchableReadWriteCache.UnlockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); } } return result; diff --git a/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs b/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs index 5a5f5aa3725..843160a1824 100644 --- a/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs +++ b/src/NHibernate/Async/Cache/UpdateTimestampsCache.cs @@ -122,7 +122,7 @@ public virtual async Task IsUpToDateAsync(ISet spaces, long timest { keys[index++] = space; } - var lastUpdates = await (_batchUpdateTimestamps.GetMultipleAsync(keys, cancellationToken)).ConfigureAwait(false); + var lastUpdates = await (_batchUpdateTimestamps.GetManyAsync(keys, cancellationToken)).ConfigureAwait(false); foreach (var lastUpdate in lastUpdates) { if (IsOutdated(lastUpdate, timestamp)) diff --git a/src/NHibernate/Async/Engine/BatchFetchQueue.cs b/src/NHibernate/Async/Engine/BatchFetchQueue.cs index e66d2cb18e4..3467fa5cddc 100644 --- a/src/NHibernate/Async/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Async/Engine/BatchFetchQueue.cs @@ -430,7 +430,7 @@ private async Task AreCachedAsync(List> ent persister.IdentifierType, entityKey.EntityName); } - var cacheResult = await 
(batchableCache.GetMultipleAsync(cacheKeys, cancellationToken)).ConfigureAwait(false); + var cacheResult = await (batchableCache.GetManyAsync(cacheKeys, cancellationToken)).ConfigureAwait(false); for (var j = 0; j < result.Length; j++) { result[j] = cacheResult[j] != null; @@ -469,7 +469,7 @@ private async Task AreCachedAsync(List InitializeCollectionFromCacheAsync(object id, ICollecti } keys.Add(source.GenerateCacheKey(key, persister.KeyType, persister.Role)); } - var cachedObjects = await (persister.Cache.GetMultipleAsync(keys.ToArray(), source.Timestamp, cancellationToken)).ConfigureAwait(false); + var cachedObjects = await (persister.Cache.GetManyAsync(keys.ToArray(), source.Timestamp, cancellationToken)).ConfigureAwait(false); for (var i = 1; i < cachedObjects.Length; i++) { var coll = source.PersistenceContext.BatchFetchQueue.GetBatchLoadableCollection(persister, collectionEntries[i]); diff --git a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs index fa1b61c84de..ead3cbe14c2 100644 --- a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs +++ b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs @@ -429,7 +429,7 @@ protected virtual async Task LoadFromSecondLevelCacheAsync(LoadEvent @ev } keys.Add(source.GenerateCacheKey(key, persister.IdentifierType, persister.RootEntityName)); } - var cachedObjects = await (persister.Cache.GetMultipleAsync(keys.ToArray(), source.Timestamp, cancellationToken)).ConfigureAwait(false); + var cachedObjects = await (persister.Cache.GetManyAsync(keys.ToArray(), source.Timestamp, cancellationToken)).ConfigureAwait(false); for (var i = 1; i < cachedObjects.Length; i++) { cancellationToken.ThrowIfCancellationRequested(); diff --git a/src/NHibernate/Cache/CachePutBatch.cs b/src/NHibernate/Cache/CachePutBatch.cs index ed2a3d48d74..96457406f06 100644 --- a/src/NHibernate/Cache/CachePutBatch.cs +++ 
b/src/NHibernate/Cache/CachePutBatch.cs @@ -37,7 +37,7 @@ protected override void Execute(CachePutData[] data) var factory = Session.Factory; var cacheStrategy = CacheConcurrencyStrategy; - var puts = cacheStrategy.PutMultiple(keys, values, Session.Timestamp, versions, versionComparers, minimalPuts); + var puts = cacheStrategy.PutMany(keys, values, Session.Timestamp, versions, versionComparers, minimalPuts); if (factory.Statistics.IsStatisticsEnabled && puts.Any(o => o)) { diff --git a/src/NHibernate/Cache/IBatchableCacheConcurrencyStrategy.cs b/src/NHibernate/Cache/IBatchableCacheConcurrencyStrategy.cs new file mode 100644 index 00000000000..56f15f6a9a8 --- /dev/null +++ b/src/NHibernate/Cache/IBatchableCacheConcurrencyStrategy.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections; +using System.Text; +using NHibernate.Cache.Entry; + +namespace NHibernate.Cache +{ + /// + /// Implementors manage transactional access to cached data. + /// + /// + /// + /// Transactions pass in a timestamp indicating transaction start time. + /// + /// + /// When used to cache entities and collections the key is the identifier of the + /// entity/collection and the value should be set to the + /// for an entity and the results of + /// for a collection. + /// + /// + public partial interface IBatchableCacheConcurrencyStrategy : ICacheConcurrencyStrategy + { + /// + /// Attempt to retrieve multiple objects from the Cache + /// + /// The keys (id) of the objects to get out of the Cache. + /// A timestamp prior to the transaction start time + /// An array of cached objects or + /// + object[] GetMany(CacheKey[] keys, long timestamp); + + /// + /// Attempt to cache objects, after loading them from the database. + /// + /// The keys (id) of the objects to put in the Cache. + /// The objects to put in the cache. + /// A timestamp prior to the transaction start time. + /// The version numbers of the objects we are putting. 
+ /// The comparers to be used to compare version numbers + /// Indicates that the cache should avoid a put if the item is already cached. + /// if the objects were successfully cached. + /// + bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + bool[] minimalPuts); + } +} diff --git a/src/NHibernate/Cache/IBatchableReadCache.cs b/src/NHibernate/Cache/IBatchableReadCache.cs index 79b6bf388a6..9f0f3d8cd0b 100644 --- a/src/NHibernate/Cache/IBatchableReadCache.cs +++ b/src/NHibernate/Cache/IBatchableReadCache.cs @@ -22,6 +22,6 @@ public partial interface IBatchableReadCache /// /// The keys to be retrieved from the cache. /// - object[] GetMultiple(object[] keys); + object[] GetMany(object[] keys); } } diff --git a/src/NHibernate/Cache/IBatchableWriteCache.cs b/src/NHibernate/Cache/IBatchableWriteCache.cs index f7712049687..ac53ec110a1 100644 --- a/src/NHibernate/Cache/IBatchableWriteCache.cs +++ b/src/NHibernate/Cache/IBatchableWriteCache.cs @@ -22,18 +22,18 @@ public partial interface IBatchableReadWriteCache : IBatchableReadCache /// /// The keys to cache. /// The objects to cache. - void PutMultiple(object[] keys, object[] values); + void PutMany(object[] keys, object[] values); /// /// Lock the objects from being changed by another thread. /// /// The keys to lock. - void LockMultiple(object[] keys); + void LockMany(object[] keys); /// /// Unlock the objects that were previously locked. /// /// The keys to unlock. - void UnlockMultiple(object[] keys); + void UnlockMany(object[] keys); } } diff --git a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs index ce1a6d536f3..77b86ff3369 100644 --- a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs @@ -148,29 +148,16 @@ internal static partial class CacheConcurrencyStrategyExtensions /// /// The cache concurrency strategy. 
/// The keys (id) of the objects to get out of the Cache. - /// A timestamp prior to the transaction start time + /// A timestamp prior to the transaction start time /// An array of cached objects or /// - //6.0 TODO: Merge into ICacheConcurrencyStrategy. - public static object[] GetMultiple(this ICacheConcurrencyStrategy cache, CacheKey[] keys, long txTimestamp) + public static object[] GetMany(this ICacheConcurrencyStrategy cache, CacheKey[] keys, long timestamp) { - switch (cache) - { - case ReadOnlyCache readOnly: - return readOnly.GetMultiple(keys, txTimestamp); - case ReadWriteCache readWrite: - return readWrite.GetMultiple(keys, txTimestamp); - case NonstrictReadWriteCache nonstrictReadWrite: - return nonstrictReadWrite.GetMultiple(keys, txTimestamp); - } - - // Fallback to Get - var objects = new object[keys.Length]; - for (var i = 0; i < keys.Length; i++) + if (!(cache is IBatchableCacheConcurrencyStrategy batchableCache)) { - objects[i] = cache.Get(keys[i], txTimestamp); + throw new InvalidOperationException($"Cache concurrency strategy {cache.GetType()} does not support batching"); } - return objects; + return batchableCache.GetMany(keys, timestamp); } /// @@ -185,39 +172,26 @@ public static object[] GetMultiple(this ICacheConcurrencyStrategy cache, CacheKe /// Indicates that the cache should avoid a put if the item is already cached. /// if the objects were successfully cached. /// - //6.0 TODO: Merge into ICacheConcurrencyStrategy. 
- public static bool[] PutMultiple(this ICacheConcurrencyStrategy cache, CacheKey[] keys, object[] values, long timestamp, + public static bool[] PutMany(this ICacheConcurrencyStrategy cache, CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { - switch (cache) - { - case ReadOnlyCache readOnly: - return readOnly.PutMultiple(keys, values, timestamp, versions, versionComparers, minimalPuts); - case ReadWriteCache readWrite: - return readWrite.PutMultiple(keys, values, timestamp, versions, versionComparers, minimalPuts); - case NonstrictReadWriteCache nonstrictReadWrite: - return nonstrictReadWrite.PutMultiple(keys, values, timestamp, versions, versionComparers, minimalPuts); - } - - // Fallback to Put - var result = new bool[keys.Length]; - for (var i = 0; i < keys.Length; i++) + if (!(cache is IBatchableCacheConcurrencyStrategy batchableCache)) { - result[i] = cache.Put(keys[i], values[i], timestamp, versions[i], versionComparers[i], minimalPuts[i]); + throw new InvalidOperationException($"Cache concurrency strategy {cache.GetType()} does not support batching"); } - return result; + return batchableCache.PutMany(keys, values, timestamp, versions, versionComparers, minimalPuts); } public static bool IsBatchingGetSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global - return cache.Cache is IBatchableReadCache; + return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableReadCache; } public static bool IsBatchingPutSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global - return cache.Cache is IBatchableReadWriteCache; + return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableReadWriteCache; } } } diff --git a/src/NHibernate/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Cache/NonstrictReadWriteCache.cs index 786506c05dd..6875583c76d 100644 --- 
a/src/NHibernate/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Cache/NonstrictReadWriteCache.cs @@ -14,7 +14,7 @@ namespace NHibernate.Cache /// This is an "asynchronous" concurrency strategy. /// for a much stricter algorithm /// - public partial class NonstrictReadWriteCache : ICacheConcurrencyStrategy + public partial class NonstrictReadWriteCache : IBatchableCacheConcurrencyStrategy { private ICache cache; private IBatchableReadCache _batchableReadCache; @@ -64,7 +64,7 @@ public object Get(CacheKey key, long txTimestamp) return result; } - public object[] GetMultiple(CacheKey[] keys, long txTimestamp) + public object[] GetMany(CacheKey[] keys, long timestamp) { if (_batchableReadCache == null) { @@ -74,7 +74,7 @@ public object[] GetMultiple(CacheKey[] keys, long txTimestamp) { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = _batchableReadCache.GetMultiple(keys.Select(o => (object) o).ToArray()); + var results = _batchableReadCache.GetMany(keys.Select(o => (object) o).ToArray()); if (!log.IsDebugEnabled()) { return results; @@ -89,7 +89,7 @@ public object[] GetMultiple(CacheKey[] keys, long txTimestamp) /// /// Add multiple items to the cache /// - public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { if (_batchableReadWriteCache == null) @@ -116,7 +116,7 @@ public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, obje var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = _batchableReadWriteCache.GetMultiple(checkKeys.ToArray()); + var objects = _batchableReadWriteCache.GetMany(checkKeys.ToArray()); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -148,7 +148,7 @@ public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, 
obje putValues[j++] = values[i]; result[i] = true; } - _batchableReadWriteCache.PutMultiple(putKeys, putValues); + _batchableReadWriteCache.PutMany(putKeys, putValues); return result; } diff --git a/src/NHibernate/Cache/ReadOnlyCache.cs b/src/NHibernate/Cache/ReadOnlyCache.cs index d1cc9c73aeb..98c9cb9447d 100644 --- a/src/NHibernate/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Cache/ReadOnlyCache.cs @@ -9,7 +9,7 @@ namespace NHibernate.Cache /// /// Caches data that is never updated /// - public partial class ReadOnlyCache : ICacheConcurrencyStrategy + public partial class ReadOnlyCache : IBatchableCacheConcurrencyStrategy { private ICache cache; private IBatchableReadCache _batchableReadCache; @@ -46,7 +46,7 @@ public object Get(CacheKey key, long timestamp) return result; } - public object[] GetMultiple(CacheKey[] keys, long txTimestamp) + public object[] GetMany(CacheKey[] keys, long timestamp) { if (_batchableReadCache == null) { @@ -56,7 +56,7 @@ public object[] GetMultiple(CacheKey[] keys, long txTimestamp) { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = _batchableReadCache.GetMultiple(keys.Select(o => (object) o).ToArray()); + var results = _batchableReadCache.GetMany(keys.Select(o => (object) o).ToArray()); if (!log.IsDebugEnabled()) { return results; @@ -77,7 +77,7 @@ public ISoftLock Lock(CacheKey key, object version) throw new InvalidOperationException("ReadOnlyCache: Can't write to a readonly object " + key.EntityOrRoleName); } - public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { if (_batchableReadWriteCache == null) @@ -104,7 +104,7 @@ public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, obje var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = 
_batchableReadWriteCache.GetMultiple(checkKeys.ToArray()); + var objects = _batchableReadWriteCache.GetMany(checkKeys.Select(o => (object) o).ToArray()); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -136,7 +136,7 @@ public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, obje putValues[j++] = values[i]; result[i] = true; } - _batchableReadWriteCache.PutMultiple(putKeys, putValues); + _batchableReadWriteCache.PutMany(putKeys, putValues); return result; } diff --git a/src/NHibernate/Cache/ReadWriteCache.cs b/src/NHibernate/Cache/ReadWriteCache.cs index 8c1879b64ec..3113ae9d009 100644 --- a/src/NHibernate/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Cache/ReadWriteCache.cs @@ -22,7 +22,7 @@ namespace NHibernate.Cache /// for a faster algorithm /// /// - public partial class ReadWriteCache : ICacheConcurrencyStrategy + public partial class ReadWriteCache : IBatchableCacheConcurrencyStrategy { public interface ILockable { @@ -146,7 +146,7 @@ public object Get(CacheKey key, long txTimestamp) } } - public object[] GetMultiple(CacheKey[] keys, long txTimestamp) + public object[] GetMany(CacheKey[] keys, long timestamp) { if (_batchableReadCache == null) { @@ -159,11 +159,11 @@ public object[] GetMultiple(CacheKey[] keys, long txTimestamp) var result = new object[keys.Length]; lock (_lockObject) { - var lockables = _batchableReadCache.GetMultiple(keys.Select(o => (object) o).ToArray()); + var lockables = _batchableReadCache.GetMany(keys.Select(o => (object) o).ToArray()); for (var i = 0; i < lockables.Length; i++) { var lockable = (ILockable) lockables[i]; - var gettable = lockable != null && lockable.IsGettable(txTimestamp); + var gettable = lockable != null && lockable.IsGettable(timestamp); if (gettable) { @@ -227,7 +227,7 @@ public ISoftLock Lock(CacheKey key, object version) /// database is operating in repeatable read isolation mode.) 
/// /// Whether the items were actually put into the cache - public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, + public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { if (_batchableReadWriteCache == null) @@ -251,9 +251,9 @@ public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, obje var keysArr = keys.Cast().ToArray(); try { - _batchableReadWriteCache.LockMultiple(keysArr); + _batchableReadWriteCache.LockMany(keysArr); var putBatch = new Dictionary(); - var lockables = _batchableReadWriteCache.GetMultiple(keys); + var lockables = _batchableReadWriteCache.GetMany(keysArr); for (var i = 0; i < keys.Length; i++) { var key = keys[i]; @@ -289,12 +289,12 @@ public bool[] PutMultiple(CacheKey[] keys, object[] values, long timestamp, obje if (putBatch.Count > 0) { - _batchableReadWriteCache.PutMultiple(putBatch.Keys.ToArray(), putBatch.Values.ToArray()); + _batchableReadWriteCache.PutMany(putBatch.Keys.ToArray(), putBatch.Values.ToArray()); } } finally { - _batchableReadWriteCache.UnlockMultiple(keysArr); + _batchableReadWriteCache.UnlockMany(keysArr); } } return result; diff --git a/src/NHibernate/Cache/UpdateTimestampsCache.cs b/src/NHibernate/Cache/UpdateTimestampsCache.cs index 5d77a1b9c16..45b16bca781 100644 --- a/src/NHibernate/Cache/UpdateTimestampsCache.cs +++ b/src/NHibernate/Cache/UpdateTimestampsCache.cs @@ -90,7 +90,7 @@ public virtual bool IsUpToDate(ISet spaces, long timestamp /* H2.1 has L { keys[index++] = space; } - var lastUpdates = _batchUpdateTimestamps.GetMultiple(keys); + var lastUpdates = _batchUpdateTimestamps.GetMany(keys); foreach (var lastUpdate in lastUpdates) { if (IsOutdated(lastUpdate, timestamp)) diff --git a/src/NHibernate/Engine/BatchFetchQueue.cs b/src/NHibernate/Engine/BatchFetchQueue.cs index a53b39fda8f..a0174c0273c 100644 --- 
a/src/NHibernate/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Engine/BatchFetchQueue.cs @@ -586,7 +586,7 @@ private bool[] AreCached(List> entityKeys, int[] ke persister.IdentifierType, entityKey.EntityName); } - var cacheResult = batchableCache.GetMultiple(cacheKeys); + var cacheResult = batchableCache.GetMany(cacheKeys); for (var j = 0; j < result.Length; j++) { result[j] = cacheResult[j] != null; @@ -623,7 +623,7 @@ private bool[] AreCached(List Date: Sat, 12 May 2018 20:17:37 +0200 Subject: [PATCH 5/8] Regenerate with AsyncGenerator 0.8.2.4 --- .../Event/Default/DefaultLoadEventListener.cs | 45 ++++++++----------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs index ead3cbe14c2..24d2752e3cd 100644 --- a/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs +++ b/src/NHibernate/Async/Event/Default/DefaultLoadEventListener.cs @@ -449,40 +449,33 @@ protected virtual async Task LoadFromSecondLevelCacheAsync(LoadEvent @ev Task AssembleAsync(CacheKey ck, object ce, LoadEvent evt, bool alterStatistics) { - try + if (factory.Statistics.IsStatisticsEnabled && alterStatistics) { - if (factory.Statistics.IsStatisticsEnabled && alterStatistics) + if (ce == null) { - if (ce == null) - { - factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); - log.Debug("Entity cache miss: {0}", ck); - } - else - { - factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); - log.Debug("Entity cache hit: {0}", ck); - } + factory.StatisticsImplementor.SecondLevelCacheMiss(persister.Cache.RegionName); + log.Debug("Entity cache miss: {0}", ck); } - - if (ce != null) + else { - CacheEntry entry = (CacheEntry) persister.CacheEntryStructure.Destructure(ce, factory); - - // Entity was found in second-level cache... 
- // NH: Different behavior (take a look to options.ExactPersister (NH-295)) - if (!options.ExactPersister || persister.EntityMetamodel.SubclassEntityNames.Contains(entry.Subclass)) - { - return AssembleCacheEntryAsync(entry, evt.EntityId, persister, evt, cancellationToken); - } + factory.StatisticsImplementor.SecondLevelCacheHit(persister.Cache.RegionName); + log.Debug("Entity cache hit: {0}", ck); } - - return Task.FromResult(null); } - catch (Exception ex) + + if (ce != null) { - return Task.FromException(ex); + CacheEntry entry = (CacheEntry) persister.CacheEntryStructure.Destructure(ce, factory); + + // Entity was found in second-level cache... + // NH: Different behavior (take a look to options.ExactPersister (NH-295)) + if (!options.ExactPersister || persister.EntityMetamodel.SubclassEntityNames.Contains(entry.Subclass)) + { + return AssembleCacheEntryAsync(entry, evt.EntityId, persister, evt, cancellationToken); + } } + + return Task.FromResult(null); } } From fae4917fc961d6349ac4307483f81adf07ad9b69 Mon Sep 17 00:00:00 2001 From: maca88 Date: Fri, 1 Jun 2018 23:05:57 +0200 Subject: [PATCH 6/8] Renamed IBatchableReadWriteCache to IBatchableCache and IBatchableReadCache to IBatchableReadOnlyCache --- src/AsyncGenerator.yml | 4 ++-- .../Async/CacheTest/Caches/BatchableCache.cs | 2 +- .../CacheTest/Caches/BatchableCache.cs | 2 +- ...chableWriteCache.cs => IBatchableCache.cs} | 2 +- ...eadCache.cs => IBatchableReadOnlyCache.cs} | 2 +- .../Async/Cache/NonstrictReadWriteCache.cs | 10 ++++----- src/NHibernate/Async/Cache/ReadOnlyCache.cs | 10 ++++----- src/NHibernate/Async/Cache/ReadWriteCache.cs | 14 ++++++------ .../Async/Engine/BatchFetchQueue.cs | 8 +++---- ...chableWriteCache.cs => IBatchableCache.cs} | 2 +- ...eadCache.cs => IBatchableReadOnlyCache.cs} | 2 +- .../Cache/ICacheConcurrencyStrategy.cs | 4 ++-- .../Cache/NonstrictReadWriteCache.cs | 18 +++++++-------- src/NHibernate/Cache/ReadOnlyCache.cs | 18 +++++++-------- 
src/NHibernate/Cache/ReadWriteCache.cs | 22 +++++++++---------- src/NHibernate/Cache/UpdateTimestampsCache.cs | 4 ++-- src/NHibernate/Engine/BatchFetchQueue.cs | 8 +++---- 17 files changed, 66 insertions(+), 66 deletions(-) rename src/NHibernate/Async/Cache/{IBatchableWriteCache.cs => IBatchableCache.cs} (95%) rename src/NHibernate/Async/Cache/{IBatchableReadCache.cs => IBatchableReadOnlyCache.cs} (94%) rename src/NHibernate/Cache/{IBatchableWriteCache.cs => IBatchableCache.cs} (93%) rename src/NHibernate/Cache/{IBatchableReadCache.cs => IBatchableReadOnlyCache.cs} (93%) diff --git a/src/AsyncGenerator.yml b/src/AsyncGenerator.yml index 05062816966..2948e57a205 100644 --- a/src/AsyncGenerator.yml +++ b/src/AsyncGenerator.yml @@ -258,8 +258,8 @@ methodRules: name: Lock - containingType: NHibernate.Cache.ICache name: Unlock - - containingType: NHibernate.Cache.IBatchableReadCache - - containingType: NHibernate.Cache.IBatchableReadWriteCache + - containingType: NHibernate.Cache.IBatchableReadOnlyCache + - containingType: NHibernate.Cache.IBatchableCache name: Cache - filters: - containingNamespace: NHibernate diff --git a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs index d7b77008a19..e39c3aaa11e 100644 --- a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs @@ -19,7 +19,7 @@ namespace NHibernate.Test.CacheTest.Caches { - public partial class BatchableCache : ICache, IBatchableReadWriteCache + public partial class BatchableCache : ICache, IBatchableCache { public Task PutManyAsync(object[] keys, object[] values, CancellationToken cancellationToken) diff --git a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs index c4684f4e135..067293c906a 100644 --- a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs +++ 
b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs @@ -9,7 +9,7 @@ namespace NHibernate.Test.CacheTest.Caches { - public partial class BatchableCache : ICache, IBatchableReadWriteCache + public partial class BatchableCache : ICache, IBatchableCache { private readonly IDictionary _hashtable = new Hashtable(); diff --git a/src/NHibernate/Async/Cache/IBatchableWriteCache.cs b/src/NHibernate/Async/Cache/IBatchableCache.cs similarity index 95% rename from src/NHibernate/Async/Cache/IBatchableWriteCache.cs rename to src/NHibernate/Async/Cache/IBatchableCache.cs index 9b810b8ff09..23d0b968205 100644 --- a/src/NHibernate/Async/Cache/IBatchableWriteCache.cs +++ b/src/NHibernate/Async/Cache/IBatchableCache.cs @@ -16,7 +16,7 @@ namespace NHibernate.Cache { using System.Threading.Tasks; using System.Threading; - public partial interface IBatchableReadWriteCache : IBatchableReadCache + public partial interface IBatchableCache : IBatchableReadOnlyCache { /// /// Add multiple objects to the cache. diff --git a/src/NHibernate/Async/Cache/IBatchableReadCache.cs b/src/NHibernate/Async/Cache/IBatchableReadOnlyCache.cs similarity index 94% rename from src/NHibernate/Async/Cache/IBatchableReadCache.cs rename to src/NHibernate/Async/Cache/IBatchableReadOnlyCache.cs index 045d4639bdf..750c5df8280 100644 --- a/src/NHibernate/Async/Cache/IBatchableReadCache.cs +++ b/src/NHibernate/Async/Cache/IBatchableReadOnlyCache.cs @@ -16,7 +16,7 @@ namespace NHibernate.Cache { using System.Threading.Tasks; using System.Threading; - public partial interface IBatchableReadCache + public partial interface IBatchableReadOnlyCache { /// /// Get multiple objects from the cache. 
diff --git a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs index c6cd692bb43..f8a21aa2d88 100644 --- a/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/NonstrictReadWriteCache.cs @@ -46,7 +46,7 @@ public async Task GetAsync(CacheKey key, long txTimestamp, CancellationT public Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { - if (_batchableReadCache == null) + if (_batchableReadOnlyCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); } @@ -61,7 +61,7 @@ async Task InternalGetManyAsync() { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = await (_batchableReadCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + var results = await (_batchableReadOnlyCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); if (!log.IsDebugEnabled()) { return results; @@ -80,7 +80,7 @@ async Task InternalGetManyAsync() public Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { - if (_batchableReadWriteCache == null) + if (_batchableCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); } @@ -111,7 +111,7 @@ async Task InternalPutManyAsync() var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = await (_batchableReadWriteCache.GetManyAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); + var objects = await (_batchableCache.GetManyAsync(checkKeys.ToArray(), cancellationToken)).ConfigureAwait(false); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -143,7 +143,7 @@ async Task InternalPutManyAsync() 
putValues[j++] = values[i]; result[i] = true; } - await (_batchableReadWriteCache.PutManyAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); + await (_batchableCache.PutManyAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); return result; } } diff --git a/src/NHibernate/Async/Cache/ReadOnlyCache.cs b/src/NHibernate/Async/Cache/ReadOnlyCache.cs index 97254fe7183..14530333b71 100644 --- a/src/NHibernate/Async/Cache/ReadOnlyCache.cs +++ b/src/NHibernate/Async/Cache/ReadOnlyCache.cs @@ -34,7 +34,7 @@ public async Task GetAsync(CacheKey key, long timestamp, CancellationTok public Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { - if (_batchableReadCache == null) + if (_batchableReadOnlyCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); } @@ -49,7 +49,7 @@ async Task InternalGetManyAsync() { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = await (_batchableReadCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + var results = await (_batchableReadOnlyCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); if (!log.IsDebugEnabled()) { return results; @@ -84,7 +84,7 @@ public Task LockAsync(CacheKey key, object version, CancellationToken public Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { - if (_batchableReadWriteCache == null) + if (_batchableCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); } @@ -115,7 +115,7 @@ async Task InternalPutManyAsync() var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = await (_batchableReadWriteCache.GetManyAsync(checkKeys.Select(o => (object) 
o).ToArray(), cancellationToken)).ConfigureAwait(false); + var objects = await (_batchableCache.GetManyAsync(checkKeys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -147,7 +147,7 @@ async Task InternalPutManyAsync() putValues[j++] = values[i]; result[i] = true; } - await (_batchableReadWriteCache.PutManyAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); + await (_batchableCache.PutManyAsync(putKeys, putValues, cancellationToken)).ConfigureAwait(false); return result; } } diff --git a/src/NHibernate/Async/Cache/ReadWriteCache.cs b/src/NHibernate/Async/Cache/ReadWriteCache.cs index 1708276456f..5e7340ae58b 100644 --- a/src/NHibernate/Async/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/ReadWriteCache.cs @@ -92,7 +92,7 @@ public async Task GetAsync(CacheKey key, long txTimestamp, CancellationT public Task GetManyAsync(CacheKey[] keys, long timestamp, CancellationToken cancellationToken) { - if (_batchableReadCache == null) + if (_batchableReadOnlyCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); } @@ -110,7 +110,7 @@ async Task InternalGetManyAsync() var result = new object[keys.Length]; using (await _lockObjectAsync.LockAsync()) { - var lockables = await (_batchableReadCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); + var lockables = await (_batchableReadOnlyCache.GetManyAsync(keys.Select(o => (object) o).ToArray(), cancellationToken)).ConfigureAwait(false); for (var i = 0; i < lockables.Length; i++) { var lockable = (ILockable) lockables[i]; @@ -183,7 +183,7 @@ public async Task LockAsync(CacheKey key, object version, Cancellatio public Task PutManyAsync(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts, CancellationToken cancellationToken) { - if 
(_batchableReadWriteCache == null) + if (_batchableCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); } @@ -211,9 +211,9 @@ async Task InternalPutManyAsync() var keysArr = keys.Cast().ToArray(); try { - await (_batchableReadWriteCache.LockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); + await (_batchableCache.LockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); var putBatch = new Dictionary(); - var lockables = await (_batchableReadWriteCache.GetManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); + var lockables = await (_batchableCache.GetManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); for (var i = 0; i < keys.Length; i++) { var key = keys[i]; @@ -249,12 +249,12 @@ async Task InternalPutManyAsync() if (putBatch.Count > 0) { - await (_batchableReadWriteCache.PutManyAsync(putBatch.Keys.ToArray(), putBatch.Values.ToArray(), cancellationToken)).ConfigureAwait(false); + await (_batchableCache.PutManyAsync(putBatch.Keys.ToArray(), putBatch.Values.ToArray(), cancellationToken)).ConfigureAwait(false); } } finally { - await (_batchableReadWriteCache.UnlockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); + await (_batchableCache.UnlockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); } } return result; diff --git a/src/NHibernate/Async/Engine/BatchFetchQueue.cs b/src/NHibernate/Async/Engine/BatchFetchQueue.cs index 3467fa5cddc..66ff2292419 100644 --- a/src/NHibernate/Async/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Async/Engine/BatchFetchQueue.cs @@ -66,7 +66,7 @@ internal async Task GetCollectionBatchAsync(ICollectionPersister colle // List of collection entries that haven't been checked for their existance in the cache. Besides the collection entry, // the index where the entry was found is also stored in order to correctly order the returning keys. 
var collectionKeys = new List, int>>(batchSize); - var batchableCache = collectionPersister.Cache?.Cache as IBatchableReadCache; + var batchableCache = collectionPersister.Cache?.Cache as IBatchableReadOnlyCache; if (!batchLoadableCollections.TryGetValue(collectionPersister.Role, out var map)) { @@ -258,7 +258,7 @@ internal async Task GetEntityBatchAsync(IEntityPersister persister, ob // List of entity keys that haven't been checked for their existance in the cache. Besides the entity key, // the index where the key was found is also stored in order to correctly order the returning keys. var entityKeys = new List>(batchSize); - var batchableCache = persister.Cache?.Cache as IBatchableReadCache; + var batchableCache = persister.Cache?.Cache as IBatchableReadOnlyCache; if (!batchLoadableEntityKeys.TryGetValue(persister.EntityName, out var set)) { @@ -412,7 +412,7 @@ private async Task IsCachedAsync(object collectionKey, ICollectionPersiste /// A cancellation token that can be used to cancel the work /// An array of booleans that contains the result for each key. private async Task AreCachedAsync(List> entityKeys, int[] keyIndexes, IEntityPersister persister, - IBatchableReadCache batchableCache, bool checkCache, CancellationToken cancellationToken) + IBatchableReadOnlyCache batchableCache, bool checkCache, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); var result = new bool[keyIndexes.Length]; @@ -450,7 +450,7 @@ private async Task AreCachedAsync(List> ent /// A cancellation token that can be used to cancel the work /// An array of booleans that contains the result for each key. 
private async Task AreCachedAsync(List, int>> collectionKeys, - int[] keyIndexes, ICollectionPersister persister, IBatchableReadCache batchableCache, + int[] keyIndexes, ICollectionPersister persister, IBatchableReadOnlyCache batchableCache, bool checkCache, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); diff --git a/src/NHibernate/Cache/IBatchableWriteCache.cs b/src/NHibernate/Cache/IBatchableCache.cs similarity index 93% rename from src/NHibernate/Cache/IBatchableWriteCache.cs rename to src/NHibernate/Cache/IBatchableCache.cs index ac53ec110a1..5aa8a7dfd0d 100644 --- a/src/NHibernate/Cache/IBatchableWriteCache.cs +++ b/src/NHibernate/Cache/IBatchableCache.cs @@ -15,7 +15,7 @@ namespace NHibernate.Cache /// All implementations must be threadsafe. /// /// - public partial interface IBatchableReadWriteCache : IBatchableReadCache + public partial interface IBatchableCache : IBatchableReadOnlyCache { /// /// Add multiple objects to the cache. diff --git a/src/NHibernate/Cache/IBatchableReadCache.cs b/src/NHibernate/Cache/IBatchableReadOnlyCache.cs similarity index 93% rename from src/NHibernate/Cache/IBatchableReadCache.cs rename to src/NHibernate/Cache/IBatchableReadOnlyCache.cs index 9f0f3d8cd0b..7bdbaa07bce 100644 --- a/src/NHibernate/Cache/IBatchableReadCache.cs +++ b/src/NHibernate/Cache/IBatchableReadOnlyCache.cs @@ -15,7 +15,7 @@ namespace NHibernate.Cache /// All implementations must be threadsafe. /// /// - public partial interface IBatchableReadCache + public partial interface IBatchableReadOnlyCache { /// /// Get multiple objects from the cache. 
diff --git a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs index 77b86ff3369..eab328135d4 100644 --- a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs @@ -185,13 +185,13 @@ public static bool[] PutMany(this ICacheConcurrencyStrategy cache, CacheKey[] ke public static bool IsBatchingGetSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global - return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableReadCache; + return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableReadOnlyCache; } public static bool IsBatchingPutSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global - return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableReadWriteCache; + return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableCache; } } } diff --git a/src/NHibernate/Cache/NonstrictReadWriteCache.cs b/src/NHibernate/Cache/NonstrictReadWriteCache.cs index 6875583c76d..ff7f6ce7ece 100644 --- a/src/NHibernate/Cache/NonstrictReadWriteCache.cs +++ b/src/NHibernate/Cache/NonstrictReadWriteCache.cs @@ -17,8 +17,8 @@ namespace NHibernate.Cache public partial class NonstrictReadWriteCache : IBatchableCacheConcurrencyStrategy { private ICache cache; - private IBatchableReadCache _batchableReadCache; - private IBatchableReadWriteCache _batchableReadWriteCache; + private IBatchableReadOnlyCache _batchableReadOnlyCache; + private IBatchableCache _batchableCache; private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(NonstrictReadWriteCache)); @@ -37,8 +37,8 @@ public ICache Cache { cache = value; // ReSharper disable once SuspiciousTypeConversion.Global - _batchableReadCache = value as IBatchableReadCache; - _batchableReadWriteCache = value as IBatchableReadWriteCache; + 
_batchableReadOnlyCache = value as IBatchableReadOnlyCache; + _batchableCache = value as IBatchableCache; } } @@ -66,7 +66,7 @@ public object Get(CacheKey key, long txTimestamp) public object[] GetMany(CacheKey[] keys, long timestamp) { - if (_batchableReadCache == null) + if (_batchableReadOnlyCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); } @@ -74,7 +74,7 @@ public object[] GetMany(CacheKey[] keys, long timestamp) { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = _batchableReadCache.GetMany(keys.Select(o => (object) o).ToArray()); + var results = _batchableReadOnlyCache.GetMany(keys.Select(o => (object) o).ToArray()); if (!log.IsDebugEnabled()) { return results; @@ -92,7 +92,7 @@ public object[] GetMany(CacheKey[] keys, long timestamp) public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { - if (_batchableReadWriteCache == null) + if (_batchableCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); } @@ -116,7 +116,7 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = _batchableReadWriteCache.GetMany(checkKeys.ToArray()); + var objects = _batchableCache.GetMany(checkKeys.ToArray()); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -148,7 +148,7 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] putValues[j++] = values[i]; result[i] = true; } - _batchableReadWriteCache.PutMany(putKeys, putValues); + _batchableCache.PutMany(putKeys, putValues); return result; } diff --git a/src/NHibernate/Cache/ReadOnlyCache.cs b/src/NHibernate/Cache/ReadOnlyCache.cs index 98c9cb9447d..e5df5e3eead 100644 --- a/src/NHibernate/Cache/ReadOnlyCache.cs +++ 
b/src/NHibernate/Cache/ReadOnlyCache.cs @@ -12,8 +12,8 @@ namespace NHibernate.Cache public partial class ReadOnlyCache : IBatchableCacheConcurrencyStrategy { private ICache cache; - private IBatchableReadCache _batchableReadCache; - private IBatchableReadWriteCache _batchableReadWriteCache; + private IBatchableReadOnlyCache _batchableReadOnlyCache; + private IBatchableCache _batchableCache; private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(ReadOnlyCache)); /// @@ -31,8 +31,8 @@ public ICache Cache { cache = value; // ReSharper disable once SuspiciousTypeConversion.Global - _batchableReadCache = value as IBatchableReadCache; - _batchableReadWriteCache = value as IBatchableReadWriteCache; + _batchableReadOnlyCache = value as IBatchableReadOnlyCache; + _batchableCache = value as IBatchableCache; } } @@ -48,7 +48,7 @@ public object Get(CacheKey key, long timestamp) public object[] GetMany(CacheKey[] keys, long timestamp) { - if (_batchableReadCache == null) + if (_batchableReadOnlyCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); } @@ -56,7 +56,7 @@ public object[] GetMany(CacheKey[] keys, long timestamp) { log.Debug("Cache lookup: {0}", string.Join(",", keys.AsEnumerable())); } - var results = _batchableReadCache.GetMany(keys.Select(o => (object) o).ToArray()); + var results = _batchableReadOnlyCache.GetMany(keys.Select(o => (object) o).ToArray()); if (!log.IsDebugEnabled()) { return results; @@ -80,7 +80,7 @@ public ISoftLock Lock(CacheKey key, object version) public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { - if (_batchableReadWriteCache == null) + if (_batchableCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); } @@ -104,7 +104,7 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, 
object[] var skipKeyIndexes = new HashSet(); if (checkKeys.Any()) { - var objects = _batchableReadWriteCache.GetMany(checkKeys.Select(o => (object) o).ToArray()); + var objects = _batchableCache.GetMany(checkKeys.Select(o => (object) o).ToArray()); for (var i = 0; i < objects.Length; i++) { if (objects[i] != null) @@ -136,7 +136,7 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] putValues[j++] = values[i]; result[i] = true; } - _batchableReadWriteCache.PutMany(putKeys, putValues); + _batchableCache.PutMany(putKeys, putValues); return result; } diff --git a/src/NHibernate/Cache/ReadWriteCache.cs b/src/NHibernate/Cache/ReadWriteCache.cs index 3113ae9d009..047d7164ad8 100644 --- a/src/NHibernate/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Cache/ReadWriteCache.cs @@ -36,8 +36,8 @@ public interface ILockable private readonly object _lockObject = new object(); private ICache cache; - private IBatchableReadCache _batchableReadCache; - private IBatchableReadWriteCache _batchableReadWriteCache; + private IBatchableReadOnlyCache _batchableReadOnlyCache; + private IBatchableCache _batchableCache; private int _nextLockId; public ReadWriteCache() @@ -59,8 +59,8 @@ public ICache Cache { cache = value; // ReSharper disable once SuspiciousTypeConversion.Global - _batchableReadCache = value as IBatchableReadCache; - _batchableReadWriteCache = value as IBatchableReadWriteCache; + _batchableReadOnlyCache = value as IBatchableReadOnlyCache; + _batchableCache = value as IBatchableCache; } } @@ -148,7 +148,7 @@ public object Get(CacheKey key, long txTimestamp) public object[] GetMany(CacheKey[] keys, long timestamp) { - if (_batchableReadCache == null) + if (_batchableReadOnlyCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching get operation"); } @@ -159,7 +159,7 @@ public object[] GetMany(CacheKey[] keys, long timestamp) var result = new object[keys.Length]; lock (_lockObject) { - var lockables = 
_batchableReadCache.GetMany(keys.Select(o => (object) o).ToArray()); + var lockables = _batchableReadOnlyCache.GetMany(keys.Select(o => (object) o).ToArray()); for (var i = 0; i < lockables.Length; i++) { var lockable = (ILockable) lockables[i]; @@ -230,7 +230,7 @@ public ISoftLock Lock(CacheKey key, object version) public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] versions, IComparer[] versionComparers, bool[] minimalPuts) { - if (_batchableReadWriteCache == null) + if (_batchableCache == null) { throw new InvalidOperationException($"Cache {cache.GetType()} does not support batching operations"); } @@ -251,9 +251,9 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] var keysArr = keys.Cast().ToArray(); try { - _batchableReadWriteCache.LockMany(keysArr); + _batchableCache.LockMany(keysArr); var putBatch = new Dictionary(); - var lockables = _batchableReadWriteCache.GetMany(keysArr); + var lockables = _batchableCache.GetMany(keysArr); for (var i = 0; i < keys.Length; i++) { var key = keys[i]; @@ -289,12 +289,12 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] if (putBatch.Count > 0) { - _batchableReadWriteCache.PutMany(putBatch.Keys.ToArray(), putBatch.Values.ToArray()); + _batchableCache.PutMany(putBatch.Keys.ToArray(), putBatch.Values.ToArray()); } } finally { - _batchableReadWriteCache.UnlockMany(keysArr); + _batchableCache.UnlockMany(keysArr); } } return result; diff --git a/src/NHibernate/Cache/UpdateTimestampsCache.cs b/src/NHibernate/Cache/UpdateTimestampsCache.cs index 45b16bca781..8641bbdab1b 100644 --- a/src/NHibernate/Cache/UpdateTimestampsCache.cs +++ b/src/NHibernate/Cache/UpdateTimestampsCache.cs @@ -18,7 +18,7 @@ public partial class UpdateTimestampsCache { private static readonly INHibernateLogger log = NHibernateLogger.For(typeof(UpdateTimestampsCache)); private ICache updateTimestamps; - private readonly IBatchableReadCache _batchUpdateTimestamps; + 
private readonly IBatchableReadOnlyCache _batchUpdateTimestamps; private readonly string regionName = typeof(UpdateTimestampsCache).Name; @@ -34,7 +34,7 @@ public UpdateTimestampsCache(Settings settings, IDictionary prop log.Info("starting update timestamps cache at region: {0}", regionName); updateTimestamps = settings.CacheProvider.BuildCache(regionName, props); // ReSharper disable once SuspiciousTypeConversion.Global - _batchUpdateTimestamps = updateTimestamps as IBatchableReadCache; + _batchUpdateTimestamps = updateTimestamps as IBatchableReadOnlyCache; } //Since v5.1 diff --git a/src/NHibernate/Engine/BatchFetchQueue.cs b/src/NHibernate/Engine/BatchFetchQueue.cs index a0174c0273c..1c02286959b 100644 --- a/src/NHibernate/Engine/BatchFetchQueue.cs +++ b/src/NHibernate/Engine/BatchFetchQueue.cs @@ -237,7 +237,7 @@ internal object[] GetCollectionBatch(ICollectionPersister collectionPersister, o // List of collection entries that haven't been checked for their existance in the cache. Besides the collection entry, // the index where the entry was found is also stored in order to correctly order the returning keys. var collectionKeys = new List, int>>(batchSize); - var batchableCache = collectionPersister.Cache?.Cache as IBatchableReadCache; + var batchableCache = collectionPersister.Cache?.Cache as IBatchableReadOnlyCache; if (!batchLoadableCollections.TryGetValue(collectionPersister.Role, out var map)) { @@ -420,7 +420,7 @@ internal object[] GetEntityBatch(IEntityPersister persister, object id, int batc // List of entity keys that haven't been checked for their existance in the cache. Besides the entity key, // the index where the key was found is also stored in order to correctly order the returning keys. 
var entityKeys = new List>(batchSize); - var batchableCache = persister.Cache?.Cache as IBatchableReadCache; + var batchableCache = persister.Cache?.Cache as IBatchableReadOnlyCache; if (!batchLoadableEntityKeys.TryGetValue(persister.EntityName, out var set)) { @@ -569,7 +569,7 @@ private bool IsCached(object collectionKey, ICollectionPersister persister) /// Whether to check the cache or just return for all keys. /// An array of booleans that contains the result for each key. private bool[] AreCached(List> entityKeys, int[] keyIndexes, IEntityPersister persister, - IBatchableReadCache batchableCache, bool checkCache) + IBatchableReadOnlyCache batchableCache, bool checkCache) { var result = new bool[keyIndexes.Length]; if (!checkCache || !persister.HasCache || !context.Session.CacheMode.HasFlag(CacheMode.Get)) @@ -605,7 +605,7 @@ private bool[] AreCached(List> entityKeys, int[] ke /// Whether to check the cache or just return for all keys. /// An array of booleans that contains the result for each key. private bool[] AreCached(List, int>> collectionKeys, - int[] keyIndexes, ICollectionPersister persister, IBatchableReadCache batchableCache, + int[] keyIndexes, ICollectionPersister persister, IBatchableReadOnlyCache batchableCache, bool checkCache) { var result = new bool[keyIndexes.Length]; From ce04db3f99af11ffcd139dff625bb6b08102b359 Mon Sep 17 00:00:00 2001 From: maca88 Date: Fri, 1 Jun 2018 23:10:22 +0200 Subject: [PATCH 7/8] Reorder the condition for checking if batching is supported in order to reduce type checking. 
--- src/NHibernate/Cache/ICacheConcurrencyStrategy.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs index eab328135d4..d22862d128a 100644 --- a/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs +++ b/src/NHibernate/Cache/ICacheConcurrencyStrategy.cs @@ -185,13 +185,13 @@ public static bool[] PutMany(this ICacheConcurrencyStrategy cache, CacheKey[] ke public static bool IsBatchingGetSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global - return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableReadOnlyCache; + return cache.Cache is IBatchableReadOnlyCache && cache is IBatchableCacheConcurrencyStrategy; } public static bool IsBatchingPutSupported(this ICacheConcurrencyStrategy cache) { // ReSharper disable once SuspiciousTypeConversion.Global - return cache is IBatchableCacheConcurrencyStrategy && cache.Cache is IBatchableCache; + return cache.Cache is IBatchableCache && cache is IBatchableCacheConcurrencyStrategy; } } } From 23d59d8da6e642e48aa6c5763fc0f769388e5022 Mon Sep 17 00:00:00 2001 From: maca88 Date: Sat, 16 Jun 2018 18:14:51 +0200 Subject: [PATCH 8/8] Changed the LockMany and UnlockMany method declarations to be more adequate for distributed locks. 
--- .../Async/CacheTest/Caches/BatchableCache.cs | 6 +++--- src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs | 5 +++-- src/NHibernate/Async/Cache/IBatchableCache.cs | 6 ++++-- src/NHibernate/Async/Cache/ReadWriteCache.cs | 10 ++++++++-- src/NHibernate/Cache/IBatchableCache.cs | 6 ++++-- src/NHibernate/Cache/ReadWriteCache.cs | 10 ++++++++-- 6 files changed, 30 insertions(+), 13 deletions(-) diff --git a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs index e39c3aaa11e..808997b9323 100644 --- a/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/Async/CacheTest/Caches/BatchableCache.cs @@ -39,12 +39,12 @@ public Task PutManyAsync(object[] keys, object[] values, CancellationToken cance } } - public Task LockManyAsync(object[] keys, CancellationToken cancellationToken) + public Task LockManyAsync(object[] keys, CancellationToken cancellationToken) { try { LockMultipleCalls.Add(keys); - return Task.CompletedTask; + return Task.FromResult(null); } catch (Exception ex) { @@ -52,7 +52,7 @@ public Task LockManyAsync(object[] keys, CancellationToken cancellationToken) } } - public Task UnlockManyAsync(object[] keys, CancellationToken cancellationToken) + public Task UnlockManyAsync(object[] keys, object lockValue, CancellationToken cancellationToken) { try { diff --git a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs index 067293c906a..e19985f1422 100644 --- a/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs +++ b/src/NHibernate.Test/CacheTest/Caches/BatchableCache.cs @@ -34,12 +34,13 @@ public void PutMany(object[] keys, object[] values) } } - public void LockMany(object[] keys) + public object LockMany(object[] keys) { LockMultipleCalls.Add(keys); + return null; } - public void UnlockMany(object[] keys) + public void UnlockMany(object[] keys, object lockValue) { 
UnlockMultipleCalls.Add(keys); } diff --git a/src/NHibernate/Async/Cache/IBatchableCache.cs b/src/NHibernate/Async/Cache/IBatchableCache.cs index 23d0b968205..f52892a2569 100644 --- a/src/NHibernate/Async/Cache/IBatchableCache.cs +++ b/src/NHibernate/Async/Cache/IBatchableCache.cs @@ -31,13 +31,15 @@ public partial interface IBatchableCache : IBatchableReadOnlyCache /// /// The keys to lock. /// A cancellation token that can be used to cancel the work - Task LockManyAsync(object[] keys, CancellationToken cancellationToken); + /// The value that was used to lock the keys. + Task LockManyAsync(object[] keys, CancellationToken cancellationToken); /// /// Unlock the objects that were previously locked. /// /// The keys to unlock. + /// The value that was used to lock the keys. /// A cancellation token that can be used to cancel the work - Task UnlockManyAsync(object[] keys, CancellationToken cancellationToken); + Task UnlockManyAsync(object[] keys, object lockValue, CancellationToken cancellationToken); } } diff --git a/src/NHibernate/Async/Cache/ReadWriteCache.cs b/src/NHibernate/Async/Cache/ReadWriteCache.cs index 5e7340ae58b..4c83d96b122 100644 --- a/src/NHibernate/Async/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Async/Cache/ReadWriteCache.cs @@ -209,9 +209,12 @@ async Task InternalPutManyAsync() log.Debug("Caching: {0}", string.Join(",", keys.AsEnumerable())); } var keysArr = keys.Cast().ToArray(); + var lockAquired = false; + object lockValue = null; try { - await (_batchableCache.LockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); + lockValue = await (_batchableCache.LockManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); + lockAquired = true; var putBatch = new Dictionary(); var lockables = await (_batchableCache.GetManyAsync(keysArr, cancellationToken)).ConfigureAwait(false); for (var i = 0; i < keys.Length; i++) @@ -254,7 +257,10 @@ async Task InternalPutManyAsync() } finally { - await (_batchableCache.UnlockManyAsync(keysArr, 
cancellationToken)).ConfigureAwait(false); + if (lockAquired) + { + await (_batchableCache.UnlockManyAsync(keysArr, lockValue, cancellationToken)).ConfigureAwait(false); + } } } return result; diff --git a/src/NHibernate/Cache/IBatchableCache.cs b/src/NHibernate/Cache/IBatchableCache.cs index 5aa8a7dfd0d..b85dc83c9cf 100644 --- a/src/NHibernate/Cache/IBatchableCache.cs +++ b/src/NHibernate/Cache/IBatchableCache.cs @@ -28,12 +28,14 @@ public partial interface IBatchableCache : IBatchableReadOnlyCache /// Lock the objects from being changed by another thread. /// /// The keys to lock. - void LockMany(object[] keys); + /// The value that was used to lock the keys. + object LockMany(object[] keys); /// /// Unlock the objects that were previously locked. /// /// The keys to unlock. - void UnlockMany(object[] keys); + /// The value that was used to lock the keys. + void UnlockMany(object[] keys, object lockValue); } } diff --git a/src/NHibernate/Cache/ReadWriteCache.cs b/src/NHibernate/Cache/ReadWriteCache.cs index 047d7164ad8..fa66ed9bd16 100644 --- a/src/NHibernate/Cache/ReadWriteCache.cs +++ b/src/NHibernate/Cache/ReadWriteCache.cs @@ -249,9 +249,12 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] log.Debug("Caching: {0}", string.Join(",", keys.AsEnumerable())); } var keysArr = keys.Cast().ToArray(); + var lockAquired = false; + object lockValue = null; try { - _batchableCache.LockMany(keysArr); + lockValue = _batchableCache.LockMany(keysArr); + lockAquired = true; var putBatch = new Dictionary(); var lockables = _batchableCache.GetMany(keysArr); for (var i = 0; i < keys.Length; i++) @@ -294,7 +297,10 @@ public bool[] PutMany(CacheKey[] keys, object[] values, long timestamp, object[] } finally { - _batchableCache.UnlockMany(keysArr); + if (lockAquired) + { + _batchableCache.UnlockMany(keysArr, lockValue); + } } } return result;