Commit 04abe50

Optimize cache to lock per key
1 parent b53ccd9 commit 04abe50

File tree

1 file changed: +22 -13 lines changed

LazyCache/CachingService.cs

Lines changed: 22 additions & 13 deletions
@@ -13,7 +13,7 @@ public class CachingService : IAppCache
     {
         private readonly Lazy<ICacheProvider> cacheProvider;
 
-        private readonly SemaphoreSlim locker = new SemaphoreSlim(1, 1);
+        private readonly int[] keyLocks = new int[8192];
 
         public CachingService() : this(DefaultCacheProvider)
         {
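For context on the change above: the single SemaphoreSlim that serialised every cache miss is replaced by an array of 8192 int flags, one spin-lock slot per key hash (a lock-striping pattern). The sketch below pulls that pattern out into a standalone helper; the KeyedSpinLock name and its Enter/Exit API are illustrative only, not part of LazyCache.

using System.Threading;

public sealed class KeyedSpinLock
{
    // One int flag per slot: 0 = free, 1 = held. Distinct keys can hash to the same slot.
    private readonly int[] slots = new int[8192];

    public uint Enter(string key)
    {
        uint slot = (uint)key.GetHashCode() % (uint)slots.Length;
        // Spin until we atomically flip the slot from 0 (free) to 1 (held).
        while (Interlocked.CompareExchange(ref slots[slot], 1, 0) == 1)
            Thread.Yield();
        return slot;
    }

    public void Exit(uint slot)
    {
        // Release the slot so the next waiter's CompareExchange can succeed
        // (the commit uses the same plain write back to 0).
        slots[slot] = 0;
    }
}

Because slots are chosen by hashing the key, two unrelated keys can occasionally land on the same slot and contend briefly, but with 8192 slots that is rare and still far cheaper than queueing every caller behind one semaphore.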
@@ -104,14 +104,17 @@ object CacheFactory(ICacheEntry entry) =>
                     return result;
                 });
 
-            locker.Wait(); //TODO: do we really need this? Could we just lock on the key? like this? https://github.com/zkSNACKs/WalletWasabi/blob/7780db075685d2dc13620e0bcf6cc07578b627c2/WalletWasabi/Extensions/MemoryExtensions.cs
+            // acquire lock per key
+            uint hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+            while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }
+
             try
             {
                 cacheItem = CacheProvider.GetOrCreate<object>(key, policy, CacheFactory);
             }
             finally
             {
-                locker.Release();
+                keyLocks[hash] = 0;
             }
 
             try
@@ -122,14 +125,18 @@ object CacheFactory(ICacheEntry entry) =>
                 if (valueHasChangedType)
                 {
                     CacheProvider.Remove(key);
-                    locker.Wait(); //TODO: do we really need this? Could we just lock on the key?
+
+                    // acquire lock again
+                    hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+                    while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }
+
                     try
                     {
                         cacheItem = CacheProvider.GetOrCreate<object>(key, CacheFactory);
                     }
                     finally
                     {
-                        locker.Release();
+                        keyLocks[hash] = 0;
                     }
                     result = GetValueFromLazy<T>(cacheItem, out _ /* we just evicted so type change cannot happen this time */);
                 }
@@ -176,9 +183,9 @@ public virtual async Task<T> GetOrAddAsync<T>(string key, Func<ICacheEntry, Task
             // below, and guarded using the async lazy. Here we just ensure only one thread can place
             // the AsyncLazy into the cache at one time
 
-            await locker.WaitAsync()
-                .ConfigureAwait(
-                    false); //TODO: do we really need to lock everything here - faster if we could lock on just the key?
+            // acquire lock
+            uint hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+            while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }
 
             object CacheFactory(ICacheEntry entry) =>
                 new AsyncLazy<T>(() =>
@@ -195,7 +202,7 @@ object CacheFactory(ICacheEntry entry) =>
             }
             finally
             {
-                locker.Release();
+                keyLocks[hash] = 0;
             }
 
             try
@@ -206,16 +213,18 @@ object CacheFactory(ICacheEntry entry) =>
                 if (valueHasChangedType)
                 {
                     CacheProvider.Remove(key);
-                    await locker.WaitAsync()
-                        .ConfigureAwait(
-                            false); //TODO: do we really need to lock everything here - faster if we could lock on just the key?
+
+                    // acquire lock
+                    hash = (uint)key.GetHashCode() % (uint)keyLocks.Length;
+                    while (Interlocked.CompareExchange(ref keyLocks[hash], 1, 0) == 1) { Thread.Yield(); }
+
                     try
                     {
                         cacheItem = CacheProvider.GetOrCreate<object>(key, CacheFactory);
                     }
                     finally
                     {
-                        locker.Release();
+                        keyLocks[hash] = 0;
                     }
                     result = GetValueFromAsyncLazy<T>(cacheItem, out _ /* we just evicted so type change cannot happen this time */);
                 }
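The caller-visible effect is that concurrent GetOrAdd/GetOrAddAsync calls for different keys no longer wait on one global semaphore; only callers whose keys hash to the same lock slot can briefly contend. A hypothetical usage sketch (Demo and LoadItem are stand-ins, not LazyCache APIs):

using System;
using System.Threading.Tasks;
using LazyCache;

class Demo
{
    // Stand-in for an expensive computation or I/O call.
    static int LoadItem(int i) => i * i;

    static void Main()
    {
        IAppCache cache = new CachingService();

        // Misses on different keys proceed in parallel; callers racing on the
        // same key still get a single factory execution via the cached Lazy<T>.
        Parallel.For(0, 100, i =>
        {
            var value = cache.GetOrAdd($"item-{i % 10}", entry =>
            {
                entry.AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(5);
                return LoadItem(i % 10);
            });
        });
    }
}

Note that the lock only guards placing the Lazy/AsyncLazy wrapper into the underlying cache; the expensive factory itself still runs when the wrapper is unwrapped afterwards, outside the spin-lock slot.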
