Skip to content

Commit 5fbd251

Browse files
authored
implement remove (#17)
1 parent c6e16b8 commit 5fbd251

File tree

8 files changed

+253
-22
lines changed

8 files changed

+253
-22
lines changed

Lightweight.Caching.UnitTests/ClassicLruTests.cs

Lines changed: 48 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
using FluentAssertions;
2+
using Lightweight.Caching.UnitTests.Lru;
23
using System;
34
using System.Collections.Generic;
45
using System.Linq;
@@ -62,6 +63,16 @@ public void WhenKeyIsRequestedItIsCreatedAndCached()
6263
result1.Should().Be(result2);
6364
}
6465

[Fact]
public async Task WhenKeyIsRequesteItIsCreatedAndCachedAsync()
{
    // NOTE(review): method name has a typo ("Requeste"); kept as-is so published test
    // report names stay stable.
    // Request the same key twice: the async factory must run exactly once and both
    // calls must observe the identical cached value.
    var firstResult = await lru.GetOrAddAsync(1, valueFactory.CreateAsync).ConfigureAwait(false);
    var secondResult = await lru.GetOrAddAsync(1, valueFactory.CreateAsync).ConfigureAwait(false);

    valueFactory.timesCalled.Should().Be(1);
    secondResult.Should().Be(firstResult);
}
6576
[Fact]
6677
public void WhenDifferentKeysAreRequestedValueIsCreatedForEach()
6778
{
@@ -74,6 +85,18 @@ public void WhenDifferentKeysAreRequestedValueIsCreatedForEach()
7485
result2.Should().Be("2");
7586
}
7687

[Fact]
public async Task WhenDifferentKeysAreRequesteValueIsCreatedForEachAsync()
{
    // NOTE(review): method name has a typo ("Requeste"); kept as-is to avoid renaming
    // a published test.
    // Two distinct keys must each invoke the factory and produce their own value.
    var valueForKey1 = await lru.GetOrAddAsync(1, valueFactory.CreateAsync).ConfigureAwait(false);
    var valueForKey2 = await lru.GetOrAddAsync(2, valueFactory.CreateAsync).ConfigureAwait(false);

    valueFactory.timesCalled.Should().Be(2);

    valueForKey1.Should().Be("1");
    valueForKey2.Should().Be("2");
}
99+
77100
[Fact]
78101
public void WhenMoreKeysRequestedThanCapacityCountDoesNotIncrease()
79102
{
@@ -86,6 +109,18 @@ public void WhenMoreKeysRequestedThanCapacityCountDoesNotIncrease()
86109
valueFactory.timesCalled.Should().Be(capacity + 1);
87110
}
88111

[Fact]
public async Task WhenMoreKeysRequestedThanCapacityCountDoesNotIncreaseAsync()
{
    // Request one more key than the cache can hold: Count must be pinned at capacity
    // while the factory still runs once per distinct key.
    for (int i = 0; i < capacity + 1; i++)
    {
        // ConfigureAwait(false) added for consistency with the other async tests in this class.
        await lru.GetOrAddAsync(i, valueFactory.CreateAsync).ConfigureAwait(false);
    }

    lru.Count.Should().Be(capacity);
    valueFactory.timesCalled.Should().Be(capacity + 1);
}
123+
89124
[Fact]
90125
public void WhenMoreKeysRequestedThanCapacityOldestItemIsEvicted()
91126
{
@@ -132,15 +167,21 @@ public void WhenKeyExistsTryGetReturnsTrueAndOutValueIsCorrect()
132167
value.Should().Be("1");
133168
}
134169

135-
private class ValueFactory
[Fact]
public void WhenKeyExistsTryRemoveRemovesItemAndReturnsTrue()
{
    // Seed the cache, then remove the key: TryRemove reports success and a
    // subsequent lookup must miss.
    lru.GetOrAdd(1, valueFactory.Create);

    bool removed = lru.TryRemove(1);

    removed.Should().BeTrue();
    lru.TryGet(1, out var value).Should().BeFalse();
}
178+
[Fact]
public void WhenKeyDoesNotExistTryRemoveReturnsFalse()
{
    // The cache holds key 1 only; removing an absent key must report failure.
    lru.GetOrAdd(1, valueFactory.Create);

    lru.TryRemove(2).Should().BeFalse();
}
145186
}
146187
}

Lightweight.Caching.UnitTests/Lru/ConcurrentLruTests.cs

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -299,6 +299,35 @@ public void WhenValueExpiresItIsDisposed()
299299
disposableValueFactory.Items[1].IsDisposed.Should().BeFalse();
300300
}
301301

[Fact]
public void WhenKeyExistsTryRemoveRemovesItemAndReturnsTrue()
{
    // After a successful add, removal must succeed and the key must no longer resolve.
    lru.GetOrAdd(1, valueFactory.Create);

    bool removed = lru.TryRemove(1);

    removed.Should().BeTrue();
    lru.TryGet(1, out var value).Should().BeFalse();
}
310+
311+
[Fact]
public void WhenItemIsRemovedItIsDisposed()
{
    // A removed value that implements IDisposable must be disposed by the cache.
    var lruOfDisposable = new ConcurrentLru<int, DisposableItem>(1, 6, EqualityComparer<int>.Default);
    var disposableValueFactory = new DisposableValueFactory();

    lruOfDisposable.GetOrAdd(1, disposableValueFactory.Create);

    // Assert the removal actually succeeded; if it silently missed, the disposal
    // assertion below could fail (or pass) for the wrong reason.
    lruOfDisposable.TryRemove(1).Should().BeTrue();

    disposableValueFactory.Items[1].IsDisposed.Should().BeTrue();
}
322+
323+
[Fact]
public void WhenKeyDoesNotExistTryRemoveReturnsFalse()
{
    // Only key 1 is cached; removing a different key must report failure.
    lru.GetOrAdd(1, valueFactory.Create);

    lru.TryRemove(2).Should().BeFalse();
}
330+
302331
private class DisposableItem : IDisposable
303332
{
304333
public bool IsDisposed { get; private set; }

Lightweight.Caching.UnitTests/Lru/ConcurrentTLruTests.cs

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,5 +30,14 @@ public async Task WhenItemIsExpiredItIsRemoved()
3030

3131
lru.TryGet(1, out var value).Should().BeFalse();
3232
}
33+
34+
[Fact]
public void WhenItemIsAddedThenRetrievedHitRatioIsHalf()
{
    // One miss (the initial add) followed by one hit gives a 50% hit ratio.
    lru.GetOrAdd(1, valueFactory.Create);
    bool result = lru.TryGet(1, out var value);

    // Assert the hit actually happened; previously 'result' was unused, so the
    // ratio check could pass even if the lookup missed.
    result.Should().BeTrue();
    lru.HitRatio.Should().Be(0.5);
}
3342
}
3443
}
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
using FluentAssertions;
using Lightweight.Caching.Lru;
using System;
using System.Collections.Generic;
using System.Text;
using Xunit;

namespace Lightweight.Caching.UnitTests.Lru
{
    public class FastConcurrentLruTests
    {
        [Fact]
        public void ConstructAddAndRetrieveWithCustomComparerReturnsValue()
        {
            // The comparer supplied at construction governs key equality: with a
            // case-insensitive comparer, "foo" and "FOO" resolve to the same entry.
            var cache = new FastConcurrentLru<string, int>(9, 9, StringComparer.OrdinalIgnoreCase);

            cache.GetOrAdd("foo", k => 1);

            bool found = cache.TryGet("FOO", out var value);

            found.Should().BeTrue();
            value.Should().Be(1);
        }
    }
}
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
using FluentAssertions;
using Lightweight.Caching.Lru;
using System;
using System.Collections.Generic;
using System.Text;
using Xunit;

namespace Lightweight.Caching.UnitTests.Lru
{
    public class FastConcurrentTLruTests
    {
        [Fact]
        public void ConstructAddAndRetrieveWithCustomComparerReturnsValue()
        {
            // The comparer supplied at construction governs key equality: with a
            // case-insensitive comparer, "foo" and "FOO" resolve to the same entry.
            var cache = new FastConcurrentTLru<string, int>(9, 9, StringComparer.OrdinalIgnoreCase, TimeSpan.FromSeconds(10));

            cache.GetOrAdd("foo", k => 1);

            bool found = cache.TryGet("FOO", out var value);

            found.Should().BeTrue();
            value.Should().Be(1);
        }
    }
}

Lightweight.Caching/ClassicLru.cs

Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,71 @@ public V GetOrAdd(K key, Func<K, V> valueFactory)
9393
return this.GetOrAdd(key, valueFactory);
9494
}
9595

/// <summary>
/// Gets the value for <paramref name="key"/>, invoking the asynchronous
/// <paramref name="valueFactory"/> and caching the result on a miss.
/// </summary>
/// <param name="key">The key to look up or add.</param>
/// <param name="valueFactory">Factory invoked to create the value when the key is absent.</param>
/// <returns>The cached or newly created value.</returns>
/// <remarks>
/// The factory may run even when another thread wins the race to insert the same key;
/// the losing thread discards its value and retries.
/// </remarks>
public async Task<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
{
    if (this.TryGet(key, out var value))
    {
        return value;
    }

    // ConfigureAwait(false): library code has no need to resume on the caller's context
    // (consistent with TemplateConcurrentLru.GetOrAddAsync).
    var node = new LinkedListNode<LruItem>(new LruItem(key, await valueFactory(key).ConfigureAwait(false)));

    if (this.dictionary.TryAdd(key, node))
    {
        LinkedListNode<LruItem> first = null;

        lock (this.linkedList)
        {
            if (linkedList.Count >= capacity)
            {
                first = linkedList.First;
                linkedList.RemoveFirst();
            }

            linkedList.AddLast(node);
        }

        // Remove from the dictionary outside the lock. This means that the dictionary at this moment
        // contains an item that is not in the linked list. If another thread fetches this item,
        // LockAndMoveToEnd will ignore it, since it is detached. This means we potentially 'lose' an
        // item just as it was about to move to the back of the LRU list and be preserved. The next request
        // for the same key will be a miss. Dictionary and list are eventually consistent.
        // However, all operations inside the lock are extremely fast, so contention is minimized.
        if (first != null)
        {
            dictionary.TryRemove(first.Value.Key, out var removed);
        }

        return node.Value.Value;
    }

    // Another thread added this key between TryGet and TryAdd; retry. The recursion is
    // bounded in practice because the key now exists and the next TryGet will hit.
    return await this.GetOrAddAsync(key, valueFactory).ConfigureAwait(false);
}
136+
137+
/// <summary>
/// Attempts to remove the specified key from the cache.
/// </summary>
/// <param name="key">The key to remove.</param>
/// <returns>True if the key was present and removed; otherwise false.</returns>
/// <remarks>
/// NOTE(review): unlike TemplateConcurrentLru.TryRemove, this does not dispose an
/// IDisposable value on removal — confirm whether that asymmetry is intentional.
/// </remarks>
public bool TryRemove(K key)
{
    if (dictionary.TryRemove(key, out var node))
    {
        // If the node has already been removed from the list, ignore.
        // E.g. thread A reads x from the dictionary. Thread B adds a new item, removes x from
        // the List & Dictionary. Now thread A will try to move x to the end of the list.
        if (node.List != null)
        {
            // Double-checked under the list lock: the node may have been detached
            // between the unlocked check above and acquiring the lock.
            lock (this.linkedList)
            {
                if (node.List != null)
                {
                    linkedList.Remove(node);
                }
            }
        }

        return true;
    }

    return false;
}
160+
96161
// Thead A reads x from the dictionary. Thread B adds a new item. Thread A moves x to the end. Thread B now removes the new first Node (removal is atomic on both data structures).
97162
private void LockAndMoveToEnd(LinkedListNode<LruItem> node)
98163
{

Lightweight.Caching/ICache.cs

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,5 +11,9 @@ public interface ICache<K, V>
1111
bool TryGet(K key, out V value);
1212

1313
V GetOrAdd(K key, Func<K, V> valueFactory);
14+
15+
Task<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory);
16+
17+
bool TryRemove(K key);
1418
}
1519
}

Lightweight.Caching/Lru/TemplateConcurrentLru.cs

Lines changed: 52 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,22 @@
88

99
namespace Lightweight.Caching.Lru
1010
{
11+
/// <summary>
12+
/// LRU implementation where LRU list is composed of 3 segments: hot, warm and cold. Cost of maintaining
13+
/// segments is amortized across requests. Items are only cycled when capacity is exceeded. Pure read does
14+
/// not cycle items if all segments are within capacity constraints.
15+
/// There are no global locks. On cache miss, a new item is added. Tail items in each segment are dequeued,
16+
/// examined, and are either enqueued or discarded.
17+
/// </summary>
18+
/// <remarks>
19+
/// Each segment has a capacity. When segment capacity is exceeded, items are moved as follows:
20+
/// 1. New items are added to hot, WasAccessed = false
21+
/// 2. When items are accessed, update WasAccessed = true
22+
/// 3. When items are moved WasAccessed is set to false.
23+
/// 4. When hot is full, hot tail is moved to either Warm or Cold depending on WasAccessed.
24+
/// 5. When warm is full, warm tail is moved to warm head or cold depending on WasAccessed.
25+
/// 6. When cold is full, cold tail is moved to warm head or removed from the dictionary depending on WasAccessed.
26+
/// </remarks>
1127
public class TemplateConcurrentLru<K, V, I, P, H> : ICache<K, V>
1228
where I : LruItem<K, V>
1329
where P : struct, IPolicy<K, V, I>
@@ -56,7 +72,7 @@ public TemplateConcurrentLru(
5672
this.hitCounter = hitCounter;
5773
}
5874

59-
public int Count => this.hotCount + this.warmCount + this.coldCount;
75+
public int Count => this.dictionary.Count;
6076

6177
public int HotCount => this.hotCount;
6278

@@ -103,7 +119,7 @@ public V GetOrAdd(K key, Func<K, V> valueFactory)
103119
{
104120
this.hotQueue.Enqueue(newItem);
105121
Interlocked.Increment(ref hotCount);
106-
BumpItems();
122+
Cycle();
107123
return newItem.Value;
108124
}
109125

@@ -125,30 +141,51 @@ public async Task<V> GetOrAddAsync(K key, Func<K, Task<V>> valueFactory)
125141
{
126142
this.hotQueue.Enqueue(newItem);
127143
Interlocked.Increment(ref hotCount);
128-
BumpItems();
144+
Cycle();
129145
return newItem.Value;
130146
}
131147

132148
return await this.GetOrAddAsync(key, valueFactory).ConfigureAwait(false);
133149
}
134150

135-
private void BumpItems()
151+
/// <summary>
/// Attempts to remove the specified key from the cache, disposing the removed value
/// if it implements IDisposable.
/// </summary>
/// <param name="key">The key to remove.</param>
/// <returns>True if the key was present and removed; otherwise false.</returns>
public bool TryRemove(K key)
{
    if (this.dictionary.TryRemove(key, out var removedItem))
    {
        // Mark as not accessed, it will later be cycled out of the queues because it can never be fetched
        // from the dictionary. Note: Hot/Warm/Cold count will reflect the removed item until it is cycled
        // from the queue.
        removedItem.WasAccessed = false;

        // NOTE(review): the value is disposed while its stale item is still reachable from the
        // queues — confirm no code path can observe the disposed value via a queue entry.
        if (removedItem.Value is IDisposable d)
        {
            d.Dispose();
        }

        return true;
    }

    return false;
}
170+
/// <summary>
/// Rebalances the hot/warm/cold queues after an add, moving tail items between
/// segments until each queue is back within its capacity.
/// </summary>
private void Cycle()
{
    // Races are possible when a queue count equals its capacity: two threads may both
    // dequeue, prematurely freeing slots for the next caller. Each thread still cycles
    // at most 5 items, and TryDequeue is thread safe (each item is dequeued by exactly
    // one thread), so counts and queue state always converge on correct over time.
    CycleHot();

    // Under very high hit rates, races can push the warm and cold counts beyond
    // capacity. Cycling each segment twice keeps the counts stable in all conditions;
    // when contention is low the second pass is a no-op.
    CycleWarm();
    CycleWarm();
    CycleCold();
    CycleCold();
}

151-
private void BumpHot()
188+
private void CycleHot()
152189
{
153190
if (this.hotCount > this.hotCapacity)
154191
{
@@ -166,7 +203,7 @@ private void BumpHot()
166203
}
167204
}
168205

169-
private void BumpWarm()
206+
private void CycleWarm()
170207
{
171208
if (this.warmCount > this.warmCapacity)
172209
{
@@ -195,7 +232,7 @@ private void BumpWarm()
195232
}
196233
}
197234

198-
private void BumpCold()
235+
private void CycleCold()
199236
{
200237
if (this.coldCount > this.coldCapacity)
201238
{

0 commit comments

Comments
 (0)