
Commit 825b828

nostripew (#204)
* nostripew
* comment
* fix test
* size
* tests
* rem rbuff
* test
* test
1 parent 67a5e1d commit 825b828

File tree

5 files changed: +31 -64 lines

BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs
BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs
BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs
BitFaster.Caching/Lfu/ConcurrentLfu.cs
BitFaster.Caching/Lfu/LfuBufferSize.cs

BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs

Lines changed: 0 additions & 1 deletion
@@ -58,7 +58,6 @@ public void TestBufferConfiguraiton()
 {
     ICache<string, int> lfu = new ConcurrentLfuBuilder<string, int>()
         .WithBufferConfiguration(new LfuBufferSize(
-            new StripedBufferSize(128, 2),
             new StripedBufferSize(128, 2)
         ))
         .Build();
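
For readability, the builder configuration from this test after the change, assembled into one piece from the post-image above (the 128/2 values are just the test's example buffer size and stripe count; this is a sketch, not new API):

ICache<string, int> lfu = new ConcurrentLfuBuilder<string, int>()
    .WithBufferConfiguration(new LfuBufferSize(
        new StripedBufferSize(128, 2)))   // read buffer only; the write buffer is now sized internally by ConcurrentLfu
    .Build();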

BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs

Lines changed: 8 additions & 17 deletions
@@ -88,13 +88,7 @@ public void WhenItemIsEvictedItIsDisposed()
     LogLru();

     dcache.Count.Should().Be(20);
-
-    for (int i = 0; i < 5; i++)
-    {
-        disposables[i].IsDisposed.Should().BeTrue();
-    }
-
-    disposables[5].IsDisposed.Should().BeFalse();
+    disposables.Count(d => d.IsDisposed).Should().Be(5);
 }

 // protected 15
@@ -417,23 +411,20 @@ public void WhenWriteBufferIsFullAddDoesMaintenance()
     var bufferSize = LfuBufferSize.DefaultBufferSize;
     var scheduler = new TestScheduler();

-    var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1));
+    var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1));
     cache = new ConcurrentLfu<int, int>(1, bufferSize * 2, scheduler, EqualityComparer<int>.Default, bufferConfig);

     // add an item, flush write buffer
     cache.GetOrAdd(-1, k => k);
-    scheduler.RunCount.Should().Be(1);
     cache.PendingMaintenance();

     // remove the item but don't flush, it is now in the write buffer and maintenance is scheduled
     cache.TryRemove(-1).Should().BeTrue();
-    scheduler.RunCount.Should().Be(2);

     // add buffer size items, last iteration will invoke maintenance on the foreground since write
     // buffer is full and test scheduler did not do any work
     for (int i = 0; i < bufferSize; i++)
     {
-        scheduler.RunCount.Should().Be(2);
         cache.GetOrAdd(i, k => k);
     }

@@ -445,10 +436,11 @@ public void WhenWriteBufferIsFullAddDoesMaintenance()
 [Fact]
 public void WhenWriteBufferIsFullUpdatesAreDropped()
 {
-    var bufferSize = LfuBufferSize.DefaultBufferSize;
+    int capacity = 20;
+    var bufferSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128);
     var scheduler = new TestScheduler();
-    var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1));
-    cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, bufferConfig);
+    var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1));
+    cache = new ConcurrentLfu<int, int>(1, capacity, scheduler, EqualityComparer<int>.Default, bufferConfig);

     cache.GetOrAdd(1, k => k);
     scheduler.RunCount.Should().Be(1);
@@ -744,7 +736,7 @@ public void VerifyHitsWithForegroundScheduler()
 public void VerifyMisses()
 {
     cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default,
-        new LfuBufferSize(new StripedBufferSize(1, 1), new StripedBufferSize(1, 1)));
+        new LfuBufferSize(new StripedBufferSize(1, 1)));

     int iterations = 100000;
     Func<int, int> func = x => x;
@@ -779,7 +771,7 @@ public async Task ThreadedVerifyMisses()
 {
     // buffer size is 1, this will cause dropped writes on some threads where the buffer is full
     cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default,
-        new LfuBufferSize(new StripedBufferSize(1, 1), new StripedBufferSize(1, 1)));
+        new LfuBufferSize(new StripedBufferSize(1, 1)));

     int threads = 4;
     int iterations = 100000;
@@ -802,7 +794,6 @@ await Threaded.Run(threads, i =>
     this.output.WriteLine($"Maintenance ops {this.cache.Scheduler.RunCount}");

     cache.Metrics.Value.Misses.Should().Be(iterations * threads);
-    cache.Count.Should().BeCloseTo(20, 1);
 }

 private void VerifyHits(int iterations, int minSamples)
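
Note on the bufferSize change in WhenWriteBufferIsFullUpdatesAreDropped: the test now derives bufferSize with the same cap that the ConcurrentLfu constructor applies internally (see ConcurrentLfu.cs below), so adding exactly bufferSize entries is guaranteed to fill the cache's write buffer. A small worked value, assuming BitOps.CeilingPowerOfTwo rounds up to the next power of two:

int capacity = 20;
// CeilingPowerOfTwo(20) == 32 under that assumption, so:
var bufferSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128);   // 32, not the old DefaultBufferSize of 128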

BitFaster.Caching.UnitTests/Lfu/LfuBufferSizeTests.cs

Lines changed: 13 additions & 23 deletions
@@ -11,32 +11,24 @@ public class LfuBufferSizeTests
 [Fact]
 public void WhenReadBufferIsNullThrows()
 {
-    Action constructor = () => { var x = new LfuBufferSize(null, new StripedBufferSize(1, 1)); };
-
-    constructor.Should().Throw<ArgumentNullException>();
-}
-
-[Fact]
-public void WhenWriteBufferIsNullThrows()
-{
-    Action constructor = () => { var x = new LfuBufferSize(new StripedBufferSize(1, 1), null); };
+    Action constructor = () => { var x = new LfuBufferSize(null); };

     constructor.Should().Throw<ArgumentNullException>();
 }

 [SkippableTheory]
-[InlineData(1, 3, 1, 32, 1, 16)]
-[InlineData(1, 14, 1, 128, 1, 16)]
-[InlineData(1, 50, 1, 128, 1, 64)]
-[InlineData(1, 100, 1, 128, 1, 128)]
-[InlineData(4, 100, 4, 128, 4, 32)]
-[InlineData(16, 100, 8, 128, 8, 16)] // fails win
-[InlineData(64, 100, 8, 128, 8, 16)] // fails win
-[InlineData(1, 1000, 1, 128, 1, 128)]
-[InlineData(4, 1000, 4, 128, 4, 128)]
-[InlineData(32, 1000, 32, 128, 32, 32)] // fails win + fails mac
-[InlineData(256, 100000, 32, 128, 32, 32)] // fails win + fails mac
-public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer)
+[InlineData(1, 3, 1, 32)]
+[InlineData(1, 14, 1, 128)]
+[InlineData(1, 50, 1, 128)]
+[InlineData(1, 100, 1, 128)]
+[InlineData(4, 100, 4, 128)]
+[InlineData(16, 100, 8, 128)] // fails win
+[InlineData(64, 100, 8, 128)] // fails win
+[InlineData(1, 1000, 1, 128)]
+[InlineData(4, 1000, 4, 128)]
+[InlineData(32, 1000, 32, 128)] // fails win + fails mac
+[InlineData(256, 100000, 32, 128)] // fails win + fails mac
+public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer)
 {
     // Some of these tests depend on the CPU Core count - skip if run on a different config machine.
     bool notExpectedCpuCount = Environment.ProcessorCount != 12;
@@ -48,8 +40,6 @@ public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int e

     bufferSize.Read.StripeCount.Should().Be(expectedReadStripes);
     bufferSize.Read.BufferSize.Should().Be(expectedReadBuffer);
-    bufferSize.Write.StripeCount.Should().Be(expecteWriteStripes);
-    bufferSize.Write.BufferSize.Should().Be(expecteWriteBuffer);
 }
 }
 }

BitFaster.Caching/Lfu/ConcurrentLfu.cs

Lines changed: 7 additions & 4 deletions
@@ -39,7 +39,7 @@ public sealed class ConcurrentLfu<K, V> : ICache<K, V>, IAsyncCache<K, V>, IBoun
     private readonly ConcurrentDictionary<K, LfuNode<K, V>> dictionary;

     private readonly StripedMpscBuffer<LfuNode<K, V>> readBuffer;
-    private readonly StripedMpscBuffer<LfuNode<K, V>> writeBuffer;
+    private readonly MpscBoundedBuffer<LfuNode<K, V>> writeBuffer;

     private readonly CacheMetrics metrics = new CacheMetrics();

@@ -82,7 +82,10 @@ public ConcurrentLfu(int concurrencyLevel, int capacity, IScheduler scheduler, I
     this.dictionary = new ConcurrentDictionary<K, LfuNode<K, V>>(concurrencyLevel, capacity, comparer);

     this.readBuffer = new StripedMpscBuffer<LfuNode<K, V>>(bufferSize.Read);
-    this.writeBuffer = new StripedMpscBuffer<LfuNode<K, V>>(bufferSize.Write);
+
+    // Cap the write buffer to the cache size, or 128. Whichever is smaller.
+    int writeBufferSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128);
+    this.writeBuffer = new MpscBoundedBuffer<LfuNode<K, V>>(writeBufferSize);

     this.cmSketch = new CmSketch<K>(1, comparer);
     this.cmSketch.EnsureCapacity(capacity);
@@ -447,7 +450,7 @@ private bool Maintenance(LfuNode<K, V> droppedWrite = null)
         OnAccess(localDrainBuffer[i]);
     }

-    int writeCount = this.writeBuffer.DrainTo(localDrainBuffer);
+    int writeCount = this.writeBuffer.DrainTo(new ArraySegment<LfuNode<K, V>>(localDrainBuffer));

     for (int i = 0; i < writeCount; i++)
     {
@@ -819,7 +822,7 @@ public LfuDebugView(ConcurrentLfu<K, V> lfu)

     public StripedMpscBuffer<LfuNode<K, V>> ReadBuffer => this.lfu.readBuffer;

-    public StripedMpscBuffer<LfuNode<K, V>> WriteBuffer => this.lfu.writeBuffer;
+    public MpscBoundedBuffer<LfuNode<K, V>> WriteBuffer => this.lfu.writeBuffer;

     public KeyValuePair<K, V>[] Items
     {
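
The constructor hunk above replaces the striped write buffer (previously sized from LfuBufferSize.Write) with a single MpscBoundedBuffer whose capacity is derived from the cache capacity. A minimal sketch of that sizing with a few example values; the concrete numbers assume BitOps.CeilingPowerOfTwo rounds up to the next power of two:

// Write buffer sizing per the constructor above (sketch, not library code).
static int WriteBufferSize(int capacity) => Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128);

// WriteBufferSize(20)    == 32
// WriteBufferSize(100)   == 128
// WriteBufferSize(10000) == 128   // capped at 128 regardless of capacity

Keeping buffered pending writes small relative to capacity follows the same intent as the constraint comment removed from LfuBufferSize.Default below.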

BitFaster.Caching/Lfu/LfuBufferSize.cs

Lines changed: 3 additions & 19 deletions
@@ -14,29 +14,20 @@ public class LfuBufferSize
     /// </summary>
     public const int DefaultBufferSize = 128;

-    private const int MaxWriteBufferTotalSize = 1024;
-
     /// <summary>
     /// Initializes a new instance of the LfuBufferSize class with the specified read and write buffer sizes.
     /// </summary>
     /// <param name="readBufferSize">The read buffer size.</param>
-    /// <param name="writeBufferSize">The write buffer size.</param>
-    public LfuBufferSize(StripedBufferSize readBufferSize, StripedBufferSize writeBufferSize)
+    public LfuBufferSize(StripedBufferSize readBufferSize)
     {
         Read = readBufferSize ?? throw new ArgumentNullException(nameof(readBufferSize));
-        Write = writeBufferSize ?? throw new ArgumentNullException(nameof(writeBufferSize));
     }

     /// <summary>
     /// Gets the read buffer size.
     /// </summary>
     public StripedBufferSize Read { get; }

-    /// <summary>
-    /// Gets the write buffer size.
-    /// </summary>
-    public StripedBufferSize Write { get; }
-
     /// <summary>
     /// Estimates default buffer sizes intended to give optimal throughput.
     /// </summary>
@@ -48,8 +39,7 @@ public static LfuBufferSize Default(int concurrencyLevel, int capacity)
     if (capacity < 13)
     {
         return new LfuBufferSize(
-            new StripedBufferSize(32, 1),
-            new StripedBufferSize(16, 1));
+            new StripedBufferSize(32, 1));
     }

     // cap concurrency at proc count * 2
@@ -61,14 +51,8 @@ public static LfuBufferSize Default(int concurrencyLevel, int capacity)
         concurrencyLevel /= 2;
     }

-    // Constrain write buffer size so that the LFU dictionary will not ever end up with more than 2x cache
-    // capacity entries before maintenance runs.
-    int writeBufferTotalSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), MaxWriteBufferTotalSize);
-    int writeStripeSize = Math.Min(BitOps.CeilingPowerOfTwo(Math.Max(writeBufferTotalSize / concurrencyLevel, 4)), 128);
-
     return new LfuBufferSize(
-        new StripedBufferSize(DefaultBufferSize, concurrencyLevel),
-        new StripedBufferSize(writeStripeSize, concurrencyLevel));
+        new StripedBufferSize(DefaultBufferSize, concurrencyLevel));
 }
 }
 }
