
Commit 5656045

buffer (#227)
* buffer
* cleanup
* skip
* missing file
* tests
* rename
1 parent a6a1e91 commit 5656045


21 files changed: +290 -89 lines changed

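The change that runs through every call site below is a new LfuBufferSize parameter on the ConcurrentLfu constructor, with LfuBufferSize.Default(concurrencyLevel, capacity) as the convenience factory for the striped read/write buffer sizes. A minimal sketch of the updated call shape, based only on the call sites in this diff (the key/value types and the sizes shown are illustrative):

    // Sketch of the post-#227 constructor shape; types and sizes are arbitrary examples.
    // LfuBufferSize.Default derives striped read/write buffer sizes from the concurrency level and capacity.
    var cache = new ConcurrentLfu<int, string>(
        concurrencyLevel: 1,
        capacity: 128,
        new BackgroundThreadScheduler(),
        EqualityComparer<int>.Default,
        LfuBufferSize.Default(concurrencyLevel: 1, capacity: 128));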

BitFaster.Caching.Benchmarks/Lfu/LfuJustGetOrAdd.cs

Lines changed: 4 additions & 4 deletions
@@ -22,11 +22,11 @@ public class LfuJustGetOrAdd
 
         const int stripes = 1;
         private static readonly BackgroundThreadScheduler background = new BackgroundThreadScheduler();
-        private static readonly ConcurrentLfu<int, int> concurrentLfu = new ConcurrentLfu<int, int>(stripes, 9, background, EqualityComparer<int>.Default);
+        private static readonly ConcurrentLfu<int, int> concurrentLfu = new ConcurrentLfu<int, int>(stripes, 9, background, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
-        private static readonly ConcurrentLfu<int, int> concurrentLfuFore = new ConcurrentLfu<int, int>(stripes, 9, new ForegroundScheduler(), EqualityComparer<int>.Default);
-        private static readonly ConcurrentLfu<int, int> concurrentLfuTp = new ConcurrentLfu<int, int>(stripes, 9, new ThreadPoolScheduler(), EqualityComparer<int>.Default);
-        private static readonly ConcurrentLfu<int, int> concurrentLfuNull = new ConcurrentLfu<int, int>(stripes, 9, new NullScheduler(), EqualityComparer<int>.Default);
+        private static readonly ConcurrentLfu<int, int> concurrentLfuFore = new ConcurrentLfu<int, int>(stripes, 9, new ForegroundScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+        private static readonly ConcurrentLfu<int, int> concurrentLfuTp = new ConcurrentLfu<int, int>(stripes, 9, new ThreadPoolScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+        private static readonly ConcurrentLfu<int, int> concurrentLfuNull = new ConcurrentLfu<int, int>(stripes, 9, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
         [GlobalSetup]
         public void GlobalSetup()

BitFaster.Caching.Benchmarks/Lru/LruJustGetOrAdd.cs

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ public class LruJustGetOrAdd
         private static readonly ICache<int, int> atomicFastLru = new ConcurrentLruBuilder<int, int>().WithConcurrencyLevel(8).WithCapacity(9).WithAtomicGetOrAdd().Build();
 
         private static readonly BackgroundThreadScheduler background = new BackgroundThreadScheduler();
-        private static readonly ConcurrentLfu<int, int> concurrentLfu = new ConcurrentLfu<int, int>(1, 9, background, EqualityComparer<int>.Default);
+        private static readonly ConcurrentLfu<int, int> concurrentLfu = new ConcurrentLfu<int, int>(1, 9, background, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
 
         private static readonly int key = 1;

BitFaster.Caching.HitRateAnalysis/Analysis.cs

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ public Analysis(int cacheSize)
         {
             concurrentLru = new ConcurrentLru<K, int>(1, cacheSize, EqualityComparer<K>.Default);
             classicLru = new ClassicLru<K, int>(1, cacheSize, EqualityComparer<K>.Default);
-            concurrentLfu = new ConcurrentLfu<K, int>(1, cacheSize, new ForegroundScheduler(), EqualityComparer<K>.Default);
+            concurrentLfu = new ConcurrentLfu<K, int>(1, cacheSize, new ForegroundScheduler(), EqualityComparer<K>.Default, LfuBufferSize.Default(1, 128));
         }
 
         public int CacheSize => concurrentLru.Capacity;

BitFaster.Caching.ThroughputAnalysis/Program.cs

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ class Program
         const double s = 0.86;
         const int n = 500;
         static int capacity = 500;
-        const int maxThreads = 52;
+        const int maxThreads = 64;
         const int sampleCount = 2000;
         const int repeatCount = 400;
 
@@ -102,7 +102,7 @@ static void Main(string[] args)
             for (int i = 0; i < warmup + runs; i++)
             {
                 var scheduler = new BackgroundThreadScheduler();
-                results[i] = MeasureThroughput(new ConcurrentLfu<int, int>(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer<int>.Default), tc);
+                results[i] = MeasureThroughput(new ConcurrentLfu<int, int>(concurrencyLevel: tc, capacity: capacity, scheduler: scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(concurrencyLevel: tc, capacity: capacity)), tc);
                 scheduler.Dispose();
             }
             avg = AverageLast(results, runs) / 1000000;

BitFaster.Caching.UnitTests/AssemblyInitialize.cs

Lines changed: 1 addition & 6 deletions
@@ -1,9 +1,4 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
+using System.Threading;
 using Xunit.Abstractions;
 using Xunit.Sdk;
 

BitFaster.Caching.UnitTests/BitFaster.Caching.UnitTests.csproj

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
+    <PackageReference Include="Xunit.SkippableFact" Version="1.4.13" />
   </ItemGroup>
 
   <ItemGroup>
StripedBufferSizeTests (new file in BitFaster.Caching.UnitTests)

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
+using System;
+using BitFaster.Caching.Buffers;
+using FluentAssertions;
+using Xunit;
+
+namespace BitFaster.Caching.UnitTests.Buffers
+{
+    public class StripedBufferSizeTests
+    {
+        [Fact]
+        public void WhenBufferSizeIsLessThan1CtorThrows()
+        {
+            Action constructor = () => { var x = new StripedBufferSize(-1, 1); };
+
+            constructor.Should().Throw<ArgumentOutOfRangeException>();
+        }
+
+        [Fact]
+        public void WhenStripeCountIsLessThan1CtorThrows()
+        {
+            Action constructor = () => { var x = new StripedBufferSize(1, -1); };
+
+            constructor.Should().Throw<ArgumentOutOfRangeException>();
+        }
+
+        [Fact]
+        public void SizeIsRoundedToNextPowerOf2()
+        {
+            var bs = new StripedBufferSize(6, 16);
+
+            bs.BufferSize.Should().Be(8);
+        }
+
+        [Fact]
+        public void StripeCountIsRoundedToNextPowerOf2()
+        {
+            var bs = new StripedBufferSize(16, 6);
+
+            bs.StripeCount.Should().Be(8);
+        }
+    }
+}
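The two rounding tests above pin down the StripedBufferSize contract: the first constructor argument is the per-stripe buffer size, the second is the stripe count, values below 1 throw, and both values are rounded up to the next power of two. A small illustrative extrapolation, assuming the same ceiling-power-of-two rounding applies to other inputs:

    // Hypothetical inputs: per the rounding tests above, 6 should round up to 8 and 3 to 4.
    var size = new StripedBufferSize(6, 3);
    // size.BufferSize == 8, size.StripeCount == 4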

BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuBuilderTests.cs

Lines changed: 12 additions & 4 deletions
@@ -1,9 +1,6 @@
 using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
 using BitFaster.Caching.Atomic;
+using BitFaster.Caching.Buffers;
 using BitFaster.Caching.Lfu;
 using BitFaster.Caching.Scheduler;
 using FluentAssertions;
@@ -56,6 +53,17 @@ public void TestComparer()
             lfu.TryGet("A", out var value).Should().BeTrue();
         }
 
+        [Fact]
+        public void TestBufferConfiguraiton()
+        {
+            ICache<string, int> lfu = new ConcurrentLfuBuilder<string, int>()
+                .WithBufferConfiguration(new LfuBufferSize(
+                    new StripedBufferSize(128, 2),
+                    new StripedBufferSize(128, 2)
+                ))
+                .Build();
+        }
+
         // 1
         [Fact]
         public void WithScopedValues()
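The new builder hook mirrors the constructor parameter: WithBufferConfiguration takes an LfuBufferSize whose first argument is the read buffer and whose second is the write buffer (the order is confirmed by the null-argument tests later in this commit). A sketch that sizes the two buffers differently; the specific numbers are arbitrary:

    // Illustrative sizes only: a larger striped read buffer and a small single-stripe write buffer.
    ICache<string, int> lfu = new ConcurrentLfuBuilder<string, int>()
        .WithBufferConfiguration(new LfuBufferSize(
            new StripedBufferSize(128, 4),  // read buffer: 128 entries per stripe, 4 stripes
            new StripedBufferSize(32, 1)))  // write buffer: 32 entries, 1 stripe
        .Build();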

BitFaster.Caching.UnitTests/Lfu/ConcurrentLfuTests.cs

Lines changed: 23 additions & 21 deletions
@@ -3,9 +3,8 @@
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
-using System.Text;
-using System.Threading;
 using System.Threading.Tasks;
+using BitFaster.Caching.Buffers;
 using BitFaster.Caching.Lfu;
 using BitFaster.Caching.Scheduler;
 using BitFaster.Caching.UnitTests.Lru;
@@ -19,7 +18,7 @@ public class ConcurrentLfuTests
     {
         private readonly ITestOutputHelper output;
 
-        private ConcurrentLfu<int, int> cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
+        private ConcurrentLfu<int, int> cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
         private ValueFactory valueFactory = new ValueFactory();
 
         public ConcurrentLfuTests(ITestOutputHelper output)
@@ -76,7 +75,7 @@ public void WhenItemsAddedExceedsCapacityItemsAreDiscarded()
         [Fact]
         public void WhenItemIsEvictedItIsDisposed()
         {
-            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
+            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             var disposables = new DisposableItem[25];
 
             for (int i = 0; i < 25; i++)
@@ -306,7 +305,7 @@ public void WriteUpdatesProtectedLruOrder()
         [Fact]
         public void WhenHitRateChangesWindowSizeIsAdapted()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
             // First completely fill the cache, push entries into protected
             for (int i = 0; i < 20; i++)
@@ -375,13 +374,13 @@ public void WhenHitRateChangesWindowSizeIsAdapted()
         public void ReadSchedulesMaintenanceWhenBufferIsFull()
         {
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
             cache.PendingMaintenance();
 
-            for (int i = 0; i < ConcurrentLfu<int, int>.BufferSize; i++)
+            for (int i = 0; i < LfuBufferSize.DefaultBufferSize; i++)
             {
                 scheduler.RunCount.Should().Be(1);
                 cache.GetOrAdd(1, k => k);
@@ -395,29 +394,31 @@ public void ReadSchedulesMaintenanceWhenBufferIsFull()
         [Fact]
         public void WhenReadBufferIsFullReadsAreDropped()
         {
-            int bufferSize = ConcurrentLfu<int, int>.BufferSize;
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
             cache.PendingMaintenance();
 
-            for (int i = 0; i < bufferSize * 2; i++)
+            for (int i = 0; i < LfuBufferSize.DefaultBufferSize * 2; i++)
             {
                 cache.GetOrAdd(1, k => k);
             }
 
             cache.PendingMaintenance();
 
-            cache.Metrics.Value.Hits.Should().Be(bufferSize);
+            cache.Metrics.Value.Hits.Should().Be(LfuBufferSize.DefaultBufferSize);
         }
 
         [Fact]
         public void WhenWriteBufferIsFullAddDoesMaintenance()
         {
+            var bufferSize = LfuBufferSize.DefaultBufferSize;
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, ConcurrentLfu<int, int>.BufferSize * 2, scheduler, EqualityComparer<int>.Default);
+
+            var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1));
+            cache = new ConcurrentLfu<int, int>(1, bufferSize * 2, scheduler, EqualityComparer<int>.Default, bufferConfig);
 
             // add an item, flush write buffer
             cache.GetOrAdd(-1, k => k);
@@ -430,7 +431,7 @@ public void WhenWriteBufferIsFullAddDoesMaintenance()
 
             // add buffer size items, last iteration will invoke maintenance on the foreground since write
             // buffer is full and test scheduler did not do any work
-            for (int i = 0; i < ConcurrentLfu<int, int>.BufferSize; i++)
+            for (int i = 0; i < bufferSize; i++)
             {
                 scheduler.RunCount.Should().Be(2);
                 cache.GetOrAdd(i, k => k);
@@ -444,9 +445,10 @@ public void WhenWriteBufferIsFullAddDoesMaintenance()
         [Fact]
         public void WhenWriteBufferIsFullUpdatesAreDropped()
         {
-            int bufferSize = ConcurrentLfu<int, int>.BufferSize;
+            var bufferSize = LfuBufferSize.DefaultBufferSize;
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);
+            var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1));
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, bufferConfig);
 
             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
@@ -578,7 +580,7 @@ public void WhenItemIsRemovedItIsRemoved()
         [Fact]
         public void WhenItemIsRemovedItIsDisposed()
         {
-            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
+            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             var disposable = new DisposableItem();
 
             dcache.GetOrAdd(1, k => disposable);
@@ -667,7 +669,7 @@ public void TrimRemovesNItems()
         public void TrimWhileItemsInWriteBufferRemovesNItems()
        {
            // null scheduler == no maintenance, all writes fit in buffer
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
             for (int i = 0; i < 25; i++)
             {
@@ -705,7 +707,7 @@ public void VerifyHitsWithBackgroundScheduler()
         public void VerifyHitsWithThreadPoolScheduler()
         {
             // when running all tests in parallel, sample count drops significantly: set low bar for stability.
-            cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             VerifyHits(iterations: 10000000, minSamples: 500000);
         }
 
@@ -715,7 +717,7 @@ public void VerifyHitsWithThreadPoolScheduler()
         [Fact]
         public void VerifyHitsWithNullScheduler()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             VerifyHits(iterations: 10000000, minSamples: -1);
         }
 
@@ -726,12 +728,12 @@ public void VerifyHitsWithNullScheduler()
         [Fact]
         public void VerifyHitsWithForegroundScheduler()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new ForegroundScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new ForegroundScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
 
             // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false
             // before TryScheduleDrain is called. This serves as sanity check.
             int iterations = 10000000;
-            int dropped = iterations / ConcurrentLfu<int, int>.BufferSize;
+            int dropped = iterations / LfuBufferSize.DefaultBufferSize;
 
             this.output.WriteLine($"Will drop {dropped} reads.");
 
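A pattern worth noting in the two write-buffer tests above: instead of LfuBufferSize.Default, they pin both buffers to a single stripe so that exactly LfuBufferSize.DefaultBufferSize writes are guaranteed to fill the write buffer, which keeps the "buffer full triggers maintenance" path deterministic regardless of core count. A condensed sketch of that setup (TestScheduler is the test double used in these tests):

    // Single-stripe buffers make the fill point deterministic on any machine.
    var bufferSize = LfuBufferSize.DefaultBufferSize;
    var bufferConfig = new LfuBufferSize(
        new StripedBufferSize(bufferSize, 1),   // read buffer: one stripe
        new StripedBufferSize(bufferSize, 1));  // write buffer: one stripe
    var cache = new ConcurrentLfu<int, int>(1, 20, new TestScheduler(), EqualityComparer<int>.Default, bufferConfig);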

LfuBufferSizeTests (new file in BitFaster.Caching.UnitTests)

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
+using System;
+using BitFaster.Caching.Buffers;
+using BitFaster.Caching.Lfu;
+using FluentAssertions;
+using Xunit;
+
+namespace BitFaster.Caching.UnitTests.Lfu
+{
+    public class LfuBufferSizeTests
+    {
+        [Fact]
+        public void WhenReadBufferIsNullThrows()
+        {
+            Action constructor = () => { var x = new LfuBufferSize(null, new StripedBufferSize(1, 1)); };
+
+            constructor.Should().Throw<ArgumentNullException>();
+        }
+
+        [Fact]
+        public void WhenWriteBufferIsNullThrows()
+        {
+            Action constructor = () => { var x = new LfuBufferSize(new StripedBufferSize(1, 1), null); };
+
+            constructor.Should().Throw<ArgumentNullException>();
+        }
+
+        [SkippableTheory]
+        [InlineData(1, 3, 1, 32, 1, 16)]
+        [InlineData(1, 14, 1, 128, 1, 16)]
+        [InlineData(1, 50, 1, 128, 1, 64)]
+        [InlineData(1, 100, 1, 128, 1, 128)]
+        [InlineData(4, 100, 4, 128, 4, 32)]
+        [InlineData(16, 100, 8, 128, 8, 16)] // fails win
+        [InlineData(64, 100, 8, 128, 8, 16)] // fails win
+        [InlineData(1, 1000, 1, 128, 1, 128)]
+        [InlineData(4, 1000, 4, 128, 4, 128)]
+        [InlineData(32, 1000, 32, 128, 32, 32)] // fails win + fails mac
+        [InlineData(256, 100000, 32, 128, 32, 32)] // fails win + fails mac
+        public void CalculateDefaultBufferSize(int concurrencyLevel, int capacity, int expectedReadStripes, int expectedReadBuffer, int expecteWriteStripes, int expecteWriteBuffer)
+        {
+            // Some of these tests depend on the CPU Core count - skip if run on a different config machine.
+            bool notExpectedCpuCount = Environment.ProcessorCount != 12;
+            bool concurrencyLevelThresholdExceeded = BitOps.CeilingPowerOfTwo(concurrencyLevel) > BitOps.CeilingPowerOfTwo(Environment.ProcessorCount * 2);
+
+            Skip.If(concurrencyLevelThresholdExceeded && notExpectedCpuCount, "Test outcome depends on machine CPU count");
+
+            var bufferSize = LfuBufferSize.Default(concurrencyLevel, capacity);
+
+            bufferSize.Read.StripeCount.Should().Be(expectedReadStripes);
+            bufferSize.Read.BufferSize.Should().Be(expectedReadBuffer);
+            bufferSize.Write.StripeCount.Should().Be(expecteWriteStripes);
+            bufferSize.Write.BufferSize.Should().Be(expecteWriteBuffer);
+        }
+    }
+}
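Reading the theory data above: each row is (concurrencyLevel, capacity) followed by the expected read stripe count, read buffer size, write stripe count, and write buffer size. The comment and the Skip.If guard make clear that the computed defaults depend on Environment.ProcessorCount, so the high-concurrency rows only assert on the 12-core configuration the expectations were written against. A small sketch of inspecting the computed defaults; the expected output is taken from the (4, 1000) row of the table:

    var size = LfuBufferSize.Default(concurrencyLevel: 4, capacity: 1000);
    Console.WriteLine($"read:  {size.Read.StripeCount} stripes x {size.Read.BufferSize} entries");   // read:  4 stripes x 128 entries
    Console.WriteLine($"write: {size.Write.StripeCount} stripes x {size.Write.BufferSize} entries"); // write: 4 stripes x 128 entries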

0 commit comments
