 using System.Collections.Generic;
 using System.Diagnostics;
 using System.Linq;
-using System.Text;
-using System.Threading;
 using System.Threading.Tasks;
+using BitFaster.Caching.Buffers;
 using BitFaster.Caching.Lfu;
 using BitFaster.Caching.Scheduler;
 using BitFaster.Caching.UnitTests.Lru;
@@ -19,7 +18,7 @@ public class ConcurrentLfuTests
     {
         private readonly ITestOutputHelper output;

-        private ConcurrentLfu<int, int> cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
+        private ConcurrentLfu<int, int> cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
         private ValueFactory valueFactory = new ValueFactory();

         public ConcurrentLfuTests(ITestOutputHelper output)
@@ -76,7 +75,7 @@ public void WhenItemsAddedExceedsCapacityItemsAreDiscarded()
         [Fact]
         public void WhenItemIsEvictedItIsDisposed()
         {
-            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
+            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             var disposables = new DisposableItem[25];

             for (int i = 0; i < 25; i++)
@@ -306,7 +305,7 @@ public void WriteUpdatesProtectedLruOrder()
         [Fact]
         public void WhenHitRateChangesWindowSizeIsAdapted()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));

             // First completely fill the cache, push entries into protected
             for (int i = 0; i < 20; i++)
@@ -375,13 +374,13 @@ public void WhenHitRateChangesWindowSizeIsAdapted()
         public void ReadSchedulesMaintenanceWhenBufferIsFull()
         {
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));

             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
             cache.PendingMaintenance();

-            for (int i = 0; i < ConcurrentLfu<int, int>.BufferSize; i++)
+            for (int i = 0; i < LfuBufferSize.DefaultBufferSize; i++)
             {
                 scheduler.RunCount.Should().Be(1);
                 cache.GetOrAdd(1, k => k);
@@ -395,29 +394,31 @@ public void ReadSchedulesMaintenanceWhenBufferIsFull()
         [Fact]
         public void WhenReadBufferIsFullReadsAreDropped()
         {
-            int bufferSize = ConcurrentLfu<int, int>.BufferSize;
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));

             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
             cache.PendingMaintenance();

-            for (int i = 0; i < bufferSize * 2; i++)
+            for (int i = 0; i < LfuBufferSize.DefaultBufferSize * 2; i++)
             {
                 cache.GetOrAdd(1, k => k);
             }

             cache.PendingMaintenance();

-            cache.Metrics.Value.Hits.Should().Be(bufferSize);
+            cache.Metrics.Value.Hits.Should().Be(LfuBufferSize.DefaultBufferSize);
         }

         [Fact]
         public void WhenWriteBufferIsFullAddDoesMaintenance()
         {
+            var bufferSize = LfuBufferSize.DefaultBufferSize;
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, ConcurrentLfu<int, int>.BufferSize * 2, scheduler, EqualityComparer<int>.Default);
+
+            var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1));
+            cache = new ConcurrentLfu<int, int>(1, bufferSize * 2, scheduler, EqualityComparer<int>.Default, bufferConfig);

             // add an item, flush write buffer
             cache.GetOrAdd(-1, k => k);
@@ -430,7 +431,7 @@ public void WhenWriteBufferIsFullAddDoesMaintenance()

             // add buffer size items, last iteration will invoke maintenance on the foreground since write
             // buffer is full and test scheduler did not do any work
-            for (int i = 0; i < ConcurrentLfu<int, int>.BufferSize; i++)
+            for (int i = 0; i < bufferSize; i++)
             {
                 scheduler.RunCount.Should().Be(2);
                 cache.GetOrAdd(i, k => k);
@@ -444,9 +445,10 @@ public void WhenWriteBufferIsFullAddDoesMaintenance()
         [Fact]
         public void WhenWriteBufferIsFullUpdatesAreDropped()
         {
-            int bufferSize = ConcurrentLfu<int, int>.BufferSize;
+            var bufferSize = LfuBufferSize.DefaultBufferSize;
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);
+            var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1), new StripedBufferSize(bufferSize, 1));
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, bufferConfig);

             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
@@ -578,7 +580,7 @@ public void WhenItemIsRemovedItIsRemoved()
         [Fact]
         public void WhenItemIsRemovedItIsDisposed()
         {
-            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
+            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             var disposable = new DisposableItem();

             dcache.GetOrAdd(1, k => disposable);
@@ -667,7 +669,7 @@ public void TrimRemovesNItems()
         public void TrimWhileItemsInWriteBufferRemovesNItems()
         {
             // null scheduler == no maintenance, all writes fit in buffer
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));

             for (int i = 0; i < 25; i++)
             {
@@ -705,7 +707,7 @@ public void VerifyHitsWithBackgroundScheduler()
         public void VerifyHitsWithThreadPoolScheduler()
        {
             // when running all tests in parallel, sample count drops significantly: set low bar for stability.
-            cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             VerifyHits(iterations: 10000000, minSamples: 500000);
         }

@@ -715,7 +717,7 @@ public void VerifyHitsWithThreadPoolScheduler()
         [Fact]
         public void VerifyHitsWithNullScheduler()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
             VerifyHits(iterations: 10000000, minSamples: -1);
         }

@@ -726,12 +728,12 @@ public void VerifyHitsWithNullScheduler()
         [Fact]
         public void VerifyHitsWithForegroundScheduler()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new ForegroundScheduler(), EqualityComparer<int>.Default);
+            cache = new ConcurrentLfu<int, int>(1, 20, new ForegroundScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));

             // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false
             // before TryScheduleDrain is called. This serves as sanity check.
             int iterations = 10000000;
-            int dropped = iterations / ConcurrentLfu<int, int>.BufferSize;
+            int dropped = iterations / LfuBufferSize.DefaultBufferSize;

             this.output.WriteLine($"Will drop {dropped} reads.");
         }