@@ -18,7 +18,7 @@ public class ConcurrentLfuTests
     {
         private readonly ITestOutputHelper output;

-        private ConcurrentLfu<int, int> cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+        private ConcurrentLfu<int, int> cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
         private ValueFactory valueFactory = new ValueFactory();

         public ConcurrentLfuTests(ITestOutputHelper output)
@@ -75,7 +75,7 @@ public void WhenItemsAddedExceedsCapacityItemsAreDiscarded()
         [Fact]
         public void WhenItemIsEvictedItIsDisposed()
         {
-            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
             var disposables = new DisposableItem[25];

             for (int i = 0; i < 25; i++)
@@ -299,7 +299,7 @@ public void WriteUpdatesProtectedLruOrder()
         [Fact]
         public void WhenHitRateChangesWindowSizeIsAdapted()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);

             // First completely fill the cache, push entries into protected
             for (int i = 0; i < 20; i++)
@@ -368,13 +368,13 @@ public void WhenHitRateChangesWindowSizeIsAdapted()
         public void ReadSchedulesMaintenanceWhenBufferIsFull()
         {
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);

             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
             cache.PendingMaintenance();

-            for (int i = 0; i < LfuBufferSize.DefaultBufferSize; i++)
+            for (int i = 0; i < ConcurrentLfu<int, int>.DefaultBufferSize; i++)
             {
                 scheduler.RunCount.Should().Be(1);
                 cache.GetOrAdd(1, k => k);
@@ -389,30 +389,29 @@ public void ReadSchedulesMaintenanceWhenBufferIsFull()
         public void WhenReadBufferIsFullReadsAreDropped()
         {
             var scheduler = new TestScheduler();
-            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, scheduler, EqualityComparer<int>.Default);

             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
             cache.PendingMaintenance();

-            for (int i = 0; i < LfuBufferSize.DefaultBufferSize * 2; i++)
+            for (int i = 0; i < ConcurrentLfu<int, int>.DefaultBufferSize * 2; i++)
             {
                 cache.GetOrAdd(1, k => k);
             }

             cache.PendingMaintenance();

-            cache.Metrics.Value.Hits.Should().Be(LfuBufferSize.DefaultBufferSize);
+            cache.Metrics.Value.Hits.Should().Be(ConcurrentLfu<int, int>.DefaultBufferSize);
         }

         [Fact]
         public void WhenWriteBufferIsFullAddDoesMaintenance()
         {
-            var bufferSize = LfuBufferSize.DefaultBufferSize;
+            var bufferSize = ConcurrentLfu<int, int>.DefaultBufferSize;
             var scheduler = new TestScheduler();

-            var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1));
-            cache = new ConcurrentLfu<int, int>(1, bufferSize * 2, scheduler, EqualityComparer<int>.Default, bufferConfig);
+            cache = new ConcurrentLfu<int, int>(1, bufferSize * 2, scheduler, EqualityComparer<int>.Default);

             // add an item, flush write buffer
             cache.GetOrAdd(-1, k => k);
@@ -439,8 +438,7 @@ public void WhenWriteBufferIsFullUpdatesAreDropped()
             int capacity = 20;
             var bufferSize = Math.Min(BitOps.CeilingPowerOfTwo(capacity), 128);
             var scheduler = new TestScheduler();
-            var bufferConfig = new LfuBufferSize(new StripedBufferSize(bufferSize, 1));
-            cache = new ConcurrentLfu<int, int>(1, capacity, scheduler, EqualityComparer<int>.Default, bufferConfig);
+            cache = new ConcurrentLfu<int, int>(1, capacity, scheduler, EqualityComparer<int>.Default);

             cache.GetOrAdd(1, k => k);
             scheduler.RunCount.Should().Be(1);
@@ -572,7 +570,7 @@ public void WhenItemIsRemovedItIsRemoved()
         [Fact]
         public void WhenItemIsRemovedItIsDisposed()
         {
-            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            var dcache = new ConcurrentLfu<int, DisposableItem>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);
             var disposable = new DisposableItem();

             dcache.GetOrAdd(1, k => disposable);
@@ -661,7 +659,7 @@ public void TrimRemovesNItems()
         public void TrimWhileItemsInWriteBufferRemovesNItems()
        {
             // null scheduler == no maintenance, all writes fit in buffer
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);

             for (int i = 0; i < 25; i++)
             {
@@ -699,7 +697,7 @@ public void VerifyHitsWithBackgroundScheduler()
         public void VerifyHitsWithThreadPoolScheduler()
         {
             // when running all tests in parallel, sample count drops significantly: set low bar for stability.
-            cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, new ThreadPoolScheduler(), EqualityComparer<int>.Default);
             VerifyHits(iterations: 10000000, minSamples: 500000);
         }

@@ -709,7 +707,7 @@ public void VerifyHitsWithThreadPoolScheduler()
         [Fact]
         public void VerifyHitsWithNullScheduler()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);
             VerifyHits(iterations: 10000000, minSamples: -1);
         }

@@ -720,12 +718,12 @@ public void VerifyHitsWithNullScheduler()
         [Fact]
         public void VerifyHitsWithForegroundScheduler()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new ForegroundScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));
+            cache = new ConcurrentLfu<int, int>(1, 20, new ForegroundScheduler(), EqualityComparer<int>.Default);

             // Note: TryAdd will drop 1 read per full read buffer, since TryAdd will return false
             // before TryScheduleDrain is called. This serves as sanity check.
             int iterations = 10000000;
-            int dropped = iterations / LfuBufferSize.DefaultBufferSize;
+            int dropped = iterations / ConcurrentLfu<int, int>.DefaultBufferSize;

             this.output.WriteLine($"Will drop {dropped} reads.");

@@ -735,8 +733,7 @@ public void VerifyHitsWithForegroundScheduler()
         [Fact]
         public void VerifyMisses()
         {
-            cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default,
-                new LfuBufferSize(new StripedBufferSize(1, 1)));
+            cache = new ConcurrentLfu<int, int>(1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);

             int iterations = 100000;
             Func<int, int> func = x => x;
@@ -770,8 +767,7 @@ public void VerifyMisses()
         public async Task ThreadedVerifyMisses()
         {
             // buffer size is 1, this will cause dropped writes on some threads where the buffer is full
-            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default,
-                new LfuBufferSize(new StripedBufferSize(1, 1)));
+            cache = new ConcurrentLfu<int, int>(1, 20, new NullScheduler(), EqualityComparer<int>.Default);

             int threads = 4;
             int iterations = 100000;
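
For reference, a minimal sketch of the constructor migration this commit applies throughout the tests. The "before" call is taken from the removed lines and the "after" call from the added lines; everything else (variable names, the specific capacity of 20) is illustrative only, not part of the commit.

        // Before: read/write buffer sizing was passed explicitly via LfuBufferSize.
        var before = new ConcurrentLfu<int, int>(
            1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default, LfuBufferSize.Default(1, 128));

        // After: the cache sizes its own buffers, so the LfuBufferSize argument is gone.
        var after = new ConcurrentLfu<int, int>(
            1, 20, new BackgroundThreadScheduler(), EqualityComparer<int>.Default);

        // Tests that previously read LfuBufferSize.DefaultBufferSize now use the
        // constant exposed on the cache type itself.
        int bufferSize = ConcurrentLfu<int, int>.DefaultBufferSize;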