1 parent 8617eeb commit 223580a
LLama/Batched/BatchedExecutor.cs
@@ -19,6 +19,7 @@ public sealed class BatchedExecutor
     private int _batchQueueHead;
     private int _batchedTokenCount;
     private bool _batchedTokenCountDirty = true;
+    private const int CleanupThreshold = 16;
 
     /// <summary>
     /// Set to 1 using interlocked exchange while inference is running
@@ -216,7 +217,7 @@ void CleanupQueue()
             return;
         }
 
-        if (_batchQueueHead > 16 && _batchQueueHead > _batchQueue.Count / 2)
+        if (_batchQueueHead > CleanupThreshold && _batchQueueHead > _batchQueue.Count / 2)
         {
             _batchQueue.RemoveRange(0, _batchQueueHead);
             _batchQueueHead = 0;
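The change replaces the magic number 16 with the named `CleanupThreshold` constant in the deferred queue-compaction check. For context, here is a minimal, standalone sketch of that pattern, not the actual `BatchedExecutor` code: items are consumed by advancing a head index instead of removing from the front of the list, and the consumed prefix is trimmed only once it exceeds the threshold and makes up more than half of the list. The `QueueCompactor` and `TryDequeue` names are illustrative only.

```csharp
using System.Collections.Generic;

// Hypothetical helper demonstrating the deferred-compaction pattern.
public sealed class QueueCompactor<T>
{
    // Same threshold as the commit introduces; value chosen here for illustration.
    private const int CleanupThreshold = 16;

    private readonly List<T> _queue = new();
    private int _head;

    public void Enqueue(T item) => _queue.Add(item);

    public bool TryDequeue(out T item)
    {
        if (_head >= _queue.Count)
        {
            item = default!;
            return false;
        }

        // Consume by advancing the head index; no per-item RemoveAt(0).
        item = _queue[_head++];

        // Compact only when the dead prefix is both long in absolute terms
        // and dominates the list, so RemoveRange's O(n) copy is amortised.
        if (_head > CleanupThreshold && _head > _queue.Count / 2)
        {
            _queue.RemoveRange(0, _head);
            _head = 0;
        }

        return true;
    }
}
```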