1 parent fb9e154 commit 9e7db0d
mlperf/mixtral_run.sh
old mode 100644
new mode 100755
mlperf/offline_mode.py
@@ -354,11 +354,11 @@ def main(argv):
       (3072, 256),
   )
   engines = []
-  params = 1
+  params = None
   for i, (length, max_batch) in enumerate(length_and_batch):
     batch = min(counts_by_bucket[i], max_batch)
     log.info(f"Using batch size of {batch} for {length}")
-    engine = create_engine_from_config_flags(batch=batch, cache_size=length)
+    engine = create_engine_from_config_flags(batch=batch, cache_len=length)
     offline_inf = offline_inference.OfflineInference(engine, params)
     offline_inf.dummy = FLAGS.internal_dummy_model
     params = offline_inf.params
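The hunk makes two changes: `params` now starts as `None` rather than `1`, and the engine factory is called with `cache_len=length` instead of `cache_size=length`. The loop suggests a load-once-and-reuse pattern: the first `OfflineInference` sees `params=None` and loads the weights itself, and every later engine (one per cache length) is handed the already-loaded `params`. Below is a self-contained toy sketch of that pattern; the classes, bucket values, and the `engines.append` step are made up for illustration and are not the repo's actual API.

# Toy sketch of the load-once-and-reuse pattern implied by the hunk above.
# ToyEngine / ToyOfflineInference are hypothetical stand-ins, not the repo's
# create_engine_from_config_flags / offline_inference.OfflineInference.

class ToyEngine:
  def __init__(self, batch, cache_len):
    self.batch = batch          # max batch size for this bucket
    self.cache_len = cache_len  # KV-cache length for this bucket

class ToyOfflineInference:
  load_count = 0  # counts how many times the "checkpoint" was loaded

  def __init__(self, engine, params=None):
    if params is None:
      ToyOfflineInference.load_count += 1  # stand-in for a slow checkpoint load
      params = {"weights": "loaded"}
    self.engine = engine
    self.params = params

# Bucket table of (cache length, max batch). Values are illustrative only;
# the diff shows just the last entry, (3072, 256).
length_and_batch = ((1024, 512), (2048, 384), (3072, 256))
counts_by_bucket = [300, 300, 300]

params = None  # nothing loaded yet, mirrors the `params = None` change
engines = []
for i, (length, max_batch) in enumerate(length_and_batch):
  batch = min(counts_by_bucket[i], max_batch)
  engine = ToyEngine(batch=batch, cache_len=length)
  offline_inf = ToyOfflineInference(engine, params)
  params = offline_inf.params  # reuse the loaded weights for later engines
  engines.append(engine)

assert ToyOfflineInference.load_count == 1  # weights were loaded exactly once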