2 files changed: +2 −1
@@ -54,3 +54,4 @@
 python -m tests.integration_tests.run_tests --test_suite models artifacts-to-be-uploaded --ngpu 8
 python -m tests.integration_tests.flux artifacts-to-be-uploaded/flux --ngpu 8
 rm -rf artifacts-to-be-uploaded/*/checkpoint
+rm -rf artifacts-to-be-uploaded/flux/test_generate/inference_results/
torchtitan/models/flux/inference:

@@ -34,7 +34,6 @@ def inference(config: JobConfig):
             f"FSDP all-gather will hang if some ranks have no prompts to process."
         )

-    bs = config.inference.local_batch_size
     # Distribute prompts across processes using round-robin assignment
     prompts = original_prompts[global_rank::world_size]

@@ -46,6 +45,7 @@ def inference(config: JobConfig):

     if prompts:
         # Generate images for this process's assigned prompts
+        bs = config.inference.local_batch_size

         output_dir = os.path.join(
             config.job.dump_folder,
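
For context on the second change, the sketch below is a minimal illustration, not the torchtitan implementation; the helper names shard_prompts and generate_rank are made up. It shows what the round-robin assignment and the relocated bs = config.inference.local_batch_size read amount to: each rank takes every world_size-th prompt starting at its global_rank, and only consults the local batch size once it knows it has prompts to generate.

# Minimal sketch, assuming hypothetical helpers and plain Python lists;
# not the torchtitan implementation.

def shard_prompts(original_prompts, global_rank, world_size):
    # Round-robin: rank r gets prompts r, r + world_size, r + 2 * world_size, ...
    return original_prompts[global_rank::world_size]


def generate_rank(original_prompts, global_rank, world_size, local_batch_size):
    prompts = shard_prompts(original_prompts, global_rank, world_size)
    if not prompts:
        # Mirrors the warning in the diff: a rank with no prompts skips generation,
        # which can leave FSDP all-gathers hanging on the other ranks.
        return []

    bs = local_batch_size  # read only when this rank has work, as in the diff
    batches = [prompts[i : i + bs] for i in range(0, len(prompts), bs)]
    # Stand-in for the actual per-batch image-generation call.
    return [f"generated {len(batch)} image(s)" for batch in batches]


if __name__ == "__main__":
    demo_prompts = [f"prompt {i}" for i in range(7)]
    for rank in range(3):
        print(rank, generate_rank(demo_prompts, rank, world_size=3, local_batch_size=2))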