@@ -85,7 +85,7 @@ public virtual async Task<DenseTensor<float>> DiffuseAsync(IModelOptions modelOp
             schedulerOptions.Seed = schedulerOptions.Seed > 0 ? schedulerOptions.Seed : Random.Shared.Next();
 
             var diffuseTime = _logger?.LogBegin("Begin...");
-            _logger?.Log($"Model: {modelOptions.Name}, Pipeline: {modelOptions.PipelineType}, Diffuser: {promptOptions.DiffuserType}, Scheduler: {promptOptions.SchedulerType}");
+            _logger?.Log($"Model: {modelOptions.Name}, Pipeline: {modelOptions.PipelineType}, Diffuser: {promptOptions.DiffuserType}, Scheduler: {schedulerOptions.SchedulerType}");
 
             // LCM does not support negative prompting
             var performGuidance = false;
@@ -117,7 +117,7 @@ public virtual async Task<DenseTensor<float>> DiffuseAsync(IModelOptions modelOp
         public async IAsyncEnumerable<BatchResult> DiffuseBatchAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, Action<int, int, int, int> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
         {
             var diffuseBatchTime = _logger?.LogBegin("Begin...");
-            _logger?.Log($"Model: {modelOptions.Name}, Pipeline: {modelOptions.PipelineType}, Diffuser: {promptOptions.DiffuserType}, Scheduler: {promptOptions.SchedulerType}");
+            _logger?.Log($"Model: {modelOptions.Name}, Pipeline: {modelOptions.PipelineType}, Diffuser: {promptOptions.DiffuserType}, Scheduler: {schedulerOptions.SchedulerType}");
 
             // LCM does not support negative prompting
             var performGuidance = false;
@@ -130,9 +130,7 @@ public async IAsyncEnumerable<BatchResult> DiffuseBatchAsync(IModelOptions model
             var batchSchedulerOptions = BatchGenerator.GenerateBatch(batchOptions, schedulerOptions);
 
             var batchIndex = 1;
-            var batchCount = batchSchedulerOptions.Count;
-            var schedulerCallback = (int p, int t) => progressCallback?.Invoke(batchIndex, batchCount, p, t);
-
+            var schedulerCallback = (int step, int steps) => progressCallback?.Invoke(batchIndex, batchSchedulerOptions.Count, step, steps);
             foreach (var batchSchedulerOption in batchSchedulerOptions)
             {
                 yield return new BatchResult(batchSchedulerOption, await RunSchedulerSteps(modelOptions, promptOptions, batchSchedulerOption, promptEmbeddings, performGuidance, schedulerCallback, cancellationToken));
@@ -281,7 +279,7 @@ protected virtual IReadOnlyList<NamedOnnxValue> CreateUnetInputParams(IModelOpti
         /// <returns></returns>
         protected IScheduler GetScheduler(PromptOptions prompt, SchedulerOptions options)
         {
-            return prompt.SchedulerType switch
+            return options.SchedulerType switch
             {
                 SchedulerType.LCM => new LCMScheduler(options),
                 _ => default
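
With this change the scheduler is selected from SchedulerOptions rather than PromptOptions. A minimal calling sketch, assuming a pre-built `diffuser` instance and `modelOptions`, a hypothetical `Prompt` property on PromptOptions, and that the remaining DiffuseAsync parameters (progress callback, cancellation token) are optional:

    // Hypothetical usage sketch, not taken from the repository
    var promptOptions = new PromptOptions
    {
        Prompt = "a photo of an astronaut riding a horse" // hypothetical property name
    };

    var schedulerOptions = new SchedulerOptions
    {
        SchedulerType = SchedulerType.LCM, // resolved by GetScheduler via options.SchedulerType
        Seed = 0                           // 0 lets DiffuseAsync substitute Random.Shared.Next()
    };

    // Returns the generated image as a DenseTensor<float>
    var result = await diffuser.DiffuseAsync(modelOptions, promptOptions, schedulerOptions);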