This repository was archived by the owner on Nov 27, 2024. It is now read-only.

Commit 82826ac

Use DefaultSchedulerOptions if none supplied
1 parent d5414a3 commit 82826ac
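
In practical terms, the commit makes the schedulerOptions parameter optional on both RunAsync and RunBatchAsync; when the caller passes nothing (or null), StableDiffusionPipeline falls back to its existing _defaultSchedulerOptions field. A minimal caller-side sketch of the new call pattern follows; only the RunAsync signature comes from this diff, while the pipeline variable, its construction, and the Prompt property are illustrative assumptions.

// Assumes: using Microsoft.ML.OnnxRuntime.Tensors; plus the OnnxStack.StableDiffusion
// namespaces that define IPipeline, PromptOptions and SchedulerOptions.
// `pipeline` is an already-constructed IPipeline; `Prompt` is an assumed property name.
var promptOptions = new PromptOptions { Prompt = "a photo of an astronaut riding a horse" };

// Before this commit, a SchedulerOptions instance had to be supplied explicitly:
// DenseTensor<float> image = await pipeline.RunAsync(promptOptions, new SchedulerOptions());

// After this commit, it can be omitted and the pipeline's default options are used:
DenseTensor<float> image = await pipeline.RunAsync(promptOptions);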

File tree

6 files changed (+15, -13 lines)


OnnxStack.StableDiffusion/Pipelines/Base/IPipeline.cs

Lines changed: 2 additions & 2 deletions
@@ -61,7 +61,7 @@ public interface IPipeline
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default);
+        Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default);


        /// <summary>
@@ -74,6 +74,6 @@ public interface IPipeline
        /// <param name="progressCallback">The progress callback.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns></returns>
-        IAsyncEnumerable<BatchResult> RunBatchAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default);
+        IAsyncEnumerable<BatchResult> RunBatchAsync(BatchOptions batchOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default);
     }
 }
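
Besides making schedulerOptions optional, note that RunBatchAsync moves BatchOptions to the front of the parameter list: C# requires optional parameters to trail the required ones, so batchOptions can no longer follow a defaulted schedulerOptions. Existing call sites therefore need their arguments reordered; a hedged before/after sketch, assuming an IPipeline instance named pipeline and pre-built promptOptions and batchOptions:

// Old order (no longer compiles against the updated interface):
// await foreach (var result in pipeline.RunBatchAsync(promptOptions, schedulerOptions, batchOptions)) { ... }

// New order: batchOptions first, schedulerOptions (and everything after it) optional.
await foreach (BatchResult result in pipeline.RunBatchAsync(batchOptions, promptOptions))
{
    // Process each BatchResult as it is produced by the batch diffusion run.
}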

OnnxStack.StableDiffusion/Pipelines/Base/PipelineBase.cs

Lines changed: 2 additions & 2 deletions
@@ -90,7 +90,7 @@ protected PipelineBase(ILogger logger)
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public abstract Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default);
+        public abstract Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default);


        /// <summary>
@@ -103,7 +103,7 @@ protected PipelineBase(ILogger logger)
        /// <param name="progressCallback">The progress callback.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns></returns>
-        public abstract IAsyncEnumerable<BatchResult> RunBatchAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default);
+        public abstract IAsyncEnumerable<BatchResult> RunBatchAsync(BatchOptions batchOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default);


        /// <summary>

OnnxStack.StableDiffusion/Pipelines/LatentConsistencyPipeline.cs

Lines changed: 3 additions & 3 deletions
@@ -61,7 +61,7 @@ public LatentConsistencyPipeline(string name, TokenizerModel tokenizer, TextEnco
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public override Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default)
+        public override Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default)
         {
             // LCM does not support negative prompting
             promptOptions.NegativePrompt = string.Empty;
@@ -79,11 +79,11 @@ public override Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, S
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public override IAsyncEnumerable<BatchResult> RunBatchAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+        public override IAsyncEnumerable<BatchResult> RunBatchAsync(BatchOptions batchOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
         {
             // LCM does not support negative prompting
             promptOptions.NegativePrompt = string.Empty;
-            return base.RunBatchAsync(promptOptions, schedulerOptions, batchOptions, controlNet, progressCallback, cancellationToken);
+            return base.RunBatchAsync(batchOptions, promptOptions, schedulerOptions, controlNet, progressCallback, cancellationToken);
         }

OnnxStack.StableDiffusion/Pipelines/LatentConsistencyXLPipeline.cs

Lines changed: 3 additions & 3 deletions
@@ -64,7 +64,7 @@ public LatentConsistencyXLPipeline(string name, TokenizerModel tokenizer, Tokeni
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public override Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default)
+        public override Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default)
         {
             // LCM does not support negative prompting
             promptOptions.NegativePrompt = string.Empty;
@@ -82,11 +82,11 @@ public override Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, S
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public override IAsyncEnumerable<BatchResult> RunBatchAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+        public override IAsyncEnumerable<BatchResult> RunBatchAsync(BatchOptions batchOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
         {
             // LCM does not support negative prompting
             promptOptions.NegativePrompt = string.Empty;
-            return base.RunBatchAsync(promptOptions, schedulerOptions, batchOptions, controlNet, progressCallback, cancellationToken);
+            return base.RunBatchAsync(batchOptions, promptOptions, schedulerOptions, controlNet, progressCallback, cancellationToken);
         }

OnnxStack.StableDiffusion/Pipelines/StableDiffusionPipeline.cs

Lines changed: 4 additions & 2 deletions
@@ -154,9 +154,10 @@ public override void ValidateInputs(PromptOptions promptOptions, SchedulerOption
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public override async Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default)
+        public override async Task<DenseTensor<float>> RunAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, CancellationToken cancellationToken = default)
         {
             // Create random seed if none was set
+            schedulerOptions ??= _defaultSchedulerOptions;
             schedulerOptions.Seed = schedulerOptions.Seed > 0 ? schedulerOptions.Seed : Random.Shared.Next();

             var diffuseTime = _logger?.LogBegin("Diffuser starting...");
@@ -191,9 +192,10 @@ public override async Task<DenseTensor<float>> RunAsync(PromptOptions promptOpti
         /// <param name="progressCallback">The progress callback.</param>
         /// <param name="cancellationToken">The cancellation token.</param>
         /// <returns></returns>
-        public override async IAsyncEnumerable<BatchResult> RunBatchAsync(PromptOptions promptOptions, SchedulerOptions schedulerOptions, BatchOptions batchOptions, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+        public override async IAsyncEnumerable<BatchResult> RunBatchAsync(BatchOptions batchOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions = default, ControlNetModel controlNet = default, Action<DiffusionProgress> progressCallback = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
         {
             // Create random seed if none was set
+            schedulerOptions ??= _defaultSchedulerOptions;
             schedulerOptions.Seed = schedulerOptions.Seed > 0 ? schedulerOptions.Seed : Random.Shared.Next();

             var diffuseBatchTime = _logger?.LogBegin("Batch Diffuser starting...");
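
The fallback itself is the single null-coalescing assignment added above; the latent-consistency pipelines pick it up through the base.RunBatchAsync calls shown earlier rather than duplicating it. A self-contained illustration of the pattern follows, using a placeholder Options class rather than the real SchedulerOptions:

using System;

// Placeholder for SchedulerOptions; only Seed matters for this sketch.
public sealed class Options { public int Seed { get; set; } }

public sealed class PipelineSketch
{
    // Stand-in for the pipeline's _defaultSchedulerOptions member.
    private readonly Options _defaultOptions = new Options();

    public Options Resolve(Options options = default)
    {
        // `??=` assigns only when `options` is null, so explicitly supplied options always win.
        options ??= _defaultOptions;
        // Same seed logic as the diff: keep a positive seed, otherwise randomize.
        options.Seed = options.Seed > 0 ? options.Seed : Random.Shared.Next();
        return options;
    }
}

One consequence worth noting: when the default is used, the seed assignment mutates the shared default instance, which mirrors the behaviour of the diff above.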

OnnxStack.StableDiffusion/Services/StableDiffusionService.cs

Lines changed: 1 addition & 1 deletion
@@ -324,7 +324,7 @@ private async IAsyncEnumerable<BatchResult> DiffuseBatchAsync(ModelOptions model
             pipeline.ValidateInputs(promptOptions, schedulerOptions);

             await GenerateInputVideoFrames(promptOptions, progressCallback);
-            await foreach (var result in pipeline.RunBatchAsync(promptOptions, schedulerOptions, batchOptions, controlNet, progressCallback, cancellationToken))
+            await foreach (var result in pipeline.RunBatchAsync(batchOptions, promptOptions, schedulerOptions, controlNet, progressCallback, cancellationToken))
             {
                 yield return result;
             }

0 commit comments

Comments
 (0)