This repository was archived by the owner on Nov 27, 2024. It is now read-only.

Commit 8b0595a

Merge pull request #50 from saddam213/SDXL
SDXL Pipeline support
2 parents: 5d35a84 + e55600b

32 files changed: +1192, -61 lines

OnnxStack.Console/appsettings.json

Lines changed: 55 additions & 0 deletions
@@ -16,6 +16,7 @@
       "TokenizerLimit": 77,
       "EmbeddingsLength": 768,
       "ScaleFactor": 0.18215,
+      "SampleSize": 512,
       "PipelineType": "StableDiffusion",
       "Diffusers": [
         "TextToImage",
@@ -58,6 +59,7 @@
       "TokenizerLimit": 77,
       "EmbeddingsLength": 768,
       "ScaleFactor": 0.18215,
+      "SampleSize": 512,
       "PipelineType": "LatentConsistency",
       "Diffusers": [
         "TextToImage",
@@ -99,6 +101,7 @@
       "TokenizerLimit": 77,
       "EmbeddingsLength": 768,
       "ScaleFactor": 0.18215,
+      "SampleSize": 512,
       "PipelineType": "StableDiffusion",
       "Diffusers": [
         "TextToImage",
@@ -141,6 +144,7 @@
       "TokenizerLimit": 77,
       "EmbeddingsLength": 768,
       "ScaleFactor": 0.18215,
+      "SampleSize": 512,
       "PipelineType": "InstaFlow",
       "Diffusers": [
         "TextToImage"
@@ -172,6 +176,57 @@
           "OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_decoder\\model.onnx"
         }
       ]
+    },
+    {
+      "Name": "DreamShaper XL",
+      "IsEnabled": true,
+      "PadTokenId": 1,
+      "BlankTokenId": 49407,
+      "TokenizerLimit": 77,
+      "EmbeddingsLength": 768,
+      "DualEmbeddingsLength": 1280,
+      "IsDualTokenizer": true,
+      "ScaleFactor": 0.13025,
+      "SampleSize": 1024,
+      "PipelineType": "StableDiffusionXL",
+      "Diffusers": [
+        "TextToImage"
+      ],
+      "DeviceId": 0,
+      "InterOpNumThreads": 0,
+      "IntraOpNumThreads": 0,
+      "ExecutionMode": "ORT_SEQUENTIAL",
+      "ExecutionProvider": "DirectML",
+      "ModelConfigurations": [
+        {
+          "Type": "Tokenizer",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\tokenizer\\model.onnx"
+        },
+        {
+          "Type": "Tokenizer2",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\tokenizer_2\\model.onnx"
+        },
+        {
+          "Type": "Unet",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\unet\\model.onnx"
+        },
+        {
+          "Type": "TextEncoder",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\text_encoder\\model.onnx"
+        },
+        {
+          "Type": "TextEncoder2",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\text_encoder_2\\model.onnx"
+        },
+        {
+          "Type": "VaeEncoder",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\vae_encoder\\model.onnx"
+        },
+        {
+          "Type": "VaeDecoder",
+          "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\vae_decoder\\model.onnx"
+        }
+      ]
     }
   ]
 }
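
For context, here is a minimal sketch (not part of this commit) of reading the new DreamShaper XL entry back out of appsettings.json with Microsoft.Extensions.Configuration. The section path "OnnxStackConfig:ModelSets:4" and the ModelOptions namespace are assumptions inferred from the file layout, not values shown in this diff.

using System;
using Microsoft.Extensions.Configuration;
using OnnxStack.StableDiffusion.Config; // namespace assumed from Config/ModelOptions.cs

var configuration = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json")
    .Build();

// Bind the "DreamShaper XL" entry; the section key and the index are hypothetical.
var sdxlModel = configuration
    .GetSection("OnnxStackConfig:ModelSets:4")
    .Get<ModelOptions>();

// The settings introduced by this commit drive the SDXL-specific behaviour.
Console.WriteLine(sdxlModel.SampleSize);           // 1024
Console.WriteLine(sdxlModel.IsDualTokenizer);      // True
Console.WriteLine(sdxlModel.DualEmbeddingsLength); // 1280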

OnnxStack.Core/Config/OnnxModelType.cs

Lines changed: 2 additions & 0 deletions
@@ -4,7 +4,9 @@ public enum OnnxModelType
 {
     Unet = 0,
     Tokenizer = 10,
+    Tokenizer2 = 11,
     TextEncoder = 20,
+    TextEncoder2 = 21,
     VaeEncoder = 30,
     VaeDecoder = 40,
 }
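
As a rough illustration (not part of this diff), the two new members let an SDXL model set address its second tokenizer and text encoder alongside the existing components; the namespace import is assumed from the file path.

using OnnxStack.Core.Config; // assumed from OnnxStack.Core/Config/OnnxModelType.cs

OnnxModelType[] sdxlComponents =
{
    OnnxModelType.Tokenizer,
    OnnxModelType.Tokenizer2,   // new in this commit
    OnnxModelType.TextEncoder,
    OnnxModelType.TextEncoder2, // new in this commit
    OnnxModelType.Unet,
    OnnxModelType.VaeEncoder,
    OnnxModelType.VaeDecoder
};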

OnnxStack.Core/Extensions/OrtValueExtensions.cs

Lines changed: 11 additions & 0 deletions
@@ -49,6 +49,17 @@ public static OrtValue ToOrtValue(this DenseTensor<int> tensor, OnnxNamedMetadat
 }


+/// <summary>
+/// Converts DenseTensor<long> to OrtValue.
+/// </summary>
+/// <param name="tensor">The tensor.</param>
+/// <returns></returns>
+public static OrtValue ToOrtValue(this DenseTensor<long> tensor, OnnxNamedMetadata metadata)
+{
+    return OrtValue.CreateTensorValueFromMemory(OrtMemoryInfo.DefaultInstance, tensor.Buffer, tensor.Dimensions.ToLong());
+}
+

 /// <summary>
 /// Creates and allocates the output tensors buffer.
 /// </summary>
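
A minimal usage sketch of the new overload, assuming the caller already holds the OnnxNamedMetadata for an int64 input; token ids are typically int64 in exported CLIP text encoders, which is what motivates the long overload. The extensions and metadata namespaces are inferred from the file paths.

using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;
// plus usings for the OrtValueExtensions and OnnxNamedMetadata namespaces (assumed).

static OrtValue CreateInputIdsValue(OnnxNamedMetadata metadata)
{
    // Example token ids: start token, two content tokens, end/blank token (49407, as in the config above).
    var inputIds = new DenseTensor<long>(new long[] { 49406, 320, 1125, 49407 }, new[] { 1, 4 });
    return inputIds.ToOrtValue(metadata);
}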

OnnxStack.Core/Model/OnnxInferenceParameters.cs

Lines changed: 23 additions & 0 deletions
@@ -67,6 +67,16 @@ public void AddInputTensor(DenseTensor<int> value)
     _inputs.Add(metaData, value.ToOrtValue(metaData));
 }

+/// <summary>
+/// Adds the input tensor.
+/// </summary>
+/// <param name="value">The value.</param>
+public void AddInputTensor(DenseTensor<long> value)
+{
+    var metaData = GetNextInputMetadata();
+    _inputs.Add(metaData, value.ToOrtValue(metaData));
+}
+

 /// <summary>
 /// Adds an output parameter with known output size.
@@ -90,6 +100,19 @@ public void AddOutputBuffer(ReadOnlySpan<int> bufferDimension)
 }


+/// <summary>
+/// Adds the output buffer.
+/// </summary>
+/// <param name="index">The index.</param>
+/// <param name="bufferDimension">The buffer dimension.</param>
+public void AddOutputBuffer(int index, ReadOnlySpan<int> bufferDimension)
+{
+    var metadata = _metadata.Outputs[index];
+    _outputs.Add(metadata, metadata.CreateOutputBuffer(bufferDimension));
+}
+
+
+
 /// <summary>
 /// Adds an output parameter with unknown output size.
 /// </summary>
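
A hedged sketch of the two new members working together, for a hypothetical second text encoder that exposes more than one output; the output order and shapes (batch 1, 77 tokens, 1280 dimensions) are assumptions, not taken from this commit.

using Microsoft.ML.OnnxRuntime.Tensors;
// plus a using for the OnnxInferenceParameters namespace (assumed from OnnxStack.Core/Model).

static void AddTextEncoder2Parameters(OnnxInferenceParameters parameters, DenseTensor<long> inputIds)
{
    parameters.AddInputTensor(inputIds);                  // new DenseTensor<long> overload
    parameters.AddOutputBuffer(0, new[] { 1, 77, 1280 }); // hidden states, selected by index
    parameters.AddOutputBuffer(1, new[] { 1, 1280 });     // pooled embedding, selected by index
}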

OnnxStack.StableDiffusion/Common/IModelOptions.cs

Lines changed: 3 additions & 0 deletions
@@ -10,9 +10,12 @@ public interface IModelOptions : IOnnxModel
     bool IsEnabled { get; set; }
     int PadTokenId { get; set; }
     int BlankTokenId { get; set; }
+    int SampleSize { get; set; }
     float ScaleFactor { get; set; }
     int TokenizerLimit { get; set; }
     int EmbeddingsLength { get; set; }
+    int DualEmbeddingsLength { get; set; }
+    bool IsDualTokenizer { get; set; }
     DiffuserPipelineType PipelineType { get; set; }
     List<DiffuserType> Diffusers { get; set; }
     ImmutableArray<int> BlankTokenValueArray { get; set; }
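
Illustrative only (not code from this commit): a diffuser could use the new options to size its prompt buffers, assuming SDXL concatenates the hidden states of both encoders as in the reference pipeline (768 + 1280 = 2048 for the DreamShaper XL entry above).

// model is any IModelOptions implementation, e.g. the ModelOptions class further down.
static (int HiddenSize, int SampleSize) GetPromptDimensions(IModelOptions model)
{
    int hiddenSize = model.IsDualTokenizer
        ? model.EmbeddingsLength + model.DualEmbeddingsLength // SDXL: both encoders
        : model.EmbeddingsLength;                             // SD 1.5 / LCM: single encoder
    return (hiddenSize, model.SampleSize);                    // 1024 for SDXL, 512 otherwise
}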

OnnxStack.StableDiffusion/Common/IPromptService.cs

Lines changed: 1 addition & 3 deletions
@@ -6,8 +6,6 @@ namespace OnnxStack.StableDiffusion.Common
 {
     public interface IPromptService
     {
-        Task<DenseTensor<float>> CreatePromptAsync(IModelOptions model, PromptOptions promptOptions, bool isGuidanceEnabled);
-        Task<int[]> DecodeTextAsync(IModelOptions model, string inputText);
-        Task<float[]> EncodeTokensAsync(IModelOptions model, int[] tokenizedInput);
+        Task<PromptEmbeddingsResult> CreatePromptAsync(IModelOptions model, PromptOptions promptOptions, bool isGuidanceEnabled);
     }
 }
OnnxStack.StableDiffusion/Common/PromptEmbeddingsResult.cs (new file)

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+using Microsoft.ML.OnnxRuntime.Tensors;
+
+namespace OnnxStack.StableDiffusion.Common
+{
+    public record PromptEmbeddingsResult(DenseTensor<float> PromptEmbeds, DenseTensor<float> PooledPromptEmbeds = default);
+}
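
A small consumption sketch (assumed, not from this diff): diffusers now get both tensors from a single CreatePromptAsync call instead of a bare DenseTensor<float>; the local and field names are assumptions.

// Inside a diffuser, after injecting IPromptService as _promptService (field name assumed).
PromptEmbeddingsResult embeddings = await _promptService.CreatePromptAsync(modelOptions, promptOptions, performGuidance);
DenseTensor<float> promptEmbeds = embeddings.PromptEmbeds;        // used by every pipeline
DenseTensor<float> pooledEmbeds = embeddings.PooledPromptEmbeds;  // defaults to null; expected only for SDXL-style pipelines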

OnnxStack.StableDiffusion/Config/ModelOptions.cs

Lines changed: 3 additions & 0 deletions
@@ -17,6 +17,9 @@ public class ModelOptions : IModelOptions, IOnnxModelSetConfig
     public int BlankTokenId { get; set; }
     public int TokenizerLimit { get; set; }
     public int EmbeddingsLength { get; set; }
+    public int DualEmbeddingsLength { get; set; }
+    public bool IsDualTokenizer { get; set; }
+    public int SampleSize { get; set; } = 512;
     public float ScaleFactor { get; set; }
     public DiffuserPipelineType PipelineType { get; set; }
     public List<DiffuserType> Diffusers { get; set; } = new List<DiffuserType>();

OnnxStack.StableDiffusion/Diffusers/DiffuserBase.cs

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ public DiffuserBase(IOnnxModelService onnxModelService, IPromptService promptSer
 /// <param name="progressCallback">The progress callback.</param>
 /// <param name="cancellationToken">The cancellation token.</param>
 /// <returns></returns>
-protected abstract Task<DenseTensor<float>> SchedulerStepAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, DenseTensor<float> promptEmbeddings, bool performGuidance, Action<int, int> progressCallback = null, CancellationToken cancellationToken = default);
+protected abstract Task<DenseTensor<float>> SchedulerStepAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, PromptEmbeddingsResult promptEmbeddings, bool performGuidance, Action<int, int> progressCallback = null, CancellationToken cancellationToken = default);


 /// <summary>

OnnxStack.StableDiffusion/Diffusers/InstaFlow/InstaFlowDiffuser.cs

Lines changed: 2 additions & 2 deletions
@@ -45,7 +45,7 @@ public InstaFlowDiffuser(IOnnxModelService onnxModelService, IPromptService prom
 /// <param name="progressCallback">The progress callback.</param>
 /// <param name="cancellationToken">The cancellation token.</param>
 /// <returns></returns>
-protected override async Task<DenseTensor<float>> SchedulerStepAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, DenseTensor<float> promptEmbeddings, bool performGuidance, Action<int, int> progressCallback = null, CancellationToken cancellationToken = default)
+protected override async Task<DenseTensor<float>> SchedulerStepAsync(IModelOptions modelOptions, PromptOptions promptOptions, SchedulerOptions schedulerOptions, PromptEmbeddingsResult promptEmbeddings, bool performGuidance, Action<int, int> progressCallback = null, CancellationToken cancellationToken = default)
 {
     // Get Scheduler
     using (var scheduler = GetScheduler(schedulerOptions))
@@ -81,7 +81,7 @@ protected override async Task<DenseTensor<float>> SchedulerStepAsync(IModelOptio
 {
     inferenceParameters.AddInputTensor(inputTensor);
     inferenceParameters.AddInputTensor(timestepTensor);
-    inferenceParameters.AddInputTensor(promptEmbeddings);
+    inferenceParameters.AddInputTensor(promptEmbeddings.PromptEmbeds);
     inferenceParameters.AddOutputBuffer(outputDimension);

     var results = await _onnxModelService.RunInferenceAsync(modelOptions, OnnxModelType.Unet, inferenceParameters);
