This repository was archived by the owner on Nov 27, 2024. It is now read-only.

Commit c7b7f8a

New API design

1 parent 5d74009 commit c7b7f8a

73 files changed: 1,932 additions and 2,287 deletions (only a subset of the changed files is shown below)

OnnxStack.Console/Examples/StableDiffusionBatch.cs

Lines changed: 0 additions & 1 deletion
@@ -3,7 +3,6 @@
 using OnnxStack.StableDiffusion.Config;
 using OnnxStack.StableDiffusion.Enums;
 using OnnxStack.StableDiffusion.Helpers;
-using OnnxStack.StableDiffusion.Models;
 using SixLabors.ImageSharp;

 namespace OnnxStack.Console.Runner

OnnxStack.Console/appsettings.json

Lines changed: 84 additions & 143 deletions
@@ -11,12 +11,6 @@
       {
         "Name": "StableDiffusion 1.5",
         "IsEnabled": true,
-        "PadTokenId": 49407,
-        "BlankTokenId": 49407,
-        "TokenizerLimit": 77,
-        "TokenizerLength": 768,
-        "TokenizerType": "One",
-        "ScaleFactor": 0.18215,
         "SampleSize": 512,
         "PipelineType": "StableDiffusion",
         "Diffusers": [
@@ -29,164 +23,114 @@
         "IntraOpNumThreads": 0,
         "ExecutionMode": "ORT_SEQUENTIAL",
         "ExecutionProvider": "DirectML",
-        "ModelConfigurations": [
-          {
-            "Type": "Tokenizer",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
-          },
-          {
-            "Type": "Unet",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\unet\\model.onnx"
-          },
-          {
-            "Type": "TextEncoder",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\text_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeEncoder",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeDecoder",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_decoder\\model.onnx"
-          }
-        ]
+        "TokenizerConfig": {
+          "PadTokenId": 49407,
+          "BlankTokenId": 49407,
+          "TokenizerLimit": 77,
+          "TokenizerLength": 768,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
+        },
+        "TextEncoderConfig": {
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\text_encoder\\model.onnx"
+        },
+        "UnetConfig": {
+          "ModelType": "Base",
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\unet\\model.onnx"
+        },
+        "VaeDecoderConfig": {
+          "ScaleFactor": 0.18215,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_decoder\\model.onnx"
+        },
+        "VaeEncoderConfig": {
+          "ScaleFactor": 0.18215,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_encoder\\model.onnx"
+        }
       },
       {
         "Name": "LCM-Dreamshaper-V7",
         "IsEnabled": true,
-        "PadTokenId": 49407,
-        "BlankTokenId": 49407,
-        "TokenizerLimit": 77,
-        "TokenizerLength": 768,
-        "TokenizerType": "One",
-        "ScaleFactor": 0.18215,
         "SampleSize": 512,
         "PipelineType": "LatentConsistency",
         "Diffusers": [
           "TextToImage",
-          "ImageToImage"
-        ],
-        "DeviceId": 0,
-        "InterOpNumThreads": 0,
-        "IntraOpNumThreads": 0,
-        "ExecutionMode": "ORT_SEQUENTIAL",
-        "ExecutionProvider": "DirectML",
-        "ModelConfigurations": [
-          {
-            "Type": "Tokenizer",
-            "OnnxModelPath": "D:\\Repositories\\LCM-Dreamshaper-V7-ONNX\\tokenizer\\model.onnx"
-          },
-          {
-            "Type": "Unet",
-            "OnnxModelPath": "D:\\Repositories\\LCM-Dreamshaper-V7-ONNX\\unet\\model.onnx"
-          },
-          {
-            "Type": "TextEncoder",
-            "OnnxModelPath": "D:\\Repositories\\LCM-Dreamshaper-V7-ONNX\\text_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeEncoder",
-            "OnnxModelPath": "D:\\Repositories\\LCM-Dreamshaper-V7-ONNX\\vae_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeDecoder",
-            "OnnxModelPath": "D:\\Repositories\\LCM-Dreamshaper-V7-ONNX\\vae_decoder\\model.onnx"
-          }
-        ]
-      },
-      {
-        "Name": "InstaFlow",
-        "IsEnabled": true,
-        "PadTokenId": 49407,
-        "BlankTokenId": 49407,
-        "TokenizerLimit": 77,
-        "TokenizerLength": 768,
-        "TokenizerType": "One",
-        "ScaleFactor": 0.18215,
-        "SampleSize": 512,
-        "PipelineType": "InstaFlow",
-        "Diffusers": [
-          "TextToImage"
+          "ImageToImage",
+          "ImageInpaintLegacy"
         ],
         "DeviceId": 0,
         "InterOpNumThreads": 0,
         "IntraOpNumThreads": 0,
         "ExecutionMode": "ORT_SEQUENTIAL",
         "ExecutionProvider": "DirectML",
-        "ModelConfigurations": [
-          {
-            "Type": "Tokenizer",
-            "OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\tokenizer\\model.onnx"
-          },
-          {
-            "Type": "Unet",
-            "OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\unet\\model.onnx"
-          },
-          {
-            "Type": "TextEncoder",
-            "OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\text_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeEncoder",
-            "OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeDecoder",
-            "OnnxModelPath": "D:\\Repositories\\InstaFlow-0.9B-ONNX\\vae_decoder\\model.onnx"
-          }
-        ]
+        "TokenizerConfig": {
+          "PadTokenId": 49407,
+          "BlankTokenId": 49407,
+          "TokenizerLimit": 77,
+          "TokenizerLength": 768,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
+        },
+        "TextEncoderConfig": {
+          "OnnxModelPath": "D:\\Repositories\\lcm-dreamshaper-v7-f16\\text_encoder\\model.onnx"
+        },
+        "UnetConfig": {
+          "ModelType": "Base",
+          "OnnxModelPath": "D:\\Repositories\\lcm-dreamshaper-v7-f16\\unet\\model.onnx"
+        },
+        "VaeDecoderConfig": {
+          "ScaleFactor": 0.18215,
+          "OnnxModelPath": "D:\\Repositories\\lcm-dreamshaper-v7-f16\\vae_decoder\\model.onnx"
+        },
+        "VaeEncoderConfig": {
+          "ScaleFactor": 0.18215,
+          "OnnxModelPath": "D:\\Repositories\\lcm-dreamshaper-v7-f16\\vae_encoder\\model.onnx"
+        }
       },
       {
         "Name": "Stable Diffusion XL",
         "IsEnabled": true,
-        "PadTokenId": 1,
-        "BlankTokenId": 49407,
-        "TokenizerLimit": 77,
-        "TokenizerLength": 768,
-        "Tokenizer2Length": 1280,
-        "TokenizerType": "Both",
-        "ScaleFactor": 0.13025,
         "SampleSize": 1024,
         "PipelineType": "StableDiffusionXL",
         "Diffusers": [
-          "TextToImage"
+          "TextToImage",
+          "ImageToImage",
+          "ImageInpaintLegacy"
        ],
         "DeviceId": 0,
         "InterOpNumThreads": 0,
         "IntraOpNumThreads": 0,
         "ExecutionMode": "ORT_SEQUENTIAL",
         "ExecutionProvider": "DirectML",
-        "ModelConfigurations": [
-          {
-            "Type": "Tokenizer",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\tokenizer\\model.onnx"
-          },
-          {
-            "Type": "Tokenizer2",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\tokenizer_2\\model.onnx"
-          },
-          {
-            "Type": "Unet",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\unet\\model.onnx"
-          },
-          {
-            "Type": "TextEncoder",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\text_encoder\\model.onnx"
-          },
-          {
-            "Type": "TextEncoder2",
-            "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\text_encoder_2\\model.onnx"
-          },
-          {
-            "Type": "VaeEncoder",
-            "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\vae_encoder\\model.onnx"
-          },
-          {
-            "Type": "VaeDecoder",
-            "OnnxModelPath": "D:\\Repositories\\dreamshaper-xl-1-0-Olive-Onnx\\vae_decoder\\model.onnx"
-          }
-        ]
+        "TokenizerConfig": {
+          "PadTokenId": 49407,
+          "BlankTokenId": 49407,
+          "TokenizerLimit": 77,
+          "TokenizerLength": 768,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
+        },
+        "Tokenizer2Config": {
+          "PadTokenId": 1,
+          "BlankTokenId": 49407,
+          "TokenizerLimit": 77,
+          "TokenizerLength": 1280,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
+        },
+        "TextEncoderConfig": {
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\text_encoder\\model.onnx"
+        },
+        "TextEncoder2Config": {
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\text_encoder_2\\model.onnx"
+        },
+        "UnetConfig": {
+          "ModelType": "Base",
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\unet\\model.onnx"
+        },
+        "VaeDecoderConfig": {
+          "ScaleFactor": 0.13025,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\vae_decoder\\model.onnx"
+        },
+        "VaeEncoderConfig": {
+          "ScaleFactor": 0.13025,
+          "OnnxModelPath": "D:\\Repositories\\stable-diffusion-xl-base-1.0-Olive-Onnx\\vae_encoder\\model.onnx"
+        }
       }
     ]
   },
@@ -203,12 +147,9 @@
         "IntraOpNumThreads": 0,
         "ExecutionMode": "ORT_SEQUENTIAL",
         "ExecutionProvider": "DirectML",
-        "ModelConfigurations": [
-          {
-            "Type": "Upscaler",
-            "OnnxModelPath": "D:\\Repositories\\upscaler\\SwinIR\\003_realSR_BSRGAN_DFO_s64w8_SwinIR-M_x4_GAN.onnx"
-          }
-        ]
+        "UpscaleModelConfig": {
+          "OnnxModelPath": "D:\\Repositories\\upscaler\\SwinIR\\003_realSR_BSRGAN_DFO_s64w8_SwinIR-M_x4_GAN.onnx"
+        }
       }
     ]
   }
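In the new layout a model set no longer carries a flat ModelConfigurations array plus set-wide tokenizer and VAE settings; every component gets its own named block (TokenizerConfig, Tokenizer2Config, TextEncoderConfig, TextEncoder2Config, UnetConfig, VaeDecoderConfig, VaeEncoderConfig, UpscaleModelConfig), and values such as PadTokenId, TokenizerLength and ScaleFactor move into the block they describe. Below is a minimal sketch of POCO classes that mirror this shape (e.g. for binding with Microsoft.Extensions.Configuration); the class names are illustrative assumptions, not OnnxStack's actual types - only the JSON property names come from the diff above.

// Illustrative sketch only - class names (ModelSetSettings, ComponentSettings, ...) are assumptions;
// the JSON property names are the ones visible in the appsettings.json diff.
using System.Collections.Generic;

public class ComponentSettings
{
    public string OnnxModelPath { get; set; }
}

public class TokenizerSettings : ComponentSettings
{
    public int PadTokenId { get; set; }
    public int BlankTokenId { get; set; }
    public int TokenizerLimit { get; set; }
    public int TokenizerLength { get; set; }
}

public class UnetSettings : ComponentSettings
{
    public string ModelType { get; set; }
}

public class VaeSettings : ComponentSettings
{
    public float ScaleFactor { get; set; }
}

public class ModelSetSettings
{
    public string Name { get; set; }
    public bool IsEnabled { get; set; }
    public int SampleSize { get; set; }
    public string PipelineType { get; set; }
    public List<string> Diffusers { get; set; } = new();

    // One block per component replaces the old ModelConfigurations array.
    public TokenizerSettings TokenizerConfig { get; set; }
    public TokenizerSettings Tokenizer2Config { get; set; }
    public ComponentSettings TextEncoderConfig { get; set; }
    public ComponentSettings TextEncoder2Config { get; set; }
    public UnetSettings UnetConfig { get; set; }
    public VaeSettings VaeDecoderConfig { get; set; }
    public VaeSettings VaeEncoderConfig { get; set; }
}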

OnnxStack.Core/Config/IOnnxModelSetConfig.cs

Lines changed: 0 additions & 2 deletions
@@ -1,5 +1,4 @@
 using Microsoft.ML.OnnxRuntime;
-using System.Collections.Generic;

 namespace OnnxStack.Core.Config
 {
@@ -11,6 +10,5 @@ public interface IOnnxModelSetConfig : IOnnxModel
         int IntraOpNumThreads { get; set; }
         ExecutionMode ExecutionMode { get; set; }
         ExecutionProvider ExecutionProvider { get; set; }
-        List<OnnxModelConfig> ModelConfigurations { get; set; }
     }
 }
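With the per-model list removed, the set-level interface is left holding only the session defaults that individual component configs can fall back to (see the ApplyDefaults extension further down). A sketch of the resulting interface, reconstructed from the context lines above and the members ApplyDefaults reads; any member outside those hunks is an assumption:

// Reconstructed sketch - member set inferred from the visible context lines and from ApplyDefaults.
public interface IOnnxModelSetConfig : IOnnxModel
{
    int DeviceId { get; set; }
    int InterOpNumThreads { get; set; }
    int IntraOpNumThreads { get; set; }
    ExecutionMode ExecutionMode { get; set; }
    ExecutionProvider ExecutionProvider { get; set; }
}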

OnnxStack.Core/Config/OnnxModelConfig.cs

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@

 namespace OnnxStack.Core.Config
 {
-    public class OnnxModelConfig
+    public record OnnxModelConfig
     {
         public OnnxModelType Type { get; set; }
         public string OnnxModelPath { get; set; }
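Changing OnnxModelConfig from a class to a record keeps the mutable properties but adds value-based equality and with-expression copying, which is convenient when component configs need to be compared or duplicated. A small illustrative snippet (the paths are hypothetical):

// Illustrative snippet - the model paths are made up.
using System;
using OnnxStack.Core.Config;

var a = new OnnxModelConfig { OnnxModelPath = @"D:\models\unet\model.onnx" };
var b = a with { OnnxModelPath = @"D:\models\unet_fp16\model.onnx" };   // non-destructive copy

Console.WriteLine(a == b);             // False: records compare property values, not references
Console.WriteLine(a == (a with { }));  // True: identical property values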

OnnxStack.Core/Extensions/Extensions.cs

Lines changed: 12 additions & 0 deletions
@@ -5,6 +5,7 @@
 using System.Collections.Generic;
 using System.Linq;
 using System.Numerics;
+using System.Runtime.CompilerServices;

 namespace OnnxStack.Core
 {
@@ -58,6 +59,17 @@ public static SessionOptions GetSessionOptions(this OnnxModelConfig configuratio
         }


+        public static T ApplyDefaults<T>(this T config, IOnnxModelSetConfig defaults) where T : OnnxModelConfig
+        {
+            config.DeviceId ??= defaults.DeviceId;
+            config.ExecutionMode ??= defaults.ExecutionMode;
+            config.ExecutionProvider ??= defaults.ExecutionProvider;
+            config.InterOpNumThreads ??= defaults.InterOpNumThreads;
+            config.IntraOpNumThreads ??= defaults.IntraOpNumThreads;
+            return config;
+        }
+
+
         /// <summary>
         /// Determines whether the the source sequence is null or empty
         /// </summary>
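The new ApplyDefaults extension lets a component-level OnnxModelConfig leave its session settings unset and inherit them from the owning model set; because it uses ??=, only null values are filled, so anything set explicitly on the component wins. A hedged usage sketch (modelSetConfig and unetConfig are assumed inputs, e.g. from configuration binding):

// Hedged sketch - 'modelSetConfig' (IOnnxModelSetConfig) and 'unetConfig' (an OnnxModelConfig
// whose session settings were left null) are assumptions for illustration.
var resolved = unetConfig.ApplyDefaults(modelSetConfig);   // fills only the null settings
using var sessionOptions = resolved.GetSessionOptions();   // existing extension shown in the hunk header above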

OnnxStack.Core/Extensions/OrtValueExtensions.cs

Lines changed: 7 additions & 0 deletions
@@ -114,6 +114,13 @@ public static float[] ToArray(this OrtValue ortValue)
         }


+        public static T[] ToArray<T>(this OrtValue ortValue) where T : unmanaged
+        {
+            return ortValue.GetTensorDataAsSpan<T>().ToArray();
+        }
+
+
+
         /// <summary>
         /// Converts to float16.
         /// TODO: Optimization
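The generic ToArray<T> overload copies the tensor buffer out as whatever unmanaged element type the OrtValue actually holds, so callers are no longer limited to the float-only overload above. A short hedged usage sketch (the OrtValue variables are assumed inputs):

// Hedged sketch - 'tokenValue' is assumed to wrap an Int64 tensor, 'imageValue' a Float32 tensor.
long[] tokenIds = tokenValue.ToArray<long>();   // typed copy via GetTensorDataAsSpan<T>
float[] pixels = imageValue.ToArray();          // the existing float-only overload still works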

OnnxStack.Core/LogExtensions.cs

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ private static void LogEndInternal(ILogger logger, LogLevel logLevel, string mes

         private static void LogInternal(ILogger logger, LogLevel logLevel, string message, string caller)
         {
-            logger.Log(logLevel, string.IsNullOrEmpty(caller) ? message : $"[{caller}] - {message}", args: default);
+            logger?.Log(logLevel, string.IsNullOrEmpty(caller) ? message : $"[{caller}] - {message}", args: default);
         }
     }
 }
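The only change here is the null-conditional call, which turns logging into a silent no-op when no ILogger was supplied instead of throwing. A minimal illustration:

using Microsoft.Extensions.Logging;

ILogger logger = null;
logger?.Log(LogLevel.Information, "safe");    // skipped entirely when logger is null
// logger.Log(LogLevel.Information, "boom");  // the old unconditional call threw here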

0 commit comments
