This repository was archived by the owner on Nov 27, 2024. It is now read-only.

Commit 851de21

Update README
1 parent 99ee5fd commit 851de21

2 files changed: 43 additions & 47 deletions


OnnxStack.Core/README.md

Lines changed: 6 additions & 8 deletions
@@ -38,14 +38,12 @@ The `appsettings.json` is the easiest option for configuring model sets. Below i
   "OnnxStackConfig": {
     "Name": "Clip Tokenizer",
     "TokenizerLimit": 77,
-    "ModelConfigurations": [
-      {
-        "Type": "Tokenizer",
-        "DeviceId": 0,
-        "ExecutionProvider": "Cpu",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
-      }
-    ]
+    "ModelConfigurations": [{
+      "Type": "Tokenizer",
+      "DeviceId": 0,
+      "ExecutionProvider": "Cpu",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
+    }]
   }
 }
 ```
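
The compact `[{ ... }]` form and the expanded array are the same JSON, so both deserialize identically. As a minimal sketch (not OnnxStack's actual types or startup code), the `OnnxStackConfig` section shown in the diff could be bound with the standard Microsoft.Extensions.Configuration binder; the `OnnxStackConfig` and `ModelConfiguration` classes below are illustrative POCOs that simply mirror the JSON keys:

```csharp
// Sketch only: binds the "OnnxStackConfig" section from appsettings.json.
// Assumes the Microsoft.Extensions.Configuration, .Json and .Binder packages.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

// Illustrative POCOs mirroring the keys in the diff above (not OnnxStack's own classes).
public class ModelConfiguration
{
    public string Type { get; set; }
    public bool IsDisabled { get; set; }
    public int DeviceId { get; set; }
    public string ExecutionProvider { get; set; }
    public string OnnxModelPath { get; set; }
}

public class OnnxStackConfig
{
    public string Name { get; set; }
    public int TokenizerLimit { get; set; }
    public List<ModelConfiguration> ModelConfigurations { get; set; } = new();
}

class Program
{
    static void Main()
    {
        var configuration = new ConfigurationBuilder()
            .AddJsonFile("appsettings.json", optional: false)
            .Build();

        // Binding works the same whether the array is written expanded or compact.
        var config = configuration.GetSection("OnnxStackConfig").Get<OnnxStackConfig>();
        Console.WriteLine($"{config.Name}: {config.ModelConfigurations.Count} model(s) configured");
    }
}
```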

OnnxStack.StableDiffusion/README.md

Lines changed: 37 additions & 39 deletions
@@ -158,45 +158,43 @@ Each model can be assigned to its own device, which is handy if you have only a
     "TokenizerLimit": 77,
     "EmbeddingsLength": 768,
     "ScaleFactor": 0.18215,
-    "ModelConfigurations": [
-      {
-        "Type": "Unet",
-        "DeviceId": 0,
-        "ExecutionProvider": "DirectML",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\unet\\model.onnx"
-      },
-      {
-        "Type": "Tokenizer",
-        "DeviceId": 0,
-        "ExecutionProvider": "Cpu",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
-      },
-      {
-        "Type": "TextEncoder",
-        "DeviceId": 0,
-        "ExecutionProvider": "Cpu",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\text_encoder\\model.onnx"
-      },
-      {
-        "Type": "VaeEncoder",
-        "DeviceId": 0,
-        "ExecutionProvider": "Cpu",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_encoder\\model.onnx"
-      },
-      {
-        "Type": "VaeDecoder",
-        "DeviceId": 0,
-        "ExecutionProvider": "Cpu",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_decoder\\model.onnx"
-      },
-      {
-        "Type": "SafetyChecker",
-        "IsDisabled": true,
-        "DeviceId": 0,
-        "ExecutionProvider": "Cpu",
-        "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\safety_checker\\model.onnx"
-      }
-    ]
+    "ModelConfigurations": [{
+      "Type": "Unet",
+      "DeviceId": 0,
+      "ExecutionProvider": "DirectML",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\unet\\model.onnx"
+    },
+    {
+      "Type": "Tokenizer",
+      "DeviceId": 0,
+      "ExecutionProvider": "Cpu",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\cliptokenizer.onnx"
+    },
+    {
+      "Type": "TextEncoder",
+      "DeviceId": 0,
+      "ExecutionProvider": "Cpu",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\text_encoder\\model.onnx"
+    },
+    {
+      "Type": "VaeEncoder",
+      "DeviceId": 0,
+      "ExecutionProvider": "Cpu",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_encoder\\model.onnx"
+    },
+    {
+      "Type": "VaeDecoder",
+      "DeviceId": 0,
+      "ExecutionProvider": "Cpu",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\vae_decoder\\model.onnx"
+    },
+    {
+      "Type": "SafetyChecker",
+      "IsDisabled": true,
+      "DeviceId": 0,
+      "ExecutionProvider": "Cpu",
+      "OnnxModelPath": "D:\\Repositories\\stable-diffusion-v1-5\\safety_checker\\model.onnx"
+    }]
   }
 }
 ```
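
The per-entry `ExecutionProvider` and `DeviceId` pair is what lets each model run on its own device (here only the Unet uses DirectML, the rest stay on CPU). Below is an illustrative sketch, not OnnxStack's actual loader, of how such entries could be mapped to ONNX Runtime sessions; it reuses the hypothetical `ModelConfiguration` class from the earlier sketch and assumes the Microsoft.ML.OnnxRuntime.DirectML package:

```csharp
// Sketch only: one InferenceSession per enabled "ModelConfigurations" entry,
// honoring its ExecutionProvider and DeviceId.
using System;
using System.Collections.Generic;
using Microsoft.ML.OnnxRuntime;

static class ModelLoader
{
    public static Dictionary<string, InferenceSession> LoadSessions(IEnumerable<ModelConfiguration> models)
    {
        var sessions = new Dictionary<string, InferenceSession>();
        foreach (var model in models)
        {
            if (model.IsDisabled)
                continue; // e.g. the SafetyChecker entry above

            var options = new SessionOptions();
            if (string.Equals(model.ExecutionProvider, "DirectML", StringComparison.OrdinalIgnoreCase))
                options.AppendExecutionProvider_DML(model.DeviceId); // GPU-bound component (the Unet here)
            // "Cpu" entries fall through to the default CPU execution provider.

            sessions[model.Type] = new InferenceSession(model.OnnxModelPath, options);
        }
        return sessions;
    }
}
```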
