# OnnxStack.Core - Onnx Services for .NET Applications

OnnxStack.Core is a library that provides simplified wrappers around OnnxRuntime for loading and running ONNX models in .NET applications.

## Getting Started

OnnxStack.Core is available via the NuGet package manager; download and install it:
```
PM> Install-Package OnnxStack.Core
```
## Dependencies
Video processing support requires the FFMPEG and FFPROBE binaries; the files must be present in your output folder or at the locations configured at runtime:
```
https://ffbinaries.com/downloads
https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffmpeg-6.1-win-64.zip
https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffprobe-6.1-win-64.zip
```
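
Because missing binaries only surface once video processing is attempted, a simple startup check can fail fast with a clear message. Below is a minimal sketch, assuming the binaries sit next to the application executable; the file names and paths are illustrative, not something OnnxStack.Core prescribes:

```csharp
using System;
using System.IO;

// Verify the FFMPEG/FFPROBE binaries are present before any video processing.
// The paths below are illustrative; adjust them to wherever you placed the binaries.
var ffmpegPath = Path.Combine(AppContext.BaseDirectory, "ffmpeg.exe");
var ffprobePath = Path.Combine(AppContext.BaseDirectory, "ffprobe.exe");
if (!File.Exists(ffmpegPath) || !File.Exists(ffprobePath))
    throw new FileNotFoundException(
        "FFMPEG/FFPROBE binaries not found; download them from https://ffbinaries.com/downloads");
```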
### OnnxModelSession Example
```csharp

// CLIP Tokenizer Example
//----------------------//

// Model Configuration
var config = new OnnxModelConfig
{
    DeviceId = 0,
    InterOpNumThreads = 0,
    IntraOpNumThreads = 0,
    ExecutionMode = ExecutionMode.ORT_SEQUENTIAL,
    ExecutionProvider = ExecutionProvider.DirectML,
    OnnxModelPath = "cliptokenizer.onnx"
};

// Create Model Session
var modelSession = new OnnxModelSession(config);

// Get Metadata
var modelMetadata = await modelSession.GetMetadataAsync();

// Create Input Tensor
var text = "Text To Tokenize";
var inputTensor = new DenseTensor<string>(new string[] { text }, new int[] { 1 });

// Create Inference Parameters
using (var inferenceParameters = new OnnxInferenceParameters(modelMetadata))
{
    // Set Inputs and Outputs
    inferenceParameters.AddInputTensor(inputTensor);
    inferenceParameters.AddOutputBuffer();

    // Run Inference
    using (var results = modelSession.RunInference(inferenceParameters))
    {
        // Extract Result Tokens
        var resultData = results[0].ToArray<long>();
    }
}

```
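
The session wraps a native OnnxRuntime `InferenceSession`, so it should be released once the model is no longer needed. A minimal cleanup sketch, assuming `OnnxModelSession` implements `IDisposable` (typical for wrappers around `InferenceSession`); verify against the version you have installed:

```csharp
// Release the underlying native InferenceSession resources.
// NOTE: assumes OnnxModelSession implements IDisposable; verify against your
// installed OnnxStack.Core version.
modelSession.Dispose();
```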