-
Notifications
You must be signed in to change notification settings - Fork 45
Expand file tree
/
Copy pathaddon.cs
More file actions
198 lines (162 loc) · 6.19 KB
/
addon.cs
File metadata and controls
198 lines (162 loc) · 6.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Microsoft.JavaScript.NodeApi;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;
namespace winMlAddon;
/// <summary>
/// Sample C# addon for Node.js using node-api-dotnet.
/// Exposes image classification over a bundled SqueezeNet ONNX model to JavaScript.
/// </summary>
[JSExport]
public class Addon
{
    // Non-null once InitModel has completed; guarded by the null check in ClassifyImage.
    // NOTE(review): the session is never disposed — consider IDisposable if the addon
    // can be torn down before process exit.
    private InferenceSession? _inferenceSession;
    private readonly string _projectRoot;

    private Addon(string projectRoot)
    {
        _projectRoot = projectRoot;
    }

    /// <summary>
    /// Async factory: validates the project root, preloads native dependencies,
    /// and initializes the ONNX inference session for the bundled SqueezeNet model.
    /// </summary>
    /// <param name="projectRoot">Root folder containing the "models" and "winMlAddon" directories.</param>
    /// <returns>A fully initialized <see cref="Addon"/> instance.</returns>
    /// <exception cref="DirectoryNotFoundException">Thrown when <paramref name="projectRoot"/> does not exist.</exception>
    [JSExport]
    public static async Task<Addon> CreateAsync(string projectRoot)
    {
        if (!Path.Exists(projectRoot))
        {
            throw new DirectoryNotFoundException($"Project root is invalid: '{projectRoot}'.");
        }
        var addon = new Addon(projectRoot);
        addon.PreloadNativeDependencies();
        string modelPath = Path.Join(projectRoot, "models", "squeezenet1.1-7.onnx");
        await addon.InitModel(modelPath, ExecutionProviderDevicePolicy.DEFAULT, null, false, null);
        return addon;
    }

    /// <summary>
    /// Classifies an image file with the loaded model and returns the softmax predictions.
    /// </summary>
    /// <param name="imagePath">Path to the image file to classify.</param>
    /// <returns>Predictions (softmax distribution over the ImageNet labels).</returns>
    /// <exception cref="InvalidOperationException">Thrown when the model has not been loaded.</exception>
    /// <exception cref="FileNotFoundException">Thrown when <paramref name="imagePath"/> does not exist.</exception>
    [JSExport]
    public async Task<Prediction[]> ClassifyImage(string imagePath)
    {
        if (_inferenceSession == null)
        {
            throw new InvalidOperationException("Model is not loaded.");
        }
        if (!Path.Exists(imagePath))
        {
            throw new FileNotFoundException($"Image path is invalid: '{imagePath}'.", imagePath);
        }

        // Capture a non-null local so the lambda below needs no null-forgiving operator.
        var session = _inferenceSession;
        var inputName = session.InputNames[0];
        var inputMetadata = session.InputMetadata[inputName];

        // Clone: the metadata's Dimensions array belongs to the session — never mutate it in place.
        var dimensions = (int[])inputMetadata.Dimensions.Clone();
        dimensions[0] = 1; // single-image batch

        // NOTE(review): ONNX image models are typically NCHW, i.e. dims[2] = height and
        // dims[3] = width. SqueezeNet's input is square (224x224) so the order is harmless
        // here, but confirm before reusing this with non-square models.
        int inputWidth = dimensions[2];
        int inputHeight = dimensions[3];

        // Decode, preprocess and run inference off the JS thread.
        return await Task.Run(() =>
        {
            using Bitmap original = new(imagePath);
            using Bitmap resized = BitmapFunctions.ResizeBitmap(original, inputWidth, inputHeight);

            // Normalize pixels into the input tensor (mean / std-dev preprocessing).
            Tensor<float> input = new DenseTensor<float>(dimensions);
            input = BitmapFunctions.PreprocessBitmapWithStdDev(resized, input);

            var inputs = new List<NamedOnnxValue>
            {
                NamedOnnxValue.CreateFromTensor(inputName, input)
            };

            // Run inference and convert the raw output to a softmax distribution.
            using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
            IEnumerable<float> output = results[0].AsEnumerable<float>();
            return ImageNet.GetSoftmax(output);
        });
    }

    /// <summary>
    /// Initializes the Windows App Runtime for an unpackaged process via the bootstrap API.
    /// </summary>
    /// <param name="majorVersion">Major version of the Windows App SDK release.</param>
    /// <param name="minorVersion">Minor version of the Windows App SDK release.</param>
    /// <param name="versionTag">Version tag, e.g. "experimental3"; empty for stable releases.</param>
    public static void InitializeWindowsAppRuntimeInUnpackagedApp(
        int majorVersion,
        int minorVersion,
        string versionTag)
    {
        // Bootstrap expects the version packed as 0xMMMMNNNN (major in the high 16 bits).
        Microsoft.Windows.ApplicationModel.DynamicDependency.Bootstrap.Initialize(
            ((uint)majorVersion) << 16 | (uint)minorVersion,
            versionTag);
    }

    /// <summary>
    /// Ensures certified execution providers are installed and registered, then creates
    /// the inference session for <paramref name="modelPath"/>. No-op if a session exists.
    /// </summary>
    /// <param name="modelPath">Path to the ONNX model file.</param>
    /// <param name="policy">EP selection policy; takes precedence over <paramref name="epName"/>.</param>
    /// <param name="epName">Explicit execution provider name, used when no policy is given.</param>
    /// <param name="compileModel">When true and an EP is named, try to use an EP-compiled model.</param>
    /// <param name="deviceType">Optional device type passed to the named execution provider.</param>
    /// <exception cref="InvalidOperationException">Thrown when EP package installation fails.</exception>
    private async Task InitModel(string modelPath, ExecutionProviderDevicePolicy? policy, string? epName, bool compileModel, string? deviceType)
    {
        if (_inferenceSession != null)
        {
            return;
        }

        var catalog = Microsoft.Windows.AI.MachineLearning.ExecutionProviderCatalog.GetDefault();
        try
        {
            // Downloads/registers certified EP packages; result is not needed here.
            await catalog.EnsureAndRegisterCertifiedAsync();
        }
        catch (Exception ex)
        {
            // Preserve the original failure as the inner exception for diagnostics.
            throw new InvalidOperationException($"Failed to install execution provider packages: {ex.Message}", ex);
        }

        SessionOptions sessionOptions = new();
        sessionOptions.RegisterOrtExtensions();
        if (policy != null)
        {
            sessionOptions.SetEpSelectionPolicy(policy.Value);
        }
        else if (epName != null)
        {
            sessionOptions.AppendExecutionProviderFromEpName(epName, deviceType);
            if (compileModel)
            {
                // Fall back to the original model when no compiled variant is produced.
                modelPath = sessionOptions.GetCompiledModel(modelPath, epName) ?? modelPath;
            }
        }

        // Session construction is expensive (model parse + EP init); keep it off the caller's thread.
        _inferenceSession = await Task.Run(() => new InferenceSession(modelPath, sessionOptions));
    }

    /// <summary>
    /// Preloads native DLLs that ONNX Runtime depends on so later P/Invoke resolution
    /// succeeds even though they live outside the default DLL search path.
    /// </summary>
    public void PreloadNativeDependencies()
    {
        // Native binaries ship alongside the managed addon output.
        string assemblyDir = Path.Join(_projectRoot, "winMlAddon", "dist");
        Console.WriteLine($"Initializing {assemblyDir}");

        string[] dllsToLoad =
        {
            "ortextensions.dll", // extensions for ONNX Runtime
            // "Microsoft.WindowsAppRuntime.Bootstrap.dll" // if using unpackaged
        };

        foreach (var dllName in dllsToLoad)
        {
            string fullPath = Path.Combine(assemblyDir, dllName);
            Console.WriteLine($"Attempting to preload: {fullPath}");

            if (!File.Exists(fullPath))
            {
                // If it's not in the dist folder, it might be under 'runtimes/win-x64/native'.
                Console.WriteLine($"[Warning] Could not find file to preload: {fullPath}");
                continue;
            }

            // NativeLibrary.Load throws on failure and never returns IntPtr.Zero, so the
            // original zero-handle check was dead code and a bad DLL crashed the addon.
            // TryLoad lets us report the failure instead.
            if (NativeLibrary.TryLoad(fullPath, out _))
            {
                Console.WriteLine($"[Success] Pre-loaded: {dllName}");
            }
            else
            {
                Console.WriteLine($"[Error] Found but failed to load: {dllName} (Architecture mismatch?)");
            }
        }
    }
}