onnxruntime-node 1.22.0-dev.20250415-c18e06d5e3 → 1.22.0-rev
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -51
- package/bin/{napi-v3 → napi-v6}/darwin/arm64/libonnxruntime.1.22.0.dylib +0 -0
- package/bin/napi-v6/darwin/arm64/onnxruntime_binding.node +0 -0
- package/bin/{napi-v3 → napi-v6}/darwin/x64/libonnxruntime.1.22.0.dylib +0 -0
- package/bin/napi-v6/darwin/x64/onnxruntime_binding.node +0 -0
- package/bin/{napi-v3 → napi-v6}/linux/arm64/libonnxruntime.so.1 +0 -0
- package/bin/napi-v6/linux/arm64/onnxruntime_binding.node +0 -0
- package/bin/{napi-v3 → napi-v6}/linux/x64/libonnxruntime.so.1 +0 -0
- package/bin/napi-v6/linux/x64/onnxruntime_binding.node +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/arm64/DirectML.dll +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/arm64/dxcompiler.dll +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/arm64/dxil.dll +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/arm64/onnxruntime.dll +0 -0
- package/bin/napi-v6/win32/arm64/onnxruntime_binding.node +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/x64/DirectML.dll +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/x64/dxcompiler.dll +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/x64/dxil.dll +0 -0
- package/bin/{napi-v3 → napi-v6}/win32/x64/onnxruntime.dll +0 -0
- package/bin/napi-v6/win32/x64/onnxruntime_binding.node +0 -0
- package/dist/binding.js +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/version.js.map +1 -1
- package/lib/backend.ts +158 -158
- package/lib/binding.ts +91 -91
- package/lib/index.ts +15 -15
- package/lib/version.ts +7 -7
- package/package.json +4 -6
- package/script/build.ts +130 -130
- package/script/install-metadata-versions.js +7 -0
- package/script/install-metadata.js +58 -0
- package/script/install-utils.js +306 -0
- package/script/install.js +134 -198
- package/script/prepack.ts +20 -20
- package/__commit.txt +0 -1
- package/bin/napi-v3/darwin/arm64/onnxruntime_binding.node +0 -0
- package/bin/napi-v3/darwin/x64/onnxruntime_binding.node +0 -0
- package/bin/napi-v3/linux/arm64/onnxruntime_binding.node +0 -0
- package/bin/napi-v3/linux/x64/onnxruntime_binding.node +0 -0
- package/bin/napi-v3/win32/arm64/onnxruntime_binding.node +0 -0
- package/bin/napi-v3/win32/x64/onnxruntime_binding.node +0 -0
package/README.md
CHANGED

@@ -1,51 +1,56 @@
-# ONNX Runtime Node.js Binding
-
-ONNX Runtime Node.js binding enables Node.js applications to run ONNX model inference.
-
-## Usage
-
-Install the latest stable version:
-
-```
-npm install onnxruntime-node
-```
-[old lines 12-51: content not recoverable from this extraction]
+# ONNX Runtime Node.js Binding
+
+ONNX Runtime Node.js binding enables Node.js applications to run ONNX model inference.
+
+## Usage
+
+Install the latest stable version:
+
+```
+npm install onnxruntime-node
+```
+
+Install the nightly version:
+
+```
+npm install onnxruntime-node@dev
+```
+
+Refer to [ONNX Runtime JavaScript examples](https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js) for samples and tutorials.
+
+## Requirements
+
+ONNXRuntime works on Node.js v16.x+ (recommend v20.x+) or Electron v15.x+ (recommend v28.x+).
+
+The following table lists the supported versions of ONNX Runtime Node.js binding provided with pre-built binaries.
+
+| EPs/Platforms | Windows x64 | Windows arm64 | Linux x64 | Linux arm64 | MacOS x64 | MacOS arm64 |
+| ------------- | ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | ------------------ |
+| CPU | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ |
+| WebGPU | ✔️ <sup>\[1]</sup> | ✔️ <sup>\[1]</sup> | ✔️ <sup>\[1]</sup> | ✔️ <sup>\[1]</sup> | ✔️ <sup>\[1]</sup> | ✔️ <sup>\[1]</sup> |
+| DirectML | ✔️ | ✔️ | ❌ | ❌ | ❌ | ❌ |
+| CUDA | ❌ | ❌ | ✔️<sup>\[2]</sup> | ❌ | ❌ | ❌ |
+| CoreML | ❌ | ❌ | ❌ | ❌ | ✔️ | ✔️ |
+
+- \[1]: WebGPU support is currently experimental.
+- \[2]: CUDA v12. See [CUDA EP Installation](#cuda-ep-installation) for details.
+
+To use on platforms without pre-built binaries, you can build Node.js binding from source and consume it by `npm install <onnxruntime_repo_root>/js/node/`. See also [instructions](https://onnxruntime.ai/docs/build/inferencing.html#apis-and-language-bindings) for building ONNX Runtime Node.js binding locally.
+
+# GPU Support
+
+Right now, the Windows version supports WebGPU execution provider and DML execution provider. Linux x64 can use CUDA and TensorRT.
+
+## CUDA EP Installation
+
+To use CUDA EP, you need to install the CUDA EP binaries. By default, the CUDA EP binaries are installed automatically when you install the package. If you want to skip the installation, you can pass the `--onnxruntime-node-install=skip` flag to the installation command.
+
+```
+npm install onnxruntime-node --onnxruntime-node-install=skip
+```
+
+~~You can also use this flag to specify the version of the CUDA: (v11 or v12)~~ CUDA v11 is no longer supported since v1.22.
+
+## License
+
+License information can be found [here](https://github.com/microsoft/onnxruntime/blob/main/README.md#license).
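For context, the API the README above documents comes down to three calls: create a session, build tensors, run. A minimal sketch follows; the model path `model.onnx` and the input name `input` are placeholders (real input names are listed on `session.inputNames`):

```ts
import * as ort from 'onnxruntime-node';

async function main() {
  // Create a session from an ONNX model file ('model.onnx' is a placeholder path).
  const session = await ort.InferenceSession.create('model.onnx');

  // Build a float32 tensor of shape [1, 4]; 'input' is a hypothetical input name.
  const feeds = {
    input: new ort.Tensor('float32', Float32Array.from([1, 2, 3, 4]), [1, 4]),
  };

  // Run inference; the result maps output names to tensors.
  const results = await session.run(feeds);
  console.log(Object.keys(results));
}

main().catch(console.error);
```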
package/dist/binding.js
CHANGED

@@ -7,7 +7,7 @@ const onnxruntime_common_1 = require("onnxruntime-common");
 // export native binding
 exports.binding =
 // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires
-require(`../bin/napi-v3/${process.platform}/${process.arch}/onnxruntime_binding.node`);
+require(`../bin/napi-v6/${process.platform}/${process.arch}/onnxruntime_binding.node`);
 let ortInitialized = false;
 const initOrt = () => {
     if (!ortInitialized) {
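The only change here is the prebuilt-binary lookup path, moving from the N-API v3 layout to N-API v6 (matching the `bin/{napi-v3 → napi-v6}` renames in the file list). A small sketch of the path that template literal resolves to at runtime; this mirrors the logic above rather than importing it:

```ts
import * as path from 'path';

// The native binding is selected per platform and architecture,
// e.g. bin/napi-v6/linux/x64/onnxruntime_binding.node.
const bindingRelativePath = path.join(
  'bin',
  'napi-v6',
  process.platform, // 'linux' | 'darwin' | 'win32' | ...
  process.arch,     // 'x64' | 'arm64' | ...
  'onnxruntime_binding.node',
);
console.log(bindingRelativePath);
```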
package/dist/version.d.ts
CHANGED

@@ -1 +1 @@
-export declare const version = "1.22.0-dev.20250415-c18e06d5e3";
+export declare const version = "1.22.0-rev";
package/dist/version.js
CHANGED

@@ -5,5 +5,5 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.version = void 0;
 // This file is generated by /js/scripts/update-version.ts
 // Do not modify file content manually.
-exports.version = '1.22.0-dev.20250415-c18e06d5e3';
+exports.version = '1.22.0-rev';
 //# sourceMappingURL=version.js.map
package/dist/version.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"version.js","sourceRoot":"","sources":["../lib/version.ts"],"names":[],"mappings":";AAAA,4DAA4D;AAC5D,kCAAkC;;;AAElC,0DAA0D;AAC1D,uCAAuC;AAE1B,QAAA,OAAO,GAAG,
+{"version":3,"file":"version.js","sourceRoot":"","sources":["../lib/version.ts"],"names":[],"mappings":";AAAA,4DAA4D;AAC5D,kCAAkC;;;AAElC,0DAA0D;AAC1D,uCAAuC;AAE1B,QAAA,OAAO,GAAG,YAAY,CAAC"}
package/lib/backend.ts
CHANGED

@@ -1,158 +1,158 @@
All 158 lines are removed and re-added with identical text (the two sides of the diff match line for line), so the content is shown once:

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

import { Backend, InferenceSession, InferenceSessionHandler, SessionHandler } from 'onnxruntime-common';

import { Binding, binding, initOrt } from './binding';

const dataTypeStrings = [
  undefined, // 0
  'float32',
  'uint8',
  'int8',
  'uint16',
  'int16',
  'int32',
  'int64',
  'string',
  'bool',
  'float16',
  'float64',
  'uint32',
  'uint64',
  undefined, // 14
  undefined, // 15
  undefined, // 16
  undefined, // 17
  undefined, // 18
  undefined, // 19
  undefined, // 20
  'uint4',
  'int4',
] as const;

class OnnxruntimeSessionHandler implements InferenceSessionHandler {
  #inferenceSession: Binding.InferenceSession;

  constructor(pathOrBuffer: string | Uint8Array, options: InferenceSession.SessionOptions) {
    initOrt();

    this.#inferenceSession = new binding.InferenceSession();
    if (typeof pathOrBuffer === 'string') {
      this.#inferenceSession.loadModel(pathOrBuffer, options);
    } else {
      this.#inferenceSession.loadModel(pathOrBuffer.buffer, pathOrBuffer.byteOffset, pathOrBuffer.byteLength, options);
    }

    // prepare input/output names and metadata
    this.inputNames = [];
    this.outputNames = [];
    this.inputMetadata = [];
    this.outputMetadata = [];

    // this function takes raw metadata from binding and returns a tuple of the following 2 items:
    // - an array of string representing names
    // - an array of converted InferenceSession.ValueMetadata
    const fillNamesAndMetadata = (
      rawMetadata: readonly Binding.ValueMetadata[],
    ): [names: string[], metadata: InferenceSession.ValueMetadata[]] => {
      const names: string[] = [];
      const metadata: InferenceSession.ValueMetadata[] = [];

      for (const m of rawMetadata) {
        names.push(m.name);
        if (!m.isTensor) {
          metadata.push({ name: m.name, isTensor: false });
        } else {
          const type = dataTypeStrings[m.type];
          if (type === undefined) {
            throw new Error(`Unsupported data type: ${m.type}`);
          }
          const shape: Array<number | string> = [];
          for (let i = 0; i < m.shape.length; ++i) {
            const dim = m.shape[i];
            if (dim === -1) {
              shape.push(m.symbolicDimensions[i]);
            } else if (dim >= 0) {
              shape.push(dim);
            } else {
              throw new Error(`Invalid dimension: ${dim}`);
            }
          }
          metadata.push({
            name: m.name,
            isTensor: m.isTensor,
            type,
            shape,
          });
        }
      }

      return [names, metadata];
    };

    [this.inputNames, this.inputMetadata] = fillNamesAndMetadata(this.#inferenceSession.inputMetadata);
    [this.outputNames, this.outputMetadata] = fillNamesAndMetadata(this.#inferenceSession.outputMetadata);
  }

  async dispose(): Promise<void> {
    this.#inferenceSession.dispose();
  }

  readonly inputNames: string[];
  readonly outputNames: string[];

  readonly inputMetadata: InferenceSession.ValueMetadata[];
  readonly outputMetadata: InferenceSession.ValueMetadata[];

  startProfiling(): void {
    // startProfiling is a no-op.
    //
    // if sessionOptions.enableProfiling is true, profiling will be enabled when the model is loaded.
  }
  endProfiling(): void {
    this.#inferenceSession.endProfiling();
  }

  async run(
    feeds: SessionHandler.FeedsType,
    fetches: SessionHandler.FetchesType,
    options: InferenceSession.RunOptions,
  ): Promise<SessionHandler.ReturnType> {
    return new Promise((resolve, reject) => {
      setImmediate(() => {
        try {
          resolve(this.#inferenceSession.run(feeds, fetches, options));
        } catch (e) {
          // reject if any error is thrown
          reject(e);
        }
      });
    });
  }
}

class OnnxruntimeBackend implements Backend {
  async init(): Promise<void> {
    return Promise.resolve();
  }

  async createInferenceSessionHandler(
    pathOrBuffer: string | Uint8Array,
    options?: InferenceSession.SessionOptions,
  ): Promise<InferenceSessionHandler> {
    return new Promise((resolve, reject) => {
      setImmediate(() => {
        try {
          resolve(new OnnxruntimeSessionHandler(pathOrBuffer, options || {}));
        } catch (e) {
          // reject if any error is thrown
          reject(e);
        }
      });
    });
  }
}

export const onnxruntimeBackend = new OnnxruntimeBackend();
export const listSupportedBackends = binding.listSupportedBackends;
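Both run() and createInferenceSessionHandler() above use the same idiom: a synchronous native call is deferred with setImmediate so the returned Promise settles on a later event-loop turn and a thrown error surfaces as a rejection. A self-contained sketch of that pattern; `deferred` and `work` are stand-in names, with `work` playing the role of the native call:

```ts
// Wrap a synchronous piece of work so callers get an async Promise:
// the work runs on a later event-loop turn, and a thrown error
// becomes a Promise rejection instead of a synchronous throw.
function deferred<T>(work: () => T): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    setImmediate(() => {
      try {
        resolve(work());
      } catch (e) {
        reject(e);
      }
    });
  });
}

// Usage: the log below runs after the current call stack unwinds.
deferred(() => 40 + 2).then((v) => console.log(v)); // 42
```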