onnxruntime-node 1.21.0-dev.20241112-cdc8db9984 → 1.21.0-dev.20250228-beb1a9242e

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/__commit.txt CHANGED
@@ -1 +1 @@
1
- cdc8db998475d6b259119fa89531f88a1172c3a9
1
+ beb1a9242eaf46ef885f86c75202a94b13dab428
@@ -0,0 +1,9 @@
1
+ import { Backend, InferenceSession, InferenceSessionHandler } from 'onnxruntime-common';
2
+ import { Binding } from './binding';
3
+ declare class OnnxruntimeBackend implements Backend {
4
+ init(): Promise<void>;
5
+ createInferenceSessionHandler(pathOrBuffer: string | Uint8Array, options?: InferenceSession.SessionOptions): Promise<InferenceSessionHandler>;
6
+ }
7
+ export declare const onnxruntimeBackend: OnnxruntimeBackend;
8
+ export declare const listSupportedBackends: () => Binding.SupportedBackend[];
9
+ export {};
@@ -0,0 +1,40 @@
1
+ import { InferenceSession, OnnxValue, TensorConstructor } from 'onnxruntime-common';
2
+ type SessionOptions = InferenceSession.SessionOptions;
3
+ type FeedsType = {
4
+ [name: string]: OnnxValue;
5
+ };
6
+ type FetchesType = {
7
+ [name: string]: OnnxValue | null;
8
+ };
9
+ type ReturnType = {
10
+ [name: string]: OnnxValue;
11
+ };
12
+ type RunOptions = InferenceSession.RunOptions;
13
+ /**
14
+ * Binding exports a simple synchronized inference session object wrap.
15
+ */
16
+ export declare namespace Binding {
17
+ interface InferenceSession {
18
+ loadModel(modelPath: string, options: SessionOptions): void;
19
+ loadModel(buffer: ArrayBuffer, byteOffset: number, byteLength: number, options: SessionOptions): void;
20
+ readonly inputNames: string[];
21
+ readonly outputNames: string[];
22
+ run(feeds: FeedsType, fetches: FetchesType, options: RunOptions): ReturnType;
23
+ endProfiling(): void;
24
+ dispose(): void;
25
+ }
26
+ interface InferenceSessionConstructor {
27
+ new (): InferenceSession;
28
+ }
29
+ interface SupportedBackend {
30
+ name: string;
31
+ bundled: boolean;
32
+ }
33
+ }
34
+ export declare const binding: {
35
+ InferenceSession: Binding.InferenceSessionConstructor;
36
+ listSupportedBackends: () => Binding.SupportedBackend[];
37
+ initOrtOnce: (logLevel: number, tensorConstructor: TensorConstructor) => void;
38
+ };
39
+ export declare const initOrt: () => void;
40
+ export {};
@@ -0,0 +1,2 @@
1
+ export * from 'onnxruntime-common';
2
+ export { listSupportedBackends } from './backend';
@@ -0,0 +1 @@
1
+ export declare const version = "1.21.0-dev.20250228-beb1a9242e";
package/dist/version.js CHANGED
@@ -5,5 +5,5 @@ Object.defineProperty(exports, "__esModule", { value: true });
5
5
  exports.version = void 0;
6
6
  // This file is generated by /js/scripts/update-version.ts
7
7
  // Do not modify file content manually.
8
- exports.version = '1.21.0-dev.20241112-cdc8db9984';
8
+ exports.version = '1.21.0-dev.20250228-beb1a9242e';
9
9
  //# sourceMappingURL=version.js.map
package/lib/version.ts CHANGED
@@ -4,4 +4,4 @@
4
4
  // This file is generated by /js/scripts/update-version.ts
5
5
  // Do not modify file content manually.
6
6
 
7
- export const version = '1.21.0-dev.20241112-cdc8db9984';
7
+ export const version = '1.21.0-dev.20250228-beb1a9242e';
package/package.json CHANGED
@@ -13,9 +13,10 @@
13
13
  3
14
14
  ]
15
15
  },
16
- "version": "1.21.0-dev.20241112-cdc8db9984",
16
+ "version": "1.21.0-dev.20250228-beb1a9242e",
17
17
  "dependencies": {
18
- "onnxruntime-common": "1.21.0-dev.20241026-05fbb43b34",
18
+ "global-agent": "^3.0.0",
19
+ "onnxruntime-common": "1.21.0-dev.20250206-d981b153d3",
19
20
  "tar": "^7.0.1"
20
21
  },
21
22
  "scripts": {
package/script/build.js CHANGED
@@ -82,6 +82,8 @@ const USE_TENSORRT = !!buildArgs.use_tensorrt;
82
82
  const USE_COREML = !!buildArgs.use_coreml;
83
83
  // --use_qnn
84
84
  const USE_QNN = !!buildArgs.use_qnn;
85
+ // --dll_deps=
86
+ const DLL_DEPS = buildArgs.dll_deps;
85
87
  // build path
86
88
  const ROOT_FOLDER = path.join(__dirname, '..');
87
89
  const BIN_FOLDER = path.join(ROOT_FOLDER, 'bin');
@@ -122,6 +124,9 @@ if (USE_COREML) {
122
124
  if (USE_QNN) {
123
125
  args.push('--CDUSE_QNN=ON');
124
126
  }
127
+ if (DLL_DEPS) {
128
+ args.push(`--CDORT_NODEJS_DLL_DEPS=${DLL_DEPS}`);
129
+ }
125
130
  // set CMAKE_OSX_ARCHITECTURES for macOS build
126
131
  if (os.platform() === 'darwin') {
127
132
  if (ARCH === 'x64') {
package/script/build.ts CHANGED
@@ -39,6 +39,8 @@ const USE_TENSORRT = !!buildArgs.use_tensorrt;
39
39
  const USE_COREML = !!buildArgs.use_coreml;
40
40
  // --use_qnn
41
41
  const USE_QNN = !!buildArgs.use_qnn;
42
+ // --dll_deps=
43
+ const DLL_DEPS = buildArgs.dll_deps;
42
44
 
43
45
  // build path
44
46
  const ROOT_FOLDER = path.join(__dirname, '..');
@@ -82,6 +84,9 @@ if (USE_COREML) {
82
84
  if (USE_QNN) {
83
85
  args.push('--CDUSE_QNN=ON');
84
86
  }
87
+ if (DLL_DEPS) {
88
+ args.push(`--CDORT_NODEJS_DLL_DEPS=${DLL_DEPS}`);
89
+ }
85
90
 
86
91
  // set CMAKE_OSX_ARCHITECTURES for macOS build
87
92
  if (os.platform() === 'darwin') {
package/script/install.js CHANGED
@@ -19,9 +19,16 @@
19
19
  // Step.1: Check if we should exit early
20
20
  const os = require('os');
21
21
  const fs = require('fs');
22
+ const https = require('https');
22
23
  const path = require('path');
23
24
  const tar = require('tar');
24
- const { Readable } = require('stream');
25
+ const { execFileSync } = require('child_process');
26
+ const { bootstrap: globalAgentBootstrap } = require('global-agent');
27
+
28
+ // Bootstrap global-agent to honor the proxy settings in
29
+ // environment variables, e.g. GLOBAL_AGENT_HTTPS_PROXY.
30
+ // See https://github.com/gajus/global-agent/blob/v3.0.0/README.md#environment-variables for details.
31
+ globalAgentBootstrap();
25
32
 
26
33
  // commandline flag:
27
34
  // --onnxruntime-node-install-cuda Force install the CUDA EP binaries. Try to detect the CUDA version.
@@ -58,59 +65,108 @@ if (NO_INSTALL || !shouldInstall) {
58
65
 
59
66
  // Step.2: Download the required binaries
60
67
  const artifactUrl = {
61
- 11: `https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VERSION}/onnxruntime-linux-x64-gpu-${
62
- ORT_VERSION
63
- }.tgz`,
64
- 12: `https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VERSION}/onnxruntime-linux-x64-gpu-cuda12-${
68
+ get 11() {
69
+ // TODO: support ORT Cuda v11 binaries
70
+ throw new Error(`CUDA 11 binaries are not supported by this script yet.
71
+
72
+ To use ONNX Runtime Node.js binding with CUDA v11 support, please follow the manual steps:
73
+
74
+ 1. Use "--onnxruntime-node-install-cuda=skip" to skip the auto installation.
75
+ 2. Navigate to https://aiinfra.visualstudio.com/PublicPackages/_artifacts/feed/onnxruntime-cuda-11
76
+ 3. Download the binaries for your platform and architecture
77
+ 4. Extract the following binaries to "node_modules/onnxruntime-node/bin/napi-v3/linux/x64:
78
+ - libonnxruntime_providers_tensorrt.so
79
+ - libonnxruntime_providers_shared.so
80
+ - libonnxruntime.so.${ORT_VERSION}
81
+ - libonnxruntime_providers_cuda.so
82
+ `);
83
+ },
84
+ 12: `https://github.com/microsoft/onnxruntime/releases/download/v${ORT_VERSION}/onnxruntime-linux-x64-gpu-${
65
85
  ORT_VERSION
66
86
  }.tgz`,
67
87
  }[INSTALL_CUDA_FLAG || tryGetCudaVersion()];
68
88
  console.log(`Downloading "${artifactUrl}"...`);
69
- fetch(artifactUrl).then((res) => {
70
- if (!res.ok) {
71
- throw new Error(`Failed to download the binaries: ${res.status} ${res.statusText}.
89
+
90
+ const FILES = new Set([
91
+ 'libonnxruntime_providers_tensorrt.so',
92
+ 'libonnxruntime_providers_shared.so',
93
+ `libonnxruntime.so.${ORT_VERSION}`,
94
+ 'libonnxruntime_providers_cuda.so',
95
+ ]);
96
+
97
+ downloadAndExtract(artifactUrl, BIN_FOLDER, FILES);
98
+
99
+ async function downloadAndExtract(url, dest, files) {
100
+ return new Promise((resolve, reject) => {
101
+ https.get(url, (res) => {
102
+ const { statusCode } = res;
103
+ const contentType = res.headers['content-type'];
104
+
105
+ if (statusCode === 301 || statusCode === 302) {
106
+ downloadAndExtract(res.headers.location, dest, files).then(
107
+ (value) => resolve(value),
108
+ (reason) => reject(reason),
109
+ );
110
+ return;
111
+ } else if (statusCode !== 200) {
112
+ throw new Error(`Failed to download the binaries: ${res.statusCode} ${res.statusMessage}.
72
113
 
73
114
  Use "--onnxruntime-node-install-cuda=skip" to skip the installation. You will still be able to use ONNX Runtime, but the CUDA EP will not be available.`);
74
- }
115
+ }
75
116
 
76
- // Extract the binaries
77
-
78
- const FILES = new Set([
79
- 'libonnxruntime_providers_tensorrt.so',
80
- 'libonnxruntime_providers_shared.so',
81
- `libonnxruntime.so.${ORT_VERSION}`,
82
- 'libonnxruntime_providers_cuda.so',
83
- ]);
84
-
85
- Readable.fromWeb(res.body)
86
- .pipe(
87
- tar.t({
88
- strict: true,
89
- onentry: (entry) => {
90
- const filename = path.basename(entry.path);
91
- if (entry.type === 'File' && FILES.has(filename)) {
92
- console.log(`Extracting "${filename}" to "${BIN_FOLDER}"...`);
93
- entry.pipe(fs.createWriteStream(path.join(BIN_FOLDER, filename)));
94
- entry.on('finish', () => {
95
- console.log(`Finished extracting "${filename}".`);
96
- });
97
- }
98
- },
99
- }),
100
- )
101
- .on('error', (err) => {
102
- throw new Error(`Failed to extract the binaries: ${err.message}.
117
+ if (!contentType || !/^application\/octet-stream/.test(contentType)) {
118
+ throw new Error(`unexpected content type: ${contentType}`);
119
+ }
120
+
121
+ res
122
+ .pipe(
123
+ tar.t({
124
+ strict: true,
125
+ onentry: (entry) => {
126
+ const filename = path.basename(entry.path);
127
+ if (entry.type === 'File' && files.has(filename)) {
128
+ console.log(`Extracting "${filename}" to "${dest}"...`);
129
+ entry.pipe(fs.createWriteStream(path.join(dest, filename)));
130
+ entry.on('finish', () => {
131
+ console.log(`Finished extracting "${filename}".`);
132
+ });
133
+ }
134
+ },
135
+ }),
136
+ )
137
+ .on('error', (err) => {
138
+ throw new Error(`Failed to extract the binaries: ${err.message}.
103
139
 
104
140
  Use "--onnxruntime-node-install-cuda=skip" to skip the installation. You will still be able to use ONNX Runtime, but the CUDA EP will not be available.`);
141
+ });
105
142
  });
106
- });
143
+ });
144
+ }
107
145
 
108
146
  function tryGetCudaVersion() {
109
147
  // Should only return 11 or 12.
110
148
 
111
- // TODO: try to get the CUDA version from the system ( `nvcc --version` )
149
+ // try to get the CUDA version from the system ( `nvcc --version` )
150
+ let ver = 12;
151
+ try {
152
+ const nvccVersion = execFileSync('nvcc', ['--version'], { encoding: 'utf8' });
153
+ const match = nvccVersion.match(/release (\d+)/);
154
+ if (match) {
155
+ ver = parseInt(match[1]);
156
+ if (ver !== 11 && ver !== 12) {
157
+ throw new Error(`Unsupported CUDA version: ${ver}`);
158
+ }
159
+ }
160
+ } catch (e) {
161
+ if (e?.code === 'ENOENT') {
162
+ console.warn('`nvcc` not found. Assuming CUDA 12.');
163
+ } else {
164
+ console.warn('Failed to detect CUDA version from `nvcc --version`:', e.message);
165
+ }
166
+ }
112
167
 
113
- return 11;
168
+ // assume CUDA 12 if failed to detect
169
+ return ver;
114
170
  }
115
171
 
116
172
  function parseInstallCudaFlag() {