@isidorus/cpu 0.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/README.md +47 -0
  2. package/binding.gyp +103 -0
  3. package/dist/ts/_native.d.ts +13 -0
  4. package/dist/ts/_native.d.ts.map +1 -0
  5. package/dist/ts/_native.js +22 -0
  6. package/dist/ts/_native.js.map +1 -0
  7. package/dist/ts/graph.d.ts +91 -0
  8. package/dist/ts/graph.d.ts.map +1 -0
  9. package/dist/ts/graph.js +95 -0
  10. package/dist/ts/graph.js.map +1 -0
  11. package/dist/ts/index.d.ts +47 -0
  12. package/dist/ts/index.d.ts.map +1 -0
  13. package/dist/ts/index.js +58 -0
  14. package/dist/ts/index.js.map +1 -0
  15. package/dist/ts/inference-pool.d.ts +84 -0
  16. package/dist/ts/inference-pool.d.ts.map +1 -0
  17. package/dist/ts/inference-pool.js +625 -0
  18. package/dist/ts/inference-pool.js.map +1 -0
  19. package/dist/ts/inference_pool.d.ts +99 -0
  20. package/dist/ts/inference_pool.d.ts.map +1 -0
  21. package/dist/ts/inference_pool.js +370 -0
  22. package/dist/ts/inference_pool.js.map +1 -0
  23. package/dist/ts/install-libtensorflow.d.ts +34 -0
  24. package/dist/ts/install-libtensorflow.d.ts.map +1 -0
  25. package/dist/ts/install-libtensorflow.js +254 -0
  26. package/dist/ts/install-libtensorflow.js.map +1 -0
  27. package/dist/ts/ops/array_ops.d.ts +29 -0
  28. package/dist/ts/ops/array_ops.d.ts.map +1 -0
  29. package/dist/ts/ops/array_ops.js +54 -0
  30. package/dist/ts/ops/array_ops.js.map +1 -0
  31. package/dist/ts/ops/index.d.ts +5 -0
  32. package/dist/ts/ops/index.d.ts.map +1 -0
  33. package/dist/ts/ops/index.js +5 -0
  34. package/dist/ts/ops/index.js.map +1 -0
  35. package/dist/ts/ops/math_ops.d.ts +96 -0
  36. package/dist/ts/ops/math_ops.d.ts.map +1 -0
  37. package/dist/ts/ops/math_ops.js +277 -0
  38. package/dist/ts/ops/math_ops.js.map +1 -0
  39. package/dist/ts/ops/nn_ops.d.ts +130 -0
  40. package/dist/ts/ops/nn_ops.d.ts.map +1 -0
  41. package/dist/ts/ops/nn_ops.js +340 -0
  42. package/dist/ts/ops/nn_ops.js.map +1 -0
  43. package/dist/ts/ops/variable_ops.d.ts +128 -0
  44. package/dist/ts/ops/variable_ops.d.ts.map +1 -0
  45. package/dist/ts/ops/variable_ops.js +267 -0
  46. package/dist/ts/ops/variable_ops.js.map +1 -0
  47. package/dist/ts/session.d.ts +83 -0
  48. package/dist/ts/session.d.ts.map +1 -0
  49. package/dist/ts/session.js +81 -0
  50. package/dist/ts/session.js.map +1 -0
  51. package/package.json +63 -0
  52. package/scripts/install.js +100 -0
  53. package/scripts/test-install.js +82 -0
  54. package/scripts/test.js +45 -0
  55. package/src/native/addon.cc +12 -0
  56. package/src/native/graph.cc +442 -0
  57. package/src/native/graph.h +52 -0
  58. package/src/native/platform_tf.h +8 -0
  59. package/src/native/session.cc +716 -0
  60. package/src/native/session.h +92 -0
@@ -0,0 +1,267 @@
1
+ import { DType, shapeToTF } from "@isidorus/core";
2
+ import { constant } from "./array_ops.js";
3
+ // ---------------------------------------------------------------------------
4
+ // variable_ops — variable lifecycle and initialisation
5
+ //
6
+ // TF variable lifecycle in the graph-first model:
7
+ //
8
+ // 1. variable() → adds a VarHandleOp to the graph
9
+ // returns a Tensor (the variable handle)
10
+ // 2. initializer() → adds an AssignVariableOp feeding the handle
11
+ // returns the assign op name (run as a target)
12
+ // 3. readVariable() → adds a ReadVariableOp
13
+ // returns the current value as a Tensor
14
+ // 4. assignVariable() → adds an AssignVariableOp for a new value
15
+ // returns the assign op name (run as a target)
16
+ //
17
+ // In a Session:
18
+ // sess.run([], [], ["init_all"]) ← run initializer targets
19
+ // sess.run([[x, feed]], [y]) ← inference
20
+ // sess.run([[x, feed]], [], ["step"]) ← training step (assign ops)
21
+ // ---------------------------------------------------------------------------
22
+ // ── Variable handle ───────────────────────────────────────────────────────────
23
/**
 * variable — adds a VarHandleOp to the graph and returns the resulting
 * resource-variable handle Tensor.
 *
 * The handle carries no data by itself: pair it with initializer /
 * readVariable / assignVariable for the full variable lifecycle.
 *
 * @param g       Graph to add the op to
 * @param shape   Shape of the variable (null dims = dynamic)
 * @param dtype   Element type
 * @param varName Logical name for the variable (used in checkpoints)
 * @returns the VarHandleOp output Tensor
 */
export function variable(g, shape, dtype, varName) {
  const attrs = {
    dtype: { kind: "type", value: dtype },
    shape: { kind: "shape", value: shapeToTF(shape) },
    // container / shared_name string attrs are left at their defaults; the
    // op name itself serves as the variable name, which TF uses when
    // resolving checkpoint keys.
  };
  const outputs = g.addOp("VarHandleOp", [], attrs, varName);
  return outputs[0];
}
42
/**
 * readVariable — emits a ReadVariableOp that reads the variable's current
 * value. The returned Tensor can feed any downstream op.
 *
 * @param g      Graph to add the op to
 * @param handle VarHandleOp output identifying the variable
 * @param dtype  Element type of the stored value
 * @param name   Optional explicit op name
 */
export function readVariable(g, handle, dtype, name) {
  const attrs = { dtype: { kind: "type", value: dtype } };
  const outputs = g.addOp("ReadVariableOp", [handle], attrs, name);
  return outputs[0];
}
52
/**
 * assignVariable — emits an AssignVariableOp writing `value` into the
 * variable identified by `handle`.
 *
 * The return value is the op name, not a Tensor: pass it as a *target* in
 * sess.run() to execute the assignment as a side-effect.
 *
 * @example
 *   const updateOp = ops.assignVariable(g, wHandle, newW, DType.FLOAT32);
 *   await sess.run(feeds, [], [updateOp]);
 */
export function assignVariable(g, handle, value, dtype, name) {
  const target = name ?? `AssignVariableOp_${handle.opName}`;
  g.addOp(
    "AssignVariableOp",
    [handle, value],
    { dtype: { kind: "type", value: dtype } },
    target,
  );
  return target;
}
70
/**
 * assignAdd — emits an AssignAddVariableOp (variable += delta).
 * Returns the op name to run as a sess.run() target.
 */
export function assignAdd(g, handle, delta, dtype, name) {
  const target = name ?? `AssignAddVariableOp_${handle.opName}`;
  g.addOp(
    "AssignAddVariableOp",
    [handle, delta],
    { dtype: { kind: "type", value: dtype } },
    target,
  );
  return target;
}
81
/**
 * assignSub — emits an AssignSubVariableOp (variable -= delta).
 * Returns the op name to run as a sess.run() target.
 */
export function assignSub(g, handle, delta, dtype, name) {
  const target = name ?? `AssignSubVariableOp_${handle.opName}`;
  g.addOp(
    "AssignSubVariableOp",
    [handle, delta],
    { dtype: { kind: "type", value: dtype } },
    target,
  );
  return target;
}
92
+ // ── Initializers ──────────────────────────────────────────────────────────────
93
/**
 * zerosInitializer — builds a constant all-zero tensor matching `shape`
 * and `dtype`. Use with initializer() to zero-initialise a variable.
 */
export function zerosInitializer(g, shape, dtype, name) {
  // Bytes per element. Anything not listed (float32/int32/…) uses 4 bytes.
  let itemSize = 4;
  if (
    dtype === DType.FLOAT64 ||
    dtype === DType.INT64 ||
    dtype === DType.UINT64
  ) {
    itemSize = 8;
  } else if (dtype === DType.INT16 || dtype === DType.UINT16) {
    itemSize = 2;
  } else if (dtype === DType.INT8 || dtype === DType.UINT8) {
    itemSize = 1;
  }
  // Element count; an empty shape yields 1 (scalar).
  let count = 1;
  for (const dim of shape) count *= dim;
  const zeros = Buffer.alloc(count * itemSize, 0);
  return constant(g, zeros, shape, dtype, name);
}
111
/**
 * onesInitializer — builds a constant tensor of ones matching `shape` and
 * `dtype`.
 *
 * Fix: the previous implementation always wrote 4-byte little-endian
 * float32 ones regardless of `dtype`, producing wrongly-sized/encoded
 * buffers for every other element type (compare zerosInitializer, which
 * sizes per dtype). Each supported dtype now gets a correctly encoded
 * buffer; float32 behaviour is unchanged.
 */
export function onesInitializer(g, shape, dtype, name) {
  const n = shape.reduce((a, b) => a * b, 1);
  let buf;
  if (dtype === DType.FLOAT64) {
    buf = Buffer.allocUnsafe(n * 8);
    for (let i = 0; i < n; i++) buf.writeDoubleLE(1, i * 8);
  } else if (dtype === DType.INT64 || dtype === DType.UINT64) {
    buf = Buffer.allocUnsafe(n * 8);
    for (let i = 0; i < n; i++) buf.writeBigInt64LE(1n, i * 8);
  } else if (dtype === DType.INT32) {
    buf = Buffer.allocUnsafe(n * 4);
    for (let i = 0; i < n; i++) buf.writeInt32LE(1, i * 4);
  } else if (dtype === DType.INT16 || dtype === DType.UINT16) {
    buf = Buffer.allocUnsafe(n * 2);
    for (let i = 0; i < n; i++) buf.writeInt16LE(1, i * 2);
  } else if (dtype === DType.INT8 || dtype === DType.UINT8) {
    // Single-byte elements: every byte is the value 1.
    buf = Buffer.alloc(n, 1);
  } else {
    // FLOAT32 and any dtype not handled above (the previous behaviour).
    buf = Buffer.allocUnsafe(n * 4);
    for (let i = 0; i < n; i++) buf.writeFloatLE(1, i * 4);
  }
  return constant(g, buf, shape, dtype, name);
}
121
/**
 * truncatedNormalInitializer — samples from a truncated normal distribution.
 * Values more than 2 stddevs from the mean are resampled.
 * This is Xavier/Glorot-style initialisation when stddev = sqrt(2/fan_in).
 *
 * NOTE: TF's TruncatedNormal op produces a different value on every run
 * unless a fixed seed is provided. For reproducible initialisation, run
 * the init op once and save the checkpoint.
 *
 * NOTE(review): TF's raw `TruncatedNormal` op def declares only
 * seed/seed2/dtype/T attrs and emits *standard* (mean 0, stddev 1)
 * samples — mean/stddev are inputs of `ParameterizedTruncatedNormal`,
 * not attrs of this op. Confirm against the TF op registry whether the
 * extra attrs below are rejected at graph-construction time or silently
 * ignored (in which case `options.mean`/`options.stddev` have no effect).
 *
 * @param g       Graph to add ops to
 * @param shape   Output shape (also emitted as an int32 shape constant)
 * @param dtype   Element type of the samples (default FLOAT32)
 * @param options { mean?, stddev?, seed? } — see NOTE(review) above
 * @param name    Optional explicit op name
 */
export function truncatedNormalInitializer(g, shape, dtype = DType.FLOAT32, options = {}, name) {
  // The target shape is fed as an int32 constant tensor (TF convention
  // for shape inputs).
  const shapeBuf = Buffer.allocUnsafe(shape.length * 4);
  shape.forEach((d, i) => shapeBuf.writeInt32LE(d, i * 4));
  const shapeConst = constant(g, shapeBuf, [shape.length], DType.INT32);
  const [t] = g.addOp("TruncatedNormal", [shapeConst], {
    dtype: { kind: "type", value: dtype },
    mean: { kind: "float", value: options.mean ?? 0.0 },
    stddev: { kind: "float", value: options.stddev ?? 1.0 },
    seed: { kind: "int", value: options.seed ?? 0 },
    seed2: { kind: "int", value: 0 }, // second seed fixed at 0
  }, name);
  return t;
}
143
/**
 * glorotUniformInitializer — samples from Uniform(-limit, limit)
 * where limit = sqrt(6 / (fan_in + fan_out)).
 *
 * For a weight matrix [fan_in, fan_out]:
 *   fan_in  = shape[0]
 *   fan_out = shape[1]
 *
 * Fix: `dtype` was previously accepted but ignored — the buffer was always
 * encoded as 4-byte float32. FLOAT64 is now encoded correctly; the FLOAT32
 * default is unchanged.
 *
 * NOTE(review): sampling uses Math.random(), so initial weights are not
 * reproducible across runs (no seed support) — confirm this is acceptable.
 */
export function glorotUniformInitializer(g, shape, // [fan_in, fan_out]
dtype = DType.FLOAT32, name) {
  const [fanIn, fanOut] = shape;
  const limit = Math.sqrt(6 / (fanIn + fanOut));
  const n = fanIn * fanOut;
  const sample = () => Math.random() * 2 * limit - limit;
  let buf;
  if (dtype === DType.FLOAT64) {
    buf = Buffer.allocUnsafe(n * 8);
    for (let i = 0; i < n; i++) buf.writeDoubleLE(sample(), i * 8);
  } else {
    // FLOAT32 (default) and any other dtype: previous float32 encoding.
    buf = Buffer.allocUnsafe(n * 4);
    for (let i = 0; i < n; i++) buf.writeFloatLE(sample(), i * 4);
  }
  return constant(g, buf, shape, dtype, name);
}
162
+ // ── Variable + initializer convenience ───────────────────────────────────────
163
/**
 * variableWithInit — convenience that creates a variable handle together
 * with its initialisation op.
 *
 * @returns { handle, initOp }
 *   handle: the VarHandleOp tensor (use with readVariable / assignVariable)
 *   initOp: op name to pass as a target in sess.run() to initialise
 *
 * @example
 *   const { handle: w, initOp: wInit } = ops.variableWithInit(
 *     g, [784, 128], DType.FLOAT32, "weights",
 *     ops.glorotUniformInitializer(g, [784, 128])
 *   );
 *   const { handle: b, initOp: bInit } = ops.variableWithInit(
 *     g, [128], DType.FLOAT32, "bias",
 *     ops.zerosInitializer(g, [128], DType.FLOAT32)
 *   );
 *
 *   // Run init once before first training step:
 *   await sess.run([], [], [wInit, bInit]);
 */
export function variableWithInit(g, shape, dtype, varName, initialValue) {
  const handle = variable(g, shape, dtype, varName);
  return {
    handle,
    // The init op is named "<var>/init" so it is easy to spot in the graph.
    initOp: assignVariable(g, handle, initialValue, dtype, `${varName}/init`),
  };
}
189
/**
 * globalVariablesInitializer — groups all init ops into a single NoOp target.
 *
 * @param initOps Array of init op names returned by variableWithInit()
 * @returns Op name to pass as a single target to sess.run()
 *
 * @example
 *   const initAll = ops.globalVariablesInitializer(g, [wInit, bInit]);
 *   await sess.run([], [], [initAll]);
 */
export function globalVariablesInitializer(g, initOps, name = "init_all_variables") {
  // Intended design: a NoOp whose control dependencies are the init ops;
  // TF runs all control deps before executing the NoOp target.
  //
  // NOTE(review): `initOps` is currently unused — wiring control edges
  // needs control_inputs support in the native addOp (graph.cc), which
  // does not exist yet. Until then this target does NOT actually trigger
  // the init ops; pass the individual init op names as targets instead.
  const outputs = g.addOp("NoOp", [], {}, name);
  return outputs[0].opName;
}
210
+ // ── Optimizer update ops ──────────────────────────────────────────────────────
211
+ //
212
+ // These are low-level building blocks. High-level optimizers (SGD, Adam)
213
+ // are built on top in a separate optimizers/ module once the gradient
214
+ // infrastructure is in place.
215
+ //
216
+ // Each returns an op name to run as a target (side-effect, no output).
217
/**
 * applyGradientDescent — w -= lr * grad.
 * The simplest parameter update step; returns the op name to run as a
 * sess.run() target (side-effect, no output).
 *
 * @param g      Graph to add the op to
 * @param handle Variable handle to update
 * @param lr     Scalar learning-rate tensor
 * @param grad   Gradient tensor
 * @param dtype  Element type
 * @param name   Optional explicit op name
 */
export function applyGradientDescent(g, handle, lr, // scalar learning rate
grad, dtype, name) {
  const target = name ?? `ApplyGradientDescent_${handle.opName}`;
  const attrs = {
    T: { kind: "type", value: dtype },
    use_locking: { kind: "bool", value: false },
  };
  g.addOp("ResourceApplyGradientDescent", [handle, lr, grad], attrs, target);
  return target;
}
230
/**
 * applyAdam — one Adam parameter update step.
 *
 * Requires four variable handles: var, m (first moment), v (second moment),
 * and beta1_power, beta2_power scalars (updated separately).
 * Returns the op name to run as a sess.run() target.
 */
export function applyAdam(g, handle, mHandle, vHandle, beta1Power, beta2Power, lr, beta1, beta2, epsilon, grad, dtype, name) {
  const target = name ?? `ApplyAdam_${handle.opName}`;
  // Input order mandated by the ResourceApplyAdam op def.
  const inputs = [
    handle, mHandle, vHandle,
    beta1Power, beta2Power,
    lr, beta1, beta2, epsilon,
    grad,
  ];
  const attrs = {
    T: { kind: "type", value: dtype },
    use_locking: { kind: "bool", value: false },
    use_nesterov: { kind: "bool", value: false },
  };
  g.addOp("ResourceApplyAdam", inputs, attrs, target);
  return target;
}
256
/**
 * applyRMSProp — one RMSProp parameter update step.
 * Returns the op name to run as a sess.run() target.
 */
export function applyRMSProp(g, handle, msHandle, momHandle, lr, rho, momentum, epsilon, grad, dtype, name) {
  const target = name ?? `ApplyRMSProp_${handle.opName}`;
  // Input order mandated by the ResourceApplyRMSProp op def.
  const inputs = [handle, msHandle, momHandle, lr, rho, momentum, epsilon, grad];
  const attrs = {
    T: { kind: "type", value: dtype },
    use_locking: { kind: "bool", value: false },
  };
  g.addOp("ResourceApplyRMSProp", inputs, attrs, target);
  return target;
}
267
+ //# sourceMappingURL=variable_ops.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"variable_ops.js","sourceRoot":"","sources":["../../../src/ts/ops/variable_ops.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAElD,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAE1C,8EAA8E;AAC9E,uDAAuD;AACvD,EAAE;AACF,kDAAkD;AAClD,EAAE;AACF,0DAA0D;AAC1D,iEAAiE;AACjE,sEAAsE;AACtE,uEAAuE;AACvE,gDAAgD;AAChD,gEAAgE;AAChE,mEAAmE;AACnE,uEAAuE;AACvE,EAAE;AACF,gBAAgB;AAChB,iEAAiE;AACjE,mDAAmD;AACnD,qEAAqE;AACrE,8EAA8E;AAE9E,iFAAiF;AAEjF;;;;;;;;;GASG;AACH,MAAM,UAAU,QAAQ,CACtB,CAAQ,EACR,KAAY,EACZ,KAAY,EACZ,OAAe;IAEf,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CACtB,aAAa,EACb,EAAE,EACF;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACrC,KAAK,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,CAAC,KAAK,CAAC,EAAE;QACjD,kEAAkE;QAClE,oEAAoE;KACrE,EACD,OAAO,CACR,CAAC;IACF,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAC1B,CAAQ,EACR,MAAc,EACd,KAAY,EACZ,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,gBAAgB,EAChB,CAAC,MAAM,CAAC,EACR;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KACtC,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,cAAc,CAC5B,CAAQ,EACR,MAAc,EACd,KAAa,EACb,KAAY,EACZ,IAAa;IAEb,MAAM,MAAM,GAAG,IAAI,IAAI,oBAAoB,MAAM,CAAC,MAAM,EAAE,CAAC;IAC3D,CAAC,CAAC,KAAK,CACL,kBAAkB,EAClB,CAAC,MAAM,EAAE,KAAK,CAAC,EACf;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KACtC,EACD,MAAM,CACP,CAAC;IACF,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,MAAc,EACd,KAAa,EACb,KAAY,EACZ,IAAa;IAEb,MAAM,MAAM,GAAG,IAAI,IAAI,uBAAuB,MAAM,CAAC,MAAM,EAAE,CAAC;IAC9D,CAAC,CAAC,KAAK,CACL,qBAAqB,EACrB,CAAC,MAAM,EAAE,KAAK,CAAC,EACf;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KACtC,EACD,MAAM,CACP,CAAC;IACF,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,MAAc,EACd,KAAa,EACb,KAAY,EACZ,IAAa;IAEb,MAAM,MAAM,GAAG,IAAI,IAAI,uBAAuB,MAAM,CAAC,MAAM,EAAE,CAAC;IAC9D,CAAC,CAAC,KAAK,CACL,qBAAqB,EACrB,CAAC,MAAM,EAAE,KAAK,CAAC,EACf
;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KACtC,EACD,MAAM,CACP,CAAC;IACF,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,iFAAiF;AAEjF;;;GAGG;AACH,MAAM,UAAU,gBAAgB,CAC9B,CAAQ,EACR,KAAe,EACf,KAAY,EACZ,IAAa;IAEb,MAAM,QAAQ,GACZ,KAAK,KAAK,KAAK,CAAC,OAAO;QACrB,CAAC,CAAC,CAAC;QACH,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,IAAI,KAAK,KAAK,KAAK,CAAC,MAAM;YACjD,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,IAAI,KAAK,KAAK,KAAK,CAAC,MAAM;gBACjD,CAAC,CAAC,CAAC;gBACH,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,IAAI,IAAI,KAAK,KAAK,KAAK,CAAC,KAAK;oBAC/C,CAAC,CAAC,CAAC;oBACH,CAAC,CAAC,CAAC,CAAC;IACR,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IAC3C,MAAM,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,QAAQ,EAAE,CAAC,CAAC,CAAC;IAC1C,OAAO,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,eAAe,CAC7B,CAAQ,EACR,KAAe,EACf,KAAY,EACZ,IAAa;IAEb,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IAC3C,MAAM,GAAG,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IACtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;QAAE,GAAG,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;IACvD,OAAO,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,0BAA0B,CACxC,CAAQ,EACR,KAAe,EACf,QAAe,KAAK,CAAC,OAAO,EAC5B,UAA6D,EAAE,EAC/D,IAAa;IAEb,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACtD,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IACzD,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;IAEtE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,iBAAiB,EACjB,CAAC,UAAU,CAAC,EACZ;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACrC,IAAI,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,IAAI,IAAI,GAAG,EAAE;QACnD,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,
OAAO,CAAC,MAAM,IAAI,GAAG,EAAE;QACvD,IAAI,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,CAAC,IAAI,IAAI,CAAC,EAAE;QAC/C,KAAK,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE;KACjC,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,wBAAwB,CACtC,CAAQ,EACR,KAAuB,EAAE,oBAAoB;AAC7C,QAAe,KAAK,CAAC,OAAO,EAC5B,IAAa;IAEb,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,KAAK,CAAC;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC;IAC9C,MAAM,CAAC,GAAG,KAAK,GAAG,MAAM,CAAC;IACzB,MAAM,GAAG,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IACtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC3B,GAAG,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,KAAK,GAAG,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;IAC7D,CAAC;IACD,OAAO,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED,gFAAgF;AAEhF;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,MAAM,UAAU,gBAAgB,CAC9B,CAAQ,EACR,KAAY,EACZ,KAAY,EACZ,OAAe,EACf,YAAoB;IAEpB,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,cAAc,CAC3B,CAAC,EACD,MAAM,EACN,YAAY,EACZ,KAAK,EACL,GAAG,OAAO,OAAO,CAClB,CAAC;IACF,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC;AAC5B,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,0BAA0B,CACxC,CAAQ,EACR,OAAiB,EACjB,IAAI,GAAG,oBAAoB;IAE3B,kDAAkD;IAClD,6DAA6D;IAC7D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,MAAM,EACN,EAAE,EACF;IACE,4EAA4E;IAC5E,yEAAyE;IACzE,2EAA2E;IAC3E,wDAAwD;KACzD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC,MAAM,CAAC;AAClB,CAAC;AAED,iFAAiF;AACjF,EAAE;AACF,yEAAyE;AACzE,sEAAsE;AACtE,8BAA8B;AAC9B,EAAE;AACF,uEAAuE;AAEvE;;;GAGG;AACH,MAAM,UAAU,oBAAoB,CAClC,CAAQ,EACR,MAAc,EACd,EAAU,EAAE,uBAAuB;AACnC,IAAY,EACZ,KAAY,EACZ,IAAa;IAEb,MAAM,MAAM,GAAG,IAAI,IAAI,wBAAwB,MAAM,CAAC,MAAM,EAAE,CAAC;IAC/D,CAAC,CAAC,KAAK,CACL,8BAA8B,EAC9B,CAAC,MAAM,EAAE,EAAE,EAAE,IAAI,CAAC,EAClB;QACE,CAAC,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACjC,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KAC5C,EACD,MAAM,CACP,CAAC;IACF,OAAO,MAAM,C
AAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,MAAc,EACd,OAAe,EACf,OAAe,EACf,UAAkB,EAClB,UAAkB,EAClB,EAAU,EACV,KAAa,EACb,KAAa,EACb,OAAe,EACf,IAAY,EACZ,KAAY,EACZ,IAAa;IAEb,MAAM,MAAM,GAAG,IAAI,IAAI,aAAa,MAAM,CAAC,MAAM,EAAE,CAAC;IACpD,CAAC,CAAC,KAAK,CACL,mBAAmB,EACnB;QACE,MAAM;QACN,OAAO;QACP,OAAO;QACP,UAAU;QACV,UAAU;QACV,EAAE;QACF,KAAK;QACL,KAAK;QACL,OAAO;QACP,IAAI;KACL,EACD;QACE,CAAC,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACjC,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QAC3C,YAAY,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KAC7C,EACD,MAAM,CACP,CAAC;IACF,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,YAAY,CAC1B,CAAQ,EACR,MAAc,EACd,QAAgB,EAChB,SAAiB,EACjB,EAAU,EACV,GAAW,EACX,QAAgB,EAChB,OAAe,EACf,IAAY,EACZ,KAAY,EACZ,IAAa;IAEb,MAAM,MAAM,GAAG,IAAI,IAAI,gBAAgB,MAAM,CAAC,MAAM,EAAE,CAAC;IACvD,CAAC,CAAC,KAAK,CACL,sBAAsB,EACtB,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,EAAE,GAAG,EAAE,QAAQ,EAAE,OAAO,EAAE,IAAI,CAAC,EAC/D;QACE,CAAC,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACjC,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KAC5C,EACD,MAAM,CACP,CAAC;IACF,OAAO,MAAM,CAAC;AAChB,CAAC"}
@@ -0,0 +1,83 @@
1
+ import type { Tensor } from "@isidorus/core";
2
+ import { DType } from "@isidorus/core";
3
/**
 * Options for Session construction.
 *
 * Thread counts control how TensorFlow uses CPU cores internally.
 * The right values depend on how many concurrent runAsync() calls you expect:
 *
 * Single inference (Worker thread):
 *   intraOpThreads: 0, interOpThreads: 0 — let TF use all cores
 *
 * Concurrent runAsync() on main thread (default):
 *   intraOpThreads: 2, interOpThreads: 2 — share cores across requests
 *
 * Formula for N concurrent requests on C cores:
 *   intraOpThreads = Math.max(1, Math.floor(C / N))
 *   interOpThreads = Math.max(1, Math.floor(C / N))
 *
 * NOTE(review): this interface is not referenced by the Session class
 * declared below — presumably it is consumed by the native session
 * factory; confirm against the native binding's constructor.
 */
export interface SessionOptions {
  /**
   * Per-op parallelism — threads used within a single op (e.g. matmul tiles).
   * Default: 2. Set to 0 to let TF choose automatically.
   */
  intraOpThreads?: number;
  /**
   * Graph-level parallelism — threads used to run independent ops concurrently.
   * Default: 2. Set to 0 to let TF choose automatically.
   */
  interOpThreads?: number;
}
31
/** Raw tensor value passed as a feed. */
export interface FeedValue {
  /** Element type of the data buffer. */
  dtype: DType;
  /** Tensor dimensions; product of dims × element size must match data length. */
  shape: number[];
  /** Raw element bytes — presumably little-endian, matching the op builders; confirm against the native session code. */
  data: Buffer | Uint8Array;
}
37
/** Result from a fetched tensor output. */
export interface TensorValue {
  /** Element type of the returned data. */
  dtype: DType;
  /** Tensor dimensions as reported by TF after the run. */
  shape: number[];
  /** Raw element bytes owned by the caller. */
  data: Buffer;
}
43
/**
 * Session — executes a Graph.
 *
 * A Session holds a TF_Session backed by the graph passed at construction.
 * Once created the graph's structure should not change (new ops may be added
 * for initialisation ops, but existing ops should not be removed).
 *
 * @example
 *   const sess = new Session(graph);
 *   await sess.run([], [], ["init"]);       // run init op (side-effect)
 *   const [output] = await sess.runAsync(   // fetch result
 *     [[x, inputFeed]],
 *     [y],
 *   );
 */
export declare class Session {
  /** @internal Handle to the native session object; untyped N-API binding. */
  readonly _native: any;
  constructor(native: any);
  /**
   * Synchronous inference — blocks the event loop during TF_SessionRun.
   * Use on Worker threads. On the main thread prefer runAsync().
   *
   * NOTE(review): despite "Synchronous", the declared return type is a
   * Promise — the native call blocks the loop but the JS wrapper is async;
   * confirm the wording against the implementation.
   *
   * @param feeds   Array of [Tensor, FeedValue] pairs
   * @param fetches Tensors to compute and return
   * @param targets Op names to run for side-effects only (e.g. "init_all")
   */
  run(feeds: [Tensor, FeedValue][], fetches: Tensor[], targets?: string[]): Promise<TensorValue[]>;
  /**
   * Non-blocking inference — TF_SessionRun on the libuv thread pool.
   * The event loop stays free for I/O and timers during compute.
   *
   * @param feeds   Array of [Tensor, FeedValue] pairs
   * @param fetches Tensors to compute and return
   * @param targets Op names to run for side-effects only
   */
  runAsync(feeds: [Tensor, FeedValue][], fetches: Tensor[], targets?: string[]): Promise<TensorValue[]>;
  /** Close the session and release all C++ resources. */
  destroy(): void;
}
83
+ //# sourceMappingURL=session.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"session.d.ts","sourceRoot":"","sources":["../../src/ts/session.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAEvC;;;;;;;;;;;;;;;GAeG;AACH,MAAM,WAAW,cAAc;IAC7B;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED,yCAAyC;AACzC,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,KAAK,CAAC;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,MAAM,GAAG,UAAU,CAAC;CAC3B;AAED,2CAA2C;AAC3C,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,KAAK,CAAC;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,IAAI,EAAE,MAAM,CAAC;CACd;AA+BD;;;;;;;;;;;;;;GAcG;AACH,qBAAa,OAAO;IAClB,gBAAgB;IAChB,QAAQ,CAAC,OAAO,EAAE,GAAG,CAAC;gBAEV,MAAM,EAAE,GAAG;IAIvB;;;;;;;OAOG;IACG,GAAG,CACP,KAAK,EAAE,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,EAC5B,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,GAAE,MAAM,EAAO,GACrB,OAAO,CAAC,WAAW,EAAE,CAAC;IAWzB;;;;;;;OAOG;IACG,QAAQ,CACZ,KAAK,EAAE,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,EAC5B,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,GAAE,MAAM,EAAO,GACrB,OAAO,CAAC,WAAW,EAAE,CAAC;IAWzB,uDAAuD;IACvD,OAAO,IAAI,IAAI;CAGhB"}
@@ -0,0 +1,81 @@
1
/** Convert a Tensor to the native feed format. */
function toNativeFeed(t, v) {
  const raw = v.data;
  // The native layer expects a Node Buffer; wrap (not copy) a typed array
  // view when one is supplied.
  const data = Buffer.isBuffer(raw)
    ? raw
    : Buffer.from(raw.buffer, raw.byteOffset, raw.byteLength);
  return {
    opName: t.opName,
    index: t.index,
    tensor: { dtype: Number(v.dtype), shape: v.shape, data },
  };
}
15
/** Convert a Tensor to the native fetch format. */
function toNativeFetch({ opName, index }) {
  return { opName, index };
}
19
/** Parse native output back to TensorValue. */
function fromNativeOutput(raw) {
  const { dtype, shape, data } = raw;
  return { dtype, shape, data };
}
27
/**
 * Session — executes a Graph.
 *
 * A Session holds a TF_Session backed by the graph passed at construction.
 * Once created the graph's structure should not change (new ops may be added
 * for initialisation ops, but existing ops should not be removed).
 *
 * @example
 *   const sess = new Session(graph);
 *   await sess.run([], [], ["init"]);       // run init op (side-effect)
 *   const [output] = await sess.runAsync(   // fetch result
 *     [[x, inputFeed]],
 *     [y],
 *   );
 */
export class Session {
  /** @internal */
  _native;

  constructor(native) {
    this._native = native;
  }

  /**
   * Synchronous inference — blocks the event loop during TF_SessionRun.
   * Use on Worker threads. On the main thread prefer runAsync().
   *
   * @param feeds   Array of [Tensor, FeedValue] pairs
   * @param fetches Tensors to compute and return
   * @param targets Op names to run for side-effects only (e.g. "init_all")
   */
  async run(feeds, fetches, targets = []) {
    const feedList = feeds.map(([tensor, value]) => toNativeFeed(tensor, value));
    const fetchList = fetches.map(toNativeFetch);
    const results = await this._native.run(feedList, fetchList, targets);
    return results.map(fromNativeOutput);
  }

  /**
   * Non-blocking inference — TF_SessionRun on the libuv thread pool.
   * The event loop stays free for I/O and timers during compute.
   *
   * @param feeds   Array of [Tensor, FeedValue] pairs
   * @param fetches Tensors to compute and return
   * @param targets Op names to run for side-effects only
   */
  async runAsync(feeds, fetches, targets = []) {
    const feedList = feeds.map(([tensor, value]) => toNativeFeed(tensor, value));
    const fetchList = fetches.map(toNativeFetch);
    const results = await this._native.runAsync(feedList, fetchList, targets);
    return results.map(fromNativeOutput);
  }

  /** Close the session and release all C++ resources. */
  destroy() {
    this._native.destroy();
  }
}
81
+ //# sourceMappingURL=session.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"session.js","sourceRoot":"","sources":["../../src/ts/session.ts"],"names":[],"mappings":"AA+CA,kDAAkD;AAClD,SAAS,YAAY,CAAC,CAAS,EAAE,CAAY;IAC3C,OAAO;QACL,MAAM,EAAE,CAAC,CAAC,MAAM;QAChB,KAAK,EAAE,CAAC,CAAC,KAAK;QACd,MAAM,EAAE;YACN,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;YACtB,KAAK,EAAE,CAAC,CAAC,KAAK;YACd,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC3B,CAAC,CAAC,CAAC,CAAC,IAAI;gBACR,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC;SACrE;KACF,CAAC;AACJ,CAAC;AAED,mDAAmD;AACnD,SAAS,aAAa,CAAC,CAAS;IAC9B,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC;AAC9C,CAAC;AAED,+CAA+C;AAC/C,SAAS,gBAAgB,CAAC,GAAQ;IAChC,OAAO;QACL,KAAK,EAAE,GAAG,CAAC,KAAc;QACzB,KAAK,EAAE,GAAG,CAAC,KAAiB;QAC5B,IAAI,EAAE,GAAG,CAAC,IAAc;KACzB,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,MAAM,OAAO,OAAO;IAClB,gBAAgB;IACP,OAAO,CAAM;IAEtB,YAAY,MAAW;QACrB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;IACxB,CAAC;IAED;;;;;;;OAOG;IACH,KAAK,CAAC,GAAG,CACP,KAA4B,EAC5B,OAAiB,EACjB,UAAoB,EAAE;QAEtB,MAAM,WAAW,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAC9D,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;QACjD,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CACjC,WAAW,EACX,aAAa,EACb,OAAO,CACR,CAAU,CAAC;QACZ,OAAO,GAAG,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;IACnC,CAAC;IAED;;;;;;;OAOG;IACH,KAAK,CAAC,QAAQ,CACZ,KAA4B,EAC5B,OAAiB,EACjB,UAAoB,EAAE;QAEtB,MAAM,WAAW,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAC9D,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;QACjD,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,QAAQ,CACtC,WAAW,EACX,aAAa,EACb,OAAO,CACR,CAAU,CAAC;QACZ,OAAO,GAAG,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;IACnC,CAAC;IAED,uDAAuD;IACvD,OAAO;QACL,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC;IACzB,CAAC;CACF"}
package/package.json ADDED
@@ -0,0 +1,63 @@
1
+ {
2
+ "name": "@isidorus/cpu",
3
+ "version": "0.0.0-alpha.0",
4
+ "description": "TensorFlow CPU graph construction, training, and inference for Node.js",
5
+ "type": "module",
6
+ "main": "./dist/ts/index.js",
7
+ "types": "./dist/ts/index.d.ts",
8
+ "files": [
9
+ "dist",
10
+ "prebuilds",
11
+ "src/native",
12
+ "binding.gyp",
13
+ "package.json",
14
+ "scripts"
15
+ ],
16
+ "exports": {
17
+ ".": {
18
+ "types": "./dist/ts/index.d.ts",
19
+ "import": "./dist/ts/index.js",
20
+ "default": "./dist/ts/index.js"
21
+ }
22
+ },
23
+ "scripts": {
24
+ "install": "node scripts/install.js",
25
+ "test": "node scripts/test.js",
26
+ "test:clean": "node -e \"require('fs').rmSync('./test/test.db', { force: true })\" && npm test",
27
+ "test:install": "node scripts/test-install.js",
28
+ "build:ts": "tsc -b",
29
+ "build:native": "npx --yes --package node-gyp node-gyp rebuild",
30
+ "build": "npm run build:ts && npm run build:native",
31
+ "prebuildify": "prebuildify --napi --strip --target 18.0.0 --target 20.0.0 --target 22.0.0 --target 24.0.0"
32
+ },
33
+ "binary": {
34
+ "napi_versions": [
35
+ 8
36
+ ]
37
+ },
38
+ "repository": {
39
+ "type": "git",
40
+ "url": "git+https://github.com/A-KGeorge/isidorus.git"
41
+ },
42
+ "publishConfig": {
43
+ "access": "public",
44
+ "provenance": true,
45
+ "registry": "https://registry.npmjs.org/"
46
+ },
47
+ "dependencies": {
48
+ "@isidorus/core": "*",
49
+ "jude-map": "*",
50
+ "jude-tf": "*",
51
+ "node-addon-api": "^8.6.0",
52
+ "node-gyp-build": "^4.8.4"
53
+ },
54
+ "devDependencies": {
55
+ "@types/node": "^25.5.0",
56
+ "node-gyp": "^10.0.0",
57
+ "prebuildify": "^6.0.0",
58
+ "tsx": "^4.0.0",
59
+ "typescript": "^5.4.0"
60
+ },
61
+ "author": "Alan Kochukalam George",
62
+ "license": "Apache-2.0"
63
+ }
@@ -0,0 +1,100 @@
1
#!/usr/bin/env node

/**
 * Smart installation script for @isidorus/cpu native addon
 *
 * Strategy:
 * - ARM architectures: Always compile locally (better performance tuning)
 * - x64/ia32: Use prebuilt binaries from node-gyp-build (avoids C++ toolchain requirement)
 *
 * This ensures:
 * 1. Most users (x64) get fast installs without needing build tools
 * 2. ARM users get optimized builds for their specific hardware
 *
 * Fixes vs. previous revision:
 * - The non-ARM branch logged "x64 architecture detected" even on ia32 or
 *   other architectures; it now reports the actual architecture.
 * - `windows-build-tools` is deprecated/unmaintained; the error hint now
 *   points at Visual Studio Build Tools instead.
 */

import { arch, platform } from "os";
import { execSync } from "child_process";
import { existsSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const projectRoot = join(__dirname, "..");

// Detect architecture: "arm", "arm64", "aarch64" all count as ARM.
const architecture = arch();
const isArm = architecture.includes("arm") || architecture.includes("aarch");

console.log(
  `\nDetected platform: ${platform()}, architecture: ${architecture}`,
);

if (isArm) {
  console.log(
    "ARM architecture detected - compiling native addon locally for optimal performance...",
  );

  try {
    // Check if node-gyp is available; install it globally if not.
    try {
      execSync("npx --yes --package node-gyp node-gyp --version", {
        stdio: "ignore",
      });
    } catch {
      console.log("Installing node-gyp...");
      execSync("npm install -g node-gyp", { stdio: "inherit" });
    }

    // Compile the native addon.
    console.log("Building native addon...");
    execSync("npx --yes --package node-gyp node-gyp rebuild", {
      cwd: projectRoot,
      stdio: "inherit",
      env: { ...process.env },
    });

    console.log("Native addon compiled successfully!");
  } catch (error) {
    console.error("Failed to compile native addon:");
    console.error(error.message);
    console.error("\nMake sure you have C++ build tools installed:");
    console.error("  - macOS: xcode-select --install");
    console.error("  - Linux: sudo apt-get install build-essential");
    console.error(
      "  - Windows: install Visual Studio Build Tools (Desktop development with C++)",
    );
    process.exit(1);
  }
} else {
  console.log(`${architecture} architecture detected - using prebuilt binary...`);

  // Check if prebuilds were shipped with the package.
  const prebuildsDir = join(projectRoot, "prebuilds");

  if (!existsSync(prebuildsDir)) {
    console.warn(
      "No prebuilds directory found. Falling back to local compilation...",
    );

    try {
      execSync("npx --yes --package node-gyp node-gyp rebuild", {
        cwd: projectRoot,
        stdio: "inherit",
      });
      console.log("Native addon compiled successfully!");
    } catch (error) {
      console.error("Failed to compile native addon:");
      console.error(error.message);
      console.error(
        "\nThis package requires prebuilt binaries or a C++ toolchain.",
      );
      console.error(
        "  Please report this issue at: https://github.com/A-KGeorge/isidorus/issues",
      );
      process.exit(1);
    }
  } else {
    console.log("Using prebuilt binary - no compilation needed!");
  }
}

console.log("Installation complete!\n");