@isidorus/cpu 0.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/README.md +47 -0
  2. package/binding.gyp +103 -0
  3. package/dist/ts/_native.d.ts +13 -0
  4. package/dist/ts/_native.d.ts.map +1 -0
  5. package/dist/ts/_native.js +22 -0
  6. package/dist/ts/_native.js.map +1 -0
  7. package/dist/ts/graph.d.ts +91 -0
  8. package/dist/ts/graph.d.ts.map +1 -0
  9. package/dist/ts/graph.js +95 -0
  10. package/dist/ts/graph.js.map +1 -0
  11. package/dist/ts/index.d.ts +47 -0
  12. package/dist/ts/index.d.ts.map +1 -0
  13. package/dist/ts/index.js +58 -0
  14. package/dist/ts/index.js.map +1 -0
  15. package/dist/ts/inference-pool.d.ts +84 -0
  16. package/dist/ts/inference-pool.d.ts.map +1 -0
  17. package/dist/ts/inference-pool.js +625 -0
  18. package/dist/ts/inference-pool.js.map +1 -0
  19. package/dist/ts/inference_pool.d.ts +99 -0
  20. package/dist/ts/inference_pool.d.ts.map +1 -0
  21. package/dist/ts/inference_pool.js +370 -0
  22. package/dist/ts/inference_pool.js.map +1 -0
  23. package/dist/ts/install-libtensorflow.d.ts +34 -0
  24. package/dist/ts/install-libtensorflow.d.ts.map +1 -0
  25. package/dist/ts/install-libtensorflow.js +254 -0
  26. package/dist/ts/install-libtensorflow.js.map +1 -0
  27. package/dist/ts/ops/array_ops.d.ts +29 -0
  28. package/dist/ts/ops/array_ops.d.ts.map +1 -0
  29. package/dist/ts/ops/array_ops.js +54 -0
  30. package/dist/ts/ops/array_ops.js.map +1 -0
  31. package/dist/ts/ops/index.d.ts +5 -0
  32. package/dist/ts/ops/index.d.ts.map +1 -0
  33. package/dist/ts/ops/index.js +5 -0
  34. package/dist/ts/ops/index.js.map +1 -0
  35. package/dist/ts/ops/math_ops.d.ts +96 -0
  36. package/dist/ts/ops/math_ops.d.ts.map +1 -0
  37. package/dist/ts/ops/math_ops.js +277 -0
  38. package/dist/ts/ops/math_ops.js.map +1 -0
  39. package/dist/ts/ops/nn_ops.d.ts +130 -0
  40. package/dist/ts/ops/nn_ops.d.ts.map +1 -0
  41. package/dist/ts/ops/nn_ops.js +340 -0
  42. package/dist/ts/ops/nn_ops.js.map +1 -0
  43. package/dist/ts/ops/variable_ops.d.ts +128 -0
  44. package/dist/ts/ops/variable_ops.d.ts.map +1 -0
  45. package/dist/ts/ops/variable_ops.js +267 -0
  46. package/dist/ts/ops/variable_ops.js.map +1 -0
  47. package/dist/ts/session.d.ts +83 -0
  48. package/dist/ts/session.d.ts.map +1 -0
  49. package/dist/ts/session.js +81 -0
  50. package/dist/ts/session.js.map +1 -0
  51. package/package.json +63 -0
  52. package/scripts/install.js +100 -0
  53. package/scripts/test-install.js +82 -0
  54. package/scripts/test.js +45 -0
  55. package/src/native/addon.cc +12 -0
  56. package/src/native/graph.cc +442 -0
  57. package/src/native/graph.h +52 -0
  58. package/src/native/platform_tf.h +8 -0
  59. package/src/native/session.cc +716 -0
  60. package/src/native/session.h +92 -0
@@ -0,0 +1,277 @@
1
+ import { DType } from "@isidorus/core";
2
+ import { constant } from "./array_ops.js";
3
+ // ---------------------------------------------------------------------------
4
+ // math_ops — elementwise and reduction ops
5
+ //
6
+ // All ops follow the same pattern:
7
+ // function opName(g: Graph, ...inputs: Tensor[], options?, name?: string): Tensor
8
+ //
9
+ // The graph is always the first argument so ops can be composed without
10
+ // needing a "default graph" global — consistent with the graph-first design.
11
+ // ---------------------------------------------------------------------------
12
+ // ── Binary elementwise ───────────────────────────────────────────────────────
13
/**
 * matmul — matrix multiplication.
 * Optional transposition of either operand is controlled through
 * options.transposeA / options.transposeB (both default to false).
 */
export function matmul(g, a, b, options = {}, name) {
  const transposeA = options.transposeA ?? false;
  const transposeB = options.transposeB ?? false;
  const attrs = {
    transpose_a: { kind: "bool", value: transposeA },
    transpose_b: { kind: "bool", value: transposeB },
  };
  const outputs = g.addOp("MatMul", [a, b], attrs, name);
  return outputs[0];
}
24
/**
 * batchMatmul — batched matrix multiplication (BatchMatMulV2).
 * Inputs must have rank >= 2; leading dimensions are treated as batch dims.
 * options.adjX / options.adjY request adjoint of the respective operand.
 */
export function batchMatmul(g, a, b, options = {}, name) {
  const attrs = {
    adj_x: { kind: "bool", value: options.adjX ?? false },
    adj_y: { kind: "bool", value: options.adjY ?? false },
  };
  return g.addOp("BatchMatMulV2", [a, b], attrs, name)[0];
}
35
/** Element-wise addition (AddV2). Broadcasting is supported. */
export function add(g, a, b, name) {
  return g.addOp("AddV2", [a, b], {}, name)[0];
}

/** Element-wise subtraction. Broadcasting is supported. */
export function sub(g, a, b, name) {
  return g.addOp("Sub", [a, b], {}, name)[0];
}

/** Element-wise multiplication. Broadcasting is supported. */
export function mul(g, a, b, name) {
  return g.addOp("Mul", [a, b], {}, name)[0];
}

/** Element-wise (real) division. Broadcasting is supported. */
export function div(g, a, b, name) {
  return g.addOp("RealDiv", [a, b], {}, name)[0];
}
55
/**
 * biasAdd — adds a 1-D bias tensor to a value tensor.
 * The bias is added to the last dimension of value.
 * Equivalent to add() but semantically clearer for neural network layers.
 */
export function biasAdd(g, value, bias, name) {
  // BiasAdd's `data_format` attribute is a *string* ("NHWC" / "NCHW"),
  // not a bool; the previous `{ kind: "bool", value: false }` attr was
  // malformed. Omit the attr entirely so TensorFlow applies its documented
  // default, "NHWC".
  const [t] = g.addOp("BiasAdd", [value, bias], {}, name);
  return t;
}
66
/** Element-wise maximum of two tensors. Broadcasting is supported. */
export function maximum(g, a, b, name) {
  return g.addOp("Maximum", [a, b], {}, name)[0];
}

/** Element-wise minimum of two tensors. Broadcasting is supported. */
export function minimum(g, a, b, name) {
  return g.addOp("Minimum", [a, b], {}, name)[0];
}

/** Element-wise exponentiation base^exp. Broadcasting is supported. */
export function pow(g, base, exp, name) {
  return g.addOp("Pow", [base, exp], {}, name)[0];
}
81
+ // ── Unary elementwise ────────────────────────────────────────────────────────
82
/** Element-wise negation (-x). */
export function neg(g, x, name) {
  return g.addOp("Neg", [x], {}, name)[0];
}

/** Element-wise absolute value |x|. */
export function abs(g, x, name) {
  return g.addOp("Abs", [x], {}, name)[0];
}

/** Element-wise exponential e^x. */
export function exp(g, x, name) {
  return g.addOp("Exp", [x], {}, name)[0];
}

/** Element-wise natural logarithm ln(x). */
export function log(g, x, name) {
  return g.addOp("Log", [x], {}, name)[0];
}

/** Element-wise square root. */
export function sqrt(g, x, name) {
  return g.addOp("Sqrt", [x], {}, name)[0];
}

/** Element-wise square x^2. */
export function square(g, x, name) {
  return g.addOp("Square", [x], {}, name)[0];
}

/** Element-wise reciprocal 1/x. */
export function reciprocal(g, x, name) {
  return g.addOp("Reciprocal", [x], {}, name)[0];
}

/** Element-wise floor (round toward -inf). */
export function floor(g, x, name) {
  return g.addOp("Floor", [x], {}, name)[0];
}

/** Element-wise ceiling (round toward +inf). */
export function ceil(g, x, name) {
  return g.addOp("Ceil", [x], {}, name)[0];
}

/** Element-wise rounding, ties to even. */
export function round(g, x, name) {
  return g.addOp("Round", [x], {}, name)[0];
}

/** Element-wise sign: -1, 0, or 1. */
export function sign(g, x, name) {
  return g.addOp("Sign", [x], {}, name)[0];
}
137
/** Cast a tensor to a different dtype (sets the Cast op's DstT attr). */
export function cast(g, x, dtype, name) {
  const attrs = { DstT: { kind: "type", value: dtype } };
  return g.addOp("Cast", [x], attrs, name)[0];
}
144
+ // ── Reductions ────────────────────────────────────────────────────────────────
145
/**
 * Build a constant int32 axis tensor for reduction ops.
 * axes: array of axis indices to reduce over.
 * Values are serialised little-endian, 4 bytes per axis.
 */
function makeAxisConst(g, axes) {
  const buf = Buffer.allocUnsafe(4 * axes.length);
  for (let i = 0; i < axes.length; i++) {
    buf.writeInt32LE(axes[i], 4 * i);
  }
  return constant(g, buf, [axes.length], DType.INT32);
}
154
/** Shared driver for axis reductions: builds the axis const, emits the op. */
function reduceOp(g, opName, x, axes, keepDims, name) {
  const axisT = makeAxisConst(g, axes);
  const attrs = { keep_dims: { kind: "bool", value: keepDims } };
  return g.addOp(opName, [x, axisT], attrs, name)[0];
}

/** Sum over the given axes. keepDims preserves reduced dimensions as size 1. */
export function sum(g, x, axes, keepDims = false, name) {
  return reduceOp(g, "Sum", x, axes, keepDims, name);
}

/** Arithmetic mean over the given axes. */
export function mean(g, x, axes, keepDims = false, name) {
  return reduceOp(g, "Mean", x, axes, keepDims, name);
}

/** Maximum over the given axes. */
export function reduceMax(g, x, axes, keepDims = false, name) {
  return reduceOp(g, "Max", x, axes, keepDims, name);
}

/** Minimum over the given axes. */
export function reduceMin(g, x, axes, keepDims = false, name) {
  return reduceOp(g, "Min", x, axes, keepDims, name);
}

/** Product over the given axes. */
export function prod(g, x, axes, keepDims = false, name) {
  return reduceOp(g, "Prod", x, axes, keepDims, name);
}
194
/** Variance over the given axes (naive two-pass: mean of squared deviations). */
export function variance(g, x, axes, keepDims = false, name) {
  // Keep dims on the inner mean so it broadcasts back against x.
  const centered = sub(g, x, mean(g, x, axes, /* keepDims */ true));
  return mean(g, square(g, centered), axes, keepDims, name);
}
201
/** Standard deviation over the given axes: sqrt(variance). */
export function std(g, x, axes, keepDims = false, name) {
  const varT = variance(g, x, axes, keepDims);
  return sqrt(g, varT, name);
}
205
+ // ── Comparison ────────────────────────────────────────────────────────────────
206
/** Element-wise equality a == b, producing a bool tensor. */
export function equal(g, a, b, name) {
  return g.addOp("Equal", [a, b], {}, name)[0];
}

/** Element-wise inequality a != b, producing a bool tensor. */
export function notEqual(g, a, b, name) {
  return g.addOp("NotEqual", [a, b], {}, name)[0];
}

/** Element-wise a > b, producing a bool tensor. */
export function greater(g, a, b, name) {
  return g.addOp("Greater", [a, b], {}, name)[0];
}

/** Element-wise a >= b, producing a bool tensor. */
export function greaterEqual(g, a, b, name) {
  return g.addOp("GreaterEqual", [a, b], {}, name)[0];
}

/** Element-wise a < b, producing a bool tensor. */
export function less(g, a, b, name) {
  return g.addOp("Less", [a, b], {}, name)[0];
}

/** Element-wise a <= b, producing a bool tensor. */
export function lessEqual(g, a, b, name) {
  return g.addOp("LessEqual", [a, b], {}, name)[0];
}
236
+ // ── Clipping ─────────────────────────────────────────────────────────────────
237
/** Clip values to the closed interval [minVal, maxVal] (float32 scalars). */
export function clipByValue(g, x, minVal, maxVal, name) {
  // Encode a JS number as a scalar float32 constant in this graph.
  const scalarF32 = (v) => {
    const b = Buffer.allocUnsafe(4);
    b.writeFloatLE(v, 0);
    return constant(g, b, [], DType.FLOAT32);
  };
  const lo = scalarF32(minVal);
  const hi = scalarF32(maxVal);
  return g.addOp("ClipByValue", [x, lo, hi], {}, name)[0];
}
252
+ // ── Index ops ────────────────────────────────────────────────────────────────
253
/** Encode a JS integer as a scalar int32 constant in the given graph. */
function scalarI32Const(g, value) {
  const b = Buffer.allocUnsafe(4);
  b.writeInt32LE(value, 0);
  return constant(g, b, [], DType.INT32);
}

/** Index of the maximum value along an axis (int64 output). */
export function argMax(g, x, axis, name) {
  const axisT = scalarI32Const(g, axis);
  const attrs = { output_type: { kind: "type", value: DType.INT64 } };
  return g.addOp("ArgMax", [x, axisT], attrs, name)[0];
}

/** Index of the minimum value along an axis (int64 output). */
export function argMin(g, x, axis, name) {
  const axisT = scalarI32Const(g, axis);
  const attrs = { output_type: { kind: "type", value: DType.INT64 } };
  return g.addOp("ArgMin", [x, axisT], attrs, name)[0];
}
277
+ //# sourceMappingURL=math_ops.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"math_ops.js","sourceRoot":"","sources":["../../../src/ts/ops/math_ops.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAEvC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAE1C,8EAA8E;AAC9E,2CAA2C;AAC3C,EAAE;AACF,mCAAmC;AACnC,oFAAoF;AACpF,EAAE;AACF,wEAAwE;AACxE,6EAA6E;AAC7E,8EAA8E;AAE9E,gFAAgF;AAEhF;;;GAGG;AACH,MAAM,UAAU,MAAM,CACpB,CAAQ,EACR,CAAS,EACT,CAAS,EACT,UAA0D,EAAE,EAC5D,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,QAAQ,EACR,CAAC,CAAC,EAAE,CAAC,CAAC,EACN;QACE,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,UAAU,IAAI,KAAK,EAAE;QACjE,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,UAAU,IAAI,KAAK,EAAE;KAClE,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,WAAW,CACzB,CAAQ,EACR,CAAS,EACT,CAAS,EACT,UAA8C,EAAE,EAChD,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,eAAe,EACf,CAAC,CAAC,EAAE,CAAC,CAAC,EACN;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,IAAI,IAAI,KAAK,EAAE;QACrD,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,IAAI,IAAI,KAAK,EAAE;KACtD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,oDAAoD;AACpD,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IAC/D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC/C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,uDAAuD;AACvD,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IAC/D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC7C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,0DAA0D;AAC1D,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IAC/D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC7C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,2DAA2D;AAC3D,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IAC/D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,CAAC;AAC
X,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,OAAO,CACrB,CAAQ,EACR,KAAa,EACb,IAAY,EACZ,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,SAAS,EACT,CAAC,KAAK,EAAE,IAAI,CAAC,EACb;QACE,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,eAAe;KAC7D,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,mDAAmD;AACnD,MAAM,UAAU,OAAO,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IACnE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,mDAAmD;AACnD,MAAM,UAAU,OAAO,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IACnE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,+CAA+C;AAC/C,MAAM,UAAU,GAAG,CACjB,CAAQ,EACR,IAAY,EACZ,GAAW,EACX,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAClD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gFAAgF;AAEhF,6BAA6B;AAC7B,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACpD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,mCAAmC;AACnC,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACpD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,wBAAwB;AACxB,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACpD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gCAAgC;AAChC,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACpD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gCAAgC;AAChC,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC3C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,2BAA2B;AAC3B,MAAM,UAAU,M
AAM,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACvD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC7C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,qCAAqC;AACrC,MAAM,UAAU,UAAU,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IAC3D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,0BAA0B;AAC1B,MAAM,UAAU,KAAK,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACtD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC5C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,4BAA4B;AAC5B,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC3C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,yCAAyC;AACzC,MAAM,UAAU,KAAK,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACtD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC5C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,uCAAuC;AACvC,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC3C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,wCAAwC;AACxC,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,KAAY,EAAE,IAAa;IACnE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,MAAM,EACN,CAAC,CAAC,CAAC,EACH;QACE,IAAI,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KACrC,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,iFAAiF;AAEjF;;;GAGG;AACH,SAAS,aAAa,CAAC,CAAQ,EAAE,IAAc;IAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAChD,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IACnD,OAAO,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC;AAED,gFAAgF;AAChF,MAAM,UAAU,GAAG,CACjB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,MAAM,KAAK,GAAG,aAAa,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;IACrC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAA
K,CACjB,KAAK,EACL,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,SAAS,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE;KAC7C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gCAAgC;AAChC,MAAM,UAAU,IAAI,CAClB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,MAAM,KAAK,GAAG,aAAa,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;IACrC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,MAAM,EACN,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,SAAS,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE;KAC7C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,MAAM,KAAK,GAAG,aAAa,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;IACrC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,KAAK,EACL,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,SAAS,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE;KAC7C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,MAAM,KAAK,GAAG,aAAa,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;IACrC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,KAAK,EACL,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,SAAS,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE;KAC7C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,mCAAmC;AACnC,MAAM,UAAU,IAAI,CAClB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,MAAM,KAAK,GAAG,aAAa,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;IACrC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,MAAM,EACN,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,SAAS,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE;KAC7C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,6DAA6D;AAC7D,MAAM,UAAU,QAAQ,CACtB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC;IACpD,MAAM,IAAI,GAAG,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;IAC9B,MAAM,EAAE,GAAG,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;IAC3B,OAAO,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC3C,CAAC;AAED,8CAA8C;AAC
9C,MAAM,UAAU,GAAG,CACjB,CAAQ,EACR,CAAS,EACT,IAAc,EACd,QAAQ,GAAG,KAAK,EAChB,IAAa;IAEb,OAAO,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC;AACvD,CAAC;AAED,iFAAiF;AAEjF,yCAAyC;AACzC,MAAM,UAAU,KAAK,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IACjE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC/C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,yCAAyC;AACzC,MAAM,UAAU,QAAQ,CACtB,CAAQ,EACR,CAAS,EACT,CAAS,EACT,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAClD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,wCAAwC;AACxC,MAAM,UAAU,OAAO,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IACnE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,yCAAyC;AACzC,MAAM,UAAU,YAAY,CAC1B,CAAQ,EACR,CAAS,EACT,CAAS,EACT,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACtD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,wCAAwC;AACxC,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,CAAS,EAAE,IAAa;IAChE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC9C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,yCAAyC;AACzC,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,CAAS,EACT,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACnD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gFAAgF;AAEhF,uCAAuC;AACvC,MAAM,UAAU,WAAW,CACzB,CAAQ,EACR,CAAS,EACT,MAAc,EACd,MAAc,EACd,IAAa;IAEb,MAAM,IAAI,GAAG,QAAQ,CACnB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,OAAO,CACd,CAAC;IACF,MAAM,IAAI,GAAG,QAAQ,CACnB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC
,MAAM,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,OAAO,CACd,CAAC;IACF,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC9D,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gFAAgF;AAEhF,gDAAgD;AAChD,MAAM,UAAU,MAAM,CACpB,CAAQ,EACR,CAAS,EACT,IAAY,EACZ,IAAa;IAEb,MAAM,KAAK,GAAG,QAAQ,CACpB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;QACxB,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,KAAK,CACZ,CAAC;IACF,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,QAAQ,EACR,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;KAClD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,gDAAgD;AAChD,MAAM,UAAU,MAAM,CACpB,CAAQ,EACR,CAAS,EACT,IAAY,EACZ,IAAa;IAEb,MAAM,KAAK,GAAG,QAAQ,CACpB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;QACxB,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,KAAK,CACZ,CAAC;IACF,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,QAAQ,EACR,CAAC,CAAC,EAAE,KAAK,CAAC,EACV;QACE,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;KAClD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC"}
@@ -0,0 +1,130 @@
1
+ import type { Tensor } from "@isidorus/core";
2
+ import type { Graph } from "../graph.js";
3
+ /** Rectified linear unit: max(0, x). */
4
+ export declare function relu(g: Graph, x: Tensor, name?: string): Tensor;
5
+ /** Leaky ReLU: max(alpha * x, x). Default alpha = 0.2. */
6
+ export declare function leakyRelu(g: Graph, x: Tensor, alpha?: number, name?: string): Tensor;
7
+ /** ReLU6: min(max(0, x), 6). Common in MobileNet. */
8
+ export declare function relu6(g: Graph, x: Tensor, name?: string): Tensor;
9
+ /** Sigmoid: 1 / (1 + e^-x). */
10
+ export declare function sigmoid(g: Graph, x: Tensor, name?: string): Tensor;
11
+ /** Hyperbolic tangent. */
12
+ export declare function tanh(g: Graph, x: Tensor, name?: string): Tensor;
13
+ /**
14
+ * Softmax along the last axis.
15
+ * For other axes use softmaxAxis().
16
+ */
17
+ export declare function softmax(g: Graph, x: Tensor, name?: string): Tensor;
18
+ /** ELU: x if x > 0, else e^x - 1. */
19
+ export declare function elu(g: Graph, x: Tensor, name?: string): Tensor;
20
+ /** SELU: scaled ELU. */
21
+ export declare function selu(g: Graph, x: Tensor, name?: string): Tensor;
22
+ /**
23
+ * Swish: x * sigmoid(x).
24
+ * Built from primitives — no single TF op for this.
25
+ */
26
+ export declare function swish(g: Graph, x: Tensor, name?: string): Tensor;
27
+ /**
28
+ * GELU (approximate): x * 0.5 * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
29
+ * Used in BERT, GPT.
30
+ * Built from primitives.
31
+ */
32
+ export declare function gelu(g: Graph, x: Tensor, name?: string): Tensor;
33
+ /** Log-softmax: log(softmax(x)). More numerically stable for cross-entropy. */
34
+ export declare function logSoftmax(g: Graph, x: Tensor, name?: string): Tensor;
35
+ /**
36
+ * Batch normalisation (inference mode).
37
+ *
38
+ * Uses pre-computed running mean/variance from training.
39
+ * For training-mode BN (with FusedBatchNormV3 + gradient), use batchNormTraining().
40
+ */
41
+ export declare function batchNorm(g: Graph, x: Tensor, scale: Tensor, offset: Tensor, mean_: Tensor, variance_: Tensor, options?: {
42
+ epsilon?: number;
43
+ dataFormat?: "NHWC" | "NCHW";
44
+ }, name?: string): Tensor;
45
+ /**
46
+ * Layer normalisation — normalises across the last `numAxes` dimensions.
47
+ * Built from primitives (TF has no single LayerNorm C op in the public API).
48
+ *
49
+ * @param numAxes Number of trailing axes to normalise over (default 1).
50
+ */
51
+ export declare function layerNorm(g: Graph, x: Tensor, scale: Tensor, offset: Tensor, options?: {
52
+ epsilon?: number;
53
+ axes?: number[];
54
+ }, name?: string): Tensor;
55
+ /**
56
+ * Build a per-step, per-layer dropout seed tensor for StatelessRandomUniform.
57
+ *
58
+ * Seed layout: [global_step_int32, layer_id]
59
+ * - global_step changes every forward pass → different mask each step
60
+ * - layer_id is a compile-time constant → different mask per layer
61
+ *
62
+ * Usage:
63
+ * const stepVar = ops.variable(g, [], DType.INT64, "global_step");
64
+ * // ... in training loop:
65
+ * const seed = ops.makeDropoutSeed(g, stepVar, 0); // layer 0
66
+ * const dropped = ops.dropout(g, x, 0.5, true, seed);
67
+ * // after forward pass, increment: ops.assignAdd(g, stepVar, one, DType.INT64)
68
+ *
69
+ * @param stepHandle VarHandleOp tensor for an int64 global step counter.
70
+ * @param layerId Compile-time unique integer for this dropout layer.
71
+ * Use 0, 1, 2... for each dropout op in the graph.
72
+ */
73
+ export declare function makeDropoutSeed(g: Graph, stepHandle: Tensor, layerId: number): Tensor;
74
+ /**
75
+ * Dropout — applies inverted dropout during training.
76
+ *
77
+ * Inverted dropout keeps expected activation magnitude equal to training time:
78
+ * kept units are scaled up by 1/(1-rate), dropped units become 0.
79
+ * This means inference code runs unchanged with no scaling needed.
80
+ *
81
+ * @param rate Fraction of units to drop, in [0, 1). 0 = no dropout.
82
+ * @param training If false, returns x unchanged (identity). Rate is ignored.
83
+ *
84
+ * ⚠ SEED WARNING:
+ * If no `seed` tensor is supplied, the mask is generated with
+ * StatelessRandomUniform using the constant seed [0, 0], so the SAME
+ * neurons are dropped on every forward pass — acceptable only for testing
+ * graph construction. For real regularisation, pass a per-step seed built
+ * with makeDropoutSeed (global-step variable + compile-time layer id).
90
+ */
91
+ export declare function dropout(g: Graph, x: Tensor, rate: number, training: boolean, seed?: Tensor, name?: string): Tensor;
92
+ /**
93
+ * 2D convolution (NHWC).
94
+ *
95
+ * @param x Input tensor [batch, height, width, in_channels]
96
+ * @param filter Filter tensor [filter_height, filter_width, in_channels, out_channels]
97
+ * @param strides [1, stride_h, stride_w, 1]
98
+ * @param padding "SAME" or "VALID"
99
+ */
100
+ export declare function conv2d(g: Graph, x: Tensor, filter: Tensor, strides?: [number, number, number, number], padding?: "SAME" | "VALID", name?: string): Tensor;
101
+ /**
102
+ * Depthwise 2D convolution.
103
+ * Each input channel is convolved with its own filter of depth channel_multiplier.
104
+ */
105
+ export declare function depthwiseConv2d(g: Graph, x: Tensor, filter: Tensor, strides?: [number, number, number, number], padding?: "SAME" | "VALID", name?: string): Tensor;
106
+ /** Max pooling. */
107
+ export declare function maxPool(g: Graph, x: Tensor, ksize?: [number, number, number, number], strides?: [number, number, number, number], padding?: "SAME" | "VALID", name?: string): Tensor;
108
+ /** Average pooling. */
109
+ export declare function avgPool(g: Graph, x: Tensor, ksize?: [number, number, number, number], strides?: [number, number, number, number], padding?: "SAME" | "VALID", name?: string): Tensor;
110
+ /** Global average pooling — reduces spatial dims to [batch, channels]. */
111
+ export declare function globalAvgPool(g: Graph, x: Tensor, name?: string): Tensor;
112
+ /**
113
+ * Sparse softmax cross-entropy.
114
+ * labels: int32/int64 class indices [batch]
115
+ * logits: float32 [batch, num_classes]
116
+ * Returns per-example loss [batch].
117
+ */
118
+ export declare function sparseSoftmaxCrossEntropyWithLogits(g: Graph, labels: Tensor, logits: Tensor, name?: string): Tensor;
119
+ /**
120
+ * Sigmoid cross-entropy with logits (binary classification).
121
+ * labels: float32 {0, 1} [batch]
122
+ * logits: float32 [batch]
123
+ */
124
+ export declare function sigmoidCrossEntropyWithLogits(g: Graph, labels: Tensor, logits: Tensor, name?: string): Tensor;
125
+ /**
126
+ * L2 loss: 0.5 * sum(t^2).
127
+ * Used as a regularisation term.
128
+ */
129
+ export declare function l2Loss(g: Graph, x: Tensor, name?: string): Tensor;
130
+ //# sourceMappingURL=nn_ops.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"nn_ops.d.ts","sourceRoot":"","sources":["../../../src/ts/ops/nn_ops.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AAqBzC,wCAAwC;AACxC,wBAAgB,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAG/D;AAED,0DAA0D;AAC1D,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,KAAK,SAAM,EACX,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAUR;AAED,qDAAqD;AACrD,wBAAgB,KAAK,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGhE;AAED,+BAA+B;AAC/B,wBAAgB,OAAO,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGlE;AAED,0BAA0B;AAC1B,wBAAgB,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAG/D;AAED;;;GAGG;AACH,wBAAgB,OAAO,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGlE;AAED,qCAAqC;AACrC,wBAAgB,GAAG,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAG9D;AAED,wBAAwB;AACxB,wBAAgB,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAG/D;AAED;;;GAGG;AACH,wBAAgB,KAAK,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAEhE;AAED;;;;GAIG;AACH,wBAAgB,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAgD/D;AAED,+EAA+E;AAC/E,wBAAgB,UAAU,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGrE;AAID;;;;;GAKG;AACH,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,SAAS,EAAE,MAAM,EACjB,OAAO,GAAE;IAAE,OAAO,CAAC,EAAE,MAAM,CAAC;IAAC,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;CAAO,EAChE,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAYR;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,EACd,OAAO,GAAE;IAAE,OAAO,CAAC,EAAE,MAAM,CAAC;IAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAA;CAAO,EACnD,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAiBR;AAED;;;;;;;;;;;;;;;;;GAiBG;A
ACH,wBAAgB,eAAe,CAC7B,CAAC,EAAE,KAAK,EACR,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE,MAAM,GACd,MAAM,CAqBR;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,OAAO,CACrB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,OAAO,EACjB,IAAI,CAAC,EAAE,MAAM,EACb,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CA6DR;AAID;;;;;;;GAOG;AACH,wBAAgB,MAAM,CACpB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,MAAM,EAAE,MAAM,EACd,OAAO,GAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAgB,EACxD,OAAO,GAAE,MAAM,GAAG,OAAgB,EAClC,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAYR;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAC7B,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,MAAM,EAAE,MAAM,EACd,OAAO,GAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAgB,EACxD,OAAO,GAAE,MAAM,GAAG,OAAgB,EAClC,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAYR;AAID,mBAAmB;AACnB,wBAAgB,OAAO,CACrB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,KAAK,GAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAgB,EACtD,OAAO,GAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAgB,EACxD,OAAO,GAAE,MAAM,GAAG,OAAiB,EACnC,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAaR;AAED,uBAAuB;AACvB,wBAAgB,OAAO,CACrB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,KAAK,GAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAgB,EACtD,OAAO,GAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAgB,EACxD,OAAO,GAAE,MAAM,GAAG,OAAiB,EACnC,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAaR;AAED,0EAA0E;AAC1E,wBAAgB,aAAa,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGxE;AAID;;;;;GAKG;AACH,wBAAgB,mCAAmC,CACjD,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,MAAM,EACd,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAQR;AAED;;;;GAIG;AACH,wBAAgB,6BAA6B,CAC3C,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,MAAM,EACd,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAwBR;AAED;;;GAGG;AACH,wBAAgB,MAAM,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGjE"}