@isidorus/cpu 0.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/README.md +47 -0
  2. package/binding.gyp +103 -0
  3. package/dist/ts/_native.d.ts +13 -0
  4. package/dist/ts/_native.d.ts.map +1 -0
  5. package/dist/ts/_native.js +22 -0
  6. package/dist/ts/_native.js.map +1 -0
  7. package/dist/ts/graph.d.ts +91 -0
  8. package/dist/ts/graph.d.ts.map +1 -0
  9. package/dist/ts/graph.js +95 -0
  10. package/dist/ts/graph.js.map +1 -0
  11. package/dist/ts/index.d.ts +47 -0
  12. package/dist/ts/index.d.ts.map +1 -0
  13. package/dist/ts/index.js +58 -0
  14. package/dist/ts/index.js.map +1 -0
  15. package/dist/ts/inference-pool.d.ts +84 -0
  16. package/dist/ts/inference-pool.d.ts.map +1 -0
  17. package/dist/ts/inference-pool.js +625 -0
  18. package/dist/ts/inference-pool.js.map +1 -0
  19. package/dist/ts/inference_pool.d.ts +99 -0
  20. package/dist/ts/inference_pool.d.ts.map +1 -0
  21. package/dist/ts/inference_pool.js +370 -0
  22. package/dist/ts/inference_pool.js.map +1 -0
  23. package/dist/ts/install-libtensorflow.d.ts +34 -0
  24. package/dist/ts/install-libtensorflow.d.ts.map +1 -0
  25. package/dist/ts/install-libtensorflow.js +254 -0
  26. package/dist/ts/install-libtensorflow.js.map +1 -0
  27. package/dist/ts/ops/array_ops.d.ts +29 -0
  28. package/dist/ts/ops/array_ops.d.ts.map +1 -0
  29. package/dist/ts/ops/array_ops.js +54 -0
  30. package/dist/ts/ops/array_ops.js.map +1 -0
  31. package/dist/ts/ops/index.d.ts +5 -0
  32. package/dist/ts/ops/index.d.ts.map +1 -0
  33. package/dist/ts/ops/index.js +5 -0
  34. package/dist/ts/ops/index.js.map +1 -0
  35. package/dist/ts/ops/math_ops.d.ts +96 -0
  36. package/dist/ts/ops/math_ops.d.ts.map +1 -0
  37. package/dist/ts/ops/math_ops.js +277 -0
  38. package/dist/ts/ops/math_ops.js.map +1 -0
  39. package/dist/ts/ops/nn_ops.d.ts +130 -0
  40. package/dist/ts/ops/nn_ops.d.ts.map +1 -0
  41. package/dist/ts/ops/nn_ops.js +340 -0
  42. package/dist/ts/ops/nn_ops.js.map +1 -0
  43. package/dist/ts/ops/variable_ops.d.ts +128 -0
  44. package/dist/ts/ops/variable_ops.d.ts.map +1 -0
  45. package/dist/ts/ops/variable_ops.js +267 -0
  46. package/dist/ts/ops/variable_ops.js.map +1 -0
  47. package/dist/ts/session.d.ts +83 -0
  48. package/dist/ts/session.d.ts.map +1 -0
  49. package/dist/ts/session.js +81 -0
  50. package/dist/ts/session.js.map +1 -0
  51. package/package.json +63 -0
  52. package/scripts/install.js +100 -0
  53. package/scripts/test-install.js +82 -0
  54. package/scripts/test.js +45 -0
  55. package/src/native/addon.cc +12 -0
  56. package/src/native/graph.cc +442 -0
  57. package/src/native/graph.h +52 -0
  58. package/src/native/platform_tf.h +8 -0
  59. package/src/native/session.cc +716 -0
  60. package/src/native/session.h +92 -0
@@ -0,0 +1,340 @@
1
+ import { DType } from "@isidorus/core";
2
+ import { constant } from "./array_ops.js";
3
+ import { sub, mul, div, add, mean, sqrt, square, } from "./math_ops.js";
4
+ // ---------------------------------------------------------------------------
5
+ // nn_ops — neural network operations
6
+ // ---------------------------------------------------------------------------
7
// ── Activations ──────────────────────────────────────────────────────────────
// Shared shape: every activation below adds exactly one graph node with a
// single input tensor and returns that node's sole output.
const addUnary = (g, opType, x, attrs, name) => {
    const outputs = g.addOp(opType, [x], attrs, name);
    return outputs[0];
};
/** Rectified linear unit: max(0, x). */
export function relu(g, x, name) {
    return addUnary(g, "Relu", x, {}, name);
}
/** Leaky ReLU: max(alpha * x, x). Default alpha = 0.2. */
export function leakyRelu(g, x, alpha = 0.2, name) {
    return addUnary(g, "LeakyRelu", x, { alpha: { kind: "float", value: alpha } }, name);
}
/** ReLU6: min(max(0, x), 6). Common in MobileNet. */
export function relu6(g, x, name) {
    return addUnary(g, "Relu6", x, {}, name);
}
/** Sigmoid: 1 / (1 + e^-x). */
export function sigmoid(g, x, name) {
    return addUnary(g, "Sigmoid", x, {}, name);
}
/** Hyperbolic tangent. */
export function tanh(g, x, name) {
    return addUnary(g, "Tanh", x, {}, name);
}
/**
 * Softmax along the last axis.
 * For other axes use softmaxAxis().
 */
export function softmax(g, x, name) {
    return addUnary(g, "Softmax", x, {}, name);
}
/** ELU: x if x > 0, else e^x - 1. */
export function elu(g, x, name) {
    return addUnary(g, "Elu", x, {}, name);
}
/** SELU: scaled ELU. */
export function selu(g, x, name) {
    return addUnary(g, "Selu", x, {}, name);
}
53
/**
 * Swish activation: x * sigmoid(x).
 * Composed from Mul + Sigmoid — TensorFlow exposes no single op for it.
 */
export function swish(g, x, name) {
    const gate = sigmoid(g, x);
    return mul(g, x, gate, name);
}
60
/**
 * GELU (tanh approximation):
 *   x * 0.5 * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
 * Used in BERT, GPT.
 * Built from primitive ops — no single TF op for this.
 *
 * @param g Graph the ops are added to.
 * @param x Input tensor (assumed float32 — TODO confirm against callers).
 * @param name Optional name for the final Mul node.
 */
export function gelu(g, x, name) {
    // Local helper: emit a float32 scalar Const node. Replaces four
    // copy-pasted Buffer-building IIFEs in the original.
    const scalarF32 = (value) => {
        const b = Buffer.allocUnsafe(4);
        b.writeFloatLE(value, 0);
        return constant(g, b, [], DType.FLOAT32);
    };
    const c1 = scalarF32(0.044715);
    const c2 = scalarF32(0.7978845608); // sqrt(2 / pi)
    const half = scalarF32(0.5);
    const one = scalarF32(1.0);
    const x3 = mul(g, mul(g, x, x), x);
    const inner = mul(g, c2, add(g, x, mul(g, c1, x3)));
    const tanhInner = tanh(g, inner);
    return mul(g, mul(g, x, half), add(g, one, tanhInner), name);
}
93
/** Log-softmax: log(softmax(x)). More numerically stable for cross-entropy. */
export function logSoftmax(g, x, name) {
    const outputs = g.addOp("LogSoftmax", [x], {}, name);
    return outputs[0];
}
98
// ── Normalisation ─────────────────────────────────────────────────────────────
/**
 * Batch normalisation (inference mode).
 *
 * Uses pre-computed running mean/variance from training.
 * For training-mode BN (with FusedBatchNormV3 + gradient), use batchNormTraining().
 *
 * @param options.epsilon Numerical-stability constant (default 1e-3).
 */
export function batchNorm(g, x, scale, offset, mean_, variance_, options = {}, name) {
    const [y] = g.addOp("FusedBatchNormV3", [x, scale, offset, mean_, variance_], {
        epsilon: { kind: "float", value: options.epsilon ?? 1e-3 },
        // FusedBatchNormV3's data_format attr is a STRING ("NHWC"/"NCHW") in the
        // TF op definition — the previous { kind: "bool", value: false } could
        // never encode it correctly.
        data_format: { kind: "string", value: "NHWC" },
        is_training: { kind: "bool", value: false },
    }, name);
    return y; // index 0 is the normalised output; other outputs are for training
}
113
/**
 * Layer normalisation — normalises over the given axes (default: last axis).
 * Built from primitives (TF has no single LayerNorm C op in the public API).
 *
 * NOTE: the previous JSDoc documented a `numAxes` parameter that does not
 * exist — the actual API is `options.axes`.
 *
 * @param g Graph the ops are added to.
 * @param x Input tensor (float32).
 * @param scale Per-feature scale (gamma), broadcastable against the normalised x.
 * @param offset Per-feature offset (beta), broadcastable against the normalised x.
 * @param options.axes Axes to normalise over (default [-1]). Pass several
 *                     trailing axes explicitly e.g. for transformer blocks.
 * @param options.epsilon Numerical-stability constant (default 1e-5).
 * @param name Optional name for the final Add node.
 */
export function layerNorm(g, x, scale, offset, options = {}, name) {
    const eps = options.epsilon ?? 1e-5;
    const axes = options.axes ?? [-1];
    // Mean/variance over the chosen axes, with dims kept for broadcasting.
    const mu = mean(g, x, axes, /* keepDims */ true);
    const diff = sub(g, x, mu);
    const var_ = mean(g, square(g, diff), axes, true);
    const epsBuf = Buffer.allocUnsafe(4);
    epsBuf.writeFloatLE(eps, 0);
    const epsT = constant(g, epsBuf, [], DType.FLOAT32);
    const denom = sqrt(g, add(g, var_, epsT));
    const xNorm = div(g, diff, denom);
    return add(g, mul(g, scale, xNorm), offset, name);
}
134
/**
 * Build a per-step, per-layer dropout seed tensor for StatelessRandomUniform.
 *
 * Seed layout: [global_step_int32, layer_id]
 *   - global_step changes every forward pass → different mask each step
 *   - layer_id is a compile-time constant → different mask per layer
 *
 * Usage:
 *   const stepVar = ops.variable(g, [], DType.INT64, "global_step");
 *   // ... in training loop:
 *   const seed = ops.makeDropoutSeed(g, stepVar, 0); // layer 0
 *   const dropped = ops.dropout(g, x, 0.5, true, seed);
 *   // after forward pass, increment: ops.assignAdd(g, stepVar, one, DType.INT64)
 *
 * @param stepHandle VarHandleOp tensor for an int64 global step counter.
 * @param layerId Compile-time unique integer for this dropout layer.
 *                Use 0, 1, 2... for each dropout op in the graph.
 */
export function makeDropoutSeed(g, stepHandle, layerId) {
    // Current value of the int64 step counter.
    const [step64] = g.addOp("ReadVariableOp", [stepHandle], {
        dtype: { kind: "type", value: DType.INT64 },
    });
    // StatelessRandomUniform requires int32 seeds, so narrow the step.
    const [step32] = g.addOp("Cast", [step64], {
        DstT: { kind: "type", value: DType.INT32 },
    });
    // The layer id is baked in as an int32 scalar constant.
    const idBuf = Buffer.allocUnsafe(4);
    idBuf.writeInt32LE(layerId, 0);
    const idT = constant(g, idBuf, [], DType.INT32);
    // Pack [step, layer_id] into a shape-[2] int32 seed tensor.
    const [seed] = g.addOp("Pack", [step32, idT], {
        axis: { kind: "int", value: 0 },
    });
    return seed;
}
171
/**
 * Dropout — applies inverted dropout during training.
 *
 * Inverted dropout keeps the expected activation magnitude constant:
 * kept units are scaled up by 1/(1-rate), dropped units become 0.
 * This means inference code runs unchanged with no scaling needed.
 *
 * @param rate Fraction of units to drop, in [0, 1). 0 = no dropout.
 * @param training If false, returns x unchanged (identity). Rate is still validated.
 * @param seed Optional shape-[2] int32 seed tensor — build one per step/layer
 *             with makeDropoutSeed().
 * @param name Optional name for the output node.
 *
 * ⚠ SEED WARNING:
 * If `seed` is omitted, the mask uses StatelessRandomUniform with the
 * constant seed [0, 0], so the SAME units are dropped on every forward
 * pass — only useful for testing graph construction. For real
 * regularisation, pass a per-step seed from makeDropoutSeed().
 */
export function dropout(g, x, rate, training, seed, name) {
    // Validate rate unconditionally — even when training=false, an out-of-range
    // rate is a caller bug that should surface immediately, not silently pass.
    if (rate < 0 || rate >= 1) {
        throw new RangeError(`dropout rate must be in [0, 1), got ${rate}`);
    }
    if (!training || rate === 0) {
        // Inference path or no dropout: identity pass-through.
        const [t] = g.addOp("Identity", [x], {}, name);
        return t;
    }
    // ── Inverted dropout ────────────────────────────────────────────────────
    //
    //   uniform  = StatelessRandomUniform(shape(x), seed) ∈ [0, 1)
    //   keepMask = uniform >= rate                        → bool
    //   masked   = x * cast(keepMask, float32)
    //   output   = masked * (1 / (1 - rate))              inverted scale
    //
    // Shape of x as a runtime int32 tensor (handles dynamic batch dims).
    const [xShape] = g.addOp("Shape", [x], {
        out_type: { kind: "type", value: DType.INT32 },
    });
    // Use provided seed or fall back to constant [0, 0] (see warning above).
    let seedT;
    if (seed) {
        seedT = seed;
    }
    else {
        const seedBuf = Buffer.allocUnsafe(8);
        seedBuf.writeInt32LE(0, 0);
        seedBuf.writeInt32LE(0, 4);
        seedT = constant(g, seedBuf, [2], DType.INT32);
    }
    // Uniform samples in [0, 1).
    const [uniform] = g.addOp("StatelessRandomUniform", [xShape, seedT], {
        dtype: { kind: "type", value: DType.FLOAT32 },
    });
    // Keep mask: uniform >= rate → bool.
    const rateBuf = Buffer.allocUnsafe(4);
    rateBuf.writeFloatLE(rate, 0);
    const rateT = constant(g, rateBuf, [], DType.FLOAT32);
    const [keepMaskBool] = g.addOp("GreaterEqual", [uniform, rateT], {});
    // Cast bool → float32 (1.0 = keep, 0.0 = drop).
    const [keepMaskFloat] = g.addOp("Cast", [keepMaskBool], {
        DstT: { kind: "type", value: DType.FLOAT32 },
    });
    // Apply mask.
    const [masked] = g.addOp("Mul", [x, keepMaskFloat], {});
    // Scale by 1 / (1 - rate) — inverted dropout keeps E[output] = E[x].
    const scaleBuf = Buffer.allocUnsafe(4);
    scaleBuf.writeFloatLE(1 / (1 - rate), 0);
    const scaleT = constant(g, scaleBuf, [], DType.FLOAT32);
    const [t] = g.addOp("Mul", [masked, scaleT], {}, name);
    return t;
}
242
// ── Convolution ───────────────────────────────────────────────────────────────
/**
 * 2D convolution (NHWC).
 *
 * @param x Input tensor [batch, height, width, in_channels]
 * @param filter Filter tensor [filter_height, filter_width, in_channels, out_channels]
 * @param strides [1, stride_h, stride_w, 1]
 * @param padding "SAME" or "VALID"
 */
export function conv2d(g, x, filter, strides = [1, 1, 1, 1], padding = "SAME", name) {
    const [t] = g.addOp("Conv2D", [x, filter], {
        strides: { kind: "list_int", value: strides },
        // Conv2D's `padding` and `data_format` attrs are STRINGS in the TF op
        // definition ("SAME"/"VALID", "NHWC"/"NCHW"); the previous bool
        // encoding could never round-trip through graph construction.
        padding: { kind: "string", value: padding },
        data_format: { kind: "string", value: "NHWC" },
    }, name);
    return t;
}
259
/**
 * Depthwise 2D convolution.
 * Each input channel is convolved with its own filter of depth channel_multiplier.
 *
 * @param strides [1, stride_h, stride_w, 1]
 * @param padding "SAME" or "VALID"
 */
export function depthwiseConv2d(g, x, filter, strides = [1, 1, 1, 1], padding = "SAME", name) {
    const [t] = g.addOp("DepthwiseConv2dNative", [x, filter], {
        strides: { kind: "list_int", value: strides },
        // `padding` and `data_format` are string attrs in the TF op definition
        // — the previous bool encoding was wrong.
        padding: { kind: "string", value: padding },
        data_format: { kind: "string", value: "NHWC" },
    }, name);
    return t;
}
271
// ── Pooling ───────────────────────────────────────────────────────────────────
/**
 * Max pooling (NHWC).
 *
 * @param ksize [1, pool_h, pool_w, 1]
 * @param strides [1, stride_h, stride_w, 1]
 * @param padding "SAME" or "VALID"
 */
export function maxPool(g, x, ksize = [1, 2, 2, 1], strides = [1, 2, 2, 1], padding = "VALID", name) {
    const [t] = g.addOp("MaxPool", [x], {
        ksize: { kind: "list_int", value: ksize },
        strides: { kind: "list_int", value: strides },
        // `padding` and `data_format` are string attrs in the TF op definition
        // — the previous bool encoding was wrong.
        padding: { kind: "string", value: padding },
        data_format: { kind: "string", value: "NHWC" },
    }, name);
    return t;
}
282
/**
 * Average pooling (NHWC).
 *
 * @param ksize [1, pool_h, pool_w, 1]
 * @param strides [1, stride_h, stride_w, 1]
 * @param padding "SAME" or "VALID"
 */
export function avgPool(g, x, ksize = [1, 2, 2, 1], strides = [1, 2, 2, 1], padding = "VALID", name) {
    const [t] = g.addOp("AvgPool", [x], {
        ksize: { kind: "list_int", value: ksize },
        strides: { kind: "list_int", value: strides },
        // `padding` and `data_format` are string attrs in the TF op definition
        // — the previous bool encoding was wrong.
        padding: { kind: "string", value: padding },
        data_format: { kind: "string", value: "NHWC" },
    }, name);
    return t;
}
292
/** Global average pooling — reduces spatial dims to [batch, channels]. */
export function globalAvgPool(g, x, name) {
    // NHWC layout: height is axis 1, width is axis 2.
    const spatialAxes = [1, 2];
    return mean(g, x, spatialAxes, /* keepDims */ false, name);
}
297
// ── Loss functions ────────────────────────────────────────────────────────────
/**
 * Sparse softmax cross-entropy.
 * labels: int32/int64 class indices [batch]
 * logits: float32 [batch, num_classes]
 * Returns per-example loss [batch].
 */
export function sparseSoftmaxCrossEntropyWithLogits(g, labels, logits, name) {
    // The TF op takes logits first; output 1 (the backprop gradient) is unused.
    const outputs = g.addOp("SparseSoftmaxCrossEntropyWithLogits", [logits, labels], {}, name);
    return outputs[0];
}
308
/**
 * Sigmoid cross-entropy with logits (binary classification).
 * labels: float32 {0, 1} [batch]
 * logits: float32 [batch]
 *
 * Computes max(logits, 0) - logits * labels + log(1 + exp(-abs(logits)))
 * — built from primitives for numerical stability (exp never sees a large
 * positive argument).
 *
 * Fixes: the original also emitted an unused `zero` constant node (dead
 * graph node with no consumers) — removed.
 */
export function sigmoidCrossEntropyWithLogits(g, labels, logits, name) {
    // Scalar 1.0 for the log(1 + exp(...)) term.
    const oneBuf = Buffer.allocUnsafe(4);
    oneBuf.writeFloatLE(1, 0);
    const one = constant(g, oneBuf, [], DType.FLOAT32);
    const [relu_logits] = g.addOp("Relu", [logits], {}); // max(logits, 0)
    const [abs_logits] = g.addOp("Abs", [logits], {});
    const [neg_abs] = g.addOp("Neg", [abs_logits], {});
    const [exp_neg] = g.addOp("Exp", [neg_abs], {});
    const [one_plus] = g.addOp("AddV2", [one, exp_neg], {});
    const [log_part] = g.addOp("Log", [one_plus], {});
    const [prod] = g.addOp("Mul", [logits, labels], {});
    // Untangled from the original nested-addOp one-liner for readability.
    const [diff] = g.addOp("Sub", [relu_logits, prod], {});
    const [t] = g.addOp("AddV2", [diff, log_part], {}, name);
    return t;
}
332
/**
 * L2 loss: 0.5 * sum(t^2).
 * Used as a regularisation term.
 */
export function l2Loss(g, x, name) {
    return g.addOp("L2Loss", [x], {}, name)[0];
}
340
+ //# sourceMappingURL=nn_ops.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"nn_ops.js","sourceRoot":"","sources":["../../../src/ts/ops/nn_ops.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAEvC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EACL,GAAG,EACH,GAAG,EACH,GAAG,EACH,GAAG,EAIH,IAAI,EACJ,IAAI,EACJ,MAAM,GACP,MAAM,eAAe,CAAC;AAEvB,8EAA8E;AAC9E,qCAAqC;AACrC,8EAA8E;AAE9E,gFAAgF;AAEhF,wCAAwC;AACxC,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC3C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,0DAA0D;AAC1D,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,KAAK,GAAG,GAAG,EACX,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,WAAW,EACX,CAAC,CAAC,CAAC,EACH;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE;KACvC,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,qDAAqD;AACrD,MAAM,UAAU,KAAK,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACtD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC5C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,OAAO,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACxD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC9C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,0BAA0B;AAC1B,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC3C,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,OAAO,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACxD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC9C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,qCAAqC;AACrC,MAAM,UAAU,GAAG,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACpD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC1C,OAAO,CAAC,CAAC;AACX,CAAC;AAED,wBAAwB;AACxB,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,
CAAC;IAC3C,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,KAAK,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACtD,OAAO,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,IAAI,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACrD,WAAW;IACX,MAAM,EAAE,GAAG,QAAQ,CACjB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;QAC5B,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,OAAO,CACd,CAAC;IACF,8BAA8B;IAC9B,MAAM,EAAE,GAAG,QAAQ,CACjB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;QAChC,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,OAAO,CACd,CAAC;IACF,MAAM,IAAI,GAAG,QAAQ,CACnB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;QACvB,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,OAAO,CACd,CAAC;IACF,MAAM,GAAG,GAAG,QAAQ,CAClB,CAAC,EACD,CAAC,GAAG,EAAE;QACJ,MAAM,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;QACvB,OAAO,CAAC,CAAC;IACX,CAAC,CAAC,EAAE,EACJ,EAAE,EACF,KAAK,CAAC,OAAO,CACd,CAAC;IAEF,MAAM,EAAE,GAAG,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACnC,MAAM,KAAK,GAAG,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;IACpD,MAAM,SAAS,GAAG,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;IACjC,OAAO,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,CAAC;AAC/D,CAAC;AAED,+EAA+E;AAC/E,MAAM,UAAU,UAAU,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IAC3D,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,iFAAiF;AAEjF;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,KAAa,EACb,MAAc,EACd,KAAa,EACb
,SAAiB,EACjB,UAA8D,EAAE,EAChE,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,kBAAkB,EAClB,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,SAAS,CAAC,EACpC;QACE,OAAO,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,OAAO,IAAI,IAAI,EAAE;QAC1D,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,OAAO;QACpD,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KAC5C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC,CAAC,mEAAmE;AAC/E,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,KAAa,EACb,MAAc,EACd,UAAiD,EAAE,EACnD,IAAa;IAEb,MAAM,GAAG,GAAG,OAAO,CAAC,OAAO,IAAI,IAAI,CAAC;IACpC,qEAAqE;IACrE,0EAA0E;IAC1E,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;IAElC,MAAM,EAAE,GAAG,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC;IACjD,MAAM,IAAI,GAAG,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAC3B,MAAM,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IACrC,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;IAC5B,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;IAEpD,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAC1C,MAAM,KAAK,GAAG,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;IAClC,OAAO,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;AACpD,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,eAAe,CAC7B,CAAQ,EACR,UAAkB,EAClB,OAAe;IAEf,8BAA8B;IAC9B,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,gBAAgB,EAAE,CAAC,UAAU,CAAC,EAAE;QAC1D,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;KAC5C,CAAC,CAAC;IAEH,uEAAuE;IACvE,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,EAAE;QAC/C,IAAI,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;KAC3C,CAAC,CAAC;IAEH,mDAAmD;IACnD,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IACvC,QAAQ,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IAClC,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,EAA
E,QAAQ,EAAE,EAAE,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;IAEtD,2CAA2C;IAC3C,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE;QAClD,IAAI,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE;KAChC,CAAC,CAAC;IACH,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;;;;;;;;;;;GAgBG;AACH,MAAM,UAAU,OAAO,CACrB,CAAQ,EACR,CAAS,EACT,IAAY,EACZ,QAAiB,EACjB,IAAa,EACb,IAAa;IAEb,4EAA4E;IAC5E,2EAA2E;IAC3E,IAAI,IAAI,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,EAAE,CAAC;QAC1B,MAAM,IAAI,UAAU,CAAC,uCAAuC,IAAI,EAAE,CAAC,CAAC;IACtE,CAAC;IAED,IAAI,CAAC,QAAQ,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;QAC5B,uDAAuD;QACvD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;QAC/C,OAAO,CAAC,CAAC;IACX,CAAC;IAED,2EAA2E;IAC3E,EAAE;IACF,sEAAsE;IACtE,sEAAsE;IACtE,2CAA2C;IAC3C,6EAA6E;IAE7E,oEAAoE;IACpE,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE;QACrC,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;KAC/C,CAAC,CAAC;IAEH,oDAAoD;IACpD,IAAI,KAAa,CAAC;IAClB,IAAI,IAAI,EAAE,CAAC;QACT,KAAK,GAAG,IAAI,CAAC;IACf,CAAC;SAAM,CAAC;QACN,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QACtC,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC3B,KAAK,GAAG,QAAQ,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;IACjD,CAAC;IAED,6BAA6B;IAC7B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,wBAAwB,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE;QACnE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE;KAC9C,CAAC,CAAC;IAEH,oCAAoC;IACpC,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IACtC,OAAO,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;IAC9B,MAAM,KAAK,GAAG,QAAQ,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;IACtD,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,cAAc,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC;IAErE,+CAA+C;IAC/C,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,EAAE;QACtD,IAAI,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE;KA
C7C,CAAC,CAAC;IAEH,aAAa;IACb,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,EAAE,aAAa,CAAC,EAAE,EAAE,CAAC,CAAC;IAExD,oEAAoE;IACpE,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IACvC,QAAQ,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;IACzC,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;IACxD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACvD,OAAO,CAAC,CAAC;AACX,CAAC;AAED,iFAAiF;AAEjF;;;;;;;GAOG;AACH,MAAM,UAAU,MAAM,CACpB,CAAQ,EACR,CAAS,EACT,MAAc,EACd,UAA4C,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EACxD,UAA4B,MAAM,EAClC,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,QAAQ,EACR,CAAC,CAAC,EAAE,MAAM,CAAC,EACX;QACE,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE;QAC7C,OAAO,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,MAAM,EAAE;QACpD,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,OAAO;KACrD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAC7B,CAAQ,EACR,CAAS,EACT,MAAc,EACd,UAA4C,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EACxD,UAA4B,MAAM,EAClC,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,uBAAuB,EACvB,CAAC,CAAC,EAAE,MAAM,CAAC,EACX;QACE,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE;QAC7C,OAAO,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,MAAM,EAAE;QACpD,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,OAAO;KACrD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,iFAAiF;AAEjF,mBAAmB;AACnB,MAAM,UAAU,OAAO,CACrB,CAAQ,EACR,CAAS,EACT,QAA0C,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EACtD,UAA4C,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EACxD,UAA4B,OAAO,EACnC,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,SAAS,EACT,CAAC,CAAC,CAAC,EACH;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE;QACzC,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE;QAC7C,OAAO,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,MAAM,
EAAE;QACpD,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KAC5C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,uBAAuB;AACvB,MAAM,UAAU,OAAO,CACrB,CAAQ,EACR,CAAS,EACT,QAA0C,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EACtD,UAA4C,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EACxD,UAA4B,OAAO,EACnC,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,SAAS,EACT,CAAC,CAAC,CAAC,EACH;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE;QACzC,OAAO,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,KAAK,EAAE,OAAO,EAAE;QAC7C,OAAO,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,MAAM,EAAE;QACpD,WAAW,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;KAC5C,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,0EAA0E;AAC1E,MAAM,UAAU,aAAa,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IAC9D,+CAA+C;IAC/C,OAAO,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;AACxD,CAAC;AAED,iFAAiF;AAEjF;;;;;GAKG;AACH,MAAM,UAAU,mCAAmC,CACjD,CAAQ,EACR,MAAc,EACd,MAAc,EACd,IAAa;IAEb,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CACpB,qCAAqC,EACrC,CAAC,MAAM,EAAE,MAAM,CAAC,EAChB,EAAE,EACF,IAAI,CACL,CAAC;IACF,OAAO,IAAI,CAAC,CAAC,0DAA0D;AACzE,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,6BAA6B,CAC3C,CAAQ,EACR,MAAc,EACd,MAAc,EACd,IAAa;IAEb,gEAAgE;IAChE,iDAAiD;IACjD,MAAM,OAAO,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IACtC,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC3B,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;IACrD,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IACrC,MAAM,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC1B,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,EAAE,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;IAEnD,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC;IACpD,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC;IAClD,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,UAAU,CAAC,EAAE,EAAE,CAAC,CAAC;IACnD,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,OAAO,C
AAC,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC;IACxD,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC,CAAC;IAClD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC;IACpD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,OAAO,EACP,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,WAAW,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,EACtD,EAAE,EACF,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,MAAM,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACvD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC7C,OAAO,CAAC,CAAC;AACX,CAAC"}
@@ -0,0 +1,128 @@
1
+ import type { Tensor, Shape } from "@isidorus/core";
2
+ import { DType } from "@isidorus/core";
3
+ import type { Graph } from "../graph.js";
4
+ /**
5
+ * Variable handle — a VarHandleOp that identifies a resource variable.
6
+ *
7
+ * The handle itself holds no data. Use initializer() to set the initial
8
+ * value, readVariable() to read it, and assignVariable() to update it.
9
+ *
10
+ * @param shape Shape of the variable (null dims = dynamic)
11
+ * @param dtype Element type
12
+ * @param varName Logical name for the variable (used in checkpoints)
13
+ */
14
+ export declare function variable(g: Graph, shape: Shape, dtype: DType, varName: string): Tensor;
15
+ /**
16
+ * readVariable — emits a ReadVariableOp to read the current value.
17
+ * The returned Tensor can be used as input to any op.
18
+ */
19
+ export declare function readVariable(g: Graph, handle: Tensor, dtype: DType, name?: string): Tensor;
20
+ /**
21
+ * assignVariable — adds an AssignVariableOp that writes `value` into
22
+ * the variable identified by `handle`.
23
+ *
24
+ * Returns the op name — pass it as a target in sess.run() to execute
25
+ * the assignment as a side-effect.
26
+ *
27
+ * @example
28
+ * const updateOp = ops.assignVariable(g, wHandle, newW, DType.FLOAT32);
29
+ * await sess.run(feeds, [], [updateOp]);
30
+ */
31
+ export declare function assignVariable(g: Graph, handle: Tensor, value: Tensor, dtype: DType, name?: string): string;
32
+ /**
33
+ * assignAdd — adds an AssignAddVariableOp: variable += delta.
34
+ * Returns the op name.
35
+ */
36
+ export declare function assignAdd(g: Graph, handle: Tensor, delta: Tensor, dtype: DType, name?: string): string;
37
+ /**
38
+ * assignSub — adds an AssignSubVariableOp: variable -= delta.
39
+ * Returns the op name.
40
+ */
41
+ export declare function assignSub(g: Graph, handle: Tensor, delta: Tensor, dtype: DType, name?: string): string;
42
+ /**
43
+ * zerosInitializer — creates a constant zero tensor matching `shape` and `dtype`.
44
+ * Use with initializer() to zero-initialise a variable.
45
+ */
46
+ export declare function zerosInitializer(g: Graph, shape: number[], dtype: DType, name?: string): Tensor;
47
+ /**
48
+ * onesInitializer — constant one tensor.
49
+ */
50
+ export declare function onesInitializer(g: Graph, shape: number[], dtype: DType, name?: string): Tensor;
51
+ /**
52
+ * truncatedNormalInitializer — samples from a truncated normal distribution.
53
+ * Values more than 2 stddevs from the mean are resampled.
54
 + * This is He-style initialisation when stddev = sqrt(2/fan_in)
55
+ *
56
+ * NOTE: TF's TruncatedNormal op produces a different value on every run
57
+ * unless a fixed seed is provided. For reproducible initialisation, run
58
+ * the init op once and save the checkpoint.
59
+ */
60
+ export declare function truncatedNormalInitializer(g: Graph, shape: number[], dtype?: DType, options?: {
61
+ mean?: number;
62
+ stddev?: number;
63
+ seed?: number;
64
+ }, name?: string): Tensor;
65
+ /**
66
+ * glorotUniformInitializer — samples from Uniform(-limit, limit)
67
+ * where limit = sqrt(6 / (fan_in + fan_out)).
68
+ *
69
+ * For a weight matrix [fan_in, fan_out]:
70
+ * fan_in = shape[0]
71
+ * fan_out = shape[1]
72
+ */
73
+ export declare function glorotUniformInitializer(g: Graph, shape: [number, number], // [fan_in, fan_out]
74
+ dtype?: DType, name?: string): Tensor;
75
+ /**
76
+ * variableWithInit — creates a variable handle and its initialisation op
77
+ * in one call.
78
+ *
79
+ * @returns { handle, initOp }
80
+ * handle: the VarHandleOp tensor (use with readVariable / assignVariable)
81
+ * initOp: op name to pass as a target in sess.run() to initialise
82
+ *
83
+ * @example
84
+ * const { handle: w, initOp: wInit } = ops.variableWithInit(
85
+ * g, [784, 128], DType.FLOAT32, "weights",
86
+ * ops.glorotUniformInitializer(g, [784, 128])
87
+ * );
88
+ * const { handle: b, initOp: bInit } = ops.variableWithInit(
89
+ * g, [128], DType.FLOAT32, "bias",
90
+ * ops.zerosInitializer(g, [128], DType.FLOAT32)
91
+ * );
92
+ *
93
+ * // Run init once before first training step:
94
+ * await sess.run([], [], [wInit, bInit]);
95
+ */
96
+ export declare function variableWithInit(g: Graph, shape: Shape, dtype: DType, varName: string, initialValue: Tensor): {
97
+ handle: Tensor;
98
+ initOp: string;
99
+ };
100
+ /**
101
+ * globalVariablesInitializer — groups all init ops into a single NoOp target.
102
+ *
103
+ * @param initOps Array of init op names returned by variableWithInit()
104
+ * @returns Op name to pass as a single target to sess.run()
105
+ *
106
+ * @example
107
+ * const initAll = ops.globalVariablesInitializer(g, [wInit, bInit]);
108
+ * await sess.run([], [], [initAll]);
109
+ */
110
+ export declare function globalVariablesInitializer(g: Graph, initOps: string[], name?: string): string;
111
+ /**
112
+ * applyGradientDescent — w -= lr * grad.
113
+ * The simplest parameter update step.
114
+ */
115
+ export declare function applyGradientDescent(g: Graph, handle: Tensor, lr: Tensor, // scalar learning rate
116
+ grad: Tensor, dtype: DType, name?: string): string;
117
+ /**
118
+ * applyAdam — one Adam parameter update step.
119
+ *
120
+ * Requires four variable handles: var, m (first moment), v (second moment),
121
+ * and beta1_power, beta2_power scalars (updated separately).
122
+ */
123
+ export declare function applyAdam(g: Graph, handle: Tensor, mHandle: Tensor, vHandle: Tensor, beta1Power: Tensor, beta2Power: Tensor, lr: Tensor, beta1: Tensor, beta2: Tensor, epsilon: Tensor, grad: Tensor, dtype: DType, name?: string): string;
124
+ /**
125
+ * applyRMSProp — one RMSProp parameter update step.
126
+ */
127
+ export declare function applyRMSProp(g: Graph, handle: Tensor, msHandle: Tensor, momHandle: Tensor, lr: Tensor, rho: Tensor, momentum: Tensor, epsilon: Tensor, grad: Tensor, dtype: DType, name?: string): string;
128
+ //# sourceMappingURL=variable_ops.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"variable_ops.d.ts","sourceRoot":"","sources":["../../../src/ts/ops/variable_ops.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AACpD,OAAO,EAAE,KAAK,EAAa,MAAM,gBAAgB,CAAC;AAClD,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AAyBzC;;;;;;;;;GASG;AACH,wBAAgB,QAAQ,CACtB,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,KAAK,EACZ,OAAO,EAAE,MAAM,GACd,MAAM,CAaR;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAUR;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,cAAc,CAC5B,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAWR;AAED;;;GAGG;AACH,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAWR;AAED;;;GAGG;AACH,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAWR;AAID;;;GAGG;AACH,wBAAgB,gBAAgB,CAC9B,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,MAAM,EAAE,EACf,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAcR;AAED;;GAEG;AACH,wBAAgB,eAAe,CAC7B,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,MAAM,EAAE,EACf,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAKR;AAED;;;;;;;;GAQG;AACH,wBAAgB,0BAA0B,CACxC,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,MAAM,EAAE,EACf,KAAK,GAAE,KAAqB,EAC5B,OAAO,GAAE;IAAE,IAAI,CAAC,EAAE,MAAM,CAAC;IAAC,MAAM,CAAC,EAAE,MAAM,CAAC;IAAC,IAAI,CAAC,EAAE,MAAM,CAAA;CAAO,EAC/D,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAkBR;AAED;;;;;;;GAOG;AACH,wBAAgB,wBAAwB,CACtC,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,oBAAoB;AAC7C,KAAK,GAAE,KAAqB,EAC5B,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CASR;AAID;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAgB,gBAAgB,CAC9B,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,KAAK,EACZ,OAAO,EAAE,MAAM,EACf,YAAY,EAAE,MAAM,GACnB;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAUpC;AAED;;;;;;;;;GASG;AACH,wBAAgB,0BAA0B,CACxC,CAAC,EAAE,KAAK,EACR,OAAO,EAAE,MAAM,EAAE,EACjB
,IAAI,SAAuB,GAC1B,MAAM,CAeR;AAUD;;;GAGG;AACH,wBAAgB,oBAAoB,CAClC,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,EAAE,EAAE,MAAM,EAAE,uBAAuB;AACnC,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAYR;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,MAAM,EACf,UAAU,EAAE,MAAM,EAClB,UAAU,EAAE,MAAM,EAClB,EAAE,EAAE,MAAM,EACV,KAAK,EAAE,MAAM,EACb,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAwBR;AAED;;GAEG;AACH,wBAAgB,YAAY,CAC1B,CAAC,EAAE,KAAK,EACR,MAAM,EAAE,MAAM,EACd,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,EAAE,EAAE,MAAM,EACV,GAAG,EAAE,MAAM,EACX,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAYR"}