tensorgrad 0.0.14 → 0.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +154 -170
- package/dist/index.js +2208 -39
- package/dist/index.js.map +7 -1
- package/dist/worker.debug.js +553 -0
- package/package.json +60 -58
- package/src/adam.ts +69 -15
- package/src/compile.ts +334 -154
- package/src/index.ts +8 -4
- package/src/module.ts +72 -34
- package/src/runtime.ts +64 -11
- package/src/worker-protocol.ts +183 -0
- package/src/worker-proxy.ts +76 -0
- package/src/worker.ts +281 -0
- package/dist/adam.js +0 -111
- package/dist/adam.js.map +0 -1
- package/dist/buffers.js +0 -120
- package/dist/buffers.js.map +0 -1
- package/dist/capture.js +0 -33
- package/dist/capture.js.map +0 -1
- package/dist/codegen.js +0 -724
- package/dist/codegen.js.map +0 -1
- package/dist/compile.js +0 -180
- package/dist/compile.js.map +0 -1
- package/dist/grad.js +0 -380
- package/dist/grad.js.map +0 -1
- package/dist/ir.js +0 -60
- package/dist/ir.js.map +0 -1
- package/dist/module.js +0 -155
- package/dist/module.js.map +0 -1
- package/dist/nn.js +0 -135
- package/dist/nn.js.map +0 -1
- package/dist/ops.js +0 -326
- package/dist/ops.js.map +0 -1
- package/dist/runtime.js +0 -375
- package/dist/runtime.js.map +0 -1
- package/dist/shape.js +0 -259
- package/dist/shape.js.map +0 -1
- package/dist/trace.js +0 -100
- package/dist/trace.js.map +0 -1
package/dist/ops.js
DELETED
|
@@ -1,326 +0,0 @@
|
|
|
1
|
-
// User-facing op surface.
|
|
2
|
-
//
|
|
3
|
-
// Each function here is a thin wrapper:
|
|
4
|
-
// 1. capture the call site (for error attribution)
|
|
5
|
-
// 2. validate input shapes via src/shape.ts (which throws on mismatch)
|
|
6
|
-
// 3. compute the output shape and dtype
|
|
7
|
-
// 4. append the op to the current Graph (held in module state by src/trace.ts)
|
|
8
|
-
// 5. return the produced Tensor handle
|
|
9
|
-
//
|
|
10
|
-
// No actual numeric work happens here. These calls just build the IR.
|
|
11
|
-
import { addOp, captureSite } from './ir.js';
|
|
12
|
-
import { currentGraph } from './trace.js';
|
|
13
|
-
import { inferElementwiseBinop, inferUnary, inferMeanLast, inferSumLast, inferReshape, inferTranspose, inferMatmul, inferMatmulBatched, inferOneHot, inferWhereCausal, inferSliceLastRange, inferBroadcastTo, inferSumToShape, inferReluGrad, inferWhere, ShapeError, showShape, } from './shape.js';
|
|
14
|
-
// ----------------------------------------------------------------------------
|
|
15
|
-
// Element-wise binops (add/sub/mul/div). Trailing-suffix broadcast.
|
|
16
|
-
// ----------------------------------------------------------------------------
|
|
17
|
-
/**
|
|
18
|
-
* Build an element-wise binop op (forward declaration only — appends to the
|
|
19
|
-
* graph). Used by both arithmetic ops (add/sub/mul/div, output dtype = input
|
|
20
|
-
* dtype) and comparisons (less/greater, output dtype = bool).
|
|
21
|
-
*/
|
|
22
|
-
/**
 * Shared builder for element-wise binary ops. Appends one op to the current
 * graph (forward declaration only — no numeric work). Used by arithmetic ops
 * (output dtype defaults to the left input's dtype) and by comparisons,
 * which pass an explicit 'bool' output dtype.
 */
function binopOp(name, kind, a, b, outDtype = a.dtype) {
    const callSite = captureSite(name);
    if (a.dtype !== b.dtype) {
        throw new ShapeError(`${name}: dtype mismatch (${a.dtype} vs ${b.dtype})`, callSite);
    }
    const shape = inferElementwiseBinop(name, a.shape, b.shape, callSite);
    return addOp(currentGraph(), kind, shape, outDtype, callSite, { a: a.id, b: b.id });
}
|
|
29
|
-
// Element-wise binops. Second arg can be a Tensor or a JS number; the latter
|
|
30
|
-
// dispatches to scalar-fused IR ops internally. `mul(x, 2)` and `mul(x, y)`
|
|
31
|
-
// both work — matches every NumPy-shaped library.
|
|
32
|
-
/** Element-wise add. `b` may be a Tensor or a JS number (scalar-fused op). */
export function add(a, b) {
    if (typeof b === 'number') {
        return addScalar(a, b);
    }
    return binopOp('add', 'add', a, b);
}
/** Element-wise subtract. A number `b` routes to `addScalar(a, -b)`. */
export function sub(a, b) {
    if (typeof b === 'number') {
        return addScalar(a, -b);
    }
    return binopOp('sub', 'sub', a, b);
}
/** Element-wise multiply. `b` may be a Tensor or a JS number. */
export function mul(a, b) {
    if (typeof b === 'number') {
        return mulScalar(a, b);
    }
    return binopOp('mul', 'mul', a, b);
}
/** Element-wise divide. A number `b` becomes `mulScalar(a, 1/b)`; zero is rejected. */
export function div(a, b) {
    if (typeof b !== 'number') {
        return binopOp('div', 'div', a, b);
    }
    if (b === 0)
        throw new ShapeError(`div: scalar divisor cannot be zero`, captureSite('div'));
    return mulScalar(a, 1 / b);
}
|
|
49
|
-
// ----------------------------------------------------------------------------
|
|
50
|
-
// Element-wise scalar binops (mul/add by JS number). Used for things like
|
|
51
|
-
// `scores * (1/sqrt(d))` and `logits + 1e-5` where allocating a 0-d tensor
|
|
52
|
-
// for the scalar is wasteful.
|
|
53
|
-
// ----------------------------------------------------------------------------
|
|
54
|
-
/** Multiply every element of `a` by a JS number (fused, no 0-d tensor). */
export function mulScalar(a, scalar) {
    const site = captureSite('mulScalar');
    const graph = currentGraph();
    return addOp(graph, 'mul_scalar', a.shape, a.dtype, site, { a: a.id, scalar });
}
/** Add a JS number to every element of `a` (fused, no 0-d tensor). */
export function addScalar(a, scalar) {
    const site = captureSite('addScalar');
    const graph = currentGraph();
    return addOp(graph, 'add_scalar', a.shape, a.dtype, site, { a: a.id, scalar });
}
|
|
62
|
-
// ----------------------------------------------------------------------------
|
|
63
|
-
// Unary ops.
|
|
64
|
-
// ----------------------------------------------------------------------------
|
|
65
|
-
/** Shared builder for shape-preserving f32 unary ops (sqrt/rsqrt/log/exp/relu). */
function unary(name, a) {
    const site = captureSite(name);
    if (a.dtype !== 'f32') {
        throw new ShapeError(`${name}: requires f32, got ${a.dtype}`, site);
    }
    const outShape = inferUnary(name, a.shape, site);
    return addOp(currentGraph(), name, outShape, 'f32', site, { a: a.id });
}
export const sqrt = (a) => unary('sqrt', a);
export const rsqrt = (a) => unary('rsqrt', a);
export const log = (a) => unary('log', a);
export const exp = (a) => unary('exp', a);
export const relu = (a) => unary('relu', a);
|
|
76
|
-
// ----------------------------------------------------------------------------
|
|
77
|
-
// Reductions over the last axis. To reduce along other axes, transpose first.
|
|
78
|
-
// (This is intentional — keeps codegen and autograd small.)
|
|
79
|
-
// ----------------------------------------------------------------------------
|
|
80
|
-
/** Mean over the last axis. f32 only; the reduced axis is dropped. */
export function meanLast(a) {
    const site = captureSite('meanLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`meanLast: requires f32, got ${a.dtype}`, site);
    }
    const shape = inferMeanLast('meanLast', a.shape, site);
    return addOp(currentGraph(), 'mean_last', shape, a.dtype, site, { a: a.id });
}
/** Sum over the last axis. f32 only; the reduced axis is dropped. */
export function sumLast(a) {
    const site = captureSite('sumLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`sumLast: requires f32, got ${a.dtype}`, site);
    }
    const shape = inferSumLast('sumLast', a.shape, site);
    return addOp(currentGraph(), 'sum_last', shape, a.dtype, site, { a: a.id });
}
/** Reduce all elements to a 0-d scalar. Composes `reshape([-1])` + `sumLast`. */
export function sumAll(a) {
    const flat = reshape(a, [-1]);
    return sumLast(flat);
}
|
|
98
|
-
// ----------------------------------------------------------------------------
|
|
99
|
-
// Shape ops.
|
|
100
|
-
// ----------------------------------------------------------------------------
|
|
101
|
-
/** Reshape `a` to `newShape` (-1 wildcard resolved by the shape checker;
 * the resolved shape, not the raw request, is recorded on the op). */
export function reshape(a, newShape) {
    const site = captureSite('reshape');
    const resolved = inferReshape('reshape', a.shape, newShape, site);
    return addOp(currentGraph(), 'reshape', resolved, a.dtype, site, { a: a.id, newShape: resolved });
}
/** Permute the axes of `a` by `perm` (validated against the rank). */
export function transpose(a, perm) {
    const site = captureSite('transpose');
    const permuted = inferTranspose('transpose', a.shape, perm, site);
    return addOp(currentGraph(), 'transpose', permuted, a.dtype, site, { a: a.id, perm });
}
|
|
111
|
-
/**
 * Swap two axes of a tensor; all other axes keep their positions. Negative
 * indices count from the end, so `swapAxes(x, -1, -2)` swaps the last two —
 * the common attention pattern. Implemented as `transpose` with an identity
 * permutation whose entries at the two axes are exchanged. Swapping an axis
 * with itself is a no-op and returns `a` unchanged.
 */
export function swapAxes(a, axis1, axis2) {
    const rank = a.shape.length;
    const resolve = (axis) => (axis < 0 ? rank + axis : axis);
    const i1 = resolve(axis1);
    const i2 = resolve(axis2);
    const site = captureSite('swapAxes');
    const inRange = (i) => i >= 0 && i < rank;
    if (!inRange(i1) || !inRange(i2)) {
        throw new ShapeError(`swapAxes: axis out of range — got (${axis1}, ${axis2}) for rank-${rank} tensor`, site);
    }
    if (i1 === i2) {
        return a;
    }
    const perm = [];
    for (let k = 0; k < rank; k++) {
        perm.push(k);
    }
    perm[i1] = i2;
    perm[i2] = i1;
    return transpose(a, perm);
}
|
|
131
|
-
// ----------------------------------------------------------------------------
|
|
132
|
-
// Linear algebra.
|
|
133
|
-
// ----------------------------------------------------------------------------
|
|
134
|
-
/** 2-d matrix multiply. Both inputs must be f32; shapes checked by inferMatmul. */
export function matmul(a, b) {
    const site = captureSite('matmul');
    if (a.dtype !== 'f32' || b.dtype !== 'f32') {
        throw new ShapeError(`matmul: requires f32, got ${a.dtype} and ${b.dtype}`, site);
    }
    const shape = inferMatmul('matmul', a.shape, b.shape, site);
    return addOp(currentGraph(), 'matmul', shape, 'f32', site, { a: a.id, b: b.id });
}
/** Batched matrix multiply. Both inputs must be f32; shapes checked by inferMatmulBatched. */
export function matmulBatched(a, b) {
    const site = captureSite('matmulBatched');
    if (a.dtype !== 'f32' || b.dtype !== 'f32') {
        throw new ShapeError(`matmulBatched: requires f32, got ${a.dtype} and ${b.dtype}`, site);
    }
    const shape = inferMatmulBatched('matmulBatched', a.shape, b.shape, site);
    return addOp(currentGraph(), 'matmul_batched', shape, 'f32', site, { a: a.id, b: b.id });
}
|
|
150
|
-
// ----------------------------------------------------------------------------
|
|
151
|
-
// Indexing / casting.
|
|
152
|
-
// ----------------------------------------------------------------------------
|
|
153
|
-
/** One-hot encode i32 `indices` into a trailing axis of length `depth`.
 * Output dtype defaults to f32. */
export function oneHot(indices, depth, dtype = 'f32') {
    const site = captureSite('oneHot');
    if (indices.dtype !== 'i32') {
        throw new ShapeError(`oneHot: indices must be i32, got ${indices.dtype}`, site);
    }
    const shape = inferOneHot('oneHot', indices.shape, depth, site);
    return addOp(currentGraph(), 'one_hot', shape, dtype, site, { indices: indices.id, depth, dtype });
}
|
|
161
|
-
/**
 * Embedding lookup: select rows of `table` by `indices`. `table` is
 * `[vocab, dim]`; `indices` is any-shape i32; the result is `[..., dim]`.
 * Decomposed as `oneHot(indices, vocab) @ table` so the existing matmul
 * transpose rule gives the backward pass for free — no dedicated
 * scatter-with-atomic-add kernel needed. The vocab size comes from
 * `table.shape[0]`.
 */
export function embedding(table, indices) {
    const site = captureSite('embedding');
    if (table.shape.length !== 2) {
        throw new ShapeError(`embedding: table must be 2-d [vocab, dim], got ${showShape(table.shape)}`, site);
    }
    if (indices.dtype !== 'i32') {
        throw new ShapeError(`embedding: indices must be i32, got ${indices.dtype}`, site);
    }
    const vocab = table.shape[0];
    return matmul(oneHot(indices, vocab, 'f32'), table);
}
|
|
176
|
-
/** arange(n) → a [n] tensor of values [0, 1, ..., n-1] (default i32).
 * Used for position embeddings. `n` must be a positive integer. */
export function arange(n, dtype = 'i32') {
    const site = captureSite('arange');
    if (!Number.isInteger(n) || n <= 0) {
        throw new ShapeError(`arange: n must be a positive integer, got ${n}`, site);
    }
    return addOp(currentGraph(), 'arange', [n], dtype, site, { n, dtype });
}
|
|
184
|
-
// ----------------------------------------------------------------------------
|
|
185
|
-
// ML primitives. Fused so autograd's transpose rule is straightforward and the
|
|
186
|
-
// kernels can be hand-tuned for our specific shapes.
|
|
187
|
-
// ----------------------------------------------------------------------------
|
|
188
|
-
// Causal-masked softmax along the last axis. Shape preserved. Last two axes
|
|
189
|
-
// must be square (TxT attention scores).
|
|
190
|
-
/** Causal-masked softmax along the last axis (fused mask + softmax).
 * Shape-preserving; the last two axes must be square (TxT attention scores). */
export function softmaxCausalLast(a) {
    const site = captureSite('softmaxCausalLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`softmaxCausalLast: requires f32, got ${a.dtype}`, site);
    }
    // Validation only — checks the last two axes are square; output shape = input shape.
    inferWhereCausal('softmaxCausalLast', a.shape, site);
    return addOp(currentGraph(), 'softmax_causal_last', a.shape, 'f32', site, { a: a.id });
}
|
|
197
|
-
// Numerically-stable log-softmax along the last axis. Shape preserved.
|
|
198
|
-
/** Numerically-stable log-softmax along the last axis. Shape-preserving; f32 only. */
export function logSoftmaxLast(a) {
    const site = captureSite('logSoftmaxLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`logSoftmaxLast: requires f32, got ${a.dtype}`, site);
    }
    return addOp(currentGraph(), 'log_softmax_last', a.shape, 'f32', site, { a: a.id });
}
|
|
204
|
-
// Pre-softmax causal mask. Sets cells where (i < j) on the last two axes to
|
|
205
|
-
// `fillValue` (typically -1e30). Lower-triangle entries pass through.
|
|
206
|
-
// Use this when you want the masked scores explicitly (e.g. for capture);
|
|
207
|
-
// for the common case, prefer softmaxCausalLast which fuses both.
|
|
208
|
-
/** Pre-softmax causal mask: cells with (i < j) on the last two axes are set
 * to `fillValue` (typically -1e30); the lower triangle passes through.
 * Prefer `softmaxCausalLast` for the common fused case. */
export function whereCausal(a, fillValue) {
    const site = captureSite('whereCausal');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`whereCausal: requires f32, got ${a.dtype}`, site);
    }
    // Validation only — last two axes must be square; output shape = input shape.
    inferWhereCausal('whereCausal', a.shape, site);
    return addOp(currentGraph(), 'where_causal', a.shape, 'f32', site, { a: a.id, fillValue });
}
|
|
215
|
-
// ----------------------------------------------------------------------------
|
|
216
|
-
// Slicing.
|
|
217
|
-
// ----------------------------------------------------------------------------
|
|
218
|
-
// sliceLastRange(a, start, end): slice [start, end) along the last axis.
|
|
219
|
-
// Used for splitting Q/K/V from a fused QKV matmul.
|
|
220
|
-
/** Slice the half-open range [start, end) along the last axis.
 * Used for splitting Q/K/V out of a fused QKV matmul. */
export function sliceLastRange(a, start, end) {
    const site = captureSite('sliceLastRange');
    const shape = inferSliceLastRange('sliceLastRange', a.shape, start, end, site);
    return addOp(currentGraph(), 'slice_last_range', shape, a.dtype, site, { a: a.id, start, end });
}
|
|
225
|
-
// ----------------------------------------------------------------------------
|
|
226
|
-
// Broadcast / un-broadcast. Mostly used by autograd, but exposed in case user
|
|
227
|
-
// code needs them (e.g. explicit broadcasting for clarity).
|
|
228
|
-
// ----------------------------------------------------------------------------
|
|
229
|
-
/** Broadcast `a` up to `targetShape` (validated; mostly used by autograd). */
export function broadcastTo(a, targetShape) {
    const site = captureSite('broadcastTo');
    // Validation only — targetShape itself is the output shape.
    inferBroadcastTo('broadcastTo', a.shape, targetShape, site);
    const graph = currentGraph();
    return addOp(graph, 'broadcast_to', targetShape, a.dtype, site, { a: a.id, targetShape });
}
/** Sum `a` down to `targetShape` — the un-broadcast used by autograd. */
export function sumToShape(a, targetShape) {
    const site = captureSite('sumToShape');
    // Validation only — targetShape itself is the output shape.
    inferSumToShape('sumToShape', a.shape, targetShape, site);
    const graph = currentGraph();
    return addOp(graph, 'sum_to_shape', targetShape, a.dtype, site, { a: a.id, targetShape });
}
|
|
239
|
-
// ----------------------------------------------------------------------------
|
|
240
|
-
// Constants.
|
|
241
|
-
// ----------------------------------------------------------------------------
|
|
242
|
-
// 0-d tensor with a constant value. Used by autograd to seed the loss cotangent.
|
|
243
|
-
/** 0-d constant tensor. Autograd uses this to seed the loss cotangent. */
export function constScalar(value, dtype = 'f32') {
    const site = captureSite('constScalar');
    const graph = currentGraph();
    return addOp(graph, 'const_scalar', [], dtype, site, { value, dtype });
}
|
|
247
|
-
// ----------------------------------------------------------------------------
|
|
248
|
-
// Autograd-internal helpers (exposed for users writing custom transpose rules).
|
|
249
|
-
// ----------------------------------------------------------------------------
|
|
250
|
-
// ----------------------------------------------------------------------------
|
|
251
|
-
// Comparisons and selection.
|
|
252
|
-
// ----------------------------------------------------------------------------
|
|
253
|
-
// Comparisons reuse the binop helper but return bool.
|
|
254
|
-
// Comparisons reuse the shared binop builder but force a bool output dtype.
export const less = (a, b) => binopOp('less', 'less', a, b, 'bool');
export const greater = (a, b) => binopOp('greater', 'greater', a, b, 'bool');
/** Element-wise select: where `cond` is true take `a`, else `b`.
 * `cond` must be bool; `a` and `b` must share a dtype. */
export function where(cond, a, b) {
    const site = captureSite('where');
    if (cond.dtype !== 'bool') {
        throw new ShapeError(`where: cond must be bool, got ${cond.dtype}`, site);
    }
    if (a.dtype !== b.dtype) {
        throw new ShapeError(`where: a/b dtype mismatch (${a.dtype} vs ${b.dtype})`, site);
    }
    const shape = inferWhere('where', cond.shape, a.shape, b.shape, site);
    return addOp(currentGraph(), 'where', shape, a.dtype, site, { cond: cond.id, a: a.id, b: b.id });
}
|
|
266
|
-
// reluGrad(x, dy) = dy where x > 0, else 0. Same shape as x. This is the
|
|
267
|
-
// transpose rule for relu, exposed as an op so codegen can emit it.
|
|
268
|
-
/** reluGrad(x, dy) = dy where x > 0, else 0; same shape as `x`. This is the
 * transpose rule for relu, exposed as its own op so codegen can emit it. */
export function reluGrad(x, dy) {
    const site = captureSite('reluGrad');
    if (x.dtype !== 'f32' || dy.dtype !== 'f32') {
        throw new ShapeError(`reluGrad: requires f32, got ${x.dtype} and ${dy.dtype}`, site);
    }
    const shape = inferReluGrad('reluGrad', x.shape, dy.shape, site);
    return addOp(currentGraph(), 'relu_grad', shape, 'f32', site, { x: x.id, dy: dy.id });
}
|
|
276
|
-
// ----------------------------------------------------------------------------
|
|
277
|
-
// Adam-fused ops. Each does its full per-element update in one kernel.
|
|
278
|
-
// ----------------------------------------------------------------------------
|
|
279
|
-
/** True iff two shapes have identical rank and per-axis extents. Shared by
 * the Adam moment updates, which previously duplicated this check inline. */
function sameShape(x, y) {
    return x.length === y.length && x.every((d, i) => d === y[i]);
}
/**
 * Fused Adam first-moment update: appends one `adam_update_m` op.
 * @param m  f32 first-moment tensor.
 * @param g  f32 gradient tensor, same shape as `m`.
 * @param b1 beta1 literal, baked into the kernel params.
 */
export function adamUpdateM(m, g, b1) {
    const site = captureSite('adamUpdateM');
    if (m.dtype !== 'f32' || g.dtype !== 'f32')
        throw new ShapeError(`adamUpdateM: requires f32`, site);
    if (!sameShape(m.shape, g.shape)) {
        throw new ShapeError(`adamUpdateM: shape mismatch`, site);
    }
    return addOp(currentGraph(), 'adam_update_m', m.shape, 'f32', site, { m: m.id, g: g.id, b1 });
}
/**
 * Fused Adam second-moment update: appends one `adam_update_v` op.
 * @param v  f32 second-moment tensor.
 * @param g  f32 gradient tensor, same shape as `v`.
 * @param b2 beta2 literal, baked into the kernel params.
 */
export function adamUpdateV(v, g, b2) {
    const site = captureSite('adamUpdateV');
    if (v.dtype !== 'f32' || g.dtype !== 'f32')
        throw new ShapeError(`adamUpdateV: requires f32`, site);
    if (!sameShape(v.shape, g.shape)) {
        throw new ShapeError(`adamUpdateV: shape mismatch`, site);
    }
    return addOp(currentGraph(), 'adam_update_v', v.shape, 'f32', site, { v: v.id, g: g.id, b2 });
}
|
|
297
|
-
/**
 * Fused Adam parameter update: appends one `adam_update_p` op.
 *
 * @param p    f32 parameter tensor.
 * @param mNew f32 updated first moment, same shape as `p`.
 * @param vNew f32 updated second moment, same shape as `p` (now validated —
 *             previously only `mNew` was shape-checked).
 * @param lrt  0-d f32 scalar tensor (bias-corrected learning rate).
 * @param eps  epsilon literal, baked into the kernel params.
 * @param decayShrink weight-decay multiplier: either a JS number literal
 *   (baked into the kernel) or a 0-d f32 scalar tensor the runtime updates
 *   per step. The op binds at most one of the two, whichever was provided.
 */
export function adamUpdateP(p, mNew, vNew, lrt, eps, decayShrink = 1) {
    const site = captureSite('adamUpdateP');
    if (p.dtype !== 'f32')
        throw new ShapeError(`adamUpdateP: requires f32`, site);
    if (lrt.dtype !== 'f32' || lrt.shape.length !== 0) {
        throw new ShapeError(`adamUpdateP: lrt must be a 0-d f32 scalar`, site);
    }
    if (p.shape.length !== mNew.shape.length || p.shape.some((d, i) => d !== mNew.shape[i])) {
        throw new ShapeError(`adamUpdateP: p/mNew shape mismatch`, site);
    }
    // Bug fix: vNew was never shape-checked, so a mismatched second moment
    // reached codegen undetected. Mirror the mNew check.
    if (p.shape.length !== vNew.shape.length || p.shape.some((d, i) => d !== vNew.shape[i])) {
        throw new ShapeError(`adamUpdateP: p/vNew shape mismatch`, site);
    }
    // Bug fix: `typeof null === 'object'`, so a null decayShrink previously
    // fell into the tensor branch and crashed on `null.dtype` with a raw
    // TypeError. Exclude null explicitly and reject non-number non-tensor
    // values with a clear error instead.
    const isTensor = decayShrink !== null && typeof decayShrink === 'object';
    if (isTensor) {
        if (decayShrink.dtype !== 'f32' || decayShrink.shape.length !== 0) {
            throw new ShapeError(`adamUpdateP: decayShrink tensor must be a 0-d f32 scalar`, site);
        }
    }
    else if (typeof decayShrink !== 'number') {
        throw new ShapeError(`adamUpdateP: decayShrink must be a number or a 0-d f32 tensor`, site);
    }
    return addOp(currentGraph(), 'adam_update_p', p.shape, 'f32', site, {
        p: p.id,
        mNew: mNew.id,
        vNew: vNew.id,
        lrt: lrt.id,
        eps,
        decayShrink: isTensor ? 1 : decayShrink,
        decayShrinkTensor: isTensor ? decayShrink.id : null,
    });
}
|
|
326
|
-
//# sourceMappingURL=ops.js.map
|
package/dist/ops.js.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"ops.js","sourceRoot":"","sources":["../src/ops.ts"],"names":[],"mappings":"AAAA,0BAA0B;AAC1B,EAAE;AACF,wCAAwC;AACxC,qDAAqD;AACrD,yEAAyE;AACzE,0CAA0C;AAC1C,iFAAiF;AACjF,yCAAyC;AACzC,EAAE;AACF,sEAAsE;AAGtE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAC5C,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAA;AACzC,OAAO,EACL,qBAAqB,EAAE,UAAU,EAAE,aAAa,EAAE,YAAY,EAC9D,YAAY,EAAE,cAAc,EAAE,WAAW,EAAE,kBAAkB,EAC7D,WAAW,EAAE,gBAAgB,EAAE,mBAAmB,EAClD,gBAAgB,EAAE,eAAe,EAAE,aAAa,EAAE,UAAU,EAC5D,UAAU,EAAE,SAAS,GACtB,MAAM,YAAY,CAAA;AAEnB,+EAA+E;AAC/E,oEAAoE;AACpE,+EAA+E;AAE/E;;;;GAIG;AACH,SAAS,OAAO,CACd,IAAY,EACZ,IAAoB,EACpB,CAAS,EAAE,CAAS,EACpB,WAAkB,CAAC,CAAC,KAAK;IAEzB,MAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAC,CAAA;IAC9B,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,qBAAqB,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,KAAK,GAAG,EAAE,IAAI,CAAC,CAAA;IACzG,MAAM,QAAQ,GAAG,qBAAqB,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACpE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACpF,CAAC;AAED,6EAA6E;AAC7E,4EAA4E;AAC5E,kDAAkD;AAClD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC9E,CAAC;AACD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC/E,CAAC;AACD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC9E,CAAC;AACD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE,CAAC;QAC1B,IAAI,CAAC,KAAK,CAAC;YAAE,MAAM,IAAI,UAAU,CAAC,oCAAoC,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC,CAAA;QAC3F,OAAO,SAAS,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAA;IAC5B,CAAC;IA
CD,OAAO,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AACpC,CAAC;AAED,+EAA+E;AAC/E,0EAA0E;AAC1E,2EAA2E;AAC3E,8BAA8B;AAC9B,+EAA+E;AAE/E,MAAM,UAAU,SAAS,CAAC,CAAS,EAAE,MAAc;IACjD,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAA;AACzF,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,CAAS,EAAE,MAAc;IACjD,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAA;AACzF,CAAC;AAED,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,SAAS,KAAK,CAAC,IAA+C,EAAE,CAAS;IACvE,MAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAC,CAAA;IAC9B,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,uBAAuB,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC1F,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,IAAI,EAAE,UAAU,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AAC/F,CAAC;AAED,MAAM,CAAC,MAAM,IAAI,GAAI,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,MAAM,EAAG,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,GAAG,GAAK,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,KAAK,EAAI,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,GAAG,GAAK,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,KAAK,EAAI,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,IAAI,GAAI,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,MAAM,EAAG,CAAC,CAAC,CAAA;AAE7D,+EAA+E;AAC/E,8EAA8E;AAC9E,4DAA4D;AAC5D,+EAA+E;AAE/E,MAAM,UAAU,QAAQ,CAAC,CAAS;IAChC,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC3F,MAAM,QAAQ,GAAG,aAAa,CAAC,UAAU,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACzD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACjF,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,CAAS;IAC/B,MAAM,IA
AI,GAAG,WAAW,CAAC,SAAS,CAAC,CAAA;IACnC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC1F,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACvD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,UAAU,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AAChF,CAAC;AAED,2EAA2E;AAC3E,MAAM,UAAU,MAAM,CAAC,CAAS;IAC9B,OAAO,OAAO,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;AAClC,CAAC;AAED,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,MAAM,UAAU,OAAO,CAAC,CAAS,EAAE,QAAe;IAChD,MAAM,IAAI,GAAG,WAAW,CAAC,SAAS,CAAC,CAAA;IACnC,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;IACjE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,SAAS,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC,CAAA;AACnG,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,CAAS,EAAE,IAAuB;IAC1D,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,MAAM,QAAQ,GAAG,cAAc,CAAC,WAAW,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;IACjE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,CAAA;AACvF,CAAC;AAED;;;8DAG8D;AAC9D,MAAM,UAAU,QAAQ,CAAC,CAAS,EAAE,KAAa,EAAE,KAAa;IAC9D,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,CAAA;IACxB,MAAM,IAAI,GAAG,CAAC,IAAY,EAAU,EAAE,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;IACjE,MAAM,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,CAAA;IACtB,MAAM,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,CAAA;IACtB,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;IACpC,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,CAAC;QAC3C,MAAM,IAAI,UAAU,CAAC,sCAAsC,KAAK,KAAK,KAAK,cAAc,CAAC,SAAS,EAAE,IAAI,CAAC,CAAA;IAC3G,CAAC;IACD,IAAI,EAAE,KAAK,EAAE;QAAE,OAAO,CAAC,CAAA;IACvB,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;IACnD,IAAI,CAAC,EAAE,CAAC,GAAG,EAAE,CAAA;IACb,IAAI,CAAC,EAAE,CAAC,GAAG,EAAE,CAAA;IACb,OAAO
,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;AAC3B,CAAC;AAED,+EAA+E;AAC/E,kBAAkB;AAClB,+EAA+E;AAE/E,MAAM,UAAU,MAAM,CAAC,CAAS,EAAE,CAAS;IACzC,MAAM,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAA;IAClC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC3C,MAAM,IAAI,UAAU,CAAC,6BAA6B,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACnF,CAAC;IACD,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC9D,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACrF,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,CAAS,EAAE,CAAS;IAChD,MAAM,IAAI,GAAG,WAAW,CAAC,eAAe,CAAC,CAAA;IACzC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC3C,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC1F,CAAC;IACD,MAAM,QAAQ,GAAG,kBAAkB,CAAC,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC5E,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,gBAAgB,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AAC7F,CAAC;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E,MAAM,UAAU,MAAM,CAAC,OAAe,EAAE,KAAa,EAAE,QAAe,KAAK;IACzE,MAAM,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAA;IAClC,IAAI,OAAO,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC5B,MAAM,IAAI,UAAU,CAAC,oCAAoC,OAAO,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACjF,CAAC;IACD,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,EAAE,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,CAAA;IAClE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAA;AACvG,CAAC;AAED;;;;sEAIsE;AACtE,MAAM,UAAU,SAAS,CAAC,KAAa,EAAE,OAAe;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,IAAI,KAAK,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC7B,MAAM,IAAI,UAAU,CAAC,kDAAkD,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;IACxG,CAAC;IACD,IAAI,OAAO,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC5B,MAAM,IAAI,UAAU,CAAC,uCAAuC,OAAO,CAAC,KAAK,EAAE,EA
AE,IAAI,CAAC,CAAA;IACpF,CAAC;IACD,OAAO,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAE,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC,CAAA;AAC/D,CAAC;AAED,4EAA4E;AAC5E,MAAM,UAAU,MAAM,CAAC,CAAS,EAAE,QAAe,KAAK;IACpD,MAAM,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAA;IAClC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;QACnC,MAAM,IAAI,UAAU,CAAC,6CAA6C,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;IAC9E,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAA;AACxE,CAAC;AAED,+EAA+E;AAC/E,+EAA+E;AAC/E,qDAAqD;AACrD,+EAA+E;AAE/E,4EAA4E;AAC5E,yCAAyC;AACzC,MAAM,UAAU,iBAAiB,CAAC,CAAS;IACzC,MAAM,IAAI,GAAG,WAAW,CAAC,mBAAmB,CAAC,CAAA;IAC7C,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACpG,gBAAgB,CAAC,mBAAmB,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA,CAAE,mCAAmC;IACzF,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,qBAAqB,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACxF,CAAC;AAED,uEAAuE;AACvE,MAAM,UAAU,cAAc,CAAC,CAAS;IACtC,MAAM,IAAI,GAAG,WAAW,CAAC,gBAAgB,CAAC,CAAA;IAC1C,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACjG,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,kBAAkB,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACrF,CAAC;AAED,4EAA4E;AAC5E,sEAAsE;AACtE,0EAA0E;AAC1E,kEAAkE;AAClE,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,SAAiB;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,kCAAkC,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC9F,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC9C,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,CAAA;AAC5F,CAAC;AAED,+EAA+E;AAC/E,WAAW;AACX,+EAA+E;AAE/E,yEAAyE;AACzE,oDAAoD;AACpD,MAAM,UAAU,cAAc,CAAC,CAAS,EAAE,KAAa,EAAE,GAAW;IAClE,MAAM,IAAI,GAAG,WAAW,CAAC,gBAAgB,CAAC,CAAA;IAC1C,MAAM,QAAQ,GA
AG,mBAAmB,CAAC,gBAAgB,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,CAAC,CAAA;IACjF,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,kBAAkB,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,CAAA;AACpG,CAAC;AAED,+EAA+E;AAC/E,8EAA8E;AAC9E,4DAA4D;AAC5D,+EAA+E;AAE/E,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,WAAkB;IACvD,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,CAAA;IAC3D,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,WAAW,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,CAAA;AACpG,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,CAAS,EAAE,WAAkB;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;IACtC,eAAe,CAAC,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,CAAA;IACzD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,WAAW,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,CAAA;AACpG,CAAC;AAED,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,iFAAiF;AACjF,MAAM,UAAU,WAAW,CAAC,KAAa,EAAE,QAAe,KAAK;IAC7D,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAA;AACjF,CAAC;AAED,+EAA+E;AAC/E,gFAAgF;AAChF,+EAA+E;AAE/E,+EAA+E;AAC/E,6BAA6B;AAC7B,+EAA+E;AAE/E,sDAAsD;AACtD,MAAM,CAAC,MAAM,IAAI,GAAM,CAAC,CAAS,EAAE,CAAS,EAAU,EAAE,CAAC,OAAO,CAAC,MAAM,EAAK,MAAM,EAAK,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,CAAA;AACpG,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,CAAS,EAAE,CAAS,EAAU,EAAE,CAAC,OAAO,CAAC,SAAS,EAAE,SAAS,EAAE,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,CAAA;AAEpG,0FAA0F;AAC1F,MAAM,UAAU,KAAK,CAAC,IAAY,EAAE,CAAS,EAAE,CAAS;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,OAAO,CAAC,CAAA;IACjC,IAAI,IAAI,CAAC,KAAK,KAAK,MAAM;QAAE,MAAM,IAAI,UAAU,CAAC,iCAAiC,IAAI,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACpG,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,KAAK,GAAG,EAAE,IAAI,CAAC,CAAA;IAC3G,MAAM,QAAQ,GAAG,UAAU,CAAC,OAAO,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC
,CAAA;IACxE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACrG,CAAC;AAED,yEAAyE;AACzE,oEAAoE;AACpE,MAAM,UAAU,QAAQ,CAAC,CAAS,EAAE,EAAU;IAC5C,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,EAAE,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC5C,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC,KAAK,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACtF,CAAC;IACD,MAAM,QAAQ,GAAG,aAAa,CAAC,UAAU,EAAE,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACnE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;AAC1F,CAAC;AAED,+EAA+E;AAC/E,uEAAuE;AACvE,+EAA+E;AAE/E,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,CAAS,EAAE,EAAU;IAC1D,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,2BAA2B,EAAE,IAAI,CAAC,CAAA;IACnG,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QAClF,MAAM,IAAI,UAAU,CAAC,6BAA6B,EAAE,IAAI,CAAC,CAAA;IAC3D,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,CAAA;AAC/F,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,CAAS,EAAE,EAAU;IAC1D,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,2BAA2B,EAAE,IAAI,CAAC,CAAA;IACnG,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QAClF,MAAM,IAAI,UAAU,CAAC,6BAA6B,EAAE,IAAI,CAAC,CAAA;IAC3D,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,
EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,CAAA;AAC/F,CAAC;AAED,MAAM,UAAU,WAAW,CACzB,CAAS,EACT,IAAY,EACZ,IAAY,EACZ,GAAW,EACX,GAAW,EACX,cAA+B,CAAC;IAEhC,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,2BAA2B,EAAE,IAAI,CAAC,CAAA;IAC9E,IAAI,GAAG,CAAC,KAAK,KAAK,KAAK,IAAI,GAAG,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAClD,MAAM,IAAI,UAAU,CAAC,2CAA2C,EAAE,IAAI,CAAC,CAAA;IACzE,CAAC;IACD,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QACxF,MAAM,IAAI,UAAU,CAAC,oCAAoC,EAAE,IAAI,CAAC,CAAA;IAClE,CAAC;IACD,0EAA0E;IAC1E,2EAA2E;IAC3E,2CAA2C;IAC3C,MAAM,QAAQ,GAAG,OAAO,WAAW,KAAK,QAAQ,CAAA;IAChD,IAAI,QAAQ,EAAE,CAAC;QACb,IAAI,WAAW,CAAC,KAAK,KAAK,KAAK,IAAI,WAAW,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAClE,MAAM,IAAI,UAAU,CAAC,0DAA0D,EAAE,IAAI,CAAC,CAAA;QACxF,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE;QAClE,CAAC,EAAE,CAAC,CAAC,EAAE;QACP,IAAI,EAAE,IAAI,CAAC,EAAE;QACb,IAAI,EAAE,IAAI,CAAC,EAAE;QACb,GAAG,EAAE,GAAG,CAAC,EAAE;QACX,GAAG;QACH,WAAW,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW;QACvC,iBAAiB,EAAE,QAAQ,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;KACpD,CAAC,CAAA;AACJ,CAAC"}
|