tensorgrad 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +121 -0
- package/SPEC.md +293 -0
- package/dist/adam.d.ts +31 -0
- package/dist/adam.d.ts.map +1 -0
- package/dist/adam.js +66 -0
- package/dist/adam.js.map +1 -0
- package/dist/buffers.d.ts +56 -0
- package/dist/buffers.d.ts.map +1 -0
- package/dist/buffers.js +114 -0
- package/dist/buffers.js.map +1 -0
- package/dist/codegen.d.ts +23 -0
- package/dist/codegen.d.ts.map +1 -0
- package/dist/codegen.js +709 -0
- package/dist/codegen.js.map +1 -0
- package/dist/compile.d.ts +53 -0
- package/dist/compile.d.ts.map +1 -0
- package/dist/compile.js +76 -0
- package/dist/compile.js.map +1 -0
- package/dist/grad.d.ts +8 -0
- package/dist/grad.d.ts.map +1 -0
- package/dist/grad.js +404 -0
- package/dist/grad.js.map +1 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +37 -0
- package/dist/index.js.map +1 -0
- package/dist/ir.d.ts +204 -0
- package/dist/ir.d.ts.map +1 -0
- package/dist/ir.js +60 -0
- package/dist/ir.js.map +1 -0
- package/dist/module.d.ts +21 -0
- package/dist/module.d.ts.map +1 -0
- package/dist/module.js +113 -0
- package/dist/module.js.map +1 -0
- package/dist/ops.d.ts +35 -0
- package/dist/ops.d.ts.map +1 -0
- package/dist/ops.js +270 -0
- package/dist/ops.js.map +1 -0
- package/dist/runtime.d.ts +26 -0
- package/dist/runtime.d.ts.map +1 -0
- package/dist/runtime.js +190 -0
- package/dist/runtime.js.map +1 -0
- package/dist/shape.d.ts +24 -0
- package/dist/shape.d.ts.map +1 -0
- package/dist/shape.js +259 -0
- package/dist/shape.js.map +1 -0
- package/dist/trace.d.ts +8 -0
- package/dist/trace.d.ts.map +1 -0
- package/dist/trace.js +93 -0
- package/dist/trace.js.map +1 -0
- package/package.json +62 -0
- package/src/adam.ts +95 -0
- package/src/buffers.ts +173 -0
- package/src/codegen.ts +758 -0
- package/src/compile.ts +120 -0
- package/src/grad.ts +459 -0
- package/src/index.ts +40 -0
- package/src/ir.ts +197 -0
- package/src/module.ts +126 -0
- package/src/ops.ts +311 -0
- package/src/runtime.ts +232 -0
- package/src/shape.ts +263 -0
- package/src/trace.ts +101 -0
package/dist/ops.js
ADDED
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
// User-facing op surface.
|
|
2
|
+
//
|
|
3
|
+
// Each function here is a thin wrapper:
|
|
4
|
+
// 1. capture the call site (for error attribution)
|
|
5
|
+
// 2. validate input shapes via src/shape.ts (which throws on mismatch)
|
|
6
|
+
// 3. compute the output shape and dtype
|
|
7
|
+
// 4. append the op to the current Graph (held in module state by src/trace.ts)
|
|
8
|
+
// 5. return the produced Tensor handle
|
|
9
|
+
//
|
|
10
|
+
// No actual numeric work happens here. These calls just build the IR.
|
|
11
|
+
import { addOp, captureSite } from './ir.js';
|
|
12
|
+
import { currentGraph } from './trace.js';
|
|
13
|
+
import { inferElementwiseBinop, inferUnary, inferMeanLast, inferSumLast, inferReshape, inferTranspose, inferMatmul, inferMatmulBatched, inferOneHot, inferWhereCausal, inferSliceLastRange, inferBroadcastTo, inferSumToShape, inferReluGrad, inferWhere, ShapeError, } from './shape.js';
|
|
14
|
+
// ----------------------------------------------------------------------------
|
|
15
|
+
// Element-wise binops (add/sub/mul/div). Trailing-suffix broadcast.
|
|
16
|
+
// ----------------------------------------------------------------------------
|
|
17
|
+
/**
|
|
18
|
+
* Build an element-wise binop op (forward declaration only — appends to the
|
|
19
|
+
* graph). Used by both arithmetic ops (add/sub/mul/div, output dtype = input
|
|
20
|
+
* dtype) and comparisons (less/greater, output dtype = bool).
|
|
21
|
+
*/
|
|
22
|
+
/**
 * Append one element-wise binary op node to the active graph.
 *
 * Shared by the arithmetic ops (add/sub/mul/div, which keep the input dtype)
 * and the comparisons (less/greater, which force `outDtype` to 'bool').
 * Throws ShapeError on dtype mismatch; shape validation (trailing-suffix
 * broadcast) is delegated to inferElementwiseBinop.
 */
function binopOp(name, kind, a, b, outDtype = a.dtype) {
    const site = captureSite(name);
    if (a.dtype !== b.dtype) {
        throw new ShapeError(`${name}: dtype mismatch (${a.dtype} vs ${b.dtype})`, site);
    }
    const shape = inferElementwiseBinop(name, a.shape, b.shape, site);
    const inputs = { a: a.id, b: b.id };
    return addOp(currentGraph(), kind, shape, outDtype, site, inputs);
}
|
|
29
|
+
// Element-wise binops. Second arg can be a Tensor or a JS number; the latter
|
|
30
|
+
// dispatches to scalar-fused IR ops internally. `mul(x, 2)` and `mul(x, y)`
|
|
31
|
+
// both work — matches every NumPy-shaped library.
|
|
32
|
+
// Element-wise add. `b` may be a Tensor or a plain JS number; the scalar
// form lowers to the fused add_scalar IR op instead of materializing a tensor.
export function add(a, b) {
    if (typeof b === 'number') {
        return addScalar(a, b);
    }
    return binopOp('add', 'add', a, b);
}
|
|
35
|
+
// Element-wise subtract. A numeric `b` becomes an add of its negation,
// reusing the fused add_scalar op.
export function sub(a, b) {
    if (typeof b === 'number') {
        return addScalar(a, -b);
    }
    return binopOp('sub', 'sub', a, b);
}
|
|
38
|
+
// Element-wise multiply. A numeric `b` dispatches to the fused mul_scalar op.
export function mul(a, b) {
    if (typeof b === 'number') {
        return mulScalar(a, b);
    }
    return binopOp('mul', 'mul', a, b);
}
|
|
41
|
+
// Element-wise divide. A numeric `b` is rewritten as multiplication by its
// reciprocal (fused mul_scalar); a zero scalar divisor is rejected up front.
export function div(a, b) {
    if (typeof b !== 'number') {
        return binopOp('div', 'div', a, b);
    }
    if (b === 0) {
        throw new ShapeError(`div: scalar divisor cannot be zero`, captureSite('div'));
    }
    return mulScalar(a, 1 / b);
}
|
|
49
|
+
// ----------------------------------------------------------------------------
|
|
50
|
+
// Element-wise scalar binops (mul/add by JS number). Used for things like
|
|
51
|
+
// `scores * (1/sqrt(d))` and `logits + 1e-5` where allocating a 0-d tensor
|
|
52
|
+
// for the scalar is wasteful.
|
|
53
|
+
// ----------------------------------------------------------------------------
|
|
54
|
+
// Multiply every element of `a` by a JS number. The scalar is stored as an
// op attribute, so no 0-d tensor is allocated. Shape and dtype pass through.
export function mulScalar(a, scalar) {
    const site = captureSite('mulScalar');
    const attrs = { a: a.id, scalar };
    return addOp(currentGraph(), 'mul_scalar', a.shape, a.dtype, site, attrs);
}
|
|
58
|
+
// Add a JS number to every element of `a`. The scalar travels as an op
// attribute rather than a tensor. Shape and dtype pass through.
export function addScalar(a, scalar) {
    const site = captureSite('addScalar');
    const attrs = { a: a.id, scalar };
    return addOp(currentGraph(), 'add_scalar', a.shape, a.dtype, site, attrs);
}
|
|
62
|
+
// ----------------------------------------------------------------------------
|
|
63
|
+
// Unary ops.
|
|
64
|
+
// ----------------------------------------------------------------------------
|
|
65
|
+
/**
 * Append a shape-preserving f32 unary op (sqrt/rsqrt/log/exp/relu) to the
 * active graph. Non-f32 inputs are rejected with a ShapeError.
 */
function unary(name, a) {
    const site = captureSite(name);
    if (a.dtype !== 'f32') {
        throw new ShapeError(`${name}: requires f32, got ${a.dtype}`, site);
    }
    const shape = inferUnary(name, a.shape, site);
    return addOp(currentGraph(), name, shape, 'f32', site, { a: a.id });
}
|
|
71
|
+
// Public unary ops — each is a direct forwarding of the shared `unary` helper.
export function sqrt(a) { return unary('sqrt', a); }
export function rsqrt(a) { return unary('rsqrt', a); }
export function log(a) { return unary('log', a); }
export function exp(a) { return unary('exp', a); }
export function relu(a) { return unary('relu', a); }
|
|
76
|
+
// ----------------------------------------------------------------------------
|
|
77
|
+
// Reductions over the last axis. To reduce along other axes, transpose first.
|
|
78
|
+
// (This is intentional — keeps codegen and autograd small.)
|
|
79
|
+
// ----------------------------------------------------------------------------
|
|
80
|
+
// Mean over the last axis of an f32 tensor. Output drops that axis
// (shape computed by inferMeanLast). To reduce other axes, transpose first.
export function meanLast(a) {
    const site = captureSite('meanLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`meanLast: requires f32, got ${a.dtype}`, site);
    }
    const shape = inferMeanLast('meanLast', a.shape, site);
    return addOp(currentGraph(), 'mean_last', shape, a.dtype, site, { a: a.id });
}
|
|
87
|
+
// Sum over the last axis of an f32 tensor. Output drops that axis
// (shape computed by inferSumLast). To reduce other axes, transpose first.
export function sumLast(a) {
    const site = captureSite('sumLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`sumLast: requires f32, got ${a.dtype}`, site);
    }
    const shape = inferSumLast('sumLast', a.shape, site);
    return addOp(currentGraph(), 'sum_last', shape, a.dtype, site, { a: a.id });
}
|
|
94
|
+
// ----------------------------------------------------------------------------
|
|
95
|
+
// Shape ops.
|
|
96
|
+
// ----------------------------------------------------------------------------
|
|
97
|
+
// Reshape `a` to `newShape`. Validation and shape resolution happen in
// inferReshape; the resolved shape (not the raw request) is stored on the op.
export function reshape(a, newShape) {
    const site = captureSite('reshape');
    const resolved = inferReshape('reshape', a.shape, newShape, site);
    const attrs = { a: a.id, newShape: resolved };
    return addOp(currentGraph(), 'reshape', resolved, a.dtype, site, attrs);
}
|
|
102
|
+
// Permute the axes of `a` according to `perm`. `perm` is validated and the
// output shape computed by inferTranspose; dtype passes through unchanged.
export function transpose(a, perm) {
    const site = captureSite('transpose');
    const shape = inferTranspose('transpose', a.shape, perm, site);
    const attrs = { a: a.id, perm };
    return addOp(currentGraph(), 'transpose', shape, a.dtype, site, attrs);
}
|
|
107
|
+
// ----------------------------------------------------------------------------
|
|
108
|
+
// Linear algebra.
|
|
109
|
+
// ----------------------------------------------------------------------------
|
|
110
|
+
// 2-D matrix multiply. Both operands must be f32; shape compatibility is
// enforced by inferMatmul.
export function matmul(a, b) {
    const site = captureSite('matmul');
    const bothF32 = a.dtype === 'f32' && b.dtype === 'f32';
    if (!bothF32) {
        throw new ShapeError(`matmul: requires f32, got ${a.dtype} and ${b.dtype}`, site);
    }
    const shape = inferMatmul('matmul', a.shape, b.shape, site);
    return addOp(currentGraph(), 'matmul', shape, 'f32', site, { a: a.id, b: b.id });
}
|
|
118
|
+
// Batched matrix multiply. Both operands must be f32; batch/shape
// compatibility is enforced by inferMatmulBatched.
export function matmulBatched(a, b) {
    const site = captureSite('matmulBatched');
    const bothF32 = a.dtype === 'f32' && b.dtype === 'f32';
    if (!bothF32) {
        throw new ShapeError(`matmulBatched: requires f32, got ${a.dtype} and ${b.dtype}`, site);
    }
    const shape = inferMatmulBatched('matmulBatched', a.shape, b.shape, site);
    return addOp(currentGraph(), 'matmul_batched', shape, 'f32', site, { a: a.id, b: b.id });
}
|
|
126
|
+
// ----------------------------------------------------------------------------
|
|
127
|
+
// Indexing / casting.
|
|
128
|
+
// ----------------------------------------------------------------------------
|
|
129
|
+
// One-hot encode i32 `indices` into a new trailing axis of size `depth`.
// Output dtype defaults to f32 but is configurable.
export function oneHot(indices, depth, dtype = 'f32') {
    const site = captureSite('oneHot');
    if (indices.dtype !== 'i32') {
        throw new ShapeError(`oneHot: indices must be i32, got ${indices.dtype}`, site);
    }
    const shape = inferOneHot('oneHot', indices.shape, depth, site);
    const attrs = { indices: indices.id, depth, dtype };
    return addOp(currentGraph(), 'one_hot', shape, dtype, site, attrs);
}
|
|
137
|
+
// arange(n) → [n] of values [0, 1, ..., n-1]. Used for position embeddings.
|
|
138
|
+
// arange(n) → shape-[n] tensor of values 0..n-1. Used for position
// embeddings. `n` must be a positive integer (n = 0 is rejected).
export function arange(n, dtype = 'i32') {
    const site = captureSite('arange');
    if (!Number.isInteger(n) || n <= 0) {
        throw new ShapeError(`arange: n must be a positive integer, got ${n}`, site);
    }
    return addOp(currentGraph(), 'arange', [n], dtype, site, { n, dtype });
}
|
|
145
|
+
// ----------------------------------------------------------------------------
|
|
146
|
+
// ML primitives. Fused so autograd's transpose rule is straightforward and the
|
|
147
|
+
// kernels can be hand-tuned for our specific shapes.
|
|
148
|
+
// ----------------------------------------------------------------------------
|
|
149
|
+
// Causal-masked softmax along the last axis. Shape preserved. Last two axes
|
|
150
|
+
// must be square (TxT attention scores).
|
|
151
|
+
// Fused causal-masked softmax over the last axis. Shape preserved; the last
// two axes must be square (TxT attention scores) — inferWhereCausal enforces
// that and is called purely for validation here.
export function softmaxCausalLast(a) {
    const site = captureSite('softmaxCausalLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`softmaxCausalLast: requires f32, got ${a.dtype}`, site);
    }
    inferWhereCausal('softmaxCausalLast', a.shape, site);
    return addOp(currentGraph(), 'softmax_causal_last', a.shape, 'f32', site, { a: a.id });
}
|
|
158
|
+
// Numerically-stable log-softmax along the last axis. Shape preserved.
|
|
159
|
+
// Numerically-stable log-softmax over the last axis. Shape preserved.
export function logSoftmaxLast(a) {
    const site = captureSite('logSoftmaxLast');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`logSoftmaxLast: requires f32, got ${a.dtype}`, site);
    }
    return addOp(currentGraph(), 'log_softmax_last', a.shape, 'f32', site, { a: a.id });
}
|
|
165
|
+
// Pre-softmax causal mask. Sets cells where (i < j) on the last two axes to
|
|
166
|
+
// `fillValue` (typically -1e30). Lower-triangle entries pass through.
|
|
167
|
+
// Use this when you want the masked scores explicitly (e.g. for capture);
|
|
168
|
+
// for the common case, prefer softmaxCausalLast which fuses both.
|
|
169
|
+
// Pre-softmax causal mask: upper-triangle cells (i < j on the last two axes)
// are replaced with `fillValue` (typically -1e30); the lower triangle passes
// through. Prefer softmaxCausalLast when the masked scores aren't needed.
export function whereCausal(a, fillValue) {
    const site = captureSite('whereCausal');
    if (a.dtype !== 'f32') {
        throw new ShapeError(`whereCausal: requires f32, got ${a.dtype}`, site);
    }
    inferWhereCausal('whereCausal', a.shape, site);
    const attrs = { a: a.id, fillValue };
    return addOp(currentGraph(), 'where_causal', a.shape, 'f32', site, attrs);
}
|
|
176
|
+
// ----------------------------------------------------------------------------
|
|
177
|
+
// Slicing.
|
|
178
|
+
// ----------------------------------------------------------------------------
|
|
179
|
+
// sliceLastRange(a, start, end): slice [start, end) along the last axis.
|
|
180
|
+
// Used for splitting Q/K/V from a fused QKV matmul.
|
|
181
|
+
// Slice [start, end) along the last axis. Bounds are validated by
// inferSliceLastRange. Used to split Q/K/V out of a fused QKV matmul.
export function sliceLastRange(a, start, end) {
    const site = captureSite('sliceLastRange');
    const shape = inferSliceLastRange('sliceLastRange', a.shape, start, end, site);
    const attrs = { a: a.id, start, end };
    return addOp(currentGraph(), 'slice_last_range', shape, a.dtype, site, attrs);
}
|
|
186
|
+
// ----------------------------------------------------------------------------
|
|
187
|
+
// Broadcast / un-broadcast. Mostly used by autograd, but exposed in case user
|
|
188
|
+
// code needs them (e.g. explicit broadcasting for clarity).
|
|
189
|
+
// ----------------------------------------------------------------------------
|
|
190
|
+
// Broadcast `a` up to `targetShape` (trailing-suffix rules, validated by
// inferBroadcastTo). Mostly an autograd internal but exposed for user code.
export function broadcastTo(a, targetShape) {
    const site = captureSite('broadcastTo');
    inferBroadcastTo('broadcastTo', a.shape, targetShape, site);
    const attrs = { a: a.id, targetShape };
    return addOp(currentGraph(), 'broadcast_to', targetShape, a.dtype, site, attrs);
}
|
|
195
|
+
// Reduce `a` down to `targetShape` by summing broadcast axes — the inverse
// of broadcastTo, validated by inferSumToShape. Used by autograd.
export function sumToShape(a, targetShape) {
    const site = captureSite('sumToShape');
    inferSumToShape('sumToShape', a.shape, targetShape, site);
    const attrs = { a: a.id, targetShape };
    return addOp(currentGraph(), 'sum_to_shape', targetShape, a.dtype, site, attrs);
}
|
|
200
|
+
// ----------------------------------------------------------------------------
|
|
201
|
+
// Constants.
|
|
202
|
+
// ----------------------------------------------------------------------------
|
|
203
|
+
// 0-d tensor with a constant value. Used by autograd to seed the loss cotangent.
|
|
204
|
+
// 0-d constant tensor. Autograd uses this to seed the loss cotangent.
export function constScalar(value, dtype = 'f32') {
    const site = captureSite('constScalar');
    const attrs = { value, dtype };
    return addOp(currentGraph(), 'const_scalar', [], dtype, site, attrs);
}
|
|
208
|
+
// ----------------------------------------------------------------------------
|
|
209
|
+
// Autograd-internal helpers (exposed for users writing custom transpose rules).
|
|
210
|
+
// ----------------------------------------------------------------------------
|
|
211
|
+
// ----------------------------------------------------------------------------
|
|
212
|
+
// Comparisons and selection.
|
|
213
|
+
// ----------------------------------------------------------------------------
|
|
214
|
+
// Comparisons reuse the binop helper but return bool.
|
|
215
|
+
// Element-wise comparisons. These reuse the binop helper but force the
// output dtype to bool.
export function less(a, b) { return binopOp('less', 'less', a, b, 'bool'); }
export function greater(a, b) { return binopOp('greater', 'greater', a, b, 'bool'); }
|
|
217
|
+
// where(cond, a, b): elementwise select. cond is bool; a and b can be any matching dtype.
|
|
218
|
+
// where(cond, a, b): element-wise select. `cond` must be bool; `a` and `b`
// must share a dtype (which becomes the output dtype). Shapes are checked
// by inferWhere.
export function where(cond, a, b) {
    const site = captureSite('where');
    if (cond.dtype !== 'bool') {
        throw new ShapeError(`where: cond must be bool, got ${cond.dtype}`, site);
    }
    if (a.dtype !== b.dtype) {
        throw new ShapeError(`where: a/b dtype mismatch (${a.dtype} vs ${b.dtype})`, site);
    }
    const shape = inferWhere('where', cond.shape, a.shape, b.shape, site);
    const inputs = { cond: cond.id, a: a.id, b: b.id };
    return addOp(currentGraph(), 'where', shape, a.dtype, site, inputs);
}
|
|
227
|
+
// reluGrad(x, dy) = dy where x > 0, else 0. Same shape as x. This is the
|
|
228
|
+
// transpose rule for relu, exposed as an op so codegen can emit it.
|
|
229
|
+
// reluGrad(x, dy) = dy where x > 0, else 0. Same shape as x. This is relu's
// transpose rule, exposed as an op so codegen can emit it directly.
export function reluGrad(x, dy) {
    const site = captureSite('reluGrad');
    const bothF32 = x.dtype === 'f32' && dy.dtype === 'f32';
    if (!bothF32) {
        throw new ShapeError(`reluGrad: requires f32, got ${x.dtype} and ${dy.dtype}`, site);
    }
    const shape = inferReluGrad('reluGrad', x.shape, dy.shape, site);
    return addOp(currentGraph(), 'relu_grad', shape, 'f32', site, { x: x.id, dy: dy.id });
}
|
|
237
|
+
// ----------------------------------------------------------------------------
|
|
238
|
+
// Adam-fused ops. Each does its full per-element update in one kernel.
|
|
239
|
+
// ----------------------------------------------------------------------------
|
|
240
|
+
// Fused Adam first-moment update: one kernel computes the full per-element
// m-update from the previous m, the gradient g, and decay b1. Both tensors
// must be f32 and exactly shape-equal (no broadcasting).
export function adamUpdateM(m, g, b1) {
    const site = captureSite('adamUpdateM');
    if (m.dtype !== 'f32' || g.dtype !== 'f32') {
        throw new ShapeError(`adamUpdateM: requires f32`, site);
    }
    const sameShape =
        m.shape.length === g.shape.length && m.shape.every((d, i) => d === g.shape[i]);
    if (!sameShape) {
        throw new ShapeError(`adamUpdateM: shape mismatch`, site);
    }
    return addOp(currentGraph(), 'adam_update_m', m.shape, 'f32', site, { m: m.id, g: g.id, b1 });
}
|
|
249
|
+
// Fused Adam second-moment update: one kernel computes the full per-element
// v-update from the previous v, the gradient g, and decay b2. Both tensors
// must be f32 and exactly shape-equal (no broadcasting).
export function adamUpdateV(v, g, b2) {
    const site = captureSite('adamUpdateV');
    if (v.dtype !== 'f32' || g.dtype !== 'f32') {
        throw new ShapeError(`adamUpdateV: requires f32`, site);
    }
    const sameShape =
        v.shape.length === g.shape.length && v.shape.every((d, i) => d === g.shape[i]);
    if (!sameShape) {
        throw new ShapeError(`adamUpdateV: shape mismatch`, site);
    }
    return addOp(currentGraph(), 'adam_update_v', v.shape, 'f32', site, { v: v.id, g: g.id, b2 });
}
|
|
258
|
+
// Fused Adam parameter update: one kernel applies the full per-element step
// to p from the updated moments mNew/vNew, the bias-corrected learning rate
// lrt (a 0-d f32 tensor so it can change every step without recompiling),
// and epsilon.
//
// Fix vs. previous version: adamUpdateM/adamUpdateV validate BOTH operands,
// but this op only checked p's dtype and the p/mNew shape — a wrong-dtype or
// wrong-shape vNew slipped into the graph unchecked. Now mNew/vNew dtypes
// and the p/vNew shape are validated too.
export function adamUpdateP(p, mNew, vNew, lrt, eps) {
    const site = captureSite('adamUpdateP');
    if (p.dtype !== 'f32' || mNew.dtype !== 'f32' || vNew.dtype !== 'f32') {
        throw new ShapeError(`adamUpdateP: requires f32`, site);
    }
    if (lrt.dtype !== 'f32' || lrt.shape.length !== 0) {
        throw new ShapeError(`adamUpdateP: lrt must be a 0-d f32 scalar`, site);
    }
    // Exact shape equality against p (no broadcasting), matching adamUpdateM/V.
    const matchesP = (s) =>
        s.length === p.shape.length && p.shape.every((d, i) => d === s[i]);
    if (!matchesP(mNew.shape)) {
        throw new ShapeError(`adamUpdateP: p/mNew shape mismatch`, site);
    }
    if (!matchesP(vNew.shape)) {
        throw new ShapeError(`adamUpdateP: p/vNew shape mismatch`, site);
    }
    return addOp(currentGraph(), 'adam_update_p', p.shape, 'f32', site, { p: p.id, mNew: mNew.id, vNew: vNew.id, lrt: lrt.id, eps });
}
|
|
270
|
+
//# sourceMappingURL=ops.js.map
|
package/dist/ops.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ops.js","sourceRoot":"","sources":["../src/ops.ts"],"names":[],"mappings":"AAAA,0BAA0B;AAC1B,EAAE;AACF,wCAAwC;AACxC,qDAAqD;AACrD,yEAAyE;AACzE,0CAA0C;AAC1C,iFAAiF;AACjF,yCAAyC;AACzC,EAAE;AACF,sEAAsE;AAGtE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAC5C,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAA;AACzC,OAAO,EACL,qBAAqB,EAAE,UAAU,EAAE,aAAa,EAAE,YAAY,EAC9D,YAAY,EAAE,cAAc,EAAE,WAAW,EAAE,kBAAkB,EAC7D,WAAW,EAAE,gBAAgB,EAAE,mBAAmB,EAClD,gBAAgB,EAAE,eAAe,EAAE,aAAa,EAAE,UAAU,EAC5D,UAAU,GACX,MAAM,YAAY,CAAA;AAEnB,+EAA+E;AAC/E,oEAAoE;AACpE,+EAA+E;AAE/E;;;;GAIG;AACH,SAAS,OAAO,CACd,IAAY,EACZ,IAAoB,EACpB,CAAS,EAAE,CAAS,EACpB,WAAkB,CAAC,CAAC,KAAK;IAEzB,MAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAC,CAAA;IAC9B,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,qBAAqB,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,KAAK,GAAG,EAAE,IAAI,CAAC,CAAA;IACzG,MAAM,QAAQ,GAAG,qBAAqB,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACpE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACpF,CAAC;AAED,6EAA6E;AAC7E,4EAA4E;AAC5E,kDAAkD;AAClD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC9E,CAAC;AACD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC/E,CAAC;AACD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC9E,CAAC;AACD,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAkB;IAC/C,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE,CAAC;QAC1B,IAAI,CAAC,KAAK,CAAC;YAAE,MAAM,IAAI,UAAU,CAAC,oCAAoC,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC,CAAA;QAC3F,OAAO,SAAS,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAA;IAC5B,CAAC;IACD,OAAO,OAA
O,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AACpC,CAAC;AAED,+EAA+E;AAC/E,0EAA0E;AAC1E,2EAA2E;AAC3E,8BAA8B;AAC9B,+EAA+E;AAE/E,MAAM,UAAU,SAAS,CAAC,CAAS,EAAE,MAAc;IACjD,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAA;AACzF,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,CAAS,EAAE,MAAc;IACjD,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,CAAC,CAAA;AACzF,CAAC;AAED,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,SAAS,KAAK,CAAC,IAA+C,EAAE,CAAS;IACvE,MAAM,IAAI,GAAG,WAAW,CAAC,IAAI,CAAC,CAAA;IAC9B,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,uBAAuB,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC1F,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,IAAI,EAAE,UAAU,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AAC/F,CAAC;AAED,MAAM,CAAC,MAAM,IAAI,GAAI,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,MAAM,EAAG,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,GAAG,GAAK,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,KAAK,EAAI,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,GAAG,GAAK,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,KAAK,EAAI,CAAC,CAAC,CAAA;AAC7D,MAAM,CAAC,MAAM,IAAI,GAAI,CAAC,CAAS,EAAU,EAAE,CAAC,KAAK,CAAC,MAAM,EAAG,CAAC,CAAC,CAAA;AAE7D,+EAA+E;AAC/E,8EAA8E;AAC9E,4DAA4D;AAC5D,+EAA+E;AAE/E,MAAM,UAAU,QAAQ,CAAC,CAAS;IAChC,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC3F,MAAM,QAAQ,GAAG,aAAa,CAAC,UAAU,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACzD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACjF,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,CAAS;IAC/B,MAAM,IAAI,GAAG,WAA
W,CAAC,SAAS,CAAC,CAAA;IACnC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC1F,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACvD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,UAAU,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AAChF,CAAC;AAED,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,MAAM,UAAU,OAAO,CAAC,CAAS,EAAE,QAAe;IAChD,MAAM,IAAI,GAAG,WAAW,CAAC,SAAS,CAAC,CAAA;IACnC,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;IACjE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,SAAS,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC,CAAA;AACnG,CAAC;AAED,MAAM,UAAU,SAAS,CAAC,CAAS,EAAE,IAAuB;IAC1D,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,CAAA;IACrC,MAAM,QAAQ,GAAG,cAAc,CAAC,WAAW,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;IACjE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,CAAA;AACvF,CAAC;AAED,+EAA+E;AAC/E,kBAAkB;AAClB,+EAA+E;AAE/E,MAAM,UAAU,MAAM,CAAC,CAAS,EAAE,CAAS;IACzC,MAAM,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAA;IAClC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC3C,MAAM,IAAI,UAAU,CAAC,6BAA6B,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACnF,CAAC;IACD,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC9D,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACrF,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,CAAS,EAAE,CAAS;IAChD,MAAM,IAAI,GAAG,WAAW,CAAC,eAAe,CAAC,CAAA;IACzC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC3C,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC1F,CAAC;IACD,MAAM,QAAQ,GAAG,kBAAkB,CAAC,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC5E,OAAO,KAAK,CAAC,YAAY,EAAE,E
AAE,gBAAgB,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AAC7F,CAAC;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E,MAAM,UAAU,MAAM,CAAC,OAAe,EAAE,KAAa,EAAE,QAAe,KAAK;IACzE,MAAM,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAA;IAClC,IAAI,OAAO,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC5B,MAAM,IAAI,UAAU,CAAC,oCAAoC,OAAO,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACjF,CAAC;IACD,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,EAAE,OAAO,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,CAAA;IAClE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAA;AACvG,CAAC;AAED,4EAA4E;AAC5E,MAAM,UAAU,MAAM,CAAC,CAAS,EAAE,QAAe,KAAK;IACpD,MAAM,IAAI,GAAG,WAAW,CAAC,QAAQ,CAAC,CAAA;IAClC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;QACnC,MAAM,IAAI,UAAU,CAAC,6CAA6C,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;IAC9E,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAA;AACxE,CAAC;AAED,+EAA+E;AAC/E,+EAA+E;AAC/E,qDAAqD;AACrD,+EAA+E;AAE/E,4EAA4E;AAC5E,yCAAyC;AACzC,MAAM,UAAU,iBAAiB,CAAC,CAAS;IACzC,MAAM,IAAI,GAAG,WAAW,CAAC,mBAAmB,CAAC,CAAA;IAC7C,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACpG,gBAAgB,CAAC,mBAAmB,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA,CAAE,mCAAmC;IACzF,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,qBAAqB,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACxF,CAAC;AAED,uEAAuE;AACvE,MAAM,UAAU,cAAc,CAAC,CAAS;IACtC,MAAM,IAAI,GAAG,WAAW,CAAC,gBAAgB,CAAC,CAAA;IAC1C,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACjG,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,kBAAkB,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACrF,CAAC;AAED,4EAA4E;AAC5E,sEAAsE;AACtE,0EAA0E;AAC1E,kEAAkE;AAClE,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,SAAiB;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,I
AAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,kCAAkC,CAAC,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IAC9F,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC9C,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,CAAA;AAC5F,CAAC;AAED,+EAA+E;AAC/E,WAAW;AACX,+EAA+E;AAE/E,yEAAyE;AACzE,oDAAoD;AACpD,MAAM,UAAU,cAAc,CAAC,CAAS,EAAE,KAAa,EAAE,GAAW;IAClE,MAAM,IAAI,GAAG,WAAW,CAAC,gBAAgB,CAAC,CAAA;IAC1C,MAAM,QAAQ,GAAG,mBAAmB,CAAC,gBAAgB,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,CAAC,CAAA;IACjF,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,kBAAkB,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,CAAA;AACpG,CAAC;AAED,+EAA+E;AAC/E,8EAA8E;AAC9E,4DAA4D;AAC5D,+EAA+E;AAE/E,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,WAAkB;IACvD,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,CAAA;IAC3D,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,WAAW,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,CAAA;AACpG,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,CAAS,EAAE,WAAkB;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;IACtC,eAAe,CAAC,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,WAAW,EAAE,IAAI,CAAC,CAAA;IACzD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,WAAW,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,CAAA;AACpG,CAAC;AAED,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,iFAAiF;AACjF,MAAM,UAAU,WAAW,CAAC,KAAa,EAAE,QAAe,KAAK;IAC7D,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,cAAc,EAAE,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAA;AACjF,CAAC;AAED,+EAA+E;AAC/E,gFAAgF;AAChF,+EAA+E;AAE/E,+EAA+E;AAC/E,6BAA6B;AAC7B,+EAA+E;AAE/E,sDAAsD;AACtD,MAAM,CAAC,MAAM,IAAI,GAAM,CAAC,CAAS,EAAE,CAAS,EAAU,EAAE,CAAC,OAAO,CAAC,MAAM,EAAK,MAAM,EAAK,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,CAAA;AACpG,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,CAAS,EAAE,CAAS,EAAU,EAAE,CAAC,OAAO,CAAC,SAAS,EAAE,SAAS,EAAE,CAAC,EA
AE,CAAC,EAAE,MAAM,CAAC,CAAA;AAEpG,0FAA0F;AAC1F,MAAM,UAAU,KAAK,CAAC,IAAY,EAAE,CAAS,EAAE,CAAS;IACtD,MAAM,IAAI,GAAG,WAAW,CAAC,OAAO,CAAC,CAAA;IACjC,IAAI,IAAI,CAAC,KAAK,KAAK,MAAM;QAAE,MAAM,IAAI,UAAU,CAAC,iCAAiC,IAAI,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACpG,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,KAAK,GAAG,EAAE,IAAI,CAAC,CAAA;IAC3G,MAAM,QAAQ,GAAG,UAAU,CAAC,OAAO,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACxE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;AACrG,CAAC;AAED,yEAAyE;AACzE,oEAAoE;AACpE,MAAM,UAAU,QAAQ,CAAC,CAAS,EAAE,EAAU;IAC5C,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,EAAE,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAC5C,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC,KAAK,QAAQ,EAAE,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;IACtF,CAAC;IACD,MAAM,QAAQ,GAAG,aAAa,CAAC,UAAU,EAAE,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACnE,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;AAC1F,CAAC;AAED,+EAA+E;AAC/E,uEAAuE;AACvE,+EAA+E;AAE/E,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,CAAS,EAAE,EAAU;IAC1D,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,2BAA2B,EAAE,IAAI,CAAC,CAAA;IACnG,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QAClF,MAAM,IAAI,UAAU,CAAC,6BAA6B,EAAE,IAAI,CAAC,CAAA;IAC3D,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,CAAA;AAC/F,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,CAAS,EAAE,EAAU;IAC1D,MAAM
,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,2BAA2B,EAAE,IAAI,CAAC,CAAA;IACnG,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QAClF,MAAM,IAAI,UAAU,CAAC,6BAA6B,EAAE,IAAI,CAAC,CAAA;IAC3D,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,CAAA;AAC/F,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,CAAS,EAAE,IAAY,EAAE,IAAY,EAAE,GAAW,EAAE,GAAW;IACzF,MAAM,IAAI,GAAG,WAAW,CAAC,aAAa,CAAC,CAAA;IACvC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK;QAAE,MAAM,IAAI,UAAU,CAAC,2BAA2B,EAAE,IAAI,CAAC,CAAA;IAC9E,IAAI,GAAG,CAAC,KAAK,KAAK,KAAK,IAAI,GAAG,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAClD,MAAM,IAAI,UAAU,CAAC,2CAA2C,EAAE,IAAI,CAAC,CAAA;IACzE,CAAC;IACD,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,KAAK,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QACxF,MAAM,IAAI,UAAU,CAAC,oCAAoC,EAAE,IAAI,CAAC,CAAA;IAClE,CAAC;IACD,OAAO,KAAK,CAAC,YAAY,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAChE,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,EAAE,CAAC,CAAA;AAChE,CAAC"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { BufferPlan } from './buffers.js';
import type { KernelSpec } from './codegen.js';
/**
 * Handle returned by `createRuntime`. Owns the GPU buffers and compute
 * pipelines for one compiled training program and exposes parameter
 * upload/download, a single `step()` entry point, and teardown.
 */
export interface CompiledRuntime {
    /** Upload one or more parameter Float32Arrays to their GPU buffers. */
    uploadParams(params: Record<string, Float32Array>): void;
    /** Read all parameters back as Float32Arrays — used for UI panels. */
    downloadParams(): Promise<Record<string, Float32Array>>;
    /** Read all parameter gradients back. Mostly for verification / debugging. */
    downloadParamGrads(): Promise<Record<string, Float32Array>>;
    /**
     * One full forward+backward step.
     * 1. Uploads `inputs` (tokens, targets, masks) to input buffers.
     * 2. Dispatches every kernel in order.
     * 3. Reads back the loss scalar.
     * Returns the loss as a JS number.
     */
    step(inputs: Record<string, Int32Array | Float32Array>): Promise<number>;
    /** Free GPU resources. */
    destroy(): void;
}
/** Options accepted by `createRuntime`. */
export interface RuntimeOpts {
    /** Pre-acquired GPUDevice. If omitted, runtime requests its own. */
    device?: GPUDevice;
}
/**
 * Allocate buffers and compile pipelines for `plan` + `kernels`, returning a
 * CompiledRuntime. `lossBufferId` identifies the buffer whose first f32 is
 * read back as the loss after each step.
 */
export declare function createRuntime(plan: BufferPlan, kernels: KernelSpec[], lossBufferId: number, opts?: RuntimeOpts): Promise<CompiledRuntime>;
//# sourceMappingURL=runtime.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runtime.d.ts","sourceRoot":"","sources":["../src/runtime.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAC9C,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAM9C,MAAM,WAAW,eAAe;IAC9B,uEAAuE;IACvE,YAAY,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,GAAG,IAAI,CAAA;IACxD,sEAAsE;IACtE,cAAc,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC,CAAA;IACvD,8EAA8E;IAC9E,kBAAkB,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC,CAAA;IAC3D;;;;;;OAMG;IACH,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,UAAU,GAAG,YAAY,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IACxE,0BAA0B;IAC1B,OAAO,IAAI,IAAI,CAAA;CAChB;AAED,MAAM,WAAW,WAAW;IAC1B,oEAAoE;IACpE,MAAM,CAAC,EAAE,SAAS,CAAA;CACnB;AAQD,wBAAsB,aAAa,CACjC,IAAI,EAAE,UAAU,EAChB,OAAO,EAAE,UAAU,EAAE,EACrB,YAAY,EAAE,MAAM,EACpB,IAAI,GAAE,WAAgB,GACrB,OAAO,CAAC,eAAe,CAAC,CA8K1B"}
|
package/dist/runtime.js
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
// WebGPU runtime. Reads a BufferPlan + KernelSpec[] (produced by codegen),
// allocates real GPU buffers and pipelines, and provides a `step()` method
// that uploads inputs, dispatches all kernels, and reads back outputs.
//
// Browser-only: this module needs `navigator.gpu` at runtime.
//
// The buffer-usage flags below are spelled as literal numbers (values fixed
// by the WebGPU spec) rather than via the `GPUBufferUsage` global: the
// browser provides that global, but touching it at module scope would throw
// in Node, and this module must stay importable there for codegen-only use.
const STORAGE_RW = 0x80 /* STORAGE */
                 | 0x08 /* COPY_DST */
                 | 0x04; /* COPY_SRC */
const READBACK = 0x01 /* MAP_READ */
               | 0x08; /* COPY_DST */
|
|
11
|
+
/**
 * Materialize a compiled model on the GPU.
 *
 * Takes the codegen outputs — a BufferPlan (logical buffer specs) plus an
 * ordered KernelSpec[] — allocates one GPUBuffer per spec, compiles one
 * compute pipeline per distinct WGSL source, pre-builds static bind groups,
 * and returns the CompiledRuntime handle
 * (uploadParams / downloadParams / downloadParamGrads / step / destroy).
 *
 * @param plan BufferPlan; this function reads .buffers, .inputsByName,
 *   .paramsByName, .paramGradsByName and .writebacks.
 * @param kernels Kernels in dispatch order. Entries with an empty `wgsl`
 *   string are placeholders and are skipped at compile and dispatch time.
 * @param lossBufferId Id of the loss buffer whose first f32 `step()` returns.
 *   NOTE(review): this id is used both as an index into `plan.buffers` and as
 *   a key into the allocated-buffer Map, which assumes spec.id equals its
 *   array position — confirm against buffers.ts.
 * @param opts Optional `{ device }`; when omitted a device is requested via
 *   `acquireDevice()` (browser-only).
 * @returns Promise resolving to the CompiledRuntime. Rejects if any shader
 *   fails validation (after logging full compile diagnostics to the console).
 */
export async function createRuntime(plan, kernels, lossBufferId, opts = {}) {
    const device = opts.device ?? await acquireDevice();
    const queue = device.queue;
    // ---- Allocate one GPUBuffer per BufferSpec --------------------------------
    // State buffers also get filled with their initValue at allocation time.
    const buffers = new Map();
    for (const spec of plan.buffers) {
        const buf = device.createBuffer({
            size: spec.byteSize,
            usage: STORAGE_RW,
            label: spec.name ?? `t${spec.id}-${spec.kind}`,
        });
        buffers.set(spec.id, buf);
        if (spec.kind === 'state') {
            // Fill with initValue (typically 0). Float and int both 4 bytes per element.
            const elements = spec.byteSize / 4;
            const init = spec.dtype === 'f32'
                ? new Float32Array(elements).fill(spec.initValue ?? 0)
                : new Int32Array(elements).fill(Math.trunc(spec.initValue ?? 0));
            queue.writeBuffer(buf, 0, init);
        }
    }
    // ---- Compile pipelines per kernel; cache by WGSL source -------------------
    // Push an error scope around each shader+pipeline creation so we can surface
    // the actual compile error rather than the cryptic "previous error" that
    // comes from using an invalid pipeline at dispatch time.
    const moduleCache = new Map();
    const pipelines = []; // index-aligned with `kernels`; null for wgsl-less entries
    const probes = [];    // one pending popErrorScope() promise per compiled kernel
    for (const k of kernels) {
        if (!k.wgsl) {
            pipelines.push(null);
            continue;
        }
        let module = moduleCache.get(k.wgsl);
        if (!module) {
            module = device.createShaderModule({ code: k.wgsl, label: k.opKind });
            moduleCache.set(k.wgsl, module);
        }
        device.pushErrorScope('validation');
        const pipeline = device.createComputePipeline({
            layout: 'auto',
            compute: { module, entryPoint: 'main' },
            label: k.opKind,
        });
        pipelines.push(pipeline);
        // popErrorScope resolves asynchronously; collect the probes and await
        // them all at once below instead of serializing on each kernel.
        probes.push(device.popErrorScope().then(err => err ? { k, module: module, err } : null));
    }
    // Resolve every error scope before the pipelines are first used.
    const probeResults = await Promise.all(probes);
    const failures = probeResults.filter((p) => p != null);
    if (failures.length > 0) {
        // Build one human-readable report per failed shader, including the WGSL
        // source and per-line compiler messages, then fail loudly.
        const reports = [];
        for (const { k, module, err } of failures) {
            const info = await module.getCompilationInfo();
            const messages = info.messages
                .map(m => `  L${m.lineNum}:${m.linePos} [${m.type}] ${m.message}`)
                .join('\n');
            reports.push(`[shader compile error] ${k.opKind} (op #${k.opIndex}): ${err.message}\n` +
                (messages || '  (no compilation messages)') +
                `\n--- WGSL ---\n${k.wgsl}\n-----------`);
        }
        // eslint-disable-next-line no-console
        console.error(reports.join('\n\n'));
        throw new Error(`tensorgrad: ${failures.length} shader(s) failed to compile (see console).`);
    }
    // ---- Pre-build bind groups (static — buffer ids don't change per step) ---
    const bindGroups = kernels.map((k, i) => {
        const pipeline = pipelines[i];
        if (!pipeline)
            return null;
        return device.createBindGroup({
            layout: pipeline.getBindGroupLayout(0),
            // k.bindings[idx] is the buffer id bound at @binding(idx) in the WGSL.
            entries: k.bindings.map((bufId, idx) => ({
                binding: idx,
                resource: { buffer: buffers.get(bufId) },
            })),
        });
    });
    // ---- Loss readback staging buffer -----------------------------------------
    // Reused every step; destroyed in destroy().
    const lossSpec = plan.buffers[lossBufferId];
    const lossReadback = device.createBuffer({ size: lossSpec.byteSize, usage: READBACK });
    // ---- step() ---------------------------------------------------------------
    /**
     * One forward+backward step: upload inputs, dispatch all kernels in order,
     * run writeback copies, then read back and return the loss scalar.
     * Throws if a named input is missing or has the wrong byte length.
     */
    async function step(inputs) {
        for (const [name, bufId] of plan.inputsByName) {
            const data = inputs[name];
            if (!data)
                throw new Error(`tensorgrad: missing input '${name}'`);
            const expectedBytes = plan.buffers[bufId].byteSize;
            if (data.byteLength !== expectedBytes) {
                throw new Error(`tensorgrad: input '${name}' has ${data.byteLength} bytes, expected ${expectedBytes}`);
            }
            // Cast to BufferSource: typed arrays are accepted by writeBuffer at runtime
            // but TS may infer ArrayBufferLike (vs ArrayBuffer) under strict configs.
            queue.writeBuffer(buffers.get(bufId), 0, data);
        }
        const encoder = device.createCommandEncoder({ label: 'tensorgrad-step' });
        for (let i = 0; i < kernels.length; i++) {
            const k = kernels[i];
            if (!k.wgsl || k.threads === 0)
                continue;
            const pipeline = pipelines[i];
            const bindGroup = bindGroups[i];
            const pass = encoder.beginComputePass({ label: k.opKind });
            pass.setPipeline(pipeline);
            pass.setBindGroup(0, bindGroup);
            // WebGPU caps each dispatch dimension at 65535 workgroups. Split into 2D
            // when a kernel needs more than that on the X axis. Kernels compute their
            // global index as `gid.x + gid.y * (65535 * workgroup_size)`, matching the
            // stride we set here. For dispatches that fit in one row, gid.y is 0.
            const wgCount = Math.max(1, Math.ceil(k.threads / k.workgroupSize));
            const MAX_X = 65535;
            const wgX = Math.min(wgCount, MAX_X);
            const wgY = Math.ceil(wgCount / MAX_X);
            pass.dispatchWorkgroups(wgX, wgY, 1);
            pass.end();
        }
        // After all dispatches: writebacks (Adam state, updated params).
        // copyBufferToBuffer is queued onto the same encoder so it's ordered after
        // all kernel dispatches.
        for (const wb of plan.writebacks) {
            encoder.copyBufferToBuffer(buffers.get(wb.source), 0, buffers.get(wb.dest), 0, wb.bytes);
        }
        encoder.copyBufferToBuffer(buffers.get(lossBufferId), 0, lossReadback, 0, lossSpec.byteSize);
        queue.submit([encoder.finish()]);
        await lossReadback.mapAsync(GPUMapMode.READ);
        // slice(0) copies the mapped ArrayBuffer so the view stays valid after unmap.
        const view = new Float32Array(lossReadback.getMappedRange().slice(0));
        lossReadback.unmap();
        return view[0];
    }
    // ---- uploadParams ---------------------------------------------------------
    /** Write the given named Float32Arrays into their parameter buffers.
     *  Names absent from `params` are silently skipped (partial upload OK). */
    function uploadParams(params) {
        for (const [name, bufId] of plan.paramsByName) {
            const data = params[name];
            if (!data)
                continue;
            queue.writeBuffer(buffers.get(bufId), 0, data);
        }
    }
    // ---- download helpers -----------------------------------------------------
    /** Copy every buffer in `map` (name -> buffer id) into a fresh staging
     *  buffer, then map each one and return name -> Float32Array copies.
     *  Maps the stagings sequentially — acceptable for UI-scale downloads. */
    async function downloadFromMap(map) {
        const stagings = [];
        const encoder = device.createCommandEncoder({ label: 'tensorgrad-download' });
        for (const [name, bufId] of map) {
            const spec = plan.buffers[bufId];
            const staging = device.createBuffer({ size: spec.byteSize, usage: READBACK });
            encoder.copyBufferToBuffer(buffers.get(bufId), 0, staging, 0, spec.byteSize);
            stagings.push({ name, buf: staging, bytes: spec.byteSize });
        }
        queue.submit([encoder.finish()]);
        const out = {};
        for (const s of stagings) {
            await s.buf.mapAsync(GPUMapMode.READ);
            // slice(0) copies out of the mapped range before unmap invalidates it.
            out[s.name] = new Float32Array(s.buf.getMappedRange().slice(0));
            s.buf.unmap();
            s.buf.destroy();
        }
        return out;
    }
    return {
        uploadParams,
        downloadParams: () => downloadFromMap(plan.paramsByName),
        downloadParamGrads: () => downloadFromMap(plan.paramGradsByName),
        step,
        // Releases every allocated buffer plus the loss staging buffer.
        destroy: () => {
            for (const b of buffers.values())
                b.destroy();
            lossReadback.destroy();
        },
    };
}
|
|
181
|
+
/**
 * Request a fresh GPUDevice from the environment.
 * @throws Error if WebGPU is unavailable (no `navigator.gpu`) or if the
 *   implementation returns no adapter.
 */
async function acquireDevice() {
    const gpu = typeof navigator !== 'undefined' ? navigator.gpu : undefined;
    if (!gpu) {
        throw new Error('tensorgrad: WebGPU not available in this environment');
    }
    const adapter = await gpu.requestAdapter();
    if (adapter == null) {
        throw new Error('tensorgrad: no WebGPU adapter');
    }
    return adapter.requestDevice();
}
|
|
190
|
+
//# sourceMappingURL=runtime.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runtime.js","sourceRoot":"","sources":["../src/runtime.ts"],"names":[],"mappings":"AAAA,2EAA2E;AAC3E,2EAA2E;AAC3E,uEAAuE;AACvE,EAAE;AACF,8DAA8D;AAiC9D,gFAAgF;AAChF,+EAA+E;AAC/E,2EAA2E;AAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAA,CAAC,YAAY;AACzE,MAAM,QAAQ,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAA,CAAC,YAAY;AAEpD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,IAAgB,EAChB,OAAqB,EACrB,YAAoB,EACpB,OAAoB,EAAE;IAEtB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,MAAM,aAAa,EAAE,CAAA;IACnD,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAA;IAE1B,8EAA8E;IAC9E,yEAAyE;IACzE,MAAM,OAAO,GAAG,IAAI,GAAG,EAAqB,CAAA;IAC5C,KAAK,MAAM,IAAI,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;QAChC,MAAM,GAAG,GAAG,MAAM,CAAC,YAAY,CAAC;YAC9B,IAAI,EAAE,IAAI,CAAC,QAAQ;YACnB,KAAK,EAAE,UAAU;YACjB,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,CAAC,EAAE,IAAI,IAAI,CAAC,IAAI,EAAE;SAC/C,CAAC,CAAA;QACF,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,GAAG,CAAC,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,6EAA6E;YAC7E,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAA;YAClC,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,KAAK,KAAK;gBAC/B,CAAC,CAAC,IAAI,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,CAAC;gBACtD,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,CAAC,CAAC,CAAA;YAClE,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,EAAE,IAA+B,CAAC,CAAA;QAC5D,CAAC;IACH,CAAC;IAED,8EAA8E;IAC9E,6EAA6E;IAC7E,yEAAyE;IACzE,yDAAyD;IACzD,MAAM,WAAW,GAAG,IAAI,GAAG,EAA2B,CAAA;IACtD,MAAM,SAAS,GAAkC,EAAE,CAAA;IAEnD,MAAM,MAAM,GAAiB,EAAE,CAAA;IAC/B,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;QACxB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;YAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAAC,SAAQ;QAAC,CAAC;QAC/C,IAAI,MAAM,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAA;QACpC,IAAI,CAAC,MAAM,EAAE,CAAC;YACZ,MAAM,GAAG,MAAM,CAAC,kBAAkB,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAA;YACrE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;QACjC,CAAC;QACD,MAAM,CAAC,cAAc,CAAC,YAAY,CAAC,CAAA;QACnC,MAAM,QAAQ,GAAG,MAAM,CAAC,qBAA
qB,CAAC;YAC5C,MAAM,EAAE,MAAM;YACd,OAAO,EAAE,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE;YACvC,KAAK,EAAE,CAAC,CAAC,MAAM;SAChB,CAAC,CAAA;QACF,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;QACxB,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,MAAO,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3F,CAAC;IACD,MAAM,YAAY,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;IAC9C,MAAM,QAAQ,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,EAAkE,EAAE,CAAC,CAAC,IAAI,IAAI,CAAC,CAAA;IACtH,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACxB,MAAM,OAAO,GAAa,EAAE,CAAA;QAC5B,KAAK,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,QAAQ,EAAE,CAAC;YAC1C,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,kBAAkB,EAAE,CAAA;YAC9C,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ;iBAC3B,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,OAAO,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC;iBACjE,IAAI,CAAC,IAAI,CAAC,CAAA;YACb,OAAO,CAAC,IAAI,CACV,0BAA0B,CAAC,CAAC,MAAM,SAAS,CAAC,CAAC,OAAO,MAAM,GAAG,CAAC,OAAO,IAAI;gBACzE,CAAC,QAAQ,IAAI,6BAA6B,CAAC;gBAC3C,mBAAmB,CAAC,CAAC,IAAI,eAAe,CACzC,CAAA;QACH,CAAC;QACD,sCAAsC;QACtC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAA;QACnC,MAAM,IAAI,KAAK,CAAC,eAAe,QAAQ,CAAC,MAAM,6CAA6C,CAAC,CAAA;IAC9F,CAAC;IAED,6EAA6E;IAC7E,MAAM,UAAU,GAA4B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QAC/D,MAAM,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;QAC7B,IAAI,CAAC,QAAQ;YAAE,OAAO,IAAI,CAAA;QAC1B,OAAO,MAAM,CAAC,eAAe,CAAC;YAC5B,MAAM,EAAE,QAAQ,CAAC,kBAAkB,CAAC,CAAC,CAAC;YACtC,OAAO,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;gBACvC,OAAO,EAAE,GAAG;gBACZ,QAAQ,EAAE,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,KAAK,CAAE,EAAE;aAC1C,CAAC,CAAC;SACJ,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,8EAA8E;IAC9E,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,YAAY,CAAE,CAAA;IAC5C,MAAM,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,QAAQ,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC,CAAA;IAEtF,8EAA8E;IAC9E,KAAK,UAAU,IAAI,CAAC,MAAiD;QACnE,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE,C
AAC;YAC9C,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAA;YACzB,IAAI,CAAC,IAAI;gBAAE,MAAM,IAAI,KAAK,CAAC,8BAA8B,IAAI,GAAG,CAAC,CAAA;YACjE,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAE,CAAC,QAAQ,CAAA;YACnD,IAAI,IAAI,CAAC,UAAU,KAAK,aAAa,EAAE,CAAC;gBACtC,MAAM,IAAI,KAAK,CAAC,sBAAsB,IAAI,SAAS,IAAI,CAAC,UAAU,oBAAoB,aAAa,EAAE,CAAC,CAAA;YACxG,CAAC;YACD,4EAA4E;YAC5E,0EAA0E;YAC1E,KAAK,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAE,EAAE,CAAC,EAAE,IAA+B,CAAC,CAAA;QAC5E,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,CAAC,oBAAoB,CAAC,EAAE,KAAK,EAAE,iBAAiB,EAAE,CAAC,CAAA;QACzE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACxC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAE,CAAA;YACrB,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC;gBAAE,SAAQ;YACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAE,CAAA;YAC9B,MAAM,SAAS,GAAG,UAAU,CAAC,CAAC,CAAE,CAAA;YAChC,MAAM,IAAI,GAAG,OAAO,CAAC,gBAAgB,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAA;YAC1D,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAA;YAC1B,IAAI,CAAC,YAAY,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;YAC/B,yEAAyE;YACzE,0EAA0E;YAC1E,2EAA2E;YAC3E,sEAAsE;YACtE,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,GAAG,CAAC,CAAC,aAAa,CAAC,CAAC,CAAA;YACnE,MAAM,KAAK,GAAG,KAAK,CAAA;YACnB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,CAAC,CAAA;YACpC,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,CAAA;YACtC,IAAI,CAAC,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;YACpC,IAAI,CAAC,GAAG,EAAE,CAAA;QACZ,CAAC;QACD,iEAAiE;QACjE,2EAA2E;QAC3E,yBAAyB;QACzB,KAAK,MAAM,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;YACjC,OAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAE,EAAE,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAE,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAA;QAC5F,CAAC;QACD,OAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAE,EAAE,CAAC,EAAE,YAAY,EAAE,CAAC,EAAE,QAAQ,CAAC,QAAQ,CAAC,CAAA;QAC7F,KAAK,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;QAEhC,MAAM,YAAY,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;QAC5C,MAAM,IAAI,GAAG,IAAI,YAAY,CAAC,YAAY,CAAC,c
AAc,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;QACrE,YAAY,CAAC,KAAK,EAAE,CAAA;QACpB,OAAO,IAAI,CAAC,CAAC,CAAE,CAAA;IACjB,CAAC;IAED,8EAA8E;IAC9E,SAAS,YAAY,CAAC,MAAoC;QACxD,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;YAC9C,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAA;YACzB,IAAI,CAAC,IAAI;gBAAE,SAAQ;YACnB,KAAK,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAE,EAAE,CAAC,EAAE,IAA+B,CAAC,CAAA;QAC5E,CAAC;IACH,CAAC;IAED,8EAA8E;IAC9E,KAAK,UAAU,eAAe,CAAC,GAAwB;QACrD,MAAM,QAAQ,GAAsD,EAAE,CAAA;QACtE,MAAM,OAAO,GAAG,MAAM,CAAC,oBAAoB,CAAC,EAAE,KAAK,EAAE,qBAAqB,EAAE,CAAC,CAAA;QAC7E,KAAK,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,GAAG,EAAE,CAAC;YAChC,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAE,CAAA;YACjC,MAAM,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC,CAAA;YAC7E,OAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAE,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;YAC7E,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAA;QAC7D,CAAC;QACD,KAAK,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;QAChC,MAAM,GAAG,GAAiC,EAAE,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;YACzB,MAAM,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;YACrC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;YAC/D,CAAC,CAAC,GAAG,CAAC,KAAK,EAAE,CAAA;YACb,CAAC,CAAC,GAAG,CAAC,OAAO,EAAE,CAAA;QACjB,CAAC;QACD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,OAAO;QACL,YAAY;QACZ,cAAc,EAAE,GAAG,EAAE,CAAC,eAAe,CAAC,IAAI,CAAC,YAAY,CAAC;QACxD,kBAAkB,EAAE,GAAG,EAAE,CAAC,eAAe,CAAC,IAAI,CAAC,gBAAgB,CAAC;QAChE,IAAI;QACJ,OAAO,EAAE,GAAG,EAAE;YACZ,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,MAAM,EAAE;gBAAE,CAAC,CAAC,OAAO,EAAE,CAAA;YAC7C,YAAY,CAAC,OAAO,EAAE,CAAA;QACxB,CAAC;KACF,CAAA;AACH,CAAC;AAED,KAAK,UAAU,aAAa;IAC1B,IAAI,OAAO,SAAS,KAAK,WAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC;QACvD,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAA;IACzE,CAAC;IACD,MAAM,OAAO,GAAG,MAAM,SAAS,CAAC,GAAG,CAAC,cAAc,EAAE,CAAA;IACpD,I
AAI,CAAC,OAAO;QAAE,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAA;IAC9D,OAAO,MAAM,OAAO,CAAC,aAAa,EAAE,CAAA;AACtC,CAAC"}
|
package/dist/shape.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// Type declarations for the shape-inference helpers implemented in
// src/shape.ts. Each `infer*` function takes the op name plus operand
// shapes and returns the output Shape; `site` is the user-code CallSite
// threaded through so errors can point at the offending call.
import type { Shape, CallSite } from './ir.js';
/** Error raised on invalid operand shapes; constructed with the CallSite. */
export declare class ShapeError extends Error {
    constructor(message: string, site: CallSite | null);
}
/** Structural equality of two shapes. */
export declare function shapesEqual(a: Shape, b: Shape): boolean;
/** Total element count of a shape. */
export declare function shapeSize(shape: Shape): number;
/** Human-readable rendering of a shape, for error messages. */
export declare function showShape(shape: Shape): string;
/** Trailing-dimension broadcast of two shapes; null when incompatible. */
export declare function broadcastTrailing(a: Shape, b: Shape): Shape | null;
// Per-op inference rules (see src/shape.ts for each rule's details):
export declare function inferElementwiseBinop(opName: string, aShape: Shape, bShape: Shape, site: CallSite | null): Shape;
export declare function inferUnary(_opName: string, aShape: Shape, _site: CallSite | null): Shape;
export declare function inferMeanLast(opName: string, aShape: Shape, site: CallSite | null): Shape;
export declare function inferSumLast(opName: string, aShape: Shape, site: CallSite | null): Shape;
export declare function inferReshape(opName: string, aShape: Shape, newShape: Shape, site: CallSite | null): Shape;
export declare function inferTranspose(opName: string, aShape: Shape, perm: readonly number[], site: CallSite | null): Shape;
export declare function inferMatmul(opName: string, aShape: Shape, bShape: Shape, site: CallSite | null): Shape;
export declare function inferMatmulBatched(opName: string, aShape: Shape, bShape: Shape, site: CallSite | null): Shape;
export declare function inferOneHot(opName: string, indicesShape: Shape, depth: number, site: CallSite | null): Shape;
export declare function inferWhereCausal(opName: string, aShape: Shape, site: CallSite | null): Shape;
export declare function inferSliceLastRange(opName: string, aShape: Shape, start: number, end: number, site: CallSite | null): Shape;
export declare function inferBroadcastTo(opName: string, aShape: Shape, targetShape: Shape, site: CallSite | null): Shape;
export declare function inferSumToShape(opName: string, aShape: Shape, targetShape: Shape, site: CallSite | null): Shape;
export declare function inferWhere(opName: string, condShape: Shape, aShape: Shape, bShape: Shape, site: CallSite | null): Shape;
export declare function inferReluGrad(opName: string, xShape: Shape, dyShape: Shape, site: CallSite | null): Shape;
//# sourceMappingURL=shape.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"shape.d.ts","sourceRoot":"","sources":["../src/shape.ts"],"names":[],"mappings":"AAkBA,OAAO,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAO9C,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI;CAKnD;AAUD,wBAAgB,WAAW,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,GAAG,OAAO,CAIvD;AAED,wBAAgB,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAI9C;AAED,wBAAgB,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAE9C;AAKD,wBAAgB,iBAAiB,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,KAAK,GAAG,KAAK,GAAG,IAAI,CAclE;AASD,wBAAgB,qBAAqB,CACnC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAClE,KAAK,CAWP;AAED,wBAAgB,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAExF;AAED,wBAAgB,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAIzF;AAED,wBAAgB,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAIxF;AAED,wBAAgB,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CA0BzG;AAED,wBAAgB,cAAc,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,SAAS,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAWnH;AAGD,wBAAgB,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAStG;AAGD,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAoB7G;AAED,wBAAgB,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAG5G;AAGD,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAM5F;AAED,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAO3H;AAID,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAahH;AAID,wBAAgB,eAAe,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KA
AK,EAAE,WAAW,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAa/G;AAID,wBAAgB,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAMvH;AAED,wBAAgB,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,GAAG,IAAI,GAAG,KAAK,CAKzG"}
|