@mni-ml/framework 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/autodiff.d.ts +13 -0
- package/dist/autodiff.d.ts.map +1 -0
- package/dist/autodiff.js +91 -0
- package/dist/autodiff.js.map +1 -0
- package/dist/datasets.d.ts +16 -0
- package/dist/datasets.d.ts.map +1 -0
- package/dist/datasets.js +64 -0
- package/dist/datasets.js.map +1 -0
- package/dist/fast_ops.d.ts +23 -0
- package/dist/fast_ops.d.ts.map +1 -0
- package/dist/fast_ops.js +263 -0
- package/dist/fast_ops.js.map +1 -0
- package/dist/fast_ops_worker.d.ts +2 -0
- package/dist/fast_ops_worker.d.ts.map +1 -0
- package/dist/fast_ops_worker.js +119 -0
- package/dist/fast_ops_worker.js.map +1 -0
- package/dist/gpu_backend.d.ts +37 -0
- package/dist/gpu_backend.d.ts.map +1 -0
- package/dist/gpu_backend.js +163 -0
- package/dist/gpu_backend.js.map +1 -0
- package/dist/gpu_kernels.d.ts +74 -0
- package/dist/gpu_kernels.d.ts.map +1 -0
- package/dist/gpu_kernels.js +571 -0
- package/dist/gpu_kernels.js.map +1 -0
- package/dist/gpu_ops.d.ts +43 -0
- package/dist/gpu_ops.d.ts.map +1 -0
- package/dist/gpu_ops.js +365 -0
- package/dist/gpu_ops.js.map +1 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +20 -0
- package/dist/index.js.map +1 -0
- package/dist/module.d.ts +23 -0
- package/dist/module.d.ts.map +1 -0
- package/dist/module.js +97 -0
- package/dist/module.js.map +1 -0
- package/dist/nn.d.ts +63 -0
- package/dist/nn.d.ts.map +1 -0
- package/dist/nn.js +234 -0
- package/dist/nn.js.map +1 -0
- package/dist/operators.d.ts +29 -0
- package/dist/operators.d.ts.map +1 -0
- package/dist/operators.js +91 -0
- package/dist/operators.js.map +1 -0
- package/dist/optimizer.d.ts +15 -0
- package/dist/optimizer.d.ts.map +1 -0
- package/dist/optimizer.js +62 -0
- package/dist/optimizer.js.map +1 -0
- package/dist/scalar.d.ts +42 -0
- package/dist/scalar.d.ts.map +1 -0
- package/dist/scalar.js +126 -0
- package/dist/scalar.js.map +1 -0
- package/dist/scalar_functions.d.ts +62 -0
- package/dist/scalar_functions.d.ts.map +1 -0
- package/dist/scalar_functions.js +127 -0
- package/dist/scalar_functions.js.map +1 -0
- package/dist/tensor.d.ts +58 -0
- package/dist/tensor.d.ts.map +1 -0
- package/dist/tensor.js +288 -0
- package/dist/tensor.js.map +1 -0
- package/dist/tensor_data.d.ts +29 -0
- package/dist/tensor_data.d.ts.map +1 -0
- package/dist/tensor_data.js +131 -0
- package/dist/tensor_data.js.map +1 -0
- package/dist/tensor_functions.d.ts +97 -0
- package/dist/tensor_functions.d.ts.map +1 -0
- package/dist/tensor_functions.js +465 -0
- package/dist/tensor_functions.js.map +1 -0
- package/dist/tensor_ops.d.ts +47 -0
- package/dist/tensor_ops.d.ts.map +1 -0
- package/dist/tensor_ops.js +249 -0
- package/dist/tensor_ops.js.map +1 -0
- package/package.json +45 -0
|
@@ -0,0 +1,465 @@
|
|
|
1
|
+
import { TensorData, shapeProduct, shapeBroadcast, strides, } from './tensor_data.js';
|
|
2
|
+
import { fastTensorMap as tensorMap, fastTensorZip as tensorZip, fastTensorReduce as tensorReduce } from './fast_ops.js';
|
|
3
|
+
import * as operators from './operators.js';
|
|
4
|
+
import { Tensor } from './tensor.js';
|
|
5
|
+
import { tensorMatrixMultiply, tensorConv1d, _tensorConv1d, tensorConv2d, _tensorConv2d } from './tensor_ops.js';
|
|
6
|
+
/** Allocate a zero-filled TensorData with the given shape. */
function zeros(shape) {
    return TensorData.zeros(shape);
}
/**
 * Apply an elementwise unary operator to a TensorData, returning a new
 * contiguous TensorData of the same shape.
 *
 * Shared implementation for neg/sigmoid/relu/log/exp/id/inv below — the
 * seven exports previously repeated this body verbatim.
 *
 * @param op scalar operator from './operators.js'
 * @param a  input TensorData (any strides; read via its own strides)
 */
function applyUnary(op, a) {
    const out = zeros(a.shape);
    const mapFn = tensorMap(op);
    mapFn(out.storage, out.shape, out.strides, a.storage, a.shape, a.strides);
    return out;
}
/** Elementwise negation. */
export function neg(a) {
    return applyUnary(operators.neg, a);
}
/** Elementwise logistic sigmoid. */
export function sigmoid(a) {
    return applyUnary(operators.sigmoid, a);
}
/** Elementwise ReLU. */
export function relu(a) {
    return applyUnary(operators.relu, a);
}
/** Elementwise natural logarithm. */
export function log(a) {
    return applyUnary(operators.log, a);
}
/** Elementwise exponential. */
export function exp(a) {
    return applyUnary(operators.exp, a);
}
/** Identity map — produces a fresh contiguous copy of `a`. */
export function id(a) {
    return applyUnary(operators.id, a);
}
/** Elementwise reciprocal. */
export function inv(a) {
    return applyUnary(operators.inv, a);
}
|
|
51
|
+
/**
 * Apply an elementwise binary operator with broadcasting, returning a new
 * TensorData shaped by shapeBroadcast(a.shape, b.shape).
 *
 * Shared implementation for add/mul/lt/eq/isClose below — the five exports
 * previously repeated this body verbatim.
 *
 * @param op scalar binary operator from './operators.js'
 */
function applyZip(op, a, b) {
    const outShape = shapeBroadcast(a.shape, b.shape);
    const out = zeros(outShape);
    const zipFn = tensorZip(op);
    zipFn(out.storage, out.shape, out.strides, a.storage, a.shape, a.strides, b.storage, b.shape, b.strides);
    return out;
}
/** Broadcasting elementwise addition. */
export function add(a, b) {
    return applyZip(operators.add, a, b);
}
/** Broadcasting elementwise multiplication. */
export function mul(a, b) {
    return applyZip(operators.mul, a, b);
}
/** Broadcasting elementwise less-than comparison (1/0 result). */
export function lt(a, b) {
    return applyZip(operators.lt, a, b);
}
/** Broadcasting elementwise equality comparison (1/0 result). */
export function eq(a, b) {
    return applyZip(operators.eq, a, b);
}
/** Broadcasting elementwise approximate-equality comparison (1/0 result). */
export function isClose(a, b) {
    return applyZip(operators.isClose, a, b);
}
|
|
86
|
+
/**
 * Reduce `a` along dimension `dim` with the given binary operator; the
 * reduced dimension is kept with size 1.
 *
 * Shared implementation for sum/prod/max below — the three exports
 * previously repeated this body verbatim.
 */
function applyReduce(op, a, dim) {
    const outShape = a.shape.map((s, i) => (i === dim ? 1 : s));
    const out = zeros(outShape);
    const reduceFn = tensorReduce(op);
    reduceFn(out.storage, out.shape, out.strides, a.storage, a.shape, a.strides, dim);
    return out;
}
/** Sum over dimension `dim` (kept as size 1). */
export function sum(a, dim) {
    return applyReduce(operators.add, a, dim);
}
/** Product over dimension `dim` (kept as size 1). */
export function prod(a, dim) {
    return applyReduce(operators.mul, a, dim);
}
/** Maximum over dimension `dim` (kept as size 1). */
export function max(a, dim) {
    return applyReduce(operators.max, a, dim);
}
|
|
107
|
+
/** Reorder the dimensions of `a` according to `order` (no data copy). */
export function permute(a, order) {
    return a.permute(...order);
}
/**
 * True when `a`'s strides match the canonical row-major strides for its
 * shape. Shared by view() and contiguous(), which previously duplicated
 * this check inline.
 */
function isContiguousData(a) {
    const expectedStrides = strides(a.shape);
    return a.strides.every((s, i) => s === expectedStrides[i]);
}
/**
 * Reinterpret `a`'s storage under a new shape without copying.
 * @throws when `a` is not contiguous or the element counts differ.
 */
export function view(a, shape) {
    if (!isContiguousData(a)) {
        throw new Error('Cannot view a non-contiguous tensor');
    }
    const newSize = shapeProduct(shape);
    if (newSize !== a.size) {
        throw new Error(`Size mismatch of tensor and shape [${shape.join(', ')}]`);
    }
    return new TensorData(a.storage, shape);
}
/**
 * Return `a` itself when already contiguous, otherwise a fresh row-major
 * copy (id() materializes through the elementwise map).
 */
export function contiguous(a) {
    if (isContiguousData(a)) {
        return a;
    }
    return id(a);
}
|
|
130
|
+
/**
 * Mutable container that ferries tensors from a TensorFunction's forward
 * pass to its backward pass.
 */
export class TensorContext {
    _savedTensors = [];
    /**
     * Record the tensors backward() will need. Each call replaces what was
     * stored by any previous call.
     */
    saveForBackward(...tensors) {
        this._savedTensors = tensors;
    }
    /** The tensors recorded by the most recent saveForBackward call. */
    get savedTensors() {
        return this._savedTensors;
    }
}
|
|
139
|
+
/**
 * Autograd bookkeeping attached to a tensor: the function class that
 * produced it (`lastFn`), the context saved during forward (`ctx`), and the
 * input tensors (`inputs`). A leaf tensor uses the defaults.
 */
export class TensorHistory {
    lastFn;
    ctx;
    inputs;
    constructor(lastFn = null, ctx = null, inputs = []) {
        Object.assign(this, { lastFn, ctx, inputs });
    }
}
|
|
149
|
+
/**
 * Abstract base for differentiable tensor operations. Subclasses override
 * both static methods; the base implementations always throw.
 */
export class TensorFunction {
    /** Compute the operation's result; may stash tensors on `ctx`. */
    static forward(_ctx, ..._inputs) {
        throw new Error("forward not implemented");
    }
    /** Return one gradient per forward input, given the output gradient. */
    static backward(_ctx, _gradOutput) {
        throw new Error("backward not implemented");
    }
}
|
|
157
|
+
/** Elementwise negation. d(-a)/da = -1, so backward just negates the grad. */
export class Neg extends TensorFunction {
    static forward(ctx, a) {
        // Nothing saved: the derivative is constant.
        return new Tensor(neg(a.data));
    }
    static backward(ctx, gradOutput) {
        return [gradOutput.neg()];
    }
}
/** Elementwise logistic sigmoid. Saves the OUTPUT (not the input) for backward. */
export class Sigmoid extends TensorFunction {
    static forward(ctx, a) {
        const result = new Tensor(sigmoid(a.data));
        ctx.saveForBackward(result); // Save output
        return result;
    }
    static backward(ctx, gradOutput) {
        const [sigResult] = ctx.savedTensors;
        // grad * sig * (1 - sig)
        const ones = Tensor.ones(sigResult.shape);
        return [gradOutput.mul(sigResult).mul(ones.sub(sigResult))];
    }
}
/** Elementwise ReLU. Saves the INPUT for backward. */
export class ReLU extends TensorFunction {
    static forward(ctx, a) {
        ctx.saveForBackward(a); // Save input
        return new Tensor(relu(a.data));
    }
    static backward(ctx, gradOutput) {
        const [a] = ctx.savedTensors;
        // mask = 1 where 0 < a, else 0; the subgradient at exactly 0 is taken as 0.
        const mask = new Tensor(lt(Tensor.zeros(a.shape).data, a.data));
        return [gradOutput.mul(mask)];
    }
}
/** Elementwise natural log. Saves the input; d(log a)/da = 1/a. */
export class Log extends TensorFunction {
    static forward(ctx, a) {
        ctx.saveForBackward(a);
        return new Tensor(log(a.data));
    }
    static backward(ctx, gradOutput) {
        const [a] = ctx.savedTensors;
        // grad / a
        return [gradOutput.mul(a.inv())];
    }
}
/** Elementwise exp. Saves the OUTPUT, since d(e^a)/da = e^a. */
export class Exp extends TensorFunction {
    static forward(ctx, a) {
        const result = new Tensor(exp(a.data));
        ctx.saveForBackward(result); // Save output
        return result;
    }
    static backward(ctx, gradOutput) {
        const [expResult] = ctx.savedTensors;
        return [gradOutput.mul(expResult)];
    }
}
/** Elementwise reciprocal. Saves the input; d(1/a)/da = -1/a^2. */
export class Inv extends TensorFunction {
    static forward(ctx, a) {
        ctx.saveForBackward(a);
        return new Tensor(inv(a.data));
    }
    static backward(ctx, gradOutput) {
        const [a] = ctx.savedTensors;
        // -grad / a^2
        return [gradOutput.neg().mul(a.mul(a).inv())];
    }
}
|
|
222
|
+
/**
 * Undo broadcasting: sum `grad` down so its shape matches `originalShape`.
 * Extra leading dimensions are summed away entirely; dimensions the input
 * held at size 1 are summed back to size 1 (sum keeps reduced dims).
 */
function unbroadcast(grad, originalShape) {
    const targetDims = originalShape.length;
    let out = grad;
    // Collapse leading dimensions that broadcasting prepended.
    while (out.dims > targetDims) {
        const summed = out.sum(0);
        out = summed.view(...summed.shape.slice(1));
    }
    // Re-collapse any dimension the original tensor had as size 1.
    originalShape.forEach((size, axis) => {
        if (size === 1 && out.shape[axis] > 1) {
            out = out.sum(axis);
        }
    });
    return out;
}
|
|
236
|
+
/** Broadcasting elementwise addition with autodiff. */
export class Add extends TensorFunction {
    static forward(ctx, a, b) {
        ctx.saveForBackward(a, b);
        return new Tensor(add(a.data, b.data));
    }
    static backward(ctx, gradOutput) {
        const [a, b] = ctx.savedTensors;
        // d(a+b)/da = d(a+b)/db = 1; sum the grad back down to each input's shape.
        return [
            unbroadcast(gradOutput, a.shape),
            unbroadcast(gradOutput, b.shape)
        ];
    }
}
/** Broadcasting elementwise multiplication with autodiff (product rule). */
export class Mul extends TensorFunction {
    static forward(ctx, a, b) {
        ctx.saveForBackward(a, b);
        return new Tensor(mul(a.data, b.data));
    }
    static backward(ctx, gradOutput) {
        const [a, b] = ctx.savedTensors;
        // d(ab)/da = b, d(ab)/db = a.
        return [
            unbroadcast(gradOutput.mul(b), a.shape),
            unbroadcast(gradOutput.mul(a), b.shape)
        ];
    }
}
/** Elementwise less-than. Comparisons are non-differentiable: zero gradients. */
export class LT extends TensorFunction {
    static forward(ctx, a, b) {
        ctx.saveForBackward(a, b);
        return new Tensor(lt(a.data, b.data));
    }
    static backward(ctx, gradOutput) {
        const [a, b] = ctx.savedTensors;
        return [Tensor.zeros(a.shape), Tensor.zeros(b.shape)];
    }
}
/** Elementwise equality. Comparisons are non-differentiable: zero gradients. */
export class EQ extends TensorFunction {
    static forward(ctx, a, b) {
        ctx.saveForBackward(a, b);
        return new Tensor(eq(a.data, b.data));
    }
    static backward(ctx, gradOutput) {
        const [a, b] = ctx.savedTensors;
        return [Tensor.zeros(a.shape), Tensor.zeros(b.shape)];
    }
}
|
|
282
|
+
/**
 * Factory returning a TensorFunction class that sums over dimension `dim`
 * (the reduced dimension is kept with size 1 by sum()).
 */
export function Sum(dim) {
    return class extends TensorFunction {
        static forward(ctx, a) {
            ctx.saveForBackward(a);
            return new Tensor(sum(a.data, dim));
        }
        static backward(ctx, gradOutput) {
            const [a] = ctx.savedTensors;
            // Each input element contributed with weight 1: broadcast the grad
            // back over the reduced dimension via mul with ones(a.shape).
            return [gradOutput.mul(Tensor.ones(a.shape))];
        }
    };
}
|
|
294
|
+
/**
 * Factory returning a TensorFunction class reducing with max over `dim`
 * (the reduced dimension is kept with size 1).
 */
export function Max(dim) {
    return class extends TensorFunction {
        static forward(ctx, a) {
            const out = new Tensor(max(a.data, dim));
            // Save both the input and the max values to build the argmax mask.
            ctx.saveForBackward(a, out);
            return out;
        }
        static backward(ctx, gradOutput) {
            const [a, maxVals] = ctx.savedTensors;
            // Gradient flows only to positions equal to the max (argmax mask)
            // NOTE(review): on ties every maximal position receives the full
            // gradient (it is not split among them) — confirm this is intended.
            const mask = a.is_close(maxVals);
            return [gradOutput.mul(mask)];
        }
    };
}
|
|
309
|
+
/**
 * Factory returning a TensorFunction class that reorders dimensions by
 * `order`; backward applies the inverse permutation to the gradient.
 */
export function Permute(order) {
    return class extends TensorFunction {
        static forward(ctx, a) {
            ctx.saveForBackward(a);
            return new Tensor(permute(a.data, order));
        }
        static backward(ctx, gradOutput) {
            // Invert the permutation: inverseOrder[order[i]] = i.
            const inverseOrder = new Array(order.length);
            for (let i = 0; i < order.length; i++) {
                inverseOrder[order[i]] = i;
            }
            return [gradOutput.permute(...inverseOrder)];
        }
    };
}
|
|
324
|
+
/**
 * Factory returning a TensorFunction class reshaping to `newShape`
 * (view() requires a contiguous input and an unchanged element count).
 */
export function View(newShape) {
    return class extends TensorFunction {
        static forward(ctx, a) {
            ctx.saveForBackward(a);
            return new Tensor(view(a.data, newShape));
        }
        static backward(ctx, gradOutput) {
            const [a] = ctx.savedTensors;
            // Reshape the gradient back to the input's shape; contiguous()
            // first, since view() rejects non-contiguous data.
            return [gradOutput.contiguous().view(...a.shape)];
        }
    };
}
|
|
336
|
+
/** Produce a contiguous copy of the input. Layout-only: identity gradient. */
export class Contiguous extends TensorFunction {
    static forward(ctx, a) {
        return new Tensor(contiguous(a.data));
    }
    static backward(ctx, gradOutput) {
        // Values are unchanged, so the gradient passes straight through.
        return [gradOutput];
    }
}
|
|
344
|
+
/**
 * Swap a tensor's last two dimensions via permute; all leading (batch)
 * dimensions keep their positions.
 * @throws when the tensor has fewer than 2 dimensions.
 */
function transposeLast2(x) {
    const ndim = x.dims;
    if (ndim < 2) {
        throw new Error("transposeLast2 needs at least 2 dims");
    }
    const order = Array.from({ length: ndim }, (_, i) => i);
    [order[ndim - 2], order[ndim - 1]] = [order[ndim - 1], order[ndim - 2]];
    return x.permute(...order);
}
|
|
356
|
+
/**
 * Sum tensor `t` down to `targetShape`, undoing batch broadcasting.
 * Assumes t has at least as many dims as targetShape — TODO confirm;
 * Array(tDims - targetDims) would throw for a negative length.
 */
function reduceToShape(t, targetShape) {
    let result = t;
    const tShape = t.shape;
    const tDims = tShape.length;
    const targetDims = targetShape.length;
    // Pad target shape on the left
    const paddedTarget = [
        ...Array(tDims - targetDims).fill(1),
        ...targetShape,
    ];
    for (let dim = 0; dim < tDims; dim++) {
        if (paddedTarget[dim] === 1 && tShape[dim] !== 1) {
            result = result.sum(dim);
            // Detach so the gradient computation itself is not traced.
            result.history = null;
        }
    }
    // If we added leading dimensions, remove them
    if (tDims !== targetDims) {
        result = result.view(...targetShape);
        result.history = null;
    }
    return result;
}
|
|
379
|
+
/**
 * (Batched) matrix multiplication with autodiff.
 * backward: dA = dOut · B^T, dB = A^T · dOut (transposing the last two
 * dims), then each gradient is summed down to its operand's shape to undo
 * any batch broadcasting.
 */
export class MatMul extends TensorFunction {
    static forward(ctx, a, b) {
        ctx.saveForBackward(a, b);
        return tensorMatrixMultiply(a, b);
    }
    static backward(ctx, gradOut) {
        const saved = ctx.savedTensors;
        if (!saved || saved.length !== 2) {
            throw new Error("MatMul backward: saved tensors missing");
        }
        const a = saved[0];
        const b = saved[1];
        const bT = transposeLast2(b);
        const aT = transposeLast2(a);
        const gradA = tensorMatrixMultiply(gradOut, bT);
        const gradB = tensorMatrixMultiply(aT, gradOut);
        // Detach intermediates so the backward pass itself is not traced.
        gradA.history = null;
        gradB.history = null;
        const gradAFinal = reduceToShape(gradA, a.shape);
        const gradBFinal = reduceToShape(gradB, b.shape);
        gradAFinal.history = null;
        gradBFinal.history = null;
        return [gradAFinal, gradBFinal];
    }
}
|
|
404
|
+
/**
 * 1D convolution with autodiff.
 * Assumes input is [batch, inChannels, width] and weight is
 * [outChannels, inChannels, kWidth], given the indexing below — TODO
 * confirm against tensor_ops.js.
 */
export class Conv1d extends TensorFunction {
    static forward(ctx, input, weight) {
        ctx.saveForBackward(input, weight);
        return tensorConv1d(input, weight, false);
    }
    static backward(ctx, gradOut) {
        const saved = ctx.savedTensors;
        if (!saved || saved.length !== 2) {
            throw new Error("Conv1d backward: saved tensors missing");
        }
        const input = saved[0];
        const weight = saved[1];
        const inChannels = input.shape[1];
        const outChannels = weight.shape[0];
        const kw = weight.shape[2];
        // grad_input: convolve grad_output with transposed weight, reversed
        const newWeight = weight.permute(1, 0, 2);
        const gradInput = tensorConv1d(gradOut, newWeight, true);
        gradInput.history = null; // detach: gradients are not themselves traced
        // grad_weight: use _tensorConv1d with custom output shape [IC, OC, KW]
        const newInput = input.permute(1, 0, 2);
        const newGradOut = gradOut.permute(1, 0, 2);
        const gradWeightData = TensorData.zeros([inChannels, outChannels, kw]);
        _tensorConv1d(gradWeightData.storage, gradWeightData.shape, gradWeightData.strides, newInput.data.storage, newInput.data.shape, newInput.data.strides, newGradOut.data.storage, newGradOut.data.shape, newGradOut.data.strides, false);
        // Permute [IC, OC, KW] -> [OC, IC, KW] and make contiguous
        const gradWeight = new Tensor(contiguous(gradWeightData.permute(1, 0, 2)));
        gradWeight.history = null;
        return [gradInput, gradWeight];
    }
}
|
|
434
|
+
/**
 * 2D convolution with autodiff.
 * Assumes input is [batch, inChannels, height, width] and weight is
 * [outChannels, inChannels, kH, kW], given the indexing below — TODO
 * confirm against tensor_ops.js.
 */
export class Conv2d extends TensorFunction {
    static forward(ctx, input, weight) {
        ctx.saveForBackward(input, weight);
        return tensorConv2d(input, weight, false);
    }
    static backward(ctx, gradOut) {
        const saved = ctx.savedTensors;
        if (!saved || saved.length !== 2) {
            throw new Error("Conv2d backward: saved tensors missing");
        }
        const input = saved[0];
        const weight = saved[1];
        const inChannels = input.shape[1];
        const outChannels = weight.shape[0];
        const kH = weight.shape[2];
        const kW = weight.shape[3];
        // grad_input: convolve grad_output with transposed weight, reversed
        const newWeight = weight.permute(1, 0, 2, 3);
        const gradInput = tensorConv2d(gradOut, newWeight, true);
        gradInput.history = null; // detach: gradients are not themselves traced
        // grad_weight: use _tensorConv2d with custom output shape [IC, OC, KH, KW]
        const newInput = input.permute(1, 0, 2, 3);
        const newGradOut = gradOut.permute(1, 0, 2, 3);
        const gradWeightData = TensorData.zeros([inChannels, outChannels, kH, kW]);
        _tensorConv2d(gradWeightData.storage, gradWeightData.shape, gradWeightData.strides, newInput.data.storage, newInput.data.shape, newInput.data.strides, newGradOut.data.storage, newGradOut.data.shape, newGradOut.data.strides, false);
        // Permute [IC, OC, KH, KW] -> [OC, IC, KH, KW] and make contiguous
        const gradWeight2d = new Tensor(contiguous(gradWeightData.permute(1, 0, 2, 3)));
        gradWeight2d.history = null;
        return [gradInput, gradWeight2d];
    }
}
|
|
465
|
+
//# sourceMappingURL=tensor_functions.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"tensor_functions.js","sourceRoot":"","sources":["../src/tensor_functions.ts"],"names":[],"mappings":"AAIA,OAAO,EACH,UAAU,EACV,YAAY,EACZ,cAAc,EACd,OAAO,GACV,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,aAAa,IAAI,SAAS,EAAE,aAAa,IAAI,SAAS,EAAE,gBAAgB,IAAI,YAAY,EAAE,MAAM,eAAe,CAAC;AACzH,OAAO,KAAK,SAAS,MAAM,gBAAgB,CAAA;AAC3C,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EAAE,oBAAoB,EAAE,YAAY,EAAE,aAAa,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGjH,SAAS,KAAK,CAAC,KAAY;IACvB,OAAO,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACnC,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa;IAC7B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACvC,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,CAAa;IACjC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;IAC3C,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,IAAI,CAAC,CAAa;IAC9B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;IACxC,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa;IAC7B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACvC,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa;IAC7B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACvC,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OA
AO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,EAAE,CAAC,CAAa;IAC5B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IACtC,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa;IAC7B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC3B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACvC,KAAK,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC;IAC1E,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa,EAAE,CAAa;IAC5C,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IAClD,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACvC,KAAK,CACD,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAChC,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa,EAAE,CAAa;IAC5C,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IAClD,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IACvC,KAAK,CACD,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAChC,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,EAAE,CAAC,CAAa,EAAE,CAAa;IAC3C,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IAClD,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IACtC,KAAK,CACD,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,CAAC,CAA
C,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAChC,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,EAAE,CAAC,CAAa,EAAE,CAAa;IAC3C,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IAClD,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IACtC,KAAK,CACD,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAChC,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,CAAa,EAAE,CAAa;IAChD,MAAM,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IAClD,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,KAAK,GAAG,SAAS,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;IAC3C,KAAK,CACD,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAChC,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa,EAAE,GAAW;IAC1C,MAAM,QAAQ,GAAG,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5D,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAC7C,QAAQ,CACJ,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,GAAG,CACN,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,IAAI,CAAC,CAAa,EAAE,GAAW;IAC3C,MAAM,QAAQ,GAAG,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5D,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAC7C,QAAQ,CACJ,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,GAAG,CACN,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAa,EAAE,G
AAW;IAC1C,MAAM,QAAQ,GAAG,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5D,MAAM,GAAG,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC5B,MAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;IAC7C,QAAQ,CACJ,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,EACnC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAC7B,GAAG,CACN,CAAC;IACF,OAAO,GAAG,CAAC;AACf,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,CAAa,EAAE,KAAe;IAClD,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC;AAC/B,CAAC;AAED,MAAM,UAAU,IAAI,CAAC,CAAa,EAAE,KAAY;IAC5C,MAAM,eAAe,GAAG,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IACzC,MAAM,YAAY,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,YAAY,EAAE,CAAC;QAChB,MAAM,IAAI,KAAK,CACX,qCAAqC,CACxC,CAAC;IACN,CAAC;IAED,MAAM,OAAO,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;IACpC,IAAI,OAAO,KAAK,CAAC,CAAC,IAAI,EAAE,CAAC;QACrB,MAAM,IAAI,KAAK,CACX,sCAAsC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAC5D,CAAC;IACN,CAAC;IAED,OAAO,IAAI,UAAU,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;AAC5C,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,CAAa;IACpC,MAAM,eAAe,GAAG,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IACzC,MAAM,YAAY,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;IAEzE,IAAI,YAAY,EAAE,CAAC;QACf,OAAO,CAAC,CAAC;IACb,CAAC;IAED,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC;AAED,MAAM,OAAO,aAAa;IACd,aAAa,GAAa,EAAE,CAAC;IAErC,eAAe,CAAC,GAAG,OAAiB;QAChC,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC;IACjC,CAAC;IAED,IAAI,YAAY;QACZ,OAAO,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;CACJ;AAED,MAAM,OAAO,aAAa;IAEX;IACA;IACA;IAHX,YACW,SAAuC,IAAI,EAC3C,MAA4B,IAAI,EAChC,SAAmB,EAAE;QAFrB,WAAM,GAAN,MAAM,CAAqC;QAC3C,QAAG,GAAH,GAAG,CAA6B;QAChC,WAAM,GAAN,MAAM,CAAe;IAC7B,CAAC;CACP;AAED,MAAM,OAAgB,cAAc;IAChC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,GAAG,MAAgB;QAClD,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAED,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;IA
ChD,CAAC;CACJ;AAED,MAAM,OAAO,GAAI,SAAQ,cAAc;IACnC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,OAAO,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IACnC,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,CAAC;IAC9B,CAAC;CACJ;AAED,MAAM,OAAO,OAAQ,SAAQ,cAAc;IACvC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;QAC3C,GAAG,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAE,cAAc;QAC5C,OAAO,MAAM,CAAC;IAClB,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,SAAS,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QACrC,yBAAyB;QACzB,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,SAAU,CAAC,KAAK,CAAC,CAAC;QAC3C,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,SAAU,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,SAAU,CAAC,CAAC,CAAC,CAAC;IAClE,CAAC;CACJ;AAED,MAAM,OAAO,IAAK,SAAQ,cAAc;IACpC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAE,aAAa;QACtC,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IACpC,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAC7B,MAAM,IAAI,GAAG,IAAI,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAE,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE,CAAE,CAAC,IAAI,CAAC,CAAC,CAAC;QAClE,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;IAClC,CAAC;CACJ;AAED,MAAM,OAAO,GAAI,SAAQ,cAAc;IACnC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;QACvB,OAAO,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IACnC,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAC7B,WAAW;QACX,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;IACtC,CAAC;CACJ;AAED,MAAM,OAAO,GAAI,SAAQ,cAAc;IACnC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;QACvC,GAAG,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAE,cAAc;QAC5C,OAAO,MAAM,CAAC;IAClB,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,SAAS,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QACrC,OA
AO,CAAC,UAAU,CAAC,GAAG,CAAC,SAAU,CAAC,CAAC,CAAC;IACxC,CAAC;CACJ;AAED,MAAM,OAAO,GAAI,SAAQ,cAAc;IACnC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;QACvB,OAAO,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IACnC,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAC7B,cAAc;QACd,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAE,CAAC,GAAG,CAAC,CAAE,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;IACpD,CAAC;CACJ;AAED,SAAS,WAAW,CAAC,IAAY,EAAE,aAAoB;IACnD,IAAI,MAAM,GAAG,IAAI,CAAC;IAElB,OAAO,MAAM,CAAC,IAAI,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC;QACxC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACvB,MAAM,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACvC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAC;IACtC,CAAC;IAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5C,IAAI,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,KAAK,CAAC,CAAC,CAAE,GAAG,CAAC,EAAE,CAAC;YACjD,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAC3B,CAAC;IACL,CAAC;IAED,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,MAAM,OAAO,GAAI,SAAQ,cAAc;IACnC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS,EAAE,CAAS;QACnD,GAAG,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IAC3C,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAChC,OAAO;YACH,WAAW,CAAC,UAAU,EAAE,CAAE,CAAC,KAAK,CAAC;YACjC,WAAW,CAAC,UAAU,EAAE,CAAE,CAAC,KAAK,CAAC;SACpC,CAAA;IACL,CAAC;CACJ;AAED,MAAM,OAAO,GAAI,SAAQ,cAAc;IACnC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS,EAAE,CAAS;QACnD,GAAG,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IAC3C,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAChC,OAAO;YACH,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAE,CAAC,EAAE,CAAE,CAAC,KAAK,CAAC;YACzC,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAE,CAAC,EAAE
,CAAE,CAAC,KAAK,CAAC;SAC5C,CAAC;IACN,CAAC;CACJ;AAED,MAAM,OAAO,EAAG,SAAQ,cAAc;IAClC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS,EAAE,CAAS;QACnD,GAAG,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,IAAI,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAChC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAE,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,CAAE,CAAC,KAAK,CAAC,CAAC,CAAC;IAC5D,CAAC;CACJ;AAED,MAAM,OAAO,EAAG,SAAQ,cAAc;IAClC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS,EAAE,CAAS;QACnD,GAAG,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,IAAI,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;QAChC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAE,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,CAAE,CAAC,KAAK,CAAC,CAAC,CAAC;IAC5D,CAAC;CACJ;AAED,MAAM,UAAU,GAAG,CAAC,GAAW;IAC3B,OAAO,KAAM,SAAQ,cAAc;QAC/B,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;YACxC,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;YACvB,OAAO,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,CAAC;QACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;YAClD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;YAC7B,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC;KACJ,CAAC;AACN,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,GAAW;IAC3B,OAAO,KAAM,SAAQ,cAAc;QAC/B,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;YACxC,MAAM,GAAG,GAAG,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;YACzC,GAAG,CAAC,eAAe,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;YAC5B,OAAO,GAAG,CAAC;QACf,CAAC;QACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;YAClD,MAAM,CAAC,CAAC,EAAE,OAAO,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;YACtC,kEAAkE;YAClE,MAAM,IAAI,GAAG,CAAE,CAAC,QAAQ,CAAC,OAAQ,CAAC,CAAC;YACnC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;QAClC,CAAC;KACJ,CAAC;AACN,CAAC;AAED,MAAM,UAAU,OAAO,CAAC,KAAe;IACnC,OAAO,KAAM,SAAQ,cAAc;QAC/B,MAAM,
CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;YACxC,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;YACvB,OAAO,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;QAC9C,CAAC;QACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;YAClD,MAAM,YAAY,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACpC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC,GAAG,CAAC,CAAC;YAChC,CAAC;YACD,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC;QACjD,CAAC;KACJ,CAAC;AACN,CAAC;AAED,MAAM,UAAU,IAAI,CAAC,QAAe;IAChC,OAAO,KAAM,SAAQ,cAAc;QAC/B,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;YACxC,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;YACvB,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;QAC9C,CAAC;QACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;YAClD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,CAAC;YAC7B,OAAO,CAAC,UAAU,CAAC,UAAU,EAAE,CAAC,IAAI,CAAC,GAAG,CAAE,CAAC,KAAK,CAAC,CAAC,CAAC;QACvD,CAAC;KACJ,CAAC;AACN,CAAC;AAED,MAAM,OAAO,UAAW,SAAQ,cAAc;IAC1C,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS;QACxC,OAAO,IAAI,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IACD,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,UAAkB;QAClD,OAAO,CAAC,UAAU,CAAC,CAAC;IACxB,CAAC;CACJ;AAED,SAAS,cAAc,CAAC,CAAS;IAC7B,qEAAqE;IACrE,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC;IACjB,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;QACR,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;IAC5D,CAAC;IAED,MAAM,KAAK,GAAa,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;IAC7C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IACzB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAE,CAAC;IAC7B,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,GAAI,CAAC;IACpB,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC;AAC/B,CAAC;AAED,SAAS,aAAa,CAAC,CAAS,EAAE,WAAkB;IAChD,IAAI,MAAM,GAAG,CAAC,CAAC;IAEf,MAAM,MAAM,GAAG,CAAC,CAAC,KAAK,CAAC;IACvB,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC;IAC5B,MAAM,UAAU,GAAG,WAAW,CAAC,MAAM,CAAC;IAEtC,+BAA+B;IAC/B,MAAM,YAAY,GAAG;QACjB,GAAG,KAAK,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;QACpC,GAAG,WAAW;KACjB,CAAC;IAEF,KAAK,IAAI,
GAAG,GAAG,CAAC,EAAE,GAAG,GAAG,KAAK,EAAE,GAAG,EAAE,EAAE,CAAC;QACnC,IAAI,YAAY,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;YAC/C,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACzB,MAAM,CAAC,OAAO,GAAG,IAAI,CAAC;QAC1B,CAAC;IACL,CAAC;IAED,8CAA8C;IAC9C,IAAI,KAAK,KAAK,UAAU,EAAE,CAAC;QACvB,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,CAAC;QACrC,MAAM,CAAC,OAAO,GAAG,IAAI,CAAC;IAC1B,CAAC;IAED,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,MAAM,OAAO,MAAO,SAAQ,cAAc;IACtC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,CAAS,EAAE,CAAS;QACnD,GAAG,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC1B,OAAO,oBAAoB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACtC,CAAC;IAED,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,OAAe;QAC/C,MAAM,KAAK,GAAG,GAAG,CAAC,YAAY,CAAC;QAC/B,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;QAC9D,CAAC;QAED,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAE,CAAC;QACpB,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAE,CAAC;QAEpB,MAAM,EAAE,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;QAC7B,MAAM,EAAE,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;QAE7B,MAAM,KAAK,GAAG,oBAAoB,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;QAChD,MAAM,KAAK,GAAG,oBAAoB,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC;QAEhD,KAAK,CAAC,OAAO,GAAG,IAAI,CAAC;QACrB,KAAK,CAAC,OAAO,GAAG,IAAI,CAAC;QAErB,MAAM,UAAU,GAAG,aAAa,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;QACjD,MAAM,UAAU,GAAG,aAAa,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;QAEjD,UAAU,CAAC,OAAO,GAAG,IAAI,CAAC;QAC1B,UAAU,CAAC,OAAO,GAAG,IAAI,CAAC;QAE1B,OAAO,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;IACpC,CAAC;CACJ;AAED,MAAM,OAAO,MAAO,SAAQ,cAAc;IACtC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,KAAa,EAAE,MAAc;QAC5D,GAAG,CAAC,eAAe,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QACnC,OAAO,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,CAAC;IAC9C,CAAC;IAED,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,OAAe;QAC/C,MAAM,KAAK,GAAG,GAAG,CAAC,YAAY,CAAC;QAC/B,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;QAC9D,CAAC;QAED,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAE,CAAC;QACxB,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAE,CAAC;QACzB,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC
;QACnC,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC;QACrC,MAAM,EAAE,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC;QAE5B,oEAAoE;QACpE,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,YAAY,CAAC,OAAO,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;QACzD,SAAS,CAAC,OAAO,GAAG,IAAI,CAAC;QAEzB,uEAAuE;QACvE,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QACxC,MAAM,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAC5C,MAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,UAAU,EAAE,WAAW,EAAE,EAAE,CAAC,CAAC,CAAC;QAEvE,aAAa,CACT,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,OAAO,EACpE,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,IAAI,CAAC,OAAO,EACjE,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,CAAC,IAAI,CAAC,KAAK,EAAE,UAAU,CAAC,IAAI,CAAC,OAAO,EACvE,KAAK,CACR,CAAC;QAEF,2DAA2D;QAC3D,MAAM,UAAU,GAAG,IAAI,MAAM,CAAC,UAAU,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,UAAU,CAAC,OAAO,GAAG,IAAI,CAAC;QAE1B,OAAO,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC;IACnC,CAAC;CACJ;AAED,MAAM,OAAO,MAAO,SAAQ,cAAc;IACtC,MAAM,CAAC,OAAO,CAAC,GAAkB,EAAE,KAAa,EAAE,MAAc;QAC5D,GAAG,CAAC,eAAe,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QACnC,OAAO,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,CAAC;IAC9C,CAAC;IAED,MAAM,CAAC,QAAQ,CAAC,GAAkB,EAAE,OAAe;QAC/C,MAAM,KAAK,GAAG,GAAG,CAAC,YAAY,CAAC;QAC/B,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;QAC9D,CAAC;QAED,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAE,CAAC;QACxB,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAE,CAAC;QACzB,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC;QACnC,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC;QACrC,MAAM,EAAE,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC;QAC5B,MAAM,EAAE,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAE,CAAC;QAE5B,oEAAoE;QACpE,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAC7C,MAAM,SAAS,GAAG,YAAY,CAAC,OAAO,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;QACzD,SAAS,CAAC,OAAO,GAAG,IAAI,CAAC;QAEzB,2EAA2E;QAC3E,MAAM,QAAQ,GAAG,KAAK,CAA
C,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAC3C,MAAM,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;QAC/C,MAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,UAAU,EAAE,WAAW,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;QAE3E,aAAa,CACT,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,OAAO,EACpE,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,IAAI,CAAC,OAAO,EACjE,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,CAAC,IAAI,CAAC,KAAK,EAAE,UAAU,CAAC,IAAI,CAAC,OAAO,EACvE,KAAK,CACR,CAAC;QAEF,mEAAmE;QACnE,MAAM,YAAY,GAAG,IAAI,MAAM,CAAC,UAAU,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAChF,YAAY,CAAC,OAAO,GAAG,IAAI,CAAC;QAE5B,OAAO,CAAC,SAAS,EAAE,YAAY,CAAC,CAAC;IACrC,CAAC;CACJ"}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { Storage, Shape, Strides } from './tensor_data.js';
|
|
2
|
+
import { Tensor } from './tensor.js';
|
|
3
|
+
export declare function tensorMap(fn: (x: number) => number): (outStorage: Storage, outShape: Shape, outStrides: Strides, inStorage: Storage, inShape: Shape, inStrides: Strides) => void;
|
|
4
|
+
export declare function tensorZip(fn: (a: number, b: number) => number): (outStorage: Storage, outShape: Shape, outStrides: Strides, aStorage: Storage, aShape: Shape, aStrides: Strides, bStorage: Storage, bShape: Shape, bStrides: Strides) => void;
|
|
5
|
+
export declare function tensorReduce(fn: (acc: number, x: number) => number): (outStorage: Storage, outShape: Shape, outStrides: Strides, aStorage: Storage, aShape: Shape, aStrides: Strides, reduceDim: number) => void;
|
|
6
|
+
/**
|
|
7
|
+
* Matrix multiply supporting 2D and 3D inputs with batch broadcasting.
|
|
8
|
+
* 2D inputs are padded to 3D internally; if both were 2D the output is squeezed back.
|
|
9
|
+
*/
|
|
10
|
+
export declare function tensorMatrixMultiply(A: Tensor, B: Tensor): Tensor;
|
|
11
|
+
/**
|
|
12
|
+
* Low-level 1D convolution kernel operating on raw Storage/Shape/Strides.
|
|
13
|
+
*
|
|
14
|
+
* Input shape: [batch, in_channels, width]
|
|
15
|
+
* Weight shape: [out_channels, in_channels, kernel_width]
|
|
16
|
+
* Output shape: [batch, out_channels, out_width] (caller pre-allocates)
|
|
17
|
+
*
|
|
18
|
+
* When reverse=false: output[b,oc,t] = sum_{ic,k} input[b,ic,t+k] * weight[oc,ic,k]
|
|
19
|
+
* When reverse=true: output[b,oc,t] = sum_{ic,k} input[b,ic,t-k] * weight[oc,ic,k]
|
|
20
|
+
*
|
|
21
|
+
* Out-of-bounds input positions are treated as 0.
|
|
22
|
+
*/
|
|
23
|
+
export declare function _tensorConv1d(outStorage: Storage, outShape: Shape, outStrides: Strides, inputStorage: Storage, inputShape: Shape, inputStrides: Strides, weightStorage: Storage, weightShape: Shape, weightStrides: Strides, reverse: boolean): void;
|
|
24
|
+
/**
|
|
25
|
+
* 1D convolution: input [batch, in_channels, width] x weight [out_channels, in_channels, kw]
|
|
26
|
+
* -> output [batch, out_channels, width].
|
|
27
|
+
*/
|
|
28
|
+
export declare function tensorConv1d(input: Tensor, weight: Tensor, reverse?: boolean): Tensor;
|
|
29
|
+
/**
|
|
30
|
+
* Low-level 2D convolution kernel operating on raw Storage/Shape/Strides.
|
|
31
|
+
*
|
|
32
|
+
* Input shape: [batch, in_channels, height, width]
|
|
33
|
+
* Weight shape: [out_channels, in_channels, kH, kW]
|
|
34
|
+
* Output shape: [batch, out_channels, out_height, out_width] (caller pre-allocates)
|
|
35
|
+
*
|
|
36
|
+
* When reverse=false: output[b,oc,h,w] = sum_{ic,kh,kw} input[b,ic,h+kh,w+kw] * weight[oc,ic,kh,kw]
|
|
37
|
+
* When reverse=true: output[b,oc,h,w] = sum_{ic,kh,kw} input[b,ic,h-kh,w-kw] * weight[oc,ic,kh,kw]
|
|
38
|
+
*
|
|
39
|
+
* Out-of-bounds input positions are treated as 0.
|
|
40
|
+
*/
|
|
41
|
+
export declare function _tensorConv2d(outStorage: Storage, outShape: Shape, outStrides: Strides, inputStorage: Storage, inputShape: Shape, inputStrides: Strides, weightStorage: Storage, weightShape: Shape, weightStrides: Strides, reverse: boolean): void;
|
|
42
|
+
/**
|
|
43
|
+
* 2D convolution: input [batch, in_channels, height, width] x weight [out_channels, in_channels, kH, kW]
|
|
44
|
+
* -> output [batch, out_channels, height, width].
|
|
45
|
+
*/
|
|
46
|
+
export declare function tensorConv2d(input: Tensor, weight: Tensor, reverse?: boolean): Tensor;
|
|
47
|
+
//# sourceMappingURL=tensor_ops.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"tensor_ops.d.ts","sourceRoot":"","sources":["../src/tensor_ops.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACR,OAAO,EACP,KAAK,EACL,OAAO,EACV,MAAM,kBAAkB,CAAC;AAS1B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AAGrC,wBAAgB,SAAS,CACrB,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,KAAK,MAAM,GAC1B,CACC,UAAU,EAAE,OAAO,EACnB,QAAQ,EAAE,KAAK,EACf,UAAU,EAAE,OAAO,EACnB,SAAS,EAAE,OAAO,EAClB,OAAO,EAAE,KAAK,EACd,SAAS,EAAE,OAAO,KACjB,IAAI,CAuBR;AAED,wBAAgB,SAAS,CACrB,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,KAAK,MAAM,GACrC,CACC,UAAU,EAAE,OAAO,EACnB,QAAQ,EAAE,KAAK,EACf,UAAU,EAAE,OAAO,EACnB,QAAQ,EAAE,OAAO,EACjB,MAAM,EAAE,KAAK,EACb,QAAQ,EAAE,OAAO,EACjB,QAAQ,EAAE,OAAO,EACjB,MAAM,EAAE,KAAK,EACb,QAAQ,EAAE,OAAO,KAChB,IAAI,CA6BR;AAED,wBAAgB,YAAY,CACxB,EAAE,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,KAAK,MAAM,GACvC,CACC,UAAU,EAAE,OAAO,EACnB,QAAQ,EAAE,KAAK,EACf,UAAU,EAAE,OAAO,EACnB,QAAQ,EAAE,OAAO,EACjB,MAAM,EAAE,KAAK,EACb,QAAQ,EAAE,OAAO,EACjB,SAAS,EAAE,MAAM,KAChB,IAAI,CAmCR;AAED;;;GAGG;AACH,wBAAgB,oBAAoB,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAkEjE;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,aAAa,CACzB,UAAU,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,UAAU,EAAE,OAAO,EACzD,YAAY,EAAE,OAAO,EAAE,UAAU,EAAE,KAAK,EAAE,YAAY,EAAE,OAAO,EAC/D,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,KAAK,EAAE,aAAa,EAAE,OAAO,EAClE,OAAO,EAAE,OAAO,GACjB,IAAI,CAmCN;AAED;;;GAGG;AACH,wBAAgB,YAAY,CACxB,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,GAAE,OAAe,GACxD,MAAM,CAwBR;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,aAAa,CACzB,UAAU,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,UAAU,EAAE,OAAO,EACzD,YAAY,EAAE,OAAO,EAAE,UAAU,EAAE,KAAK,EAAE,YAAY,EAAE,OAAO,EAC/D,aAAa,EAAE,OAAO,EAAE,WAAW,EAAE,KAAK,EAAE,aAAa,EAAE,OAAO,EAClE,OAAO,EAAE,OAAO,GACjB,IAAI,CA4CN;AAED;;;GAGG;AACH,wBAAgB,YAAY,CACxB,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,GAAE,OAAe,GACxD,MAAM,CAyBR"}
|