@isidorus/cpu 0.0.0-alpha.0 → 0.0.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -47
- package/binding.gyp +115 -102
- package/dist/_native.d.ts.map +1 -0
- package/dist/_native.js.map +1 -0
- package/dist/{ts/graph.d.ts → graph.d.ts} +25 -1
- package/dist/graph.d.ts.map +1 -0
- package/dist/{ts/graph.js → graph.js} +30 -2
- package/dist/graph.js.map +1 -0
- package/dist/{ts/index.d.ts → index.d.ts} +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/{ts/index.js → index.js} +3 -0
- package/dist/index.js.map +1 -0
- package/dist/inference-pool.d.ts.map +1 -0
- package/dist/inference-pool.js.map +1 -0
- package/dist/install-libtensorflow.d.ts.map +1 -0
- package/dist/{ts/install-libtensorflow.js → install-libtensorflow.js} +6 -6
- package/dist/install-libtensorflow.js.map +1 -0
- package/dist/model/index.d.ts +5 -0
- package/dist/model/index.d.ts.map +1 -0
- package/dist/model/index.js +3 -0
- package/dist/model/index.js.map +1 -0
- package/dist/model/layer.d.ts +25 -0
- package/dist/model/layer.d.ts.map +1 -0
- package/dist/model/layer.js +2 -0
- package/dist/model/layer.js.map +1 -0
- package/dist/model/layers.d.ts +47 -0
- package/dist/model/layers.d.ts.map +1 -0
- package/dist/model/layers.js +191 -0
- package/dist/model/layers.js.map +1 -0
- package/dist/model/sequential.d.ts +91 -0
- package/dist/model/sequential.d.ts.map +1 -0
- package/dist/model/sequential.js +248 -0
- package/dist/model/sequential.js.map +1 -0
- package/dist/ops/array_ops.d.ts.map +1 -0
- package/dist/ops/array_ops.js.map +1 -0
- package/dist/ops/index.d.ts.map +1 -0
- package/dist/ops/index.js.map +1 -0
- package/dist/ops/math_ops.d.ts.map +1 -0
- package/dist/{ts/ops → ops}/math_ops.js +1 -1
- package/dist/ops/math_ops.js.map +1 -0
- package/dist/ops/nn_ops.d.ts.map +1 -0
- package/dist/{ts/ops → ops}/nn_ops.js +9 -9
- package/dist/ops/nn_ops.js.map +1 -0
- package/dist/ops/variable_ops.d.ts.map +1 -0
- package/dist/{ts/ops → ops}/variable_ops.js +7 -9
- package/dist/ops/variable_ops.js.map +1 -0
- package/dist/optimizers/adam.d.ts +26 -0
- package/dist/optimizers/adam.d.ts.map +1 -0
- package/dist/optimizers/adam.js +97 -0
- package/dist/optimizers/adam.js.map +1 -0
- package/dist/optimizers/index.d.ts +5 -0
- package/dist/optimizers/index.d.ts.map +1 -0
- package/dist/optimizers/index.js +4 -0
- package/dist/optimizers/index.js.map +1 -0
- package/dist/optimizers/rmsprop.d.ts +22 -0
- package/dist/optimizers/rmsprop.d.ts.map +1 -0
- package/dist/optimizers/rmsprop.js +65 -0
- package/dist/optimizers/rmsprop.js.map +1 -0
- package/dist/optimizers/sgd.d.ts +53 -0
- package/dist/optimizers/sgd.d.ts.map +1 -0
- package/dist/optimizers/sgd.js +76 -0
- package/dist/optimizers/sgd.js.map +1 -0
- package/dist/session.d.ts.map +1 -0
- package/dist/session.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/package.json +63 -63
- package/scripts/install.js +100 -100
- package/scripts/test-install.js +82 -82
- package/scripts/test.js +45 -45
- package/src/native/addon.cc +11 -11
- package/src/native/graph.cc +577 -442
- package/src/native/graph.h +41 -51
- package/src/native/platform_tf.h +7 -7
- package/src/native/session.cc +796 -715
- package/src/native/session.h +91 -91
- package/dist/ts/_native.d.ts.map +0 -1
- package/dist/ts/_native.js.map +0 -1
- package/dist/ts/graph.d.ts.map +0 -1
- package/dist/ts/graph.js.map +0 -1
- package/dist/ts/index.d.ts.map +0 -1
- package/dist/ts/index.js.map +0 -1
- package/dist/ts/inference-pool.d.ts.map +0 -1
- package/dist/ts/inference-pool.js.map +0 -1
- package/dist/ts/inference_pool.d.ts +0 -99
- package/dist/ts/inference_pool.d.ts.map +0 -1
- package/dist/ts/inference_pool.js +0 -370
- package/dist/ts/inference_pool.js.map +0 -1
- package/dist/ts/install-libtensorflow.d.ts.map +0 -1
- package/dist/ts/install-libtensorflow.js.map +0 -1
- package/dist/ts/ops/array_ops.d.ts.map +0 -1
- package/dist/ts/ops/array_ops.js.map +0 -1
- package/dist/ts/ops/index.d.ts.map +0 -1
- package/dist/ts/ops/index.js.map +0 -1
- package/dist/ts/ops/math_ops.d.ts.map +0 -1
- package/dist/ts/ops/math_ops.js.map +0 -1
- package/dist/ts/ops/nn_ops.d.ts.map +0 -1
- package/dist/ts/ops/nn_ops.js.map +0 -1
- package/dist/ts/ops/variable_ops.d.ts.map +0 -1
- package/dist/ts/ops/variable_ops.js.map +0 -1
- package/dist/ts/session.d.ts.map +0 -1
- package/dist/ts/session.js.map +0 -1
- /package/dist/{ts/_native.d.ts → _native.d.ts} +0 -0
- /package/dist/{ts/_native.js → _native.js} +0 -0
- /package/dist/{ts/inference-pool.d.ts → inference-pool.d.ts} +0 -0
- /package/dist/{ts/inference-pool.js → inference-pool.js} +0 -0
- /package/dist/{ts/install-libtensorflow.d.ts → install-libtensorflow.d.ts} +0 -0
- /package/dist/{ts/ops → ops}/array_ops.d.ts +0 -0
- /package/dist/{ts/ops → ops}/array_ops.js +0 -0
- /package/dist/{ts/ops → ops}/index.d.ts +0 -0
- /package/dist/{ts/ops → ops}/index.js +0 -0
- /package/dist/{ts/ops → ops}/math_ops.d.ts +0 -0
- /package/dist/{ts/ops → ops}/nn_ops.d.ts +0 -0
- /package/dist/{ts/ops → ops}/variable_ops.d.ts +0 -0
- /package/dist/{ts/session.d.ts → session.d.ts} +0 -0
- /package/dist/{ts/session.js → session.js} +0 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { Tensor } from "@isidorus/core";
|
|
2
|
+
import type { Graph } from "../graph.js";
|
|
3
|
+
import type { Layer, LayerParam, ActivationFn } from "./layer.js";
|
|
4
|
+
export declare class Dense implements Layer {
|
|
5
|
+
readonly name: string;
|
|
6
|
+
output: Tensor;
|
|
7
|
+
readonly layerParams: LayerParam[];
|
|
8
|
+
private readonly units;
|
|
9
|
+
private readonly activation;
|
|
10
|
+
private readonly useBias;
|
|
11
|
+
constructor(units: number, options?: {
|
|
12
|
+
activation?: ActivationFn;
|
|
13
|
+
useBias?: boolean;
|
|
14
|
+
name?: string;
|
|
15
|
+
});
|
|
16
|
+
build(g: Graph, input: Tensor, inputShape: (number | null)[]): (number | null)[];
|
|
17
|
+
}
|
|
18
|
+
export declare class Flatten implements Layer {
|
|
19
|
+
readonly name: string;
|
|
20
|
+
output: Tensor;
|
|
21
|
+
readonly layerParams: LayerParam[];
|
|
22
|
+
constructor(options?: {
|
|
23
|
+
name?: string;
|
|
24
|
+
});
|
|
25
|
+
build(g: Graph, input: Tensor, inputShape: (number | null)[]): (number | null)[];
|
|
26
|
+
}
|
|
27
|
+
export declare class Conv2D implements Layer {
|
|
28
|
+
readonly name: string;
|
|
29
|
+
output: Tensor;
|
|
30
|
+
readonly layerParams: LayerParam[];
|
|
31
|
+
private readonly filters;
|
|
32
|
+
private readonly kernelSize;
|
|
33
|
+
private readonly strides;
|
|
34
|
+
private readonly padding;
|
|
35
|
+
private readonly activation;
|
|
36
|
+
private readonly useBias;
|
|
37
|
+
constructor(filters: number, options?: {
|
|
38
|
+
kernelSize?: number | [number, number];
|
|
39
|
+
strides?: number | [number, number];
|
|
40
|
+
padding?: "SAME" | "VALID";
|
|
41
|
+
activation?: ActivationFn;
|
|
42
|
+
useBias?: boolean;
|
|
43
|
+
name?: string;
|
|
44
|
+
});
|
|
45
|
+
build(g: Graph, input: Tensor, inputShape: (number | null)[]): (number | null)[];
|
|
46
|
+
}
|
|
47
|
+
//# sourceMappingURL=layers.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"layers.d.ts","sourceRoot":"","sources":["../../src/ts/model/layers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AA8DlE,qBAAa,KAAM,YAAW,KAAK;IACjC,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,MAAM,EAAG,MAAM,CAAC;IAChB,QAAQ,CAAC,WAAW,EAAE,UAAU,EAAE,CAAM;IAExC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAS;IAC/B,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAe;IAC1C,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAU;gBAGhC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE;QACP,UAAU,CAAC,EAAE,YAAY,CAAC;QAC1B,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,IAAI,CAAC,EAAE,MAAM,CAAC;KACV;IAQR,KAAK,CACH,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,MAAM,EACb,UAAU,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,GAC5B,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;CAoErB;AAKD,qBAAa,OAAQ,YAAW,KAAK;IACnC,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,MAAM,EAAG,MAAM,CAAC;IAChB,QAAQ,CAAC,WAAW,EAAE,UAAU,EAAE,CAAM;gBAE5B,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAO;IAI3C,KAAK,CACH,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,MAAM,EACb,UAAU,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,GAC5B,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;CA2BrB;AAKD,qBAAa,MAAO,YAAW,KAAK;IAClC,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,MAAM,EAAG,MAAM,CAAC;IAChB,QAAQ,CAAC,WAAW,EAAE,UAAU,EAAE,CAAM;IAExC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAmB;IAC9C,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAmC;IAC3D,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAmB;IAC3C,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAe;IAC1C,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAU;gBAGhC,OAAO,EAAE,MAAM,EACf,OAAO,GAAE;QACP,UAAU,CAAC,EAAE,MAAM,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvC,OAAO,CAAC,EAAE,MAAM,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACpC,OAAO,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAC3B,UAAU,CAAC,EAAE,YAAY,CAAC;QAC1B,OAAO,CAAC,EAAE,OAAO,CAAC;QAClB,IAAI,CAAC,EAAE,MAAM,CAAC;KACV;IAgBR,KAAK,CACH,CAAC,EAAE,KAAK,EACR,KAAK,EAAE,MAAM,EACb,UAAU,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE,GAC5B,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;CAqGrB"}
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
import { DType } from "@isidorus/core";
|
|
2
|
+
import { variableWithInit, readVariable, zerosInitializer, glorotUniformInitializer, truncatedNormalInitializer, } from "../ops/variable_ops.js";
|
|
3
|
+
import { constant } from "../ops/array_ops.js";
|
|
4
|
+
import { matmul, biasAdd } from "../ops/math_ops.js";
|
|
5
|
+
import { relu, leakyRelu, relu6, sigmoid, conv2d as conv2dOp, tanh, softmax, logSoftmax, elu, selu, swish, gelu, } from "../ops/nn_ops.js";
|
|
6
|
+
// ---------------------------------------------------------------------------
|
|
7
|
+
// Activation helper
|
|
8
|
+
// ---------------------------------------------------------------------------
|
|
9
|
+
function activate(g, x, fn, name) {
|
|
10
|
+
switch (fn) {
|
|
11
|
+
case "relu":
|
|
12
|
+
return relu(g, x, `${name}/relu`);
|
|
13
|
+
case "leaky_relu":
|
|
14
|
+
return leakyRelu(g, x, 0.2, `${name}/leaky_relu`);
|
|
15
|
+
case "relu6":
|
|
16
|
+
return relu6(g, x, `${name}/relu6`);
|
|
17
|
+
case "sigmoid":
|
|
18
|
+
return sigmoid(g, x, `${name}/sigmoid`);
|
|
19
|
+
case "tanh":
|
|
20
|
+
return tanh(g, x, `${name}/tanh`);
|
|
21
|
+
case "softmax":
|
|
22
|
+
return softmax(g, x, `${name}/softmax`);
|
|
23
|
+
case "log_softmax":
|
|
24
|
+
return logSoftmax(g, x, `${name}/log_softmax`);
|
|
25
|
+
case "elu":
|
|
26
|
+
return elu(g, x, `${name}/elu`);
|
|
27
|
+
case "selu":
|
|
28
|
+
return selu(g, x, `${name}/selu`);
|
|
29
|
+
case "swish":
|
|
30
|
+
return swish(g, x, `${name}/swish`);
|
|
31
|
+
case "gelu":
|
|
32
|
+
return gelu(g, x, `${name}/gelu`);
|
|
33
|
+
case "linear":
|
|
34
|
+
return x;
|
|
35
|
+
default:
|
|
36
|
+
throw new Error(`Unknown activation: ${fn}`);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
// ---------------------------------------------------------------------------
|
|
40
|
+
// Dense — fully-connected layer: output = activation(input @ W + b)
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
export class Dense {
|
|
43
|
+
name;
|
|
44
|
+
output;
|
|
45
|
+
layerParams = [];
|
|
46
|
+
units;
|
|
47
|
+
activation;
|
|
48
|
+
useBias;
|
|
49
|
+
constructor(units, options = {}) {
|
|
50
|
+
this.units = units;
|
|
51
|
+
this.activation = options.activation ?? "linear";
|
|
52
|
+
this.useBias = options.useBias ?? true;
|
|
53
|
+
this.name = options.name ?? `dense_${units}`;
|
|
54
|
+
}
|
|
55
|
+
build(g, input, inputShape) {
|
|
56
|
+
const inFeatures = inputShape[inputShape.length - 1];
|
|
57
|
+
if (!inFeatures || inFeatures < 1)
|
|
58
|
+
throw new Error(`Dense "${this.name}": last input dim must be known, got ${JSON.stringify(inputShape)}`);
|
|
59
|
+
// ── Weight W: [inFeatures, units] ────────────────────────────────────
|
|
60
|
+
const wInitVal = glorotUniformInitializer(g, [inFeatures, this.units], DType.FLOAT32, `${this.name}/w_glorot`);
|
|
61
|
+
const { handle: wHandle, initOp: wInitOp } = variableWithInit(g, [inFeatures, this.units], DType.FLOAT32, `${this.name}/w`, wInitVal);
|
|
62
|
+
const wRead = readVariable(g, wHandle, DType.FLOAT32, `${this.name}/w_read`);
|
|
63
|
+
let out = matmul(g, input, wRead, {}, `${this.name}/matmul`);
|
|
64
|
+
this.layerParams.push({
|
|
65
|
+
handle: wHandle,
|
|
66
|
+
read: wRead,
|
|
67
|
+
dtype: DType.FLOAT32,
|
|
68
|
+
name: `${this.name}/w`,
|
|
69
|
+
initOp: wInitOp,
|
|
70
|
+
});
|
|
71
|
+
// ── Bias b: [units] ───────────────────────────────────────────────────
|
|
72
|
+
if (this.useBias) {
|
|
73
|
+
const bInitVal = zerosInitializer(g, [this.units], DType.FLOAT32);
|
|
74
|
+
const { handle: bHandle, initOp: bInitOp } = variableWithInit(g, [this.units], DType.FLOAT32, `${this.name}/b`, bInitVal);
|
|
75
|
+
const bRead = readVariable(g, bHandle, DType.FLOAT32, `${this.name}/b_read`);
|
|
76
|
+
out = biasAdd(g, out, bRead, `${this.name}/bias_add`);
|
|
77
|
+
this.layerParams.push({
|
|
78
|
+
handle: bHandle,
|
|
79
|
+
read: bRead,
|
|
80
|
+
dtype: DType.FLOAT32,
|
|
81
|
+
name: `${this.name}/b`,
|
|
82
|
+
initOp: bInitOp,
|
|
83
|
+
});
|
|
84
|
+
}
|
|
85
|
+
this.output = activate(g, out, this.activation, this.name);
|
|
86
|
+
return [...inputShape.slice(0, -1), this.units];
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
// ---------------------------------------------------------------------------
|
|
90
|
+
// Flatten — reshapes [batch, d1, d2, ...] → [batch, d1*d2*...]
|
|
91
|
+
// ---------------------------------------------------------------------------
|
|
92
|
+
export class Flatten {
|
|
93
|
+
name;
|
|
94
|
+
output;
|
|
95
|
+
layerParams = [];
|
|
96
|
+
constructor(options = {}) {
|
|
97
|
+
this.name = options.name ?? "flatten";
|
|
98
|
+
}
|
|
99
|
+
build(g, input, inputShape) {
|
|
100
|
+
const spatialDims = inputShape.slice(1);
|
|
101
|
+
const hasUnknown = spatialDims.some((d) => d === null);
|
|
102
|
+
const flatSize = hasUnknown
|
|
103
|
+
? null
|
|
104
|
+
: spatialDims.reduce((a, b) => a * b, 1);
|
|
105
|
+
const flatDim = flatSize ?? -1;
|
|
106
|
+
const shapeBuf = Buffer.allocUnsafe(8);
|
|
107
|
+
shapeBuf.writeInt32LE(-1, 0);
|
|
108
|
+
shapeBuf.writeInt32LE(flatDim, 4);
|
|
109
|
+
const shapeConst = constant(g, shapeBuf, [2], DType.INT32, `${this.name}/shape`);
|
|
110
|
+
const [out] = g.addOp("Reshape", [input, shapeConst], {}, `${this.name}/reshape`);
|
|
111
|
+
this.output = out;
|
|
112
|
+
return [null, flatSize];
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
// ---------------------------------------------------------------------------
|
|
116
|
+
// Conv2D — 2D convolution (NHWC): output = activation(conv2d(input, W) + b)
|
|
117
|
+
// ---------------------------------------------------------------------------
|
|
118
|
+
export class Conv2D {
|
|
119
|
+
name;
|
|
120
|
+
output;
|
|
121
|
+
layerParams = [];
|
|
122
|
+
filters;
|
|
123
|
+
kernelSize;
|
|
124
|
+
strides;
|
|
125
|
+
padding;
|
|
126
|
+
activation;
|
|
127
|
+
useBias;
|
|
128
|
+
constructor(filters, options = {}) {
|
|
129
|
+
this.filters = filters;
|
|
130
|
+
this.padding = options.padding ?? "SAME";
|
|
131
|
+
this.activation = options.activation ?? "linear";
|
|
132
|
+
this.useBias = options.useBias ?? true;
|
|
133
|
+
this.name = options.name ?? `conv2d_${filters}f`;
|
|
134
|
+
const ks = options.kernelSize ?? 3;
|
|
135
|
+
this.kernelSize = Array.isArray(ks) ? ks : [ks, ks];
|
|
136
|
+
const st = options.strides ?? 1;
|
|
137
|
+
const [sH, sW] = Array.isArray(st) ? st : [st, st];
|
|
138
|
+
this.strides = [1, sH, sW, 1];
|
|
139
|
+
}
|
|
140
|
+
build(g, input, inputShape) {
|
|
141
|
+
if (inputShape.length !== 4)
|
|
142
|
+
throw new Error(`Conv2D "${this.name}": expects 4D input [batch,H,W,C], got rank ${inputShape.length}`);
|
|
143
|
+
const inChannels = inputShape[3];
|
|
144
|
+
if (!inChannels || inChannels < 1)
|
|
145
|
+
throw new Error(`Conv2D "${this.name}": in_channels must be known, got ${JSON.stringify(inputShape)}`);
|
|
146
|
+
const [kH, kW] = this.kernelSize;
|
|
147
|
+
const wShape = [kH, kW, inChannels, this.filters];
|
|
148
|
+
// He normal init for conv: stddev = sqrt(2 / (kH * kW * inChannels))
|
|
149
|
+
const stddev = Math.sqrt(2 / (kH * kW * inChannels));
|
|
150
|
+
const wInitVal = truncatedNormalInitializer(g, wShape, DType.FLOAT32, { stddev }, `${this.name}/w_init`);
|
|
151
|
+
const { handle: wHandle, initOp: wInitOp } = variableWithInit(g, wShape, DType.FLOAT32, `${this.name}/w`, wInitVal);
|
|
152
|
+
const wRead = readVariable(g, wHandle, DType.FLOAT32, `${this.name}/w_read`);
|
|
153
|
+
let out = conv2dOp(g, input, wRead, this.strides, this.padding, `${this.name}/conv`);
|
|
154
|
+
this.layerParams.push({
|
|
155
|
+
handle: wHandle,
|
|
156
|
+
read: wRead,
|
|
157
|
+
dtype: DType.FLOAT32,
|
|
158
|
+
name: `${this.name}/w`,
|
|
159
|
+
initOp: wInitOp,
|
|
160
|
+
});
|
|
161
|
+
if (this.useBias) {
|
|
162
|
+
const bInitVal = zerosInitializer(g, [this.filters], DType.FLOAT32);
|
|
163
|
+
const { handle: bHandle, initOp: bInitOp } = variableWithInit(g, [this.filters], DType.FLOAT32, `${this.name}/b`, bInitVal);
|
|
164
|
+
const bRead = readVariable(g, bHandle, DType.FLOAT32, `${this.name}/b_read`);
|
|
165
|
+
out = biasAdd(g, out, bRead, `${this.name}/bias_add`);
|
|
166
|
+
this.layerParams.push({
|
|
167
|
+
handle: bHandle,
|
|
168
|
+
read: bRead,
|
|
169
|
+
dtype: DType.FLOAT32,
|
|
170
|
+
name: `${this.name}/b`,
|
|
171
|
+
initOp: bInitOp,
|
|
172
|
+
});
|
|
173
|
+
}
|
|
174
|
+
this.output = activate(g, out, this.activation, this.name);
|
|
175
|
+
// Output spatial dimensions
|
|
176
|
+
const [, H, W] = inputShape;
|
|
177
|
+
const [, sH, sW] = this.strides;
|
|
178
|
+
const outH = H === null
|
|
179
|
+
? null
|
|
180
|
+
: this.padding === "SAME"
|
|
181
|
+
? Math.ceil(H / sH)
|
|
182
|
+
: Math.ceil((H - kH + 1) / sH);
|
|
183
|
+
const outW = W === null
|
|
184
|
+
? null
|
|
185
|
+
: this.padding === "SAME"
|
|
186
|
+
? Math.ceil(W / sW)
|
|
187
|
+
: Math.ceil((W - kW + 1) / sW);
|
|
188
|
+
return [null, outH, outW, this.filters];
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
//# sourceMappingURL=layers.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"layers.js","sourceRoot":"","sources":["../../src/ts/model/layers.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAGvC,OAAO,EACL,gBAAgB,EAChB,YAAY,EACZ,gBAAgB,EAChB,wBAAwB,EACxB,0BAA0B,GAC3B,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAC;AAC/C,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EACL,IAAI,EACJ,SAAS,EACT,KAAK,EACL,OAAO,EACP,MAAM,IAAI,QAAQ,EAClB,IAAI,EACJ,OAAO,EACP,UAAU,EACV,GAAG,EACH,IAAI,EACJ,KAAK,EACL,IAAI,GACL,MAAM,kBAAkB,CAAC;AAE1B,8EAA8E;AAC9E,oBAAoB;AACpB,8EAA8E;AAC9E,SAAS,QAAQ,CAAC,CAAQ,EAAE,CAAS,EAAE,EAAgB,EAAE,IAAY;IACnE,QAAQ,EAAE,EAAE,CAAC;QACX,KAAK,MAAM;YACT,OAAO,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,OAAO,CAAC,CAAC;QACpC,KAAK,YAAY;YACf,OAAO,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,GAAG,IAAI,aAAa,CAAC,CAAC;QACpD,KAAK,OAAO;YACV,OAAO,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,QAAQ,CAAC,CAAC;QACtC,KAAK,SAAS;YACZ,OAAO,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,UAAU,CAAC,CAAC;QAC1C,KAAK,MAAM;YACT,OAAO,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,OAAO,CAAC,CAAC;QACpC,KAAK,SAAS;YACZ,OAAO,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,UAAU,CAAC,CAAC;QAC1C,KAAK,aAAa;YAChB,OAAO,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,cAAc,CAAC,CAAC;QACjD,KAAK,KAAK;YACR,OAAO,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,MAAM,CAAC,CAAC;QAClC,KAAK,MAAM;YACT,OAAO,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,OAAO,CAAC,CAAC;QACpC,KAAK,OAAO;YACV,OAAO,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,QAAQ,CAAC,CAAC;QACtC,KAAK,MAAM;YACT,OAAO,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,GAAG,IAAI,OAAO,CAAC,CAAC;QACpC,KAAK,QAAQ;YACX,OAAO,CAAC,CAAC;QACX;YACE,MAAM,IAAI,KAAK,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;IACjD,CAAC;AACH,CAAC;AAED,8EAA8E;AAC9E,oEAAoE;AACpE,8EAA8E;AAC9E,MAAM,OAAO,KAAK;IACP,IAAI,CAAS;IACtB,MAAM,CAAU;IACP,WAAW,GAAiB,EAAE,CAAC;IAEvB,KAAK,CAAS;IACd,UAAU,CAAe;IACzB,OAAO,CAAU;IAElC,YACE,KAAa,EACb,UAII,EAAE;QAEN,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,QAAQ,CAAC;QACjD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,IAAI,CAAC;QACvC,IAAI,
CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,IAAI,SAAS,KAAK,EAAE,CAAC;IAC/C,CAAC;IAED,KAAK,CACH,CAAQ,EACR,KAAa,EACb,UAA6B;QAE7B,MAAM,UAAU,GAAG,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAW,CAAC;QAC/D,IAAI,CAAC,UAAU,IAAI,UAAU,GAAG,CAAC;YAC/B,MAAM,IAAI,KAAK,CACb,UACE,IAAI,CAAC,IACP,wCAAwC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,CACrE,CAAC;QAEJ,wEAAwE;QACxE,MAAM,QAAQ,GAAG,wBAAwB,CACvC,CAAC,EACD,CAAC,UAAU,EAAE,IAAI,CAAC,KAAK,CAAC,EACxB,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,WAAW,CACxB,CAAC;QACF,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAgB,CAC3D,CAAC,EACD,CAAC,UAAU,EAAE,IAAI,CAAC,KAAK,CAAC,EACxB,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,IAAI,EAChB,QAAQ,CACT,CAAC;QACF,MAAM,KAAK,GAAG,YAAY,CACxB,CAAC,EACD,OAAO,EACP,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,SAAS,CACtB,CAAC;QAEF,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,EAAE,GAAG,IAAI,CAAC,IAAI,SAAS,CAAC,CAAC;QAC7D,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;YACpB,MAAM,EAAE,OAAO;YACf,IAAI,EAAE,KAAK;YACX,KAAK,EAAE,KAAK,CAAC,OAAO;YACpB,IAAI,EAAE,GAAG,IAAI,CAAC,IAAI,IAAI;YACtB,MAAM,EAAE,OAAO;SAChB,CAAC,CAAC;QAEH,yEAAyE;QACzE,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,MAAM,QAAQ,GAAG,gBAAgB,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;YAClE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAgB,CAC3D,CAAC,EACD,CAAC,IAAI,CAAC,KAAK,CAAC,EACZ,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,IAAI,EAChB,QAAQ,CACT,CAAC;YACF,MAAM,KAAK,GAAG,YAAY,CACxB,CAAC,EACD,OAAO,EACP,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,SAAS,CACtB,CAAC;YACF,GAAG,GAAG,OAAO,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,IAAI,WAAW,CAAC,CAAC;YACtD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;gBACpB,MAAM,EAAE,OAAO;gBACf,IAAI,EAAE,KAAK;gBACX,KAAK,EAAE,KAAK,CAAC,OAAO;gBACpB,IAAI,EAAE,GAAG,IAAI,CAAC,IAAI,IAAI;gBACtB,MAAM,EAAE,OAAO;aAChB,CAAC,CAAC;QACL,CAAC;QAED,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;QAC3D,OAAO,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;IAClD,CAAC;CACF;AAED,8EAA8E;
AAC9E,+DAA+D;AAC/D,8EAA8E;AAC9E,MAAM,OAAO,OAAO;IACT,IAAI,CAAS;IACtB,MAAM,CAAU;IACP,WAAW,GAAiB,EAAE,CAAC;IAExC,YAAY,UAA6B,EAAE;QACzC,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,IAAI,SAAS,CAAC;IACxC,CAAC;IAED,KAAK,CACH,CAAQ,EACR,KAAa,EACb,UAA6B;QAE7B,MAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACxC,MAAM,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC;QACvD,MAAM,QAAQ,GAAG,UAAU;YACzB,CAAC,CAAC,IAAI;YACN,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAE,GAAG,CAAE,EAAE,CAAC,CAAC,CAAC;QAC7C,MAAM,OAAO,GAAG,QAAQ,IAAI,CAAC,CAAC,CAAC;QAE/B,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;QACvC,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC7B,QAAQ,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;QAClC,MAAM,UAAU,GAAG,QAAQ,CACzB,CAAC,EACD,QAAQ,EACR,CAAC,CAAC,CAAC,EACH,KAAK,CAAC,KAAK,EACX,GAAG,IAAI,CAAC,IAAI,QAAQ,CACrB,CAAC;QACF,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,KAAK,CACnB,SAAS,EACT,CAAC,KAAK,EAAE,UAAU,CAAC,EACnB,EAAE,EACF,GAAG,IAAI,CAAC,IAAI,UAAU,CACvB,CAAC;QACF,IAAI,CAAC,MAAM,GAAG,GAAG,CAAC;QAClB,OAAO,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC1B,CAAC;CACF;AAED,8EAA8E;AAC9E,4EAA4E;AAC5E,8EAA8E;AAC9E,MAAM,OAAO,MAAM;IACR,IAAI,CAAS;IACtB,MAAM,CAAU;IACP,WAAW,GAAiB,EAAE,CAAC;IAEvB,OAAO,CAAS;IAChB,UAAU,CAAmB;IAC7B,OAAO,CAAmC;IAC1C,OAAO,CAAmB;IAC1B,UAAU,CAAe;IACzB,OAAO,CAAU;IAElC,YACE,OAAe,EACf,UAOI,EAAE;QAEN,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,MAAM,CAAC;QACzC,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,QAAQ,CAAC;QACjD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,IAAI,CAAC;QACvC,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,IAAI,UAAU,OAAO,GAAG,CAAC;QAEjD,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,IAAI,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,CAAE,EAAuB,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC;QAE1E,MAAM,EAAE,GAAG,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC;QAChC,MAAM,CAAC,EAAE,EAAE,EAAE,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,CAAE,EAAuB,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC;QACzE,IAAI,CAAC,OAAO,GAA
G,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;IAChC,CAAC;IAED,KAAK,CACH,CAAQ,EACR,KAAa,EACb,UAA6B;QAE7B,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,WAAW,IAAI,CAAC,IAAI,+CAA+C,UAAU,CAAC,MAAM,EAAE,CACvF,CAAC;QAEJ,MAAM,UAAU,GAAG,UAAU,CAAC,CAAC,CAAW,CAAC;QAC3C,IAAI,CAAC,UAAU,IAAI,UAAU,GAAG,CAAC;YAC/B,MAAM,IAAI,KAAK,CACb,WAAW,IAAI,CAAC,IAAI,qCAAqC,IAAI,CAAC,SAAS,CACrE,UAAU,CACX,EAAE,CACJ,CAAC;QAEJ,MAAM,CAAC,EAAE,EAAE,EAAE,CAAC,GAAG,IAAI,CAAC,UAAU,CAAC;QACjC,MAAM,MAAM,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAElD,qEAAqE;QACrE,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,GAAG,UAAU,CAAC,CAAC,CAAC;QACrD,MAAM,QAAQ,GAAG,0BAA0B,CACzC,CAAC,EACD,MAAM,EACN,KAAK,CAAC,OAAO,EACb,EAAE,MAAM,EAAE,EACV,GAAG,IAAI,CAAC,IAAI,SAAS,CACtB,CAAC;QACF,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAgB,CAC3D,CAAC,EACD,MAAM,EACN,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,IAAI,EAChB,QAAQ,CACT,CAAC;QACF,MAAM,KAAK,GAAG,YAAY,CACxB,CAAC,EACD,OAAO,EACP,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,SAAS,CACtB,CAAC;QAEF,IAAI,GAAG,GAAG,QAAQ,CAChB,CAAC,EACD,KAAK,EACL,KAAK,EACL,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,OAAO,EACZ,GAAG,IAAI,CAAC,IAAI,OAAO,CACpB,CAAC;QACF,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;YACpB,MAAM,EAAE,OAAO;YACf,IAAI,EAAE,KAAK;YACX,KAAK,EAAE,KAAK,CAAC,OAAO;YACpB,IAAI,EAAE,GAAG,IAAI,CAAC,IAAI,IAAI;YACtB,MAAM,EAAE,OAAO;SAChB,CAAC,CAAC;QAEH,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjB,MAAM,QAAQ,GAAG,gBAAgB,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;YACpE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAgB,CAC3D,CAAC,EACD,CAAC,IAAI,CAAC,OAAO,CAAC,EACd,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,IAAI,EAChB,QAAQ,CACT,CAAC;YACF,MAAM,KAAK,GAAG,YAAY,CACxB,CAAC,EACD,OAAO,EACP,KAAK,CAAC,OAAO,EACb,GAAG,IAAI,CAAC,IAAI,SAAS,CACtB,CAAC;YACF,GAAG,GAAG,OAAO,CAAC,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,IAAI,WAAW,CAAC,CAAC;YACtD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;gBACpB,MAAM,EAAE,OAAO;gBACf,IAAI,EAAE,KAAK;gBACX,KAAK,EAAE,KAAK,CAAC,OAAO;gBAC
pB,IAAI,EAAE,GAAG,IAAI,CAAC,IAAI,IAAI;gBACtB,MAAM,EAAE,OAAO;aAChB,CAAC,CAAC;QACL,CAAC;QAED,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;QAE3D,4BAA4B;QAC5B,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,GAAG,UAAU,CAAC;QAC5B,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC;QAChC,MAAM,IAAI,GACR,CAAC,KAAK,IAAI;YACR,CAAC,CAAC,IAAI;YACN,CAAC,CAAC,IAAI,CAAC,OAAO,KAAK,MAAM;gBACzB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC;gBACnB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;QACnC,MAAM,IAAI,GACR,CAAC,KAAK,IAAI;YACR,CAAC,CAAC,IAAI;YACN,CAAC,CAAC,IAAI,CAAC,OAAO,KAAK,MAAM;gBACzB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC;gBACnB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;QAEnC,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;IAC1C,CAAC;CACF"}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import type { Tensor } from "@isidorus/core";
|
|
2
|
+
import { DType } from "@isidorus/core";
|
|
3
|
+
import type { Graph } from "../graph.js";
|
|
4
|
+
import type { Session } from "../session.js";
|
|
5
|
+
import type { Layer } from "./layer.js";
|
|
6
|
+
import type { ParamSpec, FeedEntry } from "../optimizers/sgd.js";
|
|
7
|
+
export type LossFn = "sparse_categorical_crossentropy" | "binary_crossentropy" | "mse";
|
|
8
|
+
export interface Optimizer {
|
|
9
|
+
init(sess: Session): Promise<void>;
|
|
10
|
+
applyGradients(sess: Session, feeds: FeedEntry[]): Promise<void>;
|
|
11
|
+
}
|
|
12
|
+
export interface TrainStepResult {
|
|
13
|
+
loss: number;
|
|
14
|
+
}
|
|
15
|
+
export declare class Sequential {
|
|
16
|
+
private readonly g;
|
|
17
|
+
private readonly layers;
|
|
18
|
+
private _xPlaceholder;
|
|
19
|
+
private _yPlaceholder;
|
|
20
|
+
private _outputTensor;
|
|
21
|
+
private _lossTensor;
|
|
22
|
+
private _allParams;
|
|
23
|
+
private _allInitOp;
|
|
24
|
+
private _labelDtype;
|
|
25
|
+
private compiled;
|
|
26
|
+
constructor(g: Graph, layers: Layer[]);
|
|
27
|
+
/**
|
|
28
|
+
* compile — wire the full computation graph.
|
|
29
|
+
*
|
|
30
|
+
* After this returns, model.params contains ParamSpec entries with
|
|
31
|
+
* real gradient tensors from g.addGradients(). Use those to construct
|
|
32
|
+
* your optimizer before calling init().
|
|
33
|
+
*/
|
|
34
|
+
compile(opts: {
|
|
35
|
+
loss: LossFn;
|
|
36
|
+
inputShape: number[];
|
|
37
|
+
labelDtype?: DType;
|
|
38
|
+
}): void;
|
|
39
|
+
/**
|
|
40
|
+
* init — run all variable initialisations and the optimizer's state init.
|
|
41
|
+
*
|
|
42
|
+
* @param sess The session to run on
|
|
43
|
+
* @param opt The optimizer (must be constructed from model.params)
|
|
44
|
+
*/
|
|
45
|
+
init(sess: Session, opt: Optimizer): Promise<void>;
|
|
46
|
+
/**
|
|
47
|
+
* trainStep — one forward pass, gradient computation, and weight update.
|
|
48
|
+
*
|
|
49
|
+
* Two sequential TF_SessionRun calls:
|
|
50
|
+
* 1. Fetch loss (forward pass runs as part of this)
|
|
51
|
+
* 2. Run optimizer update ops (backward pass + weight update)
|
|
52
|
+
*
|
|
53
|
+
* Keeping them separate avoids the optimizer needing to expose its
|
|
54
|
+
* internal step op name. The overhead is one extra C++ call per step,
|
|
55
|
+
* which is negligible compared to the matmul/conv cost.
|
|
56
|
+
*
|
|
57
|
+
* @param sess Session to run on
|
|
58
|
+
* @param opt Optimizer (same instance used in init)
|
|
59
|
+
* @param xBuf Float32 input bytes [batchSize, ...inputShape]
|
|
60
|
+
* @param yBuf Label bytes (INT32 class indices or FLOAT32 values)
|
|
61
|
+
* @param xShape [batchSize, ...inputShape]
|
|
62
|
+
* @param yShape [batchSize] for classification, [batchSize, units] for mse
|
|
63
|
+
* @param labelDtype DType for labels — defaults to what was set in compile()
|
|
64
|
+
*/
|
|
65
|
+
trainStep(sess: Session, opt: Optimizer, xBuf: Buffer, yBuf: Buffer, xShape: number[], yShape: number[], labelDtype?: DType): Promise<TrainStepResult>;
|
|
66
|
+
/**
|
|
67
|
+
* predict — forward pass only, no gradient computation or update.
|
|
68
|
+
*/
|
|
69
|
+
predict(sess: Session, xBuf: Buffer, xShape: number[]): Promise<{
|
|
70
|
+
data: Buffer;
|
|
71
|
+
shape: number[];
|
|
72
|
+
dtype: DType;
|
|
73
|
+
}>;
|
|
74
|
+
/** Input placeholder tensor — available after compile(). */
|
|
75
|
+
get xPlaceholder(): Tensor;
|
|
76
|
+
/** Label placeholder tensor — available after compile(). */
|
|
77
|
+
get yPlaceholder(): Tensor;
|
|
78
|
+
/** Final layer output tensor — available after compile(). */
|
|
79
|
+
get output(): Tensor;
|
|
80
|
+
/** Scalar mean loss tensor — available after compile(). */
|
|
81
|
+
get loss(): Tensor;
|
|
82
|
+
/**
|
|
83
|
+
* All parameter specs with real gradient tensors.
|
|
84
|
+
* Available after compile(). Use these to construct your optimizer.
|
|
85
|
+
*/
|
|
86
|
+
get params(): ParamSpec[];
|
|
87
|
+
/** Label dtype resolved during compile(). */
|
|
88
|
+
get labelDtype(): DType;
|
|
89
|
+
private assertCompiled;
|
|
90
|
+
}
|
|
91
|
+
//# sourceMappingURL=sequential.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sequential.d.ts","sourceRoot":"","sources":["../../src/ts/model/sequential.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,YAAY,CAAC;AACxC,OAAO,KAAK,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AASjE,MAAM,MAAM,MAAM,GACd,iCAAiC,GACjC,qBAAqB,GACrB,KAAK,CAAC;AAKV,MAAM,WAAW,SAAS;IACxB,IAAI,CAAC,IAAI,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACnC,cAAc,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CAClE;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,MAAM,CAAC;CACd;AAqCD,qBAAa,UAAU;IACrB,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAQ;IAC1B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAU;IAGjC,OAAO,CAAC,aAAa,CAAU;IAC/B,OAAO,CAAC,aAAa,CAAU;IAC/B,OAAO,CAAC,aAAa,CAAU;IAC/B,OAAO,CAAC,WAAW,CAAU;IAC7B,OAAO,CAAC,UAAU,CAAe;IACjC,OAAO,CAAC,UAAU,CAAU;IAC5B,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,QAAQ,CAAS;gBAEb,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE;IAKrC;;;;;;OAMG;IACH,OAAO,CAAC,IAAI,EAAE;QACZ,IAAI,EAAE,MAAM,CAAC;QACb,UAAU,EAAE,MAAM,EAAE,CAAC;QACrB,UAAU,CAAC,EAAE,KAAK,CAAC;KACpB,GAAG,IAAI;IAmJR;;;;;OAKG;IACG,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IAMxD;;;;;;;;;;;;;;;;;;OAkBG;IACG,SAAS,CACb,IAAI,EAAE,OAAO,EACb,GAAG,EAAE,SAAS,EACd,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,MAAM,EAAE,EAChB,MAAM,EAAE,MAAM,EAAE,EAChB,UAAU,CAAC,EAAE,KAAK,GACjB,OAAO,CAAC,eAAe,CAAC;IAuB3B;;OAEG;IACG,OAAO,CACX,IAAI,EAAE,OAAO,EACb,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,MAAM,EAAE,GACf,OAAO,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,KAAK,EAAE,KAAK,CAAA;KAAE,CAAC;IAgB3D,4DAA4D;IAC5D,IAAI,YAAY,IAAI,MAAM,CAGzB;IAED,4DAA4D;IAC5D,IAAI,YAAY,IAAI,MAAM,CAGzB;IAED,6DAA6D;IAC7D,IAAI,MAAM,IAAI,MAAM,CAGnB;IAED,2DAA2D;IAC3D,IAAI,IAAI,IAAI,MAAM,CAGjB;IAED;;;OAGG;IACH,IAAI,MAAM,IAAI,SAAS,EAAE,CAGxB;IAED,6CAA6C;IAC7C,IAAI,UAAU,IAAI,KAAK,CAGtB;IAED,OAAO,CAAC,cAAc;CAIvB"}
|
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
import { DType } from "@isidorus/core";
|
|
2
|
+
import { placeholder } from "../ops/array_ops.js";
|
|
3
|
+
import { mean } from "../ops/math_ops.js";
|
|
4
|
+
import { globalVariablesInitializer } from "../ops/variable_ops.js";
|
|
5
|
+
import { sigmoidCrossEntropyWithLogits } from "../ops/nn_ops.js";
|
|
6
|
+
// ---------------------------------------------------------------------------
|
|
7
|
+
// Sequential
|
|
8
|
+
//
|
|
9
|
+
// Correct usage:
|
|
10
|
+
//
|
|
11
|
+
// // 1. Build graph
|
|
12
|
+
// const model = new Sequential(g, [
|
|
13
|
+
// new Dense(128, { activation: "relu" }),
|
|
14
|
+
// new Dense(10, { activation: "softmax" }),
|
|
15
|
+
// ]);
|
|
16
|
+
//
|
|
17
|
+
// // 2. compile() — no optimizer here. Builds graph, exposes params.
|
|
18
|
+
// model.compile({ loss: "sparse_categorical_crossentropy", inputShape: [784] });
|
|
19
|
+
//
|
|
20
|
+
// // 3. Construct optimizer from model.params — grad tensors exist now.
|
|
21
|
+
// const opt = new Adam(g, model.params, 0.001);
|
|
22
|
+
//
|
|
23
|
+
// // 4. init(sess, opt) — run variable inits + optimizer state init.
|
|
24
|
+
// await model.init(sess, opt);
|
|
25
|
+
//
|
|
26
|
+
// // 5. Training loop — pass opt explicitly each step.
|
|
27
|
+
// for (const [xBuf, yBuf] of batches) {
|
|
28
|
+
// const { loss } = await model.trainStep(sess, opt, xBuf, yBuf, ...);
|
|
29
|
+
// }
|
|
30
|
+
//
|
|
31
|
+
// // 6. Inference — no optimizer needed.
|
|
32
|
+
// const preds = await model.predict(sess, xBuf, xShape);
|
|
33
|
+
//
|
|
34
|
+
// Why optimizer is not in compile():
|
|
35
|
+
// compile() calls g.addGradients() which is what populates model.params
|
|
36
|
+
// with real gradient tensors. The optimizer constructor needs those grad
|
|
37
|
+
// tensors to wire update ops. So the optimizer must be created AFTER
|
|
38
|
+
// compile(), not passed into it.
|
|
39
|
+
// ---------------------------------------------------------------------------
|
|
40
|
+
export class Sequential {
  g;
  layers;
  // Every field below is assigned inside compile().
  _xPlaceholder;
  _yPlaceholder;
  _outputTensor;
  _lossTensor;
  _allParams;
  _allInitOp;
  _labelDtype;
  compiled = false;

  constructor(g, layers) {
    this.g = g;
    this.layers = layers;
  }

  /**
   * compile — wire the full computation graph.
   *
   * Once this returns, `params` holds ParamSpec entries whose `grad`
   * fields are real gradient tensors produced by g.addGradients().
   * Construct your optimizer from those BEFORE calling init().
   */
  compile(opts) {
    if (this.compiled) throw new Error("Sequential.compile() called twice");

    const lossKind = opts.loss;
    const inputShape = opts.inputShape;
    this._labelDtype = opts.labelDtype ?? DType.INT32;

    // ── Input placeholder: leading null dim = dynamic batch size ─────────
    this._xPlaceholder = placeholder(
      this.g,
      "x",
      [null, ...inputShape],
      DType.FLOAT32,
    );

    // ── Thread the layers: each consumes the previous output tensor ──────
    let flowing = this._xPlaceholder;
    let flowingShape = [null, ...inputShape];
    for (const layer of this.layers) {
      flowingShape = layer.build(this.g, flowing, flowingShape);
      flowing = layer.output;
    }
    this._outputTensor = flowing;

    // ── Gather every trainable parameter the layers declared ─────────────
    const paramList = this.layers.flatMap((l) => l.layerParams);

    // ── Label placeholder ────────────────────────────────────────────────
    // mse regresses against per-unit float targets [batch, units]; the
    // classification losses take a flat label vector [batch].
    const yShape =
      lossKind === "mse"
        ? [null, flowingShape[flowingShape.length - 1] ?? null]
        : [null];
    this._yPlaceholder = placeholder(this.g, "y", yShape, this._labelDtype);

    // ── Loss op + gradient computation ───────────────────────────────────
    //
    // SparseSoftmaxCrossEntropyWithLogits has no gradient registered in
    // the TF C API registry, so addGradients([meanLoss], params) would
    // fail with "No gradient defined for op". The workaround: the op's
    // second output already IS the analytical per-example gradient of the
    // loss w.r.t. the logits (softmax(logits) - one_hot(labels)). Scale
    // it by 1/batch_size and feed it to addGradients() as the initial dx
    // at the network output — TF then differentiates backward through the
    // layers themselves without ever touching the loss op.
    //
    // binary_crossentropy and mse are assembled from primitives TF can
    // differentiate, so they use addGradients([loss], params) directly.
    const reads = paramList.map((p) => p.read);
    let gradTensors;

    if (lossKind === "sparse_categorical_crossentropy") {
      // out[0]: per-example loss [batch]
      // out[1]: dLoss/dLogits per example [batch, num_classes]
      const [perExample, backprop] = this.g.addOp(
        "SparseSoftmaxCrossEntropyWithLogits",
        [this._outputTensor, this._yPlaceholder],
        {},
        "loss/xent",
      );
      this._lossTensor = mean(this.g, perExample, [0], false, "loss/mean");

      // Size of the 1-D label tensor == dynamic batch size.
      const [batchN] = this.g.addOp(
        "Size",
        [this._yPlaceholder],
        { out_type: { kind: "type", value: DType.INT32 } },
        "loss/batch_n",
      );
      const [batchNf] = this.g.addOp(
        "Cast",
        [batchN],
        { DstT: { kind: "type", value: DType.FLOAT32 } },
        "loss/batch_n_f32",
      );

      // dMeanLoss/dLogits = backprop / batch_size   [batch, num_classes]
      const [dLogits] = this.g.addOp(
        "RealDiv",
        [backprop, batchNf],
        {},
        "loss/dL_dlogits",
      );

      // Seed the backward pass at the logits; the loss op itself (which
      // has no registered C API gradient) is never differentiated.
      gradTensors = this.g.addGradients([this._outputTensor], reads, [
        dLogits,
      ]);
    } else if (lossKind === "binary_crossentropy") {
      // Built purely from differentiable primitives.
      const perExample = sigmoidCrossEntropyWithLogits(
        this.g,
        this._yPlaceholder,
        this._outputTensor,
        "loss/bce",
      );
      this._lossTensor = mean(this.g, perExample, [0], false, "loss/mean");
      gradTensors = this.g.addGradients([this._lossTensor], reads);
    } else if (lossKind === "mse") {
      const [diff] = this.g.addOp(
        "Sub",
        [this._outputTensor, this._yPlaceholder],
        {},
        "loss/diff",
      );
      const [sq] = this.g.addOp("Square", [diff], {}, "loss/sq");
      const perExample = mean(this.g, sq, [1], false, "loss/mse_per_sample");
      this._lossTensor = mean(this.g, perExample, [0], false, "loss/mean");
      gradTensors = this.g.addGradients([this._lossTensor], reads);
    } else {
      throw new Error(`Unknown loss: ${lossKind}`);
    }

    // ── Pair each param with its freshly created gradient tensor ─────────
    this._allParams = paramList.map((p, i) => ({
      handle: p.handle,
      grad: gradTensors[i],
      dtype: p.dtype,
      name: p.name,
    }));

    // ── One grouped init op covering every layer variable ────────────────
    const initOpNames = paramList.map((p) => p.initOp);
    this._allInitOp = globalVariablesInitializer(
      this.g,
      initOpNames,
      "model_init",
    );

    this.compiled = true;
  }

  /**
   * init — run all variable initialisations plus the optimizer's own
   * state init.
   *
   * @param sess Session to run on
   * @param opt  Optimizer (must have been constructed from model.params)
   */
  async init(sess, opt) {
    this.assertCompiled("init");
    await sess.run([], [], [this._allInitOp]);
    await opt.init(sess);
  }

  /**
   * trainStep — one forward pass, gradient computation, and weight update.
   *
   * Issues two sequential TF_SessionRun calls:
   *   1. fetch the scalar loss (the forward pass runs as part of this);
   *   2. run the optimizer's update ops (backward pass + weight update).
   *
   * Keeping the two separate means the optimizer never has to expose its
   * internal step-op name; the cost is one extra C++ call per step, which
   * is noise next to the matmul/conv work.
   *
   * @param sess       Session to run on
   * @param opt        Optimizer (same instance used in init)
   * @param xBuf       Float32 input bytes [batchSize, ...inputShape]
   * @param yBuf       Label bytes (INT32 class indices or FLOAT32 values)
   * @param xShape     [batchSize, ...inputShape]
   * @param yShape     [batchSize] for classification, [batchSize, units] for mse
   * @param labelDtype Label DType — defaults to what compile() resolved
   */
  async trainStep(sess, opt, xBuf, yBuf, xShape, yShape, labelDtype) {
    this.assertCompiled("trainStep");
    const feeds = [
      [this._xPlaceholder, { dtype: DType.FLOAT32, shape: xShape, data: xBuf }],
      [
        this._yPlaceholder,
        { dtype: labelDtype ?? this._labelDtype, shape: yShape, data: yBuf },
      ],
    ];

    // Run 1: forward pass, fetch the scalar loss.
    const [lossOut] = await sess.run(feeds, [this._lossTensor], []);

    // Run 2: backward pass + weight update via the optimizer.
    await opt.applyGradients(sess, feeds);

    // The fetched loss is a single float32 scalar.
    const view = new Float32Array(
      lossOut.data.buffer,
      lossOut.data.byteOffset,
      1,
    );
    return { loss: view[0] };
  }

  /**
   * predict — forward pass only; no gradients, no weight updates.
   */
  async predict(sess, xBuf, xShape) {
    this.assertCompiled("predict");
    const feed = [
      this._xPlaceholder,
      { dtype: DType.FLOAT32, shape: xShape, data: xBuf },
    ];
    const [out] = await sess.run([feed], [this._outputTensor]);
    return out;
  }

  // ── Accessors ─────────────────────────────────────────────────────────────

  /** Input placeholder tensor — available after compile(). */
  get xPlaceholder() {
    this.assertCompiled("xPlaceholder");
    return this._xPlaceholder;
  }

  /** Label placeholder tensor — available after compile(). */
  get yPlaceholder() {
    this.assertCompiled("yPlaceholder");
    return this._yPlaceholder;
  }

  /** Final layer output tensor — available after compile(). */
  get output() {
    this.assertCompiled("output");
    return this._outputTensor;
  }

  /** Scalar mean loss tensor — available after compile(). */
  get loss() {
    this.assertCompiled("loss");
    return this._lossTensor;
  }

  /**
   * All parameter specs with real gradient tensors.
   * Available after compile(); use these to construct your optimizer.
   */
  get params() {
    this.assertCompiled("params");
    return this._allParams;
  }

  /** Label dtype resolved during compile(). */
  get labelDtype() {
    this.assertCompiled("labelDtype");
    return this._labelDtype;
  }

  // Guard: every post-compile entry point funnels through here.
  assertCompiled(caller) {
    if (!this.compiled)
      throw new Error(`Sequential.${caller}(): call compile() first`);
  }
}
|
|
248
|
+
//# sourceMappingURL=sequential.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sequential.js","sourceRoot":"","sources":["../../src/ts/model/sequential.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAKvC,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAE,IAAI,EAAE,MAAM,oBAAoB,CAAC;AAC1C,OAAO,EAAE,0BAA0B,EAAE,MAAM,wBAAwB,CAAC;AACpE,OAAO,EAAE,6BAA6B,EAAE,MAAM,kBAAkB,CAAC;AAsBjE,8EAA8E;AAC9E,aAAa;AACb,EAAE;AACF,iBAAiB;AACjB,EAAE;AACF,sBAAsB;AACtB,sCAAsC;AACtC,8CAA8C;AAC9C,iDAAiD;AACjD,QAAQ;AACR,EAAE;AACF,uEAAuE;AACvE,mFAAmF;AACnF,EAAE;AACF,0EAA0E;AAC1E,kDAAkD;AAClD,EAAE;AACF,uEAAuE;AACvE,iCAAiC;AACjC,EAAE;AACF,yDAAyD;AACzD,0CAA0C;AAC1C,0EAA0E;AAC1E,MAAM;AACN,EAAE;AACF,2CAA2C;AAC3C,2DAA2D;AAC3D,EAAE;AACF,qCAAqC;AACrC,0EAA0E;AAC1E,2EAA2E;AAC3E,uEAAuE;AACvE,mCAAmC;AACnC,8EAA8E;AAE9E,MAAM,OAAO,UAAU;IACJ,CAAC,CAAQ;IACT,MAAM,CAAU;IAEjC,6BAA6B;IACrB,aAAa,CAAU;IACvB,aAAa,CAAU;IACvB,aAAa,CAAU;IACvB,WAAW,CAAU;IACrB,UAAU,CAAe;IACzB,UAAU,CAAU;IACpB,WAAW,CAAS;IACpB,QAAQ,GAAG,KAAK,CAAC;IAEzB,YAAY,CAAQ,EAAE,MAAe;QACnC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC;QACX,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;OAMG;IACH,OAAO,CAAC,IAIP;QACC,IAAI,IAAI,CAAC,QAAQ;YAAE,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;QAExE,MAAM,EAAE,IAAI,EAAE,UAAU,EAAE,GAAG,IAAI,CAAC;QAClC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,UAAU,IAAI,KAAK,CAAC,KAAK,CAAC;QAElD,yEAAyE;QACzE,IAAI,CAAC,aAAa,GAAG,WAAW,CAC9B,IAAI,CAAC,CAAC,EACN,GAAG,EACH,CAAC,IAAI,EAAE,GAAG,UAAU,CAAC,EACrB,KAAK,CAAC,OAAO,CACd,CAAC;QAEF,yEAAyE;QACzE,IAAI,OAAO,GAAW,IAAI,CAAC,aAAa,CAAC;QACzC,IAAI,KAAK,GAAsB,CAAC,IAAI,EAAE,GAAG,UAAU,CAAC,CAAC;QAErD,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;YAC5C,OAAO,GAAG,KAAK,CAAC,MAAM,CAAC;QACzB,CAAC;QACD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC;QAE7B,yEAAyE;QACzE,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC;QAEjE,yEAAyE;QACzE,MAAM,MAAM,GACV,IAAI,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAA
C,CAAC,CAAC,IAAI,CAAC,CAAC;QACpE,IAAI,CAAC,aAAa,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;QAExE,yEAAyE;QACzE,EAAE;QACF,wEAAwE;QACxE,0EAA0E;QAC1E,gFAAgF;QAChF,EAAE;QACF,oEAAoE;QACpE,mEAAmE;QACnE,qEAAqE;QACrE,wEAAwE;QACxE,yEAAyE;QACzE,EAAE;QACF,sEAAsE;QACtE,yEAAyE;QACzE,oCAAoC;QAEpC,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACtD,IAAI,KAAe,CAAC;QAEpB,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,iCAAiC,CAAC,CAAC,CAAC;gBACvC,sCAAsC;gBACtC,4DAA4D;gBAC5D,MAAM,CAAC,SAAS,EAAE,QAAQ,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,CACxC,qCAAqC,EACrC,CAAC,IAAI,CAAC,aAAa,EAAE,IAAI,CAAC,aAAa,CAAC,EACxC,EAAE,EACF,WAAW,CACZ,CAAC;gBAEF,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,WAAW,CAAC,CAAC;gBAEpE,gEAAgE;gBAChE,MAAM,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,CAC3B,MAAM,EACN,CAAC,IAAI,CAAC,aAAa,CAAC,EACpB;oBACE,QAAQ,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;iBAC/C,EACD,cAAc,CACf,CAAC;gBACF,MAAM,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,CAC5B,MAAM,EACN,CAAC,MAAM,CAAC,EACR;oBACE,IAAI,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE;iBAC7C,EACD,kBAAkB,CACnB,CAAC;gBAEF,kEAAkE;gBAClE,MAAM,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,CAC5B,SAAS,EACT,CAAC,QAAQ,EAAE,OAAO,CAAC,EACnB,EAAE,EACF,iBAAiB,CAClB,CAAC;gBAEF,8DAA8D;gBAC9D,sDAAsD;gBACtD,KAAK,GAAG,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE;oBAC7D,OAAO;iBACR,CAAC,CAAC;gBACH,MAAM;YACR,CAAC;YAED,KAAK,qBAAqB,CAAC,CAAC,CAAC;gBAC3B,iEAAiE;gBACjE,MAAM,SAAS,GAAG,6BAA6B,CAC7C,IAAI,CAAC,CAAC,EACN,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,EAClB,UAAU,CACX,CAAC;gBACF,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,WAAW,CAAC,CAAC;gBACpE,KAAK,GAAG,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE,WAAW,CAAC,CAAC;gBAC7D,MAAM;YACR,CAAC;YAED,KAAK,KAAK,CAAC,CAAC,CAAC;gBACX,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,CACzB,KAAK,EACL,CAAC,IAAI,CAA
C,aAAa,EAAE,IAAI,CAAC,aAAa,CAAC,EACxC,EAAE,EACF,WAAW,CACZ,CAAC;gBACF,MAAM,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,SAAS,CAAC,CAAC;gBAC3D,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,qBAAqB,CAAC,CAAC;gBACtE,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,WAAW,CAAC,CAAC;gBACpE,KAAK,GAAG,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE,WAAW,CAAC,CAAC;gBAC7D,MAAM;YACR,CAAC;YAED;gBACE,MAAM,IAAI,KAAK,CAAC,iBAAiB,IAAI,EAAE,CAAC,CAAC;QAC7C,CAAC;QACD,yEAAyE;QACzE,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;YAC9C,MAAM,EAAE,CAAC,CAAC,MAAM;YAChB,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;YACd,KAAK,EAAE,CAAC,CAAC,KAAK;YACd,IAAI,EAAE,CAAC,CAAC,IAAI;SACb,CAAC,CAAC,CAAC;QAEJ,yEAAyE;QACzE,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;QACxD,IAAI,CAAC,UAAU,GAAG,0BAA0B,CAC1C,IAAI,CAAC,CAAC,EACN,WAAW,EACX,YAAY,CACb,CAAC;QAEF,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;IACvB,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,IAAI,CAAC,IAAa,EAAE,GAAc;QACtC,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;QAC5B,MAAM,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC;QAC1C,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACvB,CAAC;IAED;;;;;;;;;;;;;;;;;;OAkBG;IACH,KAAK,CAAC,SAAS,CACb,IAAa,EACb,GAAc,EACd,IAAY,EACZ,IAAY,EACZ,MAAgB,EAChB,MAAgB,EAChB,UAAkB;QAElB,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;QAEjC,MAAM,MAAM,GAAG,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC;QAC9C,MAAM,KAAK,GAAgB;YACzB,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;YACzE,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;SACnE,CAAC;QAEF,6BAA6B;QAC7B,MAAM,CAAC,OAAO,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE,EAAE,CAAC,CAAC;QAEhE,iCAAiC;QACjC,MAAM,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAEtC,MAAM,OAAO,GAAG,IAAI,YAAY,CAC9B,OAAO,CAAC,IA
AI,CAAC,MAAM,EACnB,OAAO,CAAC,IAAI,CAAC,UAAU,EACvB,CAAC,CACF,CAAC,CAAC,CAAC,CAAC;QACL,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;IAC3B,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,OAAO,CACX,IAAa,EACb,IAAY,EACZ,MAAgB;QAEhB,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC;QAC/B,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,CAC1B;YACE;gBACE,IAAI,CAAC,aAAa;gBAClB,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE;aACpD;SACF,EACD,CAAC,IAAI,CAAC,aAAa,CAAC,CACrB,CAAC;QACF,OAAO,GAAG,CAAC;IACb,CAAC;IAED,6EAA6E;IAE7E,4DAA4D;IAC5D,IAAI,YAAY;QACd,IAAI,CAAC,cAAc,CAAC,cAAc,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED,4DAA4D;IAC5D,IAAI,YAAY;QACd,IAAI,CAAC,cAAc,CAAC,cAAc,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED,6DAA6D;IAC7D,IAAI,MAAM;QACR,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC9B,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED,2DAA2D;IAC3D,IAAI,IAAI;QACN,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;QAC5B,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;IAED;;;OAGG;IACH,IAAI,MAAM;QACR,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC9B,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAED,6CAA6C;IAC7C,IAAI,UAAU;QACZ,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC;QAClC,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;IAEO,cAAc,CAAC,MAAc;QACnC,IAAI,CAAC,IAAI,CAAC,QAAQ;YAChB,MAAM,IAAI,KAAK,CAAC,cAAc,MAAM,0BAA0B,CAAC,CAAC;IACpE,CAAC;CACF"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"array_ops.d.ts","sourceRoot":"","sources":["../../src/ts/ops/array_ops.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AACpD,OAAO,EAAE,KAAK,EAAa,MAAM,gBAAgB,CAAC;AAClD,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AAEzC;;;GAGG;AACH,wBAAgB,WAAW,CACzB,CAAC,EAAE,KAAK,EACR,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,KAAK,EACZ,KAAK,GAAE,KAAqB,GAC3B,MAAM,CAWR;AAED;;;GAGG;AACH,wBAAgB,QAAQ,CACtB,CAAC,EAAE,KAAK,EACR,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,MAAM,EAAE,EACf,KAAK,EAAE,KAAK,EACZ,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAWR;AAED,4CAA4C;AAC5C,wBAAgB,MAAM,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAIrE;AAED;;;;GAIG;AACH,wBAAgB,OAAO,CACrB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,KAAK,EAAE,MAAM,EACb,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAGR;AAED;;;GAGG;AACH,wBAAgB,SAAS,CACvB,CAAC,EAAE,KAAK,EACR,CAAC,EAAE,MAAM,EACT,QAAQ,EAAE,MAAM,EAAE,EAClB,IAAI,CAAC,EAAE,MAAM,GACZ,MAAM,CAKR;AAED,wEAAwE;AACxE,wBAAgB,QAAQ,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAGnE"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"array_ops.js","sourceRoot":"","sources":["../../src/ts/ops/array_ops.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAGlD;;;GAGG;AACH,MAAM,UAAU,WAAW,CACzB,CAAQ,EACR,IAAY,EACZ,KAAY,EACZ,QAAe,KAAK,CAAC,OAAO;IAE5B,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,aAAa,EACb,EAAE,EACF;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACrC,KAAK,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,CAAC,KAAK,CAAC,EAAE;KAClD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,QAAQ,CACtB,CAAQ,EACR,IAAY,EACZ,KAAe,EACf,KAAY,EACZ,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CACjB,OAAO,EACP,EAAE,EACF;QACE,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE;QACrC,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,KAAK,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;KACzD,EACD,IAAI,CACL,CAAC;IACF,OAAO,CAAC,CAAC;AACX,CAAC;AAED,4CAA4C;AAC5C,MAAM,UAAU,MAAM,CAAC,CAAQ,EAAE,KAAa,EAAE,IAAa;IAC3D,MAAM,GAAG,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;IAClC,GAAG,CAAC,YAAY,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;IAC3B,OAAO,QAAQ,CAAC,CAAC,EAAE,GAAG,EAAE,EAAE,EAAE,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACnD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,OAAO,CACrB,CAAQ,EACR,CAAS,EACT,KAAa,EACb,IAAa;IAEb,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IACrD,OAAO,CAAC,CAAC;AACX,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,SAAS,CACvB,CAAQ,EACR,CAAS,EACT,QAAkB,EAClB,IAAa;IAEb,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACzD,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAC5D,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;IACzE,OAAO,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,IAAI,CAAC,CAAC;AACzC,CAAC;AAED,wEAAwE;AACxE,MAAM,UAAU,QAAQ,CAAC,CAAQ,EAAE,CAAS,EAAE,IAAa;IACzD,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC;IAC/C,
OAAO,CAAC,CAAC;AACX,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/ts/ops/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,eAAe,CAAC;AAC9B,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/ts/ops/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,eAAe,CAAC;AAC9B,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC"}
|