@genai-fi/nanogpt 0.4.1 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Generator.js +3 -3
- package/dist/NanoGPTModel.js +84 -74
- package/dist/TeachableLLM.js +1 -1
- package/dist/{random_width-CMHmdbSu.js → TiedEmbedding-CnJ1bx4q.js} +760 -719
- package/dist/{axis_util-DeydwOoC.js → axis_util-BgTGy5w8.js} +1 -1
- package/dist/{concat-DS_qH7MI.js → concat-CuRsVY-K.js} +1 -1
- package/dist/dropout-DfDdklfL.js +193 -0
- package/dist/{gather-BUmJIS8n.js → gather-ZYRWhmXR.js} +1 -1
- package/dist/gelu-CnCt17Lk.js +26 -0
- package/dist/{index-XjBAhiFO.js → index-C4JCoBvj.js} +61 -61
- package/dist/kernel_funcs_utils-CAd1h9X1.js +388 -0
- package/dist/layers/CausalSelfAttention.js +74 -73
- package/dist/layers/MLP.d.ts +3 -1
- package/dist/layers/MLP.js +93 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +3 -3
- package/dist/layers/TiedEmbedding.js +6 -46
- package/dist/layers/TransformerBlock.js +2 -2
- package/dist/{log_sum_exp-DJPkVZZn.js → log_sum_exp-BswFnwOb.js} +5 -5
- package/dist/main.js +1 -1
- package/dist/{mat_mul-CKwFEV1Q.js → mat_mul-415y5Qn2.js} +1 -1
- package/dist/{max-DJvEiCAJ.js → max-CP_9O2Yd.js} +1 -1
- package/dist/{moments-CrWRPcR3.js → moments-CjeIaVdp.js} +3 -3
- package/dist/{norm-BzY929B_.js → norm-CZM380I3.js} +5 -5
- package/dist/{ones-BO01zpJG.js → ones-Bf3YR48P.js} +2 -2
- package/dist/ops/appendCache.d.ts +1 -1
- package/dist/ops/appendCache.js +10 -4
- package/dist/ops/attentionMask.d.ts +1 -1
- package/dist/ops/attentionMask.js +4 -4
- package/dist/ops/cpu/appendCache.d.ts +1 -2
- package/dist/ops/cpu/appendCache.js +15 -20
- package/dist/ops/cpu/attentionMask.js +15 -11
- package/dist/ops/cpu/fusedSoftmax.js +2 -2
- package/dist/ops/cpu/gatherSub.js +3 -3
- package/dist/ops/cpu/gelu.d.ts +1 -0
- package/dist/ops/cpu/gelu.js +40 -0
- package/dist/ops/cpu/mulDropout.js +1 -1
- package/dist/ops/cpu/qkv.js +3 -3
- package/dist/ops/cpu/rope.js +5 -5
- package/dist/ops/cpu/scatterSub.js +4 -4
- package/dist/ops/fusedSoftmax.js +1 -1
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.d.ts +3 -0
- package/dist/ops/gelu.js +8 -0
- package/dist/ops/grads/attentionMask.js +1 -1
- package/dist/ops/grads/fusedSoftmax.js +2 -2
- package/dist/ops/grads/gelu.d.ts +2 -0
- package/dist/ops/grads/gelu.js +5 -0
- package/dist/ops/grads/qkv.js +1 -1
- package/dist/ops/grads/rope.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/node/sparseCrossEntropy.js +1 -1
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/webgl/appendCache.js +14 -13
- package/dist/ops/webgl/attentionMask.js +19 -18
- package/dist/ops/webgl/fusedSoftmax.js +483 -782
- package/dist/ops/webgl/gatherSub.js +1 -1
- package/dist/ops/webgl/gelu.d.ts +2 -0
- package/dist/ops/webgl/gelu.js +50 -0
- package/dist/ops/webgl/mulDropout.js +1 -1
- package/dist/ops/webgl/qkv.js +1 -1
- package/dist/ops/webgl/rope.js +1 -1
- package/dist/ops/webgl/scatterSub.js +1 -1
- package/dist/{range-DQMNzBWs.js → range-9AzeApCc.js} +1 -1
- package/dist/{reshape-DFzh97Sc.js → reshape-Boe4DuIO.js} +1 -1
- package/dist/{sin-BYM-U4Ut.js → sin-KmhiDuMa.js} +1 -1
- package/dist/{slice_util-CnVNPQI-.js → slice_util-19zDNNSn.js} +2 -2
- package/dist/{softmax-4DOn6cPq.js → softmax-Cujsg4ay.js} +1 -1
- package/dist/{split-CkbeVdF8.js → split-DbcNm1-i.js} +1 -1
- package/dist/{stack-DaIMO5iX.js → stack-D1YjmgKN.js} +1 -1
- package/dist/{sum-C6u3xMi3.js → sum-R28pucR5.js} +1 -1
- package/dist/{tensor-Cu1fU7H7.js → tensor-BVeHdl7V.js} +1 -1
- package/dist/{tensor2d-D0CKdG6B.js → tensor2d-DqFGNs_K.js} +1 -1
- package/dist/{tfjs_backend-Bzl2SrRo.js → tfjs_backend-Cug-PH75.js} +826 -1015
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.js +3 -3
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.js +5 -5
- package/dist/training/sparseCrossEntropy.js +4 -4
- package/dist/utilities/dummy.js +2 -2
- package/dist/utilities/generate.js +3 -3
- package/dist/utilities/load.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-BS4AKqNU.js → variable-LJT9Ld63.js} +1 -1
- package/dist/{zeros-CmJFiC84.js → zeros-dnQxFgAD.js} +1 -1
- package/package.json +1 -1
- package/dist/MLP-KHhikThU.js +0 -83
```diff
--- package/dist/ops/webgl/appendCache.js (0.4.1)
+++ package/dist/ops/webgl/appendCache.js (0.4.3)
@@ -1,12 +1,12 @@
-import { r as …
+import { r as p } from "../../index-C4JCoBvj.js";
 class m {
   variableNames = ["cache", "item"];
   outputShape;
   userCode;
   customUniforms = [{ name: "cacheT", type: "int" }];
-  constructor(t, a, …
-    const …
-    this.outputShape = [t, a, …
+  constructor(t, a, n, o, c) {
+    const s = Math.min(n + 1, c);
+    this.outputShape = [t, a, s, o], this.userCode = `
     void main() {
       ivec4 coords = getOutputCoords(); // [b, h, t, d]
       int b = coords.x;
@@ -15,7 +15,7 @@ class m {
       int d = coords.w;
 
       int itemT = 1;
-      int maxSize = ${…
+      int maxSize = ${c};
       int totalT = cacheT + itemT;
       int start = totalT >= maxSize ? 1 : 0;
 
@@ -23,21 +23,22 @@
       float val = 0.0;
       if (srcT < cacheT) {
         val = getCache(b, h, srcT, d);
-      } else {
+      } else if (srcT == cacheT) {
         val = getItem(b, h, 0, d);
-      }
+      } else {
+        val = 0.0;}
       setOutput(val);
     }
   `;
   }
 }
-function …
-  const { cache: t, item: a } = e.inputs, { maxSize: o } = e.attrs, …
-  return …
+function d(e) {
+  const { cache: t, item: a } = e.inputs, { maxSize: n, pastLen: o } = e.attrs, c = e.backend, s = t.shape[0], r = t.shape[2], i = t.shape[1], h = new m(s, i, r, a.shape[3], n);
+  return c.runWebGLProgram(h, [t, a], "float32", [[o]]);
 }
-const …
+const l = {
   kernelName: "AppendCache",
   backendName: "webgl",
-  kernelFunc: …
+  kernelFunc: d
 };
-…
+p(l);
```
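For orientation, here is a minimal TypeScript sketch of what the updated AppendCache WebGL kernel appears to compute, written with stock `@tensorflow/tfjs-core` ops rather than the package's own op. The tensor layout `[batch, heads, time, headDim]`, the meanings of `pastLen` and `maxSize`, and the name `appendCacheReference` are assumptions read off the shader above, not the package's API.

```ts
// Hypothetical reference only: mirrors the shader's slide-by-one behaviour.
// Assumed shapes (from the diff): cache [B, H, T, D], item [B, H, 1, D];
// pastLen = valid timesteps already in the cache, maxSize = cache capacity.
import * as tf from "@tensorflow/tfjs-core";

function appendCacheReference(
  cache: tf.Tensor4D,
  item: tf.Tensor4D,
  pastLen: number,
  maxSize: number
): tf.Tensor4D {
  return tf.tidy(() => {
    const [b, h, cacheT, d] = cache.shape;
    const outT = Math.min(cacheT + 1, maxSize);    // output length, as in the constructor
    const start = pastLen + 1 >= maxSize ? 1 : 0;  // drop the oldest slot when full
    const parts: tf.Tensor[] = [];
    const kept = pastLen - start;
    if (kept > 0) {
      // srcT < cacheT: copy the still-valid cache slots
      parts.push(tf.slice(cache, [0, 0, start, 0], [b, h, kept, d]));
    }
    // srcT == cacheT: append the new key/value slice
    parts.push(item);
    const valid = tf.concat(parts, 2);
    // remaining slots stay zero, matching `val = 0.0` in the shader
    const padT = outT - valid.shape[2];
    return (padT > 0
      ? tf.pad(valid, [[0, 0], [0, 0], [0, padT], [0, 0]])
      : valid) as tf.Tensor4D;
  });
}
```

Under this reading, the kernel rolls a fixed-capacity KV cache forward by one timestep per decode step, which is consistent with the new `pastLen` attribute threaded through the kernelFunc above.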
```diff
--- package/dist/ops/webgl/attentionMask.js (0.4.1)
+++ package/dist/ops/webgl/attentionMask.js (0.4.3)
@@ -1,11 +1,14 @@
-import { r as …
-class …
-  variableNames = ["q", "k" …
+import { r as h } from "../../index-C4JCoBvj.js";
+class l {
+  variableNames = ["q", "k"];
   outputShape;
   userCode;
-  customUniforms = [ …
-  …
-  …
+  customUniforms = [
+    { name: "divisor", type: "float" },
+    { name: "pastLen", type: "int" }
+  ];
+  constructor(t, s, e, n, a) {
+    this.outputShape = [t, s, e, n], this.userCode = `
     void main() {
       ivec4 coords = getOutputCoords(); // [batch, nh, t1, t2]
       int b = coords.x;
@@ -14,30 +17,28 @@ class k {
       int t2 = coords.w;
 
       float sum = 0.0;
-      for (int i = 0; i < ${…
+      for (int i = 0; i < ${a}; ++i) {
         float qv = getQ(b, h, t1, i);
-        float kv = getK(b, h, t2, i);
+        float kv = getK(b, h, t2, i);
         sum += qv * kv;
       }
 
       // Scale by divisor
       float scaled = sum * divisor;
 
-      // …
-      …
-      …
-      setOutput(scaled + maskVal);
+      // Mask out future positions
+      setOutput((t2 > t1 + pastLen) ? -1.0/0.0 : scaled);
     }
   `;
   }
 }
-function …
-  const { q: t, k: …
-  return …
+function m(o) {
+  const { q: t, k: s } = o.inputs, { divisor: e, pastLen: n } = o.attrs, a = o.backend, i = t.shape[0], r = t.shape[2], c = s.shape[2], u = t.shape[1], p = t.shape[3], d = new l(i, u, r, c, p);
+  return a.runWebGLProgram(d, [t, s], "float32", [[e], [n]]);
 }
-const …
+const k = {
   kernelName: "AttentionMask",
   backendName: "webgl",
-  kernelFunc: …
+  kernelFunc: m
 };
-…
+h(k);
```
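And a similarly hedged sketch of the score the updated AttentionMask kernel produces: a scaled q·kᵀ with positions `t2 > t1 + pastLen` forced to negative infinity (`-1.0/0.0` in the shader). The name `attentionMaskReference`, the assumed shapes, and the use of stock tfjs-core ops are illustrative assumptions, not the package's op.

```ts
// Hypothetical reference only: reproduces the shader's output with stock ops.
// Assumed shapes: q [B, H, Tq, D], k [B, H, Tk, D]; divisor is the attention
// scale; pastLen shifts the causal boundary so queries can attend to all
// cached (past) positions plus their own step.
import * as tf from "@tensorflow/tfjs-core";

function attentionMaskReference(
  q: tf.Tensor4D,
  k: tf.Tensor4D,
  divisor: number,
  pastLen: number
): tf.Tensor4D {
  return tf.tidy(() => {
    const tq = q.shape[2];
    const tk = k.shape[2];
    // scaled[b, h, t1, t2] = divisor * sum_i q[b, h, t1, i] * k[b, h, t2, i]
    const scaled = tf.mul(tf.matMul(q, k, false, true), divisor);
    // future[t1, t2] is true where t2 > t1 + pastLen (positions to mask out)
    const t1 = tf.reshape(tf.range(0, tq, 1, "float32"), [tq, 1]);
    const t2 = tf.reshape(tf.range(0, tk, 1, "float32"), [1, tk]);
    const future = tf.greater(t2, tf.add(t1, pastLen));
    // -1.0/0.0 in GLSL is -Infinity; add it only where the mask applies
    const maskAdd = tf.where(
      future,
      tf.fill([tq, tk], Number.NEGATIVE_INFINITY),
      tf.zeros([tq, tk])
    );
    return tf.add(scaled, maskAdd) as tf.Tensor4D;
  });
}
```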