@genai-fi/nanogpt 0.10.3 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Generator.d.ts +10 -5
- package/dist/Generator.js +1789 -1765
- package/dist/{RealDiv-KAPDe8zB.js → RealDiv-C8neBwFi.js} +15 -15
- package/dist/{Reshape-BYkmUnAv.js → Reshape-Bd4V_4X7.js} +1 -1
- package/dist/{Reshape-Zt6eb7yh.js → Reshape-Ck29jQSY.js} +5 -5
- package/dist/TeachableLLM.d.ts +5 -3
- package/dist/TeachableLLM.js +14 -14
- package/dist/Trainer.d.ts +3 -1
- package/dist/Trainer.js +11 -8
- package/dist/{axis_util-BaG7mf5A.js → axis_util-DGqbT-FX.js} +3 -3
- package/dist/backend.js +2 -2
- package/dist/{backend_util-RCe-rHaj.js → backend_util-DC3rBo_H.js} +18 -18
- package/dist/{backend_webgpu-DE3ACOLx.js → backend_webgpu-mbhNnlx9.js} +3 -3
- package/dist/{broadcast_to-B3eYlZm7.js → broadcast_to-D1Dmg2Oz.js} +2 -2
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +2 -2
- package/dist/checks/normRMS.js +4 -4
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +4 -4
- package/dist/checks/rope.js +2 -2
- package/dist/{clip_by_value-BnO7-a88.js → clip_by_value-fg2aKzUy.js} +5 -5
- package/dist/complex-Cyg-eQeZ.js +11 -0
- package/dist/concat-CSm2rMwe.js +17 -0
- package/dist/{concat_util-DpW8mL_l.js → concat_util-D0je5Ppu.js} +1 -1
- package/dist/{dataset-BcwmTGYc.js → dataset-CVIJu7Xa.js} +7 -7
- package/dist/{dropout-BcvN9JYi.js → dropout-DLhSMNTZ.js} +9 -9
- package/dist/expand_dims-ChkuOp6I.js +11 -0
- package/dist/{exports_initializers-Hta_rEnm.js → exports_initializers-1KWPiStI.js} +1 -1
- package/dist/{floor-D5QdR_le.js → floor-BRMPgeIs.js} +1 -1
- package/dist/{gather-D3JcZUaI.js → gather-BSULDalH.js} +1 -1
- package/dist/{gelu-CjNPL4OH.js → gelu-BK1k-n1i.js} +1 -1
- package/dist/{gpgpu_math-DAOmgtXR.js → gpgpu_math-BJSTk_mW.js} +25 -25
- package/dist/{index-BwexR4lA.js → index-BBVLAXZD.js} +89 -89
- package/dist/{index-DOvlwCh-.js → index-Duu1Lvvv.js} +53 -53
- package/dist/{kernel_funcs_utils-CCzYdUZg.js → kernel_funcs_utils-BtYrPoJu.js} +6 -6
- package/dist/layers/BaseLayer.js +2 -2
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +5 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +4 -4
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +9 -9
- package/dist/log_sum_exp-CVqLsVLl.js +39 -0
- package/dist/main.d.ts +10 -1
- package/dist/main.js +68 -58
- package/dist/{matMul16-BWRSOCWB.js → matMul16-xswmhSuF.js} +3 -3
- package/dist/{matMulGelu-CzfgT6Wq.js → matMulGelu-BpvgnYG8.js} +14 -14
- package/dist/mat_mul-Bn2BDpT4.js +11 -0
- package/dist/{mod-AnXEvvpo.js → mod-B4AUd1Np.js} +1 -1
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +9 -9
- package/dist/{ones-D2rT0xk2.js → ones-CBI1AQjb.js} +3 -3
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +9 -9
- package/dist/ops/cpu/adamMoments.js +5 -5
- package/dist/ops/cpu/appendCache.js +6 -6
- package/dist/ops/cpu/attentionMask.js +10 -10
- package/dist/ops/cpu/fusedSoftmax.js +5 -5
- package/dist/ops/cpu/gatherSub.js +9 -9
- package/dist/ops/cpu/gelu.js +5 -5
- package/dist/ops/cpu/matMul16.js +2 -2
- package/dist/ops/cpu/matMulGelu.js +3 -3
- package/dist/ops/cpu/matMulMul.js +5 -5
- package/dist/ops/cpu/mulDropout.js +1 -1
- package/dist/ops/cpu/normRMS.js +7 -7
- package/dist/ops/cpu/qkv.js +3 -3
- package/dist/ops/cpu/rope.js +5 -5
- package/dist/ops/cpu/scatterSub.js +11 -11
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +4 -4
- package/dist/ops/grads/attentionMask.js +2 -2
- package/dist/ops/grads/gelu.js +2 -2
- package/dist/ops/grads/matMul16.js +3 -3
- package/dist/ops/grads/matMulGelu.js +6 -6
- package/dist/ops/grads/normRMS.js +4 -4
- package/dist/ops/grads/pack16.js +3 -3
- package/dist/ops/grads/qkv.js +10 -10
- package/dist/ops/grads/rope.js +2 -2
- package/dist/ops/grads/softmax16.js +1 -1
- package/dist/ops/grads/unpack16.js +2 -2
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +2 -2
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +2 -2
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/reshape16.js +2 -2
- package/dist/ops/rope.js +2 -2
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +1 -1
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +6 -6
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +2 -2
- package/dist/ops/webgl/adamMoments.js +1 -1
- package/dist/ops/webgl/appendCache.js +1 -1
- package/dist/ops/webgl/attentionMask.js +1 -1
- package/dist/ops/webgl/fusedSoftmax.js +4 -4
- package/dist/ops/webgl/gatherSub.js +1 -1
- package/dist/ops/webgl/gelu.js +2 -2
- package/dist/ops/webgl/log.js +3 -3
- package/dist/ops/webgl/matMul16.js +8 -8
- package/dist/ops/webgl/matMulGelu.js +4 -4
- package/dist/ops/webgl/matMulMul.js +7 -7
- package/dist/ops/webgl/mulDropout.js +1 -1
- package/dist/ops/webgl/normRMS.js +7 -7
- package/dist/ops/webgl/qkv.js +1 -1
- package/dist/ops/webgl/rope.js +1 -1
- package/dist/ops/webgl/scatterSub.js +1 -1
- package/dist/ops/webgpu/adamAdjust.js +3 -3
- package/dist/ops/webgpu/adamMoments.js +5 -5
- package/dist/ops/webgpu/add16.js +1 -1
- package/dist/ops/webgpu/appendCache.js +3 -3
- package/dist/ops/webgpu/attentionMask.js +2 -2
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +5 -5
- package/dist/ops/webgpu/gatherSub.js +5 -5
- package/dist/ops/webgpu/gelu.js +3 -3
- package/dist/ops/webgpu/matMul16.js +19 -19
- package/dist/ops/webgpu/matMul16_program.js +2 -2
- package/dist/ops/webgpu/mul16.js +4 -4
- package/dist/ops/webgpu/normRMS.js +6 -6
- package/dist/ops/webgpu/normRMSGrad.js +4 -4
- package/dist/ops/webgpu/pack16.js +3 -3
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +8 -8
- package/dist/ops/webgpu/rope.js +3 -3
- package/dist/ops/webgpu/scatterSub.js +3 -3
- package/dist/ops/webgpu/slice16.js +4 -4
- package/dist/ops/webgpu/softmax16.js +4 -4
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +1 -1
- package/dist/ops/webgpu/sub16.js +4 -4
- package/dist/ops/webgpu/sum16.js +5 -5
- package/dist/ops/webgpu/transpose16.js +2 -2
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
- package/dist/ops/webgpu/unpack16.js +5 -5
- package/dist/ops/webgpu/utils/binary_op.js +3 -3
- package/dist/ops/webgpu/utils/reductions.js +4 -4
- package/dist/{ops-B5yanEdW.js → ops-C2_OXuZ4.js} +69 -69
- package/dist/{pack16-nQ6JaLo-.js → pack16-atD0eYRm.js} +9 -9
- package/dist/patches/webgpu_backend.js +6 -6
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +8 -8
- package/dist/{random_width-or-CEftb.js → random_width-BN4wGJaW.js} +33 -33
- package/dist/range-DKmP1-OQ.js +10 -0
- package/dist/relu-BsXmGzzu.js +9 -0
- package/dist/{reshape-ByE68wS9.js → reshape-BI0yzp1T.js} +1 -1
- package/dist/{resize_nearest_neighbor-B19mCEg2.js → resize_nearest_neighbor-BA_BX-ub.js} +26 -26
- package/dist/{rope-Ir4mTyD1.js → rope-DJ7Y7c-u.js} +1 -1
- package/dist/{scatter_nd_util-lvSiX8q4.js → scatter_nd_util-k9MUVUkn.js} +1 -1
- package/dist/{selu_util-kbhpTdYD.js → selu_util-DyW0X1WG.js} +5 -5
- package/dist/{shared-DT1TkE6w.js → shared-Q3BS6T03.js} +1 -1
- package/dist/{shared-dntlHIDQ.js → shared-nnSWpC3u.js} +86 -86
- package/dist/{slice-BfEGSH82.js → slice-wBNvzVyz.js} +1 -1
- package/dist/{slice_util-uTKwiEpW.js → slice_util-zN8KFC5I.js} +1 -1
- package/dist/{softmax-CA5jFsLR.js → softmax-DfuYyjMh.js} +1 -1
- package/dist/split-BYrLboMq.js +9 -0
- package/dist/squeeze-Bk8Brcct.js +10 -0
- package/dist/{stack-Cf4n9h0N.js → stack-CDWShFHF.js} +1 -1
- package/dist/{step-CINUs5QB.js → step-BS5JXRR6.js} +23 -23
- package/dist/{sum-DWAtNGez.js → sum-BPUfDB2X.js} +3 -3
- package/dist/tensor-CEt9Nm2s.js +8 -0
- package/dist/tensor1d-Cc_KCIDg.js +11 -0
- package/dist/{tensor2d-Bs9wZRc7.js → tensor2d-BN97fF71.js} +3 -3
- package/dist/{tensor4d-BARPdTaS.js → tensor4d-vuDDgdUI.js} +1 -1
- package/dist/{tfjs_backend-y1cvNhLA.js → tfjs_backend-806hyYve.js} +49 -49
- package/dist/{tile-mbfagpsB.js → tile-OWUvpIVt.js} +3 -3
- package/dist/tokeniser/BaseTokeniser.d.ts +25 -0
- package/dist/tokeniser/BaseTokeniser.js +94 -0
- package/dist/tokeniser/CharTokeniser.d.ts +10 -9
- package/dist/tokeniser/CharTokeniser.js +44 -30
- package/dist/tokeniser/bpe.d.ts +10 -9
- package/dist/tokeniser/bpe.js +67 -52
- package/dist/tokeniser/type.d.ts +14 -5
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.d.ts +3 -3
- package/dist/training/DatasetBuilder.js +34 -38
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.d.ts +4 -3
- package/dist/training/Trainer.js +22 -25
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/training/tasks/ConversationTask.d.ts +11 -0
- package/dist/training/tasks/ConversationTask.js +26 -0
- package/dist/training/tasks/PretrainingTask.d.ts +11 -0
- package/dist/training/tasks/PretrainingTask.js +34 -0
- package/dist/training/tasks/StartSentenceTask.d.ts +12 -0
- package/dist/training/tasks/StartSentenceTask.js +42 -0
- package/dist/training/tasks/Task.d.ts +8 -0
- package/dist/training/tasks/Task.js +41 -0
- package/dist/{transpose-ClWiBS_b.js → transpose-BUkQCJp9.js} +6 -6
- package/dist/{unsorted_segment_sum-BDDhB_E6.js → unsorted_segment_sum-BljxHhCY.js} +5 -5
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.js +1 -1
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.d.ts +1 -1
- package/dist/utilities/sentences.js +11 -11
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-WawDEaAb.js → variable-DPt_Iuog.js} +1 -1
- package/dist/{webgpu_program-DuOXPQol.js → webgpu_program-BpWRlghH.js} +3 -3
- package/dist/{webgpu_util-RxEF33Rj.js → webgpu_util-DMiKzzQM.js} +7 -7
- package/dist/{zeros-KnWaWf-X.js → zeros-5YROwwUH.js} +2 -2
- package/dist/{zeros_like-DvE73F4e.js → zeros_like-De4n1C3m.js} +71 -71
- package/package.json +1 -1
- package/dist/complex-DjxcVmoX.js +0 -11
- package/dist/concat-BV8bt5H-.js +0 -17
- package/dist/expand_dims-DT4tEPwA.js +0 -11
- package/dist/log_sum_exp-ngO0-4pK.js +0 -39
- package/dist/mat_mul-SjpJRLyL.js +0 -11
- package/dist/range-BklejeeW.js +0 -10
- package/dist/relu-CP0ZcxWO.js +0 -9
- package/dist/split-CVLc0w--.js +0 -9
- package/dist/squeeze-C7Z2srUo.js +0 -10
- package/dist/tensor-DJoc7gJU.js +0 -8
- package/dist/tensor1d-D11P_7Dp.js +0 -11
@@ -1,77 +1,91 @@
-import {
+import k, { SPECIALS as d } from "./BaseTokeniser.js";
 const u = ["<eos>", "<unk>"];
 class b extends k {
 vocabSize = 0;
 eosToken = 0;
+bosToken = 0;
 unkToken = 0;
 vocab = [];
 cache = /* @__PURE__ */ new Map();
 _trained = !1;
-constructor(
-if (super(), Array.isArray(
-if (this.vocab =
-this.vocabSize = this.vocab.length,
-this.
+constructor(i) {
+if (super(), Array.isArray(i)) {
+if (this.vocab = i, this.vocab.length > 0)
+this.vocabSize = this.vocab.length, d.forEach((t) => {
+const e = this.vocab.indexOf(t);
+e !== -1 && this.addSpecialToken(t, e);
+}), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex("") ?? -1, this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((t) => t === "<pad>" ? "" : t), this.vocab.forEach((t, e) => {
+this.cache.set(t, e);
 });
 else
 throw new Error("Vocab cannot be empty");
 this._trained = !0;
 } else
-this.vocabSize =
+this.vocabSize = i, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
+this.cache.set(t, e);
+}), this.cache.set("", this.unkToken);
+}
+addToken(i, t) {
+if (this.cache.has(i))
+return this.cache.get(i);
+let e;
+if (t !== void 0 ? e = t : (e = this.vocab.indexOf("", this.unkToken + 1), e === -1 && (e = this.vocabSize)), e >= this.vocabSize)
+throw new Error("Vocab size exceeded");
+return this.vocab[e] = i, this.cache.set(i, e), e;
 }
 get trained() {
 return this.vocab.length === this.vocabSize && this._trained;
 }
 destroy() {
 }
-async train(
-const
+async train(i) {
+const t = i.map((n) => n.split("")).flat(), e = new Set(t), s = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
 if (h === -1)
 return this.vocabSize;
 if (this._trained = !0, s.length > o) {
-const
-
-
-}), s.sort((a, r) => (
+const n = /* @__PURE__ */ new Map();
+t.forEach((a) => {
+n.set(a, (n.get(a) || 0) + 1);
+}), s.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), s.splice(0, s.length - o);
 }
 let c = h;
 if (c !== -1) {
-const
+const n = new Set(this.vocab);
 for (const a of s)
-if (!
+if (!n.has(a) && (this.vocab[c] = a, n.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
 break;
 }
-return this.cache.clear(), this.vocab.forEach((
-this.cache.set(
+return this.cache.clear(), this.vocab.forEach((n, a) => {
+this.cache.set(n, a);
 }), this.emit("trainStatus", "trained"), this.vocabSize;
 }
-
+tokenise(i, t) {
 if (!this.trained)
 throw new Error("Tokeniser not trained");
-return
+return i.map((s) => t ? s.split("").map((h) => this.cache.get(h) ?? this.unkToken) : s.split("").map((h) => {
 const o = this.cache.get(h);
 return o !== void 0 ? this.vocab[o] : "";
 }));
 }
-
-return
+detokenise(i) {
+return i.map((e) => Array.from(e).map((s) => this.vocab[s] || "").join(""));
 }
-
-return
+encode(i) {
+return this.tokenise([i], !0)[0];
 }
-
-return
+decode(i) {
+return this.detokenise([i])[0];
 }
 getVocab() {
 return this.vocab;
 }
-
+getMerges() {
 return [];
 }
-async createTrainingData(
-const
-for (let o = 0; o <
-s.push(...
+async createTrainingData(i, t = 5) {
+const e = await this.tokenise(i, !0), s = [], h = [];
+for (let o = 0; o < e.length - t; o++)
+s.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
 return [s, h];
 }
 }
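
A minimal usage sketch of the reworked character tokeniser. It assumes the class is the default export of this module and that the import path below is valid for consumers of the published dist layout; the method and property names (train, encode, decode, bosToken) are taken from the minified output above, everything else is an assumption.

// Sketch only: import path and default-export name are assumptions based on the dist layout.
import CharTokeniser from "@genai-fi/nanogpt/dist/tokeniser/CharTokeniser.js";

async function demo() {
  // Reserve a fixed-size vocabulary; 0.12.0 registers special tokens up front via the new BaseTokeniser.
  const tok = new CharTokeniser(64);
  await tok.train(["hello world", "hello there"]);

  const ids = tok.encode("hello");      // number[], one id per character
  const text = tok.decode(ids);         // back to a string
  console.log(ids, text, tok.bosToken); // bosToken falls back to eosToken when "<bos>" is absent
}
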
package/dist/tokeniser/bpe.d.ts CHANGED
@@ -1,6 +1,5 @@
-import { default as
-
-export default class BPETokeniser extends EE<'trainStatus'> implements ITokeniser {
+import { default as BaseTokeniser } from './BaseTokeniser';
+export default class BPETokeniser extends BaseTokeniser {
 private targetSize;
 private vocab;
 private vocabIndex;
@@ -8,19 +7,21 @@ export default class BPETokeniser extends EE<'trainStatus'> implements ITokenise
 private pretokenMap;
 constructor(vocabSize: number);
 constructor(vocab: string[], merges?: [string, string][]);
+addToken(token: string, index?: number): number;
 destroy(): void;
 get trained(): boolean;
 get vocabSize(): number;
 get eosToken(): number;
+get bosToken(): number;
 get unkToken(): number;
 train(text: string[]): Promise<number>;
 getVocab(): string[];
-getMerges():
+getMerges(): [string, string][];
 private tokeniseWord;
 private tokeniseStrings;
-tokenise(text: string[], numeric: true):
-tokenise(text: string[]):
-detokenise(tokens: number[][]):
-encode(text: string):
-decode(tokens: number[]):
+tokenise(text: string[], numeric: true): number[][];
+tokenise(text: string[]): string[][];
+detokenise(tokens: number[][]): string[];
+encode(text: string): number[];
+decode(tokens: number[]): string;
 }
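
The declaration file now commits to concrete, non-Promise return types. A short sketch of how the overloads resolve, directly following the signatures above; only the import path is an assumption.

// Sketch: overload behaviour follows the .d.ts above; the import path is an assumption.
import BPETokeniser from "@genai-fi/nanogpt/dist/tokeniser/bpe.js";

async function demo() {
  const bpe = new BPETokeniser(256);                          // constructor(vocabSize: number)
  await bpe.train(["the cat sat on the mat"]);

  const asIds: number[][] = bpe.tokenise(["the cat"], true);  // numeric: true -> number[][]
  const asStrings: string[][] = bpe.tokenise(["the cat"]);    // no flag -> string[][]
  const ids: number[] = bpe.encode("the cat");                // single string -> flat id list
  const round: string = bpe.decode(ids);
  return [asIds, asStrings, round];
}
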
package/dist/tokeniser/bpe.js CHANGED
@@ -1,68 +1,80 @@
 import l from "../utilities/tokenParse.js";
-import {
+import d, { SPECIALS as f } from "./BaseTokeniser.js";
 function u(o, e) {
 return `${o}-::-${e}`;
 }
-function
+function b(o) {
 const e = /* @__PURE__ */ new Map();
 for (let s = 0; s < o.length; s++) {
 const t = o[s];
-for (let
-const
-a: t[
-b: t[
+for (let n = 0; n < t.length - 1; n++) {
+const r = u(t[n], t[n + 1]), i = e.get(r) || {
+a: t[n],
+b: t[n + 1],
 count: 0,
 instances: /* @__PURE__ */ new Set()
 };
-
+i.count += 1, i.instances.add(s), e.set(r, i);
 }
 }
 return { pairs: e, tokens: o };
 }
-function h(o, e, s, t,
-const
-if (o.pairs.has(
-const
-
+function h(o, e, s, t, n) {
+const r = u(e, s);
+if (o.pairs.has(r)) {
+const i = o.pairs.get(r);
+i.count += n, n > 0 ? i.instances.add(t) : i.count <= 0 ? o.pairs.delete(r) : i.instances.delete(t);
 } else
-o.pairs.set(
+o.pairs.set(r, { a: e, b: s, count: n, instances: /* @__PURE__ */ new Set([t]) });
 }
-function
+function k(o) {
 let e = null, s = 0;
 for (const t of o.pairs.values())
 t.count > s && (s = t.count, e = t);
 return e;
 }
-function
+function m(o, e) {
 return o.map((s) => {
 const t = [];
-for (let
-
+for (let n = 0; n < s.length; n++)
+n < s.length - 1 && s[n] === e[0] && s[n + 1] === e[1] ? (t.push(e[0] + e[1]), n++) : t.push(s[n]);
 return t;
 });
 }
-function
+function v(o, e) {
 e.instances.forEach((s) => {
-const t = o.tokens[s],
-for (let
-if (
-const
-
+const t = o.tokens[s], n = [];
+for (let r = 0; r < t.length; r++)
+if (r < t.length - 1 && t[r] === e.a && t[r + 1] === e.b) {
+const i = e.a + e.b;
+n.push(i), r > 0 && (h(o, t[r - 1], e.a, s, -1), h(o, t[r - 1], i, s, 1)), r++, r < t.length - 1 && (h(o, e.b, t[r + 1], s, -1), h(o, i, t[r + 1], s, 1));
 } else
-
-o.tokens[s] =
+n.push(t[r]);
+o.tokens[s] = n;
 }), o.pairs.delete(u(e.a, e.b));
 }
-class
+class x extends d {
 targetSize;
 vocab = /* @__PURE__ */ new Set();
 vocabIndex = /* @__PURE__ */ new Map();
 merges = [];
 pretokenMap = /* @__PURE__ */ new Map();
 constructor(e, s) {
-super(), Array.isArray(e) ? (e.forEach((t,
-this.vocab.add(t), this.vocabIndex.set(t,
-}), s && (this.merges = s), this.targetSize = e.length
+super(), Array.isArray(e) ? (e.forEach((t, n) => {
+this.vocab.add(t), this.vocabIndex.set(t, n);
+}), s && (this.merges = s), this.targetSize = e.length, f.forEach((t) => {
+const n = e.indexOf(t);
+n !== -1 && this.addSpecialToken(t, n);
+})) : (this.addSpecialTokens(), this.targetSize = e);
+}
+addToken(e, s) {
+if (this.vocab.has(e))
+return this.vocabIndex.get(e);
+{
+this.vocab.add(e);
+const t = s !== void 0 ? s : this.vocab.size - 1;
+return this.vocabIndex.set(e, t), t;
+}
 }
 destroy() {
 this.vocab.clear(), this.vocabIndex.clear(), this.merges = [], this.pretokenMap.clear();
@@ -76,58 +88,61 @@ class S extends f {
 get eosToken() {
 return this.vocabIndex.get("<eos>") ?? 0;
 }
+get bosToken() {
+return this.vocabIndex.get("<bos>") ?? 0;
+}
 get unkToken() {
 return this.vocabIndex.get("") ?? 1;
 }
 async train(e) {
-const s = e.map((
-this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.
-const
+const s = e.map((a) => l(a)).flat(1), t = new Set(s);
+this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.addSpecialTokens();
+const n = Array.from(t), r = n.map((a) => Array.from(a).map((c) => (this.vocab.add(c), c))), i = b(r);
 for (; this.vocab.size < this.targetSize && this.merges.length < this.targetSize; ) {
-const
-if (!
+const a = k(i);
+if (!a)
 break;
-this.merges.push([
+this.merges.push([a.a, a.b]), this.vocab.add(a.a + a.b), v(i, a);
 }
-
-const c =
-this.pretokenMap.set(
+n.forEach((a, p) => {
+const c = r[p];
+this.pretokenMap.set(a, c);
 }), this.vocabIndex.clear();
 let g = 0;
-for (const
-this.vocabIndex.set(
+for (const a of this.vocab.keys())
+this.vocabIndex.set(a, g++);
 return this.emit("trainStatus", "trained"), this.vocab.size;
 }
 getVocab() {
 return Array.from(this.vocab);
 }
-
+getMerges() {
 return this.merges;
 }
 tokeniseWord(e) {
 let s = Array.from(e);
 return this.merges.forEach((t) => {
-s =
+s = m([s], t)[0];
 }), this.pretokenMap.set(e, s), s;
 }
 tokeniseStrings(e) {
-return e.map((s) => l(s).map((
+return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
 }
-
+tokenise(e, s) {
 const t = this.tokeniseStrings(e);
-return s ? t.map((
+return s ? t.map((n) => n.map((r) => this.vocabIndex.get(r) ?? this.unkToken)) : t.map((n) => n.map((r) => this.vocab.has(r) ? r : ""));
 }
-
+detokenise(e) {
 const s = this.getVocab();
-return e.map((
+return e.map((n) => n.map((r) => s[r]).join(""));
 }
-
-return
+encode(e) {
+return this.tokenise([e], !0)[0];
 }
-
-return
+decode(e) {
+return this.detokenise([e])[0];
 }
 }
 export {
-
+x as default
 };
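
For readability, here is an un-minified sketch of the core training loop that the helpers b/k/m/v above implement (pair counting, best-pair selection, merge application). Names are illustrative, not the library's, and the shipped code updates pair counts incrementally per instance, whereas this version recounts each round for clarity.

// Readable sketch of the BPE merge loop in bpe.js (illustrative names, simplified bookkeeping).
type Word = string[];

function countPairs(words: Word[]): Map<string, number> {
  const counts = new Map<string, number>();
  for (const w of words)
    for (let i = 0; i < w.length - 1; i++) {
      const key = `${w[i]}-::-${w[i + 1]}`; // same separator the minified code uses
      counts.set(key, (counts.get(key) ?? 0) + 1);
    }
  return counts;
}

function applyMerge(words: Word[], a: string, b: string): Word[] {
  return words.map((w) => {
    const out: string[] = [];
    for (let i = 0; i < w.length; i++)
      if (i < w.length - 1 && w[i] === a && w[i + 1] === b) { out.push(a + b); i++; }
      else out.push(w[i]);
    return out;
  });
}

function trainBPE(words: Word[], targetMerges: number): [string, string][] {
  const merges: [string, string][] = [];
  let current = words;
  while (merges.length < targetMerges) {
    let best: string | null = null, bestCount = 0;
    for (const [key, count] of countPairs(current))
      if (count > bestCount) { bestCount = count; best = key; }
    if (!best) break;                       // no pair occurs twice; nothing left to merge
    const [a, b] = best.split("-::-");
    merges.push([a, b]);
    current = applyMerge(current, a, b);
  }
  return merges;
}
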
package/dist/tokeniser/type.d.ts CHANGED
@@ -1,14 +1,23 @@
 import { default as EE } from 'eventemitter3';
+export type Roles = 'user' | 'assistant' | 'system';
+export interface Conversation {
+role: Roles;
+content: string;
+}
 export interface ITokeniser extends EE<'trainStatus'> {
 train(text: string[]): Promise<number>;
-tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
-detokenise(tokens: string[][] | number[][]): Promise<string[]>;
 getVocab(): string[];
-getMerges():
+getMerges(): [string, string][];
 destroy(): void;
-encode(text: string):
-
+encode(text: string): number[];
+encodeConversation(conversation: Conversation[], completion?: boolean): number[];
+encodeSequence(text: string): number[];
+decode(tokens: number[] | Uint16Array): string;
+decodeConversation(tokens: number[] | Uint16Array): Conversation[];
 vocabSize: number;
 eosToken: number;
+bosToken: number;
 trained: boolean;
+getSpecialTokenIndex(token: string): number | undefined;
+isSpecialToken(index: number): boolean;
 }
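
The new Conversation shape and the encodeConversation/decodeConversation members on ITokeniser allow chat-style round trips like the following. How roles map to special tokens is handled inside the new BaseTokeniser, whose body is not shown in this diff, so treat this strictly as a usage sketch against the interface; the import path is an assumption.

import type { Conversation, ITokeniser } from "@genai-fi/nanogpt/dist/tokeniser/type";

// Usage sketch against the updated ITokeniser; `tokeniser` is any trained implementation
// (CharTokeniser or BPETokeniser). Role-to-token mapping is internal to the tokeniser.
function roundTrip(tokeniser: ITokeniser): Conversation[] {
  const chat: Conversation[] = [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Hi!" },
    { role: "assistant", content: "Hello." },
  ];
  const ids: number[] = tokeniser.encodeConversation(chat);
  return tokeniser.decodeConversation(ids); // back to { role, content } entries
}
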
package/dist/training/Adam.js CHANGED
@@ -1,7 +1,7 @@
 import { adamAdjust as b } from "../ops/adamAdjust.js";
 import { adamMoments as d } from "../ops/adamMoments.js";
-import { O as g, e as h, t as o, d as B } from "../index-
-import { z as M } from "../zeros-
+import { O as g, e as h, t as o, d as B } from "../index-Duu1Lvvv.js";
+import { z as M } from "../zeros-5YROwwUH.js";
 class R extends g {
 constructor(t, a, e, s, i = null) {
 super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());
package/dist/training/AdamExt.js CHANGED
package/dist/training/DatasetBuilder.d.ts CHANGED
@@ -1,14 +1,14 @@
 import { Tensor } from '@tensorflow/tfjs-core';
-import { ITokeniser } from '../tokeniser/type';
+import { Conversation, ITokeniser } from '../tokeniser/type';
 import { Dataset } from '@tensorflow/tfjs-data';
 export declare const PAGE_FACTOR = 8;
-export declare function flattenTokens(textData:
+export declare function flattenTokens(textData: Conversation[][], tokenizer: ITokeniser): Promise<number[]>;
 export declare class DatasetBuilder {
 tokenizer: ITokeniser;
 blockSize: number;
 private pageSize;
 constructor(tokenizer: ITokeniser, blockSize?: number);
-createTextDataset(flatTokens:
+createTextDataset(flatTokens: Uint16Array, batchSize?: number, masked?: Set<number>, invertMask?: boolean): Promise<Dataset<{
 xs: Tensor;
 ys: Tensor;
 }>>;
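
createTextDataset now consumes a pre-flattened Uint16Array of token ids rather than raw text, and flattenTokens operates on Conversation[][] instead of string[]. A wiring sketch under those declared signatures; since flattenTokens is declared to return number[], the conversion to Uint16Array is shown on the caller's side, and the import paths are assumptions.

import { DatasetBuilder, flattenTokens } from "@genai-fi/nanogpt/dist/training/DatasetBuilder.js";
import type { Conversation, ITokeniser } from "@genai-fi/nanogpt/dist/tokeniser/type";

// Sketch of the 0.12.0 data flow from conversations to a tf.data dataset.
async function buildDataset(tokeniser: ITokeniser, conversations: Conversation[][]) {
  const flat = await flattenTokens(conversations, tokeniser); // Promise<number[]>
  const builder = new DatasetBuilder(tokeniser, 128);         // blockSize defaults to 128
  // createTextDataset expects a Uint16Array of token ids plus an optional batch size.
  return builder.createTextDataset(new Uint16Array(flat), 32);
}
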
package/dist/training/DatasetBuilder.js CHANGED
@@ -1,67 +1,63 @@
-import { t as
-import { d as
+import { t as y } from "../index-Duu1Lvvv.js";
+import { d as g, i as z } from "../dataset-CVIJu7Xa.js";
 import "../index-Cp39cXWe.js";
-function
-return
-const t = await
-return
+function b(a) {
+return g(async () => {
+const t = await a();
+return z(() => t.next());
 });
 }
-const
-async function
-
-for (const e of a)
-if (e < 0 || e >= t.vocabSize)
-throw new Error(`Invalid token index ${e} found in tokenised data`);
-return a;
+const f = 8;
+async function w(a, t) {
+return (await Promise.all(a.map((s) => t.encodeConversation(s)))).flat();
 }
-class
+class m {
 tokenizer;
 blockSize;
 pageSize;
-constructor(t,
-this.tokenizer = t, this.blockSize =
+constructor(t, r = 128) {
+this.tokenizer = t, this.blockSize = r, this.pageSize = r * f;
 }
 // Create dataset from text files
-async createTextDataset(t,
+async createTextDataset(t, r = 32, i, s) {
 if (t.length < this.blockSize + 1)
 throw new Error(`Not enough tokens (${t.length}) for block size ${this.blockSize}`);
-if (
+if (i && i.size > t.length / this.pageSize / 2)
 throw new Error("Too many masked pages - would leave insufficient training data");
-const
-if (
-const
+const l = (function* () {
+if (i && s) {
+const e = Array.from(i);
 for (; ; ) {
-const
+const o = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), n = e[o] * this.pageSize + h;
 if (n + this.blockSize + 1 > t.length)
 continue;
-const
-yield { xs:
+const c = new Int32Array(t.subarray(n, n + this.blockSize)), u = new Int32Array(t.subarray(n + 1, n + this.blockSize + 1));
+yield { xs: c, ys: u };
 }
 } else
 for (; ; ) {
-const
-if (
-const n = Math.floor(
-if (
+const e = Math.floor(Math.random() * (t.length - this.blockSize - 1));
+if (i) {
+const n = Math.floor(e / this.pageSize), c = i.has(n);
+if (c && !s || !c && s)
 continue;
 }
-const
-yield { xs:
+const o = new Int32Array(t.subarray(e, e + this.blockSize)), h = new Int32Array(t.subarray(e + 1, e + this.blockSize + 1));
+yield { xs: o, ys: h };
 }
 }).bind(this);
-return
-const
-return
-xs:
-ys:
+return b(l).batch(r).map((e) => {
+const o = e;
+return y(() => ({
+xs: o.xs.cast("int32"),
+ys: o.ys.cast("int32")
 // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
 }));
 }).prefetch(2);
 }
 }
 export {
-
-
-
+m as DatasetBuilder,
+f as PAGE_FACTOR,
+w as flattenTokens
 };
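
The sampler above works in "pages" of blockSize * PAGE_FACTOR tokens: each random block offset is mapped to a page index, which is either skipped or kept depending on the masked set and the invertMask flag. A small sketch of that bookkeeping, simplified from the generator above with illustrative names of my own:

// Simplified view of the page-masking test inside createTextDataset (illustrative names).
const PAGE_FACTOR = 8;

function acceptOffset(
  offset: number,
  blockSize: number,
  masked: Set<number> | undefined,
  invertMask: boolean | undefined,
): boolean {
  if (!masked) return true;                    // no mask: every offset is eligible
  const pageSize = blockSize * PAGE_FACTOR;
  const page = Math.floor(offset / pageSize);
  const isMasked = masked.has(page);
  // Masked pages are normally excluded (e.g. held out); with invertMask the sampler
  // instead draws only from the masked pages.
  return invertMask ? isMasked : !isMasked;
}

// e.g. blockSize 128 -> pageSize 1024; offset 5000 falls in page 4.
acceptOffset(5000, 128, new Set([4]), false);  // false: page 4 is excluded
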
package/dist/training/FullTrainer.js CHANGED
@@ -1,6 +1,6 @@
 import b from "./Trainer.js";
 import L from "./Evaluator.js";
-import { d as w } from "../index-
+import { d as w } from "../index-Duu1Lvvv.js";
 import y from "../utilities/profile.js";
 import { createTensorStatistics as D } from "../checks/weights.js";
 const T = {
package/dist/training/Trainer.d.ts CHANGED
@@ -1,11 +1,12 @@
 import { ITokeniser } from '../tokeniser/type';
 import { DatasetBuilder } from './DatasetBuilder';
 import { default as AdamExt } from './AdamExt';
-import { NamedTensorMap
+import { NamedTensorMap } from '@tensorflow/tfjs-core/dist/tensor_types';
 import { Scalar, Tensor } from '@tensorflow/tfjs-core';
 import { Dataset } from '@tensorflow/tfjs-data';
 import { default as Model, ModelForwardAttributes } from '../models/model';
 import { TensorStatistics } from '../checks/weights';
+import { Task } from './tasks/Task';
 export interface TrainingLogEntry {
 loss: number;
 valLoss?: number;
@@ -93,7 +94,7 @@ export default abstract class GPTTrainer {
 log: TrainingLogEntry;
 progress: TrainingProgress;
 }>;
-createTrainValidationSplit(
+createTrainValidationSplit(tasks: Task[], batchSize?: number, validationSplit?: number): Promise<{
 trainDataset: Dataset<{
 xs: Tensor;
 ys: Tensor;
@@ -102,7 +103,7 @@ export default abstract class GPTTrainer {
 xs: Tensor;
 ys: Tensor;
 }>;
+size: number;
 }>;
-createDataset(textData: string[], batchSize?: number): Promise<Dataset<TensorContainer>>;
 dispose(): void;
 }
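
Dataset creation is now driven by Task objects (see the new training/tasks/* files in the list above) rather than raw strings: createTrainValidationSplit takes Task[] plus optional batch size and validation split, and the old createDataset(textData) method is gone. A call sketch against the updated declaration; the trainer instance and the task list are assumed to come from elsewhere, since the Task constructors are not part of this diff, and the import paths are assumptions.

import type { Task } from "@genai-fi/nanogpt/dist/training/tasks/Task";
import type GPTTrainer from "@genai-fi/nanogpt/dist/training/Trainer";

// Sketch only: `trainer` and `tasks` are assumed inputs built elsewhere.
async function prepare(trainer: GPTTrainer, tasks: Task[]) {
  const split = await trainer.createTrainValidationSplit(tasks, 32, 0.1);
  console.log(split.size);   // `size` is part of the returned shape; its exact meaning isn't shown in this diff
  return split.trainDataset; // Dataset<{ xs: Tensor; ys: Tensor }>
}
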