@genai-fi/nanogpt 0.10.2 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Generator.d.ts +10 -5
- package/dist/Generator.js +11760 -146
- package/dist/{RealDiv-zz7FpkKX.js → RealDiv-Ds-jvL09.js} +28 -30
- package/dist/Reshape-Cd6e-Otn.js +14 -0
- package/dist/{Reshape-CHdUjC72.js → Reshape-Ct266DEk.js} +21 -23
- package/dist/TeachableLLM.d.ts +4 -3
- package/dist/TeachableLLM.js +15 -16
- package/dist/Trainer.d.ts +2 -2
- package/dist/Trainer.js +6 -6
- package/dist/{axis_util-BsIr9ZNu.js → axis_util-DofAuy0p.js} +1 -1
- package/dist/backend.js +2 -2
- package/dist/{backend_util-B1XRLuq9.js → backend_util-C7NWHpv7.js} +72 -73
- package/dist/{backend_webgpu-CqpfEImu.js → backend_webgpu-B0Vls736.js} +52 -54
- package/dist/broadcast_to-DDaNMbX7.js +28 -0
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +7 -11
- package/dist/checks/normRMS.js +9 -9
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +11 -12
- package/dist/checks/rope.js +2 -2
- package/dist/clip_by_value-Dn5tzexi.js +12 -0
- package/dist/complex-DClmWqJt.js +11 -0
- package/dist/concat-C6X3AAlQ.js +17 -0
- package/dist/{concat_util-iBYIyuQe.js → concat_util-CHsJFZJJ.js} +1 -1
- package/dist/{dataset-D2P7rHAw.js → dataset-DcjWqUVQ.js} +135 -137
- package/dist/dropout-OxuaJz6z.js +92 -0
- package/dist/expand_dims-BzfJK2uc.js +11 -0
- package/dist/{exports_initializers-CZSUJoVE.js → exports_initializers-eS9QJ6ut.js} +1 -1
- package/dist/floor-DIb-lN_u.js +9 -0
- package/dist/gather-BcO5UQNJ.js +9 -0
- package/dist/{gelu-Bmhopi0J.js → gelu-DqTbCx5x.js} +10 -11
- package/dist/{gpgpu_math-DsCcikas.js → gpgpu_math-CJcbnKPC.js} +841 -1015
- package/dist/index-D0RBWjq8.js +3520 -0
- package/dist/{index-DRyE072i.js → index-Dj5TkmPY.js} +330 -331
- package/dist/{kernel_funcs_utils-CWfOAPGO.js → kernel_funcs_utils-CSaumNDs.js} +132 -134
- package/dist/layers/BaseLayer.js +15 -16
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +7 -7
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +9 -9
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +21 -22
- package/dist/log_sum_exp-VLZgbFAH.js +39 -0
- package/dist/main.d.ts +1 -1
- package/dist/main.js +49 -50
- package/dist/{matMul16-fEAJ4smh.js → matMul16-cDxwemKj.js} +14 -15
- package/dist/matMulGelu-B2s_80-H.js +163 -0
- package/dist/mat_mul-DxpNTCRz.js +11 -0
- package/dist/mod-PrOKlFxH.js +11 -0
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +13 -14
- package/dist/ones-BX_wEgzB.js +14 -0
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +12 -13
- package/dist/ops/cpu/adamMoments.js +6 -7
- package/dist/ops/cpu/appendCache.js +7 -8
- package/dist/ops/cpu/attentionMask.js +11 -11
- package/dist/ops/cpu/fusedSoftmax.js +10 -11
- package/dist/ops/cpu/gatherSub.js +10 -11
- package/dist/ops/cpu/gelu.js +14 -15
- package/dist/ops/cpu/matMul16.js +6 -7
- package/dist/ops/cpu/matMulGelu.js +5 -6
- package/dist/ops/cpu/matMulMul.js +3 -4
- package/dist/ops/cpu/mulDropout.js +3 -4
- package/dist/ops/cpu/normRMS.js +11 -12
- package/dist/ops/cpu/qkv.js +8 -9
- package/dist/ops/cpu/rope.js +9 -10
- package/dist/ops/cpu/scatterSub.js +14 -16
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +10 -11
- package/dist/ops/grads/attentionMask.js +5 -6
- package/dist/ops/grads/gelu.js +3 -4
- package/dist/ops/grads/matMul16.js +4 -5
- package/dist/ops/grads/matMulGelu.js +8 -9
- package/dist/ops/grads/normRMS.js +9 -10
- package/dist/ops/grads/pack16.js +4 -5
- package/dist/ops/grads/qkv.js +17 -19
- package/dist/ops/grads/rope.js +3 -5
- package/dist/ops/grads/softmax16.js +3 -4
- package/dist/ops/grads/unpack16.js +3 -4
- package/dist/ops/grads/utils.d.ts +1 -0
- package/dist/ops/grads/utils.js +8 -4
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +2 -2
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +3 -4
- package/dist/ops/qkv.js +4 -8
- package/dist/ops/reshape16.js +16 -18
- package/dist/ops/rope.d.ts +1 -1
- package/dist/ops/rope.js +3 -8
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +5 -8
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +23 -24
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +2 -3
- package/dist/ops/webgl/adamMoments.js +1 -2
- package/dist/ops/webgl/appendCache.js +1 -2
- package/dist/ops/webgl/attentionMask.js +5 -6
- package/dist/ops/webgl/fusedSoftmax.js +6 -8
- package/dist/ops/webgl/gatherSub.js +6 -7
- package/dist/ops/webgl/gelu.js +2 -3
- package/dist/ops/webgl/log.js +11 -12
- package/dist/ops/webgl/matMul16.js +15 -16
- package/dist/ops/webgl/matMulGelu.js +7 -111
- package/dist/ops/webgl/matMulMul.js +14 -15
- package/dist/ops/webgl/mulDropout.js +8 -9
- package/dist/ops/webgl/normRMS.js +7 -8
- package/dist/ops/webgl/qkv.js +5 -6
- package/dist/ops/webgl/rope.js +7 -8
- package/dist/ops/webgl/scatterSub.js +5 -6
- package/dist/ops/webgpu/adamAdjust.js +10 -12
- package/dist/ops/webgpu/adamMoments.js +8 -10
- package/dist/ops/webgpu/add16.js +8 -9
- package/dist/ops/webgpu/appendCache.js +23 -25
- package/dist/ops/webgpu/attentionMask.js +10 -12
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +12 -14
- package/dist/ops/webgpu/gatherSub.js +9 -11
- package/dist/ops/webgpu/gelu.js +28 -29
- package/dist/ops/webgpu/matMul16.js +26 -28
- package/dist/ops/webgpu/matMul16_program.js +4 -5
- package/dist/ops/webgpu/mul16.js +7 -8
- package/dist/ops/webgpu/normRMS.js +17 -19
- package/dist/ops/webgpu/normRMSGrad.js +21 -28
- package/dist/ops/webgpu/pack16.js +12 -13
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +13 -15
- package/dist/ops/webgpu/rope.js +25 -27
- package/dist/ops/webgpu/scatterSub.js +7 -9
- package/dist/ops/webgpu/slice16.js +21 -23
- package/dist/ops/webgpu/softmax16.js +17 -19
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +7 -8
- package/dist/ops/webgpu/sub16.js +8 -9
- package/dist/ops/webgpu/sum16.js +19 -21
- package/dist/ops/webgpu/transpose16.js +19 -20
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +11 -12
- package/dist/ops/webgpu/unpack16.js +3 -4
- package/dist/ops/webgpu/utils/binary_op.js +7 -8
- package/dist/ops/webgpu/utils/reductions.js +14 -22
- package/dist/ops-FJapAPfm.js +476 -0
- package/dist/pack16-k4jq6aMX.js +39 -0
- package/dist/patches/webgpu_backend.js +19 -20
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +15 -16
- package/dist/{random_width-BVV9HveY.js → random_width-UGQn4OWb.js} +2506 -2761
- package/dist/range-CuGvVN2c.js +10 -0
- package/dist/relu-Cf80uA2p.js +9 -0
- package/dist/reshape-CkjKPPqB.js +9 -0
- package/dist/resize_nearest_neighbor-DB8k9KN_.js +175 -0
- package/dist/rope-BmZmp9uP.js +24 -0
- package/dist/{scatter_nd_util-C7zXRT_h.js → scatter_nd_util-BY22Cc-C.js} +1 -1
- package/dist/selu_util-BuLbmbrl.js +44 -0
- package/dist/{shared-CHhxz-O5.js → shared-B7USJZgw.js} +1 -1
- package/dist/{shared-D2NP_CpY.js → shared-BQboIImQ.js} +379 -381
- package/dist/slice-Aqy7KbJh.js +12 -0
- package/dist/{slice_util-DyjSAD0u.js → slice_util-D8CQRenR.js} +7 -7
- package/dist/{softmax-C9JQEtnO.js → softmax-faLoUZVT.js} +4 -5
- package/dist/split-BNz5jcGc.js +9 -0
- package/dist/squeeze--YMgaAAf.js +10 -0
- package/dist/stack-WJK22CFn.js +11 -0
- package/dist/step-dXR33iOg.js +261 -0
- package/dist/sum-BdplSvq_.js +11 -0
- package/dist/{tensor-0r5yOo2R.js → tensor-BQqrDvpx.js} +1 -1
- package/dist/tensor1d-LxP9asMm.js +11 -0
- package/dist/{tensor2d-CSB4KOb0.js → tensor2d-BN1sSfQO.js} +6 -7
- package/dist/{tensor4d-D7bLqGqz.js → tensor4d-DVwr7pLF.js} +6 -7
- package/dist/{tfjs_backend-CNkSTL0c.js → tfjs_backend-Vi4JfLzT.js} +256 -265
- package/dist/tile-CvN_LyVr.js +11 -0
- package/dist/tokeniser/BaseTokeniser.d.ts +27 -0
- package/dist/tokeniser/BaseTokeniser.js +94 -0
- package/dist/tokeniser/CharTokeniser.d.ts +4 -3
- package/dist/tokeniser/CharTokeniser.js +46 -32
- package/dist/tokeniser/bpe.d.ts +4 -3
- package/dist/tokeniser/bpe.js +60 -45
- package/dist/tokeniser/type.d.ts +11 -0
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.d.ts +2 -2
- package/dist/training/DatasetBuilder.js +32 -36
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.d.ts +3 -3
- package/dist/training/Trainer.js +2 -2
- package/dist/training/sparseCrossEntropy.js +5 -5
- package/dist/transpose-JawVKyZy.js +36 -0
- package/dist/unsorted_segment_sum-LAbmE9G4.js +277 -0
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.d.ts +1 -4
- package/dist/utilities/packed.js +10 -745
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.js +5 -5
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-DzfrwYuP.js → variable-DQ9yYgEU.js} +1 -1
- package/dist/{webgpu_program-DzaQiqel.js → webgpu_program-CAE4RICo.js} +177 -171
- package/dist/{webgpu_util-0_ubCEHJ.js → webgpu_util-BdovYhXr.js} +34 -35
- package/dist/zeros-DeiE2zTa.js +13 -0
- package/dist/zeros_like-BAz3iKru.js +721 -0
- package/package.json +4 -2
- package/dist/Reshape-CDVLyVfz.js +0 -16
- package/dist/broadcast_to-B0ChcDaz.js +0 -30
- package/dist/complex-BBiRlsVq.js +0 -13
- package/dist/concat-DmBLPVGC.js +0 -19
- package/dist/dropout-B1x1kYMa.js +0 -99
- package/dist/expand_dims-ouvfxQ1n.js +0 -13
- package/dist/gather-CH9sdacz.js +0 -10
- package/dist/index-D6Q1lPZO.js +0 -2157
- package/dist/log_sum_exp-D3ftBNY5.js +0 -41
- package/dist/mat_mul-C59XWcJd.js +0 -12
- package/dist/mod-DESSvHIU.js +0 -12
- package/dist/mulmat_packed_gpu-Coh6qbJk.js +0 -55
- package/dist/ones-jU9jlQvM.js +0 -15
- package/dist/ops-BFDtP6th.js +0 -645
- package/dist/pack16-CmVZs6af.js +0 -41
- package/dist/patches/PackedTensor.d.ts +0 -12
- package/dist/patches/PackedTensor.js +0 -11
- package/dist/patches/engine.d.ts +0 -261
- package/dist/patches/engine.js +0 -12
- package/dist/patches/tape.d.ts +0 -12
- package/dist/patches/tape.js +0 -5
- package/dist/range-ZZZD60Fx.js +0 -11
- package/dist/reciprocal-CrYlsAGD.js +0 -10
- package/dist/register_all_kernels-nvj2k7OC.js +0 -12307
- package/dist/relu-BYDneVPn.js +0 -10
- package/dist/reshape-CaPQzFvz.js +0 -10
- package/dist/rope-s4W2XO9B.js +0 -32
- package/dist/selu_util-BGPXmd4B.js +0 -303
- package/dist/sin-Djs4aQiu.js +0 -16
- package/dist/slice-DvovR5wq.js +0 -13
- package/dist/split-DBck65sX.js +0 -10
- package/dist/squeeze-C00Ipm_7.js +0 -11
- package/dist/stack-ChnHwRpX.js +0 -13
- package/dist/sum-ywRJj3Zr.js +0 -12
- package/dist/tensor-CzmOBsdf.js +0 -909
- package/dist/tensor1d-BlUT89BP.js +0 -12
- package/dist/tensor_util-DfwaWayG.js +0 -523
- package/dist/tile-CR074jmp.js +0 -13
- package/dist/transpose-DH4gmHvu.js +0 -38
- package/dist/zeros-DBFVbpv5.js +0 -14
package/dist/tile-CvN_LyVr.js
ADDED
@@ -0,0 +1,11 @@
+import { q as e, u as a, y as i, E as c, T as u } from "./index-D0RBWjq8.js";
+function l(r, t) {
+  const n = a(r, "x", "tile", "string_or_numeric");
+  i(n.rank === t.length, () => `Error in transpose: rank of input ${n.rank} must match length of reps ${t}.`);
+  const s = { x: n }, o = { reps: t };
+  return c.runKernel(u, s, o);
+}
+const p = /* @__PURE__ */ e({ tile_: l });
+export {
+  p as t
+};
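De-minified, this chunk appears to be TensorFlow.js's standard tile op wrapper (judging from the tfjs-core source, including its well-known misworded "Error in transpose" assertion message): e would be op, a convertToTensor, i util.assert, c ENGINE, and u the Tile kernel name. Through the public API the same kernel is reached as follows (illustrative only, not part of the package):

import * as tf from '@tensorflow/tfjs-core';

// Repeat a 2x2 tensor twice along each axis, producing a 4x4 tensor.
const tiled = tf.tile(tf.tensor2d([[1, 2], [3, 4]]), [2, 2]);
tiled.print();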
package/dist/tokeniser/BaseTokeniser.d.ts
ADDED
@@ -0,0 +1,27 @@
+import { Conversation, ITokeniser } from './type';
+import { default as EE } from 'eventemitter3';
+export declare const SPECIALS: string[];
+export default abstract class BaseTokeniser extends EE<'trainStatus'> implements ITokeniser {
+    protected specialTokens: Map<string, number>;
+    protected specialTokenSet: Set<number>;
+    abstract vocabSize: number;
+    abstract eosToken: number;
+    abstract bosToken: number;
+    abstract trained: boolean;
+    abstract addToken(token: string, index?: number): number;
+    isSpecialToken(index: number): boolean;
+    protected addSpecialTokens(): void;
+    protected addSpecialToken(token: string, index: number): void;
+    abstract train(text: string[]): Promise<number>;
+    abstract tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
+    abstract detokenise(tokens: string[][] | number[][]): Promise<string[]>;
+    abstract getVocab(): string[];
+    abstract getMerges(): Promise<[string, string][]>;
+    abstract destroy(): void;
+    abstract encode(text: string): Promise<number[]>;
+    encodeSequence(text: string): Promise<number[]>;
+    encodeConversation(conversation: Conversation[], completion?: boolean): Promise<number[]>;
+    abstract decode(tokens: number[]): Promise<string>;
+    decodeConversation(tokens: number[]): Promise<Conversation[]>;
+    getSpecialTokenIndex(token: string): number | undefined;
+}
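This declaration is the new shared base for both tokenisers: a subclass supplies the vocabulary mechanics (addToken, train, encode/decode) and inherits the special-token bookkeeping plus the conversation helpers. A minimal toy implementation written against these declarations, purely for illustration (not part of the package; relative import path assumed):

import BaseTokeniser from './BaseTokeniser';

// Toy whitespace tokeniser satisfying the abstract contract above.
class WordTokeniser extends BaseTokeniser {
  vocabSize = 0;
  eosToken = 0;
  bosToken = 0;
  trained = false;
  private vocab: string[] = [];
  private index = new Map<string, number>();

  constructor() {
    super();
    this.addSpecialTokens(); // registers SPECIALS through addToken()
    this.eosToken = this.getSpecialTokenIndex('<eos>') ?? 0;
    this.bosToken = this.getSpecialTokenIndex('<bos>') ?? this.eosToken;
  }

  addToken(token: string, index?: number): number {
    const known = this.index.get(token);
    if (known !== undefined) return known;
    const i = index ?? this.vocab.length;
    this.vocab[i] = token;
    this.index.set(token, i);
    this.vocabSize = this.vocab.length;
    return i;
  }

  async train(text: string[]): Promise<number> {
    for (const line of text) for (const w of line.split(/\s+/)) this.addToken(w);
    this.trained = true;
    this.emit('trainStatus', 'trained');
    return this.vocabSize;
  }

  async encode(text: string): Promise<number[]> {
    // Unknown words fall back to <eos> in this toy version.
    return text.split(/\s+/).map((w) => this.index.get(w) ?? this.eosToken);
  }

  async decode(tokens: number[]): Promise<string> {
    return tokens.map((t) => this.vocab[t] ?? '').join(' ');
  }

  async tokenise(text: string[]): Promise<number[][]> {
    return Promise.all(text.map((t) => this.encode(t)));
  }

  async detokenise(tokens: string[][] | number[][]): Promise<string[]> {
    return Promise.all((tokens as number[][]).map((t) => this.decode(t)));
  }

  getVocab(): string[] { return this.vocab; }
  async getMerges(): Promise<[string, string][]> { return []; }
  destroy(): void { this.index.clear(); this.vocab = []; }
}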
package/dist/tokeniser/BaseTokeniser.js
ADDED
@@ -0,0 +1,94 @@
+import { E as r } from "../index-DvYrXKkX.js";
+const h = [
+  "<eos>",
+  "<bos>",
+  "",
+  "<|user_start|>",
+  "<|user_end|>",
+  "<|assistant_start|>",
+  "<|assistant_end|>",
+  "<|system_start|>",
+  "<|system_end|>"
+];
+class k extends r {
+  specialTokens = /* @__PURE__ */ new Map();
+  specialTokenSet = /* @__PURE__ */ new Set();
+  isSpecialToken(e) {
+    return this.specialTokenSet.has(e);
+  }
+  addSpecialTokens() {
+    h.forEach((e, t) => {
+      this.addToken(e, t), this.specialTokens.set(e, t), this.specialTokenSet.add(t);
+    });
+  }
+  addSpecialToken(e, t) {
+    this.specialTokens.set(e, t), this.specialTokenSet.add(t);
+  }
+  async encodeSequence(e) {
+    const t = await this.encode(e);
+    return [this.bosToken, ...t, this.eosToken];
+  }
+  async encodeConversation(e, t) {
+    const s = [[this.bosToken]], a = [
+      this.getSpecialTokenIndex("<|user_start|>"),
+      this.getSpecialTokenIndex("<|assistant_start|>"),
+      this.getSpecialTokenIndex("<|system_start|>")
+    ], n = [
+      this.getSpecialTokenIndex("<|user_end|>"),
+      this.getSpecialTokenIndex("<|assistant_end|>"),
+      this.getSpecialTokenIndex("<|system_end|>")
+    ];
+    for (const i of e) {
+      const c = await this.encode(i.content);
+      switch (i.role) {
+        case "user":
+          s.push([a[0]]);
+          break;
+        case "assistant":
+          s.push([a[1]]);
+          break;
+        case "system":
+          s.push([a[2]]);
+          break;
+      }
+      switch (s.push(c), i.role) {
+        case "user":
+          s.push([n[0]]);
+          break;
+        case "assistant":
+          s.push([n[1]]);
+          break;
+        case "system":
+          s.push([n[2]]);
+          break;
+      }
+    }
+    const o = s.flat();
+    return t ? o.push(a[1]) : o.push(this.eosToken), o;
+  }
+  async decodeConversation(e) {
+    const t = [];
+    let s = 0;
+    for (; s < e.length; ) {
+      const a = e[s];
+      let n = null;
+      if (a === this.getSpecialTokenIndex("<|user_start|>") ? n = "user" : a === this.getSpecialTokenIndex("<|assistant_start|>") ? n = "assistant" : a === this.getSpecialTokenIndex("<|system_start|>") && (n = "system"), n) {
+        s++;
+        const o = [];
+        for (; s < e.length && e[s] !== this.getSpecialTokenIndex(`<|${n}_end|>`); )
+          o.push(e[s]), s++;
+        const i = await this.decode(o);
+        t.push({ role: n, content: i });
+      }
+      s++;
+    }
+    return t;
+  }
+  getSpecialTokenIndex(e) {
+    return this.specialTokens.get(e);
+  }
+}
+export {
+  h as SPECIALS,
+  k as default
+};
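Concretely, encodeConversation brackets each turn with role delimiters and decodeConversation inverts that walk. A sketch of the intended round trip, assuming tok is any trained tokeniser from this package:

import type { Conversation, ITokeniser } from './type';

declare const tok: ITokeniser; // any trained tokeniser from this package

const chat: Conversation[] = [
  { role: 'user', content: 'Hi' },
  { role: 'assistant', content: 'Hello!' },
];

// Layout produced (actual ids depend on the vocabulary):
//   <bos> <|user_start|> Hi <|user_end|> <|assistant_start|> Hello! <|assistant_end|> <eos>
const ids = await tok.encodeConversation(chat);

// With completion = true the sequence ends with <|assistant_start|> instead of
// <eos>, leaving the model positioned to generate the next assistant turn.
const prompt = await tok.encodeConversation(chat, true);

// decodeConversation scans for start markers and rebuilds [{ role, content }, ...].
const restored = await tok.decodeConversation(ids);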
package/dist/tokeniser/CharTokeniser.d.ts
CHANGED
@@ -1,14 +1,15 @@
-import { default as
-
-export default class CharTokeniser extends EE<'trainStatus'> implements ITokeniser {
+import { default as BaseTokeniser } from './BaseTokeniser';
+export default class CharTokeniser extends BaseTokeniser {
     vocabSize: number;
     eosToken: number;
+    bosToken: number;
     unkToken: number;
     vocab: string[];
     private cache;
     private _trained;
     constructor(vocabSize: number);
     constructor(vocab: string[]);
+    addToken(token: string, index?: number): number;
     get trained(): boolean;
     destroy(): void;
     train(text: string[]): Promise<number>;
package/dist/tokeniser/CharTokeniser.js
CHANGED
@@ -1,66 +1,80 @@
-import {
+import k, { SPECIALS as d } from "./BaseTokeniser.js";
 const u = ["<eos>", "<unk>"];
 class b extends k {
   vocabSize = 0;
   eosToken = 0;
+  bosToken = 0;
   unkToken = 0;
   vocab = [];
   cache = /* @__PURE__ */ new Map();
   _trained = !1;
-  constructor(
-    if (super(), Array.isArray(
-      if (this.vocab =
-        this.vocabSize = this.vocab.length,
-          this.
+  constructor(s) {
+    if (super(), Array.isArray(s)) {
+      if (this.vocab = s, this.vocab.length > 0)
+        this.vocabSize = this.vocab.length, d.forEach((t) => {
+          const e = this.vocab.indexOf(t);
+          e !== -1 && this.addSpecialToken(t, e);
+        }), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex("") ?? -1, this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((t) => t === "<pad>" ? "" : t), this.vocab.forEach((t, e) => {
+          this.cache.set(t, e);
         });
       else
         throw new Error("Vocab cannot be empty");
       this._trained = !0;
     } else
-      this.vocabSize =
+      this.vocabSize = s, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
+        this.cache.set(t, e);
+      }), this.cache.set("", this.unkToken);
+  }
+  addToken(s, t) {
+    if (this.cache.has(s))
+      return this.cache.get(s);
+    let e;
+    if (t !== void 0 ? e = t : (e = this.vocab.indexOf("", this.unkToken + 1), e === -1 && (e = this.vocabSize)), e >= this.vocabSize)
+      throw new Error("Vocab size exceeded");
+    return this.vocab[e] = s, this.cache.set(s, e), e;
   }
   get trained() {
     return this.vocab.length === this.vocabSize && this._trained;
   }
   destroy() {
   }
-  async train(
-    const
+  async train(s) {
+    const t = s.map((n) => n.split("")).flat(), e = new Set(t), i = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
     if (h === -1)
       return this.vocabSize;
-    if (this._trained = !0,
-      const
-
-
-    }),
+    if (this._trained = !0, i.length > o) {
+      const n = /* @__PURE__ */ new Map();
+      t.forEach((a) => {
+        n.set(a, (n.get(a) || 0) + 1);
+      }), i.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), i.splice(0, i.length - o);
     }
     let c = h;
     if (c !== -1) {
-      const
-      for (const a of
-        if (!
+      const n = new Set(this.vocab);
+      for (const a of i)
+        if (!n.has(a) && (this.vocab[c] = a, n.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
          break;
     }
-    return this.cache.clear(), this.vocab.forEach((
-      this.cache.set(
+    return this.cache.clear(), this.vocab.forEach((n, a) => {
+      this.cache.set(n, a);
     }), this.emit("trainStatus", "trained"), this.vocabSize;
   }
-  async tokenise(
+  async tokenise(s, t) {
    if (!this.trained)
      throw new Error("Tokeniser not trained");
-    return
+    return s.map((i) => t ? i.split("").map((h) => this.cache.get(h) ?? this.unkToken) : i.split("").map((h) => {
      const o = this.cache.get(h);
      return o !== void 0 ? this.vocab[o] : "";
    }));
   }
-  async detokenise(
-    return
+  async detokenise(s) {
+    return s.map((e) => e.map((i) => this.vocab[i]).join(""));
   }
-  async encode(
-    return (await this.tokenise([
+  async encode(s) {
+    return (await this.tokenise([s], !0))[0];
   }
-  async decode(
-    return (await this.detokenise([
+  async decode(s) {
+    return (await this.detokenise([s]))[0];
   }
   getVocab() {
     return this.vocab;
@@ -68,11 +82,11 @@ class b extends k {
   async getMerges() {
     return [];
   }
-  async createTrainingData(
-    const
-    for (let o = 0; o <
-
-    return [
+  async createTrainingData(s, t = 5) {
+    const e = await this.tokenise(s, !0), i = [], h = [];
+    for (let o = 0; o < e.length - t; o++)
+      i.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
+    return [i, h];
   }
 }
 export {
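Net effect of the CharTokeniser changes: it now derives from BaseTokeniser, reserves the full SPECIALS set (including <bos> and the chat delimiters) inside its fixed-size vocabulary, and gains addToken. A hedged usage sketch (import path assumed):

import CharTokeniser from './CharTokeniser';

const tok = new CharTokeniser(128);   // fixed-size vocab; specials pre-registered
await tok.train(['hello world']);     // free ("") slots filled with observed chars
const [ids] = await tok.tokenise(['hello'], true) as number[][];

// addToken returns the existing id if the character is known; otherwise it
// claims the next free slot and throws "Vocab size exceeded" when none remain.
const q = tok.addToken('q');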
package/dist/tokeniser/bpe.d.ts
CHANGED
@@ -1,6 +1,5 @@
-import { default as
-
-export default class BPETokeniser extends EE<'trainStatus'> implements ITokeniser {
+import { default as BaseTokeniser } from './BaseTokeniser';
+export default class BPETokeniser extends BaseTokeniser {
     private targetSize;
     private vocab;
     private vocabIndex;
@@ -8,10 +7,12 @@ export default class BPETokeniser extends EE<'trainStatus'> implements ITokenise
     private pretokenMap;
     constructor(vocabSize: number);
     constructor(vocab: string[], merges?: [string, string][]);
+    addToken(token: string, index?: number): number;
     destroy(): void;
     get trained(): boolean;
     get vocabSize(): number;
     get eosToken(): number;
+    get bosToken(): number;
     get unkToken(): number;
     train(text: string[]): Promise<number>;
     getVocab(): string[];
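Per these declarations, a BPE tokeniser can be rehydrated from a saved vocabulary and merge list, with bosToken now resolved from the vocabulary. An assumed usage sketch (import path and persisted state are illustrative):

import BPETokeniser from './bpe';

// savedVocab/savedMerges stand in for previously persisted tokeniser state.
declare const savedVocab: string[];
declare const savedMerges: [string, string][];

// Rebuild a trained tokeniser; merges keep their original order, which
// tokeniseWord replays one merge at a time.
const tok = new BPETokeniser(savedVocab, savedMerges);
const ids = await tok.encode('hello world');
console.log(tok.bosToken, tok.eosToken); // resolved from "<bos>" / "<eos>" entries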
package/dist/tokeniser/bpe.js
CHANGED
@@ -1,68 +1,80 @@
 import l from "../utilities/tokenParse.js";
-import {
+import d, { SPECIALS as f } from "./BaseTokeniser.js";
 function u(o, e) {
   return `${o}-::-${e}`;
 }
-function
+function b(o) {
   const e = /* @__PURE__ */ new Map();
   for (let s = 0; s < o.length; s++) {
     const t = o[s];
-    for (let
-      const
-        a: t[
-        b: t[
+    for (let n = 0; n < t.length - 1; n++) {
+      const r = u(t[n], t[n + 1]), i = e.get(r) || {
+        a: t[n],
+        b: t[n + 1],
         count: 0,
         instances: /* @__PURE__ */ new Set()
       };
-
+      i.count += 1, i.instances.add(s), e.set(r, i);
     }
   }
   return { pairs: e, tokens: o };
 }
-function h(o, e, s, t,
-  const
-  if (o.pairs.has(
-    const
-
+function h(o, e, s, t, n) {
+  const r = u(e, s);
+  if (o.pairs.has(r)) {
+    const i = o.pairs.get(r);
+    i.count += n, n > 0 ? i.instances.add(t) : i.count <= 0 ? o.pairs.delete(r) : i.instances.delete(t);
   } else
-    o.pairs.set(
+    o.pairs.set(r, { a: e, b: s, count: n, instances: /* @__PURE__ */ new Set([t]) });
 }
-function
+function k(o) {
   let e = null, s = 0;
   for (const t of o.pairs.values())
     t.count > s && (s = t.count, e = t);
   return e;
 }
-function
+function m(o, e) {
   return o.map((s) => {
     const t = [];
-    for (let
-
+    for (let n = 0; n < s.length; n++)
+      n < s.length - 1 && s[n] === e[0] && s[n + 1] === e[1] ? (t.push(e[0] + e[1]), n++) : t.push(s[n]);
     return t;
   });
 }
-function
+function v(o, e) {
   e.instances.forEach((s) => {
-    const t = o.tokens[s],
-    for (let
-      if (
-        const
-
+    const t = o.tokens[s], n = [];
+    for (let r = 0; r < t.length; r++)
+      if (r < t.length - 1 && t[r] === e.a && t[r + 1] === e.b) {
+        const i = e.a + e.b;
+        n.push(i), r > 0 && (h(o, t[r - 1], e.a, s, -1), h(o, t[r - 1], i, s, 1)), r++, r < t.length - 1 && (h(o, e.b, t[r + 1], s, -1), h(o, i, t[r + 1], s, 1));
      } else
-
-    o.tokens[s] =
+        n.push(t[r]);
+    o.tokens[s] = n;
   }), o.pairs.delete(u(e.a, e.b));
 }
-class
+class T extends d {
   targetSize;
   vocab = /* @__PURE__ */ new Set();
   vocabIndex = /* @__PURE__ */ new Map();
   merges = [];
   pretokenMap = /* @__PURE__ */ new Map();
   constructor(e, s) {
-    super(), Array.isArray(e) ? (e.forEach((t,
-      this.vocab.add(t), this.vocabIndex.set(t,
-    }), s && (this.merges = s), this.targetSize = e.length
+    super(), Array.isArray(e) ? (e.forEach((t, n) => {
+      this.vocab.add(t), this.vocabIndex.set(t, n);
+    }), s && (this.merges = s), this.targetSize = e.length, f.forEach((t) => {
+      const n = e.indexOf(t);
+      n !== -1 && this.addSpecialToken(t, n);
+    })) : (this.addSpecialTokens(), this.targetSize = e);
+  }
+  addToken(e, s) {
+    if (this.vocab.has(e))
+      return this.vocabIndex.get(e);
+    {
+      this.vocab.add(e);
+      const t = s !== void 0 ? s : this.vocab.size - 1;
+      return this.vocabIndex.set(e, t), t;
+    }
   }
   destroy() {
     this.vocab.clear(), this.vocabIndex.clear(), this.merges = [], this.pretokenMap.clear();
@@ -76,26 +88,29 @@ class S extends f {
   get eosToken() {
     return this.vocabIndex.get("<eos>") ?? 0;
   }
+  get bosToken() {
+    return this.vocabIndex.get("<bos>") ?? 0;
+  }
   get unkToken() {
     return this.vocabIndex.get("") ?? 1;
   }
   async train(e) {
-    const s = e.map((
-    this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.
-    const
+    const s = e.map((a) => l(a)).flat(1), t = new Set(s);
+    this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.addSpecialTokens();
+    const n = Array.from(t), r = n.map((a) => Array.from(a).map((c) => (this.vocab.add(c), c))), i = b(r);
     for (; this.vocab.size < this.targetSize && this.merges.length < this.targetSize; ) {
-      const
-      if (!
+      const a = k(i);
+      if (!a)
        break;
-      this.merges.push([
+      this.merges.push([a.a, a.b]), this.vocab.add(a.a + a.b), v(i, a);
     }
-
-      const c =
-      this.pretokenMap.set(
+    n.forEach((a, p) => {
+      const c = r[p];
+      this.pretokenMap.set(a, c);
     }), this.vocabIndex.clear();
     let g = 0;
-    for (const
-      this.vocabIndex.set(
+    for (const a of this.vocab.keys())
+      this.vocabIndex.set(a, g++);
     return this.emit("trainStatus", "trained"), this.vocab.size;
   }
   getVocab() {
@@ -107,19 +122,19 @@ class S extends f {
   tokeniseWord(e) {
     let s = Array.from(e);
     return this.merges.forEach((t) => {
-      s =
+      s = m([s], t)[0];
     }), this.pretokenMap.set(e, s), s;
   }
   tokeniseStrings(e) {
-    return e.map((s) => l(s).map((
+    return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
   }
   async tokenise(e, s) {
     const t = this.tokeniseStrings(e);
-    return s ? t.map((
+    return s ? t.map((n) => n.map((r) => this.vocabIndex.get(r) ?? this.unkToken)) : t.map((n) => n.map((r) => this.vocab.has(r) ? r : ""));
   }
   async detokenise(e) {
     const s = this.getVocab();
-    return e.map((
+    return e.map((n) => n.map((r) => s[r]).join(""));
   }
   async encode(e) {
     return (await this.tokenise([e], !0))[0];
@@ -129,5 +144,5 @@ class S extends f {
   }
 }
 export {
-
+  T as default
 };
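For orientation, the minified helpers implement classic byte-pair-encoding training: b counts adjacent symbol pairs (keyed with the "-::-" separator), k picks the most frequent pair, v merges it everywhere while patching neighbouring pair counts through h, and m replays a single merge at tokenisation time. A readable, non-incremental sketch of the same loop (reconstructed names; not the package's code):

function trainBPE(words: string[][], targetSize: number): Array<[string, string]> {
  const merges: Array<[string, string]> = [];
  const vocab = new Set(words.flat());
  while (vocab.size < targetSize) {
    // Count adjacent symbol pairs across all words.
    const counts = new Map<string, number>();
    for (const w of words)
      for (let i = 0; i < w.length - 1; i++) {
        const key = `${w[i]}-::-${w[i + 1]}`;
        counts.set(key, (counts.get(key) ?? 0) + 1);
      }
    // Pick the most frequent pair; stop when no pair remains.
    let best: string | null = null, bestCount = 0;
    for (const [k, c] of counts) if (c > bestCount) { bestCount = c; best = k; }
    if (!best) break;
    const [a, b] = best.split('-::-');
    merges.push([a, b]);
    vocab.add(a + b);
    // Replace every occurrence of the pair with the merged symbol.
    words = words.map((w) => {
      const out: string[] = [];
      for (let i = 0; i < w.length; i++)
        if (i < w.length - 1 && w[i] === a && w[i + 1] === b) { out.push(a + b); i++; }
        else out.push(w[i]);
      return out;
    });
  }
  return merges;
}

The shipped version avoids the full recount per iteration by tracking, for every pair, the set of words it occurs in and adjusting only the pairs adjacent to each merge.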
package/dist/tokeniser/type.d.ts
CHANGED
@@ -1,4 +1,9 @@
 import { default as EE } from 'eventemitter3';
+export type Roles = 'user' | 'assistant' | 'system';
+export interface Conversation {
+    role: Roles;
+    content: string;
+}
 export interface ITokeniser extends EE<'trainStatus'> {
     train(text: string[]): Promise<number>;
     tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
@@ -7,8 +12,14 @@ export interface ITokeniser extends EE<'trainStatus'> {
     getMerges(): Promise<[string, string][]>;
     destroy(): void;
     encode(text: string): Promise<number[]>;
+    encodeConversation(conversation: Conversation[], completion?: boolean): Promise<number[]>;
+    encodeSequence(text: string): Promise<number[]>;
     decode(tokens: number[]): Promise<string>;
+    decodeConversation(tokens: number[]): Promise<Conversation[]>;
     vocabSize: number;
     eosToken: number;
+    bosToken: number;
     trained: boolean;
+    getSpecialTokenIndex(token: string): number | undefined;
+    isSpecialToken(index: number): boolean;
 }
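The new chat types are plain data shapes, so any literal with one of the three known roles type-checks:

import type { Conversation, Roles } from './type';

const role: Roles = 'system';
const history: Conversation[] = [
  { role, content: 'You are terse.' },
  { role: 'user', content: 'Why?' },
];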
package/dist/training/Adam.js
CHANGED
@@ -1,7 +1,7 @@
 import { adamAdjust as b } from "../ops/adamAdjust.js";
 import { adamMoments as d } from "../ops/adamMoments.js";
-import { O as g, e as h, t as o, d as B } from "../index-
-import { z as M } from "../zeros-
+import { O as g, e as h, t as o, d as B } from "../index-D0RBWjq8.js";
+import { z as M } from "../zeros-DeiE2zTa.js";
 class R extends g {
   constructor(t, a, e, s, i = null) {
     super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());
package/dist/training/AdamExt.js
CHANGED
package/dist/training/DatasetBuilder.d.ts
CHANGED
@@ -1,8 +1,8 @@
 import { Tensor } from '@tensorflow/tfjs-core';
-import { ITokeniser } from '../tokeniser/type';
+import { Conversation, ITokeniser } from '../tokeniser/type';
 import { Dataset } from '@tensorflow/tfjs-data';
 export declare const PAGE_FACTOR = 8;
-export declare function flattenTokens(textData:
+export declare function flattenTokens(textData: Conversation[][], tokenizer: ITokeniser): Promise<number[]>;
 export declare class DatasetBuilder {
     tokenizer: ITokeniser;
     blockSize: number;
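flattenTokens therefore changes meaning as well as type: instead of validating pre-tokenised ids, it now encodes whole conversations and concatenates them. An assumed usage matching the new declaration:

import { flattenTokens } from './DatasetBuilder';
import type { Conversation, ITokeniser } from '../tokeniser/type';

declare const tokenizer: ITokeniser; // any trained tokeniser from this package

const data: Conversation[][] = [
  [{ role: 'user', content: 'Hi' }, { role: 'assistant', content: 'Hello!' }],
];
// Each conversation is passed through tokenizer.encodeConversation and the
// resulting id arrays are concatenated into one training stream.
const tokens = await flattenTokens(data, tokenizer);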
package/dist/training/DatasetBuilder.js
CHANGED
@@ -1,67 +1,63 @@
-import { t as
-import { d as u, i as
+import { t as z } from "../index-D0RBWjq8.js";
+import { d as u, i as f } from "../dataset-DcjWqUVQ.js";
 import "../index-Cp39cXWe.js";
-function
+function S(a) {
   return u(async () => {
-    const t = await
-    return
+    const t = await a();
+    return f(() => t.next());
   });
 }
-const
-async function y(
-
-  for (const e of a)
-    if (e < 0 || e >= t.vocabSize)
-      throw new Error(`Invalid token index ${e} found in tokenised data`);
-  return a;
+const b = 8;
+async function y(a, t) {
+  return (await Promise.all(a.map((r) => t.encodeConversation(r)))).flat();
 }
-class
+class x {
   tokenizer;
   blockSize;
   pageSize;
   constructor(t, s = 128) {
-    this.tokenizer = t, this.blockSize = s, this.pageSize = s *
+    this.tokenizer = t, this.blockSize = s, this.pageSize = s * b;
   }
   // Create dataset from text files
-  async createTextDataset(t, s = 32,
+  async createTextDataset(t, s = 32, i, r) {
     if (t.length < this.blockSize + 1)
       throw new Error(`Not enough tokens (${t.length}) for block size ${this.blockSize}`);
-    if (
+    if (i && i.size > t.length / this.pageSize / 2)
       throw new Error("Too many masked pages - would leave insufficient training data");
-    const
-    if (
-      const
+    const l = (function* () {
+      if (i && r) {
+        const e = Array.from(i);
        for (; ; ) {
-        const
-        if (
+          const n = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), o = e[n] * this.pageSize + h;
+          if (o + this.blockSize + 1 > t.length)
            continue;
-        const
-        yield { xs:
+          const c = t.slice(o, o + this.blockSize), g = t.slice(o + 1, o + this.blockSize + 1);
+          yield { xs: c, ys: g };
        }
      } else
        for (; ; ) {
-        const
-        if (
-          const
-          if (
+          const e = Math.floor(Math.random() * (t.length - this.blockSize - 1));
+          if (i) {
+            const o = Math.floor(e / this.pageSize), c = i.has(o);
+            if (c && !r || !c && r)
              continue;
          }
-        const
-        yield { xs:
+          const n = t.slice(e, e + this.blockSize), h = t.slice(e + 1, e + this.blockSize + 1);
+          yield { xs: n, ys: h };
        }
    }).bind(this);
-    return
-      const
-      return
-        xs:
-        ys:
+    return S(l).batch(s).map((e) => {
+      const n = e;
+      return z(() => ({
+        xs: n.xs.cast("int32"),
+        ys: n.ys.cast("int32")
       // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
      }));
    }).prefetch(2);
  }
 }
 export {
-
-
+  x as DatasetBuilder,
+  b as PAGE_FACTOR,
   y as flattenTokens
 };
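De-minified, the generator inside createTextDataset rejection-samples random token windows, using the optional page mask to keep held-out pages out of training or, when the second flag is set, to sample only from them; targets are the inputs shifted one token right. A sketch under those assumptions (reconstructed names; not the package's code):

// The shipped generator also has a fast path that, when sampling only masked
// pages, picks a masked page index directly instead of rejection-sampling.
function* sampleBlocks(
  tokens: number[],
  blockSize: number,
  pageSize: number,
  masked?: Set<number>,
  fromMasked?: boolean,
) {
  for (;;) {
    const start = Math.floor(Math.random() * (tokens.length - blockSize - 1));
    if (masked) {
      const page = Math.floor(start / pageSize);
      // Training skips masked pages; validation (fromMasked) skips the rest,
      // keeping the two sets of pages disjoint.
      if (masked.has(page) !== !!fromMasked) continue;
    }
    yield {
      xs: tokens.slice(start, start + blockSize),         // input window
      ys: tokens.slice(start + 1, start + blockSize + 1), // next-token targets
    };
  }
}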
package/dist/training/FullTrainer.js
CHANGED
@@ -1,6 +1,6 @@
 import b from "./Trainer.js";
 import L from "./Evaluator.js";
-import { d as w } from "../index-
+import { d as w } from "../index-D0RBWjq8.js";
 import y from "../utilities/profile.js";
 import { createTensorStatistics as D } from "../checks/weights.js";
 const T = {
|