@genai-fi/nanogpt 0.10.2 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262)
  1. package/dist/Generator.d.ts +10 -5
  2. package/dist/Generator.js +11760 -146
  3. package/dist/{RealDiv-zz7FpkKX.js → RealDiv-Ds-jvL09.js} +28 -30
  4. package/dist/Reshape-Cd6e-Otn.js +14 -0
  5. package/dist/{Reshape-CHdUjC72.js → Reshape-Ct266DEk.js} +21 -23
  6. package/dist/TeachableLLM.d.ts +4 -3
  7. package/dist/TeachableLLM.js +15 -16
  8. package/dist/Trainer.d.ts +2 -2
  9. package/dist/Trainer.js +6 -6
  10. package/dist/{axis_util-BsIr9ZNu.js → axis_util-DofAuy0p.js} +1 -1
  11. package/dist/backend.js +2 -2
  12. package/dist/{backend_util-B1XRLuq9.js → backend_util-C7NWHpv7.js} +72 -73
  13. package/dist/{backend_webgpu-CqpfEImu.js → backend_webgpu-B0Vls736.js} +52 -54
  14. package/dist/broadcast_to-DDaNMbX7.js +28 -0
  15. package/dist/checks/appendCache.js +2 -2
  16. package/dist/checks/attentionMask.js +3 -3
  17. package/dist/checks/gelu.js +2 -2
  18. package/dist/checks/matMulGelu.js +7 -11
  19. package/dist/checks/normRMS.js +9 -9
  20. package/dist/checks/normRMSGrad.js +3 -3
  21. package/dist/checks/packUnpack.js +2 -2
  22. package/dist/checks/qkv.js +11 -12
  23. package/dist/checks/rope.js +2 -2
  24. package/dist/clip_by_value-Dn5tzexi.js +12 -0
  25. package/dist/complex-DClmWqJt.js +11 -0
  26. package/dist/concat-C6X3AAlQ.js +17 -0
  27. package/dist/{concat_util-iBYIyuQe.js → concat_util-CHsJFZJJ.js} +1 -1
  28. package/dist/{dataset-D2P7rHAw.js → dataset-DcjWqUVQ.js} +135 -137
  29. package/dist/dropout-OxuaJz6z.js +92 -0
  30. package/dist/expand_dims-BzfJK2uc.js +11 -0
  31. package/dist/{exports_initializers-CZSUJoVE.js → exports_initializers-eS9QJ6ut.js} +1 -1
  32. package/dist/floor-DIb-lN_u.js +9 -0
  33. package/dist/gather-BcO5UQNJ.js +9 -0
  34. package/dist/{gelu-Bmhopi0J.js → gelu-DqTbCx5x.js} +10 -11
  35. package/dist/{gpgpu_math-DsCcikas.js → gpgpu_math-CJcbnKPC.js} +841 -1015
  36. package/dist/index-D0RBWjq8.js +3520 -0
  37. package/dist/{index-DRyE072i.js → index-Dj5TkmPY.js} +330 -331
  38. package/dist/{kernel_funcs_utils-CWfOAPGO.js → kernel_funcs_utils-CSaumNDs.js} +132 -134
  39. package/dist/layers/BaseLayer.js +15 -16
  40. package/dist/layers/CausalSelfAttention.js +6 -6
  41. package/dist/layers/MLP.js +4 -4
  42. package/dist/layers/PositionEmbedding.js +7 -7
  43. package/dist/layers/RMSNorm.js +3 -3
  44. package/dist/layers/RoPECache.js +9 -9
  45. package/dist/layers/TiedEmbedding.js +6 -6
  46. package/dist/layers/TransformerBlock.js +1 -1
  47. package/dist/loader/loadTransformers.js +1 -1
  48. package/dist/loader/oldZipLoad.js +21 -22
  49. package/dist/log_sum_exp-VLZgbFAH.js +39 -0
  50. package/dist/main.d.ts +1 -1
  51. package/dist/main.js +49 -50
  52. package/dist/{matMul16-fEAJ4smh.js → matMul16-cDxwemKj.js} +14 -15
  53. package/dist/matMulGelu-B2s_80-H.js +163 -0
  54. package/dist/mat_mul-DxpNTCRz.js +11 -0
  55. package/dist/mod-PrOKlFxH.js +11 -0
  56. package/dist/models/NanoGPTV1.js +2 -2
  57. package/dist/models/model.js +13 -14
  58. package/dist/ones-BX_wEgzB.js +14 -0
  59. package/dist/ops/adamAdjust.js +1 -1
  60. package/dist/ops/adamMoments.js +1 -1
  61. package/dist/ops/add16.js +1 -1
  62. package/dist/ops/appendCache.js +3 -3
  63. package/dist/ops/attentionMask.js +1 -1
  64. package/dist/ops/concat16.js +2 -2
  65. package/dist/ops/cpu/adamAdjust.js +12 -13
  66. package/dist/ops/cpu/adamMoments.js +6 -7
  67. package/dist/ops/cpu/appendCache.js +7 -8
  68. package/dist/ops/cpu/attentionMask.js +11 -11
  69. package/dist/ops/cpu/fusedSoftmax.js +10 -11
  70. package/dist/ops/cpu/gatherSub.js +10 -11
  71. package/dist/ops/cpu/gelu.js +14 -15
  72. package/dist/ops/cpu/matMul16.js +6 -7
  73. package/dist/ops/cpu/matMulGelu.js +5 -6
  74. package/dist/ops/cpu/matMulMul.js +3 -4
  75. package/dist/ops/cpu/mulDropout.js +3 -4
  76. package/dist/ops/cpu/normRMS.js +11 -12
  77. package/dist/ops/cpu/qkv.js +8 -9
  78. package/dist/ops/cpu/rope.js +9 -10
  79. package/dist/ops/cpu/scatterSub.js +14 -16
  80. package/dist/ops/dot16.js +2 -2
  81. package/dist/ops/gatherSub.js +1 -1
  82. package/dist/ops/gelu.js +2 -2
  83. package/dist/ops/grads/add16.js +10 -11
  84. package/dist/ops/grads/attentionMask.js +5 -6
  85. package/dist/ops/grads/gelu.js +3 -4
  86. package/dist/ops/grads/matMul16.js +4 -5
  87. package/dist/ops/grads/matMulGelu.js +8 -9
  88. package/dist/ops/grads/normRMS.js +9 -10
  89. package/dist/ops/grads/pack16.js +4 -5
  90. package/dist/ops/grads/qkv.js +17 -19
  91. package/dist/ops/grads/rope.js +3 -5
  92. package/dist/ops/grads/softmax16.js +3 -4
  93. package/dist/ops/grads/unpack16.js +3 -4
  94. package/dist/ops/grads/utils.d.ts +1 -0
  95. package/dist/ops/grads/utils.js +8 -4
  96. package/dist/ops/matMul16.js +3 -3
  97. package/dist/ops/matMulGelu.js +2 -2
  98. package/dist/ops/matMulMul.js +1 -1
  99. package/dist/ops/mul16.js +1 -1
  100. package/dist/ops/mulDrop.js +1 -1
  101. package/dist/ops/normRMS.js +1 -1
  102. package/dist/ops/pack16.js +3 -4
  103. package/dist/ops/qkv.js +4 -8
  104. package/dist/ops/reshape16.js +16 -18
  105. package/dist/ops/rope.d.ts +1 -1
  106. package/dist/ops/rope.js +3 -8
  107. package/dist/ops/scatterSub.js +1 -1
  108. package/dist/ops/slice16.js +2 -2
  109. package/dist/ops/softmax16.js +5 -8
  110. package/dist/ops/sub16.js +1 -1
  111. package/dist/ops/sum16.js +2 -2
  112. package/dist/ops/transpose16.js +23 -24
  113. package/dist/ops/unpack16.js +2 -2
  114. package/dist/ops/webgl/adamAdjust.js +2 -3
  115. package/dist/ops/webgl/adamMoments.js +1 -2
  116. package/dist/ops/webgl/appendCache.js +1 -2
  117. package/dist/ops/webgl/attentionMask.js +5 -6
  118. package/dist/ops/webgl/fusedSoftmax.js +6 -8
  119. package/dist/ops/webgl/gatherSub.js +6 -7
  120. package/dist/ops/webgl/gelu.js +2 -3
  121. package/dist/ops/webgl/log.js +11 -12
  122. package/dist/ops/webgl/matMul16.js +15 -16
  123. package/dist/ops/webgl/matMulGelu.js +7 -111
  124. package/dist/ops/webgl/matMulMul.js +14 -15
  125. package/dist/ops/webgl/mulDropout.js +8 -9
  126. package/dist/ops/webgl/normRMS.js +7 -8
  127. package/dist/ops/webgl/qkv.js +5 -6
  128. package/dist/ops/webgl/rope.js +7 -8
  129. package/dist/ops/webgl/scatterSub.js +5 -6
  130. package/dist/ops/webgpu/adamAdjust.js +10 -12
  131. package/dist/ops/webgpu/adamMoments.js +8 -10
  132. package/dist/ops/webgpu/add16.js +8 -9
  133. package/dist/ops/webgpu/appendCache.js +23 -25
  134. package/dist/ops/webgpu/attentionMask.js +10 -12
  135. package/dist/ops/webgpu/attentionMask32_program.js +2 -2
  136. package/dist/ops/webgpu/concat16.js +12 -14
  137. package/dist/ops/webgpu/gatherSub.js +9 -11
  138. package/dist/ops/webgpu/gelu.js +28 -29
  139. package/dist/ops/webgpu/matMul16.js +26 -28
  140. package/dist/ops/webgpu/matMul16_program.js +4 -5
  141. package/dist/ops/webgpu/mul16.js +7 -8
  142. package/dist/ops/webgpu/normRMS.js +17 -19
  143. package/dist/ops/webgpu/normRMSGrad.js +21 -28
  144. package/dist/ops/webgpu/pack16.js +12 -13
  145. package/dist/ops/webgpu/pack16_program.js +2 -2
  146. package/dist/ops/webgpu/qkv.js +13 -15
  147. package/dist/ops/webgpu/rope.js +25 -27
  148. package/dist/ops/webgpu/scatterSub.js +7 -9
  149. package/dist/ops/webgpu/slice16.js +21 -23
  150. package/dist/ops/webgpu/softmax16.js +17 -19
  151. package/dist/ops/webgpu/softmax16_program.js +2 -2
  152. package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
  153. package/dist/ops/webgpu/softmax16grad.js +7 -8
  154. package/dist/ops/webgpu/sub16.js +8 -9
  155. package/dist/ops/webgpu/sum16.js +19 -21
  156. package/dist/ops/webgpu/transpose16.js +19 -20
  157. package/dist/ops/webgpu/transpose16_program.js +2 -2
  158. package/dist/ops/webgpu/transpose16_shared_program.js +11 -12
  159. package/dist/ops/webgpu/unpack16.js +3 -4
  160. package/dist/ops/webgpu/utils/binary_op.js +7 -8
  161. package/dist/ops/webgpu/utils/reductions.js +14 -22
  162. package/dist/ops-FJapAPfm.js +476 -0
  163. package/dist/pack16-k4jq6aMX.js +39 -0
  164. package/dist/patches/webgpu_backend.js +19 -20
  165. package/dist/patches/webgpu_base.js +1 -1
  166. package/dist/patches/webgpu_program.js +15 -16
  167. package/dist/{random_width-BVV9HveY.js → random_width-UGQn4OWb.js} +2506 -2761
  168. package/dist/range-CuGvVN2c.js +10 -0
  169. package/dist/relu-Cf80uA2p.js +9 -0
  170. package/dist/reshape-CkjKPPqB.js +9 -0
  171. package/dist/resize_nearest_neighbor-DB8k9KN_.js +175 -0
  172. package/dist/rope-BmZmp9uP.js +24 -0
  173. package/dist/{scatter_nd_util-C7zXRT_h.js → scatter_nd_util-BY22Cc-C.js} +1 -1
  174. package/dist/selu_util-BuLbmbrl.js +44 -0
  175. package/dist/{shared-CHhxz-O5.js → shared-B7USJZgw.js} +1 -1
  176. package/dist/{shared-D2NP_CpY.js → shared-BQboIImQ.js} +379 -381
  177. package/dist/slice-Aqy7KbJh.js +12 -0
  178. package/dist/{slice_util-DyjSAD0u.js → slice_util-D8CQRenR.js} +7 -7
  179. package/dist/{softmax-C9JQEtnO.js → softmax-faLoUZVT.js} +4 -5
  180. package/dist/split-BNz5jcGc.js +9 -0
  181. package/dist/squeeze--YMgaAAf.js +10 -0
  182. package/dist/stack-WJK22CFn.js +11 -0
  183. package/dist/step-dXR33iOg.js +261 -0
  184. package/dist/sum-BdplSvq_.js +11 -0
  185. package/dist/{tensor-0r5yOo2R.js → tensor-BQqrDvpx.js} +1 -1
  186. package/dist/tensor1d-LxP9asMm.js +11 -0
  187. package/dist/{tensor2d-CSB4KOb0.js → tensor2d-BN1sSfQO.js} +6 -7
  188. package/dist/{tensor4d-D7bLqGqz.js → tensor4d-DVwr7pLF.js} +6 -7
  189. package/dist/{tfjs_backend-CNkSTL0c.js → tfjs_backend-Vi4JfLzT.js} +256 -265
  190. package/dist/tile-CvN_LyVr.js +11 -0
  191. package/dist/tokeniser/BaseTokeniser.d.ts +27 -0
  192. package/dist/tokeniser/BaseTokeniser.js +94 -0
  193. package/dist/tokeniser/CharTokeniser.d.ts +4 -3
  194. package/dist/tokeniser/CharTokeniser.js +46 -32
  195. package/dist/tokeniser/bpe.d.ts +4 -3
  196. package/dist/tokeniser/bpe.js +60 -45
  197. package/dist/tokeniser/type.d.ts +11 -0
  198. package/dist/training/Adam.js +2 -2
  199. package/dist/training/AdamExt.js +1 -1
  200. package/dist/training/DatasetBuilder.d.ts +2 -2
  201. package/dist/training/DatasetBuilder.js +32 -36
  202. package/dist/training/FullTrainer.js +1 -1
  203. package/dist/training/Trainer.d.ts +3 -3
  204. package/dist/training/Trainer.js +2 -2
  205. package/dist/training/sparseCrossEntropy.js +5 -5
  206. package/dist/transpose-JawVKyZy.js +36 -0
  207. package/dist/unsorted_segment_sum-LAbmE9G4.js +277 -0
  208. package/dist/utilities/dummy.js +3 -3
  209. package/dist/utilities/multinomialCPU.js +2 -2
  210. package/dist/utilities/packed.d.ts +1 -4
  211. package/dist/utilities/packed.js +10 -745
  212. package/dist/utilities/performance.js +1 -1
  213. package/dist/utilities/profile.js +1 -1
  214. package/dist/utilities/safetensors.js +2 -2
  215. package/dist/utilities/sentences.js +5 -5
  216. package/dist/utilities/weights.js +2 -2
  217. package/dist/{variable-DzfrwYuP.js → variable-DQ9yYgEU.js} +1 -1
  218. package/dist/{webgpu_program-DzaQiqel.js → webgpu_program-CAE4RICo.js} +177 -171
  219. package/dist/{webgpu_util-0_ubCEHJ.js → webgpu_util-BdovYhXr.js} +34 -35
  220. package/dist/zeros-DeiE2zTa.js +13 -0
  221. package/dist/zeros_like-BAz3iKru.js +721 -0
  222. package/package.json +4 -2
  223. package/dist/Reshape-CDVLyVfz.js +0 -16
  224. package/dist/broadcast_to-B0ChcDaz.js +0 -30
  225. package/dist/complex-BBiRlsVq.js +0 -13
  226. package/dist/concat-DmBLPVGC.js +0 -19
  227. package/dist/dropout-B1x1kYMa.js +0 -99
  228. package/dist/expand_dims-ouvfxQ1n.js +0 -13
  229. package/dist/gather-CH9sdacz.js +0 -10
  230. package/dist/index-D6Q1lPZO.js +0 -2157
  231. package/dist/log_sum_exp-D3ftBNY5.js +0 -41
  232. package/dist/mat_mul-C59XWcJd.js +0 -12
  233. package/dist/mod-DESSvHIU.js +0 -12
  234. package/dist/mulmat_packed_gpu-Coh6qbJk.js +0 -55
  235. package/dist/ones-jU9jlQvM.js +0 -15
  236. package/dist/ops-BFDtP6th.js +0 -645
  237. package/dist/pack16-CmVZs6af.js +0 -41
  238. package/dist/patches/PackedTensor.d.ts +0 -12
  239. package/dist/patches/PackedTensor.js +0 -11
  240. package/dist/patches/engine.d.ts +0 -261
  241. package/dist/patches/engine.js +0 -12
  242. package/dist/patches/tape.d.ts +0 -12
  243. package/dist/patches/tape.js +0 -5
  244. package/dist/range-ZZZD60Fx.js +0 -11
  245. package/dist/reciprocal-CrYlsAGD.js +0 -10
  246. package/dist/register_all_kernels-nvj2k7OC.js +0 -12307
  247. package/dist/relu-BYDneVPn.js +0 -10
  248. package/dist/reshape-CaPQzFvz.js +0 -10
  249. package/dist/rope-s4W2XO9B.js +0 -32
  250. package/dist/selu_util-BGPXmd4B.js +0 -303
  251. package/dist/sin-Djs4aQiu.js +0 -16
  252. package/dist/slice-DvovR5wq.js +0 -13
  253. package/dist/split-DBck65sX.js +0 -10
  254. package/dist/squeeze-C00Ipm_7.js +0 -11
  255. package/dist/stack-ChnHwRpX.js +0 -13
  256. package/dist/sum-ywRJj3Zr.js +0 -12
  257. package/dist/tensor-CzmOBsdf.js +0 -909
  258. package/dist/tensor1d-BlUT89BP.js +0 -12
  259. package/dist/tensor_util-DfwaWayG.js +0 -523
  260. package/dist/tile-CR074jmp.js +0 -13
  261. package/dist/transpose-DH4gmHvu.js +0 -38
  262. package/dist/zeros-DBFVbpv5.js +0 -14
package/dist/tile-CvN_LyVr.js
@@ -0,0 +1,11 @@
+ import { q as e, u as a, y as i, E as c, T as u } from "./index-D0RBWjq8.js";
+ function l(r, t) {
+ const n = a(r, "x", "tile", "string_or_numeric");
+ i(n.rank === t.length, () => `Error in transpose: rank of input ${n.rank} must match length of reps ${t}.`);
+ const s = { x: n }, o = { reps: t };
+ return c.runKernel(u, s, o);
+ }
+ const p = /* @__PURE__ */ e({ tile_: l });
+ export {
+ p as t
+ };
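The hunk above is the bundler-minified chunk for the tfjs tile op. A deminified sketch for readability — every identifier is an assumption mapped from the single-letter imports (`e` = op factory, `a` = tensor conversion, `i` = assert, `c` = engine, `u` = Tile kernel name), declared here as placeholders rather than real exports:

```ts
// Deminified sketch of tile-CvN_LyVr.js; all names are guesses at tfjs internals.
declare const op: <T>(fns: Record<string, T>) => T;
declare const convertToTensor: (
    x: unknown, name: string, opName: string, dtype: string) => { rank: number };
declare const assert: (cond: boolean, msg: () => string) => void;
declare const ENGINE: { runKernel(name: string, inputs: object, attrs: object): unknown };
declare const Tile: string;

function tile_(x: unknown, reps: number[]) {
    const $x = convertToTensor(x, 'x', 'tile', 'string_or_numeric');
    // "Error in transpose" is carried over verbatim from the upstream tfjs tile op.
    assert($x.rank === reps.length,
        () => `Error in transpose: rank of input ${$x.rank} must match length of reps ${reps}.`);
    return ENGINE.runKernel(Tile, { x: $x }, { reps });
}

export const tile = op({ tile_ });
```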
package/dist/tokeniser/BaseTokeniser.d.ts
@@ -0,0 +1,27 @@
+ import { Conversation, ITokeniser } from './type';
+ import { default as EE } from 'eventemitter3';
+ export declare const SPECIALS: string[];
+ export default abstract class BaseTokeniser extends EE<'trainStatus'> implements ITokeniser {
+ protected specialTokens: Map<string, number>;
+ protected specialTokenSet: Set<number>;
+ abstract vocabSize: number;
+ abstract eosToken: number;
+ abstract bosToken: number;
+ abstract trained: boolean;
+ abstract addToken(token: string, index?: number): number;
+ isSpecialToken(index: number): boolean;
+ protected addSpecialTokens(): void;
+ protected addSpecialToken(token: string, index: number): void;
+ abstract train(text: string[]): Promise<number>;
+ abstract tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
+ abstract detokenise(tokens: string[][] | number[][]): Promise<string[]>;
+ abstract getVocab(): string[];
+ abstract getMerges(): Promise<[string, string][]>;
+ abstract destroy(): void;
+ abstract encode(text: string): Promise<number[]>;
+ encodeSequence(text: string): Promise<number[]>;
+ encodeConversation(conversation: Conversation[], completion?: boolean): Promise<number[]>;
+ abstract decode(tokens: number[]): Promise<string>;
+ decodeConversation(tokens: number[]): Promise<Conversation[]>;
+ getSpecialTokenIndex(token: string): number | undefined;
+ }
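The new abstract BaseTokeniser centralises special-token bookkeeping and chat-style encoding for both concrete tokenisers. A hedged usage sketch of the conversation API — the import paths shown are the dist-internal ones (the public export path is an assumption) and the sample strings are illustrative:

```ts
import CharTokeniser from './tokeniser/CharTokeniser'; // dist-internal path; public export may differ
import { Conversation } from './tokeniser/type';

const tok = new CharTokeniser(256);
await tok.train(['hello world']);

const chat: Conversation[] = [
    { role: 'system', content: 'Be brief.' },
    { role: 'user', content: 'hi' },
];

// With completion = true the sequence ends at <|assistant_start|>,
// leaving the assistant turn open for the model to generate.
const ids = await tok.encodeConversation(chat, true);
const roundTrip: Conversation[] = await tok.decodeConversation(ids);
```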
package/dist/tokeniser/BaseTokeniser.js
@@ -0,0 +1,94 @@
+ import { E as r } from "../index-DvYrXKkX.js";
+ const h = [
+ "<eos>",
+ "<bos>",
+ "",
+ "<|user_start|>",
+ "<|user_end|>",
+ "<|assistant_start|>",
+ "<|assistant_end|>",
+ "<|system_start|>",
+ "<|system_end|>"
+ ];
+ class k extends r {
+ specialTokens = /* @__PURE__ */ new Map();
+ specialTokenSet = /* @__PURE__ */ new Set();
+ isSpecialToken(e) {
+ return this.specialTokenSet.has(e);
+ }
+ addSpecialTokens() {
+ h.forEach((e, t) => {
+ this.addToken(e, t), this.specialTokens.set(e, t), this.specialTokenSet.add(t);
+ });
+ }
+ addSpecialToken(e, t) {
+ this.specialTokens.set(e, t), this.specialTokenSet.add(t);
+ }
+ async encodeSequence(e) {
+ const t = await this.encode(e);
+ return [this.bosToken, ...t, this.eosToken];
+ }
+ async encodeConversation(e, t) {
+ const s = [[this.bosToken]], a = [
+ this.getSpecialTokenIndex("<|user_start|>"),
+ this.getSpecialTokenIndex("<|assistant_start|>"),
+ this.getSpecialTokenIndex("<|system_start|>")
+ ], n = [
+ this.getSpecialTokenIndex("<|user_end|>"),
+ this.getSpecialTokenIndex("<|assistant_end|>"),
+ this.getSpecialTokenIndex("<|system_end|>")
+ ];
+ for (const i of e) {
+ const c = await this.encode(i.content);
+ switch (i.role) {
+ case "user":
+ s.push([a[0]]);
+ break;
+ case "assistant":
+ s.push([a[1]]);
+ break;
+ case "system":
+ s.push([a[2]]);
+ break;
+ }
+ switch (s.push(c), i.role) {
+ case "user":
+ s.push([n[0]]);
+ break;
+ case "assistant":
+ s.push([n[1]]);
+ break;
+ case "system":
+ s.push([n[2]]);
+ break;
+ }
+ }
+ const o = s.flat();
+ return t ? o.push(a[1]) : o.push(this.eosToken), o;
+ }
+ async decodeConversation(e) {
+ const t = [];
+ let s = 0;
+ for (; s < e.length; ) {
+ const a = e[s];
+ let n = null;
+ if (a === this.getSpecialTokenIndex("<|user_start|>") ? n = "user" : a === this.getSpecialTokenIndex("<|assistant_start|>") ? n = "assistant" : a === this.getSpecialTokenIndex("<|system_start|>") && (n = "system"), n) {
+ s++;
+ const o = [];
+ for (; s < e.length && e[s] !== this.getSpecialTokenIndex(`<|${n}_end|>`); )
+ o.push(e[s]), s++;
+ const i = await this.decode(o);
+ t.push({ role: n, content: i });
+ }
+ s++;
+ }
+ return t;
+ }
+ getSpecialTokenIndex(e) {
+ return this.specialTokens.get(e);
+ }
+ }
+ export {
+ h as SPECIALS,
+ k as default
+ };
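Unpicking the minified encodeConversation above: the sequence starts with <bos>, each turn is framed by its role-specific start/end specials, and the tail is <eos>, or <|assistant_start|> when the completion flag is set. A readable restatement (a sketch; names are deminified guesses):

```ts
type Role = 'user' | 'assistant' | 'system';
interface Turn { role: Role; content: string }
interface Tok {
    bosToken: number;
    eosToken: number;
    encode(text: string): Promise<number[]>;
    getSpecialTokenIndex(token: string): number | undefined;
}

// Equivalent to the minified encodeConversation above (sketch).
async function encodeConversation(tok: Tok, turns: Turn[], completion?: boolean) {
    const out: number[] = [tok.bosToken];
    for (const turn of turns) {
        out.push(tok.getSpecialTokenIndex(`<|${turn.role}_start|>`)!);
        out.push(...await tok.encode(turn.content));
        out.push(tok.getSpecialTokenIndex(`<|${turn.role}_end|>`)!);
    }
    // completion: leave the sequence open at an assistant turn for generation.
    out.push(completion ? tok.getSpecialTokenIndex('<|assistant_start|>')! : tok.eosToken);
    return out;
}
```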
package/dist/tokeniser/CharTokeniser.d.ts
@@ -1,14 +1,15 @@
- import { default as EE } from 'eventemitter3';
- import { ITokeniser } from './type';
- export default class CharTokeniser extends EE<'trainStatus'> implements ITokeniser {
+ import { default as BaseTokeniser } from './BaseTokeniser';
+ export default class CharTokeniser extends BaseTokeniser {
  vocabSize: number;
  eosToken: number;
+ bosToken: number;
  unkToken: number;
  vocab: string[];
  private cache;
  private _trained;
  constructor(vocabSize: number);
  constructor(vocab: string[]);
+ addToken(token: string, index?: number): number;
  get trained(): boolean;
  destroy(): void;
  train(text: string[]): Promise<number>;
package/dist/tokeniser/CharTokeniser.js
@@ -1,66 +1,80 @@
- import { E as k } from "../index-DvYrXKkX.js";
+ import k, { SPECIALS as d } from "./BaseTokeniser.js";
  const u = ["<eos>", "<unk>"];
  class b extends k {
  vocabSize = 0;
  eosToken = 0;
+ bosToken = 0;
  unkToken = 0;
  vocab = [];
  cache = /* @__PURE__ */ new Map();
  _trained = !1;
- constructor(t) {
- if (super(), Array.isArray(t)) {
- if (this.vocab = t, this.vocab.length > 0)
- this.vocabSize = this.vocab.length, this.eosToken = this.vocab.indexOf("<eos>"), this.unkToken = this.vocab.indexOf(""), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((e) => e === "<pad>" ? "" : e), this.vocab.forEach((e, n) => {
- this.cache.set(e, n);
+ constructor(s) {
+ if (super(), Array.isArray(s)) {
+ if (this.vocab = s, this.vocab.length > 0)
+ this.vocabSize = this.vocab.length, d.forEach((t) => {
+ const e = this.vocab.indexOf(t);
+ e !== -1 && this.addSpecialToken(t, e);
+ }), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex("") ?? -1, this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((t) => t === "<pad>" ? "" : t), this.vocab.forEach((t, e) => {
+ this.cache.set(t, e);
  });
  else
  throw new Error("Vocab cannot be empty");
  this._trained = !0;
  } else
- this.vocabSize = t, this.vocab = new Array(this.vocabSize).fill(""), this.vocab[0] = "<eos>", this.vocab[1] = "", this.eosToken = 0, this.unkToken = 1, this.cache.set("<eos>", 0), this.cache.set("", 1);
+ this.vocabSize = s, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
+ this.cache.set(t, e);
+ }), this.cache.set("", this.unkToken);
+ }
+ addToken(s, t) {
+ if (this.cache.has(s))
+ return this.cache.get(s);
+ let e;
+ if (t !== void 0 ? e = t : (e = this.vocab.indexOf("", this.unkToken + 1), e === -1 && (e = this.vocabSize)), e >= this.vocabSize)
+ throw new Error("Vocab size exceeded");
+ return this.vocab[e] = s, this.cache.set(s, e), e;
  }
  get trained() {
  return this.vocab.length === this.vocabSize && this._trained;
  }
  destroy() {
  }
- async train(t) {
- const e = t.map((i) => i.split("")).flat(), n = new Set(e), s = Array.from(n), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
+ async train(s) {
+ const t = s.map((n) => n.split("")).flat(), e = new Set(t), i = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
  if (h === -1)
  return this.vocabSize;
- if (this._trained = !0, s.length > o) {
- const i = /* @__PURE__ */ new Map();
- e.forEach((a) => {
- i.set(a, (i.get(a) || 0) + 1);
- }), s.sort((a, r) => (i.get(a) || 0) - (i.get(r) || 0)), s.splice(0, s.length - o);
+ if (this._trained = !0, i.length > o) {
+ const n = /* @__PURE__ */ new Map();
+ t.forEach((a) => {
+ n.set(a, (n.get(a) || 0) + 1);
+ }), i.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), i.splice(0, i.length - o);
  }
  let c = h;
  if (c !== -1) {
- const i = new Set(this.vocab);
- for (const a of s)
- if (!i.has(a) && (this.vocab[c] = a, i.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
+ const n = new Set(this.vocab);
+ for (const a of i)
+ if (!n.has(a) && (this.vocab[c] = a, n.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
  break;
  }
- return this.cache.clear(), this.vocab.forEach((i, a) => {
- this.cache.set(i, a);
+ return this.cache.clear(), this.vocab.forEach((n, a) => {
+ this.cache.set(n, a);
  }), this.emit("trainStatus", "trained"), this.vocabSize;
  }
- async tokenise(t, e) {
+ async tokenise(s, t) {
  if (!this.trained)
  throw new Error("Tokeniser not trained");
- return t.map((s) => e ? s.split("").map((h) => this.cache.get(h) ?? this.unkToken) : s.split("").map((h) => {
+ return s.map((i) => t ? i.split("").map((h) => this.cache.get(h) ?? this.unkToken) : i.split("").map((h) => {
  const o = this.cache.get(h);
  return o !== void 0 ? this.vocab[o] : "";
  }));
  }
- async detokenise(t) {
- return t.map((n) => n.map((s) => this.vocab[s]).join(""));
+ async detokenise(s) {
+ return s.map((e) => e.map((i) => this.vocab[i]).join(""));
  }
- async encode(t) {
- return (await this.tokenise([t], !0))[0];
+ async encode(s) {
+ return (await this.tokenise([s], !0))[0];
  }
- async decode(t) {
- return (await this.detokenise([t]))[0];
+ async decode(s) {
+ return (await this.detokenise([s]))[0];
  }
  getVocab() {
  return this.vocab;
@@ -68,11 +82,11 @@ class b extends k {
  async getMerges() {
  return [];
  }
- async createTrainingData(t, e = 5) {
- const n = await this.tokenise(t, !0), s = [], h = [];
- for (let o = 0; o < n.length - e; o++)
- s.push(...n[o].slice(0, e)), h.push(n[o + 1][0]);
- return [s, h];
+ async createTrainingData(s, t = 5) {
+ const e = await this.tokenise(s, !0), i = [], h = [];
+ for (let o = 0; o < e.length - t; o++)
+ i.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
+ return [i, h];
  }
  }
  export {
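The key behavioural addition in CharTokeniser is addToken: it reuses a cached index if the token is already known, otherwise fills the first unused ("") slot after unkToken (or a caller-supplied index), and throws once the fixed-size vocab is full. A deminified sketch, with names as assumptions:

```ts
// Deminified sketch of the new CharTokeniser.addToken above.
class CharTokeniserSketch {
    vocabSize = 0;
    unkToken = 0;
    vocab: string[] = [];
    cache = new Map<string, number>();

    addToken(token: string, index?: number): number {
        const existing = this.cache.get(token);
        if (existing !== undefined) return existing;
        // Use the caller's index, else the first unused ("") slot after unk;
        // falling off the end triggers the size check below.
        let slot = index !== undefined ? index : this.vocab.indexOf('', this.unkToken + 1);
        if (index === undefined && slot === -1) slot = this.vocabSize;
        if (slot >= this.vocabSize) throw new Error('Vocab size exceeded');
        this.vocab[slot] = token;
        this.cache.set(token, slot);
        return slot;
    }
}
```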
package/dist/tokeniser/bpe.d.ts
@@ -1,6 +1,5 @@
- import { default as EE } from 'eventemitter3';
- import { ITokeniser } from './type';
- export default class BPETokeniser extends EE<'trainStatus'> implements ITokeniser {
+ import { default as BaseTokeniser } from './BaseTokeniser';
+ export default class BPETokeniser extends BaseTokeniser {
  private targetSize;
  private vocab;
  private vocabIndex;
@@ -8,10 +7,12 @@ export default class BPETokeniser extends EE<'trainStatus'> implements ITokenise
  private pretokenMap;
  constructor(vocabSize: number);
  constructor(vocab: string[], merges?: [string, string][]);
+ addToken(token: string, index?: number): number;
  destroy(): void;
  get trained(): boolean;
  get vocabSize(): number;
  get eosToken(): number;
+ get bosToken(): number;
  get unkToken(): number;
  train(text: string[]): Promise<number>;
  getVocab(): string[];
package/dist/tokeniser/bpe.js
@@ -1,68 +1,80 @@
  import l from "../utilities/tokenParse.js";
- import { E as f } from "../index-DvYrXKkX.js";
+ import d, { SPECIALS as f } from "./BaseTokeniser.js";
  function u(o, e) {
  return `${o}-::-${e}`;
  }
- function k(o) {
+ function b(o) {
  const e = /* @__PURE__ */ new Map();
  for (let s = 0; s < o.length; s++) {
  const t = o[s];
- for (let r = 0; r < t.length - 1; r++) {
- const n = u(t[r], t[r + 1]), a = e.get(n) || {
- a: t[r],
- b: t[r + 1],
+ for (let n = 0; n < t.length - 1; n++) {
+ const r = u(t[n], t[n + 1]), i = e.get(r) || {
+ a: t[n],
+ b: t[n + 1],
  count: 0,
  instances: /* @__PURE__ */ new Set()
  };
- a.count += 1, a.instances.add(s), e.set(n, a);
+ i.count += 1, i.instances.add(s), e.set(r, i);
  }
  }
  return { pairs: e, tokens: o };
  }
- function h(o, e, s, t, r) {
- const n = u(e, s);
- if (o.pairs.has(n)) {
- const a = o.pairs.get(n);
- a.count += r, r > 0 ? a.instances.add(t) : a.count <= 0 ? o.pairs.delete(n) : a.instances.delete(t);
+ function h(o, e, s, t, n) {
+ const r = u(e, s);
+ if (o.pairs.has(r)) {
+ const i = o.pairs.get(r);
+ i.count += n, n > 0 ? i.instances.add(t) : i.count <= 0 ? o.pairs.delete(r) : i.instances.delete(t);
  } else
- o.pairs.set(n, { a: e, b: s, count: r, instances: /* @__PURE__ */ new Set([t]) });
+ o.pairs.set(r, { a: e, b: s, count: n, instances: /* @__PURE__ */ new Set([t]) });
  }
- function b(o) {
+ function k(o) {
  let e = null, s = 0;
  for (const t of o.pairs.values())
  t.count > s && (s = t.count, e = t);
  return e;
  }
- function d(o, e) {
+ function m(o, e) {
  return o.map((s) => {
  const t = [];
- for (let r = 0; r < s.length; r++)
- r < s.length - 1 && s[r] === e[0] && s[r + 1] === e[1] ? (t.push(e[0] + e[1]), r++) : t.push(s[r]);
+ for (let n = 0; n < s.length; n++)
+ n < s.length - 1 && s[n] === e[0] && s[n + 1] === e[1] ? (t.push(e[0] + e[1]), n++) : t.push(s[n]);
  return t;
  });
  }
- function m(o, e) {
+ function v(o, e) {
  e.instances.forEach((s) => {
- const t = o.tokens[s], r = [];
- for (let n = 0; n < t.length; n++)
- if (n < t.length - 1 && t[n] === e.a && t[n + 1] === e.b) {
- const a = e.a + e.b;
- r.push(a), n > 0 && (h(o, t[n - 1], e.a, s, -1), h(o, t[n - 1], a, s, 1)), n++, n < t.length - 1 && (h(o, e.b, t[n + 1], s, -1), h(o, a, t[n + 1], s, 1));
+ const t = o.tokens[s], n = [];
+ for (let r = 0; r < t.length; r++)
+ if (r < t.length - 1 && t[r] === e.a && t[r + 1] === e.b) {
+ const i = e.a + e.b;
+ n.push(i), r > 0 && (h(o, t[r - 1], e.a, s, -1), h(o, t[r - 1], i, s, 1)), r++, r < t.length - 1 && (h(o, e.b, t[r + 1], s, -1), h(o, i, t[r + 1], s, 1));
  } else
- r.push(t[n]);
- o.tokens[s] = r;
+ n.push(t[r]);
+ o.tokens[s] = n;
  }), o.pairs.delete(u(e.a, e.b));
  }
- class S extends f {
+ class T extends d {
  targetSize;
  vocab = /* @__PURE__ */ new Set();
  vocabIndex = /* @__PURE__ */ new Map();
  merges = [];
  pretokenMap = /* @__PURE__ */ new Map();
  constructor(e, s) {
- super(), Array.isArray(e) ? (e.forEach((t, r) => {
- this.vocab.add(t), this.vocabIndex.set(t, r);
- }), s && (this.merges = s), this.targetSize = e.length) : (this.vocab.add("<eos>"), this.vocab.add(""), this.targetSize = e);
+ super(), Array.isArray(e) ? (e.forEach((t, n) => {
+ this.vocab.add(t), this.vocabIndex.set(t, n);
+ }), s && (this.merges = s), this.targetSize = e.length, f.forEach((t) => {
+ const n = e.indexOf(t);
+ n !== -1 && this.addSpecialToken(t, n);
+ })) : (this.addSpecialTokens(), this.targetSize = e);
+ }
+ addToken(e, s) {
+ if (this.vocab.has(e))
+ return this.vocabIndex.get(e);
+ {
+ this.vocab.add(e);
+ const t = s !== void 0 ? s : this.vocab.size - 1;
+ return this.vocabIndex.set(e, t), t;
+ }
  }
  destroy() {
  this.vocab.clear(), this.vocabIndex.clear(), this.merges = [], this.pretokenMap.clear();
@@ -76,26 +88,29 @@ class S extends f {
  get eosToken() {
  return this.vocabIndex.get("<eos>") ?? 0;
  }
+ get bosToken() {
+ return this.vocabIndex.get("<bos>") ?? 0;
+ }
  get unkToken() {
  return this.vocabIndex.get("") ?? 1;
  }
  async train(e) {
- const s = e.map((i) => l(i)).flat(1), t = new Set(s);
- this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.vocab.add("<eos>"), this.vocab.add("");
- const r = Array.from(t), n = r.map((i) => Array.from(i).map((c) => (this.vocab.add(c), c))), a = k(n);
+ const s = e.map((a) => l(a)).flat(1), t = new Set(s);
+ this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.addSpecialTokens();
+ const n = Array.from(t), r = n.map((a) => Array.from(a).map((c) => (this.vocab.add(c), c))), i = b(r);
  for (; this.vocab.size < this.targetSize && this.merges.length < this.targetSize; ) {
- const i = b(a);
- if (!i)
+ const a = k(i);
+ if (!a)
  break;
- this.merges.push([i.a, i.b]), this.vocab.add(i.a + i.b), m(a, i);
+ this.merges.push([a.a, a.b]), this.vocab.add(a.a + a.b), v(i, a);
  }
- r.forEach((i, p) => {
- const c = n[p];
- this.pretokenMap.set(i, c);
+ n.forEach((a, p) => {
+ const c = r[p];
+ this.pretokenMap.set(a, c);
  }), this.vocabIndex.clear();
  let g = 0;
- for (const i of this.vocab.keys())
- this.vocabIndex.set(i, g++);
+ for (const a of this.vocab.keys())
+ this.vocabIndex.set(a, g++);
  return this.emit("trainStatus", "trained"), this.vocab.size;
  }
  getVocab() {
@@ -107,19 +122,19 @@ class S extends f {
  tokeniseWord(e) {
  let s = Array.from(e);
  return this.merges.forEach((t) => {
- s = d([s], t)[0];
+ s = m([s], t)[0];
  }), this.pretokenMap.set(e, s), s;
  }
  tokeniseStrings(e) {
- return e.map((s) => l(s).map((n) => this.pretokenMap.has(n) ? this.pretokenMap.get(n) : this.tokeniseWord(n)).flat(1));
+ return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
  }
  async tokenise(e, s) {
  const t = this.tokeniseStrings(e);
- return s ? t.map((r) => r.map((n) => this.vocabIndex.get(n) ?? this.unkToken)) : t.map((r) => r.map((n) => this.vocab.has(n) ? n : ""));
+ return s ? t.map((n) => n.map((r) => this.vocabIndex.get(r) ?? this.unkToken)) : t.map((n) => n.map((r) => this.vocab.has(r) ? r : ""));
  }
  async detokenise(e) {
  const s = this.getVocab();
- return e.map((r) => r.map((n) => s[n]).join(""));
+ return e.map((n) => n.map((r) => s[r]).join(""));
  }
  async encode(e) {
  return (await this.tokenise([e], !0))[0];
@@ -129,5 +144,5 @@ class S extends f {
  }
  }
  export {
- S as default
+ T as default
  };
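Most of the bpe.js diff is an alpha-rename plus the BaseTokeniser hookup; the BPE core is unchanged: count adjacent pairs, repeatedly merge the most frequent pair, and record each merge. The bundle keeps the pair counts incrementally up to date (the h helper above); the sketch below recounts each round for clarity, so its structure and names are assumptions rather than a transcription:

```ts
// Sketch of the BPE training loop implemented by the minified helpers above.
function trainBPE(words: string[][], targetSize: number): [string, string][] {
    const merges: [string, string][] = [];
    const vocab = new Set<string>(words.flat());
    while (vocab.size < targetSize && merges.length < targetSize) {
        // Count adjacent pairs across all words.
        const counts = new Map<string, { a: string; b: string; count: number }>();
        for (const w of words) {
            for (let i = 0; i < w.length - 1; i++) {
                const key = `${w[i]}-::-${w[i + 1]}`; // same pair key as the bundle's u()
                const entry = counts.get(key) ?? { a: w[i], b: w[i + 1], count: 0 };
                entry.count++;
                counts.set(key, entry);
            }
        }
        // Pick the most frequent pair; stop when nothing is left to merge.
        let best: { a: string; b: string; count: number } | null = null;
        for (const e of counts.values()) if (!best || e.count > best.count) best = e;
        if (!best) break;
        merges.push([best.a, best.b]);
        vocab.add(best.a + best.b);
        // Apply the merge to every word.
        words = words.map((w) => {
            const out: string[] = [];
            for (let i = 0; i < w.length; i++) {
                if (i < w.length - 1 && w[i] === best!.a && w[i + 1] === best!.b) {
                    out.push(best!.a + best!.b);
                    i++;
                } else out.push(w[i]);
            }
            return out;
        });
    }
    return merges;
}
```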
package/dist/tokeniser/type.d.ts
@@ -1,4 +1,9 @@
  import { default as EE } from 'eventemitter3';
+ export type Roles = 'user' | 'assistant' | 'system';
+ export interface Conversation {
+ role: Roles;
+ content: string;
+ }
  export interface ITokeniser extends EE<'trainStatus'> {
  train(text: string[]): Promise<number>;
  tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
@@ -7,8 +12,14 @@ export interface ITokeniser extends EE<'trainStatus'> {
  getMerges(): Promise<[string, string][]>;
  destroy(): void;
  encode(text: string): Promise<number[]>;
+ encodeConversation(conversation: Conversation[], completion?: boolean): Promise<number[]>;
+ encodeSequence(text: string): Promise<number[]>;
  decode(tokens: number[]): Promise<string>;
+ decodeConversation(tokens: number[]): Promise<Conversation[]>;
  vocabSize: number;
  eosToken: number;
+ bosToken: number;
  trained: boolean;
+ getSpecialTokenIndex(token: string): number | undefined;
+ isSpecialToken(index: number): boolean;
  }
package/dist/training/Adam.js
@@ -1,7 +1,7 @@
  import { adamAdjust as b } from "../ops/adamAdjust.js";
  import { adamMoments as d } from "../ops/adamMoments.js";
- import { O as g, e as h, t as o, d as B } from "../index-D6Q1lPZO.js";
- import { z as M } from "../zeros-DBFVbpv5.js";
+ import { O as g, e as h, t as o, d as B } from "../index-D0RBWjq8.js";
+ import { z as M } from "../zeros-DeiE2zTa.js";
  class R extends g {
  constructor(t, a, e, s, i = null) {
  super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());
package/dist/training/AdamExt.js
@@ -1,4 +1,4 @@
- import { m as r, b as c, c as h, e as o } from "../index-D6Q1lPZO.js";
+ import { m as r, b as c, c as h, e as o } from "../index-D0RBWjq8.js";
  import { AdamOptimizer as g } from "./Adam.js";
  class y extends g {
  constructor(t, e, s, i, a) {
package/dist/training/DatasetBuilder.d.ts
@@ -1,8 +1,8 @@
  import { Tensor } from '@tensorflow/tfjs-core';
- import { ITokeniser } from '../tokeniser/type';
+ import { Conversation, ITokeniser } from '../tokeniser/type';
  import { Dataset } from '@tensorflow/tfjs-data';
  export declare const PAGE_FACTOR = 8;
- export declare function flattenTokens(textData: string[], tokenizer: ITokeniser): Promise<number[]>;
+ export declare function flattenTokens(textData: Conversation[][], tokenizer: ITokeniser): Promise<number[]>;
  export declare class DatasetBuilder {
  tokenizer: ITokeniser;
  blockSize: number;
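This is a breaking signature change: flattenTokens now takes Conversation[][] (one conversation per document) instead of string[], and, per the implementation diff below, tokenises via encodeConversation and no longer validates token indices against vocabSize. A hedged migration sketch for callers that previously passed plain strings:

```ts
import { flattenTokens } from './training/DatasetBuilder'; // dist path; public export path is an assumption
import { Conversation, ITokeniser } from './tokeniser/type';

declare const tokenizer: ITokeniser; // any trained tokeniser from this package

// 0.10.x accepted plain strings: flattenTokens(['doc one', 'doc two'], tokenizer)
// 0.11.0 wants one Conversation[] per document; wrapping each string as a
// single user turn is an assumed migration, not documented behaviour.
const docs: Conversation[][] = ['doc one', 'doc two'].map((content) => [
    { role: 'user', content },
]);
const tokens = await flattenTokens(docs, tokenizer);
```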
package/dist/training/DatasetBuilder.js
@@ -1,67 +1,63 @@
- import { t as g } from "../index-D6Q1lPZO.js";
- import { d as u, i as d } from "../dataset-D2P7rHAw.js";
+ import { t as z } from "../index-D0RBWjq8.js";
+ import { d as u, i as f } from "../dataset-DcjWqUVQ.js";
  import "../index-Cp39cXWe.js";
- function z(r) {
+ function S(a) {
  return u(async () => {
- const t = await r();
- return d(() => t.next());
+ const t = await a();
+ return f(() => t.next());
  });
  }
- const S = 8;
- async function y(r, t) {
- const s = await Promise.all(r.map((e) => t.encode(e))), o = t.eosToken >= 0, a = s.map((e) => o ? [...e, t.eosToken] : e).flat();
- for (const e of a)
- if (e < 0 || e >= t.vocabSize)
- throw new Error(`Invalid token index ${e} found in tokenised data`);
- return a;
+ const b = 8;
+ async function y(a, t) {
+ return (await Promise.all(a.map((r) => t.encodeConversation(r)))).flat();
  }
- class w {
+ class x {
  tokenizer;
  blockSize;
  pageSize;
  constructor(t, s = 128) {
- this.tokenizer = t, this.blockSize = s, this.pageSize = s * S;
+ this.tokenizer = t, this.blockSize = s, this.pageSize = s * b;
  }
  // Create dataset from text files
- async createTextDataset(t, s = 32, o, a) {
+ async createTextDataset(t, s = 32, i, r) {
  if (t.length < this.blockSize + 1)
  throw new Error(`Not enough tokens (${t.length}) for block size ${this.blockSize}`);
- if (o && o.size > t.length / this.pageSize / 2)
+ if (i && i.size > t.length / this.pageSize / 2)
  throw new Error("Too many masked pages - would leave insufficient training data");
- const e = (function* () {
- if (o && a) {
- const i = Array.from(o);
+ const l = (function* () {
+ if (i && r) {
+ const e = Array.from(i);
  for (; ; ) {
- const c = Math.floor(Math.random() * i.length), l = Math.floor(Math.random() * this.pageSize), n = i[c] * this.pageSize + l;
- if (n + this.blockSize + 1 > t.length)
+ const n = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), o = e[n] * this.pageSize + h;
+ if (o + this.blockSize + 1 > t.length)
  continue;
- const h = t.slice(n, n + this.blockSize), f = t.slice(n + 1, n + this.blockSize + 1);
- yield { xs: h, ys: f };
+ const c = t.slice(o, o + this.blockSize), g = t.slice(o + 1, o + this.blockSize + 1);
+ yield { xs: c, ys: g };
  }
  } else
  for (; ; ) {
- const i = Math.floor(Math.random() * (t.length - this.blockSize - 1));
- if (o) {
- const n = Math.floor(i / this.pageSize), h = o.has(n);
- if (h && !a || !h && a)
+ const e = Math.floor(Math.random() * (t.length - this.blockSize - 1));
+ if (i) {
+ const o = Math.floor(e / this.pageSize), c = i.has(o);
+ if (c && !r || !c && r)
  continue;
  }
- const c = t.slice(i, i + this.blockSize), l = t.slice(i + 1, i + this.blockSize + 1);
- yield { xs: c, ys: l };
+ const n = t.slice(e, e + this.blockSize), h = t.slice(e + 1, e + this.blockSize + 1);
+ yield { xs: n, ys: h };
  }
  }).bind(this);
- return z(e).batch(s).map((i) => {
- const c = i;
- return g(() => ({
- xs: c.xs.cast("int32"),
- ys: c.ys.cast("int32")
+ return S(l).batch(s).map((e) => {
+ const n = e;
+ return z(() => ({
+ xs: n.xs.cast("int32"),
+ ys: n.ys.cast("int32")
  // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
  }));
  }).prefetch(2);
  }
  }
  export {
- w as DatasetBuilder,
- S as PAGE_FACTOR,
+ x as DatasetBuilder,
+ b as PAGE_FACTOR,
  y as flattenTokens
  };
package/dist/training/FullTrainer.js
@@ -1,6 +1,6 @@
  import b from "./Trainer.js";
  import L from "./Evaluator.js";
- import { d as w } from "../index-D6Q1lPZO.js";
+ import { d as w } from "../index-D0RBWjq8.js";
  import y from "../utilities/profile.js";
  import { createTensorStatistics as D } from "../checks/weights.js";
  const T = {