@genai-fi/nanogpt 0.10.3 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (225)
  1. package/dist/Generator.d.ts +10 -5
  2. package/dist/Generator.js +1789 -1765
  3. package/dist/{RealDiv-KAPDe8zB.js → RealDiv-Ds-jvL09.js} +22 -22
  4. package/dist/{Reshape-BYkmUnAv.js → Reshape-Cd6e-Otn.js} +1 -1
  5. package/dist/{Reshape-Zt6eb7yh.js → Reshape-Ct266DEk.js} +9 -9
  6. package/dist/TeachableLLM.d.ts +4 -3
  7. package/dist/TeachableLLM.js +14 -14
  8. package/dist/Trainer.d.ts +2 -2
  9. package/dist/Trainer.js +6 -6
  10. package/dist/{axis_util-BaG7mf5A.js → axis_util-DofAuy0p.js} +3 -3
  11. package/dist/backend.js +2 -2
  12. package/dist/{backend_util-RCe-rHaj.js → backend_util-C7NWHpv7.js} +7 -7
  13. package/dist/{backend_webgpu-DE3ACOLx.js → backend_webgpu-B0Vls736.js} +10 -10
  14. package/dist/{broadcast_to-B3eYlZm7.js → broadcast_to-DDaNMbX7.js} +2 -2
  15. package/dist/checks/appendCache.js +2 -2
  16. package/dist/checks/attentionMask.js +3 -3
  17. package/dist/checks/gelu.js +2 -2
  18. package/dist/checks/matMulGelu.js +2 -2
  19. package/dist/checks/normRMS.js +4 -4
  20. package/dist/checks/normRMSGrad.js +3 -3
  21. package/dist/checks/packUnpack.js +2 -2
  22. package/dist/checks/qkv.js +4 -4
  23. package/dist/checks/rope.js +2 -2
  24. package/dist/{clip_by_value-BnO7-a88.js → clip_by_value-Dn5tzexi.js} +4 -4
  25. package/dist/complex-DClmWqJt.js +11 -0
  26. package/dist/{concat-BV8bt5H-.js → concat-C6X3AAlQ.js} +1 -1
  27. package/dist/{concat_util-DpW8mL_l.js → concat_util-CHsJFZJJ.js} +1 -1
  28. package/dist/{dataset-BcwmTGYc.js → dataset-DcjWqUVQ.js} +7 -7
  29. package/dist/{dropout-BcvN9JYi.js → dropout-OxuaJz6z.js} +11 -11
  30. package/dist/{expand_dims-DT4tEPwA.js → expand_dims-BzfJK2uc.js} +3 -3
  31. package/dist/{exports_initializers-Hta_rEnm.js → exports_initializers-eS9QJ6ut.js} +1 -1
  32. package/dist/{floor-D5QdR_le.js → floor-DIb-lN_u.js} +1 -1
  33. package/dist/gather-BcO5UQNJ.js +9 -0
  34. package/dist/{gelu-CjNPL4OH.js → gelu-DqTbCx5x.js} +1 -1
  35. package/dist/{gpgpu_math-DAOmgtXR.js → gpgpu_math-CJcbnKPC.js} +2 -2
  36. package/dist/{index-DOvlwCh-.js → index-D0RBWjq8.js} +52 -52
  37. package/dist/{index-BwexR4lA.js → index-Dj5TkmPY.js} +89 -89
  38. package/dist/{kernel_funcs_utils-CCzYdUZg.js → kernel_funcs_utils-CSaumNDs.js} +11 -11
  39. package/dist/layers/BaseLayer.js +2 -2
  40. package/dist/layers/CausalSelfAttention.js +6 -6
  41. package/dist/layers/MLP.js +4 -4
  42. package/dist/layers/PositionEmbedding.js +5 -5
  43. package/dist/layers/RMSNorm.js +3 -3
  44. package/dist/layers/RoPECache.js +4 -4
  45. package/dist/layers/TiedEmbedding.js +6 -6
  46. package/dist/layers/TransformerBlock.js +1 -1
  47. package/dist/loader/loadTransformers.js +1 -1
  48. package/dist/loader/oldZipLoad.js +17 -17
  49. package/dist/log_sum_exp-VLZgbFAH.js +39 -0
  50. package/dist/main.d.ts +1 -1
  51. package/dist/main.js +9 -9
  52. package/dist/{matMul16-BWRSOCWB.js → matMul16-cDxwemKj.js} +7 -7
  53. package/dist/{matMulGelu-CzfgT6Wq.js → matMulGelu-B2s_80-H.js} +18 -18
  54. package/dist/{mat_mul-SjpJRLyL.js → mat_mul-DxpNTCRz.js} +3 -3
  55. package/dist/{mod-AnXEvvpo.js → mod-PrOKlFxH.js} +1 -1
  56. package/dist/models/NanoGPTV1.js +2 -2
  57. package/dist/models/model.js +9 -9
  58. package/dist/{ones-D2rT0xk2.js → ones-BX_wEgzB.js} +3 -3
  59. package/dist/ops/adamAdjust.js +1 -1
  60. package/dist/ops/adamMoments.js +1 -1
  61. package/dist/ops/add16.js +1 -1
  62. package/dist/ops/appendCache.js +3 -3
  63. package/dist/ops/attentionMask.js +1 -1
  64. package/dist/ops/concat16.js +2 -2
  65. package/dist/ops/cpu/adamAdjust.js +6 -6
  66. package/dist/ops/cpu/adamMoments.js +2 -2
  67. package/dist/ops/cpu/appendCache.js +5 -5
  68. package/dist/ops/cpu/attentionMask.js +10 -10
  69. package/dist/ops/cpu/fusedSoftmax.js +2 -2
  70. package/dist/ops/cpu/gatherSub.js +6 -6
  71. package/dist/ops/cpu/gelu.js +9 -9
  72. package/dist/ops/cpu/matMul16.js +2 -2
  73. package/dist/ops/cpu/matMulGelu.js +3 -3
  74. package/dist/ops/cpu/matMulMul.js +1 -1
  75. package/dist/ops/cpu/mulDropout.js +1 -1
  76. package/dist/ops/cpu/normRMS.js +3 -3
  77. package/dist/ops/cpu/qkv.js +3 -3
  78. package/dist/ops/cpu/rope.js +9 -9
  79. package/dist/ops/cpu/scatterSub.js +11 -11
  80. package/dist/ops/dot16.js +2 -2
  81. package/dist/ops/gatherSub.js +1 -1
  82. package/dist/ops/gelu.js +2 -2
  83. package/dist/ops/grads/add16.js +4 -4
  84. package/dist/ops/grads/attentionMask.js +2 -2
  85. package/dist/ops/grads/gelu.js +2 -2
  86. package/dist/ops/grads/matMul16.js +3 -3
  87. package/dist/ops/grads/matMulGelu.js +3 -3
  88. package/dist/ops/grads/normRMS.js +7 -7
  89. package/dist/ops/grads/pack16.js +3 -3
  90. package/dist/ops/grads/qkv.js +6 -6
  91. package/dist/ops/grads/rope.js +2 -2
  92. package/dist/ops/grads/softmax16.js +1 -1
  93. package/dist/ops/grads/unpack16.js +2 -2
  94. package/dist/ops/matMul16.js +3 -3
  95. package/dist/ops/matMulGelu.js +2 -2
  96. package/dist/ops/matMulMul.js +1 -1
  97. package/dist/ops/mul16.js +1 -1
  98. package/dist/ops/mulDrop.js +1 -1
  99. package/dist/ops/normRMS.js +1 -1
  100. package/dist/ops/pack16.js +2 -2
  101. package/dist/ops/qkv.js +1 -1
  102. package/dist/ops/reshape16.js +6 -6
  103. package/dist/ops/rope.js +2 -2
  104. package/dist/ops/scatterSub.js +1 -1
  105. package/dist/ops/slice16.js +2 -2
  106. package/dist/ops/softmax16.js +1 -1
  107. package/dist/ops/sub16.js +1 -1
  108. package/dist/ops/sum16.js +2 -2
  109. package/dist/ops/transpose16.js +3 -3
  110. package/dist/ops/unpack16.js +2 -2
  111. package/dist/ops/webgl/adamAdjust.js +2 -2
  112. package/dist/ops/webgl/adamMoments.js +1 -1
  113. package/dist/ops/webgl/appendCache.js +1 -1
  114. package/dist/ops/webgl/attentionMask.js +4 -4
  115. package/dist/ops/webgl/fusedSoftmax.js +6 -6
  116. package/dist/ops/webgl/gatherSub.js +1 -1
  117. package/dist/ops/webgl/gelu.js +2 -2
  118. package/dist/ops/webgl/log.js +3 -3
  119. package/dist/ops/webgl/matMul16.js +11 -11
  120. package/dist/ops/webgl/matMulGelu.js +4 -4
  121. package/dist/ops/webgl/matMulMul.js +7 -7
  122. package/dist/ops/webgl/mulDropout.js +1 -1
  123. package/dist/ops/webgl/normRMS.js +7 -7
  124. package/dist/ops/webgl/qkv.js +1 -1
  125. package/dist/ops/webgl/rope.js +4 -4
  126. package/dist/ops/webgl/scatterSub.js +1 -1
  127. package/dist/ops/webgpu/adamAdjust.js +3 -3
  128. package/dist/ops/webgpu/adamMoments.js +3 -3
  129. package/dist/ops/webgpu/add16.js +1 -1
  130. package/dist/ops/webgpu/appendCache.js +3 -3
  131. package/dist/ops/webgpu/attentionMask.js +5 -5
  132. package/dist/ops/webgpu/attentionMask32_program.js +2 -2
  133. package/dist/ops/webgpu/concat16.js +5 -5
  134. package/dist/ops/webgpu/gatherSub.js +5 -5
  135. package/dist/ops/webgpu/gelu.js +3 -3
  136. package/dist/ops/webgpu/matMul16.js +18 -18
  137. package/dist/ops/webgpu/matMul16_program.js +2 -2
  138. package/dist/ops/webgpu/mul16.js +4 -4
  139. package/dist/ops/webgpu/normRMS.js +6 -6
  140. package/dist/ops/webgpu/normRMSGrad.js +4 -4
  141. package/dist/ops/webgpu/pack16.js +1 -1
  142. package/dist/ops/webgpu/pack16_program.js +2 -2
  143. package/dist/ops/webgpu/qkv.js +6 -6
  144. package/dist/ops/webgpu/rope.js +3 -3
  145. package/dist/ops/webgpu/scatterSub.js +3 -3
  146. package/dist/ops/webgpu/slice16.js +4 -4
  147. package/dist/ops/webgpu/softmax16.js +2 -2
  148. package/dist/ops/webgpu/softmax16_program.js +2 -2
  149. package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
  150. package/dist/ops/webgpu/softmax16grad.js +1 -1
  151. package/dist/ops/webgpu/sub16.js +4 -4
  152. package/dist/ops/webgpu/sum16.js +6 -6
  153. package/dist/ops/webgpu/transpose16.js +2 -2
  154. package/dist/ops/webgpu/transpose16_program.js +2 -2
  155. package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
  156. package/dist/ops/webgpu/unpack16.js +3 -3
  157. package/dist/ops/webgpu/utils/binary_op.js +3 -3
  158. package/dist/ops/webgpu/utils/reductions.js +4 -4
  159. package/dist/{ops-B5yanEdW.js → ops-FJapAPfm.js} +56 -56
  160. package/dist/{pack16-nQ6JaLo-.js → pack16-k4jq6aMX.js} +7 -7
  161. package/dist/patches/webgpu_backend.js +7 -7
  162. package/dist/patches/webgpu_base.js +1 -1
  163. package/dist/patches/webgpu_program.js +8 -8
  164. package/dist/{random_width-or-CEftb.js → random_width-UGQn4OWb.js} +33 -33
  165. package/dist/range-CuGvVN2c.js +10 -0
  166. package/dist/{relu-CP0ZcxWO.js → relu-Cf80uA2p.js} +1 -1
  167. package/dist/{reshape-ByE68wS9.js → reshape-CkjKPPqB.js} +1 -1
  168. package/dist/{resize_nearest_neighbor-B19mCEg2.js → resize_nearest_neighbor-DB8k9KN_.js} +43 -43
  169. package/dist/{rope-Ir4mTyD1.js → rope-BmZmp9uP.js} +1 -1
  170. package/dist/{scatter_nd_util-lvSiX8q4.js → scatter_nd_util-BY22Cc-C.js} +1 -1
  171. package/dist/{selu_util-kbhpTdYD.js → selu_util-BuLbmbrl.js} +5 -5
  172. package/dist/{shared-DT1TkE6w.js → shared-B7USJZgw.js} +1 -1
  173. package/dist/{shared-dntlHIDQ.js → shared-BQboIImQ.js} +86 -86
  174. package/dist/{slice-BfEGSH82.js → slice-Aqy7KbJh.js} +3 -3
  175. package/dist/{slice_util-uTKwiEpW.js → slice_util-D8CQRenR.js} +7 -7
  176. package/dist/{softmax-CA5jFsLR.js → softmax-faLoUZVT.js} +1 -1
  177. package/dist/{split-CVLc0w--.js → split-BNz5jcGc.js} +3 -3
  178. package/dist/{squeeze-C7Z2srUo.js → squeeze--YMgaAAf.js} +2 -2
  179. package/dist/{stack-Cf4n9h0N.js → stack-WJK22CFn.js} +1 -1
  180. package/dist/{step-CINUs5QB.js → step-dXR33iOg.js} +32 -32
  181. package/dist/sum-BdplSvq_.js +11 -0
  182. package/dist/tensor-BQqrDvpx.js +8 -0
  183. package/dist/tensor1d-LxP9asMm.js +11 -0
  184. package/dist/{tensor2d-Bs9wZRc7.js → tensor2d-BN1sSfQO.js} +3 -3
  185. package/dist/{tensor4d-BARPdTaS.js → tensor4d-DVwr7pLF.js} +1 -1
  186. package/dist/{tfjs_backend-y1cvNhLA.js → tfjs_backend-Vi4JfLzT.js} +28 -28
  187. package/dist/{tile-mbfagpsB.js → tile-CvN_LyVr.js} +4 -4
  188. package/dist/tokeniser/BaseTokeniser.d.ts +27 -0
  189. package/dist/tokeniser/BaseTokeniser.js +94 -0
  190. package/dist/tokeniser/CharTokeniser.d.ts +4 -3
  191. package/dist/tokeniser/CharTokeniser.js +46 -32
  192. package/dist/tokeniser/bpe.d.ts +4 -3
  193. package/dist/tokeniser/bpe.js +60 -45
  194. package/dist/tokeniser/type.d.ts +11 -0
  195. package/dist/training/Adam.js +2 -2
  196. package/dist/training/AdamExt.js +1 -1
  197. package/dist/training/DatasetBuilder.d.ts +2 -2
  198. package/dist/training/DatasetBuilder.js +32 -36
  199. package/dist/training/FullTrainer.js +1 -1
  200. package/dist/training/Trainer.d.ts +3 -3
  201. package/dist/training/Trainer.js +2 -2
  202. package/dist/training/sparseCrossEntropy.js +3 -3
  203. package/dist/{transpose-ClWiBS_b.js → transpose-JawVKyZy.js} +5 -5
  204. package/dist/{unsorted_segment_sum-BDDhB_E6.js → unsorted_segment_sum-LAbmE9G4.js} +78 -78
  205. package/dist/utilities/dummy.js +3 -3
  206. package/dist/utilities/multinomialCPU.js +2 -2
  207. package/dist/utilities/packed.js +1 -1
  208. package/dist/utilities/performance.js +1 -1
  209. package/dist/utilities/profile.js +1 -1
  210. package/dist/utilities/safetensors.js +2 -2
  211. package/dist/utilities/sentences.js +5 -5
  212. package/dist/utilities/weights.js +2 -2
  213. package/dist/{variable-WawDEaAb.js → variable-DQ9yYgEU.js} +1 -1
  214. package/dist/{webgpu_program-DuOXPQol.js → webgpu_program-CAE4RICo.js} +3 -3
  215. package/dist/{webgpu_util-RxEF33Rj.js → webgpu_util-BdovYhXr.js} +1 -1
  216. package/dist/{zeros-KnWaWf-X.js → zeros-DeiE2zTa.js} +2 -2
  217. package/dist/{zeros_like-DvE73F4e.js → zeros_like-BAz3iKru.js} +77 -77
  218. package/package.json +1 -1
  219. package/dist/complex-DjxcVmoX.js +0 -11
  220. package/dist/gather-D3JcZUaI.js +0 -9
  221. package/dist/log_sum_exp-ngO0-4pK.js +0 -39
  222. package/dist/range-BklejeeW.js +0 -10
  223. package/dist/sum-DWAtNGez.js +0 -11
  224. package/dist/tensor-DJoc7gJU.js +0 -8
  225. package/dist/tensor1d-D11P_7Dp.js +0 -11
package/dist/tokeniser/CharTokeniser.js
@@ -1,66 +1,80 @@
- import { E as k } from "../index-DvYrXKkX.js";
+ import k, { SPECIALS as d } from "./BaseTokeniser.js";
  const u = ["<eos>", "<unk>"];
  class b extends k {
  vocabSize = 0;
  eosToken = 0;
+ bosToken = 0;
  unkToken = 0;
  vocab = [];
  cache = /* @__PURE__ */ new Map();
  _trained = !1;
- constructor(t) {
- if (super(), Array.isArray(t)) {
- if (this.vocab = t, this.vocab.length > 0)
- this.vocabSize = this.vocab.length, this.eosToken = this.vocab.indexOf("<eos>"), this.unkToken = this.vocab.indexOf(""), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((e) => e === "<pad>" ? "" : e), this.vocab.forEach((e, n) => {
- this.cache.set(e, n);
+ constructor(s) {
+ if (super(), Array.isArray(s)) {
+ if (this.vocab = s, this.vocab.length > 0)
+ this.vocabSize = this.vocab.length, d.forEach((t) => {
+ const e = this.vocab.indexOf(t);
+ e !== -1 && this.addSpecialToken(t, e);
+ }), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex("") ?? -1, this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((t) => t === "<pad>" ? "" : t), this.vocab.forEach((t, e) => {
+ this.cache.set(t, e);
  });
  else
  throw new Error("Vocab cannot be empty");
  this._trained = !0;
  } else
- this.vocabSize = t, this.vocab = new Array(this.vocabSize).fill(""), this.vocab[0] = "<eos>", this.vocab[1] = "", this.eosToken = 0, this.unkToken = 1, this.cache.set("<eos>", 0), this.cache.set("", 1);
+ this.vocabSize = s, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
+ this.cache.set(t, e);
+ }), this.cache.set("", this.unkToken);
+ }
+ addToken(s, t) {
+ if (this.cache.has(s))
+ return this.cache.get(s);
+ let e;
+ if (t !== void 0 ? e = t : (e = this.vocab.indexOf("", this.unkToken + 1), e === -1 && (e = this.vocabSize)), e >= this.vocabSize)
+ throw new Error("Vocab size exceeded");
+ return this.vocab[e] = s, this.cache.set(s, e), e;
  }
  get trained() {
  return this.vocab.length === this.vocabSize && this._trained;
  }
  destroy() {
  }
- async train(t) {
- const e = t.map((i) => i.split("")).flat(), n = new Set(e), s = Array.from(n), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
+ async train(s) {
+ const t = s.map((n) => n.split("")).flat(), e = new Set(t), i = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
  if (h === -1)
  return this.vocabSize;
- if (this._trained = !0, s.length > o) {
- const i = /* @__PURE__ */ new Map();
- e.forEach((a) => {
- i.set(a, (i.get(a) || 0) + 1);
- }), s.sort((a, r) => (i.get(a) || 0) - (i.get(r) || 0)), s.splice(0, s.length - o);
+ if (this._trained = !0, i.length > o) {
+ const n = /* @__PURE__ */ new Map();
+ t.forEach((a) => {
+ n.set(a, (n.get(a) || 0) + 1);
+ }), i.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), i.splice(0, i.length - o);
  }
  let c = h;
  if (c !== -1) {
- const i = new Set(this.vocab);
- for (const a of s)
- if (!i.has(a) && (this.vocab[c] = a, i.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
+ const n = new Set(this.vocab);
+ for (const a of i)
+ if (!n.has(a) && (this.vocab[c] = a, n.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
  break;
  }
- return this.cache.clear(), this.vocab.forEach((i, a) => {
- this.cache.set(i, a);
+ return this.cache.clear(), this.vocab.forEach((n, a) => {
+ this.cache.set(n, a);
  }), this.emit("trainStatus", "trained"), this.vocabSize;
  }
- async tokenise(t, e) {
+ async tokenise(s, t) {
  if (!this.trained)
  throw new Error("Tokeniser not trained");
- return t.map((s) => e ? s.split("").map((h) => this.cache.get(h) ?? this.unkToken) : s.split("").map((h) => {
+ return s.map((i) => t ? i.split("").map((h) => this.cache.get(h) ?? this.unkToken) : i.split("").map((h) => {
  const o = this.cache.get(h);
  return o !== void 0 ? this.vocab[o] : "";
  }));
  }
- async detokenise(t) {
- return t.map((n) => n.map((s) => this.vocab[s]).join(""));
+ async detokenise(s) {
+ return s.map((e) => e.map((i) => this.vocab[i]).join(""));
  }
- async encode(t) {
- return (await this.tokenise([t], !0))[0];
+ async encode(s) {
+ return (await this.tokenise([s], !0))[0];
  }
- async decode(t) {
- return (await this.detokenise([t]))[0];
+ async decode(s) {
+ return (await this.detokenise([s]))[0];
  }
  getVocab() {
  return this.vocab;
@@ -68,11 +82,11 @@ class b extends k {
  async getMerges() {
  return [];
  }
- async createTrainingData(t, e = 5) {
- const n = await this.tokenise(t, !0), s = [], h = [];
- for (let o = 0; o < n.length - e; o++)
- s.push(...n[o].slice(0, e)), h.push(n[o + 1][0]);
- return [s, h];
+ async createTrainingData(s, t = 5) {
+ const e = await this.tokenise(s, !0), i = [], h = [];
+ for (let o = 0; o < e.length - t; o++)
+ i.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
+ return [i, h];
  }
  }
  export {
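
The hunk above shows CharTokeniser now inheriting from a shared BaseTokeniser, gaining `bosToken` and an `addToken` method that claims a free vocab slot. A minimal usage sketch of the new surface (hedged: the deep-import path is a guess, and the slot-filling behaviour is read off the minified bundle, not documentation):

```ts
// Hypothetical import path; the package's actual export map may differ.
import CharTokeniser from "@genai-fi/nanogpt/tokeniser/CharTokeniser";

async function demo(): Promise<void> {
  const tok = new CharTokeniser(64);        // fixed-size vocab; specials reserved by BaseTokeniser
  await tok.train(["hello world"]);         // observed characters fill the free slots
  const ids = await tok.encode("hello");    // number[] of character ids
  console.log(await tok.decode(ids));       // "hello"
  const sep = tok.addToken("<sep>");        // new in 0.11.0: claim a free slot (throws when full)
  console.log(sep, tok.bosToken, tok.eosToken);
}
void demo();
```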
package/dist/tokeniser/bpe.d.ts
@@ -1,6 +1,5 @@
- import { default as EE } from 'eventemitter3';
- import { ITokeniser } from './type';
- export default class BPETokeniser extends EE<'trainStatus'> implements ITokeniser {
+ import { default as BaseTokeniser } from './BaseTokeniser';
+ export default class BPETokeniser extends BaseTokeniser {
  private targetSize;
  private vocab;
  private vocabIndex;
@@ -8,10 +7,12 @@ export default class BPETokeniser extends EE<'trainStatus'> implements ITokeniser {
  private pretokenMap;
  constructor(vocabSize: number);
  constructor(vocab: string[], merges?: [string, string][]);
+ addToken(token: string, index?: number): number;
  destroy(): void;
  get trained(): boolean;
  get vocabSize(): number;
  get eosToken(): number;
+ get bosToken(): number;
  get unkToken(): number;
  train(text: string[]): Promise<number>;
  getVocab(): string[];
package/dist/tokeniser/bpe.js
@@ -1,68 +1,80 @@
  import l from "../utilities/tokenParse.js";
- import { E as f } from "../index-DvYrXKkX.js";
+ import d, { SPECIALS as f } from "./BaseTokeniser.js";
  function u(o, e) {
  return `${o}-::-${e}`;
  }
- function k(o) {
+ function b(o) {
  const e = /* @__PURE__ */ new Map();
  for (let s = 0; s < o.length; s++) {
  const t = o[s];
- for (let r = 0; r < t.length - 1; r++) {
- const n = u(t[r], t[r + 1]), a = e.get(n) || {
- a: t[r],
- b: t[r + 1],
+ for (let n = 0; n < t.length - 1; n++) {
+ const r = u(t[n], t[n + 1]), i = e.get(r) || {
+ a: t[n],
+ b: t[n + 1],
  count: 0,
  instances: /* @__PURE__ */ new Set()
  };
- a.count += 1, a.instances.add(s), e.set(n, a);
+ i.count += 1, i.instances.add(s), e.set(r, i);
  }
  }
  return { pairs: e, tokens: o };
  }
- function h(o, e, s, t, r) {
- const n = u(e, s);
- if (o.pairs.has(n)) {
- const a = o.pairs.get(n);
- a.count += r, r > 0 ? a.instances.add(t) : a.count <= 0 ? o.pairs.delete(n) : a.instances.delete(t);
+ function h(o, e, s, t, n) {
+ const r = u(e, s);
+ if (o.pairs.has(r)) {
+ const i = o.pairs.get(r);
+ i.count += n, n > 0 ? i.instances.add(t) : i.count <= 0 ? o.pairs.delete(r) : i.instances.delete(t);
  } else
- o.pairs.set(n, { a: e, b: s, count: r, instances: /* @__PURE__ */ new Set([t]) });
+ o.pairs.set(r, { a: e, b: s, count: n, instances: /* @__PURE__ */ new Set([t]) });
  }
- function b(o) {
+ function k(o) {
  let e = null, s = 0;
  for (const t of o.pairs.values())
  t.count > s && (s = t.count, e = t);
  return e;
  }
- function d(o, e) {
+ function m(o, e) {
  return o.map((s) => {
  const t = [];
- for (let r = 0; r < s.length; r++)
- r < s.length - 1 && s[r] === e[0] && s[r + 1] === e[1] ? (t.push(e[0] + e[1]), r++) : t.push(s[r]);
+ for (let n = 0; n < s.length; n++)
+ n < s.length - 1 && s[n] === e[0] && s[n + 1] === e[1] ? (t.push(e[0] + e[1]), n++) : t.push(s[n]);
  return t;
  });
  }
- function m(o, e) {
+ function v(o, e) {
  e.instances.forEach((s) => {
- const t = o.tokens[s], r = [];
- for (let n = 0; n < t.length; n++)
- if (n < t.length - 1 && t[n] === e.a && t[n + 1] === e.b) {
- const a = e.a + e.b;
- r.push(a), n > 0 && (h(o, t[n - 1], e.a, s, -1), h(o, t[n - 1], a, s, 1)), n++, n < t.length - 1 && (h(o, e.b, t[n + 1], s, -1), h(o, a, t[n + 1], s, 1));
+ const t = o.tokens[s], n = [];
+ for (let r = 0; r < t.length; r++)
+ if (r < t.length - 1 && t[r] === e.a && t[r + 1] === e.b) {
+ const i = e.a + e.b;
+ n.push(i), r > 0 && (h(o, t[r - 1], e.a, s, -1), h(o, t[r - 1], i, s, 1)), r++, r < t.length - 1 && (h(o, e.b, t[r + 1], s, -1), h(o, i, t[r + 1], s, 1));
  } else
- r.push(t[n]);
- o.tokens[s] = r;
+ n.push(t[r]);
+ o.tokens[s] = n;
  }), o.pairs.delete(u(e.a, e.b));
  }
- class S extends f {
+ class T extends d {
  targetSize;
  vocab = /* @__PURE__ */ new Set();
  vocabIndex = /* @__PURE__ */ new Map();
  merges = [];
  pretokenMap = /* @__PURE__ */ new Map();
  constructor(e, s) {
- super(), Array.isArray(e) ? (e.forEach((t, r) => {
- this.vocab.add(t), this.vocabIndex.set(t, r);
- }), s && (this.merges = s), this.targetSize = e.length) : (this.vocab.add("<eos>"), this.vocab.add(""), this.targetSize = e);
+ super(), Array.isArray(e) ? (e.forEach((t, n) => {
+ this.vocab.add(t), this.vocabIndex.set(t, n);
+ }), s && (this.merges = s), this.targetSize = e.length, f.forEach((t) => {
+ const n = e.indexOf(t);
+ n !== -1 && this.addSpecialToken(t, n);
+ })) : (this.addSpecialTokens(), this.targetSize = e);
+ }
+ addToken(e, s) {
+ if (this.vocab.has(e))
+ return this.vocabIndex.get(e);
+ {
+ this.vocab.add(e);
+ const t = s !== void 0 ? s : this.vocab.size - 1;
+ return this.vocabIndex.set(e, t), t;
+ }
  }
  destroy() {
  this.vocab.clear(), this.vocabIndex.clear(), this.merges = [], this.pretokenMap.clear();
@@ -76,26 +88,29 @@ class S extends f {
  get eosToken() {
  return this.vocabIndex.get("<eos>") ?? 0;
  }
+ get bosToken() {
+ return this.vocabIndex.get("<bos>") ?? 0;
+ }
  get unkToken() {
  return this.vocabIndex.get("") ?? 1;
  }
  async train(e) {
- const s = e.map((i) => l(i)).flat(1), t = new Set(s);
- this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.vocab.add("<eos>"), this.vocab.add("");
- const r = Array.from(t), n = r.map((i) => Array.from(i).map((c) => (this.vocab.add(c), c))), a = k(n);
+ const s = e.map((a) => l(a)).flat(1), t = new Set(s);
+ this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.addSpecialTokens();
+ const n = Array.from(t), r = n.map((a) => Array.from(a).map((c) => (this.vocab.add(c), c))), i = b(r);
  for (; this.vocab.size < this.targetSize && this.merges.length < this.targetSize; ) {
- const i = b(a);
- if (!i)
+ const a = k(i);
+ if (!a)
  break;
- this.merges.push([i.a, i.b]), this.vocab.add(i.a + i.b), m(a, i);
+ this.merges.push([a.a, a.b]), this.vocab.add(a.a + a.b), v(i, a);
  }
- r.forEach((i, p) => {
- const c = n[p];
- this.pretokenMap.set(i, c);
+ n.forEach((a, p) => {
+ const c = r[p];
+ this.pretokenMap.set(a, c);
  }), this.vocabIndex.clear();
  let g = 0;
- for (const i of this.vocab.keys())
- this.vocabIndex.set(i, g++);
+ for (const a of this.vocab.keys())
+ this.vocabIndex.set(a, g++);
  return this.emit("trainStatus", "trained"), this.vocab.size;
  }
  getVocab() {
@@ -107,19 +122,19 @@ class S extends f {
  tokeniseWord(e) {
  let s = Array.from(e);
  return this.merges.forEach((t) => {
- s = d([s], t)[0];
+ s = m([s], t)[0];
  }), this.pretokenMap.set(e, s), s;
  }
  tokeniseStrings(e) {
- return e.map((s) => l(s).map((n) => this.pretokenMap.has(n) ? this.pretokenMap.get(n) : this.tokeniseWord(n)).flat(1));
+ return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
  }
  async tokenise(e, s) {
  const t = this.tokeniseStrings(e);
- return s ? t.map((r) => r.map((n) => this.vocabIndex.get(n) ?? this.unkToken)) : t.map((r) => r.map((n) => this.vocab.has(n) ? n : ""));
+ return s ? t.map((n) => n.map((r) => this.vocabIndex.get(r) ?? this.unkToken)) : t.map((n) => n.map((r) => this.vocab.has(r) ? r : ""));
  }
  async detokenise(e) {
  const s = this.getVocab();
- return e.map((r) => r.map((n) => s[n]).join(""));
+ return e.map((n) => n.map((r) => s[r]).join(""));
  }
  async encode(e) {
  return (await this.tokenise([e], !0))[0];
@@ -129,5 +144,5 @@ class S extends f {
  }
  }
  export {
- S as default
+ T as default
  };
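
Most of this hunk is identifier churn from re-minification, but the underlying BPE training loop is visible in it. For readability, here is roughly what the minified code does, de-obfuscated into a standalone sketch (hedged: the names are mine, and the bundle's incremental pair-count bookkeeping in `h`/`v` is simplified to a full recount per merge):

```ts
type Pair = { a: string; b: string; count: number };

// Count adjacent symbol pairs across all words (function b in the bundle).
function countPairs(words: string[][]): Map<string, Pair> {
  const pairs = new Map<string, Pair>();
  for (const word of words) {
    for (let i = 0; i < word.length - 1; i++) {
      const key = `${word[i]}-::-${word[i + 1]}`; // same separator as the bundle's u()
      const entry = pairs.get(key) ?? { a: word[i], b: word[i + 1], count: 0 };
      entry.count += 1;
      pairs.set(key, entry);
    }
  }
  return pairs;
}

// Replace each occurrence of [a, b] with the merged symbol a+b (function m).
function applyMerge(word: string[], a: string, b: string): string[] {
  const out: string[] = [];
  for (let i = 0; i < word.length; i++) {
    if (i < word.length - 1 && word[i] === a && word[i + 1] === b) {
      out.push(a + b);
      i++; // skip the consumed right-hand symbol
    } else {
      out.push(word[i]);
    }
  }
  return out;
}

// Greedy BPE training loop: keep merging the most frequent pair.
function trainBPE(words: string[][], targetSize: number): [string, string][] {
  const merges: [string, string][] = [];
  const vocab = new Set(words.flat());
  while (vocab.size < targetSize && merges.length < targetSize) {
    let best: Pair | null = null;
    for (const p of countPairs(words).values()) {
      if (!best || p.count > best.count) best = p;
    }
    if (!best) break; // no pairs left to merge
    const { a, b } = best;
    merges.push([a, b]);
    vocab.add(a + b);
    words = words.map((w) => applyMerge(w, a, b));
  }
  return merges;
}

// e.g. trainBPE([["h","e","l","l","o"], ["h","e","l","p"]], 10)
```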
package/dist/tokeniser/type.d.ts
@@ -1,4 +1,9 @@
  import { default as EE } from 'eventemitter3';
+ export type Roles = 'user' | 'assistant' | 'system';
+ export interface Conversation {
+ role: Roles;
+ content: string;
+ }
  export interface ITokeniser extends EE<'trainStatus'> {
  train(text: string[]): Promise<number>;
  tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
@@ -7,8 +12,14 @@ export interface ITokeniser extends EE<'trainStatus'> {
  getMerges(): Promise<[string, string][]>;
  destroy(): void;
  encode(text: string): Promise<number[]>;
+ encodeConversation(conversation: Conversation[], completion?: boolean): Promise<number[]>;
+ encodeSequence(text: string): Promise<number[]>;
  decode(tokens: number[]): Promise<string>;
+ decodeConversation(tokens: number[]): Promise<Conversation[]>;
  vocabSize: number;
  eosToken: number;
+ bosToken: number;
  trained: boolean;
+ getSpecialTokenIndex(token: string): number | undefined;
+ isSpecialToken(index: number): boolean;
  }
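
The ITokeniser additions above introduce a chat-style interface. A hedged usage sketch (the exact token layout produced by `encodeConversation` is not visible in this diff; presumably it wraps each turn with special tokens such as `<bos>`/`<eos>`):

```ts
// Hypothetical import path; the types come from tokeniser/type.d.ts above.
import type { Conversation, ITokeniser } from "@genai-fi/nanogpt";

async function roundTrip(tok: ITokeniser): Promise<void> {
  const dialogue: Conversation[] = [
    { role: "system", content: "You are terse." },
    { role: "user", content: "Hi!" },
    { role: "assistant", content: "Hello." },
  ];
  // completion=true presumably leaves the final turn open for generation.
  const ids = await tok.encodeConversation(dialogue, true);
  console.log(ids.some((i) => tok.isSpecialToken(i))); // role/turn markers are special tokens
  const back = await tok.decodeConversation(ids);      // should recover role/content pairs
  console.log(back);
}
```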
package/dist/training/Adam.js
@@ -1,7 +1,7 @@
  import { adamAdjust as b } from "../ops/adamAdjust.js";
  import { adamMoments as d } from "../ops/adamMoments.js";
- import { O as g, e as h, t as o, d as B } from "../index-DOvlwCh-.js";
- import { z as M } from "../zeros-KnWaWf-X.js";
+ import { O as g, e as h, t as o, d as B } from "../index-D0RBWjq8.js";
+ import { z as M } from "../zeros-DeiE2zTa.js";
  class R extends g {
  constructor(t, a, e, s, i = null) {
  super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());
package/dist/training/AdamExt.js
@@ -1,4 +1,4 @@
- import { m as r, b as c, c as h, e as o } from "../index-DOvlwCh-.js";
+ import { m as r, b as c, c as h, e as o } from "../index-D0RBWjq8.js";
  import { AdamOptimizer as g } from "./Adam.js";
  class y extends g {
  constructor(t, e, s, i, a) {
package/dist/training/DatasetBuilder.d.ts
@@ -1,8 +1,8 @@
  import { Tensor } from '@tensorflow/tfjs-core';
- import { ITokeniser } from '../tokeniser/type';
+ import { Conversation, ITokeniser } from '../tokeniser/type';
  import { Dataset } from '@tensorflow/tfjs-data';
  export declare const PAGE_FACTOR = 8;
- export declare function flattenTokens(textData: string[], tokenizer: ITokeniser): Promise<number[]>;
+ export declare function flattenTokens(textData: Conversation[][], tokenizer: ITokeniser): Promise<number[]>;
  export declare class DatasetBuilder {
  tokenizer: ITokeniser;
  blockSize: number;
package/dist/training/DatasetBuilder.js
@@ -1,67 +1,63 @@
- import { t as g } from "../index-DOvlwCh-.js";
- import { d as u, i as d } from "../dataset-BcwmTGYc.js";
+ import { t as z } from "../index-D0RBWjq8.js";
+ import { d as u, i as f } from "../dataset-DcjWqUVQ.js";
  import "../index-Cp39cXWe.js";
- function z(r) {
+ function S(a) {
  return u(async () => {
- const t = await r();
- return d(() => t.next());
+ const t = await a();
+ return f(() => t.next());
  });
  }
- const S = 8;
- async function y(r, t) {
- const s = await Promise.all(r.map((e) => t.encode(e))), o = t.eosToken >= 0, a = s.map((e) => o ? [...e, t.eosToken] : e).flat();
- for (const e of a)
- if (e < 0 || e >= t.vocabSize)
- throw new Error(`Invalid token index ${e} found in tokenised data`);
- return a;
+ const b = 8;
+ async function y(a, t) {
+ return (await Promise.all(a.map((r) => t.encodeConversation(r)))).flat();
  }
- class w {
+ class x {
  tokenizer;
  blockSize;
  pageSize;
  constructor(t, s = 128) {
- this.tokenizer = t, this.blockSize = s, this.pageSize = s * S;
+ this.tokenizer = t, this.blockSize = s, this.pageSize = s * b;
  }
  // Create dataset from text files
- async createTextDataset(t, s = 32, o, a) {
+ async createTextDataset(t, s = 32, i, r) {
  if (t.length < this.blockSize + 1)
  throw new Error(`Not enough tokens (${t.length}) for block size ${this.blockSize}`);
- if (o && o.size > t.length / this.pageSize / 2)
+ if (i && i.size > t.length / this.pageSize / 2)
  throw new Error("Too many masked pages - would leave insufficient training data");
- const e = (function* () {
- if (o && a) {
- const i = Array.from(o);
+ const l = (function* () {
+ if (i && r) {
+ const e = Array.from(i);
  for (; ; ) {
- const c = Math.floor(Math.random() * i.length), l = Math.floor(Math.random() * this.pageSize), n = i[c] * this.pageSize + l;
- if (n + this.blockSize + 1 > t.length)
+ const n = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), o = e[n] * this.pageSize + h;
+ if (o + this.blockSize + 1 > t.length)
  continue;
- const h = t.slice(n, n + this.blockSize), f = t.slice(n + 1, n + this.blockSize + 1);
- yield { xs: h, ys: f };
+ const c = t.slice(o, o + this.blockSize), g = t.slice(o + 1, o + this.blockSize + 1);
+ yield { xs: c, ys: g };
  }
  } else
  for (; ; ) {
- const i = Math.floor(Math.random() * (t.length - this.blockSize - 1));
- if (o) {
- const n = Math.floor(i / this.pageSize), h = o.has(n);
- if (h && !a || !h && a)
+ const e = Math.floor(Math.random() * (t.length - this.blockSize - 1));
+ if (i) {
+ const o = Math.floor(e / this.pageSize), c = i.has(o);
+ if (c && !r || !c && r)
  continue;
  }
- const c = t.slice(i, i + this.blockSize), l = t.slice(i + 1, i + this.blockSize + 1);
- yield { xs: c, ys: l };
+ const n = t.slice(e, e + this.blockSize), h = t.slice(e + 1, e + this.blockSize + 1);
+ yield { xs: n, ys: h };
  }
  }).bind(this);
- return z(e).batch(s).map((i) => {
- const c = i;
- return g(() => ({
- xs: c.xs.cast("int32"),
- ys: c.ys.cast("int32")
+ return S(l).batch(s).map((e) => {
+ const n = e;
+ return z(() => ({
+ xs: n.xs.cast("int32"),
+ ys: n.ys.cast("int32")
  // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
  }));
  }).prefetch(2);
  }
  }
  export {
- w as DatasetBuilder,
- S as PAGE_FACTOR,
+ x as DatasetBuilder,
+ b as PAGE_FACTOR,
  y as flattenTokens
  };
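
As this hunk shows, flattenTokens now feeds whole conversations through `encodeConversation` instead of `encode` plus an appended eos token (and the per-token range check is gone). A hedged sketch of the new call shape (the import path and tokeniser construction are illustrative, not confirmed by the diff):

```ts
// Hypothetical import path; signatures taken from DatasetBuilder.d.ts above.
import { DatasetBuilder, flattenTokens } from "@genai-fi/nanogpt";
import type { Conversation, ITokeniser } from "@genai-fi/nanogpt";

async function buildDataset(tok: ITokeniser) {
  const data: Conversation[][] = [
    [
      { role: "user", content: "2+2?" },
      { role: "assistant", content: "4" },
    ],
    // ...enough conversations that tokens.length exceeds blockSize + 1
  ];
  const tokens = await flattenTokens(data, tok);  // one flat number[] of token ids
  const builder = new DatasetBuilder(tok, 128);   // blockSize 128, pageSize 128 * PAGE_FACTOR
  // Random blocks of { xs, ys } int32 tensors, ys shifted one token right.
  const ds = await builder.createTextDataset(tokens, 32);
  return ds;
}
```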
package/dist/training/FullTrainer.js
@@ -1,6 +1,6 @@
  import b from "./Trainer.js";
  import L from "./Evaluator.js";
- import { d as w } from "../index-DOvlwCh-.js";
+ import { d as w } from "../index-D0RBWjq8.js";
  import y from "../utilities/profile.js";
  import { createTensorStatistics as D } from "../checks/weights.js";
  const T = {
package/dist/training/Trainer.d.ts
@@ -1,4 +1,4 @@
- import { ITokeniser } from '../tokeniser/type';
+ import { Conversation, ITokeniser } from '../tokeniser/type';
  import { DatasetBuilder } from './DatasetBuilder';
  import { default as AdamExt } from './AdamExt';
  import { NamedTensorMap, TensorContainer } from '@tensorflow/tfjs-core/dist/tensor_types';
@@ -93,7 +93,7 @@ export default abstract class GPTTrainer {
  log: TrainingLogEntry;
  progress: TrainingProgress;
  }>;
- createTrainValidationSplit(textData: string[], batchSize?: number, validationSplit?: number): Promise<{
+ createTrainValidationSplit(textData: Conversation[][], batchSize?: number, validationSplit?: number): Promise<{
  trainDataset: Dataset<{
  xs: Tensor;
  ys: Tensor;
@@ -103,6 +103,6 @@ export default abstract class GPTTrainer {
  ys: Tensor;
  }>;
  }>;
- createDataset(textData: string[], batchSize?: number): Promise<Dataset<TensorContainer>>;
+ createDataset(textData: Conversation[][], batchSize?: number): Promise<Dataset<TensorContainer>>;
  dispose(): void;
  }
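
Correspondingly, the trainer entry points now take conversation arrays rather than raw strings. A hedged sketch of the changed signature in use (trainer construction is elided; the `declare` shape is lifted from the .d.ts hunk above):

```ts
import type { Conversation } from "@genai-fi/nanogpt"; // hypothetical import path

// A concrete GPTTrainer is assumed; only the changed method is modelled here.
declare const trainer: {
  createTrainValidationSplit(
    textData: Conversation[][],
    batchSize?: number,
    validationSplit?: number
  ): Promise<unknown>;
};

const corpus: Conversation[][] = [
  [
    { role: "user", content: "Hello" },
    { role: "assistant", content: "Hi!" },
  ],
];
// 0.10.x passed string[]; 0.11.0 passes Conversation[][].
const split = await trainer.createTrainValidationSplit(corpus, 32, 0.1);
```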
package/dist/training/Trainer.js
@@ -1,7 +1,7 @@
  import { DatasetBuilder as f, flattenTokens as h, PAGE_FACTOR as y } from "./DatasetBuilder.js";
  import z from "./AdamExt.js";
- import { t as S, v as k, k as x, d as p, b as m } from "../index-DOvlwCh-.js";
- import { z as g } from "../zeros-KnWaWf-X.js";
+ import { t as S, v as k, k as x, d as p, b as m } from "../index-D0RBWjq8.js";
+ import { z as g } from "../zeros-DeiE2zTa.js";
  class M {
  constructor(t, e, s = 1e-3) {
  this.tokenizer = e, this.model = t, this.lossScaling = t.lossScaling, this.learningRate = s, this.resetOptimizer(), this.datasetBuilder = new f(e, t.config.blockSize);
package/dist/training/sparseCrossEntropy.js
@@ -1,8 +1,8 @@
  import { gatherSub as x } from "../ops/gatherSub.js";
  import { scatterSub as L } from "../ops/scatterSub.js";
- import { a6 as C, t as u, a7 as E, c as G } from "../index-DOvlwCh-.js";
- import { s as y } from "../softmax-CA5jFsLR.js";
- import { m as z, l as v } from "../log_sum_exp-ngO0-4pK.js";
+ import { a2 as C, t as u, a3 as E, c as G } from "../index-D0RBWjq8.js";
+ import { s as y } from "../softmax-faLoUZVT.js";
+ import { m as z, l as v } from "../log_sum_exp-VLZgbFAH.js";
  function k(t, s) {
  return u(() => {
  const n = t.shape[t.shape.length - 1], c = t.shape.slice(0, -1).reduce((o, e) => o * e, 1), h = t.shape.length > 2 ? t.reshape([c, n]) : t, p = s.shape.length > 1 ? s.reshape([c]).cast("int32") : s.cast("int32"), r = z(h, -1, !0), a = G(h, r), d = v(a, -1);
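
Only the chunk hashes change here, but the context line is worth unpacking: it is the numerically stable log-softmax, subtracting the row max before the log-sum-exp. A hedged plain-TypeScript rendering of that computation for a single row of logits (no tfjs, just the arithmetic the ops perform):

```ts
// loss = logsumexp(logits - max) - (logits[target] - max)
//      = -log softmax(logits)[target]
function sparseCrossEntropyRow(logits: number[], target: number): number {
  const max = Math.max(...logits);            // z(h, -1, !0): row max, dims kept
  const shifted = logits.map((v) => v - max); // G(h, r): subtract max for stability
  const lse = Math.log(shifted.reduce((s, v) => s + Math.exp(v), 0)); // v(a, -1)
  return lse - shifted[target];
}

console.log(sparseCrossEntropyRow([2, 1, 0], 0)); // ≈ 0.4076
```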
package/dist/{transpose-ClWiBS_b.js → transpose-JawVKyZy.js}
@@ -1,5 +1,5 @@
- import { A as u, B as i, E as o, ap as $, aq as g, ar as m, l, t as x, as as p } from "./index-DOvlwCh-.js";
- import { c as k } from "./complex-DjxcVmoX.js";
+ import { q as u, u as i, E as o, ap as $, aq as g, ar as m, y as l, t as x, as as p } from "./index-D0RBWjq8.js";
+ import { c as k } from "./complex-DClmWqJt.js";
  function K(r) {
  const e = { input: i(r, "input", "imag") };
  return o.runKernel($, e);
@@ -15,7 +15,7 @@ function b(r) {
  return o.runKernel(m, e);
  }
  const d = /* @__PURE__ */ u({ real_: b });
- function N(r, t, e) {
+ function y(r, t, e) {
  const n = i(r, "x", "transpose");
  if (t == null && (t = n.shape.map((s, a) => a).reverse()), l(n.rank === t.length, () => `Error in transpose: rank of input ${n.rank} must match length of perm ${t}.`), t.forEach((s) => {
  l(s >= 0 && s < n.rank, () => `All entries in 'perm' must be between 0 and ${n.rank - 1} but got ${t}`);
@@ -27,10 +27,10 @@ function N(r, t, e) {
  return s = o.runKernel(p, { x: s }, c), a = o.runKernel(p, { x: a }, c), e && (a = _(a)), k(s, a);
  }) : o.runKernel(p, f, c);
  }
- const y = /* @__PURE__ */ u({ transpose_: N });
+ const q = /* @__PURE__ */ u({ transpose_: y });
  export {
  h as i,
  _ as n,
  d as r,
- y as t
+ q as t
  };