@genai-fi/nanogpt 0.10.3 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238)
  1. package/dist/Generator.d.ts +10 -5
  2. package/dist/Generator.js +1789 -1765
  3. package/dist/{RealDiv-KAPDe8zB.js → RealDiv-C8neBwFi.js} +15 -15
  4. package/dist/{Reshape-BYkmUnAv.js → Reshape-Bd4V_4X7.js} +1 -1
  5. package/dist/{Reshape-Zt6eb7yh.js → Reshape-Ck29jQSY.js} +5 -5
  6. package/dist/TeachableLLM.d.ts +5 -3
  7. package/dist/TeachableLLM.js +14 -14
  8. package/dist/Trainer.d.ts +3 -1
  9. package/dist/Trainer.js +11 -8
  10. package/dist/{axis_util-BaG7mf5A.js → axis_util-DGqbT-FX.js} +3 -3
  11. package/dist/backend.js +2 -2
  12. package/dist/{backend_util-RCe-rHaj.js → backend_util-DC3rBo_H.js} +18 -18
  13. package/dist/{backend_webgpu-DE3ACOLx.js → backend_webgpu-mbhNnlx9.js} +3 -3
  14. package/dist/{broadcast_to-B3eYlZm7.js → broadcast_to-D1Dmg2Oz.js} +2 -2
  15. package/dist/checks/appendCache.js +2 -2
  16. package/dist/checks/attentionMask.js +3 -3
  17. package/dist/checks/gelu.js +2 -2
  18. package/dist/checks/matMulGelu.js +2 -2
  19. package/dist/checks/normRMS.js +4 -4
  20. package/dist/checks/normRMSGrad.js +3 -3
  21. package/dist/checks/packUnpack.js +2 -2
  22. package/dist/checks/qkv.js +4 -4
  23. package/dist/checks/rope.js +2 -2
  24. package/dist/{clip_by_value-BnO7-a88.js → clip_by_value-fg2aKzUy.js} +5 -5
  25. package/dist/complex-Cyg-eQeZ.js +11 -0
  26. package/dist/concat-CSm2rMwe.js +17 -0
  27. package/dist/{concat_util-DpW8mL_l.js → concat_util-D0je5Ppu.js} +1 -1
  28. package/dist/{dataset-BcwmTGYc.js → dataset-CVIJu7Xa.js} +7 -7
  29. package/dist/{dropout-BcvN9JYi.js → dropout-DLhSMNTZ.js} +9 -9
  30. package/dist/expand_dims-ChkuOp6I.js +11 -0
  31. package/dist/{exports_initializers-Hta_rEnm.js → exports_initializers-1KWPiStI.js} +1 -1
  32. package/dist/{floor-D5QdR_le.js → floor-BRMPgeIs.js} +1 -1
  33. package/dist/{gather-D3JcZUaI.js → gather-BSULDalH.js} +1 -1
  34. package/dist/{gelu-CjNPL4OH.js → gelu-BK1k-n1i.js} +1 -1
  35. package/dist/{gpgpu_math-DAOmgtXR.js → gpgpu_math-BJSTk_mW.js} +25 -25
  36. package/dist/{index-BwexR4lA.js → index-BBVLAXZD.js} +89 -89
  37. package/dist/{index-DOvlwCh-.js → index-Duu1Lvvv.js} +53 -53
  38. package/dist/{kernel_funcs_utils-CCzYdUZg.js → kernel_funcs_utils-BtYrPoJu.js} +6 -6
  39. package/dist/layers/BaseLayer.js +2 -2
  40. package/dist/layers/CausalSelfAttention.js +6 -6
  41. package/dist/layers/MLP.js +4 -4
  42. package/dist/layers/PositionEmbedding.js +5 -5
  43. package/dist/layers/RMSNorm.js +3 -3
  44. package/dist/layers/RoPECache.js +4 -4
  45. package/dist/layers/TiedEmbedding.js +6 -6
  46. package/dist/layers/TransformerBlock.js +1 -1
  47. package/dist/loader/loadTransformers.js +1 -1
  48. package/dist/loader/oldZipLoad.js +9 -9
  49. package/dist/log_sum_exp-CVqLsVLl.js +39 -0
  50. package/dist/main.d.ts +10 -1
  51. package/dist/main.js +68 -58
  52. package/dist/{matMul16-BWRSOCWB.js → matMul16-xswmhSuF.js} +3 -3
  53. package/dist/{matMulGelu-CzfgT6Wq.js → matMulGelu-BpvgnYG8.js} +14 -14
  54. package/dist/mat_mul-Bn2BDpT4.js +11 -0
  55. package/dist/{mod-AnXEvvpo.js → mod-B4AUd1Np.js} +1 -1
  56. package/dist/models/NanoGPTV1.js +2 -2
  57. package/dist/models/model.js +9 -9
  58. package/dist/{ones-D2rT0xk2.js → ones-CBI1AQjb.js} +3 -3
  59. package/dist/ops/adamAdjust.js +1 -1
  60. package/dist/ops/adamMoments.js +1 -1
  61. package/dist/ops/add16.js +1 -1
  62. package/dist/ops/appendCache.js +3 -3
  63. package/dist/ops/attentionMask.js +1 -1
  64. package/dist/ops/concat16.js +2 -2
  65. package/dist/ops/cpu/adamAdjust.js +9 -9
  66. package/dist/ops/cpu/adamMoments.js +5 -5
  67. package/dist/ops/cpu/appendCache.js +6 -6
  68. package/dist/ops/cpu/attentionMask.js +10 -10
  69. package/dist/ops/cpu/fusedSoftmax.js +5 -5
  70. package/dist/ops/cpu/gatherSub.js +9 -9
  71. package/dist/ops/cpu/gelu.js +5 -5
  72. package/dist/ops/cpu/matMul16.js +2 -2
  73. package/dist/ops/cpu/matMulGelu.js +3 -3
  74. package/dist/ops/cpu/matMulMul.js +5 -5
  75. package/dist/ops/cpu/mulDropout.js +1 -1
  76. package/dist/ops/cpu/normRMS.js +7 -7
  77. package/dist/ops/cpu/qkv.js +3 -3
  78. package/dist/ops/cpu/rope.js +5 -5
  79. package/dist/ops/cpu/scatterSub.js +11 -11
  80. package/dist/ops/dot16.js +2 -2
  81. package/dist/ops/gatherSub.js +1 -1
  82. package/dist/ops/gelu.js +2 -2
  83. package/dist/ops/grads/add16.js +4 -4
  84. package/dist/ops/grads/attentionMask.js +2 -2
  85. package/dist/ops/grads/gelu.js +2 -2
  86. package/dist/ops/grads/matMul16.js +3 -3
  87. package/dist/ops/grads/matMulGelu.js +6 -6
  88. package/dist/ops/grads/normRMS.js +4 -4
  89. package/dist/ops/grads/pack16.js +3 -3
  90. package/dist/ops/grads/qkv.js +10 -10
  91. package/dist/ops/grads/rope.js +2 -2
  92. package/dist/ops/grads/softmax16.js +1 -1
  93. package/dist/ops/grads/unpack16.js +2 -2
  94. package/dist/ops/matMul16.js +3 -3
  95. package/dist/ops/matMulGelu.js +2 -2
  96. package/dist/ops/matMulMul.js +1 -1
  97. package/dist/ops/mul16.js +1 -1
  98. package/dist/ops/mulDrop.js +1 -1
  99. package/dist/ops/normRMS.js +1 -1
  100. package/dist/ops/pack16.js +2 -2
  101. package/dist/ops/qkv.js +1 -1
  102. package/dist/ops/reshape16.js +2 -2
  103. package/dist/ops/rope.js +2 -2
  104. package/dist/ops/scatterSub.js +1 -1
  105. package/dist/ops/slice16.js +2 -2
  106. package/dist/ops/softmax16.js +1 -1
  107. package/dist/ops/sub16.js +1 -1
  108. package/dist/ops/sum16.js +2 -2
  109. package/dist/ops/transpose16.js +6 -6
  110. package/dist/ops/unpack16.js +2 -2
  111. package/dist/ops/webgl/adamAdjust.js +2 -2
  112. package/dist/ops/webgl/adamMoments.js +1 -1
  113. package/dist/ops/webgl/appendCache.js +1 -1
  114. package/dist/ops/webgl/attentionMask.js +1 -1
  115. package/dist/ops/webgl/fusedSoftmax.js +4 -4
  116. package/dist/ops/webgl/gatherSub.js +1 -1
  117. package/dist/ops/webgl/gelu.js +2 -2
  118. package/dist/ops/webgl/log.js +3 -3
  119. package/dist/ops/webgl/matMul16.js +8 -8
  120. package/dist/ops/webgl/matMulGelu.js +4 -4
  121. package/dist/ops/webgl/matMulMul.js +7 -7
  122. package/dist/ops/webgl/mulDropout.js +1 -1
  123. package/dist/ops/webgl/normRMS.js +7 -7
  124. package/dist/ops/webgl/qkv.js +1 -1
  125. package/dist/ops/webgl/rope.js +1 -1
  126. package/dist/ops/webgl/scatterSub.js +1 -1
  127. package/dist/ops/webgpu/adamAdjust.js +3 -3
  128. package/dist/ops/webgpu/adamMoments.js +5 -5
  129. package/dist/ops/webgpu/add16.js +1 -1
  130. package/dist/ops/webgpu/appendCache.js +3 -3
  131. package/dist/ops/webgpu/attentionMask.js +2 -2
  132. package/dist/ops/webgpu/attentionMask32_program.js +2 -2
  133. package/dist/ops/webgpu/concat16.js +5 -5
  134. package/dist/ops/webgpu/gatherSub.js +5 -5
  135. package/dist/ops/webgpu/gelu.js +3 -3
  136. package/dist/ops/webgpu/matMul16.js +19 -19
  137. package/dist/ops/webgpu/matMul16_program.js +2 -2
  138. package/dist/ops/webgpu/mul16.js +4 -4
  139. package/dist/ops/webgpu/normRMS.js +6 -6
  140. package/dist/ops/webgpu/normRMSGrad.js +4 -4
  141. package/dist/ops/webgpu/pack16.js +3 -3
  142. package/dist/ops/webgpu/pack16_program.js +2 -2
  143. package/dist/ops/webgpu/qkv.js +8 -8
  144. package/dist/ops/webgpu/rope.js +3 -3
  145. package/dist/ops/webgpu/scatterSub.js +3 -3
  146. package/dist/ops/webgpu/slice16.js +4 -4
  147. package/dist/ops/webgpu/softmax16.js +4 -4
  148. package/dist/ops/webgpu/softmax16_program.js +2 -2
  149. package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
  150. package/dist/ops/webgpu/softmax16grad.js +1 -1
  151. package/dist/ops/webgpu/sub16.js +4 -4
  152. package/dist/ops/webgpu/sum16.js +5 -5
  153. package/dist/ops/webgpu/transpose16.js +2 -2
  154. package/dist/ops/webgpu/transpose16_program.js +2 -2
  155. package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
  156. package/dist/ops/webgpu/unpack16.js +5 -5
  157. package/dist/ops/webgpu/utils/binary_op.js +3 -3
  158. package/dist/ops/webgpu/utils/reductions.js +4 -4
  159. package/dist/{ops-B5yanEdW.js → ops-C2_OXuZ4.js} +69 -69
  160. package/dist/{pack16-nQ6JaLo-.js → pack16-atD0eYRm.js} +9 -9
  161. package/dist/patches/webgpu_backend.js +6 -6
  162. package/dist/patches/webgpu_base.js +1 -1
  163. package/dist/patches/webgpu_program.js +8 -8
  164. package/dist/{random_width-or-CEftb.js → random_width-BN4wGJaW.js} +33 -33
  165. package/dist/range-DKmP1-OQ.js +10 -0
  166. package/dist/relu-BsXmGzzu.js +9 -0
  167. package/dist/{reshape-ByE68wS9.js → reshape-BI0yzp1T.js} +1 -1
  168. package/dist/{resize_nearest_neighbor-B19mCEg2.js → resize_nearest_neighbor-BA_BX-ub.js} +26 -26
  169. package/dist/{rope-Ir4mTyD1.js → rope-DJ7Y7c-u.js} +1 -1
  170. package/dist/{scatter_nd_util-lvSiX8q4.js → scatter_nd_util-k9MUVUkn.js} +1 -1
  171. package/dist/{selu_util-kbhpTdYD.js → selu_util-DyW0X1WG.js} +5 -5
  172. package/dist/{shared-DT1TkE6w.js → shared-Q3BS6T03.js} +1 -1
  173. package/dist/{shared-dntlHIDQ.js → shared-nnSWpC3u.js} +86 -86
  174. package/dist/{slice-BfEGSH82.js → slice-wBNvzVyz.js} +1 -1
  175. package/dist/{slice_util-uTKwiEpW.js → slice_util-zN8KFC5I.js} +1 -1
  176. package/dist/{softmax-CA5jFsLR.js → softmax-DfuYyjMh.js} +1 -1
  177. package/dist/split-BYrLboMq.js +9 -0
  178. package/dist/squeeze-Bk8Brcct.js +10 -0
  179. package/dist/{stack-Cf4n9h0N.js → stack-CDWShFHF.js} +1 -1
  180. package/dist/{step-CINUs5QB.js → step-BS5JXRR6.js} +23 -23
  181. package/dist/{sum-DWAtNGez.js → sum-BPUfDB2X.js} +3 -3
  182. package/dist/tensor-CEt9Nm2s.js +8 -0
  183. package/dist/tensor1d-Cc_KCIDg.js +11 -0
  184. package/dist/{tensor2d-Bs9wZRc7.js → tensor2d-BN97fF71.js} +3 -3
  185. package/dist/{tensor4d-BARPdTaS.js → tensor4d-vuDDgdUI.js} +1 -1
  186. package/dist/{tfjs_backend-y1cvNhLA.js → tfjs_backend-806hyYve.js} +49 -49
  187. package/dist/{tile-mbfagpsB.js → tile-OWUvpIVt.js} +3 -3
  188. package/dist/tokeniser/BaseTokeniser.d.ts +25 -0
  189. package/dist/tokeniser/BaseTokeniser.js +94 -0
  190. package/dist/tokeniser/CharTokeniser.d.ts +10 -9
  191. package/dist/tokeniser/CharTokeniser.js +44 -30
  192. package/dist/tokeniser/bpe.d.ts +10 -9
  193. package/dist/tokeniser/bpe.js +67 -52
  194. package/dist/tokeniser/type.d.ts +14 -5
  195. package/dist/training/Adam.js +2 -2
  196. package/dist/training/AdamExt.js +1 -1
  197. package/dist/training/DatasetBuilder.d.ts +3 -3
  198. package/dist/training/DatasetBuilder.js +34 -38
  199. package/dist/training/FullTrainer.js +1 -1
  200. package/dist/training/Trainer.d.ts +4 -3
  201. package/dist/training/Trainer.js +22 -25
  202. package/dist/training/sparseCrossEntropy.js +3 -3
  203. package/dist/training/tasks/ConversationTask.d.ts +11 -0
  204. package/dist/training/tasks/ConversationTask.js +26 -0
  205. package/dist/training/tasks/PretrainingTask.d.ts +11 -0
  206. package/dist/training/tasks/PretrainingTask.js +34 -0
  207. package/dist/training/tasks/StartSentenceTask.d.ts +12 -0
  208. package/dist/training/tasks/StartSentenceTask.js +42 -0
  209. package/dist/training/tasks/Task.d.ts +8 -0
  210. package/dist/training/tasks/Task.js +41 -0
  211. package/dist/{transpose-ClWiBS_b.js → transpose-BUkQCJp9.js} +6 -6
  212. package/dist/{unsorted_segment_sum-BDDhB_E6.js → unsorted_segment_sum-BljxHhCY.js} +5 -5
  213. package/dist/utilities/dummy.js +3 -3
  214. package/dist/utilities/multinomialCPU.js +2 -2
  215. package/dist/utilities/packed.js +1 -1
  216. package/dist/utilities/performance.js +1 -1
  217. package/dist/utilities/profile.js +1 -1
  218. package/dist/utilities/safetensors.js +2 -2
  219. package/dist/utilities/sentences.d.ts +1 -1
  220. package/dist/utilities/sentences.js +11 -11
  221. package/dist/utilities/weights.js +2 -2
  222. package/dist/{variable-WawDEaAb.js → variable-DPt_Iuog.js} +1 -1
  223. package/dist/{webgpu_program-DuOXPQol.js → webgpu_program-BpWRlghH.js} +3 -3
  224. package/dist/{webgpu_util-RxEF33Rj.js → webgpu_util-DMiKzzQM.js} +7 -7
  225. package/dist/{zeros-KnWaWf-X.js → zeros-5YROwwUH.js} +2 -2
  226. package/dist/{zeros_like-DvE73F4e.js → zeros_like-De4n1C3m.js} +71 -71
  227. package/package.json +1 -1
  228. package/dist/complex-DjxcVmoX.js +0 -11
  229. package/dist/concat-BV8bt5H-.js +0 -17
  230. package/dist/expand_dims-DT4tEPwA.js +0 -11
  231. package/dist/log_sum_exp-ngO0-4pK.js +0 -39
  232. package/dist/mat_mul-SjpJRLyL.js +0 -11
  233. package/dist/range-BklejeeW.js +0 -10
  234. package/dist/relu-CP0ZcxWO.js +0 -9
  235. package/dist/split-CVLc0w--.js +0 -9
  236. package/dist/squeeze-C7Z2srUo.js +0 -10
  237. package/dist/tensor-DJoc7gJU.js +0 -8
  238. package/dist/tensor1d-D11P_7Dp.js +0 -11

package/dist/tokeniser/CharTokeniser.js
@@ -1,77 +1,91 @@
- import { E as k } from "../index-DvYrXKkX.js";
+ import k, { SPECIALS as d } from "./BaseTokeniser.js";
  const u = ["<eos>", "<unk>"];
  class b extends k {
  vocabSize = 0;
  eosToken = 0;
+ bosToken = 0;
  unkToken = 0;
  vocab = [];
  cache = /* @__PURE__ */ new Map();
  _trained = !1;
- constructor(t) {
- if (super(), Array.isArray(t)) {
- if (this.vocab = t, this.vocab.length > 0)
- this.vocabSize = this.vocab.length, this.eosToken = this.vocab.indexOf("<eos>"), this.unkToken = this.vocab.indexOf(""), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((e) => e === "<pad>" ? "" : e), this.vocab.forEach((e, n) => {
- this.cache.set(e, n);
+ constructor(i) {
+ if (super(), Array.isArray(i)) {
+ if (this.vocab = i, this.vocab.length > 0)
+ this.vocabSize = this.vocab.length, d.forEach((t) => {
+ const e = this.vocab.indexOf(t);
+ e !== -1 && this.addSpecialToken(t, e);
+ }), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex("") ?? -1, this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<unk>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab = this.vocab.map((t) => t === "<pad>" ? "" : t), this.vocab.forEach((t, e) => {
+ this.cache.set(t, e);
  });
  else
  throw new Error("Vocab cannot be empty");
  this._trained = !0;
  } else
- this.vocabSize = t, this.vocab = new Array(this.vocabSize).fill(""), this.vocab[0] = "<eos>", this.vocab[1] = "", this.eosToken = 0, this.unkToken = 1, this.cache.set("<eos>", 0), this.cache.set("", 1);
+ this.vocabSize = i, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
+ this.cache.set(t, e);
+ }), this.cache.set("", this.unkToken);
+ }
+ addToken(i, t) {
+ if (this.cache.has(i))
+ return this.cache.get(i);
+ let e;
+ if (t !== void 0 ? e = t : (e = this.vocab.indexOf("", this.unkToken + 1), e === -1 && (e = this.vocabSize)), e >= this.vocabSize)
+ throw new Error("Vocab size exceeded");
+ return this.vocab[e] = i, this.cache.set(i, e), e;
  }
  get trained() {
  return this.vocab.length === this.vocabSize && this._trained;
  }
  destroy() {
  }
- async train(t) {
- const e = t.map((i) => i.split("")).flat(), n = new Set(e), s = Array.from(n), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
+ async train(i) {
+ const t = i.map((n) => n.split("")).flat(), e = new Set(t), s = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
  if (h === -1)
  return this.vocabSize;
  if (this._trained = !0, s.length > o) {
- const i = /* @__PURE__ */ new Map();
- e.forEach((a) => {
- i.set(a, (i.get(a) || 0) + 1);
- }), s.sort((a, r) => (i.get(a) || 0) - (i.get(r) || 0)), s.splice(0, s.length - o);
+ const n = /* @__PURE__ */ new Map();
+ t.forEach((a) => {
+ n.set(a, (n.get(a) || 0) + 1);
+ }), s.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), s.splice(0, s.length - o);
  }
  let c = h;
  if (c !== -1) {
- const i = new Set(this.vocab);
+ const n = new Set(this.vocab);
  for (const a of s)
- if (!i.has(a) && (this.vocab[c] = a, i.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
+ if (!n.has(a) && (this.vocab[c] = a, n.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
  break;
  }
- return this.cache.clear(), this.vocab.forEach((i, a) => {
- this.cache.set(i, a);
+ return this.cache.clear(), this.vocab.forEach((n, a) => {
+ this.cache.set(n, a);
  }), this.emit("trainStatus", "trained"), this.vocabSize;
  }
- async tokenise(t, e) {
+ tokenise(i, t) {
  if (!this.trained)
  throw new Error("Tokeniser not trained");
- return t.map((s) => e ? s.split("").map((h) => this.cache.get(h) ?? this.unkToken) : s.split("").map((h) => {
+ return i.map((s) => t ? s.split("").map((h) => this.cache.get(h) ?? this.unkToken) : s.split("").map((h) => {
  const o = this.cache.get(h);
  return o !== void 0 ? this.vocab[o] : "";
  }));
  }
- async detokenise(t) {
- return t.map((n) => n.map((s) => this.vocab[s]).join(""));
+ detokenise(i) {
+ return i.map((e) => Array.from(e).map((s) => this.vocab[s] || "").join(""));
  }
- async encode(t) {
- return (await this.tokenise([t], !0))[0];
+ encode(i) {
+ return this.tokenise([i], !0)[0];
  }
- async decode(t) {
- return (await this.detokenise([t]))[0];
+ decode(i) {
+ return this.detokenise([i])[0];
  }
  getVocab() {
  return this.vocab;
  }
- async getMerges() {
+ getMerges() {
  return [];
  }
- async createTrainingData(t, e = 5) {
- const n = await this.tokenise(t, !0), s = [], h = [];
- for (let o = 0; o < n.length - e; o++)
- s.push(...n[o].slice(0, e)), h.push(n[o + 1][0]);
+ async createTrainingData(i, t = 5) {
+ const e = await this.tokenise(i, !0), s = [], h = [];
+ for (let o = 0; o < e.length - t; o++)
+ s.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
  return [s, h];
  }
  }

package/dist/tokeniser/bpe.d.ts
@@ -1,6 +1,5 @@
- import { default as EE } from 'eventemitter3';
- import { ITokeniser } from './type';
- export default class BPETokeniser extends EE<'trainStatus'> implements ITokeniser {
+ import { default as BaseTokeniser } from './BaseTokeniser';
+ export default class BPETokeniser extends BaseTokeniser {
  private targetSize;
  private vocab;
  private vocabIndex;
@@ -8,19 +7,21 @@ export default class BPETokeniser extends EE<'trainStatus'> implements ITokenise
  private pretokenMap;
  constructor(vocabSize: number);
  constructor(vocab: string[], merges?: [string, string][]);
+ addToken(token: string, index?: number): number;
  destroy(): void;
  get trained(): boolean;
  get vocabSize(): number;
  get eosToken(): number;
+ get bosToken(): number;
  get unkToken(): number;
  train(text: string[]): Promise<number>;
  getVocab(): string[];
- getMerges(): Promise<[string, string][]>;
+ getMerges(): [string, string][];
  private tokeniseWord;
  private tokeniseStrings;
- tokenise(text: string[], numeric: true): Promise<number[][]>;
- tokenise(text: string[]): Promise<string[][]>;
- detokenise(tokens: number[][]): Promise<string[]>;
- encode(text: string): Promise<number[]>;
- decode(tokens: number[]): Promise<string>;
+ tokenise(text: string[], numeric: true): number[][];
+ tokenise(text: string[]): string[][];
+ detokenise(tokens: number[][]): string[];
+ encode(text: string): number[];
+ decode(tokens: number[]): string;
  }
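
The headline change in the tokeniser declarations is that tokenise, detokenise, encode, decode and getMerges are now synchronous, and addToken plus a bosToken accessor are added. The following is a minimal, non-authoritative sketch of the new surface; the import path assumes calling code sitting alongside dist/tokeniser/, and the vocabulary contents are purely illustrative.

// Sketch only: path and vocabulary are assumptions, not part of this diff.
import BPETokeniser from './bpe';

// Construct from an existing vocabulary (constructor overload shown above).
const tok = new BPETokeniser(['<eos>', '<bos>', '', 'h', 'e', 'l', 'o'], []);

// As of 0.12.0 these calls are synchronous (they previously returned Promises).
const ids: number[] = tok.encode('hello');
const text: string = tok.decode(ids);

// New surface: addToken registers a token (optionally at a fixed index),
// and bosToken joins eosToken as a special-token accessor.
const sepIndex = tok.addToken('<sep>');
console.log(tok.bosToken, sepIndex, text);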

package/dist/tokeniser/bpe.js
@@ -1,68 +1,80 @@
  import l from "../utilities/tokenParse.js";
- import { E as f } from "../index-DvYrXKkX.js";
+ import d, { SPECIALS as f } from "./BaseTokeniser.js";
  function u(o, e) {
  return `${o}-::-${e}`;
  }
- function k(o) {
+ function b(o) {
  const e = /* @__PURE__ */ new Map();
  for (let s = 0; s < o.length; s++) {
  const t = o[s];
- for (let r = 0; r < t.length - 1; r++) {
- const n = u(t[r], t[r + 1]), a = e.get(n) || {
- a: t[r],
- b: t[r + 1],
+ for (let n = 0; n < t.length - 1; n++) {
+ const r = u(t[n], t[n + 1]), i = e.get(r) || {
+ a: t[n],
+ b: t[n + 1],
  count: 0,
  instances: /* @__PURE__ */ new Set()
  };
- a.count += 1, a.instances.add(s), e.set(n, a);
+ i.count += 1, i.instances.add(s), e.set(r, i);
  }
  }
  return { pairs: e, tokens: o };
  }
- function h(o, e, s, t, r) {
- const n = u(e, s);
- if (o.pairs.has(n)) {
- const a = o.pairs.get(n);
- a.count += r, r > 0 ? a.instances.add(t) : a.count <= 0 ? o.pairs.delete(n) : a.instances.delete(t);
+ function h(o, e, s, t, n) {
+ const r = u(e, s);
+ if (o.pairs.has(r)) {
+ const i = o.pairs.get(r);
+ i.count += n, n > 0 ? i.instances.add(t) : i.count <= 0 ? o.pairs.delete(r) : i.instances.delete(t);
  } else
- o.pairs.set(n, { a: e, b: s, count: r, instances: /* @__PURE__ */ new Set([t]) });
+ o.pairs.set(r, { a: e, b: s, count: n, instances: /* @__PURE__ */ new Set([t]) });
  }
- function b(o) {
+ function k(o) {
  let e = null, s = 0;
  for (const t of o.pairs.values())
  t.count > s && (s = t.count, e = t);
  return e;
  }
- function d(o, e) {
+ function m(o, e) {
  return o.map((s) => {
  const t = [];
- for (let r = 0; r < s.length; r++)
- r < s.length - 1 && s[r] === e[0] && s[r + 1] === e[1] ? (t.push(e[0] + e[1]), r++) : t.push(s[r]);
+ for (let n = 0; n < s.length; n++)
+ n < s.length - 1 && s[n] === e[0] && s[n + 1] === e[1] ? (t.push(e[0] + e[1]), n++) : t.push(s[n]);
  return t;
  });
  }
- function m(o, e) {
+ function v(o, e) {
  e.instances.forEach((s) => {
- const t = o.tokens[s], r = [];
- for (let n = 0; n < t.length; n++)
- if (n < t.length - 1 && t[n] === e.a && t[n + 1] === e.b) {
- const a = e.a + e.b;
- r.push(a), n > 0 && (h(o, t[n - 1], e.a, s, -1), h(o, t[n - 1], a, s, 1)), n++, n < t.length - 1 && (h(o, e.b, t[n + 1], s, -1), h(o, a, t[n + 1], s, 1));
+ const t = o.tokens[s], n = [];
+ for (let r = 0; r < t.length; r++)
+ if (r < t.length - 1 && t[r] === e.a && t[r + 1] === e.b) {
+ const i = e.a + e.b;
+ n.push(i), r > 0 && (h(o, t[r - 1], e.a, s, -1), h(o, t[r - 1], i, s, 1)), r++, r < t.length - 1 && (h(o, e.b, t[r + 1], s, -1), h(o, i, t[r + 1], s, 1));
  } else
- r.push(t[n]);
- o.tokens[s] = r;
+ n.push(t[r]);
+ o.tokens[s] = n;
  }), o.pairs.delete(u(e.a, e.b));
  }
- class S extends f {
+ class x extends d {
  targetSize;
  vocab = /* @__PURE__ */ new Set();
  vocabIndex = /* @__PURE__ */ new Map();
  merges = [];
  pretokenMap = /* @__PURE__ */ new Map();
  constructor(e, s) {
- super(), Array.isArray(e) ? (e.forEach((t, r) => {
- this.vocab.add(t), this.vocabIndex.set(t, r);
- }), s && (this.merges = s), this.targetSize = e.length) : (this.vocab.add("<eos>"), this.vocab.add(""), this.targetSize = e);
+ super(), Array.isArray(e) ? (e.forEach((t, n) => {
+ this.vocab.add(t), this.vocabIndex.set(t, n);
+ }), s && (this.merges = s), this.targetSize = e.length, f.forEach((t) => {
+ const n = e.indexOf(t);
+ n !== -1 && this.addSpecialToken(t, n);
+ })) : (this.addSpecialTokens(), this.targetSize = e);
+ }
+ addToken(e, s) {
+ if (this.vocab.has(e))
+ return this.vocabIndex.get(e);
+ {
+ this.vocab.add(e);
+ const t = s !== void 0 ? s : this.vocab.size - 1;
+ return this.vocabIndex.set(e, t), t;
+ }
  }
  destroy() {
  this.vocab.clear(), this.vocabIndex.clear(), this.merges = [], this.pretokenMap.clear();
@@ -76,58 +88,61 @@ class S extends f {
  get eosToken() {
  return this.vocabIndex.get("<eos>") ?? 0;
  }
+ get bosToken() {
+ return this.vocabIndex.get("<bos>") ?? 0;
+ }
  get unkToken() {
  return this.vocabIndex.get("") ?? 1;
  }
  async train(e) {
- const s = e.map((i) => l(i)).flat(1), t = new Set(s);
- this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.vocab.add("<eos>"), this.vocab.add("");
- const r = Array.from(t), n = r.map((i) => Array.from(i).map((c) => (this.vocab.add(c), c))), a = k(n);
+ const s = e.map((a) => l(a)).flat(1), t = new Set(s);
+ this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.addSpecialTokens();
+ const n = Array.from(t), r = n.map((a) => Array.from(a).map((c) => (this.vocab.add(c), c))), i = b(r);
  for (; this.vocab.size < this.targetSize && this.merges.length < this.targetSize; ) {
- const i = b(a);
- if (!i)
+ const a = k(i);
+ if (!a)
  break;
- this.merges.push([i.a, i.b]), this.vocab.add(i.a + i.b), m(a, i);
+ this.merges.push([a.a, a.b]), this.vocab.add(a.a + a.b), v(i, a);
  }
- r.forEach((i, p) => {
- const c = n[p];
- this.pretokenMap.set(i, c);
+ n.forEach((a, p) => {
+ const c = r[p];
+ this.pretokenMap.set(a, c);
  }), this.vocabIndex.clear();
  let g = 0;
- for (const i of this.vocab.keys())
- this.vocabIndex.set(i, g++);
+ for (const a of this.vocab.keys())
+ this.vocabIndex.set(a, g++);
  return this.emit("trainStatus", "trained"), this.vocab.size;
  }
  getVocab() {
  return Array.from(this.vocab);
  }
- async getMerges() {
+ getMerges() {
  return this.merges;
  }
  tokeniseWord(e) {
  let s = Array.from(e);
  return this.merges.forEach((t) => {
- s = d([s], t)[0];
+ s = m([s], t)[0];
  }), this.pretokenMap.set(e, s), s;
  }
  tokeniseStrings(e) {
- return e.map((s) => l(s).map((n) => this.pretokenMap.has(n) ? this.pretokenMap.get(n) : this.tokeniseWord(n)).flat(1));
+ return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
  }
- async tokenise(e, s) {
+ tokenise(e, s) {
  const t = this.tokeniseStrings(e);
- return s ? t.map((r) => r.map((n) => this.vocabIndex.get(n) ?? this.unkToken)) : t.map((r) => r.map((n) => this.vocab.has(n) ? n : ""));
+ return s ? t.map((n) => n.map((r) => this.vocabIndex.get(r) ?? this.unkToken)) : t.map((n) => n.map((r) => this.vocab.has(r) ? r : ""));
  }
- async detokenise(e) {
+ detokenise(e) {
  const s = this.getVocab();
- return e.map((r) => r.map((n) => s[n]).join(""));
+ return e.map((n) => n.map((r) => s[r]).join(""));
  }
- async encode(e) {
- return (await this.tokenise([e], !0))[0];
+ encode(e) {
+ return this.tokenise([e], !0)[0];
  }
- async decode(e) {
- return (await this.detokenise([e]))[0];
+ decode(e) {
+ return this.detokenise([e])[0];
  }
  }
  export {
- S as default
+ x as default
  };

package/dist/tokeniser/type.d.ts
@@ -1,14 +1,23 @@
  import { default as EE } from 'eventemitter3';
+ export type Roles = 'user' | 'assistant' | 'system';
+ export interface Conversation {
+ role: Roles;
+ content: string;
+ }
  export interface ITokeniser extends EE<'trainStatus'> {
  train(text: string[]): Promise<number>;
- tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
- detokenise(tokens: string[][] | number[][]): Promise<string[]>;
  getVocab(): string[];
- getMerges(): Promise<[string, string][]>;
+ getMerges(): [string, string][];
  destroy(): void;
- encode(text: string): Promise<number[]>;
- decode(tokens: number[]): Promise<string>;
+ encode(text: string): number[];
+ encodeConversation(conversation: Conversation[], completion?: boolean): number[];
+ encodeSequence(text: string): number[];
+ decode(tokens: number[] | Uint16Array): string;
+ decodeConversation(tokens: number[] | Uint16Array): Conversation[];
  vocabSize: number;
  eosToken: number;
+ bosToken: number;
  trained: boolean;
+ getSpecialTokenIndex(token: string): number | undefined;
+ isSpecialToken(index: number): boolean;
  }
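
The ITokeniser interface now models chat-style data directly: a Conversation is a { role, content } pair, and tokenisers expose encodeConversation/decodeConversation plus special-token helpers. A hedged sketch of calling code against this interface follows; the tokeniser instance and its training are assumed, and the exact token layout produced by encodeConversation (including what the completion flag does) is not specified by this diff.

// Sketch only: assumes code sitting alongside dist/tokeniser/ and an
// already-trained tokeniser instance.
import type { Conversation, ITokeniser } from './type';

declare const tok: ITokeniser;

const chat: Conversation[] = [
  { role: 'system', content: 'You are a helpful assistant.' },
  { role: 'user', content: 'Hello!' },
];

// Encode a whole conversation in one call (synchronous in 0.12.0).
const tokens: number[] = tok.encodeConversation(chat, true);

// decode() now also accepts Uint16Array, matching the typed-array pipeline.
const asText: string = tok.decode(Uint16Array.from(tokens));
const turns: Conversation[] = tok.decodeConversation(tokens);
console.log(asText, turns, tok.isSpecialToken(tok.bosToken));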

package/dist/training/Adam.js
@@ -1,7 +1,7 @@
  import { adamAdjust as b } from "../ops/adamAdjust.js";
  import { adamMoments as d } from "../ops/adamMoments.js";
- import { O as g, e as h, t as o, d as B } from "../index-DOvlwCh-.js";
- import { z as M } from "../zeros-KnWaWf-X.js";
+ import { O as g, e as h, t as o, d as B } from "../index-Duu1Lvvv.js";
+ import { z as M } from "../zeros-5YROwwUH.js";
  class R extends g {
  constructor(t, a, e, s, i = null) {
  super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());

package/dist/training/AdamExt.js
@@ -1,4 +1,4 @@
- import { m as r, b as c, c as h, e as o } from "../index-DOvlwCh-.js";
+ import { m as r, b as c, c as h, e as o } from "../index-Duu1Lvvv.js";
  import { AdamOptimizer as g } from "./Adam.js";
  class y extends g {
  constructor(t, e, s, i, a) {

package/dist/training/DatasetBuilder.d.ts
@@ -1,14 +1,14 @@
  import { Tensor } from '@tensorflow/tfjs-core';
- import { ITokeniser } from '../tokeniser/type';
+ import { Conversation, ITokeniser } from '../tokeniser/type';
  import { Dataset } from '@tensorflow/tfjs-data';
  export declare const PAGE_FACTOR = 8;
- export declare function flattenTokens(textData: string[], tokenizer: ITokeniser): Promise<number[]>;
+ export declare function flattenTokens(textData: Conversation[][], tokenizer: ITokeniser): Promise<number[]>;
  export declare class DatasetBuilder {
  tokenizer: ITokeniser;
  blockSize: number;
  private pageSize;
  constructor(tokenizer: ITokeniser, blockSize?: number);
- createTextDataset(flatTokens: number[], batchSize?: number, masked?: Set<number>, invertMask?: boolean): Promise<Dataset<{
+ createTextDataset(flatTokens: Uint16Array, batchSize?: number, masked?: Set<number>, invertMask?: boolean): Promise<Dataset<{
  xs: Tensor;
  ys: Tensor;
  }>>;
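
Dataset construction now starts from conversations rather than raw strings, and createTextDataset expects a Uint16Array of token ids. A rough sketch of the flow implied by these declarations; converting flattenTokens' number[] result to Uint16Array is an assumption about the caller's side, not something this diff shows.

// Sketch only: paths assume code sitting alongside dist/training/.
import { DatasetBuilder, flattenTokens } from './DatasetBuilder';
import type { Conversation, ITokeniser } from '../tokeniser/type';

declare const tok: ITokeniser;
declare const conversations: Conversation[][]; // one Conversation[] per document

async function buildDataset() {
  // flattenTokens encodes each conversation and concatenates the ids.
  const flat = await flattenTokens(conversations, tok);

  // createTextDataset now takes a Uint16Array rather than number[].
  const builder = new DatasetBuilder(tok, 128);
  const dataset = await builder.createTextDataset(Uint16Array.from(flat), 32);
  return dataset; // Dataset of { xs, ys } int32 tensors
}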

package/dist/training/DatasetBuilder.js
@@ -1,67 +1,63 @@
- import { t as g } from "../index-DOvlwCh-.js";
- import { d as u, i as d } from "../dataset-BcwmTGYc.js";
+ import { t as y } from "../index-Duu1Lvvv.js";
+ import { d as g, i as z } from "../dataset-CVIJu7Xa.js";
  import "../index-Cp39cXWe.js";
- function z(r) {
- return u(async () => {
- const t = await r();
- return d(() => t.next());
+ function b(a) {
+ return g(async () => {
+ const t = await a();
+ return z(() => t.next());
  });
  }
- const S = 8;
- async function y(r, t) {
- const s = await Promise.all(r.map((e) => t.encode(e))), o = t.eosToken >= 0, a = s.map((e) => o ? [...e, t.eosToken] : e).flat();
- for (const e of a)
- if (e < 0 || e >= t.vocabSize)
- throw new Error(`Invalid token index ${e} found in tokenised data`);
- return a;
+ const f = 8;
+ async function w(a, t) {
+ return (await Promise.all(a.map((s) => t.encodeConversation(s)))).flat();
  }
- class w {
+ class m {
  tokenizer;
  blockSize;
  pageSize;
- constructor(t, s = 128) {
- this.tokenizer = t, this.blockSize = s, this.pageSize = s * S;
+ constructor(t, r = 128) {
+ this.tokenizer = t, this.blockSize = r, this.pageSize = r * f;
  }
  // Create dataset from text files
- async createTextDataset(t, s = 32, o, a) {
+ async createTextDataset(t, r = 32, i, s) {
  if (t.length < this.blockSize + 1)
  throw new Error(`Not enough tokens (${t.length}) for block size ${this.blockSize}`);
- if (o && o.size > t.length / this.pageSize / 2)
+ if (i && i.size > t.length / this.pageSize / 2)
  throw new Error("Too many masked pages - would leave insufficient training data");
- const e = (function* () {
- if (o && a) {
- const i = Array.from(o);
+ const l = (function* () {
+ if (i && s) {
+ const e = Array.from(i);
  for (; ; ) {
- const c = Math.floor(Math.random() * i.length), l = Math.floor(Math.random() * this.pageSize), n = i[c] * this.pageSize + l;
+ const o = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), n = e[o] * this.pageSize + h;
  if (n + this.blockSize + 1 > t.length)
  continue;
- const h = t.slice(n, n + this.blockSize), f = t.slice(n + 1, n + this.blockSize + 1);
- yield { xs: h, ys: f };
+ const c = new Int32Array(t.subarray(n, n + this.blockSize)), u = new Int32Array(t.subarray(n + 1, n + this.blockSize + 1));
+ yield { xs: c, ys: u };
  }
  } else
  for (; ; ) {
- const i = Math.floor(Math.random() * (t.length - this.blockSize - 1));
- if (o) {
- const n = Math.floor(i / this.pageSize), h = o.has(n);
- if (h && !a || !h && a)
+ const e = Math.floor(Math.random() * (t.length - this.blockSize - 1));
+ if (i) {
+ const n = Math.floor(e / this.pageSize), c = i.has(n);
+ if (c && !s || !c && s)
  continue;
  }
- const c = t.slice(i, i + this.blockSize), l = t.slice(i + 1, i + this.blockSize + 1);
- yield { xs: c, ys: l };
+ const o = new Int32Array(t.subarray(e, e + this.blockSize)), h = new Int32Array(t.subarray(e + 1, e + this.blockSize + 1));
+ yield { xs: o, ys: h };
  }
  }).bind(this);
- return z(e).batch(s).map((i) => {
- const c = i;
- return g(() => ({
- xs: c.xs.cast("int32"),
- ys: c.ys.cast("int32")
+ return b(l).batch(r).map((e) => {
+ const o = e;
+ return y(() => ({
+ xs: o.xs.cast("int32"),
+ ys: o.ys.cast("int32")
  // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
  }));
  }).prefetch(2);
  }
  }
  export {
- w as DatasetBuilder,
- S as PAGE_FACTOR,
- y as flattenTokens
+ m as DatasetBuilder,
+ f as PAGE_FACTOR,
+ w as flattenTokens
  };

package/dist/training/FullTrainer.js
@@ -1,6 +1,6 @@
  import b from "./Trainer.js";
  import L from "./Evaluator.js";
- import { d as w } from "../index-DOvlwCh-.js";
+ import { d as w } from "../index-Duu1Lvvv.js";
  import y from "../utilities/profile.js";
  import { createTensorStatistics as D } from "../checks/weights.js";
  const T = {

package/dist/training/Trainer.d.ts
@@ -1,11 +1,12 @@
  import { ITokeniser } from '../tokeniser/type';
  import { DatasetBuilder } from './DatasetBuilder';
  import { default as AdamExt } from './AdamExt';
- import { NamedTensorMap, TensorContainer } from '@tensorflow/tfjs-core/dist/tensor_types';
+ import { NamedTensorMap } from '@tensorflow/tfjs-core/dist/tensor_types';
  import { Scalar, Tensor } from '@tensorflow/tfjs-core';
  import { Dataset } from '@tensorflow/tfjs-data';
  import { default as Model, ModelForwardAttributes } from '../models/model';
  import { TensorStatistics } from '../checks/weights';
+ import { Task } from './tasks/Task';
  export interface TrainingLogEntry {
  loss: number;
  valLoss?: number;
@@ -93,7 +94,7 @@ export default abstract class GPTTrainer {
  log: TrainingLogEntry;
  progress: TrainingProgress;
  }>;
- createTrainValidationSplit(textData: string[], batchSize?: number, validationSplit?: number): Promise<{
+ createTrainValidationSplit(tasks: Task[], batchSize?: number, validationSplit?: number): Promise<{
  trainDataset: Dataset<{
  xs: Tensor;
  ys: Tensor;
@@ -102,7 +103,7 @@ export default abstract class GPTTrainer {
  xs: Tensor;
  ys: Tensor;
  }>;
+ size: number;
  }>;
- createDataset(textData: string[], batchSize?: number): Promise<Dataset<TensorContainer>>;
  dispose(): void;
  }
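
Correspondingly, the trainer's split helper is now driven by Task objects (see the new training/tasks/* files in the list above) rather than raw strings, and createDataset is removed. A sketch of the call shape follows; the task construction is left abstract because those constructors are not shown in this diff, and the second dataset field of the returned object is not visible here either.

// Sketch only: assumes code sitting alongside dist/training/ and an existing
// concrete trainer instance; Task values would come from training/tasks/.
import type { Task } from './tasks/Task';
import type GPTTrainer from './Trainer';

declare const trainer: GPTTrainer;
declare const tasks: Task[];

async function prepare() {
  const split = await trainer.createTrainValidationSplit(tasks, 32, 0.1);
  // split.trainDataset is a Dataset<{ xs, ys }>; the result also carries a
  // validation dataset and a numeric `size` field (added in 0.12.0).
  console.log('split size:', split.size);
  return split.trainDataset;
}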