@genai-fi/nanogpt 0.11.0 → 0.12.1

Files changed (236)
  1. package/dist/Generator.js +29 -29
  2. package/dist/{RealDiv-Ds-jvL09.js → RealDiv-C8neBwFi.js} +17 -17
  3. package/dist/{Reshape-Cd6e-Otn.js → Reshape-Bd4V_4X7.js} +1 -1
  4. package/dist/{Reshape-Ct266DEk.js → Reshape-Ck29jQSY.js} +7 -7
  5. package/dist/TeachableLLM.d.ts +2 -1
  6. package/dist/TeachableLLM.js +9 -9
  7. package/dist/Trainer.d.ts +4 -2
  8. package/dist/Trainer.js +12 -9
  9. package/dist/{axis_util-DofAuy0p.js → axis_util-DGqbT-FX.js} +1 -1
  10. package/dist/backend.js +2 -2
  11. package/dist/{backend_util-C7NWHpv7.js → backend_util-DC3rBo_H.js} +18 -18
  12. package/dist/{backend_webgpu-B0Vls736.js → backend_webgpu-mbhNnlx9.js} +10 -10
  13. package/dist/{broadcast_to-DDaNMbX7.js → broadcast_to-D1Dmg2Oz.js} +2 -2
  14. package/dist/checks/appendCache.js +2 -2
  15. package/dist/checks/attentionMask.js +3 -3
  16. package/dist/checks/gelu.js +2 -2
  17. package/dist/checks/matMulGelu.js +2 -2
  18. package/dist/checks/normRMS.js +4 -4
  19. package/dist/checks/normRMSGrad.js +3 -3
  20. package/dist/checks/packUnpack.js +2 -2
  21. package/dist/checks/qkv.js +2 -2
  22. package/dist/checks/rope.js +2 -2
  23. package/dist/clip_by_value-fg2aKzUy.js +12 -0
  24. package/dist/{complex-DClmWqJt.js → complex-Cyg-eQeZ.js} +1 -1
  25. package/dist/concat-CSm2rMwe.js +17 -0
  26. package/dist/{concat_util-CHsJFZJJ.js → concat_util-D0je5Ppu.js} +1 -1
  27. package/dist/{dataset-DcjWqUVQ.js → dataset-CVIJu7Xa.js} +3 -3
  28. package/dist/{dropout-OxuaJz6z.js → dropout-DLhSMNTZ.js} +14 -14
  29. package/dist/expand_dims-ChkuOp6I.js +11 -0
  30. package/dist/{exports_initializers-eS9QJ6ut.js → exports_initializers-1KWPiStI.js} +1 -1
  31. package/dist/{floor-DIb-lN_u.js → floor-BRMPgeIs.js} +1 -1
  32. package/dist/gather-BSULDalH.js +9 -0
  33. package/dist/{gelu-DqTbCx5x.js → gelu-BK1k-n1i.js} +1 -1
  34. package/dist/{gpgpu_math-CJcbnKPC.js → gpgpu_math-BJSTk_mW.js} +25 -25
  35. package/dist/{index-Dj5TkmPY.js → index-BBVLAXZD.js} +14 -14
  36. package/dist/{index-D0RBWjq8.js → index-Duu1Lvvv.js} +45 -45
  37. package/dist/{kernel_funcs_utils-CSaumNDs.js → kernel_funcs_utils-BtYrPoJu.js} +8 -8
  38. package/dist/layers/BaseLayer.js +2 -2
  39. package/dist/layers/CausalSelfAttention.js +6 -6
  40. package/dist/layers/MLP.js +4 -4
  41. package/dist/layers/PositionEmbedding.js +5 -5
  42. package/dist/layers/RMSNorm.js +3 -3
  43. package/dist/layers/RoPECache.js +4 -4
  44. package/dist/layers/TiedEmbedding.js +6 -6
  45. package/dist/layers/TransformerBlock.js +1 -1
  46. package/dist/loader/loadTransformers.js +1 -1
  47. package/dist/loader/oldZipLoad.js +17 -17
  48. package/dist/{log_sum_exp-VLZgbFAH.js → log_sum_exp-CVqLsVLl.js} +4 -4
  49. package/dist/main.d.ts +9 -0
  50. package/dist/main.js +69 -58
  51. package/dist/{matMul16-cDxwemKj.js → matMul16-xswmhSuF.js} +7 -7
  52. package/dist/{matMulGelu-B2s_80-H.js → matMulGelu-BpvgnYG8.js} +26 -26
  53. package/dist/mat_mul-Bn2BDpT4.js +11 -0
  54. package/dist/{mod-PrOKlFxH.js → mod-B4AUd1Np.js} +1 -1
  55. package/dist/models/NanoGPTV1.js +2 -2
  56. package/dist/models/model.js +9 -9
  57. package/dist/{ones-BX_wEgzB.js → ones-CBI1AQjb.js} +3 -3
  58. package/dist/ops/adamAdjust.js +1 -1
  59. package/dist/ops/adamMoments.js +1 -1
  60. package/dist/ops/add16.js +1 -1
  61. package/dist/ops/appendCache.js +3 -3
  62. package/dist/ops/attentionMask.js +1 -1
  63. package/dist/ops/concat16.js +2 -2
  64. package/dist/ops/cpu/adamAdjust.js +7 -7
  65. package/dist/ops/cpu/adamMoments.js +5 -5
  66. package/dist/ops/cpu/appendCache.js +6 -6
  67. package/dist/ops/cpu/attentionMask.js +6 -6
  68. package/dist/ops/cpu/fusedSoftmax.js +5 -5
  69. package/dist/ops/cpu/gatherSub.js +7 -7
  70. package/dist/ops/cpu/gelu.js +5 -5
  71. package/dist/ops/cpu/matMul16.js +2 -2
  72. package/dist/ops/cpu/matMulGelu.js +3 -3
  73. package/dist/ops/cpu/matMulMul.js +5 -5
  74. package/dist/ops/cpu/mulDropout.js +1 -1
  75. package/dist/ops/cpu/normRMS.js +5 -5
  76. package/dist/ops/cpu/qkv.js +3 -3
  77. package/dist/ops/cpu/rope.js +9 -9
  78. package/dist/ops/cpu/scatterSub.js +5 -5
  79. package/dist/ops/dot16.js +2 -2
  80. package/dist/ops/gatherSub.js +1 -1
  81. package/dist/ops/gelu.js +2 -2
  82. package/dist/ops/grads/add16.js +1 -1
  83. package/dist/ops/grads/attentionMask.js +2 -2
  84. package/dist/ops/grads/gelu.js +2 -2
  85. package/dist/ops/grads/matMul16.js +3 -3
  86. package/dist/ops/grads/matMulGelu.js +5 -5
  87. package/dist/ops/grads/normRMS.js +6 -6
  88. package/dist/ops/grads/pack16.js +3 -3
  89. package/dist/ops/grads/qkv.js +9 -9
  90. package/dist/ops/grads/rope.js +2 -2
  91. package/dist/ops/grads/softmax16.js +1 -1
  92. package/dist/ops/grads/unpack16.js +2 -2
  93. package/dist/ops/matMul16.js +3 -3
  94. package/dist/ops/matMulGelu.js +2 -2
  95. package/dist/ops/matMulMul.js +1 -1
  96. package/dist/ops/mul16.js +1 -1
  97. package/dist/ops/mulDrop.js +1 -1
  98. package/dist/ops/normRMS.js +1 -1
  99. package/dist/ops/pack16.js +2 -2
  100. package/dist/ops/qkv.js +1 -1
  101. package/dist/ops/reshape16.js +6 -6
  102. package/dist/ops/rope.js +2 -2
  103. package/dist/ops/scatterSub.js +1 -1
  104. package/dist/ops/slice16.js +2 -2
  105. package/dist/ops/softmax16.js +1 -1
  106. package/dist/ops/sub16.js +1 -1
  107. package/dist/ops/sum16.js +2 -2
  108. package/dist/ops/transpose16.js +6 -6
  109. package/dist/ops/unpack16.js +2 -2
  110. package/dist/ops/webgl/adamAdjust.js +2 -2
  111. package/dist/ops/webgl/adamMoments.js +1 -1
  112. package/dist/ops/webgl/appendCache.js +1 -1
  113. package/dist/ops/webgl/attentionMask.js +4 -4
  114. package/dist/ops/webgl/fusedSoftmax.js +6 -6
  115. package/dist/ops/webgl/gatherSub.js +1 -1
  116. package/dist/ops/webgl/gelu.js +2 -2
  117. package/dist/ops/webgl/log.js +3 -3
  118. package/dist/ops/webgl/matMul16.js +10 -10
  119. package/dist/ops/webgl/matMulGelu.js +4 -4
  120. package/dist/ops/webgl/matMulMul.js +2 -2
  121. package/dist/ops/webgl/mulDropout.js +1 -1
  122. package/dist/ops/webgl/normRMS.js +2 -2
  123. package/dist/ops/webgl/qkv.js +1 -1
  124. package/dist/ops/webgl/rope.js +4 -4
  125. package/dist/ops/webgl/scatterSub.js +1 -1
  126. package/dist/ops/webgpu/adamAdjust.js +3 -3
  127. package/dist/ops/webgpu/adamMoments.js +5 -5
  128. package/dist/ops/webgpu/add16.js +1 -1
  129. package/dist/ops/webgpu/appendCache.js +3 -3
  130. package/dist/ops/webgpu/attentionMask.js +5 -5
  131. package/dist/ops/webgpu/attentionMask32_program.js +2 -2
  132. package/dist/ops/webgpu/concat16.js +5 -5
  133. package/dist/ops/webgpu/gatherSub.js +3 -3
  134. package/dist/ops/webgpu/gelu.js +3 -3
  135. package/dist/ops/webgpu/matMul16.js +19 -19
  136. package/dist/ops/webgpu/matMul16_program.js +2 -2
  137. package/dist/ops/webgpu/mul16.js +1 -1
  138. package/dist/ops/webgpu/normRMS.js +2 -2
  139. package/dist/ops/webgpu/normRMSGrad.js +4 -4
  140. package/dist/ops/webgpu/pack16.js +3 -3
  141. package/dist/ops/webgpu/pack16_program.js +2 -2
  142. package/dist/ops/webgpu/qkv.js +4 -4
  143. package/dist/ops/webgpu/rope.js +3 -3
  144. package/dist/ops/webgpu/scatterSub.js +3 -3
  145. package/dist/ops/webgpu/slice16.js +4 -4
  146. package/dist/ops/webgpu/softmax16.js +4 -4
  147. package/dist/ops/webgpu/softmax16_program.js +2 -2
  148. package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
  149. package/dist/ops/webgpu/softmax16grad.js +1 -1
  150. package/dist/ops/webgpu/sub16.js +1 -1
  151. package/dist/ops/webgpu/sum16.js +5 -5
  152. package/dist/ops/webgpu/transpose16.js +2 -2
  153. package/dist/ops/webgpu/transpose16_program.js +2 -2
  154. package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
  155. package/dist/ops/webgpu/unpack16.js +5 -5
  156. package/dist/ops/webgpu/utils/binary_op.js +3 -3
  157. package/dist/ops/webgpu/utils/reductions.js +4 -4
  158. package/dist/{ops-FJapAPfm.js → ops-C2_OXuZ4.js} +35 -35
  159. package/dist/{pack16-k4jq6aMX.js → pack16-atD0eYRm.js} +6 -6
  160. package/dist/patches/webgpu_backend.js +8 -8
  161. package/dist/patches/webgpu_base.js +1 -1
  162. package/dist/patches/webgpu_program.js +2 -2
  163. package/dist/{random_width-UGQn4OWb.js → random_width-BN4wGJaW.js} +33 -33
  164. package/dist/{range-CuGvVN2c.js → range-DKmP1-OQ.js} +1 -1
  165. package/dist/relu-BsXmGzzu.js +9 -0
  166. package/dist/{reshape-CkjKPPqB.js → reshape-BI0yzp1T.js} +1 -1
  167. package/dist/{resize_nearest_neighbor-DB8k9KN_.js → resize_nearest_neighbor-BA_BX-ub.js} +25 -25
  168. package/dist/{rope-BmZmp9uP.js → rope-DJ7Y7c-u.js} +1 -1
  169. package/dist/{scatter_nd_util-BY22Cc-C.js → scatter_nd_util-k9MUVUkn.js} +1 -1
  170. package/dist/{selu_util-BuLbmbrl.js → selu_util-DyW0X1WG.js} +5 -5
  171. package/dist/{shared-B7USJZgw.js → shared-Q3BS6T03.js} +1 -1
  172. package/dist/{shared-BQboIImQ.js → shared-nnSWpC3u.js} +6 -6
  173. package/dist/{slice-Aqy7KbJh.js → slice-wBNvzVyz.js} +3 -3
  174. package/dist/{slice_util-D8CQRenR.js → slice_util-zN8KFC5I.js} +7 -7
  175. package/dist/{softmax-faLoUZVT.js → softmax-DfuYyjMh.js} +1 -1
  176. package/dist/split-BYrLboMq.js +9 -0
  177. package/dist/squeeze-Bk8Brcct.js +10 -0
  178. package/dist/{stack-WJK22CFn.js → stack-CDWShFHF.js} +1 -1
  179. package/dist/{step-dXR33iOg.js → step-BS5JXRR6.js} +14 -14
  180. package/dist/sum-BPUfDB2X.js +11 -0
  181. package/dist/{tensor-BQqrDvpx.js → tensor-CEt9Nm2s.js} +1 -1
  182. package/dist/{tensor1d-LxP9asMm.js → tensor1d-Cc_KCIDg.js} +1 -1
  183. package/dist/{tensor2d-BN1sSfQO.js → tensor2d-BN97fF71.js} +1 -1
  184. package/dist/{tensor4d-DVwr7pLF.js → tensor4d-vuDDgdUI.js} +1 -1
  185. package/dist/{tfjs_backend-Vi4JfLzT.js → tfjs_backend-806hyYve.js} +36 -36
  186. package/dist/tile-OWUvpIVt.js +11 -0
  187. package/dist/tokeniser/BaseTokeniser.d.ts +6 -8
  188. package/dist/tokeniser/BaseTokeniser.js +6 -6
  189. package/dist/tokeniser/CharTokeniser.d.ts +6 -6
  190. package/dist/tokeniser/CharTokeniser.js +26 -26
  191. package/dist/tokeniser/bpe.d.ts +6 -6
  192. package/dist/tokeniser/bpe.js +9 -9
  193. package/dist/tokeniser/type.d.ts +6 -8
  194. package/dist/training/Adam.js +2 -2
  195. package/dist/training/AdamExt.js +1 -1
  196. package/dist/training/DatasetBuilder.d.ts +1 -1
  197. package/dist/training/DatasetBuilder.js +29 -29
  198. package/dist/training/FullTrainer.js +1 -1
  199. package/dist/training/Trainer.d.ts +5 -4
  200. package/dist/training/Trainer.js +37 -40
  201. package/dist/training/sparseCrossEntropy.js +3 -3
  202. package/dist/training/tasks/ConversationTask.d.ts +11 -0
  203. package/dist/training/tasks/ConversationTask.js +26 -0
  204. package/dist/training/tasks/PretrainingTask.d.ts +11 -0
  205. package/dist/training/tasks/PretrainingTask.js +34 -0
  206. package/dist/training/tasks/StartSentenceTask.d.ts +12 -0
  207. package/dist/training/tasks/StartSentenceTask.js +42 -0
  208. package/dist/training/tasks/Task.d.ts +8 -0
  209. package/dist/training/tasks/Task.js +44 -0
  210. package/dist/{transpose-JawVKyZy.js → transpose-BUkQCJp9.js} +7 -7
  211. package/dist/{unsorted_segment_sum-LAbmE9G4.js → unsorted_segment_sum-BljxHhCY.js} +78 -78
  212. package/dist/utilities/dummy.js +3 -3
  213. package/dist/utilities/multinomialCPU.js +2 -2
  214. package/dist/utilities/packed.js +1 -1
  215. package/dist/utilities/performance.js +1 -1
  216. package/dist/utilities/profile.js +1 -1
  217. package/dist/utilities/safetensors.js +2 -2
  218. package/dist/utilities/sentences.d.ts +1 -1
  219. package/dist/utilities/sentences.js +11 -11
  220. package/dist/utilities/weights.js +2 -2
  221. package/dist/{variable-DQ9yYgEU.js → variable-DPt_Iuog.js} +1 -1
  222. package/dist/{webgpu_program-CAE4RICo.js → webgpu_program-BpWRlghH.js} +1 -1
  223. package/dist/{webgpu_util-BdovYhXr.js → webgpu_util-DMiKzzQM.js} +7 -7
  224. package/dist/{zeros-DeiE2zTa.js → zeros-5YROwwUH.js} +2 -2
  225. package/dist/{zeros_like-BAz3iKru.js → zeros_like-De4n1C3m.js} +57 -57
  226. package/package.json +1 -1
  227. package/dist/clip_by_value-Dn5tzexi.js +0 -12
  228. package/dist/concat-C6X3AAlQ.js +0 -17
  229. package/dist/expand_dims-BzfJK2uc.js +0 -11
  230. package/dist/gather-BcO5UQNJ.js +0 -9
  231. package/dist/mat_mul-DxpNTCRz.js +0 -11
  232. package/dist/relu-Cf80uA2p.js +0 -9
  233. package/dist/split-BNz5jcGc.js +0 -9
  234. package/dist/squeeze--YMgaAAf.js +0 -10
  235. package/dist/sum-BdplSvq_.js +0 -11
  236. package/dist/tile-CvN_LyVr.js +0 -11
package/dist/tokeniser/BaseTokeniser.js

@@ -24,11 +24,11 @@ class k extends r {
   addSpecialToken(e, t) {
   this.specialTokens.set(e, t), this.specialTokenSet.add(t);
   }
- async encodeSequence(e) {
- const t = await this.encode(e);
+ encodeSequence(e) {
+ const t = this.encode(e);
   return [this.bosToken, ...t, this.eosToken];
   }
- async encodeConversation(e, t) {
+ encodeConversation(e, t) {
   const s = [[this.bosToken]], a = [
   this.getSpecialTokenIndex("<|user_start|>"),
   this.getSpecialTokenIndex("<|assistant_start|>"),
@@ -39,7 +39,7 @@ class k extends r {
   this.getSpecialTokenIndex("<|system_end|>")
   ];
   for (const i of e) {
- const c = await this.encode(i.content);
+ const c = this.encode(i.content);
   switch (i.role) {
   case "user":
   s.push([a[0]]);
@@ -66,7 +66,7 @@ class k extends r {
   const o = s.flat();
   return t ? o.push(a[1]) : o.push(this.eosToken), o;
   }
- async decodeConversation(e) {
+ decodeConversation(e) {
   const t = [];
   let s = 0;
   for (; s < e.length; ) {
@@ -77,7 +77,7 @@ class k extends r {
   const o = [];
   for (; s < e.length && e[s] !== this.getSpecialTokenIndex(`<|${n}_end|>`); )
   o.push(e[s]), s++;
- const i = await this.decode(o);
+ const i = this.decode(o);
   t.push({ role: n, content: i });
   }
   s++;
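
Note: with these hunks the conversation codec on the base tokeniser is fully synchronous. A usage sketch of the 0.12.1 surface (`tok` stands for any concrete tokeniser instance; the exact token layout is inferred from the special-token pushes in this hunk):

  // Sketch, not from the package docs.
  const convo = [
    { role: "user", content: "Hello" },
    { role: "assistant", content: "Hi there" },
  ];
  const full = tok.encodeConversation(convo);
  // ≈ [bos, <|user_start|>, …, <|user_end|>, <|assistant_start|>, …, <|assistant_end|>, eos]

  // completion = true ends the sequence on <|assistant_start|> instead of eos,
  // leaving it open for the model to generate the reply:
  const prompt = tok.encodeConversation(convo, true);

  // decodeConversation walks the *_start/*_end markers back into
  // { role, content } pairs, and now also accepts a Uint16Array:
  const roundTrip = tok.decodeConversation(full);
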
package/dist/tokeniser/CharTokeniser.d.ts

@@ -13,12 +13,12 @@ export default class CharTokeniser extends BaseTokeniser {
   get trained(): boolean;
   destroy(): void;
   train(text: string[]): Promise<number>;
- tokenise(text: string[], numeric: true): Promise<number[][]>;
- tokenise(text: string[]): Promise<string[][]>;
- detokenise(tokens: number[][]): Promise<string[]>;
- encode(text: string): Promise<number[]>;
- decode(tokens: number[]): Promise<string>;
+ tokenise(text: string[], numeric: true): number[][];
+ tokenise(text: string[]): string[][];
+ detokenise(tokens: (number[] | Uint16Array)[]): string[];
+ encode(text: string): number[];
+ decode(tokens: number[] | Uint16Array): string;
   getVocab(): string[];
- getMerges(): Promise<[string, string][]>;
+ getMerges(): [string, string][];
   createTrainingData(text: string[], windowSize?: number): Promise<[number[], number[]]>;
   }
package/dist/tokeniser/CharTokeniser.js

@@ -8,9 +8,9 @@ class b extends k {
   vocab = [];
   cache = /* @__PURE__ */ new Map();
   _trained = !1;
- constructor(s) {
- if (super(), Array.isArray(s)) {
- if (this.vocab = s, this.vocab.length > 0)
+ constructor(i) {
+ if (super(), Array.isArray(i)) {
+ if (this.vocab = i, this.vocab.length > 0)
   this.vocabSize = this.vocab.length, d.forEach((t) => {
   const e = this.vocab.indexOf(t);
   e !== -1 && this.addSpecialToken(t, e);
@@ -21,37 +21,37 @@ class b extends k {
   throw new Error("Vocab cannot be empty");
   this._trained = !0;
   } else
- this.vocabSize = s, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
+ this.vocabSize = i, this.vocab = new Array(this.vocabSize).fill(""), this.addSpecialTokens(), this.eosToken = this.getSpecialTokenIndex("<eos>"), this.bosToken = this.getSpecialTokenIndex("<bos>") ?? this.eosToken, this.unkToken = this.getSpecialTokenIndex(""), this.vocab.forEach((t, e) => {
   this.cache.set(t, e);
   }), this.cache.set("", this.unkToken);
   }
- addToken(s, t) {
- if (this.cache.has(s))
- return this.cache.get(s);
+ addToken(i, t) {
+ if (this.cache.has(i))
+ return this.cache.get(i);
   let e;
   if (t !== void 0 ? e = t : (e = this.vocab.indexOf("", this.unkToken + 1), e === -1 && (e = this.vocabSize)), e >= this.vocabSize)
   throw new Error("Vocab size exceeded");
- return this.vocab[e] = s, this.cache.set(s, e), e;
+ return this.vocab[e] = i, this.cache.set(i, e), e;
   }
   get trained() {
   return this.vocab.length === this.vocabSize && this._trained;
   }
   destroy() {
   }
- async train(s) {
- const t = s.map((n) => n.split("")).flat(), e = new Set(t), i = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
+ async train(i) {
+ const t = i.map((n) => n.split("")).flat(), e = new Set(t), s = Array.from(e), h = this.vocab.indexOf("", this.unkToken + 1), o = this.vocabSize - u.length;
   if (h === -1)
   return this.vocabSize;
- if (this._trained = !0, i.length > o) {
+ if (this._trained = !0, s.length > o) {
   const n = /* @__PURE__ */ new Map();
   t.forEach((a) => {
   n.set(a, (n.get(a) || 0) + 1);
- }), i.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), i.splice(0, i.length - o);
+ }), s.sort((a, r) => (n.get(a) || 0) - (n.get(r) || 0)), s.splice(0, s.length - o);
   }
   let c = h;
   if (c !== -1) {
   const n = new Set(this.vocab);
- for (const a of i)
+ for (const a of s)
   if (!n.has(a) && (this.vocab[c] = a, n.add(a), c = this.vocab.indexOf("", c + 1), c === -1))
   break;
   }
@@ -59,34 +59,34 @@ class b extends k {
   this.cache.set(n, a);
   }), this.emit("trainStatus", "trained"), this.vocabSize;
   }
- async tokenise(s, t) {
+ tokenise(i, t) {
   if (!this.trained)
   throw new Error("Tokeniser not trained");
- return s.map((i) => t ? i.split("").map((h) => this.cache.get(h) ?? this.unkToken) : i.split("").map((h) => {
+ return i.map((s) => t ? s.split("").map((h) => this.cache.get(h) ?? this.unkToken) : s.split("").map((h) => {
   const o = this.cache.get(h);
   return o !== void 0 ? this.vocab[o] : "";
   }));
   }
- async detokenise(s) {
- return s.map((e) => e.map((i) => this.vocab[i]).join(""));
+ detokenise(i) {
+ return i.map((e) => Array.from(e).map((s) => this.vocab[s] || "").join(""));
   }
- async encode(s) {
- return (await this.tokenise([s], !0))[0];
+ encode(i) {
+ return this.tokenise([i], !0)[0];
   }
- async decode(s) {
- return (await this.detokenise([s]))[0];
+ decode(i) {
+ return this.detokenise([i])[0];
   }
   getVocab() {
   return this.vocab;
   }
- async getMerges() {
+ getMerges() {
   return [];
   }
- async createTrainingData(s, t = 5) {
- const e = await this.tokenise(s, !0), i = [], h = [];
+ async createTrainingData(i, t = 5) {
+ const e = await this.tokenise(i, !0), s = [], h = [];
   for (let o = 0; o < e.length - t; o++)
- i.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
- return [i, h];
+ s.push(...e[o].slice(0, t)), h.push(e[o + 1][0]);
+ return [s, h];
   }
   }
   export {
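
Two behavioural details ride along with the sync conversion here: `detokenise` now runs each row through `Array.from`, so `Uint16Array` rows work, and unknown ids map through `this.vocab[s] || ""` to the empty string instead of leaking the string "undefined" into the output. A quick sketch (assuming a trained CharTokeniser `tok`):

  const ids = tok.encode("abc");                 // number[], no await in 0.12.1
  const text = tok.decode(new Uint16Array(ids)); // typed arrays now accepted
  tok.detokenise([[65535]]);                     // [""] for an out-of-vocab id, not ["undefined"]
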
package/dist/tokeniser/bpe.d.ts

@@ -16,12 +16,12 @@ export default class BPETokeniser extends BaseTokeniser {
   get unkToken(): number;
   train(text: string[]): Promise<number>;
   getVocab(): string[];
- getMerges(): Promise<[string, string][]>;
+ getMerges(): [string, string][];
   private tokeniseWord;
   private tokeniseStrings;
- tokenise(text: string[], numeric: true): Promise<number[][]>;
- tokenise(text: string[]): Promise<string[][]>;
- detokenise(tokens: number[][]): Promise<string[]>;
- encode(text: string): Promise<number[]>;
- decode(tokens: number[]): Promise<string>;
+ tokenise(text: string[], numeric: true): number[][];
+ tokenise(text: string[]): string[][];
+ detokenise(tokens: number[][]): string[];
+ encode(text: string): number[];
+ decode(tokens: number[]): string;
   }
package/dist/tokeniser/bpe.js

@@ -53,7 +53,7 @@ function v(o, e) {
   o.tokens[s] = n;
   }), o.pairs.delete(u(e.a, e.b));
   }
- class T extends d {
+ class x extends d {
   targetSize;
   vocab = /* @__PURE__ */ new Set();
   vocabIndex = /* @__PURE__ */ new Map();
@@ -116,7 +116,7 @@ class T extends d {
   getVocab() {
   return Array.from(this.vocab);
   }
- async getMerges() {
+ getMerges() {
   return this.merges;
   }
   tokeniseWord(e) {
@@ -128,21 +128,21 @@ class T extends d {
   tokeniseStrings(e) {
   return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
   }
- async tokenise(e, s) {
+ tokenise(e, s) {
   const t = this.tokeniseStrings(e);
   return s ? t.map((n) => n.map((r) => this.vocabIndex.get(r) ?? this.unkToken)) : t.map((n) => n.map((r) => this.vocab.has(r) ? r : ""));
   }
- async detokenise(e) {
+ detokenise(e) {
   const s = this.getVocab();
   return e.map((n) => n.map((r) => s[r]).join(""));
   }
- async encode(e) {
- return (await this.tokenise([e], !0))[0];
+ encode(e) {
+ return this.tokenise([e], !0)[0];
   }
- async decode(e) {
- return (await this.detokenise([e]))[0];
+ decode(e) {
+ return this.detokenise([e])[0];
   }
   }
   export {
- T as default
+ x as default
   };
package/dist/tokeniser/type.d.ts

@@ -6,16 +6,14 @@ export interface Conversation {
   }
   export interface ITokeniser extends EE<'trainStatus'> {
   train(text: string[]): Promise<number>;
- tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
- detokenise(tokens: string[][] | number[][]): Promise<string[]>;
   getVocab(): string[];
- getMerges(): Promise<[string, string][]>;
+ getMerges(): [string, string][];
   destroy(): void;
- encode(text: string): Promise<number[]>;
- encodeConversation(conversation: Conversation[], completion?: boolean): Promise<number[]>;
- encodeSequence(text: string): Promise<number[]>;
- decode(tokens: number[]): Promise<string>;
- decodeConversation(tokens: number[]): Promise<Conversation[]>;
+ encode(text: string): number[];
+ encodeConversation(conversation: Conversation[], completion?: boolean): number[];
+ encodeSequence(text: string): number[];
+ decode(tokens: number[] | Uint16Array): string;
+ decodeConversation(tokens: number[] | Uint16Array): Conversation[];
   vocabSize: number;
   eosToken: number;
   bosToken: number;
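
This interface change is the breaking change behind the version bump: every text-level method loses its Promise wrapper (only train stays async), decode/decodeConversation now also accept Uint16Array, and tokenise/detokenise drop out of the interface (the concrete tokenisers above still implement them). Migration is mostly deleting awaits; a sketch:

  // 0.11.0
  const ids = await tokeniser.encode("hello");
  const text = await tokeniser.decode(ids);

  // 0.12.1
  const ids2 = tokeniser.encode("hello");
  const text2 = tokeniser.decode(Uint16Array.from(ids2));
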
@@ -1,7 +1,7 @@
1
1
  import { adamAdjust as b } from "../ops/adamAdjust.js";
2
2
  import { adamMoments as d } from "../ops/adamMoments.js";
3
- import { O as g, e as h, t as o, d as B } from "../index-D0RBWjq8.js";
4
- import { z as M } from "../zeros-DeiE2zTa.js";
3
+ import { O as g, e as h, t as o, d as B } from "../index-Duu1Lvvv.js";
4
+ import { z as M } from "../zeros-5YROwwUH.js";
5
5
  class R extends g {
6
6
  constructor(t, a, e, s, i = null) {
7
7
  super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());
@@ -1,4 +1,4 @@
1
- import { m as r, b as c, c as h, e as o } from "../index-D0RBWjq8.js";
1
+ import { m as r, b as c, c as h, e as o } from "../index-Duu1Lvvv.js";
2
2
  import { AdamOptimizer as g } from "./Adam.js";
3
3
  class y extends g {
4
4
  constructor(t, e, s, i, a) {
package/dist/training/DatasetBuilder.d.ts

@@ -8,7 +8,7 @@ export declare class DatasetBuilder {
   blockSize: number;
   private pageSize;
   constructor(tokenizer: ITokeniser, blockSize?: number);
- createTextDataset(flatTokens: number[], batchSize?: number, masked?: Set<number>, invertMask?: boolean): Promise<Dataset<{
+ createTextDataset(flatTokens: Uint16Array, batchSize?: number, masked?: Set<number>, invertMask?: boolean): Promise<Dataset<{
   xs: Tensor;
   ys: Tensor;
   }>>;
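
The flat token stream is now a Uint16Array rather than number[]: two bytes per token instead of a 64-bit JS number, which matters at corpus scale (the vocab must stay under 65 536 ids for this to be lossless). Preparing the input under the new signature might look like this (sketch; assumes `DatasetBuilder` and a tokeniser are already in scope — the export path isn't shown in this diff):

  const flat = Uint16Array.from(tokeniser.encode(corpusText));
  const builder = new DatasetBuilder(tokeniser, 128); // blockSize 128, the default
  const dataset = await builder.createTextDataset(flat, 32);
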
@@ -1,63 +1,63 @@
1
- import { t as z } from "../index-D0RBWjq8.js";
2
- import { d as u, i as f } from "../dataset-DcjWqUVQ.js";
1
+ import { t as y } from "../index-Duu1Lvvv.js";
2
+ import { d as g, i as z } from "../dataset-CVIJu7Xa.js";
3
3
  import "../index-Cp39cXWe.js";
4
- function S(a) {
5
- return u(async () => {
4
+ function b(a) {
5
+ return g(async () => {
6
6
  const t = await a();
7
- return f(() => t.next());
7
+ return z(() => t.next());
8
8
  });
9
9
  }
10
- const b = 8;
11
- async function y(a, t) {
12
- return (await Promise.all(a.map((r) => t.encodeConversation(r)))).flat();
10
+ const f = 8;
11
+ async function w(a, t) {
12
+ return (await Promise.all(a.map((s) => t.encodeConversation(s)))).flat();
13
13
  }
14
- class x {
14
+ class m {
15
15
  tokenizer;
16
16
  blockSize;
17
17
  pageSize;
18
- constructor(t, s = 128) {
19
- this.tokenizer = t, this.blockSize = s, this.pageSize = s * b;
18
+ constructor(t, r = 128) {
19
+ this.tokenizer = t, this.blockSize = r, this.pageSize = r * f;
20
20
  }
21
21
  // Create dataset from text files
22
- async createTextDataset(t, s = 32, i, r) {
22
+ async createTextDataset(t, r = 32, i, s) {
23
23
  if (t.length < this.blockSize + 1)
24
24
  throw new Error(`Not enough tokens (${t.length}) for block size ${this.blockSize}`);
25
25
  if (i && i.size > t.length / this.pageSize / 2)
26
26
  throw new Error("Too many masked pages - would leave insufficient training data");
27
27
  const l = (function* () {
28
- if (i && r) {
28
+ if (i && s) {
29
29
  const e = Array.from(i);
30
30
  for (; ; ) {
31
- const n = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), o = e[n] * this.pageSize + h;
32
- if (o + this.blockSize + 1 > t.length)
31
+ const o = Math.floor(Math.random() * e.length), h = Math.floor(Math.random() * this.pageSize), n = e[o] * this.pageSize + h;
32
+ if (n + this.blockSize + 1 > t.length)
33
33
  continue;
34
- const c = t.slice(o, o + this.blockSize), g = t.slice(o + 1, o + this.blockSize + 1);
35
- yield { xs: c, ys: g };
34
+ const c = new Int32Array(t.subarray(n, n + this.blockSize)), u = new Int32Array(t.subarray(n + 1, n + this.blockSize + 1));
35
+ yield { xs: c, ys: u };
36
36
  }
37
37
  } else
38
38
  for (; ; ) {
39
39
  const e = Math.floor(Math.random() * (t.length - this.blockSize - 1));
40
40
  if (i) {
41
- const o = Math.floor(e / this.pageSize), c = i.has(o);
42
- if (c && !r || !c && r)
41
+ const n = Math.floor(e / this.pageSize), c = i.has(n);
42
+ if (c && !s || !c && s)
43
43
  continue;
44
44
  }
45
- const n = t.slice(e, e + this.blockSize), h = t.slice(e + 1, e + this.blockSize + 1);
46
- yield { xs: n, ys: h };
45
+ const o = new Int32Array(t.subarray(e, e + this.blockSize)), h = new Int32Array(t.subarray(e + 1, e + this.blockSize + 1));
46
+ yield { xs: o, ys: h };
47
47
  }
48
48
  }).bind(this);
49
- return S(l).batch(s).map((e) => {
50
- const n = e;
51
- return z(() => ({
52
- xs: n.xs.cast("int32"),
53
- ys: n.ys.cast("int32")
49
+ return b(l).batch(r).map((e) => {
50
+ const o = e;
51
+ return y(() => ({
52
+ xs: o.xs.cast("int32"),
53
+ ys: o.ys.cast("int32")
54
54
  // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
55
55
  }));
56
56
  }).prefetch(2);
57
57
  }
58
58
  }
59
59
  export {
60
- x as DatasetBuilder,
61
- b as PAGE_FACTOR,
62
- y as flattenTokens
60
+ m as DatasetBuilder,
61
+ f as PAGE_FACTOR,
62
+ w as flattenTokens
63
63
  };
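
The generator's real change is worth spelling out: `slice` on a plain array is gone; instead `subarray` takes a zero-copy view of the Uint16Array window and the Int32Array constructor copies it into the int32 layout the downstream `.cast("int32")` tensors expect. De-minified, each sample is:

  // Equivalent sampling step, names expanded (sketch):
  function sample(tokens: Uint16Array, blockSize: number) {
    const start = Math.floor(Math.random() * (tokens.length - blockSize - 1));
    const xs = new Int32Array(tokens.subarray(start, start + blockSize));
    // ys is xs shifted by one position: next-token prediction targets.
    const ys = new Int32Array(tokens.subarray(start + 1, start + blockSize + 1));
    return { xs, ys };
  }
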
@@ -1,6 +1,6 @@
1
1
  import b from "./Trainer.js";
2
2
  import L from "./Evaluator.js";
3
- import { d as w } from "../index-D0RBWjq8.js";
3
+ import { d as w } from "../index-Duu1Lvvv.js";
4
4
  import y from "../utilities/profile.js";
5
5
  import { createTensorStatistics as D } from "../checks/weights.js";
6
6
  const T = {
@@ -1,11 +1,12 @@
1
- import { Conversation, ITokeniser } from '../tokeniser/type';
1
+ import { ITokeniser } from '../tokeniser/type';
2
2
  import { DatasetBuilder } from './DatasetBuilder';
3
3
  import { default as AdamExt } from './AdamExt';
4
- import { NamedTensorMap, TensorContainer } from '@tensorflow/tfjs-core/dist/tensor_types';
4
+ import { NamedTensorMap } from '@tensorflow/tfjs-core/dist/tensor_types';
5
5
  import { Scalar, Tensor } from '@tensorflow/tfjs-core';
6
6
  import { Dataset } from '@tensorflow/tfjs-data';
7
7
  import { default as Model, ModelForwardAttributes } from '../models/model';
8
8
  import { TensorStatistics } from '../checks/weights';
9
+ import { Task } from './tasks/Task';
9
10
  export interface TrainingLogEntry {
10
11
  loss: number;
11
12
  valLoss?: number;
@@ -93,7 +94,7 @@ export default abstract class GPTTrainer {
93
94
  log: TrainingLogEntry;
94
95
  progress: TrainingProgress;
95
96
  }>;
96
- createTrainValidationSplit(textData: Conversation[][], batchSize?: number, validationSplit?: number): Promise<{
97
+ createTrainValidationSplit(tasks: Task[] | Uint16Array, batchSize?: number, validationSplit?: number): Promise<{
97
98
  trainDataset: Dataset<{
98
99
  xs: Tensor;
99
100
  ys: Tensor;
@@ -102,7 +103,7 @@ export default abstract class GPTTrainer {
102
103
  xs: Tensor;
103
104
  ys: Tensor;
104
105
  }>;
106
+ size: number;
105
107
  }>;
106
- createDataset(textData: Conversation[][], batchSize?: number): Promise<Dataset<TensorContainer>>;
107
108
  dispose(): void;
108
109
  }
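
The trainer's split API now takes Task objects (or an already-flattened Uint16Array) instead of raw conversation arrays, and reports the total token count back as `size`; `createDataset` is gone. A usage sketch against the new signature (assumes a concrete `trainer` instance and the `texts` corpus are in scope):

  const { trainDataset, validationDataset, size } =
    await trainer.createTrainValidationSplit(
      [new PretrainingTask(texts)], // or a Uint16Array of tokens
      32,                           // batchSize
      0.1                           // validationSplit
    );
  console.log(`split built over ${size} tokens`);
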
@@ -1,10 +1,11 @@
1
- import { DatasetBuilder as f, flattenTokens as h, PAGE_FACTOR as y } from "./DatasetBuilder.js";
2
- import z from "./AdamExt.js";
3
- import { t as S, v as k, k as x, d as p, b as m } from "../index-D0RBWjq8.js";
4
- import { z as g } from "../zeros-DeiE2zTa.js";
5
- class M {
6
- constructor(t, e, s = 1e-3) {
7
- this.tokenizer = e, this.model = t, this.lossScaling = t.lossScaling, this.learningRate = s, this.resetOptimizer(), this.datasetBuilder = new f(e, t.config.blockSize);
1
+ import { DatasetBuilder as f, PAGE_FACTOR as u } from "./DatasetBuilder.js";
2
+ import y from "./AdamExt.js";
3
+ import { t as z, v as S, k, d as h, b as p } from "../index-Duu1Lvvv.js";
4
+ import { tokensFromTasks as x } from "./tasks/Task.js";
5
+ import { z as m } from "../zeros-5YROwwUH.js";
6
+ class B {
7
+ constructor(t, e, i = 1e-3) {
8
+ this.tokenizer = e, this.model = t, this.lossScaling = t.lossScaling, this.learningRate = i, this.resetOptimizer(), this.datasetBuilder = new f(e, t.config.blockSize);
8
9
  }
9
10
  model;
10
11
  optimizer;
@@ -35,7 +36,7 @@ class M {
35
36
  }
36
37
  resetOptimizer(t = { learningRateFactor: 1, beta1: 0.9, beta2: 0.99, epsilon: 1e-8 }) {
37
38
  this.optimizer && this.optimizer.dispose();
38
- const e = new z(
39
+ const e = new y(
39
40
  t.learningRateFactor * this.learningRate,
40
41
  t.beta1,
41
42
  t.beta2,
@@ -50,11 +51,11 @@ class M {
50
51
  );
51
52
  this.optimizer = e;
52
53
  }
53
- trainStep(t, e, s = !1, i = !1) {
54
- return S(() => {
54
+ trainStep(t, e, i = !1, s = !1) {
55
+ return z(() => {
55
56
  this.model.getProfiler()?.startMemory();
56
57
  const { xs: a, ys: l } = e, c = () => {
57
- const [n, d] = this.model.forward(
58
+ const [o, d] = this.model.forward(
58
59
  {
59
60
  training: !0,
60
61
  checkpointing: this._gradientCheckpointing,
@@ -63,57 +64,53 @@ class M {
63
64
  a,
64
65
  l
65
66
  );
66
- n.dispose();
67
- const u = d.mul(m(this.lossScaling));
68
- return d.dispose(), u;
69
- }, { value: o, grads: r } = k(c);
70
- return s ? this.model.getProfiler()?.endMemory("Training") : (this.optimizer.applyGradients(r), this.model.getProfiler()?.endMemory("Training"), i ? (t.gradients = r, Object.values(r).forEach((n) => x(n))) : p(r)), o.mul(m(1 / this.lossScaling));
67
+ o.dispose();
68
+ const g = d.mul(p(this.lossScaling));
69
+ return d.dispose(), g;
70
+ }, { value: n, grads: r } = S(c);
71
+ return i ? this.model.getProfiler()?.endMemory("Training") : (this.optimizer.applyGradients(r), this.model.getProfiler()?.endMemory("Training"), s ? (t.gradients = r, Object.values(r).forEach((o) => k(o))) : h(r)), n.mul(p(1 / this.lossScaling));
71
72
  });
72
73
  }
73
74
  async dummyPass() {
74
- const t = g([1, this.model.config.blockSize], "int32"), e = g([1, this.model.config.blockSize], "int32");
75
+ const t = m([1, this.model.config.blockSize], "int32"), e = m([1, this.model.config.blockSize], "int32");
75
76
  try {
76
- const s = this.trainStep({}, { xs: t, ys: e }, !0);
77
- await s.data(), s.dispose();
78
- } catch (s) {
79
- console.error("Error during dummy pass:", s);
77
+ const i = this.trainStep({}, { xs: t, ys: e }, !0);
78
+ await i.data(), i.dispose();
79
+ } catch (i) {
80
+ console.error("Error during dummy pass:", i);
80
81
  } finally {
81
82
  t.dispose(), e.dispose();
82
83
  }
83
84
  }
84
- trainBatch(t, e, s = !1) {
85
+ trainBatch(t, e, i = !1) {
85
86
  try {
86
- const i = this.trainStep(t, e, !1, s);
87
- return e.xs.dispose(), e.ys.dispose(), t.step++, t.totalSteps++, i;
88
- } catch (i) {
89
- throw console.error(`Error processing batch at step ${t.step}:`, i), p(), i;
87
+ const s = this.trainStep(t, e, !1, i);
88
+ return e.xs.dispose(), e.ys.dispose(), t.step++, t.totalSteps++, s;
89
+ } catch (s) {
90
+ throw console.error(`Error processing batch at step ${t.step}:`, s), h(), s;
90
91
  }
91
92
  }
92
- async createTrainValidationSplit(t, e = 32, s = 0.1) {
93
- const i = await h(t, this.tokenizer), a = /* @__PURE__ */ new Set();
94
- if (s > 0) {
95
- const o = Math.floor(i.length / (this.datasetBuilder.blockSize * y)), r = Math.max(1, Math.floor(o * s));
93
+ async createTrainValidationSplit(t, e = 32, i = 0.1) {
94
+ const s = t instanceof Uint16Array ? t : await x(t, this.tokenizer), a = /* @__PURE__ */ new Set();
95
+ if (i > 0) {
96
+ const n = Math.floor(s.length / (this.datasetBuilder.blockSize * u)), r = Math.max(1, Math.floor(n * i));
96
97
  for (; a.size < r; ) {
97
- const n = Math.floor(Math.random() * o);
98
- a.add(n);
98
+ const o = Math.floor(Math.random() * n);
99
+ a.add(o);
99
100
  }
100
101
  }
101
- const l = await this.datasetBuilder.createTextDataset(i, e, a, !1), c = await this.datasetBuilder.createTextDataset(
102
- i,
102
+ const l = await this.datasetBuilder.createTextDataset(s, e, a, !1), c = await this.datasetBuilder.createTextDataset(
103
+ s,
103
104
  e,
104
105
  a,
105
106
  !0
106
107
  );
107
- return { trainDataset: l, validationDataset: c };
108
- }
109
- async createDataset(t, e = 32) {
110
- const s = await h(t, this.tokenizer);
111
- return await this.datasetBuilder.createTextDataset(s, e);
108
+ return { trainDataset: l, validationDataset: c, size: s.length };
112
109
  }
113
110
  dispose() {
114
111
  this.optimizer && this.optimizer.dispose();
115
112
  }
116
113
  }
117
114
  export {
118
- M as default
115
+ B as default
119
116
  };
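
Functionally this file only swaps flattenTokens for tokensFromTasks and drops createDataset; the trainStep body is unchanged in substance, just re-minified. It is the standard loss-scaling pattern for low-precision training, which de-minified reads roughly as below (sketch; `tf` is @tensorflow/tfjs, and the custom Adam above receives the same lossScaling, presumably to undo the scale when applying updates):

  const lossFn = () => {
    const [logits, loss] = model.forward({ training: true, checkpointing }, xs, ys);
    logits.dispose();
    return loss.mul(tf.scalar(lossScaling));    // scale up so small fp16 gradients don't underflow
  };
  const { value, grads } = tf.variableGrads(lossFn);
  optimizer.applyGradients(grads);              // gradients still carry the scale here
  return value.mul(tf.scalar(1 / lossScaling)); // report the true, unscaled loss
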
@@ -1,8 +1,8 @@
1
1
  import { gatherSub as x } from "../ops/gatherSub.js";
2
2
  import { scatterSub as L } from "../ops/scatterSub.js";
3
- import { a2 as C, t as u, a3 as E, c as G } from "../index-D0RBWjq8.js";
4
- import { s as y } from "../softmax-faLoUZVT.js";
5
- import { m as z, l as v } from "../log_sum_exp-VLZgbFAH.js";
3
+ import { a1 as C, t as u, a2 as E, c as G } from "../index-Duu1Lvvv.js";
4
+ import { s as y } from "../softmax-DfuYyjMh.js";
5
+ import { m as z, l as v } from "../log_sum_exp-CVqLsVLl.js";
6
6
  function k(t, s) {
7
7
  return u(() => {
8
8
  const n = t.shape[t.shape.length - 1], c = t.shape.slice(0, -1).reduce((o, e) => o * e, 1), h = t.shape.length > 2 ? t.reshape([c, n]) : t, p = s.shape.length > 1 ? s.reshape([c]).cast("int32") : s.cast("int32"), r = z(h, -1, !0), a = G(h, r), d = v(a, -1);
package/dist/training/tasks/ConversationTask.d.ts

@@ -0,0 +1,11 @@
+ import { Conversation, ITokeniser } from '../../main';
+ import { Task } from './Task';
+ export default class ConversationTask extends Task {
+ private rawConvo;
+ private index;
+ get length(): number;
+ constructor(conversations: Conversation[][]);
+ hasMoreConversations(): boolean;
+ nextConversation(): Conversation[] | null;
+ estimateTokens(tokeniser: ITokeniser): Promise<number>;
+ }
package/dist/training/tasks/ConversationTask.js

@@ -0,0 +1,26 @@
+ import { Task as t } from "./Task.js";
+ class s extends t {
+ rawConvo;
+ index = 0;
+ get length() {
+ return this.rawConvo.length;
+ }
+ constructor(n) {
+ super(), this.rawConvo = n;
+ }
+ hasMoreConversations() {
+ return this.index < this.rawConvo.length;
+ }
+ nextConversation() {
+ if (this.index >= this.rawConvo.length)
+ return null;
+ const n = this.rawConvo[this.index];
+ return this.index++, n;
+ }
+ async estimateTokens(n) {
+ return (await n.encodeConversation(this.rawConvo[0])).length * this.length;
+ }
+ }
+ export {
+ s as default
+ };
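
ConversationTask is a thin cursor over Conversation[][]: nextConversation hands out one conversation at a time, and estimateTokens extrapolates the whole task's token count from the first conversation alone (a deliberate approximation, not an exact count). Sketch:

  const task = new ConversationTask([
    [{ role: "user", content: "Hi" }, { role: "assistant", content: "Hello!" }],
    [{ role: "user", content: "Bye" }, { role: "assistant", content: "See you." }],
  ]);
  while (task.hasMoreConversations()) {
    const convo = task.nextConversation(); // Conversation[] | null
  }
  const approx = await task.estimateTokens(tokeniser); // first convo's token count × task.length
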
package/dist/training/tasks/PretrainingTask.d.ts

@@ -0,0 +1,11 @@
+ import { Conversation, ITokeniser } from '../../main';
+ import { Task } from './Task';
+ export default class PretrainingTask extends Task {
+ private rawText;
+ private index;
+ get length(): number;
+ constructor(texts: string[]);
+ hasMoreConversations(): boolean;
+ nextConversation(): Conversation[] | null;
+ estimateTokens(tokeniser: ITokeniser): Promise<number>;
+ }
package/dist/training/tasks/PretrainingTask.js

@@ -0,0 +1,34 @@
+ import { Task as e } from "./Task.js";
+ class r extends e {
+ rawText;
+ index = 0;
+ get length() {
+ return this.rawText.length;
+ }
+ constructor(t) {
+ super(), this.rawText = t;
+ }
+ hasMoreConversations() {
+ return this.index < this.rawText.length;
+ }
+ nextConversation() {
+ if (this.index >= this.rawText.length)
+ return null;
+ const t = {
+ role: "assistant",
+ content: this.rawText[this.index]
+ };
+ return this.index++, [t];
+ }
+ async estimateTokens(t) {
+ return (await t.encodeConversation([
+ {
+ role: "assistant",
+ content: this.rawText[0]
+ }
+ ])).length * this.length;
+ }
+ }
+ export {
+ r as default
+ };
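
PretrainingTask adapts plain strings to the same Task protocol by wrapping each text as a single assistant turn, so pretraining corpora and chat data can flow through one training path. End to end with the new trainer API (sketch; `chatData` is an assumed Conversation[][] corpus, and tokensFromTasks is the named export of tasks/Task.js that Trainer.js imports above):

  const tasks = [
    new PretrainingTask(["Once upon a time…", "The quick brown fox…"]),
    new ConversationTask(chatData),
  ];
  const { trainDataset, validationDataset, size } =
    await trainer.createTrainValidationSplit(tasks, 32, 0.1);
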