@genai-fi/nanogpt 0.6.1 → 0.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. package/dist/Generator.js +6 -6
  2. package/dist/NanoGPTModel.js +9 -9
  3. package/dist/{RealDiv-7xu-pkZN.js → RealDiv-BYViZwhN.js} +6 -6
  4. package/dist/{Reshape-BYC1oUku.js → Reshape-t7Kcikjk.js} +2 -2
  5. package/dist/TeachableLLM.js +5 -5
  6. package/dist/{TiedEmbedding-C1HBot-5.js → TiedEmbedding-9WeDwvjO.js} +4 -4
  7. package/dist/{axis_util-CCNL7jea.js → axis_util-Bu4h7XWV.js} +1 -1
  8. package/dist/{broadcast_to-CddAF879.js → broadcast_to-DARN-DBD.js} +2 -2
  9. package/dist/{concat-XOK9ANZu.js → concat-5aPGqw3Z.js} +8 -8
  10. package/dist/{dataset-BFFipD1c.js → dataset-pgqp-YfL.js} +5 -5
  11. package/dist/{dropout-xlKRoJyU.js → dropout-Bciw46HT.js} +10 -10
  12. package/dist/{gather-DKtUaTtA.js → gather-DjyCjmOD.js} +1 -1
  13. package/dist/{gpgpu_math-B_ycgZ4W.js → gpgpu_math-CNslybmD.js} +31 -31
  14. package/dist/{index-CamYe_M8.js → index-BAzbokzv.js} +31 -31
  15. package/dist/{kernel_funcs_utils-D5MS0JFg.js → kernel_funcs_utils-CUxJCg0g.js} +5 -5
  16. package/dist/layers/BaseLayer.js +2 -2
  17. package/dist/layers/CausalSelfAttention.js +6 -6
  18. package/dist/layers/MLP.js +5 -5
  19. package/dist/layers/RMSNorm.js +3 -3
  20. package/dist/layers/RoPECache.js +4 -4
  21. package/dist/layers/TiedEmbedding.js +5 -5
  22. package/dist/layers/TransformerBlock.js +1 -1
  23. package/dist/loader/load.d.ts +13 -0
  24. package/dist/loader/load.js +27 -0
  25. package/dist/loader/loadHF.d.ts +7 -0
  26. package/dist/loader/loadHF.js +22 -0
  27. package/dist/{utilities/load.d.ts → loader/loadTransformers.d.ts} +11 -11
  28. package/dist/loader/loadTransformers.js +28 -0
  29. package/dist/loader/newZipLoad.d.ts +8 -0
  30. package/dist/loader/newZipLoad.js +21 -0
  31. package/dist/loader/oldZipLoad.d.ts +7 -0
  32. package/dist/loader/oldZipLoad.js +76 -0
  33. package/dist/{log_sum_exp-CV_5-TTu.js → log_sum_exp-YEo2h3gb.js} +16 -16
  34. package/dist/main.js +4 -4
  35. package/dist/{mat_mul-CAbRFWUj.js → mat_mul-7121rsJk.js} +4 -4
  36. package/dist/{max-JBBv7aUf.js → max-DtlIuVeW.js} +3 -3
  37. package/dist/{mulmat_packed_gpu-DW4doKL_.js → mulmat_packed_gpu-D4nKF7Je.js} +1 -1
  38. package/dist/{norm-B9dQTFYn.js → norm-CzltS9Fz.js} +10 -10
  39. package/dist/{ones-CMHNqMr6.js → ones-BBlSRqn1.js} +2 -2
  40. package/dist/ops/appendCache.js +3 -3
  41. package/dist/ops/attentionMask.js +1 -1
  42. package/dist/ops/cpu/appendCache.js +2 -2
  43. package/dist/ops/cpu/attentionMask.js +6 -6
  44. package/dist/ops/cpu/fusedSoftmax.js +2 -2
  45. package/dist/ops/cpu/gatherSub.js +9 -9
  46. package/dist/ops/cpu/gelu.js +1 -1
  47. package/dist/ops/cpu/matMulGelu.js +1 -1
  48. package/dist/ops/cpu/matMulMul.js +1 -1
  49. package/dist/ops/cpu/mulDropout.js +1 -1
  50. package/dist/ops/cpu/normRMS.js +1 -1
  51. package/dist/ops/cpu/qkv.js +3 -3
  52. package/dist/ops/cpu/rope.js +5 -5
  53. package/dist/ops/cpu/scatterSub.js +14 -14
  54. package/dist/ops/fusedSoftmax.js +1 -1
  55. package/dist/ops/gatherSub.js +1 -1
  56. package/dist/ops/gelu.js +1 -1
  57. package/dist/ops/grads/attentionMask.js +1 -1
  58. package/dist/ops/grads/fusedSoftmax.js +4 -4
  59. package/dist/ops/grads/gelu.js +1 -1
  60. package/dist/ops/grads/matMulGelu.js +1 -1
  61. package/dist/ops/grads/normRMS.js +1 -1
  62. package/dist/ops/grads/qkv.js +1 -1
  63. package/dist/ops/grads/rope.js +1 -1
  64. package/dist/ops/matMulGelu.js +1 -1
  65. package/dist/ops/matMulMul.js +1 -1
  66. package/dist/ops/mulDrop.js +1 -1
  67. package/dist/ops/node/sparseCrossEntropy.js +1 -1
  68. package/dist/ops/normRMS.js +1 -1
  69. package/dist/ops/qkv.js +1 -1
  70. package/dist/ops/rope.js +4 -4
  71. package/dist/ops/scatterSub.js +1 -1
  72. package/dist/ops/webgl/appendCache.js +1 -1
  73. package/dist/ops/webgl/attentionMask.js +1 -1
  74. package/dist/ops/webgl/fusedSoftmax.js +4 -4
  75. package/dist/ops/webgl/gatherSub.js +1 -1
  76. package/dist/ops/webgl/gelu.js +2 -2
  77. package/dist/ops/webgl/log.js +3 -3
  78. package/dist/ops/webgl/matMulGelu.js +17 -17
  79. package/dist/ops/webgl/matMulMul.js +1 -1
  80. package/dist/ops/webgl/mulDropout.js +1 -1
  81. package/dist/ops/webgl/normRMS.js +2 -2
  82. package/dist/ops/webgl/qkv.js +1 -1
  83. package/dist/ops/webgl/rope.js +1 -1
  84. package/dist/ops/webgl/scatterSub.js +1 -1
  85. package/dist/{ops-DqtYemmV.js → ops-C0sQEcPw.js} +78 -78
  86. package/dist/{random_width-CLMQG5Jn.js → random_width-DWzaOgrn.js} +22 -22
  87. package/dist/{range-DqYjKnuG.js → range-DYsrnfiy.js} +1 -1
  88. package/dist/{reciprocal-z49filta.js → reciprocal-CJQeasVa.js} +1 -1
  89. package/dist/{register_all_kernels-COt6wLD0.js → register_all_kernels-BfFCQAqs.js} +28 -28
  90. package/dist/{reshape-C45vIIRU.js → reshape-krWGKraP.js} +1 -1
  91. package/dist/{scatter_nd_util-qgtnviTE.js → scatter_nd_util-93ln7Hut.js} +3 -3
  92. package/dist/{selu_util-4QV_GXTB.js → selu_util-sntGesxr.js} +41 -41
  93. package/dist/{shared-ByfrGA97.js → shared-Ca6iDobD.js} +6 -6
  94. package/dist/{sin-9JBrfVaB.js → sin-D_h-qCSx.js} +1 -1
  95. package/dist/{softmax-DvMvui-_.js → softmax-fsdtf6JC.js} +1 -1
  96. package/dist/{split-DxrHrPFK.js → split-eiktj-6L.js} +4 -4
  97. package/dist/{stack-DgaoDmnF.js → stack-dfEEz2OY.js} +2 -2
  98. package/dist/{sum-BpcpxNEh.js → sum-BE_Irnim.js} +3 -3
  99. package/dist/{tensor-CDz5x1mP.js → tensor-Xyi595sG.js} +1 -1
  100. package/dist/{tensor2d-jO8JY5Jd.js → tensor2d-CPEkynbH.js} +1 -1
  101. package/dist/training/AdamExt.js +1 -1
  102. package/dist/training/DatasetBuilder.js +2 -2
  103. package/dist/training/FullTrainer.js +1 -1
  104. package/dist/training/Trainer.js +3 -3
  105. package/dist/training/sparseCrossEntropy.js +5 -5
  106. package/dist/utilities/dummy.js +2 -2
  107. package/dist/utilities/generate.js +3 -3
  108. package/dist/utilities/profile.js +1 -1
  109. package/dist/utilities/safetensors.js +2 -2
  110. package/dist/utilities/save.js +1 -1
  111. package/dist/utilities/weights.js +2 -2
  112. package/dist/{variable-CLVXjN7F.js → variable-wSS22xj5.js} +1 -1
  113. package/dist/{zeros-DUkkVccu.js → zeros-YJDE7oRb.js} +10 -10
  114. package/package.json +3 -3
  115. package/dist/utilities/load.js +0 -99
@@ -3,13 +3,13 @@ import O from "./BaseLayer.js";
  import { qkv as P } from "../ops/qkv.js";
  import { rope as v } from "../ops/rope.js";
  import { appendCache as V } from "../ops/appendCache.js";
- import { n as c, t as C } from "../index-CamYe_M8.js";
+ import { k as c, t as C } from "../index-BAzbokzv.js";
  import { fusedSoftmax as T } from "../ops/fusedSoftmax.js";
- import { d as y } from "../random_width-CLMQG5Jn.js";
- import { v as b } from "../variable-CLVXjN7F.js";
- import { r as k, d as L } from "../dropout-xlKRoJyU.js";
- import { r as N } from "../reshape-C45vIIRU.js";
- import { m as R } from "../mat_mul-CAbRFWUj.js";
+ import { d as y } from "../random_width-DWzaOgrn.js";
+ import { v as b } from "../variable-wSS22xj5.js";
+ import { r as k, d as L } from "../dropout-Bciw46HT.js";
+ import { r as N } from "../reshape-krWGKraP.js";
+ import { m as R } from "../mat_mul-7121rsJk.js";
  class $ extends O {
  divisor;
  index;
@@ -1,10 +1,10 @@
- import { t as l } from "../index-CamYe_M8.js";
+ import { t as l } from "../index-BAzbokzv.js";
  import u from "./BaseLayer.js";
  import { matMulGelu as M } from "../ops/matMulGelu.js";
- import { v as o } from "../variable-CLVXjN7F.js";
- import { r as h, d as f } from "../dropout-xlKRoJyU.js";
- import { r as d } from "../reshape-C45vIIRU.js";
- import { m as c } from "../mat_mul-CAbRFWUj.js";
+ import { v as o } from "../variable-wSS22xj5.js";
+ import { r as h, d as f } from "../dropout-Bciw46HT.js";
+ import { r as d } from "../reshape-krWGKraP.js";
+ import { m as c } from "../mat_mul-7121rsJk.js";
  class V extends u {
  index;
  hiddenUnits;
@@ -1,8 +1,8 @@
- import { t as s } from "../index-CamYe_M8.js";
+ import { t as s } from "../index-BAzbokzv.js";
  import e from "./BaseLayer.js";
  import { normRMS as a } from "../ops/normRMS.js";
- import { v as i } from "../variable-CLVXjN7F.js";
- import { o as m } from "../ones-CMHNqMr6.js";
+ import { v as i } from "../variable-wSS22xj5.js";
+ import { o as m } from "../ones-BBlSRqn1.js";
  class f extends e {
  GAMMA;
  constructor(r, t = "", o) {
@@ -1,7 +1,7 @@
- import { f as t, l as h, t as n, n as p } from "../index-CamYe_M8.js";
- import { r as c } from "../reciprocal-z49filta.js";
- import { c as f, s as m } from "../sin-9JBrfVaB.js";
- import { r as a } from "../range-DqYjKnuG.js";
+ import { s as t, j as h, t as n, k as p } from "../index-BAzbokzv.js";
+ import { r as c } from "../reciprocal-CJQeasVa.js";
+ import { c as f, s as m } from "../sin-D_h-qCSx.js";
+ import { r as a } from "../range-DYsrnfiy.js";
  class D {
  constructor(o) {
  this.config = o;
@@ -1,9 +1,9 @@
- import "../random_width-CLMQG5Jn.js";
- import "../index-CamYe_M8.js";
- import { T as e } from "../TiedEmbedding-C1HBot-5.js";
+ import "../random_width-DWzaOgrn.js";
+ import "../index-BAzbokzv.js";
+ import { T as e } from "../TiedEmbedding-9WeDwvjO.js";
  import "./BaseLayer.js";
- import "../variable-CLVXjN7F.js";
- import "../gather-DKtUaTtA.js";
+ import "../variable-wSS22xj5.js";
+ import "../gather-DjyCjmOD.js";
  export {
  e as default
  };
@@ -2,7 +2,7 @@ import l from "./CausalSelfAttention.js";
  import r from "./MLP.js";
  import o from "./RMSNorm.js";
  import d from "./BaseLayer.js";
- import { t as p } from "../index-CamYe_M8.js";
+ import { t as p } from "../index-BAzbokzv.js";
  class k extends d {
  ln1;
  attn;
@@ -0,0 +1,13 @@
+ import { default as NanoGPT } from '../NanoGPTModel';
+ import { ITokeniser } from '../tokeniser/type';
+ export declare const VERSION = 2;
+ export interface Metadata {
+ version: string;
+ application: string;
+ name?: string;
+ }
+ export declare function loadModel(data: Blob | Buffer | string): Promise<{
+ model: NanoGPT;
+ tokeniser: ITokeniser;
+ name?: string;
+ }>;
@@ -0,0 +1,27 @@
+ import { j as o } from "../jszip.min-CjP2V1VV.js";
+ import i from "./oldZipLoad.js";
+ import s from "./newZipLoad.js";
+ import n from "./loadHF.js";
+ const u = 2;
+ async function f(t) {
+ const e = await fetch(t);
+ if (!e.ok)
+ throw new Error(`Failed to fetch ${t}: ${e.statusText}`);
+ return e.arrayBuffer();
+ }
+ async function m(t) {
+ if (typeof t == "string")
+ if (t.startsWith("http://") || t.startsWith("https://")) {
+ const e = await f(t), r = await o.loadAsync(e);
+ return r.file("manifest.json") ? i(r) : s(r);
+ } else
+ return n(t);
+ else {
+ const e = await o.loadAsync(t);
+ return e.file("manifest.json") ? i(e) : s(e);
+ }
+ }
+ export {
+ u as VERSION,
+ m as loadModel
+ };
@@ -0,0 +1,7 @@
+ import { default as NanoGPT } from '../NanoGPTModel';
+ import { ITokeniser } from '../main';
+ export default function loadHuggingFace(name: string): Promise<{
+ model: NanoGPT;
+ tokeniser: ITokeniser;
+ name?: string;
+ }>;
@@ -0,0 +1,22 @@
+ import w from "./loadTransformers.js";
+ async function u(t) {
+ const r = `https://huggingface.co/${t}/resolve/main/config.json`, a = `https://huggingface.co/${t}/resolve/main/tokeniser.json`, i = `https://huggingface.co/${t}/resolve/main/meta.json`, c = `https://huggingface.co/${t}/resolve/main/model.safetensors`, [o, e, s, n] = await Promise.all([
+ fetch(r),
+ fetch(a),
+ fetch(i),
+ fetch(c)
+ ]);
+ if (!o.ok)
+ throw new Error(`Failed to fetch config from ${r}: ${o.statusText}`);
+ if (!e.ok)
+ throw new Error(`Failed to fetch tokeniser from ${a}: ${e.statusText}`);
+ if (!s.ok)
+ throw new Error(`Failed to fetch meta from ${i}: ${s.statusText}`);
+ if (!n.ok)
+ throw new Error(`Failed to fetch weights from ${c}: ${n.statusText}`);
+ const f = await o.json(), g = await e.json(), h = await s.json(), l = await n.arrayBuffer();
+ return w(f, g, h, l);
+ }
+ export {
+ u as default
+ };
@@ -1,7 +1,5 @@
- import { default as zip } from 'jszip';
- import { default as NanoGPT } from '../NanoGPTModel';
  import { ITokeniser } from '../tokeniser/type';
- export declare const VERSION = 2;
+ import { default as NanoGPT } from '../NanoGPTModel';
  export interface TransformersConfig {
  model_type: string;
  vocab_size: number;
@@ -15,16 +13,18 @@ export interface TransformersConfig {
  mlpFactor: number;
  useRope: boolean;
  }
- export interface Metadata {
- version: string;
- application: string;
+ export interface TransformersTokeniser {
+ type: 'char' | 'bpe';
+ vocab: string[];
+ merges: [string, string][];
+ }
+ export interface TransformersMetadata {
  name?: string;
+ version: number;
+ application: string;
+ [key: string]: unknown;
  }
- export declare function loadOldModel(zipFile: zip): Promise<{
- model: NanoGPT;
- tokeniser: ITokeniser;
- }>;
- export declare function loadModel(data: Blob | Buffer | string): Promise<{
+ export default function loadTransformers(config: TransformersConfig, tokeniser: TransformersTokeniser, metadata: TransformersMetadata, weightData: ArrayBuffer): Promise<{
  model: NanoGPT;
  tokeniser: ITokeniser;
  name?: string;
@@ -0,0 +1,28 @@
+ import b from "../NanoGPTModel.js";
+ import c from "../tokeniser/CharTokeniser.js";
+ import l from "../tokeniser/bpe.js";
+ import { load_safetensors as u } from "../utilities/safetensors.js";
+ import { U as y } from "../index-BAzbokzv.js";
+ import { dummyPassAsync as h } from "../utilities/dummy.js";
+ async function L(e, a, r, t) {
+ const n = {
+ vocabSize: e.vocab_size,
+ blockSize: e.block_size,
+ nLayer: e.num_hidden_layers,
+ nHead: e.num_attention_heads,
+ nEmbed: e.hidden_size,
+ dropout: e.dropout,
+ biasInLinear: e.biasInLinear,
+ biasInLayerNorm: e.biasInLayerNorm,
+ mlpFactor: e.mlpFactor,
+ useRope: e.useRope
+ }, m = (a.type ?? "char") === "char" ? new c(a.vocab) : new l(a.vocab, a.merges), i = await u(t), s = /* @__PURE__ */ new Map();
+ for (const [p, d] of Object.entries(i))
+ s.set(p, [d]);
+ y();
+ const o = new b(n);
+ return await h(o), o.loadWeights(s), { model: o, tokeniser: m, name: r.name };
+ }
+ export {
+ L as default
+ };
@@ -0,0 +1,8 @@
+ import { ITokeniser } from '../main';
+ import { default as NanoGPT } from '../NanoGPTModel';
+ import { default as zip } from 'jszip';
+ export default function loadZipFile(zipFile: zip): Promise<{
+ model: NanoGPT;
+ tokeniser: ITokeniser;
+ name?: string;
+ }>;
@@ -0,0 +1,21 @@
+ import c from "./loadTransformers.js";
+ async function g(r) {
+ const e = await r.file("config.json")?.async("string");
+ if (!e)
+ throw new Error("Config file not found in the zip archive");
+ const t = JSON.parse(e), n = await r.file("tokeniser.json")?.async("string");
+ if (!n)
+ throw new Error("Tokeniser file not found in the zip archive");
+ const i = JSON.parse(n), s = await r.file("model.safetensors").async("arraybuffer"), o = await r.file("meta.json")?.async("string");
+ let a = { version: 0, application: "" };
+ if (o)
+ try {
+ a = JSON.parse(o);
+ } catch (f) {
+ console.error("Error parsing meta file:", f);
+ }
+ return c(t, i, a, s);
+ }
+ export {
+ g as default
+ };
@@ -0,0 +1,7 @@
+ import { default as zip } from 'jszip';
+ import { ITokeniser } from '../main';
+ import { default as NanoGPT } from '../NanoGPTModel';
+ export default function loadOldModel(zipFile: zip): Promise<{
+ model: NanoGPT;
+ tokeniser: ITokeniser;
+ }>;
@@ -0,0 +1,76 @@
+ import d from "../NanoGPTModel.js";
+ import "../jszip.min-CjP2V1VV.js";
+ import h from "../tokeniser/CharTokeniser.js";
+ import { U as k } from "../index-BAzbokzv.js";
+ import b from "../tokeniser/bpe.js";
+ import { dummyPassAsync as u } from "../utilities/dummy.js";
+ import "../Generator.js";
+ import "../index-Dwqa6Zy2.js";
+ import "../dataset-pgqp-YfL.js";
+ import "../index-Tf7vU29b.js";
+ import "../papaparse.min-C8l2Kvo1.js";
+ import "../ops/cpu/scatterSub.js";
+ import "../ops/webgl/scatterSub.js";
+ import "../ops/cpu/gatherSub.js";
+ import "../ops/webgl/gatherSub.js";
+ import "../ops/cpu/attentionMask.js";
+ import "../ops/webgl/attentionMask.js";
+ import "../ops/grads/attentionMask.js";
+ import "../ops/cpu/qkv.js";
+ import "../ops/webgl/qkv.js";
+ import "../ops/grads/qkv.js";
+ import "../random_width-DWzaOgrn.js";
+ import "../register_all_kernels-BfFCQAqs.js";
+ import "../ops/cpu/rope.js";
+ import "../ops/webgl/rope.js";
+ import "../ops/grads/rope.js";
+ import "../ops/cpu/appendCache.js";
+ import "../ops/webgl/appendCache.js";
+ import "../ops/cpu/fusedSoftmax.js";
+ import "../ops/webgl/fusedSoftmax.js";
+ import "../ops/grads/fusedSoftmax.js";
+ import "../ops/cpu/matMulGelu.js";
+ import "../ops/webgl/matMulGelu.js";
+ import "../ops/grads/matMulGelu.js";
+ import "../ops/cpu/gelu.js";
+ import "../ops/webgl/gelu.js";
+ import "../ops/grads/gelu.js";
+ import "../ops/cpu/normRMS.js";
+ import "../ops/webgl/normRMS.js";
+ import "../ops/grads/normRMS.js";
+ import "../ops/webgl/log.js";
+ import { importWeights as O } from "../utilities/weights.js";
+ async function ft(o) {
+ const n = /* @__PURE__ */ new Map(), s = await o.file("manifest.json")?.async("string");
+ if (!s)
+ throw new Error("Manifest file not found in the zip archive");
+ const m = JSON.parse(s);
+ for (const [t, r] of Object.entries(m.weightSpec))
+ n.set(t, { spec: r, data: new Float32Array() });
+ const p = await o.file("tokeniser.json")?.async("string");
+ if (!p)
+ throw new Error("Tokeniser file not found in the zip archive");
+ const i = JSON.parse(p), l = (i.type ?? "char") === "char" ? new h(i.vocab) : new b(i.vocab, i.merges), c = /* @__PURE__ */ new Map();
+ for (const t of Object.keys(o.files))
+ if (t.endsWith(".bin")) {
+ const r = t.replace(".bin", ""), w = await o.file(t).async("arraybuffer"), g = new Float32Array(w), a = n.get(r) || { spec: [], data: new Float32Array() };
+ a.data = g, n.set(r, a);
+ const y = await O(a);
+ c.set(r, y);
+ }
+ k();
+ const e = new d(m.config);
+ await u(e), e.loadWeights(c);
+ const f = await o.file("log.json")?.async("string");
+ if (f)
+ try {
+ const t = JSON.parse(f);
+ e.log = t;
+ } catch (t) {
+ throw console.error("Error parsing training log:", t), new Error(`Failed to parse training log: ${t}`);
+ }
+ return { model: e, tokeniser: l };
+ }
+ export {
+ ft as default
+ };
@@ -1,8 +1,8 @@
- import { q as r, w as p, E as u, a8 as E, a9 as h, p as S, s as $, a7 as d } from "./index-CamYe_M8.js";
- import { e as K } from "./axis_util-CCNL7jea.js";
- import { m as T } from "./max-JBBv7aUf.js";
- import { r as m } from "./reshape-C45vIIRU.js";
- import { s as _ } from "./sum-BpcpxNEh.js";
+ import { o as r, q as p, E as u, a8 as E, a9 as h, p as S, b as $, a7 as d } from "./index-BAzbokzv.js";
+ import { e as b } from "./axis_util-Bu4h7XWV.js";
+ import { m as K } from "./max-DtlIuVeW.js";
+ import { r as m } from "./reshape-krWGKraP.js";
+ import { s as T } from "./sum-BE_Irnim.js";
  /**
  * @license
  * Copyright 2018 Google LLC. All Rights Reserved.
@@ -19,11 +19,11 @@ import { s as _ } from "./sum-BpcpxNEh.js";
  * limitations under the License.
  * =============================================================================
  */
- function b(s) {
+ function _(s) {
  const o = { x: p(s, "x", "exp") };
  return u.runKernel(E, o);
  }
- const w = /* @__PURE__ */ r({ exp_: b });
+ const N = /* @__PURE__ */ r({ exp_: _ });
  /**
  * @license
  * Copyright 2018 Google LLC. All Rights Reserved.
@@ -40,11 +40,11 @@ const w = /* @__PURE__ */ r({ exp_: b });
  * limitations under the License.
  * =============================================================================
  */
- function N(s) {
+ function q(s) {
  const o = { x: p(s, "x", "log", "float32") };
  return u.runKernel(h, o);
  }
- const q = /* @__PURE__ */ r({ log_: N });
+ const v = /* @__PURE__ */ r({ log_: q });
  /**
  * @license
  * Copyright 2020 Google LLC. All Rights Reserved.
@@ -61,22 +61,22 @@ const q = /* @__PURE__ */ r({ log_: N });
  * limitations under the License.
  * =============================================================================
  */
- function v(s, n = null, o = !1) {
- const a = p(s, "x", "logSumExp"), t = S(n, a.shape), x = T(
+ function w(s, n = null, o = !1) {
+ const a = p(s, "x", "logSumExp"), t = S(n, a.shape), x = K(
  a,
  t,
  !0
  /* keepDims */
- ), i = $(a, x), l = w(i), f = _(l, t), c = q(f), e = d(m(x, c.shape), c);
+ ), i = $(a, x), l = N(i), f = T(l, t), c = v(f), e = d(m(x, c.shape), c);
  if (o) {
- const g = K(e.shape, t);
+ const g = b(e.shape, t);
  return m(e, g);
  }
  return e;
  }
- const M = /* @__PURE__ */ r({ logSumExp_: v });
+ const M = /* @__PURE__ */ r({ logSumExp_: w });
  export {
- q as a,
- w as e,
+ v as a,
+ N as e,
  M as l
  };
package/dist/main.js CHANGED
@@ -5,7 +5,7 @@ import { default as I } from "./tokeniser/bpe.js";
  import { default as K } from "./utilities/waitForModel.js";
  import { default as Q } from "./data/textLoader.js";
  import { estimateMemoryUsage as V, estimateParameterCount as W, estimateResources as X, estimateTrainingMemoryUsage as Y, validateConfig as Z } from "./utilities/parameters.js";
- import "./index-CamYe_M8.js";
+ import "./index-BAzbokzv.js";
  import "./ops/cpu/scatterSub.js";
  import "./ops/webgl/scatterSub.js";
  import "./ops/cpu/gatherSub.js";
@@ -16,10 +16,10 @@ import "./ops/grads/attentionMask.js";
  import "./ops/cpu/qkv.js";
  import "./ops/webgl/qkv.js";
  import "./ops/grads/qkv.js";
- import "./random_width-CLMQG5Jn.js";
- import "./register_all_kernels-COt6wLD0.js";
+ import "./random_width-DWzaOgrn.js";
+ import "./register_all_kernels-BfFCQAqs.js";
  import "./index-Tf7vU29b.js";
- import "./dataset-BFFipD1c.js";
+ import "./dataset-pgqp-YfL.js";
  import "./ops/cpu/rope.js";
  import "./ops/webgl/rope.js";
  import "./ops/grads/rope.js";
@@ -1,4 +1,4 @@
- import { q as m, w as s, C as c, E as M, D as p } from "./index-CamYe_M8.js";
+ import { o as m, q as s, B as c, E as M, C as p } from "./index-BAzbokzv.js";
  /**
  * @license
  * Copyright 2020 Google LLC. All Rights Reserved.
@@ -15,10 +15,10 @@ import { q as m, w as s, C as c, E as M, D as p } from "./index-CamYe_M8.js";
  * limitations under the License.
  * =============================================================================
  */
- function f(e, n, o = !1, l = !1) {
- let a = s(e, "a", "matMul"), t = s(n, "b", "matMul");
+ function f(e, o, n = !1, l = !1) {
+ let a = s(e, "a", "matMul"), t = s(o, "b", "matMul");
  [a, t] = c(a, t);
- const r = { a, b: t }, u = { transposeA: o, transposeB: l };
+ const r = { a, b: t }, u = { transposeA: n, transposeB: l };
  return M.runKernel(p, r, u);
  }
  const i = /* @__PURE__ */ m({ matMul_: f });
@@ -1,4 +1,4 @@
- import { q as r, w as e, E as x, M as c } from "./index-CamYe_M8.js";
+ import { o as r, q as e, E as x, M as c } from "./index-BAzbokzv.js";
  /**
  * @license
  * Copyright 2020 Google LLC. All Rights Reserved.
@@ -15,8 +15,8 @@ import { q as r, w as e, E as x, M as c } from "./index-CamYe_M8.js";
  * limitations under the License.
  * =============================================================================
  */
- function m(n, s = null, o = !1) {
- const t = { x: e(n, "x", "max") }, a = { reductionIndices: s, keepDims: o };
+ function m(n, o = null, s = !1) {
+ const t = { x: e(n, "x", "max") }, a = { reductionIndices: o, keepDims: s };
  return x.runKernel(c, t, a);
  }
  const l = /* @__PURE__ */ r({ max_: m });
@@ -1,4 +1,4 @@
- import { u as z } from "./gpgpu_math-B_ycgZ4W.js";
+ import { u as z } from "./gpgpu_math-CNslybmD.js";
  /**
  * @license
  * Copyright 2018 Google LLC. All Rights Reserved.
@@ -1,8 +1,8 @@
- import { q as l, w as c, E as y, a2 as E, p as w, a3 as o, a4 as u, l as v, f as I, a5 as A } from "./index-CamYe_M8.js";
- import { e as $ } from "./axis_util-CCNL7jea.js";
- import { m as f } from "./max-JBBv7aUf.js";
- import { r as h } from "./reshape-C45vIIRU.js";
- import { s as t } from "./sum-BpcpxNEh.js";
+ import { o as l, q as c, E as y, a2 as E, p as w, a3 as o, a4 as u, j as v, s as I, a5 as A } from "./index-BAzbokzv.js";
+ import { e as $ } from "./axis_util-Bu4h7XWV.js";
+ import { m } from "./max-DtlIuVeW.js";
+ import { r as h } from "./reshape-krWGKraP.js";
+ import { s as t } from "./sum-BE_Irnim.js";
  /**
  * @license
  * Copyright 2020 Google Inc. All Rights Reserved.
@@ -40,11 +40,11 @@ const s = /* @__PURE__ */ l({ min_: k });
  * limitations under the License.
  * =============================================================================
  */
- function q(n, e = "euclidean", r = null, m = !1) {
+ function q(n, e = "euclidean", r = null, f = !1) {
  n = c(n, "x", "norm");
  const a = p(n, e, r);
  let i = a.shape;
- if (m) {
+ if (f) {
  const d = w(r, n.shape);
  i = $(a.shape, d);
  }
@@ -59,7 +59,7 @@ function p(n, e, r = null) {
  if (e === 1)
  return t(o(n), r);
  if (e === 1 / 0)
- return f(o(n), r);
+ return m(o(n), r);
  if (e === -1 / 0)
  return s(o(n), r);
  if (e === "euclidean" || e === 2)
@@ -68,9 +68,9 @@
  }
  if (Array.isArray(r) && r.length === 2) {
  if (e === 1)
- return f(t(o(n), r[0]), r[1] - 1);
+ return m(t(o(n), r[0]), r[1] - 1);
  if (e === 1 / 0)
- return f(t(o(n), r[1]), r[0]);
+ return m(t(o(n), r[1]), r[0]);
  if (e === -1 / 0)
  return s(t(o(n), r[1]), r[0]);
  if (e === "fro" || e === "euclidean")
@@ -1,5 +1,5 @@
- import { y as n, B as t, i as m, E as i } from "./index-CamYe_M8.js";
- import { z as c, c as f } from "./zeros-DUkkVccu.js";
+ import { x as n, y as t, g as m, E as i } from "./index-BAzbokzv.js";
+ import { z as c, c as f } from "./zeros-YJDE7oRb.js";
  /**
  * @license
  * Copyright 2018 Google LLC. All Rights Reserved.
@@ -1,8 +1,8 @@
- import { e as a } from "../index-CamYe_M8.js";
+ import { e as a } from "../index-BAzbokzv.js";
  import "./cpu/appendCache.js";
  import "./webgl/appendCache.js";
- import { c as s } from "../concat-XOK9ANZu.js";
- import { z as c } from "../zeros-DUkkVccu.js";
+ import { c as s } from "../concat-5aPGqw3Z.js";
+ import { z as c } from "../zeros-YJDE7oRb.js";
  function i(r, p, n, o) {
  if (!o) {
  const e = r.shape[2];
@@ -1,4 +1,4 @@
- import { e as o } from "../index-CamYe_M8.js";
+ import { e as o } from "../index-BAzbokzv.js";
  import "./cpu/attentionMask.js";
  import "./webgl/attentionMask.js";
  import "./grads/attentionMask.js";
@@ -1,5 +1,5 @@
- import { r as d } from "../../index-CamYe_M8.js";
- import { c as h } from "../../concat-XOK9ANZu.js";
+ import { r as d } from "../../index-BAzbokzv.js";
+ import { c as h } from "../../concat-5aPGqw3Z.js";
  function u(p) {
  const { cache: n, item: s } = p.inputs, { maxSize: r, pastLen: c } = p.attrs, t = n.shape[0], o = n.shape[1], a = n.shape[3], e = s.shape[2];
  if (c + e <= r) {
@@ -1,10 +1,10 @@
- import { r as a, g as p, f as u } from "../../index-CamYe_M8.js";
- import { l as N, w as b } from "../../ops-DqtYemmV.js";
- import { o as g } from "../../ones-CMHNqMr6.js";
- import { z as A } from "../../zeros-DUkkVccu.js";
- import { m as I } from "../../mat_mul-CAbRFWUj.js";
+ import { r as a, f as p, s as u } from "../../index-BAzbokzv.js";
+ import { l as N, w as b } from "../../ops-C0sQEcPw.js";
+ import { o as A } from "../../ones-BBlSRqn1.js";
+ import { z as I } from "../../zeros-YJDE7oRb.js";
+ import { m as g } from "../../mat_mul-7121rsJk.js";
  function o(n) {
- const { q: s, k: e } = n.inputs, { divisor: r } = n.attrs, c = s.shape[2], t = e.shape[2], m = N.bandPart(g([t, t]), -1, 0).cast("bool"), l = A([t, t]), i = p([t, t], Number.NEGATIVE_INFINITY), f = b(m, l, i), k = I(s, e, !1, !0).mul(u(r)), d = f.slice([0, 0], [c, t]).expandDims(0).expandDims(0);
+ const { q: s, k: e } = n.inputs, { divisor: r } = n.attrs, c = s.shape[2], t = e.shape[2], m = N.bandPart(A([t, t]), -1, 0).cast("bool"), l = I([t, t]), i = p([t, t], Number.NEGATIVE_INFINITY), f = b(m, l, i), k = g(s, e, !1, !0).mul(u(r)), d = f.slice([0, 0], [c, t]).expandDims(0).expandDims(0);
  return k.add(d);
  }
  const w = {
@@ -1,5 +1,5 @@
- import { r as n } from "../../index-CamYe_M8.js";
- import { s as f } from "../../softmax-DvMvui-_.js";
+ import { r as n } from "../../index-BAzbokzv.js";
+ import { s as f } from "../../softmax-fsdtf6JC.js";
  function r(t) {
  const { inputs: s, attrs: i } = t, { logits: o } = s, { dim: a, dropoutRate: e } = i;
  if (!o)
@@ -1,6 +1,6 @@
- import { q as u, w as c, E as g, Y as h, r as m, s as p } from "../../index-CamYe_M8.js";
- import { r as l } from "../../range-DqYjKnuG.js";
- import { s as N } from "../../stack-DgaoDmnF.js";
+ import { o as u, q as c, E as g, Y as h, r as m, b as p } from "../../index-BAzbokzv.js";
+ import { r as l } from "../../range-DYsrnfiy.js";
+ import { s as N } from "../../stack-dfEEz2OY.js";
  /**
  * @license
  * Copyright 2018 Google LLC. All Rights Reserved.
@@ -17,14 +17,14 @@ import { s as N } from "../../stack-DgaoDmnF.js";
  * limitations under the License.
  * =============================================================================
  */
- function f(e, s) {
- const n = c(s, "indices", "gatherND", "int32"), t = { params: c(e, "x", "gatherND", "string_or_numeric"), indices: n };
- return g.runKernel(h, t);
+ function b(e, t) {
+ const n = c(t, "indices", "gatherND", "int32"), s = { params: c(e, "x", "gatherND", "string_or_numeric"), indices: n };
+ return g.runKernel(h, s);
  }
- const b = /* @__PURE__ */ u({ gatherND_: f });
+ const f = /* @__PURE__ */ u({ gatherND_: b });
  function d(e) {
- const { values: s, labels: n, logits: r } = e.inputs, t = n.shape[0], a = l(0, t, 1, "int32"), i = N([a, n], 1), o = b(r, i);
- return p(s, o);
+ const { values: t, labels: n, logits: r } = e.inputs, s = n.shape[0], o = l(0, s, 1, "int32"), a = N([o, n], 1), i = f(r, a);
+ return p(t, i);
  }
  const k = {
  kernelName: "EfficientGatherSub",