@genai-fi/nanogpt 0.6.0 → 0.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. package/dist/Generator.js +7 -7
  2. package/dist/NanoGPTModel.js +70 -121
  3. package/dist/RealDiv-BYViZwhN.js +540 -0
  4. package/dist/Reshape-t7Kcikjk.js +127 -0
  5. package/dist/TeachableLLM.d.ts +2 -0
  6. package/dist/TeachableLLM.js +34 -27
  7. package/dist/{TiedEmbedding-BhxWO8QR.js → TiedEmbedding-9WeDwvjO.js} +12 -13
  8. package/dist/{axis_util-D17qZRQm.js → axis_util-Bu4h7XWV.js} +14 -12
  9. package/dist/{broadcast_to-BMQLjvt_.js → broadcast_to-DARN-DBD.js} +2 -2
  10. package/dist/{concat-DhZfF1GY.js → concat-5aPGqw3Z.js} +3 -3
  11. package/dist/{dataset-oilnemHf.js → dataset-pgqp-YfL.js} +3 -3
  12. package/dist/{dropout-CrMQPCeG.js → dropout-Bciw46HT.js} +7 -7
  13. package/dist/{gather-DZCMHZuN.js → gather-DjyCjmOD.js} +1 -1
  14. package/dist/gpgpu_math-CNslybmD.js +3115 -0
  15. package/dist/{index-bMBtI-WR.js → index-BAzbokzv.js} +846 -649
  16. package/dist/{kernel_funcs_utils-CNmjLWnB.js → kernel_funcs_utils-CUxJCg0g.js} +232 -138
  17. package/dist/layers/BaseLayer.js +2 -2
  18. package/dist/layers/CausalSelfAttention.js +6 -6
  19. package/dist/layers/MLP.js +5 -5
  20. package/dist/layers/RMSNorm.js +3 -3
  21. package/dist/layers/RoPECache.js +13 -33
  22. package/dist/layers/TiedEmbedding.js +6 -7
  23. package/dist/layers/TransformerBlock.js +1 -1
  24. package/dist/loader/load.d.ts +13 -0
  25. package/dist/loader/load.js +27 -0
  26. package/dist/loader/loadHF.d.ts +7 -0
  27. package/dist/loader/loadHF.js +22 -0
  28. package/dist/{utilities/load.d.ts → loader/loadTransformers.d.ts} +11 -11
  29. package/dist/loader/loadTransformers.js +28 -0
  30. package/dist/loader/newZipLoad.d.ts +8 -0
  31. package/dist/loader/newZipLoad.js +21 -0
  32. package/dist/loader/oldZipLoad.d.ts +7 -0
  33. package/dist/loader/oldZipLoad.js +76 -0
  34. package/dist/{log_sum_exp-BHdkCb4s.js → log_sum_exp-YEo2h3gb.js} +14 -14
  35. package/dist/main.js +23 -20
  36. package/dist/{mat_mul-BsrLfy81.js → mat_mul-7121rsJk.js} +1 -1
  37. package/dist/{max-DechV4Bc.js → max-DtlIuVeW.js} +1 -1
  38. package/dist/mulmat_packed_gpu-D4nKF7Je.js +71 -0
  39. package/dist/{norm-B9hWHZH1.js → norm-CzltS9Fz.js} +16 -16
  40. package/dist/{ones-g0K8jVwm.js → ones-BBlSRqn1.js} +2 -2
  41. package/dist/ops/appendCache.js +3 -3
  42. package/dist/ops/attentionMask.js +1 -1
  43. package/dist/ops/cpu/appendCache.js +2 -2
  44. package/dist/ops/cpu/attentionMask.js +6 -6
  45. package/dist/ops/cpu/fusedSoftmax.js +2 -2
  46. package/dist/ops/cpu/gatherSub.js +9 -9
  47. package/dist/ops/cpu/gelu.js +1 -1
  48. package/dist/ops/cpu/matMulGelu.js +1 -1
  49. package/dist/ops/cpu/matMulMul.js +1 -1
  50. package/dist/ops/cpu/mulDropout.js +1 -1
  51. package/dist/ops/cpu/normRMS.js +1 -1
  52. package/dist/ops/cpu/qkv.js +3 -3
  53. package/dist/ops/cpu/rope.js +5 -5
  54. package/dist/ops/cpu/scatterSub.js +17 -48
  55. package/dist/ops/fusedSoftmax.js +1 -1
  56. package/dist/ops/gatherSub.js +1 -1
  57. package/dist/ops/gelu.js +1 -1
  58. package/dist/ops/grads/attentionMask.js +1 -1
  59. package/dist/ops/grads/fusedSoftmax.js +4 -4
  60. package/dist/ops/grads/gelu.js +1 -1
  61. package/dist/ops/grads/matMulGelu.js +1 -1
  62. package/dist/ops/grads/normRMS.js +1 -1
  63. package/dist/ops/grads/qkv.js +1 -1
  64. package/dist/ops/grads/rope.js +1 -1
  65. package/dist/ops/matMulGelu.js +1 -1
  66. package/dist/ops/matMulMul.js +1 -1
  67. package/dist/ops/mulDrop.js +1 -1
  68. package/dist/ops/node/sparseCrossEntropy.js +1 -1
  69. package/dist/ops/normRMS.js +1 -1
  70. package/dist/ops/qkv.js +1 -1
  71. package/dist/ops/rope.js +8 -4
  72. package/dist/ops/scatterSub.js +1 -1
  73. package/dist/ops/webgl/appendCache.js +1 -1
  74. package/dist/ops/webgl/attentionMask.js +1 -1
  75. package/dist/ops/webgl/fusedSoftmax.js +29 -560
  76. package/dist/ops/webgl/gatherSub.js +1 -1
  77. package/dist/ops/webgl/gelu.js +2 -2
  78. package/dist/ops/webgl/log.js +3 -3
  79. package/dist/ops/webgl/matMulGelu.js +46 -113
  80. package/dist/ops/webgl/matMulMul.js +1 -1
  81. package/dist/ops/webgl/mulDropout.js +1 -1
  82. package/dist/ops/webgl/normRMS.js +2 -2
  83. package/dist/ops/webgl/qkv.js +1 -1
  84. package/dist/ops/webgl/rope.js +1 -1
  85. package/dist/ops/webgl/scatterSub.js +1 -1
  86. package/dist/{ops-Mv7Ta72x.js → ops-C0sQEcPw.js} +117 -109
  87. package/dist/{random_width-BBAWzDym.js → random_width-DWzaOgrn.js} +6925 -6291
  88. package/dist/{range-DMaG9A3G.js → range-DYsrnfiy.js} +1 -1
  89. package/dist/{gpgpu_math-Ctc31slO.js → reciprocal-CJQeasVa.js} +7 -5
  90. package/dist/register_all_kernels-BfFCQAqs.js +21397 -0
  91. package/dist/{reshape-T4yDEqoF.js → reshape-krWGKraP.js} +1 -1
  92. package/dist/scatter_nd_util-93ln7Hut.js +46 -0
  93. package/dist/selu_util-sntGesxr.js +740 -0
  94. package/dist/{shared-XNAoXhOa.js → shared-Ca6iDobD.js} +1462 -1089
  95. package/dist/{sin-EEhbrRO_.js → sin-D_h-qCSx.js} +1 -1
  96. package/dist/{softmax-B2_IKPDR.js → softmax-fsdtf6JC.js} +1 -1
  97. package/dist/{split-dcks18H1.js → split-eiktj-6L.js} +1 -1
  98. package/dist/{stack-lpJ5kYvE.js → stack-dfEEz2OY.js} +2 -2
  99. package/dist/{sum-CutF5lj2.js → sum-BE_Irnim.js} +1 -1
  100. package/dist/{tensor-C15NA2LA.js → tensor-Xyi595sG.js} +1 -1
  101. package/dist/{tensor2d-DZ_e5eKM.js → tensor2d-CPEkynbH.js} +1 -1
  102. package/dist/training/AdamExt.js +1 -1
  103. package/dist/training/DatasetBuilder.js +2 -2
  104. package/dist/training/FullTrainer.js +1 -1
  105. package/dist/training/Trainer.js +3 -3
  106. package/dist/training/sparseCrossEntropy.js +5 -5
  107. package/dist/utilities/dummy.d.ts +6 -0
  108. package/dist/utilities/dummy.js +31 -10
  109. package/dist/utilities/generate.js +3 -3
  110. package/dist/utilities/profile.d.ts +5 -0
  111. package/dist/utilities/profile.js +10 -7
  112. package/dist/utilities/safetensors.js +2 -2
  113. package/dist/utilities/save.js +1 -1
  114. package/dist/utilities/weights.js +2 -2
  115. package/dist/{variable-CdRKKp8x.js → variable-wSS22xj5.js} +1 -1
  116. package/dist/{zeros-CAbHfODe.js → zeros-YJDE7oRb.js} +4 -4
  117. package/package.json +2 -8
  118. package/dist/Reshape-CLOrdpve.js +0 -212
  119. package/dist/slice_util-Ddk0uxGJ.js +0 -49
  120. package/dist/tfjs_backend-BDb8r9qx.js +0 -1010
  121. package/dist/utilities/load.js +0 -99
package/dist/utilities/load.js
@@ -1,99 +0,0 @@
- import { j as v } from "../jszip.min-CjP2V1VV.js";
- import { importWeights as F } from "./weights.js";
- import h from "../tokeniser/CharTokeniser.js";
- import b from "../NanoGPTModel.js";
- import { dummyPassAsync as u } from "./dummy.js";
- import { d as k } from "../index-bMBtI-WR.js";
- import j from "../tokeniser/bpe.js";
- import { load_safetensors as N } from "./safetensors.js";
- const I = 2;
- async function O(t) {
-   const s = await fetch(t);
-   if (!s.ok)
-     throw new Error(`Failed to fetch ${t}: ${s.statusText}`);
-   return s.arrayBuffer();
- }
- async function S(t) {
-   const s = /* @__PURE__ */ new Map(), r = await t.file("manifest.json")?.async("string");
-   if (!r)
-     throw new Error("Manifest file not found in the zip archive");
-   const p = JSON.parse(r);
-   for (const [o, a] of Object.entries(p.weightSpec))
-     s.set(o, { spec: a, data: new Float32Array() });
-   const e = await t.file("tokeniser.json")?.async("string");
-   if (!e)
-     throw new Error("Tokeniser file not found in the zip archive");
-   const i = JSON.parse(e), c = (i.type ?? "char") === "char" ? new h(i.vocab) : new j(i.vocab, i.merges), d = /* @__PURE__ */ new Map();
-   for (const o of Object.keys(t.files))
-     if (o.endsWith(".bin")) {
-       const a = o.replace(".bin", ""), w = await t.file(o).async("arraybuffer"), g = new Float32Array(w), l = s.get(a) || { spec: [], data: new Float32Array() };
-       l.data = g, s.set(a, l);
-       const n = await F(l);
-       d.set(a, n);
-     }
-   k();
-   const f = new b(p.config);
-   await u(f), f.loadWeights(d);
-   const m = await t.file("log.json")?.async("string");
-   if (m)
-     try {
-       const o = JSON.parse(m);
-       f.log = o;
-     } catch (o) {
-       throw console.error("Error parsing training log:", o), new Error(`Failed to parse training log: ${o}`);
-     }
-   return { model: f, tokeniser: c };
- }
- async function R(t) {
-   const s = typeof t == "string" ? await O(t) : t, r = await v.loadAsync(s);
-   if (r.file("manifest.json"))
-     return S(r);
-   {
-     const p = await r.file("config.json")?.async("string");
-     if (!p)
-       throw new Error("Config file not found in the zip archive");
-     const e = JSON.parse(p), i = {
-       vocabSize: e.vocab_size,
-       blockSize: e.block_size,
-       nLayer: e.num_hidden_layers,
-       nHead: e.num_attention_heads,
-       nEmbed: e.hidden_size,
-       dropout: e.dropout,
-       biasInLinear: e.biasInLinear,
-       biasInLayerNorm: e.biasInLayerNorm,
-       mlpFactor: e.mlpFactor,
-       useRope: e.useRope
-     }, y = await r.file("tokeniser.json")?.async("string");
-     if (!y)
-       throw new Error("Tokeniser file not found in the zip archive");
-     const c = JSON.parse(y), f = (c.type ?? "char") === "char" ? new h(c.vocab) : new j(c.vocab, c.merges), m = await N(await r.file("model.safetensors").async("arraybuffer")), o = /* @__PURE__ */ new Map();
-     for (const [n, E] of Object.entries(m))
-       o.set(n, [E]);
-     k();
-     const a = new b(i);
-     await u(a), a.loadWeights(o);
-     const w = await r.file("meta.json")?.async("string");
-     let g;
-     if (w)
-       try {
-         const n = JSON.parse(w);
-         n.name && (g = n.name);
-       } catch (n) {
-         console.error("Error parsing meta file:", n);
-       }
-     const l = await r.file("log.json")?.async("string");
-     if (l)
-       try {
-         const n = JSON.parse(l);
-         a.log = n;
-       } catch (n) {
-         throw console.error("Error parsing training log:", n), new Error(`Failed to parse training log: ${n}`);
-       }
-     return { model: a, tokeniser: f, name: g };
-   }
- }
- export {
-   I as VERSION,
-   R as loadModel,
-   S as loadOldModel
- };
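For reference, the deleted module above exported loadModel, which accepted either a URL string or an ArrayBuffer containing a zip archive and resolved to { model, tokeniser, name? }; in 0.6.2 the file-list shows this logic replaced by the new package/dist/loader/ modules. The sketch below shows how a consumer of 0.6.0 might have called the old export; the deep import path, the example URL, and the restoreModel wrapper are illustrative assumptions and are not part of this diff.

// Hypothetical consumer of the 0.6.0 loader removed above (a sketch, not the
// package's documented API). Only loadModel's accepted inputs and its
// { model, tokeniser, name? } result are taken from the deleted code.
import { loadModel, VERSION } from "@genai-fi/nanogpt/dist/utilities/load.js";

async function restoreModel(source: string | ArrayBuffer) {
  // The old loadModel fetched a URL (or took a raw ArrayBuffer), unzipped it
  // with jszip, rebuilt the NanoGPT model from manifest.json or from
  // config.json + model.safetensors, and re-imported the weights.
  const { model, tokeniser, name } = await loadModel(source);
  // VERSION is the constant 2 exported by the old module.
  console.log(`Loader version ${VERSION}, model name: ${name ?? "(unnamed)"}`);
  return { model, tokeniser };
}

restoreModel("https://example.com/checkpoint.zip").catch(console.error);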