@genai-fi/nanogpt 0.1.0 → 0.1.1

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
@@ -7,16 +7,20 @@ import { default as Trainer, ITrainerOptions } from './Trainer';
7
7
  import { default as EE } from 'eventemitter3';
8
8
  type TeachableLLMStatus = 'warmup' | 'ready' | 'training' | 'loading' | 'busy' | 'error';
9
9
  export default class TeachableLLM extends EE<'status' | 'error'> {
10
- readonly config: GPTConfig;
11
- readonly model: NanoGPT;
10
+ private _config?;
11
+ private _model?;
12
12
  readonly tf: typeof TF;
13
- readonly tokeniser: ITokeniser;
13
+ private _tokeniser?;
14
14
  private _status;
15
- constructor(tf: typeof TF, tokeniser: ITokeniser, model: NanoGPT);
15
+ constructor(tf: typeof TF, tokeniser?: ITokeniser, model?: NanoGPT);
16
+ get config(): GPTConfig;
17
+ get model(): NanoGPT;
18
+ get tokeniser(): ITokeniser;
16
19
  get status(): TeachableLLMStatus;
20
+ get ready(): boolean;
17
21
  private setStatus;
18
22
  saveModel(): Promise<Blob>;
19
- static loadModel(tf: typeof TF, data: Blob | Buffer | string): Promise<TeachableLLM>;
23
+ static loadModel(tf: typeof TF, data: Blob | Buffer | string): TeachableLLM;
20
24
  static create(tf: typeof TF, config?: Partial<GPTConfig>): TeachableLLM;
21
25
  getNumParams(): number;
22
26
  trainer(): Trainer;
@@ -1,60 +1,90 @@
1
1
  import a from "./NanoGPTModel.js";
2
- import { defaultConfig as m } from "./config.js";
3
- import { saveModel as u } from "./utilities/save.js";
4
- import { loadModel as h } from "./utilities/load.js";
5
- import c from "./Generator.js";
6
- import d from "./Trainer.js";
7
- import { E as l } from "./index-SOhdqzHq.js";
2
+ import { defaultConfig as h } from "./config.js";
3
+ import { saveModel as d } from "./utilities/save.js";
4
+ import { loadModel as m } from "./utilities/load.js";
5
+ import l from "./Generator.js";
6
+ import u from "./Trainer.js";
7
+ import { E as _ } from "./index-SOhdqzHq.js";
8
8
  import { dummyPassAsync as f } from "./utilities/dummy.js";
9
- import g from "./tokeniser/CharTokeniser.js";
10
- class n extends l {
11
- config;
12
- model;
9
+ import c from "./tokeniser/CharTokeniser.js";
10
+ class s extends _ {
11
+ _config;
12
+ _model;
13
13
  tf;
14
- tokeniser;
14
+ _tokeniser;
15
15
  _status = "loading";
16
- constructor(t, e, r) {
17
- super(), this.tf = t, this.config = r.config, this.tokeniser = e, this.model = r;
16
+ constructor(t, r, e) {
17
+ super(), this.tf = t, this._config = e?.config, this._tokeniser = r, this._model = e;
18
+ }
19
+ get config() {
20
+ if (!this._config)
21
+ throw new Error("Model configuration is not initialized.");
22
+ return this._config;
23
+ }
24
+ get model() {
25
+ if (!this._model)
26
+ throw new Error("Model is not initialized.");
27
+ return this._model;
28
+ }
29
+ get tokeniser() {
30
+ if (!this._tokeniser)
31
+ throw new Error("Tokeniser is not initialized.");
32
+ return this._tokeniser;
18
33
  }
19
34
  get status() {
20
35
  return this._status;
21
36
  }
37
+ get ready() {
38
+ return this._status === "ready" && !!this._model && !!this._tokeniser;
39
+ }
22
40
  setStatus(t) {
23
41
  this._status !== t && (this._status = t, this.emit("status", t));
24
42
  }
25
43
  saveModel() {
26
- return u(this.model, this.tokeniser);
44
+ if (!this._model || !this._tokeniser)
45
+ throw new Error("Model or tokeniser is not initialized.");
46
+ return d(this._model, this._tokeniser);
27
47
  }
28
- static async loadModel(t, e) {
29
- const { model: r, tokeniser: o } = await h(t, e), s = new n(t, o, r);
30
- return s.setStatus("warmup"), f(r).then(() => {
31
- s.setStatus("ready");
48
+ static loadModel(t, r) {
49
+ const e = new s(t);
50
+ return m(t, r).then(({ model: i, tokeniser: o }) => {
51
+ e._model = i, e._tokeniser = o, e._config = i.config, e.setStatus("warmup"), f(i).then(() => {
52
+ e.setStatus("ready");
53
+ }).catch((n) => {
54
+ e.setStatus("error"), e.emit("error", n);
55
+ });
32
56
  }).catch((i) => {
33
- s.setStatus("error"), s.emit("error", i);
34
- }), s;
57
+ e.setStatus("error"), e.emit("error", i);
58
+ }), e;
35
59
  }
36
- static create(t, e = {}) {
37
- const r = { ...m, ...e }, o = new g(r.vocabSize), s = new a(t, r);
38
- return new n(t, o, s);
60
+ static create(t, r = {}) {
61
+ const e = { ...h, ...r }, i = new c(e.vocabSize), o = new a(t, e);
62
+ return new s(t, i, o);
39
63
  }
40
64
  getNumParams() {
41
- return this.model.getNumParams();
65
+ if (!this._model)
66
+ throw new Error("Model is not initialized.");
67
+ return this._model.getNumParams();
42
68
  }
43
69
  trainer() {
44
- const t = new d(this.model, this.tokeniser);
70
+ if (!this._model || !this._tokeniser)
71
+ throw new Error("Model or tokeniser is not initialized.");
72
+ const t = new u(this._model, this._tokeniser);
45
73
  return t.on("start", () => this.setStatus("training")), t.on("stop", () => this.setStatus("ready")), t;
46
74
  }
47
- train(t, e) {
48
- return this.trainer().train(t, e);
75
+ train(t, r) {
76
+ return this.trainer().train(t, r);
49
77
  }
50
78
  generator() {
51
- const t = new c(this.model, this.tokeniser);
79
+ if (!this._model || !this._tokeniser)
80
+ throw new Error("Model or tokeniser is not initialized.");
81
+ const t = new l(this._model, this._tokeniser);
52
82
  return t.on("start", () => this.setStatus("busy")), t.on("stop", () => this.setStatus("ready")), t;
53
83
  }
54
- generateText(t, e) {
55
- return this.generator().generate(t, e);
84
+ generateText(t, r) {
85
+ return this.generator().generate(t, r);
56
86
  }
57
87
  }
58
88
  export {
59
- n as default
89
+ s as default
60
90
  };
@@ -1,5 +1,5 @@
1
- import { z as F } from "../jszip.min-BLbRbbKt.js";
2
- import { importWeights as b } from "./weights.js";
1
+ import { z as u } from "../jszip.min-BLbRbbKt.js";
2
+ import { importWeights as F } from "./weights.js";
3
3
  import k from "../tokeniser/CharTokeniser.js";
4
4
  import j from "../NanoGPTModel.js";
5
5
  import { dummyPassAsync as z } from "./dummy.js";
@@ -10,7 +10,7 @@ async function A(o) {
10
10
  return e.arrayBuffer();
11
11
  }
12
12
  async function T(o, e) {
13
- const m = typeof e == "string" ? await A(e) : e, n = await F.loadAsync(m), s = /* @__PURE__ */ new Map(), c = await n.file("manifest.json")?.async("string");
13
+ const m = typeof e == "string" ? await A(e) : e, n = await u.loadAsync(m), s = /* @__PURE__ */ new Map(), c = await n.file("manifest.json")?.async("string");
14
14
  if (!c)
15
15
  throw new Error("Manifest file not found in the zip archive");
16
16
  const f = JSON.parse(c);
@@ -19,20 +19,21 @@ async function T(o, e) {
19
19
  const l = await n.file("tokeniser.json")?.async("string");
20
20
  if (!l)
21
21
  throw new Error("Tokeniser file not found in the zip archive");
22
- const g = JSON.parse(l), y = new k(g.vocab), w = /* @__PURE__ */ new Map();
22
+ const g = JSON.parse(l), y = new k(g.vocab), p = /* @__PURE__ */ new Map();
23
23
  for (const t of Object.keys(n.files))
24
24
  if (t.endsWith(".bin")) {
25
25
  const r = t.replace(".bin", ""), h = await n.file(t).async("arraybuffer"), d = new Float32Array(h), i = s.get(r) || { spec: [], data: new Float32Array() };
26
26
  i.data = d, s.set(r, i);
27
- const u = await b(i, o);
28
- w.set(r, u);
27
+ const b = await F(i, o);
28
+ p.set(r, b);
29
29
  }
30
+ o.disposeVariables();
30
31
  const a = new j(o, f.config);
31
- await z(a), a.loadWeights(w);
32
- const p = await n.file("log.json")?.async("string");
33
- if (p)
32
+ await z(a), a.loadWeights(p);
33
+ const w = await n.file("log.json")?.async("string");
34
+ if (w)
34
35
  try {
35
- const t = JSON.parse(p);
36
+ const t = JSON.parse(w);
36
37
  a.log = t;
37
38
  } catch (t) {
38
39
  throw console.error("Error parsing training log:", t), new Error(`Failed to parse training log: ${t}`);
@@ -0,0 +1,2 @@
1
+ import { TeachableLLM } from '../main';
2
+ export default function waitForModel(model: TeachableLLM): Promise<void>;
@@ -0,0 +1,12 @@
1
+ function n(r) {
2
+ return new Promise((t, a) => {
3
+ r.ready ? t() : (r.on("status", (e) => {
4
+ e === "ready" && t();
5
+ }), r.on("error", (e) => {
6
+ a(e);
7
+ }));
8
+ });
9
+ }
10
+ export {
11
+ n as default
12
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@genai-fi/nanogpt",
3
- "version": "0.1.0",
3
+ "version": "0.1.1",
4
4
  "type": "module",
5
5
  "main": "dist/main.js",
6
6
  "types": "dist/main.d.ts",