@genai-fi/nanogpt 0.12.0 → 0.12.2

This diff compares the publicly released contents of the two package versions as they appear in their public registries, and is provided for informational purposes only.
package/dist/Trainer.d.ts CHANGED
@@ -32,7 +32,7 @@ export default class Trainer extends EE<'start' | 'stop' | 'log'> {
  stop(): void;
  reset(): void;
  getTotalSamples(): number;
- prepare(tasks: Task[], options?: ITrainerOptions): Promise<void>;
+ prepare(tasks?: Task[] | Uint16Array, options?: ITrainerOptions): Promise<void>;
  train(options?: ITrainerOptions): Promise<void>;
  step(options?: ITrainerOptions): Promise<void>;
  getLog(): TrainingLogEntry[];
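
The widened signature lets prepare take either an array of Task objects or an already tokenised Uint16Array, and the argument may now be omitted entirely (it defaults to an empty task list, as the Trainer.js hunk below shows). A minimal sketch of the new call shapes; the trainer instance, the task list and the tokeniser are hypothetical and assumed to exist elsewhere in the application:

import { Task, tokensFromTasks } from "@genai-fi/nanogpt";

// Hypothetical inputs, not defined in this diff.
declare const trainer: { prepare(tasks?: Task[] | Uint16Array, options?: { batchSize?: number }): Promise<void> };
declare const myTasks: Task[];
declare const tokenizer: any; // an ITokeniser instance

// As before: hand over Task objects and let the trainer tokenise them.
await trainer.prepare(myTasks, { batchSize: 32 });

// New in this release range: tokenise once up front and pass the buffer directly.
const tokens: Uint16Array = await tokensFromTasks(myTasks, tokenizer);
await trainer.prepare(tokens, { batchSize: 32 });
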
package/dist/Trainer.js CHANGED
@@ -20,7 +20,7 @@ class p extends n {
  getTotalSamples() {
  return this.totalSamples;
  }
- async prepare(t, e) {
+ async prepare(t = [], e) {
  const { trainDataset: a, validationDataset: s, size: i } = await this.trainer.createTrainValidationSplit(
  t,
  e?.batchSize || 32,
package/dist/main.d.ts CHANGED
@@ -22,7 +22,7 @@ export type { TrainingProgress, TrainingLogEntry } from './training/Trainer';
  export type { GPTConfig } from './models/config';
  export { estimateParameterCount, estimateMemoryUsage, estimateTrainingMemoryUsage, estimateResources, validateConfig, } from './utilities/parameters';
  export { default as topP } from './utilities/topP';
- export { Task } from './training/tasks/Task';
+ export { Task, tokensFromTasks } from './training/tasks/Task';
  export declare const tasks: {
  PretrainingTask: typeof PretrainingTask;
  StartSentenceTask: typeof StartSentenceTask;
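
tokensFromTasks is now re-exported from the package entry point alongside Task, so it can be imported from "@genai-fi/nanogpt" instead of an internal dist path. A sketch of the import surface, using only names this file declares:

import { Task, tokensFromTasks, tasks } from "@genai-fi/nanogpt";

// The tasks namespace still groups the concrete Task subclasses.
const { PretrainingTask, StartSentenceTask } = tasks;
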
package/dist/main.js CHANGED
@@ -13,7 +13,7 @@ import { default as Co } from "./data/textLoader.js";
  import { default as Eo } from "./models/model.js";
  import { estimateMemoryUsage as Bo, estimateParameterCount as bo, estimateResources as yo, estimateTrainingMemoryUsage as Lo, validateConfig as So } from "./utilities/parameters.js";
  import { default as Ao } from "./utilities/topP.js";
- import { Task as Ro } from "./training/tasks/Task.js";
+ import { Task as Go, tokensFromTasks as Ro } from "./training/tasks/Task.js";
  import o from "./training/tasks/PretrainingTask.js";
  import r from "./training/tasks/StartSentenceTask.js";
  import t from "./training/tasks/ConversationTask.js";
@@ -53,14 +53,14 @@ import "./ops/webgl/matMul16.js";
  import "./ops/cpu/matMul16.js";
  import "./ops/transpose16.js";
  import { selectBackend as wo } from "./backend.js";
- import { default as Fo } from "./utilities/performance.js";
+ import { default as No } from "./utilities/performance.js";
  import p from "./layers/CausalSelfAttention.js";
  import a from "./layers/MLP.js";
  import i from "./layers/TransformerBlock.js";
  import s from "./layers/RoPECache.js";
- import { default as jo } from "./training/AdamExt.js";
- import { default as zo } from "./checks/index.js";
- import { sentenceEmbeddings as Io, sentenceEmbeddingsTensor as Jo } from "./utilities/sentences.js";
+ import { default as qo } from "./training/AdamExt.js";
+ import { default as Ho } from "./checks/index.js";
+ import { sentenceEmbeddings as Jo, sentenceEmbeddingsTensor as Ko } from "./utilities/sentences.js";
  const to = {
  PretrainingTask: o,
  StartSentenceTask: r,
@@ -75,15 +75,15 @@ const to = {
  RoPECache: s
  };
  export {
- jo as AdamExt,
+ qo as AdamExt,
  ko as BPETokeniser,
  co as CharTokeniser,
  io as Generator,
  Eo as Model,
  fo as NanoGPT,
- Ro as Task,
+ Go as Task,
  lo as TeachableLLM,
- zo as checks,
+ Ho as checks,
  Bo as estimateMemoryUsage,
  bo as estimateParameterCount,
  yo as estimateResources,
@@ -91,11 +91,12 @@ export {
  mo as layers,
  Co as loadTextData,
  eo as ops,
- Fo as performanceTest,
+ No as performanceTest,
  wo as selectBackend,
- Io as sentenceEmbeddings,
- Jo as sentenceEmbeddingsTensor,
+ Jo as sentenceEmbeddings,
+ Ko as sentenceEmbeddingsTensor,
  to as tasks,
+ Ro as tokensFromTasks,
  Ao as topP,
  So as validateConfig,
  go as waitForModel
@@ -1,5 +1,5 @@
- import l from "../utilities/tokenParse.js";
- import d, { SPECIALS as f } from "./BaseTokeniser.js";
+ import p from "../utilities/tokenParse.js";
+ import f, { SPECIALS as g } from "./BaseTokeniser.js";
  function u(o, e) {
  return `${o}-::-${e}`;
  }
@@ -53,7 +53,7 @@ function v(o, e) {
  o.tokens[s] = n;
  }), o.pairs.delete(u(e.a, e.b));
  }
- class x extends d {
+ class x extends f {
  targetSize;
  vocab = /* @__PURE__ */ new Set();
  vocabIndex = /* @__PURE__ */ new Map();
@@ -62,7 +62,7 @@ class x extends d {
  constructor(e, s) {
  super(), Array.isArray(e) ? (e.forEach((t, n) => {
  this.vocab.add(t), this.vocabIndex.set(t, n);
- }), s && (this.merges = s), this.targetSize = e.length, f.forEach((t) => {
+ }), s && (this.merges = s), this.targetSize = e.length, g.forEach((t) => {
  const n = e.indexOf(t);
  n !== -1 && this.addSpecialToken(t, n);
  })) : (this.addSpecialTokens(), this.targetSize = e);
@@ -80,7 +80,7 @@ class x extends d {
  this.vocab.clear(), this.vocabIndex.clear(), this.merges = [], this.pretokenMap.clear();
  }
  get trained() {
- return this.vocab.size === this.targetSize && this.merges.length > 0;
+ return this.vocab.size > g.length && this.vocab.size <= this.targetSize && this.merges.length > 0;
  }
  get vocabSize() {
  return this.vocab.size;
@@ -95,7 +95,7 @@ class x extends d {
  return this.vocabIndex.get("") ?? 1;
  }
  async train(e) {
- const s = e.map((a) => l(a)).flat(1), t = new Set(s);
+ const s = e.map((a) => p(a)).flat(1), t = new Set(s);
  this.vocab = /* @__PURE__ */ new Set(), this.pretokenMap.clear(), this.merges = [], this.addSpecialTokens();
  const n = Array.from(t), r = n.map((a) => Array.from(a).map((c) => (this.vocab.add(c), c))), i = b(r);
  for (; this.vocab.size < this.targetSize && this.merges.length < this.targetSize; ) {
@@ -104,13 +104,13 @@ class x extends d {
  break;
  this.merges.push([a.a, a.b]), this.vocab.add(a.a + a.b), v(i, a);
  }
- n.forEach((a, p) => {
- const c = r[p];
+ n.forEach((a, l) => {
+ const c = r[l];
  this.pretokenMap.set(a, c);
  }), this.vocabIndex.clear();
- let g = 0;
+ let d = 0;
  for (const a of this.vocab.keys())
- this.vocabIndex.set(a, g++);
+ this.vocabIndex.set(a, d++);
  return this.emit("trainStatus", "trained"), this.vocab.size;
  }
  getVocab() {
@@ -126,7 +126,7 @@ class x extends d {
  }), this.pretokenMap.set(e, s), s;
  }
  tokeniseStrings(e) {
- return e.map((s) => l(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
+ return e.map((s) => p(s).map((r) => this.pretokenMap.has(r) ? this.pretokenMap.get(r) : this.tokeniseWord(r)).flat(1));
  }
  tokenise(e, s) {
  const t = this.tokeniseStrings(e);
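
The only behavioural change in this tokeniser file (the other hunks are minifier renames) is the trained getter: 0.12.0 treated the tokeniser as trained only when the vocabulary exactly matched targetSize, while 0.12.2 also accepts a smaller vocabulary, as long as it has grown past the special tokens and at least one merge exists. A minimal sketch of where this matters, assuming the class in this hunk is the BPETokeniser exported from the package root:

import { BPETokeniser } from "@genai-fi/nanogpt";

// Aim for a 5000-token vocabulary. On a small corpus the merge loop can run
// out of candidate pairs before the target size is reached.
const tok = new BPETokeniser(5000);
await tok.train(["some small training corpus", "a few more lines of text"]);

// 0.12.0: trained was false unless the vocabulary reached exactly 5000 entries.
// 0.12.2: trained is true once the vocabulary exceeds the special tokens and at
// least one merge exists, so an early-stopped tokeniser is still reported as usable.
console.log(tok.trained);
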
@@ -94,7 +94,7 @@ export default abstract class GPTTrainer {
  log: TrainingLogEntry;
  progress: TrainingProgress;
  }>;
- createTrainValidationSplit(tasks: Task[], batchSize?: number, validationSplit?: number): Promise<{
+ createTrainValidationSplit(tasks: Task[] | Uint16Array, batchSize?: number, validationSplit?: number): Promise<{
  trainDataset: Dataset<{
  xs: Tensor;
  ys: Tensor;
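
The same widening applies at the GPTTrainer level: createTrainValidationSplit now accepts either Task objects or a ready-made Uint16Array of token ids, and the implementation simply skips tokensFromTasks when it is handed a Uint16Array, as the next hunk shows. A sketch, with gptTrainer standing in for an instance of a concrete subclass (hypothetical, not defined in this diff):

// Hypothetical pre-tokenised buffer, e.g. produced by tokensFromTasks or loaded from disk.
declare const gptTrainer: any;
const tokenBuffer = new Uint16Array([1, 5, 9, 2, 7, 3]);

const { trainDataset, validationDataset, size } =
  await gptTrainer.createTrainValidationSplit(tokenBuffer, 32, 0.1);
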
@@ -1,11 +1,11 @@
- import { DatasetBuilder as u, PAGE_FACTOR as f } from "./DatasetBuilder.js";
- import z from "./AdamExt.js";
- import { t as S, v as y, k, d as h, b as p } from "../index-Duu1Lvvv.js";
+ import { DatasetBuilder as f, PAGE_FACTOR as u } from "./DatasetBuilder.js";
+ import y from "./AdamExt.js";
+ import { t as z, v as S, k, d as h, b as p } from "../index-Duu1Lvvv.js";
  import { tokensFromTasks as x } from "./tasks/Task.js";
  import { z as m } from "../zeros-5YROwwUH.js";
  class B {
- constructor(t, e, s = 1e-3) {
- this.tokenizer = e, this.model = t, this.lossScaling = t.lossScaling, this.learningRate = s, this.resetOptimizer(), this.datasetBuilder = new u(e, t.config.blockSize);
+ constructor(t, e, i = 1e-3) {
+ this.tokenizer = e, this.model = t, this.lossScaling = t.lossScaling, this.learningRate = i, this.resetOptimizer(), this.datasetBuilder = new f(e, t.config.blockSize);
  }
  model;
  optimizer;
@@ -36,7 +36,7 @@ class B {
  }
  resetOptimizer(t = { learningRateFactor: 1, beta1: 0.9, beta2: 0.99, epsilon: 1e-8 }) {
  this.optimizer && this.optimizer.dispose();
- const e = new z(
+ const e = new y(
  t.learningRateFactor * this.learningRate,
  t.beta1,
  t.beta2,
@@ -51,11 +51,11 @@ class B {
  );
  this.optimizer = e;
  }
- trainStep(t, e, s = !1, i = !1) {
- return S(() => {
+ trainStep(t, e, i = !1, s = !1) {
+ return z(() => {
  this.model.getProfiler()?.startMemory();
- const { xs: a, ys: l } = e, d = () => {
- const [o, c] = this.model.forward(
+ const { xs: a, ys: l } = e, c = () => {
+ const [o, d] = this.model.forward(
  {
  training: !0,
  checkpointing: this._gradientCheckpointing,
@@ -65,47 +65,47 @@ class B {
  l
  );
  o.dispose();
- const g = c.mul(p(this.lossScaling));
- return c.dispose(), g;
- }, { value: n, grads: r } = y(d);
- return s ? this.model.getProfiler()?.endMemory("Training") : (this.optimizer.applyGradients(r), this.model.getProfiler()?.endMemory("Training"), i ? (t.gradients = r, Object.values(r).forEach((o) => k(o))) : h(r)), n.mul(p(1 / this.lossScaling));
+ const g = d.mul(p(this.lossScaling));
+ return d.dispose(), g;
+ }, { value: n, grads: r } = S(c);
+ return i ? this.model.getProfiler()?.endMemory("Training") : (this.optimizer.applyGradients(r), this.model.getProfiler()?.endMemory("Training"), s ? (t.gradients = r, Object.values(r).forEach((o) => k(o))) : h(r)), n.mul(p(1 / this.lossScaling));
  });
  }
  async dummyPass() {
  const t = m([1, this.model.config.blockSize], "int32"), e = m([1, this.model.config.blockSize], "int32");
  try {
- const s = this.trainStep({}, { xs: t, ys: e }, !0);
- await s.data(), s.dispose();
- } catch (s) {
- console.error("Error during dummy pass:", s);
+ const i = this.trainStep({}, { xs: t, ys: e }, !0);
+ await i.data(), i.dispose();
+ } catch (i) {
+ console.error("Error during dummy pass:", i);
  } finally {
  t.dispose(), e.dispose();
  }
  }
- trainBatch(t, e, s = !1) {
+ trainBatch(t, e, i = !1) {
  try {
- const i = this.trainStep(t, e, !1, s);
- return e.xs.dispose(), e.ys.dispose(), t.step++, t.totalSteps++, i;
- } catch (i) {
- throw console.error(`Error processing batch at step ${t.step}:`, i), h(), i;
+ const s = this.trainStep(t, e, !1, i);
+ return e.xs.dispose(), e.ys.dispose(), t.step++, t.totalSteps++, s;
+ } catch (s) {
+ throw console.error(`Error processing batch at step ${t.step}:`, s), h(), s;
  }
  }
- async createTrainValidationSplit(t, e = 32, s = 0.1) {
- const i = await x(t, this.tokenizer), a = /* @__PURE__ */ new Set();
- if (s > 0) {
- const n = Math.floor(i.length / (this.datasetBuilder.blockSize * f)), r = Math.max(1, Math.floor(n * s));
+ async createTrainValidationSplit(t, e = 32, i = 0.1) {
+ const s = t instanceof Uint16Array ? t : await x(t, this.tokenizer), a = /* @__PURE__ */ new Set();
+ if (i > 0) {
+ const n = Math.floor(s.length / (this.datasetBuilder.blockSize * u)), r = Math.max(1, Math.floor(n * i));
  for (; a.size < r; ) {
  const o = Math.floor(Math.random() * n);
  a.add(o);
  }
  }
- const l = await this.datasetBuilder.createTextDataset(i, e, a, !1), d = await this.datasetBuilder.createTextDataset(
- i,
+ const l = await this.datasetBuilder.createTextDataset(s, e, a, !1), c = await this.datasetBuilder.createTextDataset(
+ s,
  e,
  a,
  !0
  );
- return { trainDataset: l, validationDataset: d, size: i.length };
+ return { trainDataset: l, validationDataset: c, size: s.length };
  }
  dispose() {
  this.optimizer && this.optimizer.dispose();
@@ -5,4 +5,4 @@ export declare abstract class Task {
  abstract nextConversation(): Conversation[] | null;
  abstract estimateTokens(tokeniser: ITokeniser): Promise<number>;
  }
- export declare function tokensFromTasks(tasks: Task[], tokenizer: ITokeniser): Promise<Uint16Array>;
+ export declare function tokensFromTasks(tasks: Task[], tokenizer: ITokeniser, cb?: (tokens: number) => void): Promise<Uint16Array>;
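
The new optional third parameter is a progress callback: it is invoked on each pass of the encoding loop with the running total of tokens written so far, as the Task.js hunk below shows. A sketch of its use, with the task list and tokeniser assumed to exist elsewhere:

import { Task, tokensFromTasks } from "@genai-fi/nanogpt";

// Hypothetical inputs, not defined in this diff.
declare const myTasks: Task[];
declare const tokenizer: any; // an ITokeniser instance

const tokens = await tokensFromTasks(myTasks, tokenizer, (count) => {
  // count is the number of tokens encoded so far; useful for driving a progress bar.
  console.log("encoded", count, "tokens");
});
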
@@ -1,41 +1,44 @@
- class g {
+ class w {
  }
- function h(f, a, l, e, r) {
- for (let i = 0; i < f.length; i++) {
- const c = f[i].nextConversation();
+ function g(l, i, h, n, t) {
+ for (let o = 0; o < l.length; o++) {
+ const c = l[o].nextConversation();
  if (c) {
- const o = l.encodeConversation(c), s = a[a.length - 1];
- if (e.offset + o.length > s.length) {
- const n = s.length - e.offset;
- s.set(o.slice(0, n), e.offset);
- const t = new Uint16Array(Math.floor(r * 0.1) + 100);
- t.set(o.slice(n), 0), a.push(t), e.offset = o.length - n;
+ const s = h.encodeConversation(c);
+ n.total += s.length;
+ const f = i[i.length - 1];
+ if (n.offset + s.length > f.length) {
+ const a = f.length - n.offset;
+ f.set(s.slice(0, a), n.offset);
+ const e = new Uint16Array(Math.floor(t * 0.1) + 100);
+ e.set(s.slice(a), 0), i.push(e), n.offset = s.length - a;
  } else
- s.set(o, e.offset), e.offset += o.length;
+ f.set(s, n.offset), n.offset += s.length;
  }
  }
  }
- async function w(f, a) {
- const l = (await Promise.all(f.map((n) => n.estimateTokens(a)))).reduce(
- (n, t) => n + t,
+ async function m(l, i, h) {
+ const n = (await Promise.all(l.map((e) => e.estimateTokens(i)))).reduce(
+ (e, r) => e + r,
  0
- ), e = [new Uint16Array(l)], r = {
- offset: 0
+ ), t = [new Uint16Array(n)], o = {
+ offset: 0,
+ total: 0
  };
- let i = performance.now();
- for (; r.offset < l && (h(f, e, a, r, l), !f.every((t) => !t.hasMoreConversations())); )
- performance.now() - i > 40 && (await new Promise(requestAnimationFrame), i = performance.now());
- if (e.length === 1)
- return e[0].subarray(0, r.offset);
- const c = e.reduce((n, t) => n + t.length, 0) - (e[e.length - 1].length - r.offset), o = new Uint16Array(c);
- let s = 0;
- for (let n = 0; n < e.length; n++) {
- const t = e[n];
- n === e.length - 1 ? (o.set(t.subarray(0, r.offset), s), s += r.offset) : (o.set(t, s), s += t.length);
+ let c = performance.now();
+ for (; o.offset < n && (g(l, t, i, o, n), !l.every((r) => !r.hasMoreConversations())); )
+ performance.now() - c > 40 && (await new Promise(requestAnimationFrame), c = performance.now()), h && h(o.total);
+ if (t.length === 1)
+ return t[0].subarray(0, o.offset);
+ const s = t.reduce((e, r) => e + r.length, 0) - (t[t.length - 1].length - o.offset), f = new Uint16Array(s);
+ let a = 0;
+ for (let e = 0; e < t.length; e++) {
+ const r = t[e];
+ e === t.length - 1 ? (f.set(r.subarray(0, o.offset), a), a += o.offset) : (f.set(r, a), a += r.length);
  }
- return o;
+ return f;
  }
  export {
- g as Task,
- w as tokensFromTasks
+ w as Task,
+ m as tokensFromTasks
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@genai-fi/nanogpt",
- "version": "0.12.0",
+ "version": "0.12.2",
  "type": "module",
  "main": "dist/main.js",
  "types": "dist/main.d.ts",