@genai-fi/nanogpt 0.2.4 → 0.2.6

This diff shows the changes between publicly released versions of the package as they appear in its public registry. It is provided for informational purposes only.
@@ -0,0 +1,150 @@
+ import { engine as $ } from "@tensorflow/tfjs";
+ import { i as u, j as S, k as h, E as f, l as E, o as N, c as l, n as y, r as p, a as D, m as x } from "../index-D1SlunD-.js";
+ import { c as m } from "../complex-D6Bq1XDf.js";
+ import { r as v, s as T } from "../stack-DB2YLlAs.js";
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function i(e, t = "float32") {
+ if (u(e), t === "complex64") {
+ const a = i(e, "float32"), o = i(e, "float32");
+ return m(a, o);
+ }
+ const r = S(h(e), t);
+ return f.makeTensor(r, e, t);
+ }
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function d(e, t = "float32") {
+ if (u(e), t === "complex64") {
+ const a = d(e, "float32"), o = i(e, "float32");
+ return m(a, o);
+ }
+ const r = E(h(e), t);
+ return f.makeTensor(r, e, t);
+ }
+ function C(e, t, r) {
+ const a = t.rank > 1 ? t.shape[t.rank - 1] : 1, o = t.rank > 1 ? t.rank - 1 : 1, s = `Must have updates.shape = indices.shape[:batchDim] + shape[sliceDim:], got updates.shape: ${r.shape}, indices.shape: ${t.shape}, shape: ${e}, sliceDim: ${a}, and batchDim: ${o}.`;
+ if (r.rank < o)
+ throw new Error(s + ` update.rank < ${o}. `);
+ if (e.length < a + (r.rank - o))
+ throw new Error(s + ` Output shape length < ${a + (r.rank - o)}`);
+ if (r.rank !== o + e.length - a)
+ throw new Error(s + ` update.rank != ${o + e.length - a}`);
+ for (let n = 0; n < o; ++n)
+ if (r.shape[n] !== t.shape[n])
+ throw new Error(s + ` updates.shape[${n}] (${r.shape[n]}) != indices.shape[${n}] (${t.shape[n]}).`);
+ for (let n = 0; n < r.rank - o; ++n)
+ if (r.shape[n + o] !== e[n + a])
+ throw new Error(s + ` updates.shape[${n + o}] (${r.shape[n + o]}) != shape[${n + o}] (${e[n + o]})`);
+ }
+ function O(e, t, r) {
+ if (t.rank < 1)
+ throw new Error(`tf.scatterND() expects the indices to be rank 1 or higher, but the rank was ${t.rank}.`);
+ if (e.rank < 1)
+ throw new Error(`tf.scatterND() expects the updates to be rank 1 or higher, but the rank was ${e.rank}.`);
+ if (t.dtype !== "int32")
+ throw new Error(`The dtype of 'indices' should be int32, but got dtype: ${t.dtype}`);
+ if (r.length < 1)
+ throw new Error(`Output rank must be greater or equal to 1, but got shape: ${r}`);
+ if (r.length === 0) {
+ if (t.size === 0)
+ throw new Error(`Indices specified for empty output. indices shape: ${t.shape}`);
+ if (e.size === 0)
+ throw new Error(`Updates specified for empty output. updates shape: ${e.shape}`);
+ }
+ C(r, t, e);
+ }
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function z(e, t, r) {
+ u(r);
+ const a = l(e, "indices", "scatterND", "int32"), o = l(t, "updates", "scatterND");
+ O(o, a, r);
+ const s = { indices: a, updates: o }, n = { shape: r };
+ return f.runKernel(y, s, n);
+ }
+ const I = /* @__PURE__ */ N({ scatterND_: z });
+ class L {
+ variableNames = ["labels", "softmaxProbs", "dy"];
+ outputShape;
+ userCode;
+ constructor(t, r) {
+ this.outputShape = [t, r], this.userCode = `
+ void main() {
+ ivec2 coords = getOutputCoords();
+ int index = int(getLabels(coords.x));
+ float prob = getSoftmaxProbsAtOutCoords();
+ float dy = getDy(coords.x);
+ setOutput((index == coords.y ? prob - 1.0 : prob) * dy);
+ }
+ `;
+ }
+ }
+ function P(e) {
+ const { logits: t, labels: r, dy: a } = e.inputs, o = e.backend, s = r.shape[0], n = t.shape[1], c = new L(s, n);
+ return o.runWebGLProgram(c, [r, t, a], "float32");
+ }
+ const K = {
+ kernelName: "EfficientScatterSub",
+ backendName: "webgl",
+ kernelFunc: P
+ };
+ p(K);
+ function A(e) {
+ const { logits: t, labels: r, dy: a } = e.inputs, o = r.shape[0], s = t.shape[1], n = v(0, o, 1, "int32"), c = T([n, r], 1), b = d([o]), g = I(c, b, [o, s]), k = D(t, g), w = a.reshape([o, 1]);
+ return x(k, w);
+ }
+ const F = {
+ kernelName: "EfficientScatterSub",
+ backendName: "cpu",
+ kernelFunc: A
+ };
+ p(F);
+ function M(e, t, r) {
+ return $().runKernel("EfficientScatterSub", { logits: e, labels: t, dy: r }, {});
+ }
+ export {
+ M as scatterSub
+ };
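The new module above registers a custom "EfficientScatterSub" kernel for the webgl and cpu backends and exports it as `scatterSub`. A minimal usage sketch follows; the import path is hypothetical, and the semantics (roughly `(x - oneHot(labels)) * dy`, i.e. the sparse softmax cross-entropy gradient when `x` holds softmax probabilities) are inferred from the kernel code above rather than from documentation.

```ts
import * as tf from "@tensorflow/tfjs";
// Hypothetical import path; importing the module also registers the kernels as a side effect.
import { scatterSub } from "@genai-fi/nanogpt/ops/scatterSub.js";

// probs: [batch, vocab] softmax probabilities, labels: [batch] int32 class ids,
// dy: [batch] upstream gradient of the per-example loss.
const probs = tf.softmax(tf.randomNormal([4, 8]));
const labels = tf.tensor1d([1, 3, 0, 7], "int32");
const dy = tf.ones([4]);

// Appears to compute (probs - oneHot(labels)) * dy, shape [4, 8].
const dLogits = scatterSub(probs, labels, dy) as tf.Tensor;
dLogits.print();
```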
@@ -0,0 +1,50 @@
+ import { E as e, R as c, o as f, d as u, f as a, P as i } from "./index-D1SlunD-.js";
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function g(n, s, t = 1, r = "float32") {
+ if (t === 0)
+ throw new Error("Cannot have a step of zero");
+ const o = { start: n, stop: s, step: t, dtype: r };
+ return e.runKernel(c, {}, o);
+ }
+ /**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function k(n, s = 0) {
+ const t = u(n, "tensors", "stack", "string_or_numeric");
+ a(t.length >= 1, () => "Pass at least one tensor to tf.stack"), t.length > 0 && a(s <= t[0].rank, () => "Axis must be <= rank of the tensor");
+ const r = t, o = { axis: s };
+ return e.runKernel(i, r, o);
+ }
+ const h = /* @__PURE__ */ f({ stack_: k });
+ export {
+ g as r,
+ h as s
+ };
@@ -0,0 +1,49 @@
+ import { o, c as a, E as u, g as p, h as i, S as x } from "./index-D1SlunD-.js";
+ /**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function l(n, t) {
+ const s = { x: a(n, "x", "reshape", "string_or_numeric") }, r = { shape: t };
+ return u.runKernel(p, s, r);
+ }
+ const h = /* @__PURE__ */ o({ reshape_: l });
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function m(n, t = null, e = !1) {
+ let s = a(n, "x", "sum");
+ s.dtype === "bool" && (s = i(s, "int32"));
+ const r = { x: s }, c = { axis: t, keepDims: e };
+ return u.runKernel(x, r, c);
+ }
+ const _ = /* @__PURE__ */ o({ sum_: m });
+ export {
+ h as r,
+ _ as s
+ };
@@ -6,6 +6,7 @@ export default class CharTokeniser extends EE<'trainStatus'> implements ITokenis
  unkToken: number;
  vocab: string[];
  private cache;
+ private _trained;
  constructor(vocabSize: number);
  constructor(vocab: string[]);
  get trained(): boolean;
@@ -1,57 +1,66 @@
- import { E as r } from "../index-Dwqa6Zy2.js";
- const h = ["<eos>", "<unk>"];
- class l extends r {
+ import { E as k } from "../index-Dwqa6Zy2.js";
+ const u = ["<eos>", "<unk>"];
+ class b extends k {
  vocabSize = 0;
  eosToken = 0;
  unkToken = 0;
  vocab = [];
  cache = /* @__PURE__ */ new Map();
- constructor(s) {
- if (super(), Array.isArray(s))
- if (this.vocab = s, this.vocab.length > 0)
- this.vocabSize = this.vocab.length, this.eosToken = this.vocab.indexOf("<eos>"), this.unkToken = this.vocab.indexOf("<unk>"), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab.forEach((i, o) => {
- this.cache.set(i, o);
+ _trained = !1;
+ constructor(t) {
+ if (super(), Array.isArray(t)) {
+ if (this.vocab = t, this.vocab.length > 0)
+ this.vocabSize = this.vocab.length, this.eosToken = this.vocab.indexOf("<eos>"), this.unkToken = this.vocab.indexOf("<unk>"), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("<pad>")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf("_")), this.unkToken === -1 && (this.unkToken = this.vocab.indexOf(" ")), this.unkToken === -1 && (this.unkToken = this.eosToken), this.vocab.forEach((i, n) => {
+ this.cache.set(i, n);
  });
  else
  throw new Error("Vocab cannot be empty");
- else
- this.vocabSize = s;
+ this._trained = !0;
+ } else
+ this.vocabSize = t, this.vocab = new Array(this.vocabSize).fill("<pad>"), this.vocab[0] = "<eos>", this.vocab[1] = "<unk>", this.eosToken = 0, this.unkToken = 1, this.cache.set("<eos>", 0), this.cache.set("<unk>", 1);
  }
  get trained() {
- return this.vocab.length === this.vocabSize;
+ return this.vocab.length === this.vocabSize && this._trained;
  }
  destroy() {
  }
- async train(s) {
- const i = s.map((t) => t.split("")).flat(), o = new Set(i), e = Array.from(o), n = this.vocabSize - h.length;
- if (e.length > n) {
- const t = /* @__PURE__ */ new Map();
- i.forEach((a) => {
- t.set(a, (t.get(a) || 0) + 1);
- }), e.sort((a, c) => (t.get(a) || 0) - (t.get(c) || 0)), e.splice(0, e.length - n);
- } else if (e.length < n)
- for (; e.length < n; )
- e.push("<pad>");
- return e.sort((t, a) => t.charCodeAt(0) - a.charCodeAt(0)), this.vocab = [...e, ...h], this.eosToken = this.vocab.indexOf("<eos>"), this.unkToken = this.vocab.indexOf("<unk>"), this.vocabSize = this.vocab.length, this.cache.clear(), this.vocab.forEach((t, a) => {
- this.cache.set(t, a);
+ async train(t) {
+ const i = t.map((s) => s.split("")).flat(), n = new Set(i), e = Array.from(n), c = this.vocab.indexOf("<pad>"), a = this.vocabSize - u.length;
+ if (c === -1)
+ return this.vocabSize;
+ if (this._trained = !0, e.length > a) {
+ const s = /* @__PURE__ */ new Map();
+ i.forEach((o) => {
+ s.set(o, (s.get(o) || 0) + 1);
+ }), e.sort((o, r) => (s.get(o) || 0) - (s.get(r) || 0)), e.splice(0, e.length - a);
+ }
+ let h = c;
+ if (h !== -1) {
+ const s = new Set(this.vocab);
+ for (const o of e)
+ if (!s.has(o) && (this.vocab[h] = o, s.add(o), h = this.vocab.indexOf("<pad>", h + 1), h === -1))
+ break;
+ }
+ return this.cache.clear(), this.vocab.forEach((s, o) => {
+ this.cache.set(s, o);
  }), this.emit("trainStatus", "trained"), this.vocabSize;
  }
- async tokenise(s, i) {
+ async tokenise(t, i) {
  if (!this.trained)
  throw new Error("Tokeniser not trained");
- return s.map((e) => i ? e.split("").map((n) => this.cache.get(n) ?? this.unkToken) : e.split("").map((n) => {
- const t = this.cache.get(n);
- return t !== void 0 ? this.vocab[t] : "<unk>";
+ return t.map((e) => i ? e.split("").map((c) => this.cache.get(c) ?? this.unkToken) : e.split("").map((c) => {
+ const a = this.cache.get(c);
+ return a !== void 0 ? this.vocab[a] : "<unk>";
  }));
  }
- async detokenise(s) {
- return s.map((o) => o.map((e) => this.vocab[e]).join(""));
+ async detokenise(t) {
+ return t.map((n) => n.map((e) => this.vocab[e]).join(""));
  }
- async encode(s) {
- return (await this.tokenise([s], !0))[0];
+ async encode(t) {
+ return (await this.tokenise([t], !0))[0];
  }
- async decode(s) {
- return (await this.detokenise([s]))[0];
+ async decode(t) {
+ return (await this.detokenise([t]))[0];
  }
  getVocab() {
  return this.vocab;
@@ -59,13 +68,13 @@ class l extends r {
  async getMerges() {
  return [];
  }
- async createTrainingData(s, i = 5) {
- const o = await this.tokenise(s, !0), e = [], n = [];
- for (let t = 0; t < o.length - i; t++)
- e.push(...o[t].slice(0, i)), n.push(o[t + 1][0]);
- return [e, n];
+ async createTrainingData(t, i = 5) {
+ const n = await this.tokenise(t, !0), e = [], c = [];
+ for (let a = 0; a < n.length - i; a++)
+ e.push(...n[a].slice(0, i)), c.push(n[a + 1][0]);
+ return [e, c];
  }
  }
  export {
- l as default
+ b as default
  };
@@ -1,4 +1,4 @@
- import { A as r, m as c, s as h, a as g, e as o } from "../index-DcaSvB38.js";
+ import { A as r, m as c, s as h, a as g, e as o } from "../index-D1SlunD-.js";
  class u extends r {
  constructor(t, e, s, a, i) {
  super(t, e, s, a), this.config = i, this.startLearningRate = t;
@@ -7,7 +7,7 @@ class z {
  }
  // Create dataset from text files
  async createTextDataset(s, o = 32, i = 0, c = 1) {
- const h = await Promise.all(s.map((t) => this.tokenizer.encode(t))), r = this.tokenizer.eosToken >= 0, n = h.map((t) => r ? [...t, this.tokenizer.eosToken] : t).flat(), a = n.slice(
+ const r = await Promise.all(s.map((t) => this.tokenizer.encode(t))), h = this.tokenizer.eosToken >= 0, n = r.map((t) => h ? [...t, this.tokenizer.eosToken] : t).flat(), a = n.slice(
  Math.floor(i * n.length),
  c === 1 ? void 0 : Math.floor(c * n.length)
  ), l = (function* () {
@@ -20,7 +20,8 @@ class z {
  const e = t;
  return this.tf.tidy(() => ({
  xs: e.xs.cast("int32"),
- ys: this.tf.oneHot(e.ys.cast("int32"), this.tokenizer.vocabSize)
+ ys: e.ys.cast("int32")
+ // this.tf.oneHot(batchData.ys.cast('int32'), this.tokenizer.vocabSize),
  }));
  }).prefetch(2);
  }
@@ -1,5 +1,5 @@
  import { DatasetBuilder as d } from "./DatasetBuilder.js";
- import h from "./AdamExt.js";
+ import p from "./AdamExt.js";
  class u {
  constructor(t, e, s, i = 1e-3) {
  this.tokenizer = s, this.tf = t, this.model = e, this.learningRate = i, this.resetOptimizer(), this.datasetBuilder = new d(this.tf, s, e.config.blockSize);
@@ -25,7 +25,7 @@ class u {
  }
  resetOptimizer(t = { learningRateFactor: 1, beta1: 0.9, beta2: 0.99, epsilon: 1e-8 }) {
  this.optimizer && this.optimizer.dispose();
- const e = new h(
+ const e = new p(
  t.learningRateFactor * this.learningRate,
  t.beta1,
  t.beta2,
@@ -55,7 +55,7 @@ class u {
  });
  }
  dummyPass() {
- const t = this.tf.zeros([1, this.model.config.blockSize], "int32"), e = this.tf.zeros([1, this.model.config.blockSize, this.model.config.vocabSize]);
+ const t = this.tf.zeros([1, this.model.config.blockSize], "int32"), e = this.tf.zeros([1, this.model.config.blockSize], "int32");
  try {
  const s = this.trainStep({ xs: t, ys: e }, !0);
  s.dataSync(), s.dispose();
@@ -0,0 +1,11 @@
+ import * as tf from '@tensorflow/tfjs-core';
+ /**
+ * Numerically stable sparse cross-entropy with gradient support
+ * This version handles potential numerical issues better
+ */
+ export declare function sparseSoftmaxCrossEntropy(logits: tf.Tensor, labels: tf.Tensor): tf.Tensor;
+ /**
+ * Custom gradient implementation for sparse cross-entropy
+ * This ensures proper backpropagation
+ */
+ export declare function createSoftmaxCrossEntropyWithGrad(): (...args: tf.Tensor[]) => tf.Tensor<tf.Rank>;
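The declaration file above is the public surface of the new sparse cross-entropy op: it takes integer class ids directly, so no one-hot encoding of the labels is needed. A usage sketch, assuming a hypothetical import path:

```ts
import * as tf from "@tensorflow/tfjs";
// Hypothetical import path for the module declared above.
import {
  sparseSoftmaxCrossEntropy,
  createSoftmaxCrossEntropyWithGrad,
} from "@genai-fi/nanogpt/ops/sparseCrossEntropy.js";

const logits = tf.randomNormal([4, 10]);            // [batch, vocab]
const labels = tf.tensor1d([2, 7, 1, 9], "int32");  // integer class ids

// Forward pass only: per-example loss.
const loss = sparseSoftmaxCrossEntropy(logits, labels);

// Custom-gradient variant, so backprop flows to the logits during training.
const lossFn = createSoftmaxCrossEntropyWithGrad();
const lossWithGrad = lossFn(logits, labels);
lossWithGrad.print();
```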
@@ -0,0 +1,177 @@
+ import { gatherSub as K } from "../ops/gatherSub.js";
+ import { scatterSub as _ } from "../ops/scatterSub.js";
+ import { o as l, c as d, E as f, M as G, p as z, L as I, q as N, a as E, t as M, u as T, e as m, v as S, w as $, z as g } from "../index-D1SlunD-.js";
+ import { s as F, r as b } from "../sum-02UQ5Eaq.js";
+ /**
+ * @license
+ * Copyright 2017 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function P(n, s, t) {
+ const a = n.length + s.length, e = [];
+ let r = 0, c = 0;
+ for (let o = 0; o < a; o++)
+ t.indexOf(o) === -1 ? e.push(n[r++]) : e.push(s[c++]);
+ return e;
+ }
+ function q(n, s) {
+ const t = s.map((a) => 1);
+ return P(n, t, s);
+ }
+ /**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function A(n, s = null, t = !1) {
+ const e = { x: d(n, "x", "max") }, r = { reductionIndices: s, keepDims: t };
+ return f.runKernel(G, e, r);
+ }
+ const k = /* @__PURE__ */ l({ max_: A });
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function D(n) {
+ const t = { x: d(n, "x", "exp") };
+ return f.runKernel(z, t);
+ }
+ const O = /* @__PURE__ */ l({ exp_: D });
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function W(n) {
+ const t = { x: d(n, "x", "log", "float32") };
+ return f.runKernel(I, t);
+ }
+ const j = /* @__PURE__ */ l({ log_: W });
+ /**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function B(n, s = null, t = !1) {
+ const a = d(n, "x", "logSumExp"), e = N(s, a.shape), r = k(
+ a,
+ e,
+ !0
+ /* keepDims */
+ ), c = E(a, r), o = O(c), p = F(o, e), u = j(p), i = M(b(r, u.shape), u);
+ if (t) {
+ const h = q(i.shape, e);
+ return b(i, h);
+ }
+ return i;
+ }
+ const H = /* @__PURE__ */ l({ logSumExp_: B });
+ /**
+ * @license
+ * Copyright 2018 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+ function J(n, s = -1) {
+ const t = d(n, "logits", "softmax", "float32");
+ if (s === -1 && (s = t.rank - 1), s !== t.rank - 1)
+ throw Error(`Softmax along a non-last dimension is not yet supported. Logits was rank ${t.rank} and dim was ${s}`);
+ const a = { logits: t }, e = { dim: s };
+ return f.runKernel(T, a, e);
+ }
+ const Q = /* @__PURE__ */ l({ softmax_: J });
+ function R(n, s) {
+ return $(() => {
+ const t = n.shape[n.shape.length - 1], e = n.shape.slice(0, -1).reduce((h, x) => h * x, 1), r = n.shape.length > 2 ? n.reshape([e, t]) : n, c = s.shape.length > 1 ? s.reshape([e]).cast("int32") : s.cast("int32"), o = k(r, -1, !0), p = E(r, o), u = H(p, -1);
+ return K(u, c, p);
+ });
+ }
+ function Z() {
+ return m().backendName === "tensorflow" ? S((s, t, a) => {
+ const e = s.shape.length > 2 ? s.reshape([-1, s.shape[s.shape.length - 1]]) : s, r = t.shape.length > 1 ? t.reshape([-1]).cast("int32") : t.cast("int32"), [c, o] = m().runKernel(
+ "NativeSparseSoftmaxCrossEntropy",
+ { logits: e, labels: r },
+ {}
+ );
+ return a([o.reshape(s.shape)]), { value: c, gradFunc: (p, u) => [u[0], g(t)] };
+ }) : S(
+ // @ts-expect-error Invalid params
+ (s, t, a) => {
+ const e = s.shape[s.shape.length - 1], c = s.shape.slice(0, -1).reduce((h, x) => h * x, 1), o = s.reshape([c, e]), p = t.reshape([c]).cast("int32"), u = R(o, p);
+ return a([o, p]), o.dispose(), p.dispose(), { value: u, gradFunc: (h, x) => $(() => {
+ const y = x[0], C = x[1], L = Q(y), v = _(L, C, h), w = g(t);
+ return [v, w];
+ }) };
+ }
+ );
+ }
+ export {
+ Z as createSoftmaxCrossEntropyWithGrad,
+ R as sparseSoftmaxCrossEntropy
+ };
@@ -1,16 +1,16 @@
- import { z as u } from "../jszip.min-CAxN99oA.js";
+ import { j as u } from "../jszip.min-CjP2V1VV.js";
  import { importWeights as F } from "./weights.js";
  import k from "../tokeniser/CharTokeniser.js";
  import j from "../NanoGPTModel.js";
- import { dummyPassAsync as z } from "./dummy.js";
- async function A(o) {
+ import { dummyPassAsync as A } from "./dummy.js";
+ async function E(o) {
  const e = await fetch(o);
  if (!e.ok)
  throw new Error(`Failed to fetch ${o}: ${e.statusText}`);
  return e.arrayBuffer();
  }
  async function T(o, e) {
- const m = typeof e == "string" ? await A(e) : e, n = await u.loadAsync(m), s = /* @__PURE__ */ new Map(), c = await n.file("manifest.json")?.async("string");
+ const m = typeof e == "string" ? await E(e) : e, n = await u.loadAsync(m), s = /* @__PURE__ */ new Map(), c = await n.file("manifest.json")?.async("string");
  if (!c)
  throw new Error("Manifest file not found in the zip archive");
  const f = JSON.parse(c);
@@ -29,7 +29,7 @@ async function T(o, e) {
  }
  o.disposeVariables();
  const a = new j(o, f.config);
- await z(a), a.loadWeights(p);
+ await A(a), a.loadWeights(p);
  const w = await n.file("log.json")?.async("string");
  if (w)
  try {
@@ -0,0 +1,10 @@
+ import { GPTConfig } from '../config';
+ export declare function estimateParameterCount(config: GPTConfig): number;
+ export declare function estimateMemoryUsage(config: GPTConfig): number;
+ export declare function estimateTrainingMemoryUsage(config: GPTConfig, batchSize: number): number;
+ export declare function estimateResources(config: GPTConfig, batchSize: number): {
+ numParams: number;
+ modelMemoryMB: number;
+ trainingMemoryMB: number;
+ };
+ export declare function validateConfig(config: GPTConfig): void;
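These declarations add resource-estimation helpers keyed off a GPTConfig. A sketch of how they might be called; the entry-point path and the non-size config fields are assumptions, since only vocabSize and blockSize appear elsewhere in this diff:

```ts
// Hypothetical entry point; adjust to wherever the package re-exports these helpers.
import { estimateResources, validateConfig } from "@genai-fi/nanogpt";

// Hypothetical GPTConfig values; field names other than vocabSize/blockSize are assumed.
const config = {
  vocabSize: 256,
  blockSize: 128,
  nLayer: 4,
  nHead: 4,
  nEmbd: 128,
} as any; // cast because the exact GPTConfig shape is not shown in this diff

validateConfig(config); // presumably throws on an inconsistent config
const { numParams, modelMemoryMB, trainingMemoryMB } = estimateResources(config, /* batchSize */ 32);
console.log({ numParams, modelMemoryMB, trainingMemoryMB });
```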