@genai-fi/nanogpt 0.6.3 → 0.7.1
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the package contents as they appear in that registry.
- package/dist/Generator.js +17 -13
- package/dist/NanoGPTModel.d.ts +2 -2
- package/dist/NanoGPTModel.js +104 -136
- package/dist/{RealDiv-BYViZwhN.js → RealDiv-CVYNbZxu.js} +30 -29
- package/dist/{Reshape-t7Kcikjk.js → Reshape-CEsEp0AI.js} +5 -5
- package/dist/Reshape-Do18N3gO.js +30 -0
- package/dist/TeachableLLM.js +9 -5
- package/dist/{TiedEmbedding-9WeDwvjO.js → TiedEmbedding-ccLBFiZi.js} +4 -4
- package/dist/{axis_util-Bu4h7XWV.js → axis_util-5DTW2tFV.js} +3 -3
- package/dist/backend.d.ts +1 -0
- package/dist/backend.js +7 -0
- package/dist/backend_util-C9Ut8n0Q.js +749 -0
- package/dist/{broadcast_to-DARN-DBD.js → broadcast_to-Ba9h_8DO.js} +2 -2
- package/dist/{concat-5aPGqw3Z.js → concat-CbXTetof.js} +8 -8
- package/dist/{dataset-pgqp-YfL.js → dataset-U3PrjwgU.js} +7 -7
- package/dist/{dropout-Bciw46HT.js → dropout-DPfPgWWe.js} +8 -8
- package/dist/{gather-DjyCjmOD.js → gather-Bbh8DHhM.js} +1 -1
- package/dist/gelu-BFwVnd1r.js +26 -0
- package/dist/gpgpu_math-DffelNS-.js +2371 -0
- package/dist/index-DYD_yPa-.js +12076 -0
- package/dist/{index-BAzbokzv.js → index-UdZhlibC.js} +414 -398
- package/dist/{kernel_funcs_utils-CUxJCg0g.js → kernel_funcs_utils-CXDy3EN7.js} +31 -30
- package/dist/layers/BaseLayer.js +2 -2
- package/dist/layers/CausalSelfAttention.js +8 -8
- package/dist/layers/MLP.js +5 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +4 -4
- package/dist/layers/TiedEmbedding.js +5 -5
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +11 -7
- package/dist/{log_sum_exp-YEo2h3gb.js → log_sum_exp-BnmCkHWl.js} +16 -16
- package/dist/main.d.ts +13 -0
- package/dist/main.js +44 -23
- package/dist/{mat_mul-7121rsJk.js → mat_mul-dwmZz69e.js} +4 -4
- package/dist/{max-DtlIuVeW.js → max-ByjEGoFx.js} +3 -3
- package/dist/{mulmat_packed_gpu-D4nKF7Je.js → mulmat_packed_gpu-IGPBp6h9.js} +1 -1
- package/dist/non_max_suppression_impl-CsEgBuMA.js +134 -0
- package/dist/{ones-BBlSRqn1.js → ones-C8Mfln6-.js} +2 -2
- package/dist/ops/adamAdjust.d.ts +2 -0
- package/dist/ops/adamAdjust.js +9 -0
- package/dist/ops/adamMoments.d.ts +2 -0
- package/dist/ops/adamMoments.js +9 -0
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/cpu/adamAdjust.d.ts +1 -0
- package/dist/ops/cpu/adamAdjust.js +18 -0
- package/dist/ops/cpu/adamMoments.d.ts +1 -0
- package/dist/ops/cpu/adamMoments.js +16 -0
- package/dist/ops/cpu/appendCache.js +8 -8
- package/dist/ops/cpu/attentionMask.js +9 -9
- package/dist/ops/cpu/fusedSoftmax.js +17 -11
- package/dist/ops/cpu/gatherSub.js +7 -7
- package/dist/ops/cpu/gelu.js +13 -13
- package/dist/ops/cpu/matMulGelu.js +36 -24
- package/dist/ops/cpu/matMulMul.js +14 -8
- package/dist/ops/cpu/mulDropout.js +9 -3
- package/dist/ops/cpu/normRMS.js +5 -5
- package/dist/ops/cpu/qkv.js +3 -3
- package/dist/ops/cpu/rope.js +5 -5
- package/dist/ops/cpu/scatterSub.js +11 -11
- package/dist/ops/fusedSoftmax.js +1 -1
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/attentionMask.js +1 -1
- package/dist/ops/grads/fusedSoftmax.js +2 -2
- package/dist/ops/grads/gelu.js +3 -24
- package/dist/ops/grads/matMulGelu.js +5 -5
- package/dist/ops/grads/normRMS.js +6 -6
- package/dist/ops/grads/qkv.js +1 -1
- package/dist/ops/grads/rope.js +3 -3
- package/dist/ops/matMulGelu.js +1 -1
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/rope.js +4 -4
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/webgl/adamAdjust.d.ts +1 -0
- package/dist/ops/webgl/adamAdjust.js +50 -0
- package/dist/ops/webgl/adamMoments.d.ts +1 -0
- package/dist/ops/webgl/adamMoments.js +38 -0
- package/dist/ops/webgl/appendCache.js +1 -1
- package/dist/ops/webgl/attentionMask.js +1 -1
- package/dist/ops/webgl/fusedSoftmax.js +4 -4
- package/dist/ops/webgl/gatherSub.js +8 -8
- package/dist/ops/webgl/gelu.js +2 -2
- package/dist/ops/webgl/log.js +5 -5
- package/dist/ops/webgl/matMulGelu.js +17 -17
- package/dist/ops/webgl/matMulMul.js +1 -1
- package/dist/ops/webgl/mulDropout.js +4 -4
- package/dist/ops/webgl/normRMS.js +2 -2
- package/dist/ops/webgl/qkv.js +1 -1
- package/dist/ops/webgl/rope.js +1 -1
- package/dist/ops/webgl/scatterSub.js +1 -1
- package/dist/ops/webgpu/adamAdjust.d.ts +1 -0
- package/dist/ops/webgpu/adamAdjust.js +52 -0
- package/dist/ops/webgpu/adamMoments.d.ts +1 -0
- package/dist/ops/webgpu/adamMoments.js +51 -0
- package/dist/ops/webgpu/appendCache.d.ts +1 -0
- package/dist/ops/webgpu/appendCache.js +57 -0
- package/dist/ops/webgpu/attentionMask.d.ts +1 -0
- package/dist/ops/webgpu/attentionMask.js +65 -0
- package/dist/ops/webgpu/gatherSub.d.ts +1 -0
- package/dist/ops/webgpu/gatherSub.js +52 -0
- package/dist/ops/webgpu/gelu.d.ts +14 -0
- package/dist/ops/webgpu/gelu.js +87 -0
- package/dist/ops/webgpu/index.d.ts +0 -0
- package/dist/ops/webgpu/index.js +11 -0
- package/dist/ops/webgpu/normRMS.d.ts +1 -0
- package/dist/ops/webgpu/normRMS.js +41 -0
- package/dist/ops/webgpu/normRMSGrad.d.ts +1 -0
- package/dist/ops/webgpu/normRMSGrad.js +128 -0
- package/dist/ops/webgpu/qkv.d.ts +1 -0
- package/dist/ops/webgpu/qkv.js +57 -0
- package/dist/ops/webgpu/rope.d.ts +1 -0
- package/dist/ops/webgpu/rope.js +69 -0
- package/dist/ops/webgpu/scatterSub.d.ts +1 -0
- package/dist/ops/webgpu/scatterSub.js +38 -0
- package/dist/ops/webgpu/utils/reductions.d.ts +9 -0
- package/dist/ops/webgpu/utils/reductions.js +68 -0
- package/dist/{ops-C0sQEcPw.js → ops-aRTXR2Sr.js} +433 -508
- package/dist/{random_width-DWzaOgrn.js → random_width-DbSpgl4o.js} +144 -144
- package/dist/{range-DYsrnfiy.js → range-D9CZhVlR.js} +1 -1
- package/dist/{reciprocal-CJQeasVa.js → reciprocal-CGB48wZB.js} +1 -1
- package/dist/{register_all_kernels-BfFCQAqs.js → register_all_kernels-DnbAyBXt.js} +203 -200
- package/dist/{reshape-krWGKraP.js → reshape-BR0eoLYN.js} +1 -1
- package/dist/{scatter_nd_util-93ln7Hut.js → scatter_nd_util-OjyAxku2.js} +3 -3
- package/dist/{selu_util-sntGesxr.js → selu_util-Ce6pu9IM.js} +44 -44
- package/dist/{shared-Ca6iDobD.js → shared-Czipaeb6.js} +541 -606
- package/dist/shared-DS5waSIY.js +69 -0
- package/dist/{sin-D_h-qCSx.js → sin-CiBxrDqX.js} +1 -1
- package/dist/slice-BHbDHObE.js +28 -0
- package/dist/{softmax-fsdtf6JC.js → softmax-JMEIUo2J.js} +1 -1
- package/dist/{split-eiktj-6L.js → split-CRU0PjVV.js} +4 -4
- package/dist/{stack-dfEEz2OY.js → stack-ikk2Y8_P.js} +2 -2
- package/dist/{sum-BE_Irnim.js → sum-NLYbiDag.js} +3 -3
- package/dist/{tensor-Xyi595sG.js → tensor-Do9PKbIE.js} +1 -1
- package/dist/{tensor2d-CPEkynbH.js → tensor2d-CWHxHpLh.js} +1 -1
- package/dist/training/Adam.d.ts +22 -0
- package/dist/training/Adam.js +93 -0
- package/dist/training/AdamExt.d.ts +1 -1
- package/dist/training/AdamExt.js +13 -12
- package/dist/training/DatasetBuilder.js +2 -2
- package/dist/training/Evaluator.js +1 -1
- package/dist/training/FullTrainer.js +27 -27
- package/dist/training/Trainer.d.ts +5 -6
- package/dist/training/Trainer.js +54 -55
- package/dist/training/sparseCrossEntropy.d.ts +0 -4
- package/dist/training/sparseCrossEntropy.js +7 -7
- package/dist/utilities/arrayClose.d.ts +1 -0
- package/dist/utilities/arrayClose.js +11 -0
- package/dist/utilities/dummy.js +19 -19
- package/dist/utilities/generate.js +15 -16
- package/dist/utilities/multinomialCPU.d.ts +2 -0
- package/dist/utilities/multinomialCPU.js +13 -0
- package/dist/utilities/performance.d.ts +2 -0
- package/dist/utilities/performance.js +16 -0
- package/dist/utilities/profile.d.ts +1 -0
- package/dist/utilities/profile.js +9 -6
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-wSS22xj5.js → variable-BTBkayv_.js} +1 -1
- package/dist/webgpu_program-WaoMq-WD.js +548 -0
- package/dist/webgpu_util-DhSeP4b6.js +80 -0
- package/dist/{zeros-YJDE7oRb.js → zeros-DnPT2nD4.js} +10 -10
- package/package.json +2 -1
- package/dist/gpgpu_math-CNslybmD.js +0 -3115
- package/dist/norm-CzltS9Fz.js +0 -86
@@ -1,22 +1,22 @@
 import { gatherSub as x } from "../ops/gatherSub.js";
 import { scatterSub as L } from "../ops/scatterSub.js";
-import {
-import { s as
-import { m as z } from "../max-
-import { l as v } from "../log_sum_exp-
+import { y, t as u, z as C, c as E } from "../index-UdZhlibC.js";
+import { s as G } from "../softmax-JMEIUo2J.js";
+import { m as z } from "../max-ByjEGoFx.js";
+import { l as v } from "../log_sum_exp-BnmCkHWl.js";
 function k(t, s) {
   return u(() => {
-    const n = t.shape[t.shape.length - 1], c = t.shape.slice(0, -1).reduce((o, e) => o * e, 1), h = t.shape.length > 2 ? t.reshape([c, n]) : t, p = s.shape.length > 1 ? s.reshape([c]).cast("int32") : s.cast("int32"), r = z(h, -1, !0), a =
+    const n = t.shape[t.shape.length - 1], c = t.shape.slice(0, -1).reduce((o, e) => o * e, 1), h = t.shape.length > 2 ? t.reshape([c, n]) : t, p = s.shape.length > 1 ? s.reshape([c]).cast("int32") : s.cast("int32"), r = z(h, -1, !0), a = E(h, r), m = v(a, -1);
     return x(m, p, a);
   });
 }
 function A() {
-  return
+  return y(
     // @ts-expect-error Invalid params
     (s, n, d) => {
       const c = s.shape[s.shape.length - 1], p = s.shape.slice(0, -1).reduce((o, e) => o * e, 1), r = s.reshape([p, c]), a = n.reshape([p]).cast("int32"), m = k(r, a);
       return d([r, a]), r.dispose(), a.dispose(), { value: m, gradFunc: (o, e) => u(() => {
-        const S = e[0], f = e[1], b =
+        const S = e[0], f = e[1], b = G(S), l = L(b, f, o), g = C(n);
         return [l.reshape(s.shape), g];
       }) };
     }
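
Only import paths and two minified expressions change in the hunk above, but the code it touches is the numerically stable sparse cross-entropy used for training (judging by the gatherSub/scatterSub imports, this appears to be training/sparseCrossEntropy.js from the file list). As a readable reference, here is a hedged sketch of the same pattern against the public @tensorflow/tfjs API; the mapping of the minified single-letter imports to customGrad, tidy, sub and zerosLike is an assumption based on how they are called, and the package's fused gatherSub/scatterSub ops are replaced with plain oneHot arithmetic.

```ts
import * as tf from "@tensorflow/tfjs";

// Hedged sketch (not the package's shipped code): numerically stable sparse
// cross-entropy with a custom gradient.
//   loss_i            = logSumExp(logits_i - max_i) - (logits_i - max_i)[label_i]
//   d loss / d logits = softmax(logits) - oneHot(labels)
export const sparseCrossEntropy = tf.customGrad(
  // @ts-expect-error Invalid params (same workaround the shipped code uses)
  (logits: tf.Tensor2D, labels: tf.Tensor1D, save: (t: tf.Tensor[]) => void) => {
    const [batch, vocab] = logits.shape; // labels: int32 class indices, shape [batch]
    save([logits, labels]);
    const value = tf.tidy(() => {
      const shifted = tf.sub(logits, tf.max(logits, -1, true)); // subtract row max for stability
      const lse = tf.logSumExp(shifted, -1);                    // [batch]
      const picked = tf.sum(tf.mul(shifted, tf.oneHot(labels, vocab)), -1); // logit at the target index
      return tf.sub(lse, picked);                               // per-example loss
    });
    const gradFunc = (dy: tf.Tensor1D, saved: tf.Tensor[]) =>
      tf.tidy(() => {
        const [savedLogits, savedLabels] = saved as [tf.Tensor2D, tf.Tensor1D];
        const grad = tf.sub(tf.softmax(savedLogits), tf.oneHot(savedLabels, vocab));
        return [tf.mul(grad, tf.reshape(dy, [batch, 1])), tf.zerosLike(savedLabels)];
      });
    return { value, gradFunc };
  }
);
```
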
@@ -0,0 +1 @@
+export declare function arraysClose(a: unknown, b: unknown, epsilon?: number): boolean;
@@ -0,0 +1,11 @@
+function f(r, e, n = 1e-5) {
+  if (Array.isArray(r) && Array.isArray(e)) {
+    if (r.length !== e.length) return !1;
+    for (let t = 0; t < r.length; ++t)
+      if (!f(r[t], e[t], n)) return !1;
+    return !0;
+  } else return typeof r == "number" && typeof e == "number" ? r === -1 / 0 && e === -1 / 0 ? !0 : Math.abs(r - e) < n : !1;
+}
+export {
+  f as arraysClose
+};
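
These two added hunks line up with package/dist/utilities/arrayClose.d.ts and arrayClose.js in the file list: a recursive approximate-equality check for nested numeric arrays. A readable TypeScript equivalent of the minified function (a sketch, not the shipped source):

```ts
// Readable equivalent of the minified helper above (sketch, not the shipped source).
export function arraysClose(a: unknown, b: unknown, epsilon = 1e-5): boolean {
  if (Array.isArray(a) && Array.isArray(b)) {
    if (a.length !== b.length) return false;
    for (let i = 0; i < a.length; ++i) {
      if (!arraysClose(a[i], b[i], epsilon)) return false;
    }
    return true;
  }
  if (typeof a !== "number" || typeof b !== "number") return false;
  if (a === -Infinity && b === -Infinity) return true; // matching -Infinity counts as equal
  return Math.abs(a - b) < epsilon;
}
```
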
package/dist/utilities/dummy.js CHANGED
@@ -1,35 +1,35 @@
-import { m as
-import { z as i } from "../zeros-
-async function
+import { m as y, v as P, e as S } from "../index-UdZhlibC.js";
+import { z as i } from "../zeros-DnPT2nD4.js";
+async function w(s) {
   const t = i([1, s.config.gpt.blockSize], "int32"), [e, n] = s.forward({ training: !1 }, t);
   await e.data(), e.dispose(), n && n.dispose(), t.dispose();
 }
-async function
-  const t =
-  await
+async function k(s) {
+  const t = y(), e = t.numBytesInGPUAllocated ?? t.numBytesAllocatedInGPU ?? t.numBytes;
+  await w(s);
   const n = i([1, s.config.gpt.blockSize], "int32"), r = i([1, s.config.gpt.blockSize], "int32"), o = {
     perBatch: 0,
     tapeSize: 0,
     gradients: s.getNumParams() * 4
-  },
-  const [
+  }, f = () => {
+    const [c, l] = s.forward({ training: !0 }, n, r), p = S().state.activeTape;
     let u = 0;
-    if (
-    for (const z of
+    if (p)
+      for (const z of p)
        u += z.saved?.reduce((B, I) => B + I.size * 4, 0) || 0;
-    return o.tapeSize = u,
-  }, { value:
-  o.perBatch =
-  for (const
-
+    return o.tapeSize = u, c.dispose(), l;
+  }, { value: m, grads: d } = P(f), a = y(), g = a.numBytesInGPUAllocated ?? a.numBytesAllocatedInGPU ?? a.numBytes;
+  o.perBatch = g - e - o.gradients, console.log("Dummy training memory requirements:", o), await m.data(), m.dispose();
+  for (const c in d)
+    d[c].dispose();
   return n.dispose(), r.dispose(), o;
 }
-function
+function v(s) {
   const t = i([1, s.config.gpt.blockSize], "int32"), [e, n] = s.forward({ training: !1 }, t);
   e.dispose(), n && n.dispose(), t.dispose();
 }
 export {
-
-
-
+  v as dummyPass,
+  w as dummyPassAsync,
+  k as dummyPassTrainAsync
 };
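
The rewritten dummyPassTrainAsync estimates training memory by diffing the backend's allocated-byte counter around a throw-away forward/backward pass, on top of summing the tensors saved on the gradient tape. Below is a hedged sketch of the byte-counter part of that pattern using public tfjs calls; forward, getNumParams and blockSize are taken from the minified code above, while the fallback chain of counter names reflects that the WebGL and WebGPU backends expose different fields.

```ts
import * as tf from "@tensorflow/tfjs";

// Whichever allocated-byte counter the active backend exposes (WebGL vs WebGPU differ).
const gpuBytes = (): number => {
  const m = tf.memory() as unknown as Record<string, number>;
  return m.numBytesInGPUAllocated ?? m.numBytesAllocatedInGPU ?? m.numBytes;
};

// Sketch: estimate per-batch training memory for a model with a tfjs-style forward().
export async function estimateTrainingMemory(model: {
  forward: (opts: { training: boolean }, x: tf.Tensor, y?: tf.Tensor) => [tf.Tensor, tf.Tensor?];
  getNumParams: () => number;
  blockSize: number;
}) {
  const before = gpuBytes();
  const x = tf.zeros([1, model.blockSize], "int32");
  const y = tf.zeros([1, model.blockSize], "int32");

  // variableGrads runs the forward pass, records the tape and produces gradients,
  // so the byte-counter delta includes activations plus gradient buffers.
  const { value, grads } = tf.variableGrads(() => {
    const [loss] = model.forward({ training: true }, x, y);
    return loss as tf.Scalar;
  });

  const gradientBytes = model.getNumParams() * 4; // one f32 per parameter
  const report = {
    gradients: gradientBytes,
    perBatch: gpuBytes() - before - gradientBytes,
  };

  await value.data();
  value.dispose();
  Object.values(grads).forEach((g) => g.dispose());
  x.dispose();
  y.dispose();
  return report;
}
```
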
@@ -1,23 +1,22 @@
-import
-import { t as
-import { c as
-async function
+import "../index-UdZhlibC.js";
+import { t as m } from "../tensor2d-CWHxHpLh.js";
+import { c as u } from "../concat-CbXTetof.js";
+async function v(o, r, a, c, f) {
   if (c <= 0)
     throw new Error("Length must be a positive integer");
   if (a.length === 0)
     throw new Error("Prompt cannot be an empty string");
-  const p = await o.tokenise([a], !0), s = r.config.gpt.useRope ? new Array(r.config.gpt.nLayer).fill(void 0) : void 0
-
-
-
-
-
-
-
-  d.
-
-  return u !== -1 && i.splice(u), await o.decode(i);
+  const p = await o.tokenise([a], !0), s = r.config.gpt.useRope ? new Array(r.config.gpt.nLayer).fill(void 0) : void 0;
+  let e = m(p, [1, p[0].length], "int32"), t = e;
+  for (let g = 0; g < c; g++) {
+    const { output: n } = await r.generate(e, s, f), T = e, l = t;
+    t = u([t, n], 1), e = s ? n : u([e, n], 1), T.dispose(), l.dispose(), s || n.dispose();
+  }
+  const w = await t.array();
+  t.dispose();
+  const i = w[0], d = i.indexOf(o.eosToken);
+  return d !== -1 && i.splice(d), await o.decode(i);
 }
 export {
-
+  v as generateText
 };
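
generateText now keeps two tensors: the accumulated output sequence and the model input, which shrinks to just the newly sampled token when the RoPE/KV-cache path (useRope) is active, and it trims everything from the first eosToken before decoding. A readable sketch of that loop follows; tokenise, decode, generate, eosToken and the config fields are read off the minified code above, but their exact signatures here are assumptions.

```ts
import * as tf from "@tensorflow/tfjs";

// Readable sketch of the loop above; `model.generate` is assumed to return the
// next-token tensor and to accept an optional per-layer KV cache array.
export async function generateText(
  tokeniser: {
    tokenise(prompts: string[], addBos: boolean): Promise<number[][]>;
    decode(tokens: number[]): Promise<string>;
    eosToken: number;
  },
  model: {
    generate(x: tf.Tensor, cache?: unknown[], opts?: unknown): Promise<{ output: tf.Tensor }>;
    config: { gpt: { useRope: boolean; nLayer: number } };
  },
  prompt: string,
  length: number,
  opts?: unknown
): Promise<string> {
  const tokens = await tokeniser.tokenise([prompt], true);
  const cache = model.config.gpt.useRope ? new Array(model.config.gpt.nLayer).fill(undefined) : undefined;

  let input: tf.Tensor = tf.tensor2d(tokens, [1, tokens[0].length], "int32");
  let full: tf.Tensor = input;
  for (let i = 0; i < length; i++) {
    const { output } = await model.generate(input, cache, opts);
    const prevInput = input;
    const prevFull = full;
    full = tf.concat([full, output], 1);                    // accumulate the whole sequence
    input = cache ? output : tf.concat([input, output], 1); // with a cache, feed only the new token
    prevInput.dispose();
    prevFull.dispose();
    if (!cache) output.dispose();
  }

  const ids = (await full.array() as number[][])[0];
  full.dispose();
  const eos = ids.indexOf(tokeniser.eosToken);
  if (eos !== -1) ids.splice(eos); // drop the EOS token and everything after it
  return tokeniser.decode(ids);
}
```
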
@@ -0,0 +1,13 @@
+import "../index-UdZhlibC.js";
+import { t as e } from "../tensor2d-CWHxHpLh.js";
+function l(n) {
+  let r = 0;
+  const i = Math.random();
+  for (let t = 0; t < n.length; t++)
+    if (r += n[t], i < r)
+      return e([[t]], [1, 1], "int32");
+  return e([[n.length - 1]], [1, 1], "int32");
+}
+export {
+  l as default
+};
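
This added hunk matches utilities/multinomialCPU.js from the file list: inverse-CDF sampling of a single index from a probability vector, done on the CPU and returned as a [1, 1] int32 tensor, with the last index as a fallback against floating-point round-off. A typed sketch of the same idea:

```ts
import * as tf from "@tensorflow/tfjs";

// Sketch: sample one token index from a probability vector without a GPU round-trip.
export function sampleMultinomialCPU(probs: number[] | Float32Array): tf.Tensor2D {
  const u = Math.random(); // single uniform draw in [0, 1)
  let cumulative = 0;
  for (let i = 0; i < probs.length; i++) {
    cumulative += probs[i];
    if (u < cumulative) {
      return tf.tensor2d([[i]], [1, 1], "int32");
    }
  }
  // Probabilities may sum to slightly less than 1; fall back to the last index.
  return tf.tensor2d([[probs.length - 1]], [1, 1], "int32");
}
```
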
@@ -0,0 +1,16 @@
+import { t as s } from "../index-UdZhlibC.js";
+async function f(e, o = 10, r = !1) {
+  for (let t = 0; t < 100; t++) {
+    const a = r ? await e() : s(e);
+    t === 99 && await a.data(), a.dispose();
+  }
+  const n = performance.now();
+  for (let t = 0; t < o; t++) {
+    const a = r ? await e() : s(e);
+    t === o - 1 && await a.data(), a.dispose();
+  }
+  return (performance.now() - n) / o;
+}
+export {
+  f as default
+};
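
The new benchmarking helper (utilities/performance.js in the file list) runs 100 warm-up iterations so shader compilation and upload costs are excluded, then averages wall-clock time over the measured iterations, awaiting .data() on the last tensor of each phase so the asynchronous backend has actually finished its queued work. A hedged sketch of the same pattern, assuming the single-letter import used for the synchronous branch is tf.tidy:

```ts
import * as tf from "@tensorflow/tfjs";

// Sketch: average execution time (ms) of `fn`, which returns a tensor (optionally async).
export async function benchmark(
  fn: () => tf.Tensor | Promise<tf.Tensor>,
  iterations = 10,
  isAsync = false
): Promise<number> {
  // Warm-up: let the backend compile kernels and allocate buffers.
  for (let i = 0; i < 100; i++) {
    const t = isAsync ? await fn() : tf.tidy(fn as () => tf.Tensor);
    if (i === 99) await t.data(); // force the queued GPU work to complete
    t.dispose();
  }
  const start = performance.now();
  for (let i = 0; i < iterations; i++) {
    const t = isAsync ? await fn() : tf.tidy(fn as () => tf.Tensor);
    if (i === iterations - 1) await t.data();
    t.dispose();
  }
  return (performance.now() - start) / iterations;
}
```
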
@@ -1,5 +1,5 @@
-import { m as a } from "../index-
-const
+import { m as a } from "../index-UdZhlibC.js";
+const s = 1024 * 1024;
 class l {
   log = /* @__PURE__ */ new Map();
   maxMemory = 0;
@@ -20,14 +20,17 @@ class l {
       console.warn("MemoryProfiler: endMemory called without matching startMemory");
       return;
     }
-    const e = a(),
-    this.log.set(o, Math.max(this.log.get(o) || 0,
+    const e = a(), t = this.lastMemInfo.pop(), m = (e.numBytesInGPUAllocated ?? e.numBytesAllocatedInGPU ?? e.numBytes) - (t?.numBytesInGPUAllocated ?? t?.numBytesAllocatedInGPU ?? t?.numBytes ?? 0);
+    this.log.set(o, Math.max(this.log.get(o) || 0, m)), m > this.maxMemory && (this.maxMemory = m, this.maxLabel = o), this.peakMemory = Math.max(
+      this.peakMemory,
+      e.numBytesInGPUAllocated ?? e.numBytesAllocatedInGPU ?? e.numBytes
+    );
   }
   printSummary() {
     console.log("Memory Usage Summary:");
     for (const [o, e] of this.log.entries())
-      console.log(`- ${o}: ${(e /
-      this.maxLabel && console.log(`Peak Memory Usage: ${(this.maxMemory /
+      console.log(`- ${o}: ${(e / s).toFixed(2)} MB`);
+    this.maxLabel && console.log(`Peak Memory Usage: ${(this.maxMemory / s).toFixed(2)} MB at "${this.maxLabel}"`), console.log(`Overall Peak Memory Usage: ${(this.peakMemory / s).toFixed(2)} MB`);
   }
 }
 export {
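
The profiler hunks add an overall peakMemory counter next to the existing per-label maxima, again falling back across the backend-specific byte counters. A minimal sketch of the measurement being made (not the class's actual interface):

```ts
import * as tf from "@tensorflow/tfjs";

// Sketch of the per-label measurement: delta of allocated bytes between start and end marks.
const bytesNow = (): number => {
  const m = tf.memory() as unknown as Record<string, number>;
  return m.numBytesInGPUAllocated ?? m.numBytesAllocatedInGPU ?? m.numBytes;
};

const marks: number[] = [];
const log = new Map<string, number>();
let peakMemory = 0;

export const startMemory = () => marks.push(bytesNow());
export const endMemory = (label: string) => {
  const start = marks.pop() ?? 0;
  const now = bytesNow();
  log.set(label, Math.max(log.get(label) ?? 0, now - start)); // per-label high-water mark
  peakMemory = Math.max(peakMemory, now);                      // overall peak, added in 0.7.1
};
```
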
@@ -1,5 +1,5 @@
-import "../index-
-import { t as p } from "../tensor-
+import "../index-UdZhlibC.js";
+import { t as p } from "../tensor-Do9PKbIE.js";
 function h(n) {
   const e = n.reduce((s, o) => s + o.length, 0), a = new Float32Array(e);
   let t = 0;
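
The final hunk only updates two import paths; the visible function preallocates one Float32Array sized to the combined length of its input chunks and then copies each chunk in at a running offset, presumably wrapping the result in a tensor, given the tensor import. A minimal sketch of that flattening step:

```ts
// Sketch: concatenate numeric chunks into one Float32Array without intermediate copies.
export function flattenChunks(chunks: Array<Float32Array | number[]>): Float32Array {
  const total = chunks.reduce((sum, c) => sum + c.length, 0);
  const out = new Float32Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    out.set(chunk as ArrayLike<number>, offset); // copy chunk at the current offset
    offset += chunk.length;
  }
  return out;
}
```
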
|