@genai-fi/nanogpt 0.10.2 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Generator.d.ts +10 -5
- package/dist/Generator.js +11760 -146
- package/dist/{RealDiv-zz7FpkKX.js → RealDiv-Ds-jvL09.js} +28 -30
- package/dist/Reshape-Cd6e-Otn.js +14 -0
- package/dist/{Reshape-CHdUjC72.js → Reshape-Ct266DEk.js} +21 -23
- package/dist/TeachableLLM.d.ts +4 -3
- package/dist/TeachableLLM.js +15 -16
- package/dist/Trainer.d.ts +2 -2
- package/dist/Trainer.js +6 -6
- package/dist/{axis_util-BsIr9ZNu.js → axis_util-DofAuy0p.js} +1 -1
- package/dist/backend.js +2 -2
- package/dist/{backend_util-B1XRLuq9.js → backend_util-C7NWHpv7.js} +72 -73
- package/dist/{backend_webgpu-CqpfEImu.js → backend_webgpu-B0Vls736.js} +52 -54
- package/dist/broadcast_to-DDaNMbX7.js +28 -0
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +7 -11
- package/dist/checks/normRMS.js +9 -9
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +11 -12
- package/dist/checks/rope.js +2 -2
- package/dist/clip_by_value-Dn5tzexi.js +12 -0
- package/dist/complex-DClmWqJt.js +11 -0
- package/dist/concat-C6X3AAlQ.js +17 -0
- package/dist/{concat_util-iBYIyuQe.js → concat_util-CHsJFZJJ.js} +1 -1
- package/dist/{dataset-D2P7rHAw.js → dataset-DcjWqUVQ.js} +135 -137
- package/dist/dropout-OxuaJz6z.js +92 -0
- package/dist/expand_dims-BzfJK2uc.js +11 -0
- package/dist/{exports_initializers-CZSUJoVE.js → exports_initializers-eS9QJ6ut.js} +1 -1
- package/dist/floor-DIb-lN_u.js +9 -0
- package/dist/gather-BcO5UQNJ.js +9 -0
- package/dist/{gelu-Bmhopi0J.js → gelu-DqTbCx5x.js} +10 -11
- package/dist/{gpgpu_math-DsCcikas.js → gpgpu_math-CJcbnKPC.js} +841 -1015
- package/dist/index-D0RBWjq8.js +3520 -0
- package/dist/{index-DRyE072i.js → index-Dj5TkmPY.js} +330 -331
- package/dist/{kernel_funcs_utils-CWfOAPGO.js → kernel_funcs_utils-CSaumNDs.js} +132 -134
- package/dist/layers/BaseLayer.js +15 -16
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +7 -7
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +9 -9
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +21 -22
- package/dist/log_sum_exp-VLZgbFAH.js +39 -0
- package/dist/main.d.ts +1 -1
- package/dist/main.js +49 -50
- package/dist/{matMul16-fEAJ4smh.js → matMul16-cDxwemKj.js} +14 -15
- package/dist/matMulGelu-B2s_80-H.js +163 -0
- package/dist/mat_mul-DxpNTCRz.js +11 -0
- package/dist/mod-PrOKlFxH.js +11 -0
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +13 -14
- package/dist/ones-BX_wEgzB.js +14 -0
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +12 -13
- package/dist/ops/cpu/adamMoments.js +6 -7
- package/dist/ops/cpu/appendCache.js +7 -8
- package/dist/ops/cpu/attentionMask.js +11 -11
- package/dist/ops/cpu/fusedSoftmax.js +10 -11
- package/dist/ops/cpu/gatherSub.js +10 -11
- package/dist/ops/cpu/gelu.js +14 -15
- package/dist/ops/cpu/matMul16.js +6 -7
- package/dist/ops/cpu/matMulGelu.js +5 -6
- package/dist/ops/cpu/matMulMul.js +3 -4
- package/dist/ops/cpu/mulDropout.js +3 -4
- package/dist/ops/cpu/normRMS.js +11 -12
- package/dist/ops/cpu/qkv.js +8 -9
- package/dist/ops/cpu/rope.js +9 -10
- package/dist/ops/cpu/scatterSub.js +14 -16
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +10 -11
- package/dist/ops/grads/attentionMask.js +5 -6
- package/dist/ops/grads/gelu.js +3 -4
- package/dist/ops/grads/matMul16.js +4 -5
- package/dist/ops/grads/matMulGelu.js +8 -9
- package/dist/ops/grads/normRMS.js +9 -10
- package/dist/ops/grads/pack16.js +4 -5
- package/dist/ops/grads/qkv.js +17 -19
- package/dist/ops/grads/rope.js +3 -5
- package/dist/ops/grads/softmax16.js +3 -4
- package/dist/ops/grads/unpack16.js +3 -4
- package/dist/ops/grads/utils.d.ts +1 -0
- package/dist/ops/grads/utils.js +8 -4
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +2 -2
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +3 -4
- package/dist/ops/qkv.js +4 -8
- package/dist/ops/reshape16.js +16 -18
- package/dist/ops/rope.d.ts +1 -1
- package/dist/ops/rope.js +3 -8
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +5 -8
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +23 -24
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +2 -3
- package/dist/ops/webgl/adamMoments.js +1 -2
- package/dist/ops/webgl/appendCache.js +1 -2
- package/dist/ops/webgl/attentionMask.js +5 -6
- package/dist/ops/webgl/fusedSoftmax.js +6 -8
- package/dist/ops/webgl/gatherSub.js +6 -7
- package/dist/ops/webgl/gelu.js +2 -3
- package/dist/ops/webgl/log.js +11 -12
- package/dist/ops/webgl/matMul16.js +15 -16
- package/dist/ops/webgl/matMulGelu.js +7 -111
- package/dist/ops/webgl/matMulMul.js +14 -15
- package/dist/ops/webgl/mulDropout.js +8 -9
- package/dist/ops/webgl/normRMS.js +7 -8
- package/dist/ops/webgl/qkv.js +5 -6
- package/dist/ops/webgl/rope.js +7 -8
- package/dist/ops/webgl/scatterSub.js +5 -6
- package/dist/ops/webgpu/adamAdjust.js +10 -12
- package/dist/ops/webgpu/adamMoments.js +8 -10
- package/dist/ops/webgpu/add16.js +8 -9
- package/dist/ops/webgpu/appendCache.js +23 -25
- package/dist/ops/webgpu/attentionMask.js +10 -12
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +12 -14
- package/dist/ops/webgpu/gatherSub.js +9 -11
- package/dist/ops/webgpu/gelu.js +28 -29
- package/dist/ops/webgpu/matMul16.js +26 -28
- package/dist/ops/webgpu/matMul16_program.js +4 -5
- package/dist/ops/webgpu/mul16.js +7 -8
- package/dist/ops/webgpu/normRMS.js +17 -19
- package/dist/ops/webgpu/normRMSGrad.js +21 -28
- package/dist/ops/webgpu/pack16.js +12 -13
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +13 -15
- package/dist/ops/webgpu/rope.js +25 -27
- package/dist/ops/webgpu/scatterSub.js +7 -9
- package/dist/ops/webgpu/slice16.js +21 -23
- package/dist/ops/webgpu/softmax16.js +17 -19
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +7 -8
- package/dist/ops/webgpu/sub16.js +8 -9
- package/dist/ops/webgpu/sum16.js +19 -21
- package/dist/ops/webgpu/transpose16.js +19 -20
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +11 -12
- package/dist/ops/webgpu/unpack16.js +3 -4
- package/dist/ops/webgpu/utils/binary_op.js +7 -8
- package/dist/ops/webgpu/utils/reductions.js +14 -22
- package/dist/ops-FJapAPfm.js +476 -0
- package/dist/pack16-k4jq6aMX.js +39 -0
- package/dist/patches/webgpu_backend.js +19 -20
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +15 -16
- package/dist/{random_width-BVV9HveY.js → random_width-UGQn4OWb.js} +2506 -2761
- package/dist/range-CuGvVN2c.js +10 -0
- package/dist/relu-Cf80uA2p.js +9 -0
- package/dist/reshape-CkjKPPqB.js +9 -0
- package/dist/resize_nearest_neighbor-DB8k9KN_.js +175 -0
- package/dist/rope-BmZmp9uP.js +24 -0
- package/dist/{scatter_nd_util-C7zXRT_h.js → scatter_nd_util-BY22Cc-C.js} +1 -1
- package/dist/selu_util-BuLbmbrl.js +44 -0
- package/dist/{shared-CHhxz-O5.js → shared-B7USJZgw.js} +1 -1
- package/dist/{shared-D2NP_CpY.js → shared-BQboIImQ.js} +379 -381
- package/dist/slice-Aqy7KbJh.js +12 -0
- package/dist/{slice_util-DyjSAD0u.js → slice_util-D8CQRenR.js} +7 -7
- package/dist/{softmax-C9JQEtnO.js → softmax-faLoUZVT.js} +4 -5
- package/dist/split-BNz5jcGc.js +9 -0
- package/dist/squeeze--YMgaAAf.js +10 -0
- package/dist/stack-WJK22CFn.js +11 -0
- package/dist/step-dXR33iOg.js +261 -0
- package/dist/sum-BdplSvq_.js +11 -0
- package/dist/{tensor-0r5yOo2R.js → tensor-BQqrDvpx.js} +1 -1
- package/dist/tensor1d-LxP9asMm.js +11 -0
- package/dist/{tensor2d-CSB4KOb0.js → tensor2d-BN1sSfQO.js} +6 -7
- package/dist/{tensor4d-D7bLqGqz.js → tensor4d-DVwr7pLF.js} +6 -7
- package/dist/{tfjs_backend-CNkSTL0c.js → tfjs_backend-Vi4JfLzT.js} +256 -265
- package/dist/tile-CvN_LyVr.js +11 -0
- package/dist/tokeniser/BaseTokeniser.d.ts +27 -0
- package/dist/tokeniser/BaseTokeniser.js +94 -0
- package/dist/tokeniser/CharTokeniser.d.ts +4 -3
- package/dist/tokeniser/CharTokeniser.js +46 -32
- package/dist/tokeniser/bpe.d.ts +4 -3
- package/dist/tokeniser/bpe.js +60 -45
- package/dist/tokeniser/type.d.ts +11 -0
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.d.ts +2 -2
- package/dist/training/DatasetBuilder.js +32 -36
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.d.ts +3 -3
- package/dist/training/Trainer.js +2 -2
- package/dist/training/sparseCrossEntropy.js +5 -5
- package/dist/transpose-JawVKyZy.js +36 -0
- package/dist/unsorted_segment_sum-LAbmE9G4.js +277 -0
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.d.ts +1 -4
- package/dist/utilities/packed.js +10 -745
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.js +5 -5
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-DzfrwYuP.js → variable-DQ9yYgEU.js} +1 -1
- package/dist/{webgpu_program-DzaQiqel.js → webgpu_program-CAE4RICo.js} +177 -171
- package/dist/{webgpu_util-0_ubCEHJ.js → webgpu_util-BdovYhXr.js} +34 -35
- package/dist/zeros-DeiE2zTa.js +13 -0
- package/dist/zeros_like-BAz3iKru.js +721 -0
- package/package.json +4 -2
- package/dist/Reshape-CDVLyVfz.js +0 -16
- package/dist/broadcast_to-B0ChcDaz.js +0 -30
- package/dist/complex-BBiRlsVq.js +0 -13
- package/dist/concat-DmBLPVGC.js +0 -19
- package/dist/dropout-B1x1kYMa.js +0 -99
- package/dist/expand_dims-ouvfxQ1n.js +0 -13
- package/dist/gather-CH9sdacz.js +0 -10
- package/dist/index-D6Q1lPZO.js +0 -2157
- package/dist/log_sum_exp-D3ftBNY5.js +0 -41
- package/dist/mat_mul-C59XWcJd.js +0 -12
- package/dist/mod-DESSvHIU.js +0 -12
- package/dist/mulmat_packed_gpu-Coh6qbJk.js +0 -55
- package/dist/ones-jU9jlQvM.js +0 -15
- package/dist/ops-BFDtP6th.js +0 -645
- package/dist/pack16-CmVZs6af.js +0 -41
- package/dist/patches/PackedTensor.d.ts +0 -12
- package/dist/patches/PackedTensor.js +0 -11
- package/dist/patches/engine.d.ts +0 -261
- package/dist/patches/engine.js +0 -12
- package/dist/patches/tape.d.ts +0 -12
- package/dist/patches/tape.js +0 -5
- package/dist/range-ZZZD60Fx.js +0 -11
- package/dist/reciprocal-CrYlsAGD.js +0 -10
- package/dist/register_all_kernels-nvj2k7OC.js +0 -12307
- package/dist/relu-BYDneVPn.js +0 -10
- package/dist/reshape-CaPQzFvz.js +0 -10
- package/dist/rope-s4W2XO9B.js +0 -32
- package/dist/selu_util-BGPXmd4B.js +0 -303
- package/dist/sin-Djs4aQiu.js +0 -16
- package/dist/slice-DvovR5wq.js +0 -13
- package/dist/split-DBck65sX.js +0 -10
- package/dist/squeeze-C00Ipm_7.js +0 -11
- package/dist/stack-ChnHwRpX.js +0 -13
- package/dist/sum-ywRJj3Zr.js +0 -12
- package/dist/tensor-CzmOBsdf.js +0 -909
- package/dist/tensor1d-BlUT89BP.js +0 -12
- package/dist/tensor_util-DfwaWayG.js +0 -523
- package/dist/tile-CR074jmp.js +0 -13
- package/dist/transpose-DH4gmHvu.js +0 -38
- package/dist/zeros-DBFVbpv5.js +0 -14
package/dist/range-CuGvVN2c.js
@@ -0,0 +1,10 @@
+import { E as e, _ as f } from "./index-D0RBWjq8.js";
+function E(n, o, r = 1, a = "float32") {
+  if (r === 0)
+    throw new Error("Cannot have a step of zero");
+  const t = { start: n, stop: o, step: r, dtype: a };
+  return e.runKernel(f, {}, t);
+}
+export {
+  E as r
+};

package/dist/reshape-CkjKPPqB.js
@@ -0,0 +1,9 @@
+import { q as o, u as t, E as a, R as p } from "./index-D0RBWjq8.js";
+function c(r, s) {
+  const e = { x: t(r, "x", "reshape", "string_or_numeric") }, n = { shape: s };
+  return a.runKernel(p, e, n);
+}
+const x = /* @__PURE__ */ o({ reshape_: c });
+export {
+  x as r
+};

package/dist/resize_nearest_neighbor-DB8k9KN_.js
@@ -0,0 +1,175 @@
+import { q as i, u as a, $ as f, E as u, a_ as y, a$ as A, af as D, aZ as p, o as q, p as z, b as I, b0 as B, D as m, b1 as v, b2 as T, b3 as S, b4 as G, b5 as L, b6 as M, b7 as R, N as k, b8 as j, b9 as C, ba as P, bb as U, y as h, bc as Z, bd as F, be as H } from "./index-D0RBWjq8.js";
+import { b as _ } from "./broadcast_to-DDaNMbX7.js";
+import { e as J } from "./axis_util-DofAuy0p.js";
+import { m as E } from "./log_sum_exp-VLZgbFAH.js";
+import { r as g } from "./reshape-CkjKPPqB.js";
+import { s as b } from "./sum-BdplSvq_.js";
+function O(s, r, e) {
+  const n = a(r, "a", "where"), t = a(e, "b", "where"), o = a(s, "condition", "where", "bool"), c = f(f(o.shape, n.shape), t.shape), d = _(o, c), $ = _(n, c), l = _(t, c), w = {
+    condition: d,
+    t: $,
+    e: l
+  };
+  return u.runKernel(y, w);
+}
+const ge = /* @__PURE__ */ i({ where_: O });
+function Q(s, r = null, e = !1) {
+  const t = { x: a(s, "x", "min") }, o = { axis: r, keepDims: e };
+  return u.runKernel(A, t, o);
+}
+const N = /* @__PURE__ */ i({ min_: Q });
+function V(s, r = "euclidean", e = null, n = !1) {
+  s = a(s, "x", "norm");
+  const t = K(s, r, e);
+  let o = t.shape;
+  if (n) {
+    const c = D(e, s.shape);
+    o = J(t.shape, c);
+  }
+  return g(t, o);
+}
+function K(s, r, e = null) {
+  if (s.rank === 0)
+    return p(s);
+  if (s.rank !== 1 && e === null)
+    return K(g(s, [-1]), r, e);
+  if (s.rank === 1 || typeof e == "number" || Array.isArray(e) && e.length === 1) {
+    if (r === 1)
+      return b(p(s), e);
+    if (r === 1 / 0)
+      return E(p(s), e);
+    if (r === -1 / 0)
+      return N(p(s), e);
+    if (r === "euclidean" || r === 2)
+      return q(b(z(p(s), I(2, "int32")), e));
+    throw new Error(`Error in norm: invalid ord value: ${r}`);
+  }
+  if (Array.isArray(e) && e.length === 2) {
+    if (r === 1)
+      return E(b(p(s), e[0]), e[1] - 1);
+    if (r === 1 / 0)
+      return E(b(p(s), e[1]), e[0]);
+    if (r === -1 / 0)
+      return N(b(p(s), e[1]), e[0]);
+    if (r === "fro" || r === "euclidean")
+      return q(b(B(s), e));
+    throw new Error(`Error in norm: invalid ord value: ${r}`);
+  }
+  throw new Error(`Error in norm: invalid axis: ${e}`);
+}
+const de = /* @__PURE__ */ i({ norm_: V });
+function W(s, r) {
+  let e = a(s, "a", "greater", "string_or_numeric"), n = a(r, "b", "greater", "string_or_numeric");
+  [e, n] = m(e, n), f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(v, t);
+}
+const $e = /* @__PURE__ */ i({ greater_: W });
+function X(s, r) {
+  let e = a(s, "a", "greaterEqual", "string_or_numeric"), n = a(r, "b", "greaterEqual", "string_or_numeric");
+  [e, n] = m(e, n), f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(T, t);
+}
+const _e = /* @__PURE__ */ i({ greaterEqual_: X });
+function Y(s, r) {
+  let e = a(s, "a", "less", "string_or_numeric"), n = a(r, "b", "less", "string_or_numeric");
+  [e, n] = m(e, n), f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(S, t);
+}
+const Ee = /* @__PURE__ */ i({ less_: Y });
+function x(s, r) {
+  let e = a(s, "a", "lessEqual", "string_or_numeric"), n = a(r, "b", "lessEqual", "string_or_numeric");
+  [e, n] = m(e, n), f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(G, t);
+}
+const qe = /* @__PURE__ */ i({ lessEqual_: x });
+function ee(s) {
+  const e = { x: a(s, "x", "log1p") };
+  return u.runKernel(L, e);
+}
+const ke = /* @__PURE__ */ i({ log1p_: ee });
+function ne(s, r) {
+  const e = a(s, "a", "logicalAnd", "bool"), n = a(r, "b", "logicalAnd", "bool");
+  f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(M, t);
+}
+const Ne = /* @__PURE__ */ i({ logicalAnd_: ne });
+function re(s, r = null, e = !1) {
+  const t = { x: a(s, "x", "mean") }, o = { axis: r, keepDims: e };
+  return u.runKernel(R, t, o);
+}
+const Ke = /* @__PURE__ */ i({ mean_: re });
+function se(s, r) {
+  let e = a(s, "a", "minimum"), n = a(r, "b", "minimum");
+  [e, n] = m(e, n), e.dtype === "bool" && (e = k(e, "int32"), n = k(n, "int32")), f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(j, t);
+}
+const we = /* @__PURE__ */ i({ minimum_: se });
+function te(s, r) {
+  let e = a(s, "a", "notEqual", "string_or_numeric"), n = a(r, "b", "notEqual", "string_or_numeric");
+  [e, n] = m(e, n), f(e.shape, n.shape);
+  const t = { a: e, b: n };
+  return u.runKernel(C, t);
+}
+const ye = /* @__PURE__ */ i({ notEqual_: te });
+function ae(s) {
+  const e = { x: a(s, "x", "round") };
+  return u.runKernel(P, e);
+}
+const Ae = /* @__PURE__ */ i({ round_: ae });
+function oe(s, r) {
+  let e = a(s, "a", "squaredDifference"), n = a(r, "b", "squaredDifference");
+  [e, n] = m(e, n), f(e.shape, n.shape);
+  const t = { a: e, b: n }, o = {};
+  return u.runKernel(U, t, o);
+}
+const De = /* @__PURE__ */ i({ squaredDifference_: oe });
+function ie(s, r = 0) {
+  const e = a(s, "x", "unstack", "string_or_numeric");
+  h(r >= -e.shape.length && r < e.shape.length, () => `Axis = ${r} is not in [-${e.shape.length}, ${e.shape.length})`);
+  const n = { value: e }, t = { axis: r };
+  return u.runKernel(Z, n, t);
+}
+const ze = /* @__PURE__ */ i({ unstack_: ie });
+function ue(s, r, e = !1, n = !1) {
+  const t = a(s, "images", "resizeBilinear");
+  h(t.rank === 3 || t.rank === 4, () => `Error in resizeBilinear: x must be rank 3 or 4, but got rank ${t.rank}.`), h(r.length === 2, () => `Error in resizeBilinear: new shape must 2D, but got shape ${r}.`), h(n === !1 || e === !1, () => "Error in resizeBilinear: If halfPixelCenters is true, alignCorners must be false.");
+  let o = t, c = !1;
+  t.rank === 3 && (c = !0, o = g(t, [1, t.shape[0], t.shape[1], t.shape[2]]));
+  const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(F, d, $);
+  return c ? g(l, [l.shape[1], l.shape[2], l.shape[3]]) : l;
+}
+const Ie = /* @__PURE__ */ i({ resizeBilinear_: ue });
+function le(s, r, e = !1, n = !1) {
+  const t = a(s, "images", "resizeNearestNeighbor");
+  h(t.rank === 3 || t.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got rank ${t.rank}.`), h(r.length === 2, () => `Error in resizeNearestNeighbor: new shape must 2D, but got shape ${r}.`), h(t.dtype === "float32" || t.dtype === "int32", () => "`images` must have `int32` or `float32` as dtype"), h(n === !1 || e === !1, () => "Error in resizeNearestNeighbor: If halfPixelCenters is true, alignCorners must be false.");
+  let o = t, c = !1;
+  t.rank === 3 && (c = !0, o = g(t, [1, t.shape[0], t.shape[1], t.shape[2]]));
+  const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(H, d, $);
+  return c ? g(l, [l.shape[1], l.shape[2], l.shape[3]]) : l;
+}
+const Be = /* @__PURE__ */ i({ resizeNearestNeighbor_: le });
+export {
+  Ee as a,
+  Ne as b,
+  _e as c,
+  Ke as d,
+  ye as e,
+  ke as f,
+  $e as g,
+  Be as h,
+  Ie as i,
+  N as j,
+  qe as l,
+  we as m,
+  de as n,
+  Ae as r,
+  De as s,
+  ze as u,
+  ge as w
+};

package/dist/rope-BmZmp9uP.js
@@ -0,0 +1,24 @@
+import { j as p, e as s } from "./index-D0RBWjq8.js";
+import "./ops/cpu/rope.js";
+import "./ops/webgl/rope.js";
+const u = {
+  kernelName: "Rope",
+  inputsToSave: [],
+  outputsToSave: [],
+  gradFunc: (e, n, r) => {
+    const { ropeCache: o } = r, a = i(e, o, 0, !0);
+    return { x: () => a };
+  }
+};
+p(u);
+function i(e, n, r, o = !1) {
+  return n.ensureRopeCache(e.shape[1] + r), s().runKernel("Rope", { x: e }, {
+    pastLen: r,
+    negSin: o,
+    ropeCache: n
+  });
+}
+export {
+  u as a,
+  i as r
+};
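
The rope chunk above registers a backward pass for the package's custom `Rope` kernel: the `gradFunc` simply re-runs the op on the incoming gradient with `negSin` set, so the rotation is inverted. As a rough, hypothetical sketch of that pattern (not code from this package), the same kind of registration can be written against the public `@tensorflow/tfjs-core` API; the kernel name `MyRope`, its attribute names, and the assumption that a matching forward kernel already exists for the active backend are all illustrative.

```js
// Hypothetical sketch: registering a gradient for a custom kernel, mirroring
// the "Rope" pattern above. Assumes a forward kernel named "MyRope" has been
// registered for the current backend; names and attrs are illustrative only.
import * as tf from "@tensorflow/tfjs-core";

// Forward op: dispatch to whichever backend implements the "MyRope" kernel.
function myRope(x, pastLen = 0, negSin = false) {
  return tf.engine().runKernel("MyRope", { x }, { pastLen, negSin });
}

tf.registerGradient({
  kernelName: "MyRope",
  inputsToSave: [],
  outputsToSave: [],
  // dy is the incoming gradient; re-applying the rotation with negSin = true
  // plays the role of the inverse rotation used by the chunk above.
  gradFunc: (dy, saved, attrs) => {
    const { pastLen } = attrs;
    return { x: () => myRope(dy, pastLen, true) };
  },
});
```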

package/dist/{scatter_nd_util-C7zXRT_h.js → scatter_nd_util-BY22Cc-C.js}
@@ -1,4 +1,4 @@
-import {
+import { V as p, a9 as w } from "./index-D0RBWjq8.js";
 function k(o, t, r) {
   const n = t.rank > 1 ? t.shape[t.rank - 1] : 1, e = t.rank > 1 ? t.rank - 1 : 1, h = `Must have updates.shape = indices.shape[:batchDim] + shape[sliceDim:], got updates.shape: ${r.shape}, indices.shape: ${t.shape}, shape: ${o}, sliceDim: ${n}, and batchDim: ${e}.`;
   if (r.rank < e)

package/dist/selu_util-BuLbmbrl.js
@@ -0,0 +1,44 @@
+import { m as n, a0 as l } from "./index-D0RBWjq8.js";
+import { e as o, r as t, p as f, l as a, s as p, a as i } from "./step-dXR33iOg.js";
+import { r as m } from "./relu-Cf80uA2p.js";
+import { r as d } from "./reshape-CkjKPPqB.js";
+import { s as g } from "./sum-BdplSvq_.js";
+function L(e, r, s) {
+  if (s == null || s === "linear")
+    return e;
+  if (s === "relu")
+    return n(e, i(r));
+  throw new Error(`Cannot compute gradient for fused activation ${s}.`);
+}
+function S(e, r) {
+  let s = r;
+  const u = l(e.shape, r.shape);
+  return u.length > 0 && (s = g(s, u)), d(s, e.shape);
+}
+function k(e, r, s, u) {
+  if (r === "linear")
+    return e;
+  if (r === "relu")
+    return m(e);
+  if (r === "elu")
+    return o(e);
+  if (r === "relu6")
+    return t(e);
+  if (r === "prelu")
+    return f(e, s);
+  if (r === "leakyrelu")
+    return a(e, u);
+  if (r === "sigmoid")
+    return p(e);
+  throw new Error(`Unknown fused activation ${r}.`);
+}
+const C = (e, r) => !(e > 0) || r === "linear";
+const F = 1.7580993408473768, U = 1.0507009873554805;
+export {
+  U as S,
+  F as a,
+  k as b,
+  L as c,
+  S as g,
+  C as s
+};
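
The selu_util chunk above appears to be the bundled copy of TensorFlow.js's fused-activation helpers: one function maps activation names ("relu", "elu", "prelu", "leakyrelu", "sigmoid", ...) to the matching op, another maps them to their gradient, and a small predicate decides whether fusion is allowed while gradients are being recorded. Purely as a usage sketch (the example data is made up), the public fused API that sits on top of this dispatch can be exercised like so:

```js
// Usage sketch of tf.fused.matMul, which relies on this kind of activation
// dispatch; the tensor values are arbitrary example data.
import * as tf from "@tensorflow/tfjs";

const a = tf.tensor2d([[1, -2], [3, -4]]);
const b = tf.tensor2d([[1, 0], [0, 1]]); // identity, so the product equals a

// Matrix multiply and ReLU applied as one fused kernel when the backend and
// gradient state allow it; otherwise tfjs falls back to the unfused path.
const y = tf.fused.matMul({ a, b, activation: "relu" });
y.print(); // [[1, 0], [3, 0]]
```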

package/dist/{shared-CHhxz-O5.js → shared-B7USJZgw.js}
@@ -1,4 +1,4 @@
-import { s as l } from "./shared-
+import { s as l } from "./shared-BQboIImQ.js";
 const { addImpl: m, bincountImpl: s, bincountReduceImpl: a, bitwiseAndImpl: I, castImpl: e, ceilImpl: t, concatImpl: r, equalImpl: i, expImpl: C, expm1Impl: P, floorImpl: U, gatherNdImpl: n, gatherV2Impl: g, greaterImpl: o, greaterEqualImpl: c, lessImpl: u, lessEqualImpl: d, linSpaceImpl: q, logImpl: R, maxImpl: h, maximumImpl: x, minimumImpl: b, multiplyImpl: E, negImpl: S, notEqualImpl: T, prodImpl: w, raggedGatherImpl: y, raggedRangeImpl: A, raggedTensorToTensorImpl: F, rangeImpl: G, rsqrtImpl: N, scatterImpl: f, sigmoidImpl: k, simpleAbsImpl: B, sliceImpl: H, sparseFillEmptyRowsImpl: K, sparseReshapeImpl: V, sparseSegmentReductionImpl: j, sqrtImpl: v, staticRegexReplaceImpl: z, stridedSliceImpl: D, stringNGramsImpl: J, stringSplitImpl: L, stringToHashBucketFastImpl: M, subImpl: O, tileImpl: Q, topKImpl: W, transposeImpl: X, uniqueImpl: Y } = l;
 export {
   b as A,