@genai-fi/nanogpt 0.10.3 → 0.11.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/Generator.d.ts +10 -5
- package/dist/Generator.js +1789 -1765
- package/dist/{RealDiv-KAPDe8zB.js → RealDiv-Ds-jvL09.js} +22 -22
- package/dist/{Reshape-BYkmUnAv.js → Reshape-Cd6e-Otn.js} +1 -1
- package/dist/{Reshape-Zt6eb7yh.js → Reshape-Ct266DEk.js} +9 -9
- package/dist/TeachableLLM.d.ts +4 -3
- package/dist/TeachableLLM.js +14 -14
- package/dist/Trainer.d.ts +2 -2
- package/dist/Trainer.js +6 -6
- package/dist/{axis_util-BaG7mf5A.js → axis_util-DofAuy0p.js} +3 -3
- package/dist/backend.js +2 -2
- package/dist/{backend_util-RCe-rHaj.js → backend_util-C7NWHpv7.js} +7 -7
- package/dist/{backend_webgpu-DE3ACOLx.js → backend_webgpu-B0Vls736.js} +10 -10
- package/dist/{broadcast_to-B3eYlZm7.js → broadcast_to-DDaNMbX7.js} +2 -2
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +2 -2
- package/dist/checks/normRMS.js +4 -4
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +4 -4
- package/dist/checks/rope.js +2 -2
- package/dist/{clip_by_value-BnO7-a88.js → clip_by_value-Dn5tzexi.js} +4 -4
- package/dist/complex-DClmWqJt.js +11 -0
- package/dist/{concat-BV8bt5H-.js → concat-C6X3AAlQ.js} +1 -1
- package/dist/{concat_util-DpW8mL_l.js → concat_util-CHsJFZJJ.js} +1 -1
- package/dist/{dataset-BcwmTGYc.js → dataset-DcjWqUVQ.js} +7 -7
- package/dist/{dropout-BcvN9JYi.js → dropout-OxuaJz6z.js} +11 -11
- package/dist/{expand_dims-DT4tEPwA.js → expand_dims-BzfJK2uc.js} +3 -3
- package/dist/{exports_initializers-Hta_rEnm.js → exports_initializers-eS9QJ6ut.js} +1 -1
- package/dist/{floor-D5QdR_le.js → floor-DIb-lN_u.js} +1 -1
- package/dist/gather-BcO5UQNJ.js +9 -0
- package/dist/{gelu-CjNPL4OH.js → gelu-DqTbCx5x.js} +1 -1
- package/dist/{gpgpu_math-DAOmgtXR.js → gpgpu_math-CJcbnKPC.js} +2 -2
- package/dist/{index-DOvlwCh-.js → index-D0RBWjq8.js} +52 -52
- package/dist/{index-BwexR4lA.js → index-Dj5TkmPY.js} +89 -89
- package/dist/{kernel_funcs_utils-CCzYdUZg.js → kernel_funcs_utils-CSaumNDs.js} +11 -11
- package/dist/layers/BaseLayer.js +2 -2
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +5 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +4 -4
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +17 -17
- package/dist/log_sum_exp-VLZgbFAH.js +39 -0
- package/dist/main.d.ts +1 -1
- package/dist/main.js +9 -9
- package/dist/{matMul16-BWRSOCWB.js → matMul16-cDxwemKj.js} +7 -7
- package/dist/{matMulGelu-CzfgT6Wq.js → matMulGelu-B2s_80-H.js} +18 -18
- package/dist/{mat_mul-SjpJRLyL.js → mat_mul-DxpNTCRz.js} +3 -3
- package/dist/{mod-AnXEvvpo.js → mod-PrOKlFxH.js} +1 -1
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +9 -9
- package/dist/{ones-D2rT0xk2.js → ones-BX_wEgzB.js} +3 -3
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +6 -6
- package/dist/ops/cpu/adamMoments.js +2 -2
- package/dist/ops/cpu/appendCache.js +5 -5
- package/dist/ops/cpu/attentionMask.js +10 -10
- package/dist/ops/cpu/fusedSoftmax.js +2 -2
- package/dist/ops/cpu/gatherSub.js +6 -6
- package/dist/ops/cpu/gelu.js +9 -9
- package/dist/ops/cpu/matMul16.js +2 -2
- package/dist/ops/cpu/matMulGelu.js +3 -3
- package/dist/ops/cpu/matMulMul.js +1 -1
- package/dist/ops/cpu/mulDropout.js +1 -1
- package/dist/ops/cpu/normRMS.js +3 -3
- package/dist/ops/cpu/qkv.js +3 -3
- package/dist/ops/cpu/rope.js +9 -9
- package/dist/ops/cpu/scatterSub.js +11 -11
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +4 -4
- package/dist/ops/grads/attentionMask.js +2 -2
- package/dist/ops/grads/gelu.js +2 -2
- package/dist/ops/grads/matMul16.js +3 -3
- package/dist/ops/grads/matMulGelu.js +3 -3
- package/dist/ops/grads/normRMS.js +7 -7
- package/dist/ops/grads/pack16.js +3 -3
- package/dist/ops/grads/qkv.js +6 -6
- package/dist/ops/grads/rope.js +2 -2
- package/dist/ops/grads/softmax16.js +1 -1
- package/dist/ops/grads/unpack16.js +2 -2
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +2 -2
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +2 -2
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/reshape16.js +6 -6
- package/dist/ops/rope.js +2 -2
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +1 -1
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +3 -3
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +2 -2
- package/dist/ops/webgl/adamMoments.js +1 -1
- package/dist/ops/webgl/appendCache.js +1 -1
- package/dist/ops/webgl/attentionMask.js +4 -4
- package/dist/ops/webgl/fusedSoftmax.js +6 -6
- package/dist/ops/webgl/gatherSub.js +1 -1
- package/dist/ops/webgl/gelu.js +2 -2
- package/dist/ops/webgl/log.js +3 -3
- package/dist/ops/webgl/matMul16.js +11 -11
- package/dist/ops/webgl/matMulGelu.js +4 -4
- package/dist/ops/webgl/matMulMul.js +7 -7
- package/dist/ops/webgl/mulDropout.js +1 -1
- package/dist/ops/webgl/normRMS.js +7 -7
- package/dist/ops/webgl/qkv.js +1 -1
- package/dist/ops/webgl/rope.js +4 -4
- package/dist/ops/webgl/scatterSub.js +1 -1
- package/dist/ops/webgpu/adamAdjust.js +3 -3
- package/dist/ops/webgpu/adamMoments.js +3 -3
- package/dist/ops/webgpu/add16.js +1 -1
- package/dist/ops/webgpu/appendCache.js +3 -3
- package/dist/ops/webgpu/attentionMask.js +5 -5
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +5 -5
- package/dist/ops/webgpu/gatherSub.js +5 -5
- package/dist/ops/webgpu/gelu.js +3 -3
- package/dist/ops/webgpu/matMul16.js +18 -18
- package/dist/ops/webgpu/matMul16_program.js +2 -2
- package/dist/ops/webgpu/mul16.js +4 -4
- package/dist/ops/webgpu/normRMS.js +6 -6
- package/dist/ops/webgpu/normRMSGrad.js +4 -4
- package/dist/ops/webgpu/pack16.js +1 -1
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +6 -6
- package/dist/ops/webgpu/rope.js +3 -3
- package/dist/ops/webgpu/scatterSub.js +3 -3
- package/dist/ops/webgpu/slice16.js +4 -4
- package/dist/ops/webgpu/softmax16.js +2 -2
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +1 -1
- package/dist/ops/webgpu/sub16.js +4 -4
- package/dist/ops/webgpu/sum16.js +6 -6
- package/dist/ops/webgpu/transpose16.js +2 -2
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
- package/dist/ops/webgpu/unpack16.js +3 -3
- package/dist/ops/webgpu/utils/binary_op.js +3 -3
- package/dist/ops/webgpu/utils/reductions.js +4 -4
- package/dist/{ops-B5yanEdW.js → ops-FJapAPfm.js} +56 -56
- package/dist/{pack16-nQ6JaLo-.js → pack16-k4jq6aMX.js} +7 -7
- package/dist/patches/webgpu_backend.js +7 -7
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +8 -8
- package/dist/{random_width-or-CEftb.js → random_width-UGQn4OWb.js} +33 -33
- package/dist/range-CuGvVN2c.js +10 -0
- package/dist/{relu-CP0ZcxWO.js → relu-Cf80uA2p.js} +1 -1
- package/dist/{reshape-ByE68wS9.js → reshape-CkjKPPqB.js} +1 -1
- package/dist/{resize_nearest_neighbor-B19mCEg2.js → resize_nearest_neighbor-DB8k9KN_.js} +43 -43
- package/dist/{rope-Ir4mTyD1.js → rope-BmZmp9uP.js} +1 -1
- package/dist/{scatter_nd_util-lvSiX8q4.js → scatter_nd_util-BY22Cc-C.js} +1 -1
- package/dist/{selu_util-kbhpTdYD.js → selu_util-BuLbmbrl.js} +5 -5
- package/dist/{shared-DT1TkE6w.js → shared-B7USJZgw.js} +1 -1
- package/dist/{shared-dntlHIDQ.js → shared-BQboIImQ.js} +86 -86
- package/dist/{slice-BfEGSH82.js → slice-Aqy7KbJh.js} +3 -3
- package/dist/{slice_util-uTKwiEpW.js → slice_util-D8CQRenR.js} +7 -7
- package/dist/{softmax-CA5jFsLR.js → softmax-faLoUZVT.js} +1 -1
- package/dist/{split-CVLc0w--.js → split-BNz5jcGc.js} +3 -3
- package/dist/{squeeze-C7Z2srUo.js → squeeze--YMgaAAf.js} +2 -2
- package/dist/{stack-Cf4n9h0N.js → stack-WJK22CFn.js} +1 -1
- package/dist/{step-CINUs5QB.js → step-dXR33iOg.js} +32 -32
- package/dist/sum-BdplSvq_.js +11 -0
- package/dist/tensor-BQqrDvpx.js +8 -0
- package/dist/tensor1d-LxP9asMm.js +11 -0
- package/dist/{tensor2d-Bs9wZRc7.js → tensor2d-BN1sSfQO.js} +3 -3
- package/dist/{tensor4d-BARPdTaS.js → tensor4d-DVwr7pLF.js} +1 -1
- package/dist/{tfjs_backend-y1cvNhLA.js → tfjs_backend-Vi4JfLzT.js} +28 -28
- package/dist/{tile-mbfagpsB.js → tile-CvN_LyVr.js} +4 -4
- package/dist/tokeniser/BaseTokeniser.d.ts +27 -0
- package/dist/tokeniser/BaseTokeniser.js +94 -0
- package/dist/tokeniser/CharTokeniser.d.ts +4 -3
- package/dist/tokeniser/CharTokeniser.js +46 -32
- package/dist/tokeniser/bpe.d.ts +4 -3
- package/dist/tokeniser/bpe.js +60 -45
- package/dist/tokeniser/type.d.ts +11 -0
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.d.ts +2 -2
- package/dist/training/DatasetBuilder.js +32 -36
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.d.ts +3 -3
- package/dist/training/Trainer.js +2 -2
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/{transpose-ClWiBS_b.js → transpose-JawVKyZy.js} +5 -5
- package/dist/{unsorted_segment_sum-BDDhB_E6.js → unsorted_segment_sum-LAbmE9G4.js} +78 -78
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.js +1 -1
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.js +5 -5
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-WawDEaAb.js → variable-DQ9yYgEU.js} +1 -1
- package/dist/{webgpu_program-DuOXPQol.js → webgpu_program-CAE4RICo.js} +3 -3
- package/dist/{webgpu_util-RxEF33Rj.js → webgpu_util-BdovYhXr.js} +1 -1
- package/dist/{zeros-KnWaWf-X.js → zeros-DeiE2zTa.js} +2 -2
- package/dist/{zeros_like-DvE73F4e.js → zeros_like-BAz3iKru.js} +77 -77
- package/package.json +1 -1
- package/dist/complex-DjxcVmoX.js +0 -11
- package/dist/gather-D3JcZUaI.js +0 -9
- package/dist/log_sum_exp-ngO0-4pK.js +0 -39
- package/dist/range-BklejeeW.js +0 -10
- package/dist/sum-DWAtNGez.js +0 -11
- package/dist/tensor-DJoc7gJU.js +0 -8
- package/dist/tensor1d-D11P_7Dp.js +0 -11
@@ -1,10 +1,10 @@
-import {
-import { b as _ } from "./broadcast_to-
-import { e as
-import { m as E } from "./log_sum_exp-
-import { r as g } from "./reshape-
-import { s as b } from "./sum-
-function
+import { q as i, u as a, $ as f, E as u, a_ as y, a$ as A, af as D, aZ as p, o as q, p as z, b as I, b0 as B, D as m, b1 as v, b2 as T, b3 as S, b4 as G, b5 as L, b6 as M, b7 as R, N as k, b8 as j, b9 as C, ba as P, bb as U, y as h, bc as Z, bd as F, be as H } from "./index-D0RBWjq8.js";
+import { b as _ } from "./broadcast_to-DDaNMbX7.js";
+import { e as J } from "./axis_util-DofAuy0p.js";
+import { m as E } from "./log_sum_exp-VLZgbFAH.js";
+import { r as g } from "./reshape-CkjKPPqB.js";
+import { s as b } from "./sum-BdplSvq_.js";
+function O(s, r, e) {
 const n = a(r, "a", "where"), t = a(e, "b", "where"), o = a(s, "condition", "where", "bool"), c = f(f(o.shape, n.shape), t.shape), d = _(o, c), $ = _(n, c), l = _(t, c), w = {
 condition: d,
 t: $,
@@ -12,36 +12,36 @@ function J(s, r, e) {
 };
 return u.runKernel(y, w);
 }
-const ge = /* @__PURE__ */ i({ where_:
-function
+const ge = /* @__PURE__ */ i({ where_: O });
+function Q(s, r = null, e = !1) {
 const t = { x: a(s, "x", "min") }, o = { axis: r, keepDims: e };
 return u.runKernel(A, t, o);
 }
-const
-function
+const N = /* @__PURE__ */ i({ min_: Q });
+function V(s, r = "euclidean", e = null, n = !1) {
 s = a(s, "x", "norm");
-const t =
+const t = K(s, r, e);
 let o = t.shape;
 if (n) {
-const c =
-o =
+const c = D(e, s.shape);
+o = J(t.shape, c);
 }
 return g(t, o);
 }
-function
+function K(s, r, e = null) {
 if (s.rank === 0)
 return p(s);
 if (s.rank !== 1 && e === null)
-return
+return K(g(s, [-1]), r, e);
 if (s.rank === 1 || typeof e == "number" || Array.isArray(e) && e.length === 1) {
 if (r === 1)
 return b(p(s), e);
 if (r === 1 / 0)
 return E(p(s), e);
 if (r === -1 / 0)
-return
+return N(p(s), e);
 if (r === "euclidean" || r === 2)
-return q(b(
+return q(b(z(p(s), I(2, "int32")), e));
 throw new Error(`Error in norm: invalid ord value: ${r}`);
 }
 if (Array.isArray(e) && e.length === 2) {
@@ -50,28 +50,28 @@ function N(s, r, e = null) {
 if (r === 1 / 0)
 return E(b(p(s), e[1]), e[0]);
 if (r === -1 / 0)
-return
+return N(b(p(s), e[1]), e[0]);
 if (r === "fro" || r === "euclidean")
-return q(b(
+return q(b(B(s), e));
 throw new Error(`Error in norm: invalid ord value: ${r}`);
 }
 throw new Error(`Error in norm: invalid axis: ${e}`);
 }
-const de = /* @__PURE__ */ i({ norm_:
-function
+const de = /* @__PURE__ */ i({ norm_: V });
+function W(s, r) {
 let e = a(s, "a", "greater", "string_or_numeric"), n = a(r, "b", "greater", "string_or_numeric");
 [e, n] = m(e, n), f(e.shape, n.shape);
 const t = { a: e, b: n };
 return u.runKernel(v, t);
 }
-const $e = /* @__PURE__ */ i({ greater_:
-function
+const $e = /* @__PURE__ */ i({ greater_: W });
+function X(s, r) {
 let e = a(s, "a", "greaterEqual", "string_or_numeric"), n = a(r, "b", "greaterEqual", "string_or_numeric");
 [e, n] = m(e, n), f(e.shape, n.shape);
 const t = { a: e, b: n };
 return u.runKernel(T, t);
 }
-const _e = /* @__PURE__ */ i({ greaterEqual_:
+const _e = /* @__PURE__ */ i({ greaterEqual_: X });
 function Y(s, r) {
 let e = a(s, "a", "less", "string_or_numeric"), n = a(r, "b", "less", "string_or_numeric");
 [e, n] = m(e, n), f(e.shape, n.shape);
@@ -83,12 +83,12 @@ function x(s, r) {
 let e = a(s, "a", "lessEqual", "string_or_numeric"), n = a(r, "b", "lessEqual", "string_or_numeric");
 [e, n] = m(e, n), f(e.shape, n.shape);
 const t = { a: e, b: n };
-return u.runKernel(
+return u.runKernel(G, t);
 }
 const qe = /* @__PURE__ */ i({ lessEqual_: x });
 function ee(s) {
 const e = { x: a(s, "x", "log1p") };
-return u.runKernel(
+return u.runKernel(L, e);
 }
 const ke = /* @__PURE__ */ i({ log1p_: ee });
 function ne(s, r) {
@@ -97,12 +97,12 @@ function ne(s, r) {
 const t = { a: e, b: n };
 return u.runKernel(M, t);
 }
-const
+const Ne = /* @__PURE__ */ i({ logicalAnd_: ne });
 function re(s, r = null, e = !1) {
 const t = { x: a(s, "x", "mean") }, o = { axis: r, keepDims: e };
 return u.runKernel(R, t, o);
 }
-const
+const Ke = /* @__PURE__ */ i({ mean_: re });
 function se(s, r) {
 let e = a(s, "a", "minimum"), n = a(r, "b", "minimum");
 [e, n] = m(e, n), e.dtype === "bool" && (e = k(e, "int32"), n = k(n, "int32")), f(e.shape, n.shape);
@@ -128,48 +128,48 @@ function oe(s, r) {
 const t = { a: e, b: n }, o = {};
 return u.runKernel(U, t, o);
 }
-const
+const De = /* @__PURE__ */ i({ squaredDifference_: oe });
 function ie(s, r = 0) {
 const e = a(s, "x", "unstack", "string_or_numeric");
 h(r >= -e.shape.length && r < e.shape.length, () => `Axis = ${r} is not in [-${e.shape.length}, ${e.shape.length})`);
 const n = { value: e }, t = { axis: r };
-return u.runKernel(
+return u.runKernel(Z, n, t);
 }
-const
+const ze = /* @__PURE__ */ i({ unstack_: ie });
 function ue(s, r, e = !1, n = !1) {
 const t = a(s, "images", "resizeBilinear");
 h(t.rank === 3 || t.rank === 4, () => `Error in resizeBilinear: x must be rank 3 or 4, but got rank ${t.rank}.`), h(r.length === 2, () => `Error in resizeBilinear: new shape must 2D, but got shape ${r}.`), h(n === !1 || e === !1, () => "Error in resizeBilinear: If halfPixelCenters is true, alignCorners must be false.");
 let o = t, c = !1;
 t.rank === 3 && (c = !0, o = g(t, [1, t.shape[0], t.shape[1], t.shape[2]]));
-const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(
+const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(F, d, $);
 return c ? g(l, [l.shape[1], l.shape[2], l.shape[3]]) : l;
 }
-const
+const Ie = /* @__PURE__ */ i({ resizeBilinear_: ue });
 function le(s, r, e = !1, n = !1) {
 const t = a(s, "images", "resizeNearestNeighbor");
 h(t.rank === 3 || t.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got rank ${t.rank}.`), h(r.length === 2, () => `Error in resizeNearestNeighbor: new shape must 2D, but got shape ${r}.`), h(t.dtype === "float32" || t.dtype === "int32", () => "`images` must have `int32` or `float32` as dtype"), h(n === !1 || e === !1, () => "Error in resizeNearestNeighbor: If halfPixelCenters is true, alignCorners must be false.");
 let o = t, c = !1;
 t.rank === 3 && (c = !0, o = g(t, [1, t.shape[0], t.shape[1], t.shape[2]]));
-const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(
+const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(H, d, $);
 return c ? g(l, [l.shape[1], l.shape[2], l.shape[3]]) : l;
 }
-const
+const Be = /* @__PURE__ */ i({ resizeNearestNeighbor_: le });
 export {
 Ee as a,
-
+Ne as b,
 _e as c,
-
+Ke as d,
 ye as e,
 ke as f,
 $e as g,
-
-
-
+Be as h,
+Ie as i,
+N as j,
 qe as l,
 we as m,
 de as n,
 Ae as r,
-
-
+De as s,
+ze as u,
 ge as w
 };

@@ -1,4 +1,4 @@
-import {
+import { V as p, a9 as w } from "./index-D0RBWjq8.js";
 function k(o, t, r) {
 const n = t.rank > 1 ? t.shape[t.rank - 1] : 1, e = t.rank > 1 ? t.rank - 1 : 1, h = `Must have updates.shape = indices.shape[:batchDim] + shape[sliceDim:], got updates.shape: ${r.shape}, indices.shape: ${t.shape}, shape: ${o}, sliceDim: ${n}, and batchDim: ${e}.`;
 if (r.rank < e)

@@ -1,8 +1,8 @@
-import { m as n,
-import { e as o, r as t, p as f, l as a, s as p, a as i } from "./step-
-import { r as m } from "./relu-
-import { r as d } from "./reshape-
-import { s as g } from "./sum-
+import { m as n, a0 as l } from "./index-D0RBWjq8.js";
+import { e as o, r as t, p as f, l as a, s as p, a as i } from "./step-dXR33iOg.js";
+import { r as m } from "./relu-Cf80uA2p.js";
+import { r as d } from "./reshape-CkjKPPqB.js";
+import { s as g } from "./sum-BdplSvq_.js";
 function L(e, r, s) {
 if (s == null || s === "linear")
 return e;

@@ -1,4 +1,4 @@
-import { s as l } from "./shared-
+import { s as l } from "./shared-BQboIImQ.js";
 const { addImpl: m, bincountImpl: s, bincountReduceImpl: a, bitwiseAndImpl: I, castImpl: e, ceilImpl: t, concatImpl: r, equalImpl: i, expImpl: C, expm1Impl: P, floorImpl: U, gatherNdImpl: n, gatherV2Impl: g, greaterImpl: o, greaterEqualImpl: c, lessImpl: u, lessEqualImpl: d, linSpaceImpl: q, logImpl: R, maxImpl: h, maximumImpl: x, minimumImpl: b, multiplyImpl: E, negImpl: S, notEqualImpl: T, prodImpl: w, raggedGatherImpl: y, raggedRangeImpl: A, raggedTensorToTensorImpl: F, rangeImpl: G, rsqrtImpl: N, scatterImpl: f, sigmoidImpl: k, simpleAbsImpl: B, sliceImpl: H, sparseFillEmptyRowsImpl: K, sparseReshapeImpl: V, sparseSegmentReductionImpl: j, sqrtImpl: v, staticRegexReplaceImpl: z, stridedSliceImpl: D, stringNGramsImpl: J, stringSplitImpl: L, stringToHashBucketFastImpl: M, subImpl: O, tileImpl: Q, topKImpl: W, transposeImpl: X, uniqueImpl: Y } = l;
 export {
 b as A,