@genai-fi/nanogpt 0.10.3 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/dist/Generator.d.ts +10 -5
- package/dist/Generator.js +1789 -1765
- package/dist/{RealDiv-KAPDe8zB.js → RealDiv-C8neBwFi.js} +15 -15
- package/dist/{Reshape-BYkmUnAv.js → Reshape-Bd4V_4X7.js} +1 -1
- package/dist/{Reshape-Zt6eb7yh.js → Reshape-Ck29jQSY.js} +5 -5
- package/dist/TeachableLLM.d.ts +5 -3
- package/dist/TeachableLLM.js +14 -14
- package/dist/Trainer.d.ts +3 -1
- package/dist/Trainer.js +11 -8
- package/dist/{axis_util-BaG7mf5A.js → axis_util-DGqbT-FX.js} +3 -3
- package/dist/backend.js +2 -2
- package/dist/{backend_util-RCe-rHaj.js → backend_util-DC3rBo_H.js} +18 -18
- package/dist/{backend_webgpu-DE3ACOLx.js → backend_webgpu-mbhNnlx9.js} +3 -3
- package/dist/{broadcast_to-B3eYlZm7.js → broadcast_to-D1Dmg2Oz.js} +2 -2
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +2 -2
- package/dist/checks/normRMS.js +4 -4
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +4 -4
- package/dist/checks/rope.js +2 -2
- package/dist/{clip_by_value-BnO7-a88.js → clip_by_value-fg2aKzUy.js} +5 -5
- package/dist/complex-Cyg-eQeZ.js +11 -0
- package/dist/concat-CSm2rMwe.js +17 -0
- package/dist/{concat_util-DpW8mL_l.js → concat_util-D0je5Ppu.js} +1 -1
- package/dist/{dataset-BcwmTGYc.js → dataset-CVIJu7Xa.js} +7 -7
- package/dist/{dropout-BcvN9JYi.js → dropout-DLhSMNTZ.js} +9 -9
- package/dist/expand_dims-ChkuOp6I.js +11 -0
- package/dist/{exports_initializers-Hta_rEnm.js → exports_initializers-1KWPiStI.js} +1 -1
- package/dist/{floor-D5QdR_le.js → floor-BRMPgeIs.js} +1 -1
- package/dist/{gather-D3JcZUaI.js → gather-BSULDalH.js} +1 -1
- package/dist/{gelu-CjNPL4OH.js → gelu-BK1k-n1i.js} +1 -1
- package/dist/{gpgpu_math-DAOmgtXR.js → gpgpu_math-BJSTk_mW.js} +25 -25
- package/dist/{index-BwexR4lA.js → index-BBVLAXZD.js} +89 -89
- package/dist/{index-DOvlwCh-.js → index-Duu1Lvvv.js} +53 -53
- package/dist/{kernel_funcs_utils-CCzYdUZg.js → kernel_funcs_utils-BtYrPoJu.js} +6 -6
- package/dist/layers/BaseLayer.js +2 -2
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +5 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +4 -4
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +9 -9
- package/dist/log_sum_exp-CVqLsVLl.js +39 -0
- package/dist/main.d.ts +10 -1
- package/dist/main.js +68 -58
- package/dist/{matMul16-BWRSOCWB.js → matMul16-xswmhSuF.js} +3 -3
- package/dist/{matMulGelu-CzfgT6Wq.js → matMulGelu-BpvgnYG8.js} +14 -14
- package/dist/mat_mul-Bn2BDpT4.js +11 -0
- package/dist/{mod-AnXEvvpo.js → mod-B4AUd1Np.js} +1 -1
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +9 -9
- package/dist/{ones-D2rT0xk2.js → ones-CBI1AQjb.js} +3 -3
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +9 -9
- package/dist/ops/cpu/adamMoments.js +5 -5
- package/dist/ops/cpu/appendCache.js +6 -6
- package/dist/ops/cpu/attentionMask.js +10 -10
- package/dist/ops/cpu/fusedSoftmax.js +5 -5
- package/dist/ops/cpu/gatherSub.js +9 -9
- package/dist/ops/cpu/gelu.js +5 -5
- package/dist/ops/cpu/matMul16.js +2 -2
- package/dist/ops/cpu/matMulGelu.js +3 -3
- package/dist/ops/cpu/matMulMul.js +5 -5
- package/dist/ops/cpu/mulDropout.js +1 -1
- package/dist/ops/cpu/normRMS.js +7 -7
- package/dist/ops/cpu/qkv.js +3 -3
- package/dist/ops/cpu/rope.js +5 -5
- package/dist/ops/cpu/scatterSub.js +11 -11
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +4 -4
- package/dist/ops/grads/attentionMask.js +2 -2
- package/dist/ops/grads/gelu.js +2 -2
- package/dist/ops/grads/matMul16.js +3 -3
- package/dist/ops/grads/matMulGelu.js +6 -6
- package/dist/ops/grads/normRMS.js +4 -4
- package/dist/ops/grads/pack16.js +3 -3
- package/dist/ops/grads/qkv.js +10 -10
- package/dist/ops/grads/rope.js +2 -2
- package/dist/ops/grads/softmax16.js +1 -1
- package/dist/ops/grads/unpack16.js +2 -2
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +2 -2
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +2 -2
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/reshape16.js +2 -2
- package/dist/ops/rope.js +2 -2
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +1 -1
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +6 -6
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +2 -2
- package/dist/ops/webgl/adamMoments.js +1 -1
- package/dist/ops/webgl/appendCache.js +1 -1
- package/dist/ops/webgl/attentionMask.js +1 -1
- package/dist/ops/webgl/fusedSoftmax.js +4 -4
- package/dist/ops/webgl/gatherSub.js +1 -1
- package/dist/ops/webgl/gelu.js +2 -2
- package/dist/ops/webgl/log.js +3 -3
- package/dist/ops/webgl/matMul16.js +8 -8
- package/dist/ops/webgl/matMulGelu.js +4 -4
- package/dist/ops/webgl/matMulMul.js +7 -7
- package/dist/ops/webgl/mulDropout.js +1 -1
- package/dist/ops/webgl/normRMS.js +7 -7
- package/dist/ops/webgl/qkv.js +1 -1
- package/dist/ops/webgl/rope.js +1 -1
- package/dist/ops/webgl/scatterSub.js +1 -1
- package/dist/ops/webgpu/adamAdjust.js +3 -3
- package/dist/ops/webgpu/adamMoments.js +5 -5
- package/dist/ops/webgpu/add16.js +1 -1
- package/dist/ops/webgpu/appendCache.js +3 -3
- package/dist/ops/webgpu/attentionMask.js +2 -2
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +5 -5
- package/dist/ops/webgpu/gatherSub.js +5 -5
- package/dist/ops/webgpu/gelu.js +3 -3
- package/dist/ops/webgpu/matMul16.js +19 -19
- package/dist/ops/webgpu/matMul16_program.js +2 -2
- package/dist/ops/webgpu/mul16.js +4 -4
- package/dist/ops/webgpu/normRMS.js +6 -6
- package/dist/ops/webgpu/normRMSGrad.js +4 -4
- package/dist/ops/webgpu/pack16.js +3 -3
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +8 -8
- package/dist/ops/webgpu/rope.js +3 -3
- package/dist/ops/webgpu/scatterSub.js +3 -3
- package/dist/ops/webgpu/slice16.js +4 -4
- package/dist/ops/webgpu/softmax16.js +4 -4
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +1 -1
- package/dist/ops/webgpu/sub16.js +4 -4
- package/dist/ops/webgpu/sum16.js +5 -5
- package/dist/ops/webgpu/transpose16.js +2 -2
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
- package/dist/ops/webgpu/unpack16.js +5 -5
- package/dist/ops/webgpu/utils/binary_op.js +3 -3
- package/dist/ops/webgpu/utils/reductions.js +4 -4
- package/dist/{ops-B5yanEdW.js → ops-C2_OXuZ4.js} +69 -69
- package/dist/{pack16-nQ6JaLo-.js → pack16-atD0eYRm.js} +9 -9
- package/dist/patches/webgpu_backend.js +6 -6
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +8 -8
- package/dist/{random_width-or-CEftb.js → random_width-BN4wGJaW.js} +33 -33
- package/dist/range-DKmP1-OQ.js +10 -0
- package/dist/relu-BsXmGzzu.js +9 -0
- package/dist/{reshape-ByE68wS9.js → reshape-BI0yzp1T.js} +1 -1
- package/dist/{resize_nearest_neighbor-B19mCEg2.js → resize_nearest_neighbor-BA_BX-ub.js} +26 -26
- package/dist/{rope-Ir4mTyD1.js → rope-DJ7Y7c-u.js} +1 -1
- package/dist/{scatter_nd_util-lvSiX8q4.js → scatter_nd_util-k9MUVUkn.js} +1 -1
- package/dist/{selu_util-kbhpTdYD.js → selu_util-DyW0X1WG.js} +5 -5
- package/dist/{shared-DT1TkE6w.js → shared-Q3BS6T03.js} +1 -1
- package/dist/{shared-dntlHIDQ.js → shared-nnSWpC3u.js} +86 -86
- package/dist/{slice-BfEGSH82.js → slice-wBNvzVyz.js} +1 -1
- package/dist/{slice_util-uTKwiEpW.js → slice_util-zN8KFC5I.js} +1 -1
- package/dist/{softmax-CA5jFsLR.js → softmax-DfuYyjMh.js} +1 -1
- package/dist/split-BYrLboMq.js +9 -0
- package/dist/squeeze-Bk8Brcct.js +10 -0
- package/dist/{stack-Cf4n9h0N.js → stack-CDWShFHF.js} +1 -1
- package/dist/{step-CINUs5QB.js → step-BS5JXRR6.js} +23 -23
- package/dist/{sum-DWAtNGez.js → sum-BPUfDB2X.js} +3 -3
- package/dist/tensor-CEt9Nm2s.js +8 -0
- package/dist/tensor1d-Cc_KCIDg.js +11 -0
- package/dist/{tensor2d-Bs9wZRc7.js → tensor2d-BN97fF71.js} +3 -3
- package/dist/{tensor4d-BARPdTaS.js → tensor4d-vuDDgdUI.js} +1 -1
- package/dist/{tfjs_backend-y1cvNhLA.js → tfjs_backend-806hyYve.js} +49 -49
- package/dist/{tile-mbfagpsB.js → tile-OWUvpIVt.js} +3 -3
- package/dist/tokeniser/BaseTokeniser.d.ts +25 -0
- package/dist/tokeniser/BaseTokeniser.js +94 -0
- package/dist/tokeniser/CharTokeniser.d.ts +10 -9
- package/dist/tokeniser/CharTokeniser.js +44 -30
- package/dist/tokeniser/bpe.d.ts +10 -9
- package/dist/tokeniser/bpe.js +67 -52
- package/dist/tokeniser/type.d.ts +14 -5
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.d.ts +3 -3
- package/dist/training/DatasetBuilder.js +34 -38
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.d.ts +4 -3
- package/dist/training/Trainer.js +22 -25
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/training/tasks/ConversationTask.d.ts +11 -0
- package/dist/training/tasks/ConversationTask.js +26 -0
- package/dist/training/tasks/PretrainingTask.d.ts +11 -0
- package/dist/training/tasks/PretrainingTask.js +34 -0
- package/dist/training/tasks/StartSentenceTask.d.ts +12 -0
- package/dist/training/tasks/StartSentenceTask.js +42 -0
- package/dist/training/tasks/Task.d.ts +8 -0
- package/dist/training/tasks/Task.js +41 -0
- package/dist/{transpose-ClWiBS_b.js → transpose-BUkQCJp9.js} +6 -6
- package/dist/{unsorted_segment_sum-BDDhB_E6.js → unsorted_segment_sum-BljxHhCY.js} +5 -5
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.js +1 -1
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.d.ts +1 -1
- package/dist/utilities/sentences.js +11 -11
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-WawDEaAb.js → variable-DPt_Iuog.js} +1 -1
- package/dist/{webgpu_program-DuOXPQol.js → webgpu_program-BpWRlghH.js} +3 -3
- package/dist/{webgpu_util-RxEF33Rj.js → webgpu_util-DMiKzzQM.js} +7 -7
- package/dist/{zeros-KnWaWf-X.js → zeros-5YROwwUH.js} +2 -2
- package/dist/{zeros_like-DvE73F4e.js → zeros_like-De4n1C3m.js} +71 -71
- package/package.json +1 -1
- package/dist/complex-DjxcVmoX.js +0 -11
- package/dist/concat-BV8bt5H-.js +0 -17
- package/dist/expand_dims-DT4tEPwA.js +0 -11
- package/dist/log_sum_exp-ngO0-4pK.js +0 -39
- package/dist/mat_mul-SjpJRLyL.js +0 -11
- package/dist/range-BklejeeW.js +0 -10
- package/dist/relu-CP0ZcxWO.js +0 -9
- package/dist/split-CVLc0w--.js +0 -9
- package/dist/squeeze-C7Z2srUo.js +0 -10
- package/dist/tensor-DJoc7gJU.js +0 -8
- package/dist/tensor1d-D11P_7Dp.js +0 -11
@@ -1,26 +1,26 @@
-import {
-import { t as
-import { e as R } from "./expand_dims-
-import { t as x } from "./tensor1d-
+import { o as b, q as u, x as p, E as k, bf as hs, bg as bs, at as ds, bh as gs, bi as Es, L as W, bj as $s, bk as ys, bl as _s, bm as Ns, b as $, m as h, l as G, j as S, h as Ss, c as d, bn as ks, w as K, d as Is, a6 as L, aZ as ns, b0 as Ts, a1 as Ms } from "./index-Duu1Lvvv.js";
+import { t as v } from "./tile-OWUvpIVt.js";
+import { e as R } from "./expand_dims-ChkuOp6I.js";
+import { t as x } from "./tensor1d-Cc_KCIDg.js";
 import { n as xs, a as As, b as ws } from "./non_max_suppression_impl-B2W7YjZB.js";
-import { r as Os, l as ls, g as ts, w as q, a as rs, m as Q, b as Gs, c as
-import { s as ms } from "./split-
-import { s as _ } from "./sum-
-import { s as T } from "./slice-
-import { r as F } from "./range-
-import { t as Vs } from "./tensor-
-import { n as C, t as as } from "./transpose-
-import { r as O } from "./reshape-
-import { s as Z } from "./stack-
-import { z as Cs } from "./zeros-
-import { s as Ps } from "./squeeze-
-import { c as X } from "./concat-
-import { m as w } from "./mat_mul-
-import { t as J } from "./tensor2d-
-import { o as Ys } from "./ones-
-import { r as fs } from "./relu-
-import { a as is, e as ss, l as
-import { e as
+import { r as Os, l as ls, g as ts, w as q, a as rs, m as Q, b as Gs, c as Ls, u as us, n as ps, d as Bs, e as Ds, s as Ws, f as Rs, h as qs, i as zs } from "./resize_nearest_neighbor-BA_BX-ub.js";
+import { s as ms } from "./split-BYrLboMq.js";
+import { s as _ } from "./sum-BPUfDB2X.js";
+import { s as T } from "./slice-wBNvzVyz.js";
+import { r as F } from "./range-DKmP1-OQ.js";
+import { t as Vs } from "./tensor-CEt9Nm2s.js";
+import { n as C, t as as } from "./transpose-BUkQCJp9.js";
+import { r as O } from "./reshape-BI0yzp1T.js";
+import { s as Z } from "./stack-CDWShFHF.js";
+import { z as Cs } from "./zeros-5YROwwUH.js";
+import { s as Ps } from "./squeeze-Bk8Brcct.js";
+import { c as X } from "./concat-CSm2rMwe.js";
+import { m as w } from "./mat_mul-Bn2BDpT4.js";
+import { t as J } from "./tensor2d-BN97fF71.js";
+import { o as Ys } from "./ones-CBI1AQjb.js";
+import { r as fs } from "./relu-BsXmGzzu.js";
+import { a as is, e as ss, l as js } from "./log_sum_exp-CVqLsVLl.js";
+import { e as vs } from "./axis_util-DGqbT-FX.js";
 function Fs(e, a, n) {
 const r = u(e, "x", "bincount"), t = u(a, "weights", "bincount");
 p(r.dtype === "int32", () => `Error in bincount: input dtype must be int32, but got ${r.dtype}`), p(n >= 0, () => `size must be non-negative, but got ${n}.`), p(t.size === r.size || t.size === 0, () => `Error in bincount: weights must have the same size as input or0-length, but got input shape: ${r.shape}, weights shape: ${t.shape}.`);
@@ -42,11 +42,11 @@ function Hs(e, a, n, r = "float32") {
 if (n == null)
 return o;
 if (n.length === 1)
-return
+return v(R(o, 0), [n[0], 1, 1]);
 if (n.length === 2)
-return
+return v(R(R(o, 0), 0), [n[0], n[1], 1, 1]);
 if (n.length === 3)
-return
+return v(R(R(R(o, 0), 0), 0), [
 n[0],
 n[1],
 n[2],
@@ -74,7 +74,7 @@ function nn(e) {
 const a = u(e, "image", "grayscaleToRGB"), n = a.rank - 1, r = a.shape[n];
 p(a.rank >= 2, () => `Error in grayscaleToRGB: images must be at least rank 2, but got rank ${a.rank}.`), p(r === 1, () => `Error in grayscaleToRGB: last dimension of a grayscale image should be size 1, but got size ${r}.`);
 const t = new Array(a.rank);
-return t.fill(1, 0, n), t[n] = 3,
+return t.fill(1, 0, n), t[n] = 3, v(a, t);
 }
 const tn = /* @__PURE__ */ b({ grayscaleToRGB_: nn });
 function en(e) {
@@ -186,8 +186,8 @@ function yn(e, a = "binary", n = !1, r = 0.5) {
 let c = h(x([r]), 255), f, m, E, g;
 if (p(t.rank === 3, () => `Error in threshold: image must be rank 3,but got rank ${t.rank}.`), p(t.shape[2] === 3 || t.shape[2] === 1, () => `Error in threshold: image color channel must be equal to 3 or 1but got ${t.shape[2]}.`), p(t.dtype === "int32" || t.dtype === "float32", () => `Error in dtype: image dtype must be int32 or float32,but got dtype ${t.dtype}.`), p(a === "otsu" || a === "binary", () => `Method must be binary or otsu, but was ${a}`), t.shape[2] === 3) {
 [f, m, E] = ms(t, [1, 1, 1], -1);
-const I = h(f, s), A = h(m, o),
-g = G(G(I, A),
+const I = h(f, s), A = h(m, o), D = h(E, i);
+g = G(G(I, A), D);
 } else
 g = e;
 if (a === "otsu") {
@@ -205,8 +205,8 @@ function _n(e, a) {
 i = S(E, _(s));
 const g = Ss(o.shape, s.size), y = G(F(0, o.size), g), M = h(o, y);
 l = S(_(M), _(o));
-const I = d(i, l), A = d(i, l),
-t = h(h(
+const I = d(i, l), A = d(i, l), D = h(c, f);
+t = h(h(D, I), A);
 const P = ts(t, r);
 r = q(P, t, r), n = q(P, x([m]), n);
 }
@@ -226,7 +226,7 @@ function In(e, a, n) {
 const t = r.shape, [s, o] = r.shape.slice(-2);
 let i, l;
 typeof a == "number" ? (p(a % 1 === 0, () => `bandPart(): numLower must be an integer, got ${a}.`), p(a <= s, () => `bandPart(): numLower (${a}) must not be greater than the number of rows (${s}).`), i = u(a < 0 ? s : a, "numLower", "bandPart")) : (p(a.dtype === "int32", () => "bandPart(): numLower's dtype must be an int32."), i = q(rs(a, 0), s, Q(a, s))), typeof n == "number" ? (p(n % 1 === 0, () => `bandPart(): numUpper must be an integer, got ${n}.`), p(n <= o, () => `bandPart(): numUpper (${n}) must not be greater than the number of columns (${o}).`), l = u(n < 0 ? o : n, "numUpper", "bandPart")) : (p(n.dtype === "int32", () => "bandPart(): numUpper's dtype must be an int32."), l = q(rs(n, 0), o, Q(n, o)));
-const c = O(F(0, s, 1, "int32"), [-1, 1]), f = F(0, o, 1, "int32"), m = d(c, f), E = Gs(ls(m, i),
+const c = O(F(0, s, 1, "int32"), [-1, 1]), f = F(0, o, 1, "int32"), m = d(c, f), E = Gs(ls(m, i), Ls(m, C(l))), g = Cs([s, o], r.dtype);
 return O(Z(us(O(r, [-1, s, o])).map((y) => q(E, y, g))), t);
 }
 const Tn = /* @__PURE__ */ b({ bandPart_: In });
@@ -282,10 +282,10 @@ function cs(e, a = !1) {
 for (let c = 0; c < l; ++c) {
 const f = s, m = i, E = t;
 [i, s, t] = k.tidy(() => {
-const g = T(s, [c, c], [n - c, 1]), y = ps(g), M = T(s, [c, c], [1, 1]), I = q(ts(M, 0), J([[-1]]), J([[1]])), A = d(M, h(I, y)),
-
+const g = T(s, [c, c], [n - c, 1]), y = ps(g), M = T(s, [c, c], [1, 1]), I = q(ts(M, 0), J([[-1]]), J([[1]])), A = d(M, h(I, y)), D = S(g, A);
+D.shape[0] === 1 ? i = K(o) : i = X([
 o,
-T(
+T(D, [1, 0], [D.shape[0] - 1, D.shape[1]])
 ], 0);
 const P = C(S(w(I, A), y)), Y = T(s, [c, 0], [n - c, r]), U = h(P, i), es = as(i);
 if (c === 0)
@@ -294,11 +294,11 @@ function cs(e, a = !1) {
 const H = d(Y, w(U, w(es, Y)));
 s = X([T(s, [0, 0], [c, r]), H], 0);
 }
-const os = as(U),
+const os = as(U), j = T(t, [0, c], [n, t.shape[1] - c]);
 if (c === 0)
-t = d(
+t = d(j, w(w(j, i), os));
 else {
-const H = d(
+const H = d(j, w(w(j, i), os));
 t = X([T(t, [0, 0], [n, c]), H], 1);
 }
 return [i, s, t];
@@ -323,7 +323,7 @@ function On(e, a, n = N.SUM_BY_NONZERO_WEIGHTS) {
 return _(s);
 if (n === N.MEAN) {
 if (t == null)
-return
+return Bs(s);
 {
 const o = r.size / t.size, i = S(_(s), _(t));
 return o > 1 ? S(i, $(o)) : i;
@@ -333,91 +333,91 @@ function On(e, a, n = N.SUM_BY_NONZERO_WEIGHTS) {
 if (t == null)
 return S(_(s), $(r.size));
 {
-const o = h(t, Ys(r.shape)), i = W(_(
+const o = h(t, Ys(r.shape)), i = W(_(Ds(o, $(0))), "float32");
 return S(_(s), i);
 }
 }
 throw Error(`Unknown reduction: ${n}`);
 }
-const
+const B = /* @__PURE__ */ b({ computeWeightedLoss_: On });
 function Gn(e, a, n, r = N.SUM_BY_NONZERO_WEIGHTS) {
 const t = u(e, "labels", "absoluteDifference"), s = u(a, "predictions", "absoluteDifference");
 let o = null;
-n != null && (o = u(n, "weights", "absoluteDifference")),
+n != null && (o = u(n, "weights", "absoluteDifference")), L(t.shape, s.shape, "Error in absoluteDifference: ");
 const i = ns(d(t, s));
-return
+return B(i, o, r);
 }
-const
-function
+const Ln = /* @__PURE__ */ b({ absoluteDifference_: Gn });
+function Bn(e, a, n, r, t = N.SUM_BY_NONZERO_WEIGHTS) {
 const s = u(e, "labels", "cosineDistance"), o = u(a, "predictions", "cosineDistance");
 let i = null;
-r != null && (i = u(r, "weights", "cosineDistance")),
+r != null && (i = u(r, "weights", "cosineDistance")), L(s.shape, o.shape, "Error in cosineDistance: ");
 const l = $(1), c = d(l, _(h(s, o), n, !0));
-return
+return B(c, i, t);
 }
-const
+const Dn = /* @__PURE__ */ b({ cosineDistance_: Bn });
 function Wn(e, a, n, r = N.SUM_BY_NONZERO_WEIGHTS) {
 let t = u(e, "labels", "hingeLoss");
 const s = u(a, "predictions", "hingeLoss");
 let o = null;
-n != null && (o = u(n, "weights", "hingeLoss")),
+n != null && (o = u(n, "weights", "hingeLoss")), L(t.shape, s.shape, "Error in hingeLoss: ");
 const i = $(1);
 t = d(h($(2), t), i);
 const l = fs(d(i, h(t, s)));
-return
+return B(l, o, r);
 }
 const Rn = /* @__PURE__ */ b({ hingeLoss_: Wn });
 function qn(e, a, n, r = 1, t = N.SUM_BY_NONZERO_WEIGHTS) {
 const s = u(e, "labels", "huberLoss"), o = u(a, "predictions", "huberLoss");
 let i = null;
-n != null && (i = u(n, "weights", "huberLoss")),
+n != null && (i = u(n, "weights", "huberLoss")), L(s.shape, o.shape, "Error in huberLoss: ");
 const l = $(r), c = ns(d(o, s)), f = Q(c, l), m = d(c, f), E = G(h($(0.5), Ts(f)), h(l, m));
-return
+return B(E, i, t);
 }
 const zn = /* @__PURE__ */ b({ huberLoss_: qn });
 function Vn(e, a, n, r = 1e-7, t = N.SUM_BY_NONZERO_WEIGHTS) {
 const s = u(e, "labels", "logLoss"), o = u(a, "predictions", "logLoss");
 let i = null;
-n != null && (i = u(n, "weights", "logLoss")),
+n != null && (i = u(n, "weights", "logLoss")), L(s.shape, o.shape, "Error in logLoss: ");
 const l = $(1), c = $(r), f = C(h(s, is(G(o, c)))), m = h(d(l, s), is(G(d(l, o), c))), E = d(f, m);
-return
+return B(E, i, t);
 }
 const Cn = /* @__PURE__ */ b({ logLoss_: Vn });
 function Pn(e, a, n, r = N.SUM_BY_NONZERO_WEIGHTS) {
 const t = u(e, "labels", "meanSquaredError"), s = u(a, "predictions", "meanSquaredError");
 let o = null;
-n != null && (o = u(n, "weights", "meanSquaredError")),
+n != null && (o = u(n, "weights", "meanSquaredError")), L(t.shape, s.shape, "Error in meanSquaredError: ");
 const i = Ws(t, s);
-return
+return B(i, o, r);
 }
 const Yn = /* @__PURE__ */ b({ meanSquaredError_: Pn });
-function
+function jn(e, a) {
 const n = u(e, "labels", "sigmoidCrossEntropyWithLogits"), r = u(a, "logits", "sigmoidCrossEntropyWithLogits");
-
+L(n.shape, r.shape, "Error in sigmoidCrossEntropyWithLogits: ");
 const t = fs(r), s = h(r, n), o = Rs(ss(C(ns(r))));
 return G(d(t, s), o);
 }
-function
+function vn(e, a, n, r = 0, t = N.SUM_BY_NONZERO_WEIGHTS) {
 let s = u(e, "multiClassLabels", "sigmoidCrossEntropy");
 const o = u(a, "logits", "sigmoidCrossEntropy");
 let i = null;
-if (n != null && (i = u(n, "weights", "sigmoidCrossEntropy")),
+if (n != null && (i = u(n, "weights", "sigmoidCrossEntropy")), L(s.shape, o.shape, "Error in sigmoidCrossEntropy: "), r > 0) {
 const c = $(r), f = $(1), m = $(0.5);
 s = G(h(s, d(f, c)), h(m, c));
 }
-const l =
-return
+const l = jn(s, o);
+return B(l, i, t);
 }
-const Fn = /* @__PURE__ */ b({ sigmoidCrossEntropy_:
+const Fn = /* @__PURE__ */ b({ sigmoidCrossEntropy_: vn });
 function Zn(e, a, n = -1) {
 if (n === -1 && (n = a.rank - 1), n !== a.rank - 1)
 throw Error(`Softmax cross entropy along a non-last dimension is not yet supported. Labels / logits was rank ${a.rank} and dim was ${n}`);
 return Ms((t, s, o) => {
-const l =
+const l = js(s, [n], !0), c = d(W(s, "float32"), l);
 o([t, c]);
 const f = C(h(c, t));
 return { value: _(f, [n]), gradFunc: (g, y) => {
-const [M, I] = y, A =
+const [M, I] = y, A = vs(g.shape, [n]);
 return [
 h(O(g, A), d(W(M, "float32"), ss(I))),
 h(O(g, A), d(ss(I), W(M, "float32")))
@@ -429,12 +429,12 @@ function Un(e, a, n, r = 0, t = N.SUM_BY_NONZERO_WEIGHTS) {
 let s = u(e, "onehotLabels", "softmaxCrossEntropy");
 const o = u(a, "logits", "softmaxCrossEntropy");
 let i = null;
-if (n != null && (i = u(n, "weights", "softmaxCrossEntropy")),
+if (n != null && (i = u(n, "weights", "softmaxCrossEntropy")), L(s.shape, o.shape, "Error in softmaxCrossEntropy: "), r > 0) {
 const c = $(r), f = $(1), m = $(s.shape[1]);
 s = G(h(s, d(f, c)), S(c, m));
 }
 const l = Zn(s, o);
-return
+return B(l, i, t);
 }
 const Hn = /* @__PURE__ */ b({ softmaxCrossEntropy_: Un });
 const $t = {
@@ -458,9 +458,9 @@ const $t = {
 gramSchmidt: xn,
 qr: wn
 }, _t = {
-absoluteDifference:
-computeWeightedLoss:
-cosineDistance:
+absoluteDifference: Ln,
+computeWeightedLoss: B,
+cosineDistance: Dn,
 hingeLoss: Rn,
 huberLoss: zn,
 logLoss: Cn,
@@ -1,6 +1,6 @@
-import {
-import { isPackedTensor as
-import { s as
+import { i, e as o } from "./index-Duu1Lvvv.js";
+import { isPackedTensor as t } from "./utilities/packed.js";
+import { s as u } from "./slice-wBNvzVyz.js";
 const s = {
 kernelName: "Unpack16",
 inputsToSave: [],
@@ -9,11 +9,11 @@ const s = {
 x: () => d(n)
 })
 };
-
+i(s);
 function p(n, a = 1, e = !1) {
-if (!
+if (!t(n))
 return n;
-const r =
+const r = o().runKernel("Unpack16", { x: n }, { scaling: a });
 return e && n.dispose(), r;
 }
 const c = {
@@ -23,13 +23,13 @@ const c = {
 gradFunc: (n, a, e) => ({
 x: () => {
 const r = p(n);
-return e.originalShape && e.padding && e.padding > 0 ?
+return e.originalShape && e.padding && e.padding > 0 ? u(r, new Array(r.shape.length).fill(0), e.originalShape) : r;
 }
 })
 };
-
+i(c);
 function d(n, a = 1, e = 0) {
-return
+return o().runKernel("Pack16", { x: n }, { scaling: a, padding: e });
 }
 export {
 s as a,
@@ -1,8 +1,8 @@
-import {
-import "../webgpu_util-
-import { W as P } from "../backend_webgpu-
+import { U as c, ae as l, aa as m, x as n } from "../index-Duu1Lvvv.js";
+import "../webgpu_util-DMiKzzQM.js";
+import { W as P } from "../backend_webgpu-mbhNnlx9.js";
 import { compileProgram as y } from "./webgpu_program.js";
-import { m as M } from "../webgpu_program-
+import { m as M } from "../webgpu_program-BpWRlghH.js";
 const b = (o, s) => {
 const i = o.limits.maxComputeWorkgroupsPerDimension, t = s.dispatchLayout, a = s.dispatch;
 if (a.every((r) => r <= i))
@@ -17,7 +17,7 @@ const b = (o, s) => {
 () => "Total dispatch size exceeds WebGPU maximum."
 ), [e, e, e]) : [e, e, 1];
 };
-class
+class U extends P {
 subgroupMaxSize;
 subgroupMinSize;
 constructor(s, i) {
@@ -52,5 +52,5 @@ class z extends P {
 }
 }
 export {
-
+U as default
 };
@@ -1,8 +1,8 @@
-import {
-import { g as A, d as b, a as x, b as g, s as L, t as l, c as y } from "../webgpu_program-
+import { aa as z, x as F, ab as O, ac as _ } from "../index-Duu1Lvvv.js";
+import { g as A, d as b, a as x, b as g, s as L, t as l, c as y } from "../webgpu_program-BpWRlghH.js";
 var N = /* @__PURE__ */ ((s) => (s[s.FROM_PIXELS = 0] = "FROM_PIXELS", s[s.DRAW = 1] = "DRAW", s))(N || {});
 const H = (s, t, e, o, u) => {
-const a = { dtype: o.dtype, shape: o.shape }, n =
+const a = { dtype: o.dtype, shape: o.shape }, n = D(e, a, t), r = s.createShaderModule({ code: n, label: t.constructor.name });
 let d = z().get("WEBGPU_PRINT_SHADER");
 if (d !== "") {
 d = d.toLowerCase();
@@ -19,7 +19,7 @@ const H = (s, t, e, o, u) => {
 layout: "auto"
 });
 };
-function
+function D(s, t, e) {
 const o = [], u = e.workgroupSize[0] * e.workgroupSize[1] * e.workgroupSize[2];
 if (e.outputComponent = e.outputComponent ? e.outputComponent : 1, o.push(`
 
@@ -93,7 +93,7 @@ function j(s, t, e) {
 e.subgroups ? "enable subgroups;" : "",
 C,
 o.join(`
-`) +
+`) + T,
 x(t.shape),
 f,
 W(t.shape.length)
@@ -165,12 +165,12 @@ const C = `
 let floatToUint: vec4<u32> = bitcast<vec4<u32>>(val);
 return (floatToUint & vec4<u32>(0x7fffffffu)) > vec4<u32>(0x7f800000u);
 }
-`,
+`, T = `
 fn isinf(val: f32) -> bool {
 return abs(val) == uniforms.INFINITY;
 }
 `;
-function
+function j(s, t) {
 const e = s.name, o = s.shape.length, u = g(o), a = "get" + e.charAt(0).toUpperCase() + e.slice(1), n = ["d0", "d1", "d2", "d3", "d4", "d5"].slice(0, o), r = n.map((v) => `${v} : i32`).join(", ");
 if (o < 1)
 return `
@@ -237,7 +237,7 @@ function E(s, t, e, o) {
 `;
 }
 function U(s, t, e, o) {
-let u =
+let u = j(s, e);
 return s.shape.length <= t.length && (u += E(s, t, e, o)), u;
 }
 function B(s, t) {
@@ -1,36 +1,36 @@
-import {
-import { c as te, k as Gs, m as Tn, b as ou, t as hs, a as Ps, s as Us, l as lu, p as uu, e as cu } from "./step-
-import { n as pt, t as P } from "./transpose-
-import { r as C } from "./reshape-
-import { s as _ } from "./sum-
-import { m as Mt } from "./mat_mul-
-import { j as Vs, o as Xe, G as pi, b as hu, s as pu, D as du, t as fu, A as mu, F as gu, e as Ht, q as En, r as Ye, l as bu, c as di, p as yu, z as fi, f as wu, x as ku, a as js, i as Qe, y as vt, B as Nu, E as Hs, w as xu, v as vu, n as Su, C as Au, k as Iu, u as Cu, h as Ln, g as Du, m as zu, d as Tu } from "./unsorted_segment_sum-
-import { d as st, w as qt, b as Re, c as _e, l as qs, g as Wt, a as Eu, u as ts, f as Lu, m as mi, e as Ts, j as $u } from "./resize_nearest_neighbor-
-import { s as Vt } from "./split-
-import { e as Es, a as gi, g as Ks, c as Fu } from "./axis_util-
-import { e as xe, a as le, m as Te, l as Mu } from "./log_sum_exp-
-import { t as Ce } from "./tile-
-import { s as ps } from "./stack-
-import { o as he } from "./ones-
-import { s as me } from "./slice-
-import { f as bi } from "./floor-
-import { z as ft } from "./zeros-
-import { s as Ou, b as Ru, c as _u, g as Bu, a as Wu, S as Gu } from "./selu_util-
-import { p as Pu } from "./slice_util-
-import { c as Zs } from "./concat-
-import { e as ue } from "./expand_dims-
-import { g as Uu } from "./gather-
-import { V as d, N as B, r as ds, c as Js, a as et, b as ge, e as Be, s as Xs, g as yi, f as wi, t as Ot, R as zt, h as ht, A as Gt, i as V, n as oe, j as nt, k as Vu, l as ju, m as We, o as Pt, p as Tt, q as ae, u as jt, v as qe, w as ce, x as Hu, y as es, z as fs, B as ki, C as St, D as $n, E as qu, F as Ku, G as Zu, H as Kt, I as Ju, J as Ys, K as It, L as Ke, M as Xu, O as ot, P as Ni, Q as mt, S as ee, T as Fn, U as ke, d as Et, W as Mn, X as Ge, Y as xi, Z as Qs, _ as Yu, $ as Qu, a0 as vi, a1 as tc } from "./tfjs_backend-
-import { s as tn } from "./squeeze-
-import { t as Ls } from "./tensor1d-
-import { r as Pe } from "./relu-
-import { c as At } from "./clip_by_value-
-import { s as Si } from "./softmax-
-import { M as ec, a as ms } from "./dropout-
-import { e as sc, l as nc, i as Rt } from "./ops-
-import { t as ic } from "./tensor-
-import { r as rc } from "./range-
-import { v as ac } from "./variable-
+import { o as tt, q as R, x as S, E as X, cA as li, L, cB as ui, cC as Oa, cD as Ra, cE as ci, af as Yt, b0 as ut, c as U, u as _a, at as Ba, cF as Wa, l as z, B as Ga, _ as Lt, a1 as An, cG as In, cH as Pa, cI as Ua, cJ as Va, cK as ja, cL as Ha, cM as qa, cN as Ka, cO as Za, cP as Ja, bR as Xa, m as f, c9 as Ya, n as Qt, b as Q, j as W, ca as Qa, bX as to, $ as lt, cQ as eo, bq as so, a2 as Y, cb as no, cc as io, cd as ro, cf as ao, ce as oo, cg as lo, cR as uo, cS as co, br as ho, D as po, bt as fo, cT as mo, bU as go, b_ as bo, C as yo, cU as wo, z as ko, bv as No, bw as xo, cV as vo, bx as So, by as Ao, bA as Io, bB as Co, ci as Do, cW as zo, cX as To, aI as Eo, cY as Lo, bD as $o, an as Fo, A as Mo, b$ as Oo, F as Ro, c0 as _o, bu as Bo, G as Wo, b2 as Go, aQ as Po, cj as Uo, ck as Vo, cl as jo, aJ as Ho, b5 as qo, ao as Ko, cZ as Zo, c_ as Jo, cm as Xo, am as Yo, c1 as Qo, c$ as tl, d0 as el, bG as sl, b7 as nl, U as hi, a$ as il, b8 as rl, co as al, M as ol, c2 as ll, aq as ul, bH as cl, bI as hl, P as pl, bJ as dl, d1 as fl, p as Ws, aK as ml, c3 as gl, aT as bl, cp as yl, aL as wl, H as kl, R as Nl, bd as xl, d2 as vl, be as Sl, d3 as Al, bL as Il, ba as Cl, bM as Dl, a_ as zl, bN as Tl, aH as El, cq as Ll, bO as $l, bP as Fl, S as Ml, I as Ol, bE as Rl, bK as _l, J as Bl, c5 as Wl, d4 as Gl, bb as Pl, aM as Ul, c7 as Vl, N as jl, cu as Hl, bs as ql, T as Kl, as as Zl, bc as Jl, bQ as Xl, cz as Ne, d5 as Yl, i as Ql, d6 as v, t as y, d7 as Me, d8 as Oe, ab as $t, d as q, aa as tu, d9 as Cn, k as _t, aZ as ze, h as eu, w as su, a3 as we, O as nu, da as iu, a as Dn, db as ru, dc as zn, dd as au } from "./index-Duu1Lvvv.js";
+import { c as te, k as Gs, m as Tn, b as ou, t as hs, a as Ps, s as Us, l as lu, p as uu, e as cu } from "./step-BS5JXRR6.js";
+import { n as pt, t as P } from "./transpose-BUkQCJp9.js";
+import { r as C } from "./reshape-BI0yzp1T.js";
+import { s as _ } from "./sum-BPUfDB2X.js";
+import { m as Mt } from "./mat_mul-Bn2BDpT4.js";
+import { j as Vs, o as Xe, G as pi, b as hu, s as pu, D as du, t as fu, A as mu, F as gu, e as Ht, q as En, r as Ye, l as bu, c as di, p as yu, z as fi, f as wu, x as ku, a as js, i as Qe, y as vt, B as Nu, E as Hs, w as xu, v as vu, n as Su, C as Au, k as Iu, u as Cu, h as Ln, g as Du, m as zu, d as Tu } from "./unsorted_segment_sum-BljxHhCY.js";
+import { d as st, w as qt, b as Re, c as _e, l as qs, g as Wt, a as Eu, u as ts, f as Lu, m as mi, e as Ts, j as $u } from "./resize_nearest_neighbor-BA_BX-ub.js";
+import { s as Vt } from "./split-BYrLboMq.js";
+import { e as Es, a as gi, g as Ks, c as Fu } from "./axis_util-DGqbT-FX.js";
+import { e as xe, a as le, m as Te, l as Mu } from "./log_sum_exp-CVqLsVLl.js";
+import { t as Ce } from "./tile-OWUvpIVt.js";
+import { s as ps } from "./stack-CDWShFHF.js";
+import { o as he } from "./ones-CBI1AQjb.js";
+import { s as me } from "./slice-wBNvzVyz.js";
+import { f as bi } from "./floor-BRMPgeIs.js";
+import { z as ft } from "./zeros-5YROwwUH.js";
+import { s as Ou, b as Ru, c as _u, g as Bu, a as Wu, S as Gu } from "./selu_util-DyW0X1WG.js";
+import { p as Pu } from "./slice_util-zN8KFC5I.js";
+import { c as Zs } from "./concat-CSm2rMwe.js";
+import { e as ue } from "./expand_dims-ChkuOp6I.js";
+import { g as Uu } from "./gather-BSULDalH.js";
+import { V as d, N as B, r as ds, c as Js, a as et, b as ge, e as Be, s as Xs, g as yi, f as wi, t as Ot, R as zt, h as ht, A as Gt, i as V, n as oe, j as nt, k as Vu, l as ju, m as We, o as Pt, p as Tt, q as ae, u as jt, v as qe, w as ce, x as Hu, y as es, z as fs, B as ki, C as St, D as $n, E as qu, F as Ku, G as Zu, H as Kt, I as Ju, J as Ys, K as It, L as Ke, M as Xu, O as ot, P as Ni, Q as mt, S as ee, T as Fn, U as ke, d as Et, W as Mn, X as Ge, Y as xi, Z as Qs, _ as Yu, $ as Qu, a0 as vi, a1 as tc } from "./tfjs_backend-806hyYve.js";
+import { s as tn } from "./squeeze-Bk8Brcct.js";
+import { t as Ls } from "./tensor1d-Cc_KCIDg.js";
+import { r as Pe } from "./relu-BsXmGzzu.js";
+import { c as At } from "./clip_by_value-fg2aKzUy.js";
+import { s as Si } from "./softmax-DfuYyjMh.js";
+import { M as ec, a as ms } from "./dropout-DLhSMNTZ.js";
+import { e as sc, l as nc, i as Rt } from "./ops-C2_OXuZ4.js";
+import { t as ic } from "./tensor-CEt9Nm2s.js";
+import { r as rc } from "./range-DKmP1-OQ.js";
+import { v as ac } from "./variable-DPt_Iuog.js";
 function oc(n, t, e, s, i, r = "NDHWC") {
 const a = R(n, "x", "avgPool3d", "float32");
 let o = a, l = !1;
@@ -0,0 +1,10 @@
+import { E as e, Z as f } from "./index-Duu1Lvvv.js";
+function E(n, o, r = 1, a = "float32") {
+if (r === 0)
+throw new Error("Cannot have a step of zero");
+const t = { start: n, stop: o, step: r, dtype: a };
+return e.runKernel(f, {}, t);
+}
+export {
+E as r
+};