@genai-fi/nanogpt 0.11.0 → 0.12.1
This diff covers publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/dist/Generator.js +29 -29
- package/dist/{RealDiv-Ds-jvL09.js → RealDiv-C8neBwFi.js} +17 -17
- package/dist/{Reshape-Cd6e-Otn.js → Reshape-Bd4V_4X7.js} +1 -1
- package/dist/{Reshape-Ct266DEk.js → Reshape-Ck29jQSY.js} +7 -7
- package/dist/TeachableLLM.d.ts +2 -1
- package/dist/TeachableLLM.js +9 -9
- package/dist/Trainer.d.ts +4 -2
- package/dist/Trainer.js +12 -9
- package/dist/{axis_util-DofAuy0p.js → axis_util-DGqbT-FX.js} +1 -1
- package/dist/backend.js +2 -2
- package/dist/{backend_util-C7NWHpv7.js → backend_util-DC3rBo_H.js} +18 -18
- package/dist/{backend_webgpu-B0Vls736.js → backend_webgpu-mbhNnlx9.js} +10 -10
- package/dist/{broadcast_to-DDaNMbX7.js → broadcast_to-D1Dmg2Oz.js} +2 -2
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +2 -2
- package/dist/checks/normRMS.js +4 -4
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +2 -2
- package/dist/checks/rope.js +2 -2
- package/dist/clip_by_value-fg2aKzUy.js +12 -0
- package/dist/{complex-DClmWqJt.js → complex-Cyg-eQeZ.js} +1 -1
- package/dist/concat-CSm2rMwe.js +17 -0
- package/dist/{concat_util-CHsJFZJJ.js → concat_util-D0je5Ppu.js} +1 -1
- package/dist/{dataset-DcjWqUVQ.js → dataset-CVIJu7Xa.js} +3 -3
- package/dist/{dropout-OxuaJz6z.js → dropout-DLhSMNTZ.js} +14 -14
- package/dist/expand_dims-ChkuOp6I.js +11 -0
- package/dist/{exports_initializers-eS9QJ6ut.js → exports_initializers-1KWPiStI.js} +1 -1
- package/dist/{floor-DIb-lN_u.js → floor-BRMPgeIs.js} +1 -1
- package/dist/gather-BSULDalH.js +9 -0
- package/dist/{gelu-DqTbCx5x.js → gelu-BK1k-n1i.js} +1 -1
- package/dist/{gpgpu_math-CJcbnKPC.js → gpgpu_math-BJSTk_mW.js} +25 -25
- package/dist/{index-Dj5TkmPY.js → index-BBVLAXZD.js} +14 -14
- package/dist/{index-D0RBWjq8.js → index-Duu1Lvvv.js} +45 -45
- package/dist/{kernel_funcs_utils-CSaumNDs.js → kernel_funcs_utils-BtYrPoJu.js} +8 -8
- package/dist/layers/BaseLayer.js +2 -2
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +5 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +4 -4
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +17 -17
- package/dist/{log_sum_exp-VLZgbFAH.js → log_sum_exp-CVqLsVLl.js} +4 -4
- package/dist/main.d.ts +9 -0
- package/dist/main.js +69 -58
- package/dist/{matMul16-cDxwemKj.js → matMul16-xswmhSuF.js} +7 -7
- package/dist/{matMulGelu-B2s_80-H.js → matMulGelu-BpvgnYG8.js} +26 -26
- package/dist/mat_mul-Bn2BDpT4.js +11 -0
- package/dist/{mod-PrOKlFxH.js → mod-B4AUd1Np.js} +1 -1
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +9 -9
- package/dist/{ones-BX_wEgzB.js → ones-CBI1AQjb.js} +3 -3
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +7 -7
- package/dist/ops/cpu/adamMoments.js +5 -5
- package/dist/ops/cpu/appendCache.js +6 -6
- package/dist/ops/cpu/attentionMask.js +6 -6
- package/dist/ops/cpu/fusedSoftmax.js +5 -5
- package/dist/ops/cpu/gatherSub.js +7 -7
- package/dist/ops/cpu/gelu.js +5 -5
- package/dist/ops/cpu/matMul16.js +2 -2
- package/dist/ops/cpu/matMulGelu.js +3 -3
- package/dist/ops/cpu/matMulMul.js +5 -5
- package/dist/ops/cpu/mulDropout.js +1 -1
- package/dist/ops/cpu/normRMS.js +5 -5
- package/dist/ops/cpu/qkv.js +3 -3
- package/dist/ops/cpu/rope.js +9 -9
- package/dist/ops/cpu/scatterSub.js +5 -5
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +1 -1
- package/dist/ops/grads/attentionMask.js +2 -2
- package/dist/ops/grads/gelu.js +2 -2
- package/dist/ops/grads/matMul16.js +3 -3
- package/dist/ops/grads/matMulGelu.js +5 -5
- package/dist/ops/grads/normRMS.js +6 -6
- package/dist/ops/grads/pack16.js +3 -3
- package/dist/ops/grads/qkv.js +9 -9
- package/dist/ops/grads/rope.js +2 -2
- package/dist/ops/grads/softmax16.js +1 -1
- package/dist/ops/grads/unpack16.js +2 -2
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +2 -2
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +2 -2
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/reshape16.js +6 -6
- package/dist/ops/rope.js +2 -2
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +1 -1
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +6 -6
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +2 -2
- package/dist/ops/webgl/adamMoments.js +1 -1
- package/dist/ops/webgl/appendCache.js +1 -1
- package/dist/ops/webgl/attentionMask.js +4 -4
- package/dist/ops/webgl/fusedSoftmax.js +6 -6
- package/dist/ops/webgl/gatherSub.js +1 -1
- package/dist/ops/webgl/gelu.js +2 -2
- package/dist/ops/webgl/log.js +3 -3
- package/dist/ops/webgl/matMul16.js +10 -10
- package/dist/ops/webgl/matMulGelu.js +4 -4
- package/dist/ops/webgl/matMulMul.js +2 -2
- package/dist/ops/webgl/mulDropout.js +1 -1
- package/dist/ops/webgl/normRMS.js +2 -2
- package/dist/ops/webgl/qkv.js +1 -1
- package/dist/ops/webgl/rope.js +4 -4
- package/dist/ops/webgl/scatterSub.js +1 -1
- package/dist/ops/webgpu/adamAdjust.js +3 -3
- package/dist/ops/webgpu/adamMoments.js +5 -5
- package/dist/ops/webgpu/add16.js +1 -1
- package/dist/ops/webgpu/appendCache.js +3 -3
- package/dist/ops/webgpu/attentionMask.js +5 -5
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +5 -5
- package/dist/ops/webgpu/gatherSub.js +3 -3
- package/dist/ops/webgpu/gelu.js +3 -3
- package/dist/ops/webgpu/matMul16.js +19 -19
- package/dist/ops/webgpu/matMul16_program.js +2 -2
- package/dist/ops/webgpu/mul16.js +1 -1
- package/dist/ops/webgpu/normRMS.js +2 -2
- package/dist/ops/webgpu/normRMSGrad.js +4 -4
- package/dist/ops/webgpu/pack16.js +3 -3
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +4 -4
- package/dist/ops/webgpu/rope.js +3 -3
- package/dist/ops/webgpu/scatterSub.js +3 -3
- package/dist/ops/webgpu/slice16.js +4 -4
- package/dist/ops/webgpu/softmax16.js +4 -4
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +1 -1
- package/dist/ops/webgpu/sub16.js +1 -1
- package/dist/ops/webgpu/sum16.js +5 -5
- package/dist/ops/webgpu/transpose16.js +2 -2
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +3 -3
- package/dist/ops/webgpu/unpack16.js +5 -5
- package/dist/ops/webgpu/utils/binary_op.js +3 -3
- package/dist/ops/webgpu/utils/reductions.js +4 -4
- package/dist/{ops-FJapAPfm.js → ops-C2_OXuZ4.js} +35 -35
- package/dist/{pack16-k4jq6aMX.js → pack16-atD0eYRm.js} +6 -6
- package/dist/patches/webgpu_backend.js +8 -8
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +2 -2
- package/dist/{random_width-UGQn4OWb.js → random_width-BN4wGJaW.js} +33 -33
- package/dist/{range-CuGvVN2c.js → range-DKmP1-OQ.js} +1 -1
- package/dist/relu-BsXmGzzu.js +9 -0
- package/dist/{reshape-CkjKPPqB.js → reshape-BI0yzp1T.js} +1 -1
- package/dist/{resize_nearest_neighbor-DB8k9KN_.js → resize_nearest_neighbor-BA_BX-ub.js} +25 -25
- package/dist/{rope-BmZmp9uP.js → rope-DJ7Y7c-u.js} +1 -1
- package/dist/{scatter_nd_util-BY22Cc-C.js → scatter_nd_util-k9MUVUkn.js} +1 -1
- package/dist/{selu_util-BuLbmbrl.js → selu_util-DyW0X1WG.js} +5 -5
- package/dist/{shared-B7USJZgw.js → shared-Q3BS6T03.js} +1 -1
- package/dist/{shared-BQboIImQ.js → shared-nnSWpC3u.js} +6 -6
- package/dist/{slice-Aqy7KbJh.js → slice-wBNvzVyz.js} +3 -3
- package/dist/{slice_util-D8CQRenR.js → slice_util-zN8KFC5I.js} +7 -7
- package/dist/{softmax-faLoUZVT.js → softmax-DfuYyjMh.js} +1 -1
- package/dist/split-BYrLboMq.js +9 -0
- package/dist/squeeze-Bk8Brcct.js +10 -0
- package/dist/{stack-WJK22CFn.js → stack-CDWShFHF.js} +1 -1
- package/dist/{step-dXR33iOg.js → step-BS5JXRR6.js} +14 -14
- package/dist/sum-BPUfDB2X.js +11 -0
- package/dist/{tensor-BQqrDvpx.js → tensor-CEt9Nm2s.js} +1 -1
- package/dist/{tensor1d-LxP9asMm.js → tensor1d-Cc_KCIDg.js} +1 -1
- package/dist/{tensor2d-BN1sSfQO.js → tensor2d-BN97fF71.js} +1 -1
- package/dist/{tensor4d-DVwr7pLF.js → tensor4d-vuDDgdUI.js} +1 -1
- package/dist/{tfjs_backend-Vi4JfLzT.js → tfjs_backend-806hyYve.js} +36 -36
- package/dist/tile-OWUvpIVt.js +11 -0
- package/dist/tokeniser/BaseTokeniser.d.ts +6 -8
- package/dist/tokeniser/BaseTokeniser.js +6 -6
- package/dist/tokeniser/CharTokeniser.d.ts +6 -6
- package/dist/tokeniser/CharTokeniser.js +26 -26
- package/dist/tokeniser/bpe.d.ts +6 -6
- package/dist/tokeniser/bpe.js +9 -9
- package/dist/tokeniser/type.d.ts +6 -8
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.d.ts +1 -1
- package/dist/training/DatasetBuilder.js +29 -29
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.d.ts +5 -4
- package/dist/training/Trainer.js +37 -40
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/training/tasks/ConversationTask.d.ts +11 -0
- package/dist/training/tasks/ConversationTask.js +26 -0
- package/dist/training/tasks/PretrainingTask.d.ts +11 -0
- package/dist/training/tasks/PretrainingTask.js +34 -0
- package/dist/training/tasks/StartSentenceTask.d.ts +12 -0
- package/dist/training/tasks/StartSentenceTask.js +42 -0
- package/dist/training/tasks/Task.d.ts +8 -0
- package/dist/training/tasks/Task.js +44 -0
- package/dist/{transpose-JawVKyZy.js → transpose-BUkQCJp9.js} +7 -7
- package/dist/{unsorted_segment_sum-LAbmE9G4.js → unsorted_segment_sum-BljxHhCY.js} +78 -78
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.js +1 -1
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.d.ts +1 -1
- package/dist/utilities/sentences.js +11 -11
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-DQ9yYgEU.js → variable-DPt_Iuog.js} +1 -1
- package/dist/{webgpu_program-CAE4RICo.js → webgpu_program-BpWRlghH.js} +1 -1
- package/dist/{webgpu_util-BdovYhXr.js → webgpu_util-DMiKzzQM.js} +7 -7
- package/dist/{zeros-DeiE2zTa.js → zeros-5YROwwUH.js} +2 -2
- package/dist/{zeros_like-BAz3iKru.js → zeros_like-De4n1C3m.js} +57 -57
- package/package.json +1 -1
- package/dist/clip_by_value-Dn5tzexi.js +0 -12
- package/dist/concat-C6X3AAlQ.js +0 -17
- package/dist/expand_dims-BzfJK2uc.js +0 -11
- package/dist/gather-BcO5UQNJ.js +0 -9
- package/dist/mat_mul-DxpNTCRz.js +0 -11
- package/dist/relu-Cf80uA2p.js +0 -9
- package/dist/split-BNz5jcGc.js +0 -9
- package/dist/squeeze--YMgaAAf.js +0 -10
- package/dist/sum-BdplSvq_.js +0 -11
- package/dist/tile-CvN_LyVr.js +0 -11
@@ -1,9 +1,9 @@
-import {
-import { b as _ } from "./broadcast_to-
-import { e as J } from "./axis_util-
-import { m as E } from "./log_sum_exp-
-import { r as g } from "./reshape-
-import { s as b } from "./sum-
+import { o as i, q as a, _ as f, E as u, a_ as y, a$ as A, af as D, aZ as p, n as q, p as z, b as B, b0 as I, B as m, b1 as v, b2 as T, b3 as S, b4 as L, b5 as G, b6 as M, b7 as R, L as k, b8 as j, b9 as C, ba as P, bb as U, x as h, bc as Z, bd as F, be as H } from "./index-Duu1Lvvv.js";
+import { b as _ } from "./broadcast_to-D1Dmg2Oz.js";
+import { e as J } from "./axis_util-DGqbT-FX.js";
+import { m as E } from "./log_sum_exp-CVqLsVLl.js";
+import { r as g } from "./reshape-BI0yzp1T.js";
+import { s as b } from "./sum-BPUfDB2X.js";
 function O(s, r, e) {
 const n = a(r, "a", "where"), t = a(e, "b", "where"), o = a(s, "condition", "where", "bool"), c = f(f(o.shape, n.shape), t.shape), d = _(o, c), $ = _(n, c), l = _(t, c), w = {
 condition: d,
@@ -17,10 +17,10 @@ function Q(s, r = null, e = !1) {
 const t = { x: a(s, "x", "min") }, o = { axis: r, keepDims: e };
 return u.runKernel(A, t, o);
 }
-const
+const K = /* @__PURE__ */ i({ min_: Q });
 function V(s, r = "euclidean", e = null, n = !1) {
 s = a(s, "x", "norm");
-const t =
+const t = N(s, r, e);
 let o = t.shape;
 if (n) {
 const c = D(e, s.shape);
@@ -28,20 +28,20 @@ function V(s, r = "euclidean", e = null, n = !1) {
 }
 return g(t, o);
 }
-function
+function N(s, r, e = null) {
 if (s.rank === 0)
 return p(s);
 if (s.rank !== 1 && e === null)
-return
+return N(g(s, [-1]), r, e);
 if (s.rank === 1 || typeof e == "number" || Array.isArray(e) && e.length === 1) {
 if (r === 1)
 return b(p(s), e);
 if (r === 1 / 0)
 return E(p(s), e);
 if (r === -1 / 0)
-return
+return K(p(s), e);
 if (r === "euclidean" || r === 2)
-return q(b(z(p(s),
+return q(b(z(p(s), B(2, "int32")), e));
 throw new Error(`Error in norm: invalid ord value: ${r}`);
 }
 if (Array.isArray(e) && e.length === 2) {
@@ -50,9 +50,9 @@ function K(s, r, e = null) {
 if (r === 1 / 0)
 return E(b(p(s), e[1]), e[0]);
 if (r === -1 / 0)
-return
+return K(b(p(s), e[1]), e[0]);
 if (r === "fro" || r === "euclidean")
-return q(b(
+return q(b(I(s), e));
 throw new Error(`Error in norm: invalid ord value: ${r}`);
 }
 throw new Error(`Error in norm: invalid axis: ${e}`);
@@ -83,12 +83,12 @@ function x(s, r) {
 let e = a(s, "a", "lessEqual", "string_or_numeric"), n = a(r, "b", "lessEqual", "string_or_numeric");
 [e, n] = m(e, n), f(e.shape, n.shape);
 const t = { a: e, b: n };
-return u.runKernel(
+return u.runKernel(L, t);
 }
 const qe = /* @__PURE__ */ i({ lessEqual_: x });
 function ee(s) {
 const e = { x: a(s, "x", "log1p") };
-return u.runKernel(
+return u.runKernel(G, e);
 }
 const ke = /* @__PURE__ */ i({ log1p_: ee });
 function ne(s, r) {
@@ -97,12 +97,12 @@ function ne(s, r) {
 const t = { a: e, b: n };
 return u.runKernel(M, t);
 }
-const
+const Ke = /* @__PURE__ */ i({ logicalAnd_: ne });
 function re(s, r = null, e = !1) {
 const t = { x: a(s, "x", "mean") }, o = { axis: r, keepDims: e };
 return u.runKernel(R, t, o);
 }
-const
+const Ne = /* @__PURE__ */ i({ mean_: re });
 function se(s, r) {
 let e = a(s, "a", "minimum"), n = a(r, "b", "minimum");
 [e, n] = m(e, n), e.dtype === "bool" && (e = k(e, "int32"), n = k(n, "int32")), f(e.shape, n.shape);
@@ -144,7 +144,7 @@ function ue(s, r, e = !1, n = !1) {
 const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(F, d, $);
 return c ? g(l, [l.shape[1], l.shape[2], l.shape[3]]) : l;
 }
-const
+const Be = /* @__PURE__ */ i({ resizeBilinear_: ue });
 function le(s, r, e = !1, n = !1) {
 const t = a(s, "images", "resizeNearestNeighbor");
 h(t.rank === 3 || t.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got rank ${t.rank}.`), h(r.length === 2, () => `Error in resizeNearestNeighbor: new shape must 2D, but got shape ${r}.`), h(t.dtype === "float32" || t.dtype === "int32", () => "`images` must have `int32` or `float32` as dtype"), h(n === !1 || e === !1, () => "Error in resizeNearestNeighbor: If halfPixelCenters is true, alignCorners must be false.");
@@ -153,18 +153,18 @@ function le(s, r, e = !1, n = !1) {
 const d = { images: o }, $ = { alignCorners: e, halfPixelCenters: n, size: r }, l = u.runKernel(H, d, $);
 return c ? g(l, [l.shape[1], l.shape[2], l.shape[3]]) : l;
 }
-const
+const Ie = /* @__PURE__ */ i({ resizeNearestNeighbor_: le });
 export {
 Ee as a,
-
+Ke as b,
 _e as c,
-
+Ne as d,
 ye as e,
 ke as f,
 $e as g,
-
-
-
+Ie as h,
+Be as i,
+K as j,
 qe as l,
 we as m,
 de as n,
@@ -1,4 +1,4 @@
-import {
+import { U as p, a8 as w } from "./index-Duu1Lvvv.js";
 function k(o, t, r) {
 const n = t.rank > 1 ? t.shape[t.rank - 1] : 1, e = t.rank > 1 ? t.rank - 1 : 1, h = `Must have updates.shape = indices.shape[:batchDim] + shape[sliceDim:], got updates.shape: ${r.shape}, indices.shape: ${t.shape}, shape: ${o}, sliceDim: ${n}, and batchDim: ${e}.`;
 if (r.rank < e)
@@ -1,8 +1,8 @@
-import { m as n,
-import { e as o, r as t, p as f, l as a, s as p, a as i } from "./step-
-import { r as m } from "./relu-
-import { r as d } from "./reshape-
-import { s as g } from "./sum-
+import { m as n, $ as l } from "./index-Duu1Lvvv.js";
+import { e as o, r as t, p as f, l as a, s as p, a as i } from "./step-BS5JXRR6.js";
+import { r as m } from "./relu-BsXmGzzu.js";
+import { r as d } from "./reshape-BI0yzp1T.js";
+import { s as g } from "./sum-BPUfDB2X.js";
 function L(e, r, s) {
 if (s == null || s === "linear")
 return e;
@@ -1,4 +1,4 @@
-import { s as l } from "./shared-
+import { s as l } from "./shared-nnSWpC3u.js";
 const { addImpl: m, bincountImpl: s, bincountReduceImpl: a, bitwiseAndImpl: I, castImpl: e, ceilImpl: t, concatImpl: r, equalImpl: i, expImpl: C, expm1Impl: P, floorImpl: U, gatherNdImpl: n, gatherV2Impl: g, greaterImpl: o, greaterEqualImpl: c, lessImpl: u, lessEqualImpl: d, linSpaceImpl: q, logImpl: R, maxImpl: h, maximumImpl: x, minimumImpl: b, multiplyImpl: E, negImpl: S, notEqualImpl: T, prodImpl: w, raggedGatherImpl: y, raggedRangeImpl: A, raggedTensorToTensorImpl: F, rangeImpl: G, rsqrtImpl: N, scatterImpl: f, sigmoidImpl: k, simpleAbsImpl: B, sliceImpl: H, sparseFillEmptyRowsImpl: K, sparseReshapeImpl: V, sparseSegmentReductionImpl: j, sqrtImpl: v, staticRegexReplaceImpl: z, stridedSliceImpl: D, stringNGramsImpl: J, stringSplitImpl: L, stringToHashBucketFastImpl: M, subImpl: O, tileImpl: Q, topKImpl: W, transposeImpl: X, uniqueImpl: Y } = l;
 export {
 b as A,
@@ -1,10 +1,10 @@
-import {
-import { f as it, g as Ye, a as Je, R as tn, v as en, d as nn, e as sn, h as on, i as rn, j as an, k as ln, l as cn, m as un, n as hn, o as fn, p as kt, q as dn, r as gn, s as mn } from "./backend_util-
+import { x as Lt, bR as Ce, U as V, _ as Pt, a8 as K, ae as et, ac as ht, bS as dt, bT as at, a7 as De, Q as st, aQ as Ue, ar as We, bU as $e, bV as ze, bW as Be, bX as qt, at as H, bY as At, bZ as U, b_ as _t, bC as Vt, an as Ct, b$ as Dt, F as Ut, c0 as Wt, b1 as $t, b2 as zt, b3 as Bt, b4 as jt, ao as Gt, c1 as Zt, b8 as Ht, c2 as Kt, aq as je, aR as Ge, b9 as Xt, as as Ze, c3 as He, af as Qt, ak as Ke, t as Xe, bM as Yt, c4 as It, aH as Jt, S as Qe, c5 as te, bb as ee, c6 as ne, aB as pt, c7 as se, c8 as ot } from "./index-Duu1Lvvv.js";
+import { f as it, g as Ye, a as Je, R as tn, v as en, d as nn, e as sn, h as on, i as rn, j as an, k as ln, l as cn, m as un, n as hn, o as fn, p as kt, q as dn, r as gn, s as mn } from "./backend_util-DC3rBo_H.js";
 import { m as Nt } from "./complex_util-Yc1A_gV1.js";
-import { a as pn, b as wn, e as In, c as bn } from "./axis_util-
-import { b as xn } from "./broadcast_to-
-import { r as En } from "./reshape-
-import { p as Fn, a as yn, i as kn, c as Nn } from "./slice_util-
+import { a as pn, b as wn, e as In, c as bn } from "./axis_util-DGqbT-FX.js";
+import { b as xn } from "./broadcast_to-D1Dmg2Oz.js";
+import { r as En } from "./reshape-BI0yzp1T.js";
+import { p as Fn, a as yn, i as kn, c as Nn } from "./slice_util-zN8KFC5I.js";
 import { g as vn } from "./_commonjsHelpers-ByX85dGu.js";
 function Sn(e, t) {
 for (var n = 0; n < t.length; n++) {
@@ -1,12 +1,12 @@
-import {
-function
+import { o as c, q as e, E as a, S as l } from "./index-Duu1Lvvv.js";
+function p(r, n, o) {
 const s = e(r, "x", "slice", "string_or_numeric");
 if (s.rank === 0)
 throw new Error("Slicing scalar is not possible");
 const t = { x: s }, i = { begin: n, size: o };
 return a.runKernel(l, t, i);
 }
-const x = /* @__PURE__ */ c({ slice_:
+const x = /* @__PURE__ */ c({ slice_: p });
 export {
 x as s
 };
@@ -1,4 +1,4 @@
-import {
+import { x as E, a5 as x } from "./index-Duu1Lvvv.js";
 const N = -2, L = -1;
 function P(n, e, i) {
 const r = n.shape.length;
@@ -157,7 +157,7 @@ function q(n, e, i, r, t, o, a, f, c) {
 endValid: !1
 };
 J(g, s);
-let u = !0, p = !0,
+let u = !0, p = !0, M = !0;
 const I = [], w = [];
 for (let l = 0; l < n.length; ++l) {
 if (s.strides[l] === 0)
@@ -173,8 +173,8 @@ function q(n, e, i, r, t, o, a, f, c) {
 ];
 if (m && s.strides[l] <= 0)
 throw Error("only stride 1 allowed on non-range indexing.");
-
-const
+M = M && s.strides[l] === 1;
+const y = !!(s.beginMask & 1 << l && s.endMask & 1 << l);
 if (s.beginValid && s.endValid) {
 if (m) {
 const G = s.begin[l] < 0 ? d + s.begin[l] : s.begin[l];
@@ -185,9 +185,9 @@ function q(n, e, i, r, t, o, a, f, c) {
 const b = s.strides[l] === 1 && s.begin[l] === 0 && s.end[l] === d;
 u = u && b, p = p && (l === 0 && s.strides[l] === 1 || b);
 } else
-u = u && s.strides[l] === 1 &&
+u = u && s.strides[l] === 1 && y, p = p && (l === 0 && s.strides[l] === 1 || y);
 let A, k = !1;
-if (s.beginValid && s.endValid ? (A = s.end[l] - s.begin[l], k = !0) : m ? (A = 1, k = !0) :
+if (s.beginValid && s.endValid ? (A = s.end[l] - s.begin[l], k = !0) : m ? (A = 1, k = !0) : y && d >= 0 && (s.strides[l] < 0 ? A = -d : A = d, k = !0), k) {
 let b;
 A === 0 || A < 0 != s.strides[l] < 0 ? b = 0 : b = Math.trunc(A / s.strides[l]) + (A % s.strides[l] !== 0 ? 1 : 0), I.push(b);
 } else
@@ -202,7 +202,7 @@ function q(n, e, i, r, t, o, a, f, c) {
 finalShape: w,
 isIdentity: u,
 sliceDim0: p,
-isSimpleSlice:
+isSimpleSlice: M,
 begin: s.begin,
 end: s.end,
 strides: s.strides
@@ -0,0 +1,9 @@
+import { o as p, q as i, E as a, J as c } from "./index-Duu1Lvvv.js";
+function e(t, s, o = 0) {
+const n = { x: i(t, "x", "split") }, r = { numOrSizeSplits: s, axis: o };
+return a.runKernel(c, n, r);
+}
+const u = /* @__PURE__ */ p({ split_: e });
+export {
+u as s
+};
@@ -0,0 +1,10 @@
+import { o as r, q as n, K as a } from "./index-Duu1Lvvv.js";
+import { r as t } from "./reshape-BI0yzp1T.js";
+function p(s, o) {
+const e = n(s, "x", "squeeze", "string_or_numeric");
+return t(e, a(e.shape, o).newShape);
+}
+const i = /* @__PURE__ */ r({ squeeze_: p });
+export {
+i as s
+};
@@ -1,4 +1,4 @@
-import {
+import { o as e, y as c, x as n, E as k, P as i } from "./index-Duu1Lvvv.js";
 function u(r, t = 0) {
 const s = c(r, "tensors", "stack", "string_or_numeric");
 n(s.length >= 1, () => "Pass at least one tensor to tf.stack"), s.length > 0 && n(t <= s[0].rank, () => "Axis must be <= rank of the tensor");
@@ -1,4 +1,4 @@
-import {
+import { x as _, aG as P, o as A, q as M, E as v, aH as q, aI as F, aJ as J, aK as X, aL as Q, aM as Y } from "./index-Duu1Lvvv.js";
 function it(t, n, e, o, s = "NHWC", f) {
 const l = t[3], r = [...n, l], c = nt(s);
 return j(t, r, e, f, o, null, null, c);
@@ -33,15 +33,15 @@ function j(t, n, e, o, s, f, l = !1, r = "channelsLast") {
 [c, i, u, h] = t;
 else
 throw new Error(`Unknown dataFormat ${r}`);
-const [a, p, , $] = n, [w, g] = T(e), [x,
+const [a, p, , $] = n, [w, g] = T(e), [x, y] = T(o), L = k(a, x), b = k(p, y), { padInfo: C, outHeight: E, outWidth: D } = d(s, u, h, w, g, L, b, f, r), I = l ? $ * i : $;
 let m;
-return r === "channelsFirst" ? m = [c, I,
+return r === "channelsFirst" ? m = [c, I, E, D] : r === "channelsLast" && (m = [c, E, D, I]), {
 batchSize: c,
 dataFormat: r,
 inHeight: u,
 inWidth: h,
 inChannels: i,
-outHeight:
+outHeight: E,
 outWidth: D,
 outChannels: I,
 padInfo: C,
@@ -52,7 +52,7 @@ function j(t, n, e, o, s, f, l = !1, r = "channelsLast") {
 effectiveFilterHeight: L,
 effectiveFilterWidth: b,
 dilationHeight: x,
-dilationWidth:
+dilationWidth: y,
 inShape: t,
 outShape: m,
 filterShape: n
@@ -66,7 +66,7 @@ function Z(t, n, e, o, s, f = !1, l = "channelsLast", r) {
 [c, a, u, h, i] = t;
 else
 throw new Error(`Unknown dataFormat ${l}`);
-const [p, $, w, , g] = n, [x,
+const [p, $, w, , g] = n, [x, y, L] = K(e), [b, C, E] = K(o), D = k(p, b), I = k($, C), m = k(w, E), { padInfo: G, outDepth: N, outHeight: R, outWidth: W } = tt(s, u, h, i, x, y, L, D, I, m, r), H = f ? g * a : g;
 let O;
 return l === "channelsFirst" ? O = [c, H, N, R, W] : l === "channelsLast" && (O = [c, N, R, W, H]), {
 batchSize: c,
@@ -81,7 +81,7 @@ function Z(t, n, e, o, s, f = !1, l = "channelsLast", r) {
 outChannels: H,
 padInfo: G,
 strideDepth: x,
-strideHeight:
+strideHeight: y,
 strideWidth: L,
 filterDepth: p,
 filterHeight: $,
@@ -91,7 +91,7 @@ function Z(t, n, e, o, s, f = !1, l = "channelsLast", r) {
 effectiveFilterWidth: m,
 dilationDepth: b,
 dilationHeight: C,
-dilationWidth:
+dilationWidth: E,
 inShape: t,
 outShape: O,
 filterShape: n
@@ -157,8 +157,8 @@ function tt(t, n, e, o, s, f, l, r, c, u, h) {
 a = g[0], p = g[1], $ = g[2];
 } else if (t === "same") {
 a = Math.ceil(n / s), p = Math.ceil(e / f), $ = Math.ceil(o / l);
-const w = (a - 1) * s + r - n, g = (p - 1) * f + c - e, x = ($ - 1) * l + u - o,
-i = { top: b, bottom: C, left:
+const w = (a - 1) * s + r - n, g = (p - 1) * f + c - e, x = ($ - 1) * l + u - o, y = Math.floor(w / 2), L = w - y, b = Math.floor(g / 2), C = g - b, E = Math.floor(x / 2), D = x - E;
+i = { top: b, bottom: C, left: E, right: D, front: y, back: L, type: "SAME" };
 } else
 throw Error(`Unknown padding parameter: ${t}`);
 return { padInfo: i, outDepth: a, outHeight: p, outWidth: $ };
@@ -229,12 +229,12 @@ function rt(t, n) {
 const e = M(t, "x", "prelu"), o = M(n, "alpha", "prelu"), s = { x: e, alpha: o };
 return v.runKernel(X, s);
 }
-const
+const Et = /* @__PURE__ */ A({ prelu_: rt });
 function lt(t) {
 const e = { x: M(t, "x", "relu6") };
 return v.runKernel(Q, e);
 }
-const
+const yt = /* @__PURE__ */ A({ relu6_: lt });
 function ct(t, n = 0) {
 const o = { x: M(t, "x", "step") }, s = { alpha: n };
 return v.runKernel(Y, o, s);
@@ -254,8 +254,8 @@ export {
 at as k,
 xt as l,
 pt as m,
-
-
+Et as p,
+yt as r,
 gt as s,
 V as t
 };
@@ -0,0 +1,11 @@
+import { o as e, q as u, L as c, E as l, N as m } from "./index-Duu1Lvvv.js";
+function i(t, o = null, n = !1) {
+let s = u(t, "x", "sum");
+s.dtype === "bool" && (s = c(s, "int32"));
+const r = { x: s }, a = { axis: o, keepDims: n };
+return l.runKernel(m, r, a);
+}
+const f = /* @__PURE__ */ e({ sum_: i });
+export {
+f as s
+};
@@ -1,16 +1,16 @@
-import {
-import { t as Ee } from "./tensor1d-
-import { r as Le, d as Ne } from "./dropout-
-import { s as F } from "./slice-
-import { r as c } from "./reshape-
-import { g as be } from "./gather-
-import { e as Fe } from "./step-
-import { c as Ce } from "./clip_by_value-
-import { t as Pe } from "./tile-
-import { s as ve, b as Me, c as je, g as Ve } from "./selu_util-
-import { m as k } from "./mat_mul-
-import { t as Ue } from "./transpose-
-import { c as M } from "./concat-
+import { o as w, q as S, x as y, E as J, l as U, B as ie, U as ue, _ as ae, a1 as le, aX as fe, aF as he, aY as ye, t as $, L as ge, m as ke, j as Te, aZ as Ie } from "./index-Duu1Lvvv.js";
+import { t as Ee } from "./tensor1d-Cc_KCIDg.js";
+import { r as Le, d as Ne } from "./dropout-DLhSMNTZ.js";
+import { s as F } from "./slice-wBNvzVyz.js";
+import { r as c } from "./reshape-BI0yzp1T.js";
+import { g as be } from "./gather-BSULDalH.js";
+import { e as Fe } from "./step-BS5JXRR6.js";
+import { c as Ce } from "./clip_by_value-fg2aKzUy.js";
+import { t as Pe } from "./tile-OWUvpIVt.js";
+import { s as ve, b as Me, c as je, g as Ve } from "./selu_util-DyW0X1WG.js";
+import { m as k } from "./mat_mul-Bn2BDpT4.js";
+import { t as Ue } from "./transpose-BUkQCJp9.js";
+import { c as M } from "./concat-CSm2rMwe.js";
 function Be(e) {
 return M(
 e,
@@ -33,22 +33,22 @@ function Ze(e, n) {
 const Re = /* @__PURE__ */ w({ concat4d_: Ze });
 function Ye(e, n, t) {
 const s = S(e, "x", "slice1d");
-return
+return y(s.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${s.rank} tensor`), F(s, [n], [t]);
 }
 const H = /* @__PURE__ */ w({ slice1d_: Ye });
 function We(e, n, t) {
 const s = S(e, "x", "slice2d");
-return
+return y(s.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${s.rank} tensor`), F(s, n, t);
 }
 const we = /* @__PURE__ */ w({ slice2d_: We });
 function Xe(e, n, t) {
 const s = S(e, "x", "slice3d");
-return
+return y(s.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${s.rank} tensor`), F(s, n, t);
 }
 const Q = /* @__PURE__ */ w({ slice3d_: Xe });
 function He(e, n, t) {
 const s = S(e, "x", "slice4d");
-return
+return y(s.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${s.rank} tensor`), F(s, n, t);
 }
 const V = /* @__PURE__ */ w({ slice4d_: He });
 function Qe({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activation: o = "linear", preluActivationWeights: a, leakyreluAlpha: f = 0.2 }) {
@@ -58,19 +58,19 @@ function Qe({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activ
 }
 let i = S(e, "a", "fused matMul"), u = S(n, "b", "fused matMul");
 [i, u] = ie(i, u);
-const m = t ? i.shape[i.rank - 2] : i.shape[i.rank - 1], d = s ? u.shape[u.rank - 1] : u.shape[u.rank - 2],
-
-const x = ae(i.shape.slice(0, -2), u.shape.slice(0, -2)).concat([
+const m = t ? i.shape[i.rank - 2] : i.shape[i.rank - 1], d = s ? u.shape[u.rank - 1] : u.shape[u.rank - 2], T = t ? i.shape[i.rank - 1] : i.shape[i.rank - 2], h = s ? u.shape[u.rank - 2] : u.shape[u.rank - 1], ee = i.shape.slice(0, -2), I = u.shape.slice(0, -2), ne = ue(ee), te = ue(I);
+y(m === d, () => `Error in fused matMul: inner shapes (${m}) and (${d}) of Tensors with shapes ${i.shape} and ${u.shape} and transposeA=${t} and transposeB=${s} must match.`);
+const x = ae(i.shape.slice(0, -2), u.shape.slice(0, -2)).concat([T, h]), G = t ? c(i, [ne, m, T]) : c(i, [ne, T, m]), q = s ? c(u, [te, h, d]) : c(u, [te, d, h]);
 let E;
 r != null && (E = S(r, "bias", "fused matMul"), [E] = ie(E, i), ae(x, E.shape));
 let se;
 a != null && (se = S(a, "prelu weights", "fused matMul"));
 const re = (D, C) => {
-const [A, O,
+const [A, O, _, j] = C, g = je(c(D, _.shape), _, o);
 let L, N;
 if (!t && !s ? (L = k(g, O, !1, !0), N = k(A, g, !0, !1)) : !t && s ? (L = k(g, O, !1, !1), N = k(g, A, !0, !1)) : t && !s ? (L = k(O, g, !1, !0), N = k(A, g, !1, !1)) : (L = k(O, g, !0, !0), N = k(g, A, !0, !0)), r != null) {
-const
-return [L, N,
+const _e = Ve(j, g);
+return [L, N, _e];
 } else
 return [L, N];
 }, oe = {
@@ -80,17 +80,17 @@ function Qe({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activ
 preluActivationWeights: se
 }, ce = { transposeA: t, transposeB: s, activation: o, leakyreluAlpha: f };
 return r == null ? le((C, A, O) => {
-const
+const _ = (
 // tslint:disable-next-line: no-unnecessary-type-assertion
 J.runKernel(fe, oe, ce)
 );
-return O([C, A,
-})(G, q) : le((C, A, O,
+return O([C, A, _]), { value: c(_, x), gradFunc: re };
+})(G, q) : le((C, A, O, _) => {
 const j = (
 // tslint:disable-next-line: no-unnecessary-type-assertion
 J.runKernel(fe, oe, ce)
 );
-return
+return _([C, A, j, O]), { value: c(j, x), gradFunc: re };
 })(G, q, E);
 }
 const pe = /* @__PURE__ */ w({ fusedMatMul_: Qe });
@@ -119,7 +119,7 @@ class z extends Error {
 super(n), Object.setPrototypeOf(this, z.prototype);
 }
 }
-function
+function yn(e, n) {
 if (Array.isArray(e)) {
 let t = [];
 for (let s = 0; s < n; s++)
@@ -134,7 +134,7 @@ function de(e, n) {
 if (!e)
 throw new z(n);
 }
-function
+function Tn(e, n) {
 let t = 0;
 for (const s of e)
 s === n && t++;
@@ -208,8 +208,8 @@ function Fn(e, n = {}, t = {}, s = "object", r = !1) {
 for (const h of Object.keys(t))
 p[h] = t[h];
 Y(o.config);
-const
-return p = Object.assign({}, d),
+const T = i(f, o.config, t, r);
+return p = Object.assign({}, d), T;
 } else {
 const u = Object.assign({}, p);
 for (const d of Object.keys(t))
@@ -249,7 +249,7 @@ function Mn(e, n, t = 0, s = 1 / 0) {
 return de(t >= 0), de(s >= t), Array.isArray(e) && e.length >= t && e.length <= s && e.every((r) => typeof r === n);
 }
 function en(e, n) {
-Array.isArray(e) ? (
+Array.isArray(e) ? (y(e.length > 0, () => `${n} is unexpectedly an empty array.`), e.forEach((t, s) => en(t, `element ${s + 1} of ${n}`))) : y(Number.isInteger(e) && e > 0, () => `Expected ${n} to be a positive integer, but got ${Oe(e)}.`);
 }
 function Oe(e) {
 return e === null ? "null" : Array.isArray(e) ? "[" + e.map((n) => Oe(n)).join(",") + "]" : typeof e == "string" ? `"${e}"` : `${e}`;
@@ -351,7 +351,7 @@ function Wn(e, n) {
 }
 let K;
 function Xn() {
-return K == null && (K =
+return K == null && (K = ye().epsilon()), K;
 }
 function W() {
 return "channelsLast";
@@ -570,7 +570,7 @@ function at(e, n = 1) {
 return Fe(e);
 }
 function lt(e) {
-return $(() =>
+return $(() => Te(e, U(Ie(e), 1)));
 }
 function ft(e, n, t, s) {
 return $(() => Ne(e, n, t, s));
@@ -589,7 +589,7 @@ export {
 $e as A,
 ct as B,
 Wn as C,
-
+Tn as D,
 at as E,
 ht as F,
 lt as G,
@@ -637,7 +637,7 @@ export {
 Ln as t,
 Pn as u,
 Cn as v,
-
+yn as w,
 vn as x,
 Z as y,
 an as z
@@ -0,0 +1,11 @@
+import { o as e, q as a, x as i, E as c, T as l } from "./index-Duu1Lvvv.js";
+function u(r, t) {
+const n = a(r, "x", "tile", "string_or_numeric");
+i(n.rank === t.length, () => `Error in transpose: rank of input ${n.rank} must match length of reps ${t}.`);
+const o = { x: n }, s = { reps: t };
+return c.runKernel(l, o, s);
+}
+const p = /* @__PURE__ */ e({ tile_: u });
+export {
+p as t
+};
@@ -13,15 +13,13 @@ export default abstract class BaseTokeniser extends EE<'trainStatus'> implements
 protected addSpecialTokens(): void;
 protected addSpecialToken(token: string, index: number): void;
 abstract train(text: string[]): Promise<number>;
-abstract tokenise(text: string[], numeric?: boolean): Promise<string[][] | number[][]>;
-abstract detokenise(tokens: string[][] | number[][]): Promise<string[]>;
 abstract getVocab(): string[];
-abstract getMerges():
+abstract getMerges(): [string, string][];
 abstract destroy(): void;
-abstract encode(text: string):
-encodeSequence(text: string):
-encodeConversation(conversation: Conversation[], completion?: boolean):
-abstract decode(tokens: number[]):
-decodeConversation(tokens: number[]):
+abstract encode(text: string): number[];
+encodeSequence(text: string): number[];
+encodeConversation(conversation: Conversation[], completion?: boolean): number[];
+abstract decode(tokens: number[]): string;
+decodeConversation(tokens: number[]): Conversation[];
 getSpecialTokenIndex(token: string): number | undefined;
 }
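
The BaseTokeniser.d.ts hunk above shows the tokeniser surface dropping the array-of-arrays tokenise/detokenise pair in favour of synchronous encode/decode methods that work on flat token-id arrays. The sketch below is illustrative only: it restates the declarations from that hunk as a local structural interface (`Conversation` is the package's own type, whose shape is not shown in this diff, so it is treated as `unknown` here) and exercises them with a hypothetical round-trip helper.

```ts
// Structural view of the 0.12.x declarations shown in the BaseTokeniser.d.ts hunk above.
// Sketch for illustration; not the package's actual exported interface.
interface TokeniserLike {
  train(text: string[]): Promise<number>;           // still async, resolves to a number
  encode(text: string): number[];                    // now synchronous, flat token ids
  decode(tokens: number[]): string;                  // now synchronous, plain string
  encodeConversation(conversation: unknown[], completion?: boolean): number[];
  decodeConversation(tokens: number[]): unknown[];
  getVocab(): string[];
}

// Hypothetical usage: train on a corpus, then round-trip a prompt through
// the synchronous encode/decode pair.
async function roundTrip(tok: TokeniserLike, corpus: string[]): Promise<string> {
  await tok.train(corpus);
  const ids = tok.encode("hello world");
  return tok.decode(ids);
}
```

Because tokenise/detokenise are gone from the abstract class in 0.12.x, subclasses and callers deal only in `number[]` and `string` rather than nested `string[][] | number[][]` results.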