@genai-fi/nanogpt 0.10.1 → 0.10.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Generator.js +14 -14
- package/dist/{RealDiv-DgA3z9oO.js → RealDiv-zz7FpkKX.js} +17 -17
- package/dist/{Reshape-CF6odzV4.js → Reshape-CDVLyVfz.js} +3 -3
- package/dist/{Reshape-_kILl6tK.js → Reshape-CHdUjC72.js} +4 -4
- package/dist/TeachableLLM.js +8 -8
- package/dist/{axis_util-BvHEw88j.js → axis_util-BsIr9ZNu.js} +1 -1
- package/dist/backend.js +2 -2
- package/dist/{backend_util-D-rUb2ty.js → backend_util-B1XRLuq9.js} +31 -31
- package/dist/{backend_webgpu-B0u2ndUn.js → backend_webgpu-CqpfEImu.js} +5 -5
- package/dist/{broadcast_to-CwF7XIeu.js → broadcast_to-B0ChcDaz.js} +4 -4
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/matMulGelu.js +5 -5
- package/dist/checks/normRMS.js +4 -4
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.js +2 -2
- package/dist/checks/qkv.js +3 -3
- package/dist/checks/rope.js +2 -2
- package/dist/{complex-CSlYz-2T.js → complex-BBiRlsVq.js} +3 -3
- package/dist/{concat-BHlIJeyT.js → concat-DmBLPVGC.js} +3 -3
- package/dist/{concat_util-DcJk7YHS.js → concat_util-iBYIyuQe.js} +1 -1
- package/dist/{dataset-0xP8GjwI.js → dataset-D2P7rHAw.js} +5 -5
- package/dist/{dropout-C1pM3f11.js → dropout-B1x1kYMa.js} +3 -3
- package/dist/{expand_dims-BPG4fwBP.js → expand_dims-ouvfxQ1n.js} +3 -3
- package/dist/{exports_initializers-xuidcwI4.js → exports_initializers-CZSUJoVE.js} +1 -1
- package/dist/{gather-DykLGqmW.js → gather-CH9sdacz.js} +2 -2
- package/dist/{gelu-CNLFZWea.js → gelu-Bmhopi0J.js} +2 -2
- package/dist/{gpgpu_math-DDVJCn6-.js → gpgpu_math-DsCcikas.js} +3 -3
- package/dist/{index-ZyQhjEPo.js → index-D6Q1lPZO.js} +55 -55
- package/dist/{index-CjOj7j-u.js → index-DRyE072i.js} +15 -15
- package/dist/{kernel_funcs_utils-Dg_-E44D.js → kernel_funcs_utils-CWfOAPGO.js} +9 -9
- package/dist/layers/BaseLayer.js +10 -10
- package/dist/layers/CausalSelfAttention.js +6 -6
- package/dist/layers/MLP.js +4 -4
- package/dist/layers/PositionEmbedding.js +5 -5
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.js +4 -4
- package/dist/layers/TiedEmbedding.js +6 -6
- package/dist/layers/TransformerBlock.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +8 -8
- package/dist/{log_sum_exp-DWI-76TI.js → log_sum_exp-D3ftBNY5.js} +6 -6
- package/dist/main.js +8 -8
- package/dist/{matMul16--R5hOwDG.js → matMul16-fEAJ4smh.js} +4 -4
- package/dist/{mat_mul-DeAh4uTH.js → mat_mul-C59XWcJd.js} +2 -2
- package/dist/{mod-Gt1rMB4n.js → mod-DESSvHIU.js} +2 -2
- package/dist/models/NanoGPTV1.js +2 -2
- package/dist/models/model.js +8 -8
- package/dist/{mulmat_packed_gpu-BMFhLwta.js → mulmat_packed_gpu-Coh6qbJk.js} +1 -1
- package/dist/{ones-CAMiP4I2.js → ones-jU9jlQvM.js} +4 -4
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.js +1 -1
- package/dist/ops/add16.js +1 -1
- package/dist/ops/appendCache.js +3 -3
- package/dist/ops/attentionMask.js +1 -1
- package/dist/ops/concat16.js +2 -2
- package/dist/ops/cpu/adamAdjust.js +2 -2
- package/dist/ops/cpu/adamMoments.js +3 -3
- package/dist/ops/cpu/appendCache.js +3 -3
- package/dist/ops/cpu/attentionMask.js +6 -6
- package/dist/ops/cpu/fusedSoftmax.js +3 -3
- package/dist/ops/cpu/gatherSub.js +4 -4
- package/dist/ops/cpu/gelu.js +2 -2
- package/dist/ops/cpu/matMul16.js +3 -3
- package/dist/ops/cpu/matMulGelu.js +4 -4
- package/dist/ops/cpu/matMulMul.js +2 -2
- package/dist/ops/cpu/mulDropout.js +2 -2
- package/dist/ops/cpu/normRMS.js +2 -2
- package/dist/ops/cpu/qkv.js +4 -4
- package/dist/ops/cpu/rope.js +6 -6
- package/dist/ops/cpu/scatterSub.js +7 -7
- package/dist/ops/dot16.js +2 -2
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.js +2 -2
- package/dist/ops/grads/attentionMask.js +3 -3
- package/dist/ops/grads/gelu.js +3 -3
- package/dist/ops/grads/matMul16.js +4 -4
- package/dist/ops/grads/matMulGelu.js +2 -2
- package/dist/ops/grads/normRMS.js +2 -2
- package/dist/ops/grads/pack16.js +4 -4
- package/dist/ops/grads/qkv.js +4 -4
- package/dist/ops/grads/rope.js +3 -3
- package/dist/ops/grads/softmax16.js +2 -2
- package/dist/ops/grads/unpack16.js +3 -3
- package/dist/ops/matMul16.js +3 -3
- package/dist/ops/matMulGelu.js +1 -1
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.js +1 -1
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.js +2 -2
- package/dist/ops/qkv.js +1 -1
- package/dist/ops/reshape16.js +3 -3
- package/dist/ops/rope.js +5 -5
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.js +2 -2
- package/dist/ops/softmax16.js +1 -1
- package/dist/ops/sub16.js +1 -1
- package/dist/ops/sum16.js +2 -2
- package/dist/ops/transpose16.js +4 -4
- package/dist/ops/unpack16.js +2 -2
- package/dist/ops/webgl/adamAdjust.js +3 -3
- package/dist/ops/webgl/adamMoments.js +2 -2
- package/dist/ops/webgl/appendCache.js +2 -2
- package/dist/ops/webgl/attentionMask.js +2 -2
- package/dist/ops/webgl/fusedSoftmax.js +6 -6
- package/dist/ops/webgl/gatherSub.js +2 -2
- package/dist/ops/webgl/gelu.js +3 -3
- package/dist/ops/webgl/log.js +4 -4
- package/dist/ops/webgl/matMul16.js +5 -5
- package/dist/ops/webgl/matMulGelu.js +6 -6
- package/dist/ops/webgl/matMulMul.js +2 -2
- package/dist/ops/webgl/mulDropout.js +2 -2
- package/dist/ops/webgl/normRMS.js +3 -3
- package/dist/ops/webgl/qkv.js +2 -2
- package/dist/ops/webgl/rope.js +2 -2
- package/dist/ops/webgl/scatterSub.js +2 -2
- package/dist/ops/webgpu/adamAdjust.js +5 -5
- package/dist/ops/webgpu/adamMoments.js +5 -5
- package/dist/ops/webgpu/add16.js +2 -2
- package/dist/ops/webgpu/appendCache.js +5 -5
- package/dist/ops/webgpu/attentionMask.js +4 -4
- package/dist/ops/webgpu/attentionMask32_program.js +2 -2
- package/dist/ops/webgpu/concat16.js +7 -7
- package/dist/ops/webgpu/gatherSub.js +5 -5
- package/dist/ops/webgpu/gelu.js +4 -4
- package/dist/ops/webgpu/matMul16.js +6 -6
- package/dist/ops/webgpu/matMul16_program.js +3 -3
- package/dist/ops/webgpu/mul16.js +2 -2
- package/dist/ops/webgpu/normRMS.js +4 -4
- package/dist/ops/webgpu/normRMSGrad.js +6 -6
- package/dist/ops/webgpu/pack16.js +2 -2
- package/dist/ops/webgpu/pack16_program.js +2 -2
- package/dist/ops/webgpu/qkv.js +4 -4
- package/dist/ops/webgpu/rope.js +5 -5
- package/dist/ops/webgpu/scatterSub.js +5 -5
- package/dist/ops/webgpu/slice16.js +6 -6
- package/dist/ops/webgpu/softmax16.js +4 -4
- package/dist/ops/webgpu/softmax16_program.js +2 -2
- package/dist/ops/webgpu/softmax16_subgroup_program.js +2 -2
- package/dist/ops/webgpu/softmax16grad.js +2 -2
- package/dist/ops/webgpu/sub16.js +2 -2
- package/dist/ops/webgpu/sum16.js +5 -5
- package/dist/ops/webgpu/transpose16.js +3 -3
- package/dist/ops/webgpu/transpose16_program.js +2 -2
- package/dist/ops/webgpu/transpose16_shared_program.js +4 -4
- package/dist/ops/webgpu/unpack16.js +4 -4
- package/dist/ops/webgpu/utils/binary_op.js +4 -4
- package/dist/ops/webgpu/utils/reductions.js +5 -5
- package/dist/{ops-CNI3TwqM.js → ops-BFDtP6th.js} +24 -24
- package/dist/{pack16-CFUqumar.js → pack16-CmVZs6af.js} +3 -3
- package/dist/patches/PackedTensor.js +1 -1
- package/dist/patches/engine.js +7 -5
- package/dist/patches/tape.js +1 -1
- package/dist/patches/webgpu_backend.js +5 -5
- package/dist/patches/webgpu_base.js +1 -1
- package/dist/patches/webgpu_program.js +3 -3
- package/dist/{random_width-DY6Kk2Dl.js → random_width-BVV9HveY.js} +31 -31
- package/dist/{range-BMS52eQi.js → range-ZZZD60Fx.js} +2 -2
- package/dist/{reciprocal-CTmshQ9J.js → reciprocal-CrYlsAGD.js} +2 -2
- package/dist/{register_all_kernels-Bwu1PTuU.js → register_all_kernels-nvj2k7OC.js} +41 -41
- package/dist/{relu-yZ2-7WxU.js → relu-BYDneVPn.js} +2 -2
- package/dist/{reshape-DevtBWtf.js → reshape-CaPQzFvz.js} +2 -2
- package/dist/{rope-B5UUMsPi.js → rope-s4W2XO9B.js} +5 -5
- package/dist/{scatter_nd_util-5EL-8VAQ.js → scatter_nd_util-C7zXRT_h.js} +1 -1
- package/dist/{selu_util-D1w6yyTO.js → selu_util-BGPXmd4B.js} +16 -16
- package/dist/{shared-BRksrJb3.js → shared-CHhxz-O5.js} +1 -1
- package/dist/{shared-BuAXb4CI.js → shared-D2NP_CpY.js} +8 -8
- package/dist/{sin-BGfy2HZo.js → sin-Djs4aQiu.js} +2 -2
- package/dist/{slice-D_gkkqZK.js → slice-DvovR5wq.js} +2 -2
- package/dist/{slice_util-DtEldBfK.js → slice_util-DyjSAD0u.js} +1 -1
- package/dist/{softmax-ZHVebtR1.js → softmax-C9JQEtnO.js} +2 -2
- package/dist/{split-DrfihRpZ.js → split-DBck65sX.js} +2 -2
- package/dist/{squeeze-DZEpeblb.js → squeeze-C00Ipm_7.js} +3 -3
- package/dist/{stack-yOIAalTq.js → stack-ChnHwRpX.js} +3 -3
- package/dist/{sum-_fzj5ZTB.js → sum-ywRJj3Zr.js} +2 -2
- package/dist/{tensor-f35l8Odg.js → tensor-0r5yOo2R.js} +1 -1
- package/dist/{tensor-DdQUJZlz.js → tensor-CzmOBsdf.js} +21 -21
- package/dist/{tensor1d-CeZuc-Rv.js → tensor1d-BlUT89BP.js} +2 -2
- package/dist/{tensor2d-G4Ys2GxX.js → tensor2d-CSB4KOb0.js} +2 -2
- package/dist/{tensor4d-B8roDgtc.js → tensor4d-D7bLqGqz.js} +2 -2
- package/dist/{tensor_util-DV-FP5Q3.js → tensor_util-DfwaWayG.js} +12 -12
- package/dist/{tfjs_backend-kNyO5L2d.js → tfjs_backend-CNkSTL0c.js} +38 -38
- package/dist/{tile-BzyEiF-F.js → tile-CR074jmp.js} +3 -3
- package/dist/training/Adam.js +2 -2
- package/dist/training/AdamExt.js +1 -1
- package/dist/training/DatasetBuilder.js +2 -2
- package/dist/training/FullTrainer.js +1 -1
- package/dist/training/Trainer.js +2 -2
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/{transpose-DKELTqhe.js → transpose-DH4gmHvu.js} +4 -4
- package/dist/utilities/dummy.js +3 -3
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.js +338 -304
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.js +5 -5
- package/dist/utilities/weights.js +2 -2
- package/dist/{variable-Bhn5bHYv.js → variable-DzfrwYuP.js} +1 -1
- package/dist/{webgpu_program-Cigz-7RF.js → webgpu_program-DzaQiqel.js} +2 -2
- package/dist/{webgpu_util-BBCnKm2X.js → webgpu_util-0_ubCEHJ.js} +2 -2
- package/dist/{zeros-2gldETuK.js → zeros-DBFVbpv5.js} +3 -3
- package/package.json +1 -1
@@ -1,9 +1,9 @@
-import { a as K,
-import { A as I, B as b, E as U, m as
-import { U as X, V as Y, W as Z, X as J, Y as Q, Z as S } from "./tensor_util-
-import { r as d } from "./relu-
-import { r as z } from "./reshape-
-import { s as tt } from "./sum-
+import { a as K, G as P } from "./tensor-CzmOBsdf.js";
+import { A as I, B as b, E as U, m as G, q } from "./index-D6Q1lPZO.js";
+import { U as X, V as Y, W as Z, X as J, Y as Q, Z as S } from "./tensor_util-DfwaWayG.js";
+import { r as d } from "./relu-BYDneVPn.js";
+import { r as z } from "./reshape-CaPQzFvz.js";
+import { s as tt } from "./sum-ywRJj3Zr.js";
function Ct(t, n, e, o, s = "NHWC", i) {
const l = t[3], r = [...n, l], u = lt(s);
return B(t, r, e, i, o, null, null, u);
@@ -71,9 +71,9 @@ function nt(t, n, e, o, s, i = !1, l = "channelsLast", r) {
[u, a, c, h, f] = t;
else
throw new Error(`Unknown dataFormat ${l}`);
-const [p, g, $, , w] = n, [m, y, D] = v(e), [A, x, E] = v(o), L = k(p, A), C = k(g, x), M = k($, E), { padInfo: j, outDepth: W, outHeight:
+const [p, g, $, , w] = n, [m, y, D] = v(e), [A, x, E] = v(o), L = k(p, A), C = k(g, x), M = k($, E), { padInfo: j, outDepth: W, outHeight: N, outWidth: H } = rt(s, c, h, f, m, y, D, L, C, M, r), O = i ? w * a : w;
let _;
-return l === "channelsFirst" ? _ = [u, O, W,
+return l === "channelsFirst" ? _ = [u, O, W, N, H] : l === "channelsLast" && (_ = [u, W, N, H, O]), {
batchSize: u,
dataFormat: l,
inDepth: c,
@@ -81,8 +81,8 @@ function nt(t, n, e, o, s, i = !1, l = "channelsLast", r) {
inWidth: f,
inChannels: a,
outDepth: W,
-outHeight:
-outWidth:
+outHeight: N,
+outWidth: H,
outChannels: O,
padInfo: j,
strideDepth: m,
@@ -249,15 +249,15 @@ function Tt(t, n, e) {
if (e == null || e === "linear")
return t;
if (e === "relu")
-return
+return G(t, Et(n));
throw new Error(`Cannot compute gradient for fused activation ${e}.`);
}
function Wt(t, n) {
let e = n;
-const o =
+const o = q(t.shape, n.shape);
return o.length > 0 && (e = tt(e, o)), z(e, t.shape);
}
-function
+function Nt(t, n, e, o) {
if (n === "linear")
return t;
if (n === "relu")
@@ -274,12 +274,12 @@ function Ht(t, n, e, o) {
return ct(t);
throw new Error(`Unknown fused activation ${n}.`);
}
-const
+const Ht = (t, n) => !(t > 0) || n === "linear";
const Ot = 1.7580993408473768, _t = 1.0507009873554805;
export {
_t as S,
Ot as a,
-
+Nt as b,
Rt as c,
B as d,
nt as e,
@@ -296,7 +296,7 @@ export {
Et as p,
ct as q,
at as r,
-
+Ht as s,
V as t,
gt as u,
$t as v
@@ -1,4 +1,4 @@
-import { s as l } from "./shared-
+import { s as l } from "./shared-D2NP_CpY.js";
const { addImpl: m, bincountImpl: s, bincountReduceImpl: a, bitwiseAndImpl: I, castImpl: e, ceilImpl: t, concatImpl: r, equalImpl: i, expImpl: C, expm1Impl: P, floorImpl: U, gatherNdImpl: n, gatherV2Impl: g, greaterImpl: o, greaterEqualImpl: c, lessImpl: u, lessEqualImpl: d, linSpaceImpl: q, logImpl: R, maxImpl: h, maximumImpl: x, minimumImpl: b, multiplyImpl: E, negImpl: S, notEqualImpl: T, prodImpl: w, raggedGatherImpl: y, raggedRangeImpl: A, raggedTensorToTensorImpl: F, rangeImpl: G, rsqrtImpl: N, scatterImpl: f, sigmoidImpl: k, simpleAbsImpl: B, sliceImpl: H, sparseFillEmptyRowsImpl: K, sparseReshapeImpl: V, sparseSegmentReductionImpl: j, sqrtImpl: v, staticRegexReplaceImpl: z, stridedSliceImpl: D, stringNGramsImpl: J, stringSplitImpl: L, stringToHashBucketFastImpl: M, subImpl: O, tileImpl: Q, topKImpl: W, transposeImpl: X, uniqueImpl: Y } = l;
export {
b as A,
@@ -1,12 +1,12 @@
-import { j as Lt, h as ht, J as H, t as Ce } from "./index-
-import { a as Pt, s as V, i as K, g as et, W as dt, X as at, f as st, Y as De, O as We, Z as W,
-import { a9 as $e, h as ze, I as Be, F as je, x as Ge, A as qt, aF as _t, aG as Vt, aH as Ct, z as Dt, aI as Wt, Q as Ut, a6 as $t, ak as zt, al as Bt, am as jt, an as Gt, L as Zt, af as Ht, ar as Kt, a7 as Xt, N as Ze, as as Jt, H as He, aJ as Ke, u as Xe, aK as Qt, U as Yt, f as Je, ad as te, au as ee, aL as ne, ae as se } from "./tensor_util-
-import { f as it, g as Qe, a as Ye, R as tn, v as en, d as nn, e as sn, h as on, i as rn, j as an, k as ln, l as cn, m as un, n as hn, o as fn, p as kt, q as dn, r as gn, s as mn } from "./backend_util-
+import { j as Lt, h as ht, J as H, t as Ce } from "./index-D6Q1lPZO.js";
+import { a as Pt, s as V, i as K, g as et, W as dt, X as at, f as st, Y as De, O as We, Z as W, H as Ue, p as At, R as It, q as pt, _ as ot } from "./tensor-CzmOBsdf.js";
+import { a9 as $e, h as ze, I as Be, F as je, x as Ge, A as qt, aF as _t, aG as Vt, aH as Ct, z as Dt, aI as Wt, Q as Ut, a6 as $t, ak as zt, al as Bt, am as jt, an as Gt, L as Zt, af as Ht, ar as Kt, a7 as Xt, N as Ze, as as Jt, H as He, aJ as Ke, u as Xe, aK as Qt, U as Yt, f as Je, ad as te, au as ee, aL as ne, ae as se } from "./tensor_util-DfwaWayG.js";
+import { f as it, g as Qe, a as Ye, R as tn, v as en, d as nn, e as sn, h as on, i as rn, j as an, k as ln, l as cn, m as un, n as hn, o as fn, p as kt, q as dn, r as gn, s as mn } from "./backend_util-B1XRLuq9.js";
import { m as Nt } from "./complex_util-Yc1A_gV1.js";
-import { a as pn, b as wn, e as In, c as xn } from "./axis_util-
-import { b as En } from "./broadcast_to-
-import { r as Fn } from "./reshape-
-import { p as bn, a as yn, i as kn, c as Nn } from "./slice_util-
+import { a as pn, b as wn, e as In, c as xn } from "./axis_util-BsIr9ZNu.js";
+import { b as En } from "./broadcast_to-B0ChcDaz.js";
+import { r as Fn } from "./reshape-CaPQzFvz.js";
+import { p as bn, a as yn, i as kn, c as Nn } from "./slice_util-DyjSAD0u.js";
import { g as vn } from "./_commonjsHelpers-ByX85dGu.js";
function Sn(e, t) {
for (var n = 0; n < t.length; n++) {
@@ -1,5 +1,5 @@
-import { A as o, B as t, E as r } from "./index-
-import { l as i, n as a } from "./tensor_util-
+import { A as o, B as t, E as r } from "./index-D6Q1lPZO.js";
+import { l as i, n as a } from "./tensor_util-DfwaWayG.js";
function e(n) {
const s = { x: t(n, "x", "cos", "float32") };
return r.runKernel(i, s);
@@ -1,5 +1,5 @@
-import { A as c, B as e, E as a } from "./index-
-import { f as l } from "./tensor_util-
+import { A as c, B as e, E as a } from "./index-D6Q1lPZO.js";
+import { f as l } from "./tensor_util-DfwaWayG.js";
function p(s, o, n) {
const r = e(s, "x", "slice", "string_or_numeric");
if (r.rank === 0)
@@ -1,5 +1,5 @@
-import { A as r, B as f, E as i } from "./index-
-import { S as e } from "./tensor_util-
+import { A as r, B as f, E as i } from "./index-D6Q1lPZO.js";
+import { S as e } from "./tensor_util-DfwaWayG.js";
function l(s, o = -1) {
const t = f(s, "logits", "softmax", "float32");
if (o === -1 && (o = t.rank - 1), o !== t.rank - 1)
@@ -1,5 +1,5 @@
-import { A as p, B as i, E as c } from "./index-
-import { c as a } from "./tensor_util-
+import { A as p, B as i, E as c } from "./index-D6Q1lPZO.js";
+import { c as a } from "./tensor_util-DfwaWayG.js";
function e(t, s, o = 0) {
const n = { x: i(t, "x", "split") }, r = { numOrSizeSplits: s, axis: o };
return c.runKernel(a, n, r);
@@ -1,6 +1,6 @@
-import { A as o, B as n } from "./index-
-import { r as t } from "./reshape-
-import { h as a } from "./tensor-
+import { A as o, B as n } from "./index-D6Q1lPZO.js";
+import { r as t } from "./reshape-CaPQzFvz.js";
+import { h as a } from "./tensor-CzmOBsdf.js";
function p(r, s) {
const e = n(r, "x", "squeeze", "string_or_numeric");
return t(e, a(e.shape, s).newShape);
@@ -1,6 +1,6 @@
-import { A as e, C as c, E as i } from "./index-
-import { P as m } from "./tensor_util-
-import { a as r } from "./tensor-
+import { A as e, C as c, E as i } from "./index-D6Q1lPZO.js";
+import { P as m } from "./tensor_util-DfwaWayG.js";
+import { a as r } from "./tensor-CzmOBsdf.js";
function f(o, s = 0) {
const t = c(o, "tensors", "stack", "string_or_numeric");
r(t.length >= 1, () => "Pass at least one tensor to tf.stack"), t.length > 0 && r(s <= t[0].rank, () => "Axis must be <= rank of the tensor");
@@ -1,5 +1,5 @@
-import { A as e, B as u, G as m, E as c } from "./index-
-import { g as i } from "./tensor_util-
+import { A as e, B as u, G as m, E as c } from "./index-D6Q1lPZO.js";
+import { g as i } from "./tensor_util-DfwaWayG.js";
function l(t, o = null, n = !1) {
let s = u(t, "x", "sum");
s.dtype === "bool" && (s = m(s, "int32"));
@@ -849,18 +849,18 @@ Object.defineProperty(mt, Symbol.hasInstance, {
});
export {
et as $,
-
-
-
-
+M as A,
+$t as B,
+V as C,
+ut as D,
zt as E,
-
-
-
-
-
-
-
+X as F,
+W as G,
+Ht as H,
+kt as I,
+Tt as J,
+Bt as K,
+at as L,
yt as M,
rt as N,
ct as O,
@@ -894,16 +894,16 @@ export {
Et as l,
Ot as m,
xt as n,
-
+z as o,
K as p,
-
-
+gt as q,
+Lt as r,
R as s,
-
-
-
-
-
-
-
+_t as t,
+vt as u,
+Qt as v,
+Nt as w,
+Jt as x,
+Gt as y,
+O as z
};
@@ -1,5 +1,5 @@
-import { x as t, y as s } from "./index-
-import { c as a } from "./tensor-
+import { x as t, y as s } from "./index-D6Q1lPZO.js";
+import { c as a } from "./tensor-CzmOBsdf.js";
function f(o, r, n) {
if (a(o), r != null && r.length !== 2)
throw new Error("tensor2d() requires shape to have two numbers");
@@ -1,5 +1,5 @@
-import { x as t, y as a } from "./index-
-import { c as s } from "./tensor-
+import { x as t, y as a } from "./index-D6Q1lPZO.js";
+import { c as s } from "./tensor-CzmOBsdf.js";
function f(o, r, n) {
if (s(o), r != null && r.length !== 4)
throw new Error("tensor4d() requires shape to have four numbers");
@@ -1,4 +1,4 @@
-import { e as g, F,
+import { e as g, D as F, v as P, F as D, b as M, T as w } from "./tensor-CzmOBsdf.js";
const A = 1e-7, G = 1e-4;
class z {
constructor(s, o) {
@@ -75,7 +75,7 @@ class K {
function d(t) {
throw new Error(`'${t}' not yet implemented or not found in the registry. This kernel may not be supported by the tfjs backend you have chosen`);
}
-const _ = "Abs", U = "Acos", W = "Acosh", j = "Add", H = "AddN", X = "All", Y = "Any", Z = "ArgMax", J = "ArgMin", Q = "Asin", tt = "Asinh", st = "Atan", ot = "Atanh", et = "Atan2", nt = "AvgPool", at = "AvgPoolGrad", rt = "AvgPool3D", ct = "AvgPool3DGrad", it = "BatchMatMul", lt = "BatchToSpaceND", ut = "Bincount", pt = "BitwiseAnd", dt = "BroadcastTo", ft = "BroadcastArgs", ht = "Cast", gt = "Ceil", mt = "ClipByValue", bt = "Complex", St = "ComplexAbs", Dt = "Concat", Rt = "Conv2D", Tt = "Conv2DBackpropFilter", kt = "Conv2DBackpropInput", Nt = "Conv3D", xt = "Conv3DBackpropFilterV2",
const _ = "Abs", U = "Acos", W = "Acosh", j = "Add", H = "AddN", X = "All", Y = "Any", Z = "ArgMax", J = "ArgMin", Q = "Asin", tt = "Asinh", st = "Atan", ot = "Atanh", et = "Atan2", nt = "AvgPool", at = "AvgPoolGrad", rt = "AvgPool3D", ct = "AvgPool3DGrad", it = "BatchMatMul", lt = "BatchToSpaceND", ut = "Bincount", pt = "BitwiseAnd", dt = "BroadcastTo", ft = "BroadcastArgs", ht = "Cast", gt = "Ceil", mt = "ClipByValue", bt = "Complex", St = "ComplexAbs", Dt = "Concat", Rt = "Conv2D", Tt = "Conv2DBackpropFilter", kt = "Conv2DBackpropInput", Nt = "Conv3D", xt = "Conv3DBackpropFilterV2", vt = "Conv3DBackpropInputV2", Pt = "Cos", Ct = "Cosh", Ft = "Cumprod", yt = "Cumsum", It = "CropAndResize", Bt = "DenseBincount", Et = "DepthToSpace", Mt = "DepthwiseConv2dNative", wt = "DepthwiseConv2dNativeBackpropFilter", At = "DepthwiseConv2dNativeBackpropInput", Gt = "Diag", Lt = "Dilation2D", $t = "Dilation2DBackpropInput", Ot = "Dilation2DBackpropFilter", qt = "Draw", Vt = "RealDiv", zt = "Einsum", Kt = "Elu", _t = "EluGrad", Ut = "Erf", Wt = "Equal", jt = "Exp", Ht = "ExpandDims", Xt = "Expm1", Yt = "FFT", Zt = "Fill", Jt = "FlipLeftRight", Qt = "Floor", ts = "FloorDiv", ss = "FusedBatchNorm", os = "GatherV2", es = "GatherNd", ns = "Greater", as = "GreaterEqual", rs = "Identity", cs = "IFFT", is = "Imag", ls = "IsFinite", us = "IsInf", ps = "IsNan", ds = "LeakyRelu", fs = "Less", hs = "LessEqual", gs = "LinSpace", ms = "Log", bs = "Log1p", Ss = "LogicalAnd", Ds = "LogicalNot", Rs = "LogicalOr", Ts = "LogSoftmax", ks = "LRN", Ns = "LRNGrad", xs = "Max", vs = "Maximum", Ps = "MaxPool", Cs = "MaxPoolGrad", Fs = "MaxPool3D", ys = "MaxPool3DGrad", Is = "MaxPoolWithArgmax", Bs = "Mean", Es = "Min", Ms = "Minimum", ws = "MirrorPad", As = "Mod", Gs = "Multinomial", Ls = "Multiply", $s = "Neg", Os = "NotEqual", qs = "NonMaxSuppressionV3", Vs = "NonMaxSuppressionV4", zs = "NonMaxSuppressionV5", Ks = "OnesLike", _s = "OneHot", Us = "Pack", Ws = "PadV2", js = "Pow", Hs = "Prelu", Xs = "Prod", Ys = "RaggedGather", Zs = "RaggedRange", Js = "RaggedTensorToTensor", Qs = "Range", to = "Real", so = "Reciprocal", oo = "Relu", eo = "Reshape", no = "ResizeNearestNeighbor", ao = "ResizeNearestNeighborGrad", ro = "ResizeBilinear", co = "ResizeBilinearGrad", io = "Relu6", lo = "Reverse", uo = "Round", po = "Rsqrt", fo = "ScatterNd", ho = "TensorScatterUpdate", go = "SearchSorted", mo = "Select", bo = "Selu", So = "Slice", Do = "Sin", Ro = "Sinh", To = "Sign", ko = "Sigmoid", No = "Softplus", xo = "Sqrt", vo = "Sum", Po = "SpaceToBatchND", Co = "SplitV", Fo = "Softmax", yo = "SparseFillEmptyRows", Io = "SparseReshape", Bo = "SparseSegmentMean", Eo = "SparseSegmentSum", Mo = "SparseToDense", wo = "SquaredDifference", Ao = "Square", Go = "StaticRegexReplace", Lo = "StridedSlice", $o = "StringNGrams", Oo = "StringSplit", qo = "StringToHashBucketFast", Vo = "Sub", zo = "Tan", Ko = "Tanh", _o = "Tile", Uo = "TopK", Wo = "Transform", jo = "Transpose", Ho = "Unique", Xo = "Unpack", Yo = "UnsortedSegmentSum", Zo = "ZerosLike", Jo = "Step", Qo = "FromPixels", te = "RotateWithOffset", se = "_FusedMatMul", oe = "FusedConv2D", ee = "FusedDepthwiseConv2D";
function y(...t) {
g().getBool("IS_TEST") || g().getBool("PROD") || console.warn(...t);
}
@@ -122,14 +122,14 @@ class ue {
a = c();
};
let l;
-const p =
+const p = P();
if (this.backendTimer.timerAvailable())
l = this.backendTimer.time(r);
else {
r();
for (const e of a)
e.dataSync();
-l = Promise.resolve({ kernelMs:
+l = Promise.resolve({ kernelMs: P() - p });
}
if (g().getBool("CHECK_COMPUTATION_FOR_ERRORS"))
for (let e = 0; e < a.length; e++) {
@@ -172,8 +172,8 @@ class $ {
for (const h in r) {
const m = r[h];
if (m != null) {
-const b = m.shape || o.shape,
-f += `${h}: ${
+const b = m.shape || o.shape, v = b.length;
+f += `${h}: ${v}D ${v > 0 ? b : ""} `;
}
}
console.log(`%c${n} %c${p} %c${e}D ${u} %c${i} %c${f} %c${l}`, "font-weight:bold", "color:red", "color:blue", "color: orange", "color: green", "color: steelblue");
@@ -389,7 +389,7 @@ export {
Rt as aU,
kt as aV,
Nt as aW,
-
+vt as aX,
Ct as aY,
Ft as aZ,
yt as a_,
@@ -398,7 +398,7 @@ export {
js as ac,
xo as ad,
Vo as ae,
-
+vs as af,
ut as ag,
mo as ah,
zt as ai,
@@ -425,12 +425,12 @@ export {
Ut as b1,
No as b2,
Ds as b3,
-
+Ps as b4,
Fs as b5,
_s as b6,
Ks as b7,
Ws as b8,
-
+Po as b9,
us as bA,
ps as bB,
Ts as bC,
@@ -500,12 +500,12 @@ export {
so as d,
oo as e,
So as f,
-
+vo as g,
bt as h,
Gs as i,
fo as j,
es as k,
-
+Pt as l,
me as m,
Do as n,
ue as o,
@@ -1,16 +1,16 @@
-import { A as g, B as $, i as _e, E as M, n as x, j as ie, w as ue, R as Te, t as A, G as ge, m as ke, l as Ee, S as Ie } from "./index-
-import { a as y, s as ae,
-import { t as Le } from "./tensor1d-
-import { r as Ne, d as be } from "./dropout-
-import { s as C } from "./slice-
-import { r as c } from "./reshape-
-import { g as Ce } from "./gather-
-import { s as
-import { a1 as Me, m as fe, a2 as he } from "./tensor_util-
-import { t as Ue } from "./tile-
-import { m as w } from "./mat_mul-
-import { t as xe } from "./transpose-
-import { c as j } from "./concat-
+import { A as g, B as $, i as _e, E as M, n as x, j as ie, w as ue, R as Te, t as A, G as ge, m as ke, l as Ee, S as Ie } from "./index-D6Q1lPZO.js";
+import { a as y, s as ae, v as le } from "./tensor-CzmOBsdf.js";
+import { t as Le } from "./tensor1d-BlUT89BP.js";
+import { r as Ne, d as be } from "./dropout-B1x1kYMa.js";
+import { s as C } from "./slice-DvovR5wq.js";
+import { r as c } from "./reshape-CaPQzFvz.js";
+import { g as Ce } from "./gather-CH9sdacz.js";
+import { s as ve, b as Fe, m as Pe, l as je, o as Be } from "./selu_util-BGPXmd4B.js";
+import { a1 as Me, m as fe, a2 as he } from "./tensor_util-DfwaWayG.js";
+import { t as Ue } from "./tile-CR074jmp.js";
+import { m as w } from "./mat_mul-C59XWcJd.js";
+import { t as xe } from "./transpose-DH4gmHvu.js";
+import { c as j } from "./concat-DmBLPVGC.js";
function Ge(e, n, t) {
const s = $(e, "x", "clipByValue");
if (y(n <= t, () => `Error in clip: min (${n}) must be less than or equal to max (${t}).`), n === t)
@@ -60,9 +60,9 @@ function en(e, n, t) {
}
const U = /* @__PURE__ */ g({ slice4d_: en });
function nn({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activation: o = "linear", preluActivationWeights: a, leakyreluAlpha: f = 0.2 }) {
-if (
+if (ve(M.state.gradientDepth, o) === !1) {
let D = w(e, n, t, s);
-return r != null && (D = x(D, r)),
+return r != null && (D = x(D, r)), Fe(D, o, a, f);
}
let i = $(e, "a", "fused matMul"), u = $(n, "b", "fused matMul");
[i, u] = fe(i, u);
@@ -73,8 +73,8 @@ function nn({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activ
r != null && (I = $(r, "bias", "fused matMul"), [I] = fe(I, i), ie(V, I.shape));
let se;
a != null && (se = $(a, "prelu weights", "fused matMul"));
-const re = (D,
-const [S, O, _, B] =
+const re = (D, v) => {
+const [S, O, _, B] = v, k = Pe(c(D, _.shape), _, o);
let L, N;
if (!t && !s ? (L = w(k, O, !1, !0), N = w(S, k, !0, !1)) : !t && s ? (L = w(k, O, !1, !1), N = w(k, S, !0, !1)) : t && !s ? (L = w(O, k, !1, !0), N = w(S, k, !1, !1)) : (L = w(O, k, !0, !0), N = w(k, S, !0, !0)), r != null) {
const De = je(B, k);
@@ -87,18 +87,18 @@ function nn({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activ
bias: I,
preluActivationWeights: se
}, ce = { transposeA: t, transposeB: s, activation: o, leakyreluAlpha: f };
-return r == null ? ue((
+return r == null ? ue((v, S, O) => {
const _ = (
// tslint:disable-next-line: no-unnecessary-type-assertion
M.runKernel(he, oe, ce)
);
-return O([
-})(q, J) : ue((
+return O([v, S, _]), { value: c(_, V), gradFunc: re };
+})(q, J) : ue((v, S, O, _) => {
const B = (
// tslint:disable-next-line: no-unnecessary-type-assertion
M.runKernel(he, oe, ce)
);
-return _([
+return _([v, S, B, O]), { value: c(B, V), gradFunc: re };
})(q, J, I);
}
const pe = /* @__PURE__ */ g({ fusedMatMul_: nn });
@@ -117,9 +117,9 @@ class l extends Error {
super(n), Object.setPrototypeOf(this, l.prototype);
}
}
-class
+class P extends Error {
constructor(n) {
-super(n), Object.setPrototypeOf(this,
+super(n), Object.setPrototypeOf(this, P.prototype);
}
}
class z extends Error {
@@ -158,11 +158,11 @@ function Cn(e) {
const t = e.replace(/(.)([A-Z][a-z0-9]+)/g, "$1_$2").replace(/([a-z])([A-Z])/g, "$1_$2").toLowerCase();
return t[0] !== "_" ? t : "private" + t;
}
-function
+function vn(e) {
return e.length <= 1 || e.indexOf("_") === -1 ? e : e.replace(/[_]+(\w|$)/g, (n, t) => t.toUpperCase());
}
let p = {};
-function
+function Fn(e) {
if (e == null)
return null;
const n = {};
@@ -180,7 +180,7 @@ function W(e) {
}
}
}
-function
+function Pn(e, n = {}, t = {}, s = "object", r = !1) {
if (typeof e == "string") {
const o = e;
let a;
@@ -286,18 +286,18 @@ function Jn(e) {
function Kn(e) {
G(un, "PoolMode", e);
}
-const
+const F = [], me = "/";
function Rn(e, n) {
-
+F.push(e);
try {
const t = n();
-return
+return F.pop(), t;
} catch (t) {
-throw
+throw F.pop(), t;
}
}
function ln() {
-return
+return F.length === 0 ? "" : F.join(me) + me;
}
function Zn(e) {
if (!Oe(e))
@@ -509,11 +509,11 @@ function it(e, n = 0, t = 1, s, r) {
}
function ut(e, n, t, s) {
if (e.rank < 2 || n.rank < 2)
-throw new
+throw new P(`dot requires both inputs to be rank >= 2 but got x shape = ${e.shape} and y shape = ${n.shape}`);
if (n.rank >= 3) {
const r = e.shape.slice(-1)[0], o = n.shape.slice(-2)[0];
if (r !== o)
-throw new
+throw new P(`If rank y >= 3, then the second last dim of y must equal the last dim of x but got x shape = ${e.shape} and y shape = ${n.shape}`);
}
if (e.rank === 2 && n.rank === 2)
return pe({
@@ -574,7 +574,7 @@ function ft(e, n, t) {
}
function ht(e, n = 1) {
if (n !== 1)
-throw new
+throw new P(`Support for alpha values other than 1 (${n}) is not implemented yet.`);
return Be(e);
}
function pt(e) {
@@ -607,7 +607,7 @@ export {
Un as K,
ft as L,
rt as M,
-
+P as N,
qn as O,
sn as P,
Gn as Q,
@@ -628,7 +628,7 @@ export {
ye as b,
G as c,
ut as d,
-
+Pn as e,
Wn as f,
Zn as g,
Nn as h,
@@ -642,9 +642,9 @@ export {
de as p,
et as q,
it as r,
-
+Fn as s,
Cn as t,
-
+vn as u,
Bn as v,
jn as w,
In as x,
@@ -1,6 +1,6 @@
-import { A as a, B as e, E as i } from "./index-
-import { T as m } from "./tensor_util-
-import { a as c } from "./tensor-
+import { A as a, B as e, E as i } from "./index-D6Q1lPZO.js";
+import { T as m } from "./tensor_util-DfwaWayG.js";
+import { a as c } from "./tensor-CzmOBsdf.js";
function l(n, t) {
const r = e(n, "x", "tile", "string_or_numeric");
c(r.rank === t.length, () => `Error in transpose: rank of input ${r.rank} must match length of reps ${t}.`);
package/dist/training/Adam.js CHANGED
@@ -1,7 +1,7 @@
import { adamAdjust as b } from "../ops/adamAdjust.js";
import { adamMoments as d } from "../ops/adamMoments.js";
-import { O as g, e as h, t as o, d as B } from "../index-
-import { z as M } from "../zeros-
+import { O as g, e as h, t as o, d as B } from "../index-D6Q1lPZO.js";
+import { z as M } from "../zeros-DBFVbpv5.js";
class R extends g {
constructor(t, a, e, s, i = null) {
super(), this.learningRate = t, this.beta1 = a, this.beta2 = e, this.lossScaling = s, this.epsilon = i, this.accBeta1 = a, this.accBeta2 = e, i === null && (this.epsilon = h().backend.epsilon());
package/dist/training/AdamExt.js CHANGED
@@ -1,5 +1,5 @@
-import { t as g } from "../index-
-import { d as u, i as d } from "../dataset-
+import { t as g } from "../index-D6Q1lPZO.js";
+import { d as u, i as d } from "../dataset-D2P7rHAw.js";
import "../index-Cp39cXWe.js";
function z(r) {
return u(async () => {
@@ -1,6 +1,6 @@
import b from "./Trainer.js";
import L from "./Evaluator.js";
-import { d as w } from "../index-
+import { d as w } from "../index-D6Q1lPZO.js";
import y from "../utilities/profile.js";
import { createTensorStatistics as D } from "../checks/weights.js";
const T = {
package/dist/training/Trainer.js CHANGED
@@ -1,7 +1,7 @@
import { DatasetBuilder as f, flattenTokens as h, PAGE_FACTOR as y } from "./DatasetBuilder.js";
import z from "./AdamExt.js";
-import { t as S, v as k, k as x, d as p, b as m } from "../index-
-import { z as g } from "../zeros-
+import { t as S, v as k, k as x, d as p, b as m } from "../index-D6Q1lPZO.js";
+import { z as g } from "../zeros-DBFVbpv5.js";
class M {
constructor(t, e, s = 1e-3) {
this.tokenizer = e, this.model = t, this.lossScaling = t.lossScaling, this.learningRate = s, this.resetOptimizer(), this.datasetBuilder = new f(e, t.config.blockSize);
@@ -1,8 +1,8 @@
import { gatherSub as x } from "../ops/gatherSub.js";
import { scatterSub as L } from "../ops/scatterSub.js";
-import { w as C, t as u, z as E, c as G } from "../index-
-import { s as y } from "../softmax-
-import { m as z, l as v } from "../log_sum_exp-
+import { w as C, t as u, z as E, c as G } from "../index-D6Q1lPZO.js";
+import { s as y } from "../softmax-C9JQEtnO.js";
+import { m as z, l as v } from "../log_sum_exp-D3ftBNY5.js";
function k(t, s) {
return u(() => {
const n = t.shape[t.shape.length - 1], c = t.shape.slice(0, -1).reduce((o, e) => o * e, 1), h = t.shape.length > 2 ? t.reshape([c, n]) : t, p = s.shape.length > 1 ? s.reshape([c]).cast("int32") : s.cast("int32"), r = z(h, -1, !0), a = G(h, r), d = v(a, -1);
|