@genai-fi/nanogpt 0.9.1 → 0.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +352 -14
- package/dist/Generator.js +69 -78
- package/dist/{RealDiv-D4EzDsC0.js → RealDiv-DgA3z9oO.js} +32 -206
- package/dist/Reshape-CF6odzV4.js +16 -0
- package/dist/Reshape-_kILl6tK.js +81 -0
- package/dist/TeachableLLM.js +28 -22
- package/dist/Trainer.d.ts +2 -0
- package/dist/Trainer.js +3 -2
- package/dist/{axis_util-TbGYJ208.js → axis_util-BvHEw88j.js} +7 -23
- package/dist/backend.d.ts +2 -1
- package/dist/backend.js +10 -4
- package/dist/backend_util-D-rUb2ty.js +474 -0
- package/dist/backend_webgpu-B0u2ndUn.js +547 -0
- package/dist/binary_op_util-pKXltfxI.js +192 -0
- package/dist/broadcast_to-CwF7XIeu.js +30 -0
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/check.d.ts +1 -1
- package/dist/checks/check.js +8 -8
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/index.d.ts +2 -0
- package/dist/checks/index.js +7 -5
- package/dist/checks/matMulGelu.js +6 -6
- package/dist/checks/normRMS.js +7 -7
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.d.ts +1 -0
- package/dist/checks/packUnpack.js +18 -0
- package/dist/checks/qkv.js +12 -27
- package/dist/checks/rope.js +2 -2
- package/dist/checks/weights.js +18 -16
- package/dist/complex-CSlYz-2T.js +13 -0
- package/dist/complex_util-Yc1A_gV1.js +55 -0
- package/dist/concat-BHlIJeyT.js +19 -0
- package/dist/concat_util-DcJk7YHS.js +22 -0
- package/dist/data/docx.js +1 -1
- package/dist/data/parquet.js +2 -2
- package/dist/data/pdf.js +1 -1
- package/dist/data/textLoader.js +1 -1
- package/dist/{dataset-DlZtKmBq.js → dataset-0xP8GjwI.js} +136 -236
- package/dist/dropout-C1pM3f11.js +99 -0
- package/dist/expand_dims-BPG4fwBP.js +13 -0
- package/dist/exports_initializers-xuidcwI4.js +7 -0
- package/dist/gather-DykLGqmW.js +10 -0
- package/dist/{gelu-Bp_-935b.js → gelu-CNLFZWea.js} +11 -10
- package/dist/{gpgpu_math-CDaYiyE_.js → gpgpu_math-DDVJCn6-.js} +90 -265
- package/dist/{index-C4L8Cm77.js → index-CieiGp4Y.js} +14 -14
- package/dist/index-CjOj7j-u.js +7308 -0
- package/dist/{index-Tf7vU29b.js → index-Cp39cXWe.js} +3 -10
- package/dist/{index-Dwqa6Zy2.js → index-DvYrXKkX.js} +2 -2
- package/dist/index-ZyQhjEPo.js +2157 -0
- package/dist/{jszip.min-CjP2V1VV.js → jszip.min-Bz5-11Bk.js} +56 -57
- package/dist/kernel_funcs_utils-Dg_-E44D.js +308 -0
- package/dist/layers/BaseLayer.d.ts +1 -0
- package/dist/layers/BaseLayer.js +7 -6
- package/dist/layers/CausalSelfAttention.d.ts +0 -1
- package/dist/layers/CausalSelfAttention.js +56 -55
- package/dist/layers/MLP.js +15 -16
- package/dist/layers/PositionEmbedding.js +5 -14
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.d.ts +2 -0
- package/dist/layers/RoPECache.js +22 -17
- package/dist/layers/TiedEmbedding.js +22 -17
- package/dist/layers/TransformerBlock.js +21 -20
- package/dist/loader/load.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +39 -33
- package/dist/loader/save.js +1 -1
- package/dist/log_sum_exp-DWI-76TI.js +41 -0
- package/dist/main.d.ts +8 -0
- package/dist/main.js +63 -52
- package/dist/matMul16--R5hOwDG.js +77 -0
- package/dist/mat_mul-DeAh4uTH.js +12 -0
- package/dist/mod-Gt1rMB4n.js +12 -0
- package/dist/models/NanoGPTV1.js +40 -31
- package/dist/models/model.d.ts +2 -0
- package/dist/models/model.js +37 -29
- package/dist/{mulmat_packed_gpu-BT60jmzP.js → mulmat_packed_gpu-BMFhLwta.js} +1 -17
- package/dist/{non_max_suppression_impl-CsEgBuMA.js → non_max_suppression_impl-B2W7YjZB.js} +0 -32
- package/dist/ones-CAMiP4I2.js +15 -0
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.d.ts +1 -1
- package/dist/ops/adamMoments.js +4 -4
- package/dist/ops/add16.d.ts +2 -0
- package/dist/ops/add16.js +9 -0
- package/dist/ops/appendCache.js +16 -9
- package/dist/ops/attentionMask.js +4 -4
- package/dist/ops/concat16.d.ts +2 -0
- package/dist/ops/concat16.js +9 -0
- package/dist/ops/cpu/adamAdjust.js +14 -13
- package/dist/ops/cpu/adamMoments.js +10 -9
- package/dist/ops/cpu/appendCache.js +9 -8
- package/dist/ops/cpu/attentionMask.js +15 -14
- package/dist/ops/cpu/fusedSoftmax.js +13 -12
- package/dist/ops/cpu/gatherSub.js +9 -24
- package/dist/ops/cpu/gelu.js +13 -12
- package/dist/ops/cpu/matMul16.d.ts +1 -0
- package/dist/ops/cpu/matMul16.js +16 -0
- package/dist/ops/cpu/matMulGelu.js +18 -16
- package/dist/ops/cpu/matMulMul.js +8 -7
- package/dist/ops/cpu/mulDropout.js +4 -3
- package/dist/ops/cpu/normRMS.js +11 -10
- package/dist/ops/cpu/qkv.js +17 -13
- package/dist/ops/cpu/rope.js +23 -22
- package/dist/ops/cpu/scatterSub.js +16 -30
- package/dist/ops/dot16.d.ts +2 -0
- package/dist/ops/dot16.js +42 -0
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.d.ts +1 -0
- package/dist/ops/grads/add16.js +27 -0
- package/dist/ops/grads/attentionMask.js +12 -19
- package/dist/ops/grads/gelu.js +4 -3
- package/dist/ops/grads/matMul16.d.ts +2 -0
- package/dist/ops/grads/matMul16.js +9 -0
- package/dist/ops/grads/matMulGelu.js +8 -7
- package/dist/ops/grads/normRMS.js +8 -7
- package/dist/ops/grads/{fusedSoftmax.d.ts → pack16.d.ts} +1 -1
- package/dist/ops/grads/pack16.js +7 -0
- package/dist/ops/grads/qkv.d.ts +3 -1
- package/dist/ops/grads/qkv.js +28 -22
- package/dist/ops/grads/rope.d.ts +2 -1
- package/dist/ops/grads/rope.js +6 -13
- package/dist/ops/grads/softmax16.d.ts +2 -0
- package/dist/ops/grads/softmax16.js +26 -0
- package/dist/ops/grads/unpack16.d.ts +2 -0
- package/dist/ops/grads/unpack16.js +6 -0
- package/dist/ops/grads/utils.d.ts +3 -0
- package/dist/ops/grads/utils.js +10 -0
- package/dist/ops/matMul16.d.ts +15 -0
- package/dist/ops/matMul16.js +13 -0
- package/dist/ops/matMulGelu.js +1 -1
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.d.ts +2 -0
- package/dist/ops/mul16.js +8 -0
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.d.ts +2 -0
- package/dist/ops/pack16.js +6 -0
- package/dist/ops/qkv.d.ts +1 -1
- package/dist/ops/qkv.js +8 -4
- package/dist/ops/reshape16.d.ts +2 -0
- package/dist/ops/reshape16.js +43 -0
- package/dist/ops/rope.d.ts +1 -1
- package/dist/ops/rope.js +8 -10
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.d.ts +2 -0
- package/dist/ops/slice16.js +9 -0
- package/dist/ops/softmax16.d.ts +2 -0
- package/dist/ops/softmax16.js +12 -0
- package/dist/ops/sub16.d.ts +2 -0
- package/dist/ops/sub16.js +8 -0
- package/dist/ops/sum16.d.ts +2 -0
- package/dist/ops/sum16.js +13 -0
- package/dist/ops/transpose16.d.ts +3 -0
- package/dist/ops/transpose16.js +41 -0
- package/dist/ops/unpack16.d.ts +2 -0
- package/dist/ops/unpack16.js +6 -0
- package/dist/ops/webgl/adamAdjust.js +3 -2
- package/dist/ops/webgl/adamMoments.js +2 -1
- package/dist/ops/webgl/appendCache.js +2 -1
- package/dist/ops/webgl/attentionMask.js +5 -4
- package/dist/ops/webgl/fusedSoftmax.js +6 -4
- package/dist/ops/webgl/gatherSub.js +7 -6
- package/dist/ops/webgl/gelu.js +3 -2
- package/dist/ops/webgl/log.js +12 -27
- package/dist/ops/webgl/matMul16.d.ts +1 -0
- package/dist/ops/webgl/matMul16.js +37 -0
- package/dist/ops/webgl/matMulGelu.js +17 -15
- package/dist/ops/webgl/matMulMul.js +13 -12
- package/dist/ops/webgl/mulDropout.js +9 -8
- package/dist/ops/webgl/normRMS.js +8 -7
- package/dist/ops/webgl/qkv.js +6 -5
- package/dist/ops/webgl/rope.js +11 -10
- package/dist/ops/webgl/scatterSub.js +6 -5
- package/dist/ops/webgpu/adamAdjust.js +12 -10
- package/dist/ops/webgpu/adamMoments.js +27 -22
- package/dist/ops/webgpu/add16.d.ts +1 -0
- package/dist/ops/webgpu/add16.js +14 -0
- package/dist/ops/webgpu/appendCache.js +64 -17
- package/dist/ops/webgpu/attentionMask.js +19 -62
- package/dist/ops/webgpu/attentionMask32_program.d.ts +19 -0
- package/dist/ops/webgpu/attentionMask32_program.js +54 -0
- package/dist/ops/webgpu/concat16.d.ts +19 -0
- package/dist/ops/webgpu/concat16.js +128 -0
- package/dist/ops/webgpu/gatherSub.js +9 -7
- package/dist/ops/webgpu/gelu.js +78 -31
- package/dist/ops/webgpu/index.js +12 -0
- package/dist/ops/webgpu/matMul16.d.ts +1 -0
- package/dist/ops/webgpu/matMul16.js +58 -0
- package/dist/ops/webgpu/matMul16_program.d.ts +42 -0
- package/dist/ops/webgpu/matMul16_program.js +336 -0
- package/dist/ops/webgpu/mul16.d.ts +1 -0
- package/dist/ops/webgpu/mul16.js +14 -0
- package/dist/ops/webgpu/normRMS.js +21 -40
- package/dist/ops/webgpu/normRMS16_program.d.ts +9 -0
- package/dist/ops/webgpu/normRMS16_program.js +24 -0
- package/dist/ops/webgpu/normRMS32_program.d.ts +9 -0
- package/dist/ops/webgpu/normRMS32_program.js +24 -0
- package/dist/ops/webgpu/normRMSGrad.js +113 -64
- package/dist/ops/webgpu/pack16.d.ts +1 -0
- package/dist/ops/webgpu/pack16.js +19 -0
- package/dist/ops/webgpu/pack16_program.d.ts +19 -0
- package/dist/ops/webgpu/pack16_program.js +92 -0
- package/dist/ops/webgpu/qkv.js +20 -55
- package/dist/ops/webgpu/rope.js +77 -22
- package/dist/ops/webgpu/scatterSub.js +9 -7
- package/dist/ops/webgpu/slice16.d.ts +7 -0
- package/dist/ops/webgpu/slice16.js +71 -0
- package/dist/{variable-Bm2OFwGI.js → ops/webgpu/softmax16.d.ts} +2 -8
- package/dist/ops/webgpu/softmax16.js +23 -0
- package/dist/ops/webgpu/softmax16_program.d.ts +13 -0
- package/dist/ops/webgpu/softmax16_program.js +73 -0
- package/dist/ops/webgpu/softmax16_subgroup_program.d.ts +17 -0
- package/dist/ops/webgpu/softmax16_subgroup_program.js +75 -0
- package/dist/ops/webgpu/softmax16grad.d.ts +1 -0
- package/dist/ops/webgpu/softmax16grad.js +38 -0
- package/dist/ops/webgpu/sub16.d.ts +1 -0
- package/dist/ops/webgpu/sub16.js +14 -0
- package/dist/ops/webgpu/sum16.d.ts +1 -0
- package/dist/ops/webgpu/sum16.js +40 -0
- package/dist/ops/webgpu/transpose16.d.ts +1 -0
- package/dist/ops/webgpu/transpose16.js +35 -0
- package/dist/ops/webgpu/transpose16_program.d.ts +16 -0
- package/dist/ops/webgpu/transpose16_program.js +50 -0
- package/dist/ops/webgpu/transpose16_shared_program.d.ts +15 -0
- package/dist/ops/webgpu/transpose16_shared_program.js +71 -0
- package/dist/ops/webgpu/unpack16.d.ts +1 -0
- package/dist/ops/webgpu/unpack16.js +49 -0
- package/dist/ops/webgpu/utils/binary_op.d.ts +19 -0
- package/dist/ops/webgpu/utils/binary_op.js +79 -0
- package/dist/ops/webgpu/utils/deviceInfo.d.ts +7 -0
- package/dist/ops/webgpu/utils/deviceInfo.js +11 -0
- package/dist/ops/webgpu/utils/reductions.d.ts +32 -4
- package/dist/ops/webgpu/utils/reductions.js +236 -45
- package/dist/ops-CNI3TwqM.js +645 -0
- package/dist/pack16-CFUqumar.js +41 -0
- package/dist/{papaparse.min-C8l2Kvo1.js → papaparse.min-C0cScC2i.js} +2 -8
- package/dist/{parquet-C0Tlmv9c.js → parquet-BE8MU_ge.js} +201 -278
- package/dist/patches/PackedTensor.d.ts +12 -0
- package/dist/patches/PackedTensor.js +11 -0
- package/dist/patches/engine.d.ts +261 -0
- package/dist/patches/engine.js +10 -0
- package/dist/patches/tape.d.ts +12 -0
- package/dist/patches/tape.js +5 -0
- package/dist/patches/webgpu_backend.d.ts +18 -0
- package/dist/patches/webgpu_backend.js +57 -0
- package/dist/{tensor-CZr4dh61.js → patches/webgpu_base.d.ts} +5 -8
- package/dist/patches/webgpu_base.js +34 -0
- package/dist/patches/webgpu_program.d.ts +36 -0
- package/dist/patches/webgpu_program.js +401 -0
- package/dist/{pdf-kJD-f258.js → pdf-NIhmP3sq.js} +424 -428
- package/dist/random_width-DY6Kk2Dl.js +10051 -0
- package/dist/range-BMS52eQi.js +11 -0
- package/dist/reciprocal-CTmshQ9J.js +10 -0
- package/dist/{register_all_kernels-DIGpEwcf.js → register_all_kernels-Bwu1PTuU.js} +719 -9766
- package/dist/relu-yZ2-7WxU.js +10 -0
- package/dist/reshape-DevtBWtf.js +10 -0
- package/dist/rope-B5UUMsPi.js +32 -0
- package/dist/{scatter_nd_util-BQdz--Gn.js → scatter_nd_util-5EL-8VAQ.js} +1 -1
- package/dist/selu_util-D1w6yyTO.js +303 -0
- package/dist/{shared-DuP7ue-R.js → shared-BRksrJb3.js} +1 -17
- package/dist/shared-BuAXb4CI.js +2145 -0
- package/dist/sin-BGfy2HZo.js +16 -0
- package/dist/slice-D_gkkqZK.js +13 -0
- package/dist/slice_util-DtEldBfK.js +261 -0
- package/dist/softmax-ZHVebtR1.js +13 -0
- package/dist/split-DrfihRpZ.js +10 -0
- package/dist/squeeze-DZEpeblb.js +11 -0
- package/dist/stack-yOIAalTq.js +13 -0
- package/dist/sum-_fzj5ZTB.js +12 -0
- package/dist/tensor-DdQUJZlz.js +909 -0
- package/dist/tensor-f35l8Odg.js +8 -0
- package/dist/tensor1d-CeZuc-Rv.js +12 -0
- package/dist/tensor2d-G4Ys2GxX.js +15 -0
- package/dist/tensor4d-B8roDgtc.js +15 -0
- package/dist/tensor_util-DV-FP5Q3.js +523 -0
- package/dist/tfjs_backend-kNyO5L2d.js +653 -0
- package/dist/tile-BzyEiF-F.js +13 -0
- package/dist/tokeniser/CharTokeniser.js +1 -1
- package/dist/tokeniser/bpe.js +1 -1
- package/dist/training/Adam.d.ts +2 -1
- package/dist/training/Adam.js +12 -28
- package/dist/training/AdamExt.d.ts +1 -0
- package/dist/training/AdamExt.js +2 -2
- package/dist/training/DatasetBuilder.js +3 -20
- package/dist/training/FullTrainer.js +55 -48
- package/dist/training/Trainer.d.ts +11 -6
- package/dist/training/Trainer.js +51 -39
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/transpose-DKELTqhe.js +38 -0
- package/dist/utilities/arrayClose.js +7 -7
- package/dist/utilities/dummy.js +35 -27
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.d.ts +7 -0
- package/dist/utilities/packed.js +716 -0
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.d.ts +5 -0
- package/dist/utilities/sentences.js +41 -0
- package/dist/utilities/weights.js +2 -2
- package/dist/variable-Bhn5bHYv.js +7 -0
- package/dist/{webgpu_program-DkQJOJSd.js → webgpu_program-Cigz-7RF.js} +15 -44
- package/dist/webgpu_util-BBCnKm2X.js +65 -0
- package/dist/zeros-2gldETuK.js +14 -0
- package/package.json +4 -3
- package/dist/Reshape-Bowtk9BP.js +0 -127
- package/dist/Reshape-DUqYftGC.js +0 -30
- package/dist/backend_util-CJIiDoV1.js +0 -749
- package/dist/broadcast_to-DzlNweb8.js +0 -44
- package/dist/concat-B912vBbo.js +0 -33
- package/dist/dropout-C-csYCLj.js +0 -193
- package/dist/exports_initializers-B8iZMgQ0.js +0 -16
- package/dist/gather-Dnpgw-YQ.js +0 -25
- package/dist/index-BzFyqcy-.js +0 -4457
- package/dist/index-C1rx_Ajs.js +0 -12076
- package/dist/kernel_funcs_utils-DKLK0Mg3.js +0 -466
- package/dist/log_sum_exp-DO6z8tSE.js +0 -103
- package/dist/mat_mul-DzjTFx-u.js +0 -27
- package/dist/mod-Dobti4j4.js +0 -27
- package/dist/ones-tIJeHlq-.js +0 -29
- package/dist/ops/fusedSoftmax.d.ts +0 -2
- package/dist/ops/fusedSoftmax.js +0 -10
- package/dist/ops/grads/fusedSoftmax.js +0 -22
- package/dist/ops-LuCMAnmM.js +0 -1525
- package/dist/random_width-CXVRloNK.js +0 -13670
- package/dist/range-CWcz7xFA.js +0 -26
- package/dist/reciprocal-C4rNcM-S.js +0 -25
- package/dist/relu-BjCh_SYb.js +0 -25
- package/dist/reshape-CnIwVG1c.js +0 -25
- package/dist/selu_util-OtRzVwW5.js +0 -719
- package/dist/shared-DmRsFyaJ.js +0 -3134
- package/dist/sin-gpDNRxE0.js +0 -47
- package/dist/slice-d0Vo9XTN.js +0 -28
- package/dist/softmax-D7Jj3p_P.js +0 -28
- package/dist/split-DK2k5eHf.js +0 -25
- package/dist/stack-DFatutCx.js +0 -27
- package/dist/sum-CJ0ULhmt.js +0 -27
- package/dist/tensor1d-vML0r3q6.js +0 -27
- package/dist/tensor2d-D76QGjF3.js +0 -30
- package/dist/tensor4d-Df1WlVDY.js +0 -30
- package/dist/webgpu_util-pLEV9tks.js +0 -80
- package/dist/zeros-Bj5rMYA7.js +0 -52

package/dist/relu-yZ2-7WxU.js
@@ -0,0 +1,10 @@
+import { A as e, B as n, E as s } from "./index-ZyQhjEPo.js";
+import { e as t } from "./tensor_util-DV-FP5Q3.js";
+function u(r) {
+  const o = { x: n(r, "x", "relu") };
+  return s.runKernel(t, o);
+}
+const p = /* @__PURE__ */ e({ relu_: u });
+export {
+  p as r
+};

package/dist/reshape-DevtBWtf.js
@@ -0,0 +1,10 @@
+import { A as n, B as t, E as a } from "./index-ZyQhjEPo.js";
+import { b as p } from "./tensor_util-DV-FP5Q3.js";
+function c(r, s) {
+  const e = { x: t(r, "x", "reshape", "string_or_numeric") }, o = { shape: s };
+  return a.runKernel(p, e, o);
+}
+const x = /* @__PURE__ */ n({ reshape_: c });
+export {
+  x as r
+};

package/dist/rope-B5UUMsPi.js
@@ -0,0 +1,32 @@
+import { e as i } from "./index-ZyQhjEPo.js";
+import "./random_width-DY6Kk2Dl.js";
+import "./register_all_kernels-Bwu1PTuU.js";
+import "./index-Cp39cXWe.js";
+import "./dataset-0xP8GjwI.js";
+import "./ops/cpu/rope.js";
+import "./ops/webgl/rope.js";
+import { isPackedTensor as a, packTensor as s } from "./utilities/packed.js";
+import { a as m } from "./tensor_util-DV-FP5Q3.js";
+const c = {
+  kernelName: "Rope",
+  inputsToSave: [],
+  outputsToSave: [],
+  gradFunc: (e, t, r) => {
+    const { ropeCache: n } = r, o = a(e), p = u(e, n, 0, !0);
+    return { x: () => o ? s(p) : p };
+  }
+};
+m(c);
+function u(e, t, r, n = !1) {
+  t.ensureRopeCache(e.shape[1] + r);
+  const o = i().runKernel("Rope", { x: e }, {
+    pastLen: r,
+    negSin: n,
+    ropeCache: t
+  });
+  return a(e) ? s(o) : o;
+}
+export {
+  c as a,
+  u as r
+};

package/dist/{scatter_nd_util-BQdz--Gn.js → scatter_nd_util-5EL-8VAQ.js}
@@ -1,4 +1,4 @@
-import {
+import { s as p, i as w } from "./tensor-DdQUJZlz.js";
 function k(o, t, r) {
   const n = t.rank > 1 ? t.shape[t.rank - 1] : 1, e = t.rank > 1 ? t.rank - 1 : 1, h = `Must have updates.shape = indices.shape[:batchDim] + shape[sliceDim:], got updates.shape: ${r.shape}, indices.shape: ${t.shape}, shape: ${o}, sliceDim: ${n}, and batchDim: ${e}.`;
   if (r.rank < e)

package/dist/selu_util-D1w6yyTO.js
@@ -0,0 +1,303 @@
+import { a as K, H as P } from "./tensor-DdQUJZlz.js";
+import { A as I, B as b, E as U, m as q, q as G } from "./index-ZyQhjEPo.js";
+import { U as X, V as Y, W as Z, X as J, Y as Q, Z as S } from "./tensor_util-DV-FP5Q3.js";
+import { r as d } from "./relu-yZ2-7WxU.js";
+import { r as z } from "./reshape-DevtBWtf.js";
+import { s as tt } from "./sum-_fzj5ZTB.js";
+function Ct(t, n, e, o, s = "NHWC", i) {
+  const l = t[3], r = [...n, l], u = lt(s);
+  return B(t, r, e, i, o, null, null, u);
+}
+function Mt(t, n, e, o, s, i, l = "channelsLast") {
+  const [r, u] = R(n);
+  let c;
+  if (l === "channelsLast")
+    c = [r, u, t[3], t[3]];
+  else if (l === "channelsFirst")
+    c = [r, u, t[1], t[1]];
+  else
+    throw new Error(`Unknown dataFormat ${l}`);
+  return B(t, c, e, o, s, i, !1, l);
+}
+function kt(t, n, e, o, s, i, l = "NDHWC") {
+  const [r, u, c] = v(n);
+  let h, f;
+  if (l === "NDHWC")
+    f = "channelsLast", h = [r, u, c, t[4], t[4]];
+  else if (l === "NCDHW")
+    f = "channelsFirst", h = [r, u, c, t[1], t[1]];
+  else
+    throw new Error(`Unknown dataFormat ${l}`);
+  return nt(t, h, e, o, s, !1, f, i);
+}
+function B(t, n, e, o, s, i, l = !1, r = "channelsLast") {
+  let [u, c, h, f] = [-1, -1, -1, -1];
+  if (r === "channelsLast")
+    [u, c, h, f] = t;
+  else if (r === "channelsFirst")
+    [u, f, c, h] = t;
+  else
+    throw new Error(`Unknown dataFormat ${r}`);
+  const [a, p, , g] = n, [$, w] = R(e), [m, y] = R(o), D = k(a, m), A = k(p, y), { padInfo: x, outHeight: E, outWidth: L } = st(s, c, h, $, w, D, A, i, r), C = l ? g * f : g;
+  let M;
+  return r === "channelsFirst" ? M = [u, C, E, L] : r === "channelsLast" && (M = [u, E, L, C]), {
+    batchSize: u,
+    dataFormat: r,
+    inHeight: c,
+    inWidth: h,
+    inChannels: f,
+    outHeight: E,
+    outWidth: L,
+    outChannels: C,
+    padInfo: x,
+    strideHeight: $,
+    strideWidth: w,
+    filterHeight: a,
+    filterWidth: p,
+    effectiveFilterHeight: D,
+    effectiveFilterWidth: A,
+    dilationHeight: m,
+    dilationWidth: y,
+    inShape: t,
+    outShape: M,
+    filterShape: n
+  };
+}
+function nt(t, n, e, o, s, i = !1, l = "channelsLast", r) {
+  let [u, c, h, f, a] = [-1, -1, -1, -1, -1];
+  if (l === "channelsLast")
+    [u, c, h, f, a] = t;
+  else if (l === "channelsFirst")
+    [u, a, c, h, f] = t;
+  else
+    throw new Error(`Unknown dataFormat ${l}`);
+  const [p, g, $, , w] = n, [m, y, D] = v(e), [A, x, E] = v(o), L = k(p, A), C = k(g, x), M = k($, E), { padInfo: j, outDepth: W, outHeight: H, outWidth: N } = rt(s, c, h, f, m, y, D, L, C, M, r), O = i ? w * a : w;
+  let _;
+  return l === "channelsFirst" ? _ = [u, O, W, H, N] : l === "channelsLast" && (_ = [u, W, H, N, O]), {
+    batchSize: u,
+    dataFormat: l,
+    inDepth: c,
+    inHeight: h,
+    inWidth: f,
+    inChannels: a,
+    outDepth: W,
+    outHeight: H,
+    outWidth: N,
+    outChannels: O,
+    padInfo: j,
+    strideDepth: m,
+    strideHeight: y,
+    strideWidth: D,
+    filterDepth: p,
+    filterHeight: g,
+    filterWidth: $,
+    effectiveFilterDepth: L,
+    effectiveFilterHeight: C,
+    effectiveFilterWidth: M,
+    dilationDepth: A,
+    dilationHeight: x,
+    dilationWidth: E,
+    inShape: t,
+    outShape: _,
+    filterShape: n
+  };
+}
+function et(t, n, e, o, s) {
+  o == null && (o = F(t, n, e));
+  const i = t[0], l = t[1], r = T((i - n + 2 * o) / e + 1, s), u = T((l - n + 2 * o) / e + 1, s);
+  return [r, u];
+}
+function ot(t, n, e, o, s, i) {
+  s == null && (s = F(t, n[0], o[0]));
+  const l = [0, 0, 0, e];
+  for (let r = 0; r < 3; r++)
+    t[r] + 2 * s >= n[r] && (l[r] = T((t[r] - n[r] + 2 * s) / o[r] + 1, i));
+  return l;
+}
+function F(t, n, e, o = 1) {
+  const s = k(n, o);
+  return Math.floor((t[0] * (e - 1) - e + s) / 2);
+}
+function R(t) {
+  return typeof t == "number" ? [t, t, t] : t.length === 2 ? [t[0], t[1], 1] : t;
+}
+function v(t) {
+  return typeof t == "number" ? [t, t, t] : t;
+}
+function k(t, n) {
+  return n <= 1 ? t : t + (t - 1) * (n - 1);
+}
+function st(t, n, e, o, s, i, l, r, u) {
+  let c, h, f;
+  if (typeof t == "number") {
+    c = { top: t, bottom: t, left: t, right: t, type: t === 0 ? "VALID" : "NUMBER" };
+    const p = et([n, e], i, o, t, r);
+    h = p[0], f = p[1];
+  } else if (t === "same") {
+    h = Math.ceil(n / o), f = Math.ceil(e / s);
+    const a = Math.max(0, (h - 1) * o + i - n), p = Math.max(0, (f - 1) * s + l - e), g = Math.floor(a / 2), $ = a - g, w = Math.floor(p / 2), m = p - w;
+    c = { top: g, bottom: $, left: w, right: m, type: "SAME" };
+  } else if (t === "valid")
+    c = { top: 0, bottom: 0, left: 0, right: 0, type: "VALID" }, h = Math.ceil((n - i + 1) / o), f = Math.ceil((e - l + 1) / s);
+  else if (typeof t == "object") {
+    const a = u === "channelsLast" ? t[1][0] : t[2][0], p = u === "channelsLast" ? t[1][1] : t[2][1], g = u === "channelsLast" ? t[2][0] : t[3][0], $ = u === "channelsLast" ? t[2][1] : t[3][1];
+    c = { top: a, bottom: p, left: g, right: $, type: a === 0 && p === 0 && g === 0 && $ === 0 ? "VALID" : "EXPLICIT" }, h = T((n - i + a + p) / o + 1, r), f = T((e - l + g + $) / s + 1, r);
+  } else
+    throw Error(`Unknown padding parameter: ${t}`);
+  return { padInfo: c, outHeight: h, outWidth: f };
+}
+function rt(t, n, e, o, s, i, l, r, u, c, h) {
+  let f, a, p, g;
+  if (t === "valid" && (t = 0), typeof t == "number") {
+    f = {
+      top: t,
+      bottom: t,
+      left: t,
+      right: t,
+      front: t,
+      back: t,
+      type: t === 0 ? "VALID" : "NUMBER"
+    };
+    const w = ot([n, e, o, 1], [r, u, c], 1, [s, i, l], t, h);
+    a = w[0], p = w[1], g = w[2];
+  } else if (t === "same") {
+    a = Math.ceil(n / s), p = Math.ceil(e / i), g = Math.ceil(o / l);
+    const $ = (a - 1) * s + r - n, w = (p - 1) * i + u - e, m = (g - 1) * l + c - o, y = Math.floor($ / 2), D = $ - y, A = Math.floor(w / 2), x = w - A, E = Math.floor(m / 2), L = m - E;
+    f = { top: A, bottom: x, left: E, right: L, front: y, back: D, type: "SAME" };
+  } else
+    throw Error(`Unknown padding parameter: ${t}`);
+  return { padInfo: f, outDepth: a, outHeight: p, outWidth: g };
+}
+function T(t, n) {
+  if (!n)
+    return Math.trunc(t);
+  switch (n) {
+    case "round":
+      return Math.round(t);
+    case "ceil":
+      return Math.ceil(t);
+    case "floor":
+      return Math.floor(t);
+    default:
+      throw new Error(`Unknown roundingMode ${n}`);
+  }
+}
+function V(t) {
+  const [n, e, o] = R(t);
+  return n === 1 && e === 1 && o === 1;
+}
+function It(t, n) {
+  return V(t) || V(n);
+}
+function Ut(t) {
+  return R(t).every((n) => n > 0);
+}
+function lt(t) {
+  if (t === "NHWC")
+    return "channelsLast";
+  if (t === "NCHW")
+    return "channelsFirst";
+  throw new Error(`Unknown dataFormat ${t}`);
+}
+function Rt(t, n, e) {
+  if (e != null) {
+    if (typeof n == "string")
+      throw Error(`Error in ${t}: pad must be an integer when using dimRoundingMode ${e} but got pad ${n}.`);
+    if (typeof n == "number")
+      K(P(n), () => `Error in ${t}: pad must be an integer when using dimRoundingMode ${e} but got pad ${n}.`);
+    else if (typeof n == "object")
+      n.forEach((o) => {
+        o.forEach((s) => {
+          K(P(s), () => `Error in ${t}: pad must be an integer when using dimRoundingMode ${e} but got pad ${s}.`);
+        });
+      });
+    else
+      throw Error(`Error in ${t}: Unknown padding parameter: ${n}`);
+  }
+}
+function ut(t) {
+  const e = { x: b(t, "x", "sigmoid", "float32") };
+  return U.runKernel(X, e);
+}
+const ct = /* @__PURE__ */ I({ sigmoid_: ut });
+function ft(t) {
+  const e = { x: b(t, "x", "elu", "float32") };
+  return U.runKernel(Y, e);
+}
+const it = /* @__PURE__ */ I({ elu_: ft });
+function ht(t, n = 0.2) {
+  const o = { x: b(t, "x", "leakyRelu") }, s = { alpha: n };
+  return U.runKernel(Z, o, s);
+}
+const at = /* @__PURE__ */ I({ leakyRelu_: ht });
+function pt(t, n) {
+  const e = b(t, "x", "prelu"), o = b(n, "alpha", "prelu"), s = { x: e, alpha: o };
+  return U.runKernel(J, s);
+}
+const gt = /* @__PURE__ */ I({ prelu_: pt });
+function wt(t) {
+  const e = { x: b(t, "x", "relu6") };
+  return U.runKernel(Q, e);
+}
+const $t = /* @__PURE__ */ I({ relu6_: wt });
+function mt(t, n = 0) {
+  const o = { x: b(t, "x", "step") }, s = { alpha: n };
+  return U.runKernel(S, o, s);
+}
+const Et = /* @__PURE__ */ I({ step_: mt });
+function Tt(t, n, e) {
+  if (e == null || e === "linear")
+    return t;
+  if (e === "relu")
+    return q(t, Et(n));
+  throw new Error(`Cannot compute gradient for fused activation ${e}.`);
+}
+function Wt(t, n) {
+  let e = n;
+  const o = G(t.shape, n.shape);
+  return o.length > 0 && (e = tt(e, o)), z(e, t.shape);
+}
+function Ht(t, n, e, o) {
+  if (n === "linear")
+    return t;
+  if (n === "relu")
+    return d(t);
+  if (n === "elu")
+    return it(t);
+  if (n === "relu6")
+    return $t(t);
+  if (n === "prelu")
+    return gt(t, e);
+  if (n === "leakyrelu")
+    return at(t, o);
+  if (n === "sigmoid")
+    return ct(t);
+  throw new Error(`Unknown fused activation ${n}.`);
+}
+const Nt = (t, n) => !(t > 0) || n === "linear";
+const Ot = 1.7580993408473768, _t = 1.0507009873554805;
+export {
+  _t as S,
+  Ot as a,
+  Ht as b,
+  Rt as c,
+  B as d,
+  nt as e,
+  F as f,
+  Ct as g,
+  Mt as h,
+  kt as i,
+  lt as j,
+  It as k,
+  Wt as l,
+  Tt as m,
+  Ut as n,
+  it as o,
+  Et as p,
+  ct as q,
+  at as r,
+  Nt as s,
+  V as t,
+  gt as u,
+  $t as v
+};

package/dist/{shared-DuP7ue-R.js → shared-BRksrJb3.js}
@@ -1,20 +1,4 @@
-import { s as l } from "./shared-
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
+import { s as l } from "./shared-BuAXb4CI.js";
 const { addImpl: m, bincountImpl: s, bincountReduceImpl: a, bitwiseAndImpl: I, castImpl: e, ceilImpl: t, concatImpl: r, equalImpl: i, expImpl: C, expm1Impl: P, floorImpl: U, gatherNdImpl: n, gatherV2Impl: g, greaterImpl: o, greaterEqualImpl: c, lessImpl: u, lessEqualImpl: d, linSpaceImpl: q, logImpl: R, maxImpl: h, maximumImpl: x, minimumImpl: b, multiplyImpl: E, negImpl: S, notEqualImpl: T, prodImpl: w, raggedGatherImpl: y, raggedRangeImpl: A, raggedTensorToTensorImpl: F, rangeImpl: G, rsqrtImpl: N, scatterImpl: f, sigmoidImpl: k, simpleAbsImpl: B, sliceImpl: H, sparseFillEmptyRowsImpl: K, sparseReshapeImpl: V, sparseSegmentReductionImpl: j, sqrtImpl: v, staticRegexReplaceImpl: z, stridedSliceImpl: D, stringNGramsImpl: J, stringSplitImpl: L, stringToHashBucketFastImpl: M, subImpl: O, tileImpl: Q, topKImpl: W, transposeImpl: X, uniqueImpl: Y } = l;
 export {
   b as A,