@genai-fi/nanogpt 0.9.1 → 0.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +352 -14
- package/dist/Generator.js +69 -78
- package/dist/{RealDiv-D4EzDsC0.js → RealDiv-DgA3z9oO.js} +32 -206
- package/dist/Reshape-CF6odzV4.js +16 -0
- package/dist/Reshape-_kILl6tK.js +81 -0
- package/dist/TeachableLLM.js +28 -22
- package/dist/Trainer.d.ts +2 -0
- package/dist/Trainer.js +3 -2
- package/dist/{axis_util-TbGYJ208.js → axis_util-BvHEw88j.js} +7 -23
- package/dist/backend.d.ts +2 -1
- package/dist/backend.js +10 -4
- package/dist/backend_util-D-rUb2ty.js +474 -0
- package/dist/backend_webgpu-B0u2ndUn.js +547 -0
- package/dist/binary_op_util-pKXltfxI.js +192 -0
- package/dist/broadcast_to-CwF7XIeu.js +30 -0
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/check.d.ts +1 -1
- package/dist/checks/check.js +8 -8
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/index.d.ts +2 -0
- package/dist/checks/index.js +7 -5
- package/dist/checks/matMulGelu.js +6 -6
- package/dist/checks/normRMS.js +7 -7
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.d.ts +1 -0
- package/dist/checks/packUnpack.js +18 -0
- package/dist/checks/qkv.js +12 -27
- package/dist/checks/rope.js +2 -2
- package/dist/checks/weights.js +18 -16
- package/dist/complex-CSlYz-2T.js +13 -0
- package/dist/complex_util-Yc1A_gV1.js +55 -0
- package/dist/concat-BHlIJeyT.js +19 -0
- package/dist/concat_util-DcJk7YHS.js +22 -0
- package/dist/data/docx.js +1 -1
- package/dist/data/parquet.js +2 -2
- package/dist/data/pdf.js +1 -1
- package/dist/data/textLoader.js +1 -1
- package/dist/{dataset-DlZtKmBq.js → dataset-0xP8GjwI.js} +136 -236
- package/dist/dropout-C1pM3f11.js +99 -0
- package/dist/expand_dims-BPG4fwBP.js +13 -0
- package/dist/exports_initializers-xuidcwI4.js +7 -0
- package/dist/gather-DykLGqmW.js +10 -0
- package/dist/{gelu-Bp_-935b.js → gelu-CNLFZWea.js} +11 -10
- package/dist/{gpgpu_math-CDaYiyE_.js → gpgpu_math-DDVJCn6-.js} +90 -265
- package/dist/{index-C4L8Cm77.js → index-CieiGp4Y.js} +14 -14
- package/dist/index-CjOj7j-u.js +7308 -0
- package/dist/{index-Tf7vU29b.js → index-Cp39cXWe.js} +3 -10
- package/dist/{index-Dwqa6Zy2.js → index-DvYrXKkX.js} +2 -2
- package/dist/index-ZyQhjEPo.js +2157 -0
- package/dist/{jszip.min-CjP2V1VV.js → jszip.min-Bz5-11Bk.js} +56 -57
- package/dist/kernel_funcs_utils-Dg_-E44D.js +308 -0
- package/dist/layers/BaseLayer.d.ts +1 -0
- package/dist/layers/BaseLayer.js +7 -6
- package/dist/layers/CausalSelfAttention.d.ts +0 -1
- package/dist/layers/CausalSelfAttention.js +56 -55
- package/dist/layers/MLP.js +15 -16
- package/dist/layers/PositionEmbedding.js +5 -14
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.d.ts +2 -0
- package/dist/layers/RoPECache.js +22 -17
- package/dist/layers/TiedEmbedding.js +22 -17
- package/dist/layers/TransformerBlock.js +21 -20
- package/dist/loader/load.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +39 -33
- package/dist/loader/save.js +1 -1
- package/dist/log_sum_exp-DWI-76TI.js +41 -0
- package/dist/main.d.ts +8 -0
- package/dist/main.js +63 -52
- package/dist/matMul16--R5hOwDG.js +77 -0
- package/dist/mat_mul-DeAh4uTH.js +12 -0
- package/dist/mod-Gt1rMB4n.js +12 -0
- package/dist/models/NanoGPTV1.js +40 -31
- package/dist/models/model.d.ts +2 -0
- package/dist/models/model.js +37 -29
- package/dist/{mulmat_packed_gpu-BT60jmzP.js → mulmat_packed_gpu-BMFhLwta.js} +1 -17
- package/dist/{non_max_suppression_impl-CsEgBuMA.js → non_max_suppression_impl-B2W7YjZB.js} +0 -32
- package/dist/ones-CAMiP4I2.js +15 -0
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.d.ts +1 -1
- package/dist/ops/adamMoments.js +4 -4
- package/dist/ops/add16.d.ts +2 -0
- package/dist/ops/add16.js +9 -0
- package/dist/ops/appendCache.js +16 -9
- package/dist/ops/attentionMask.js +4 -4
- package/dist/ops/concat16.d.ts +2 -0
- package/dist/ops/concat16.js +9 -0
- package/dist/ops/cpu/adamAdjust.js +14 -13
- package/dist/ops/cpu/adamMoments.js +10 -9
- package/dist/ops/cpu/appendCache.js +9 -8
- package/dist/ops/cpu/attentionMask.js +15 -14
- package/dist/ops/cpu/fusedSoftmax.js +13 -12
- package/dist/ops/cpu/gatherSub.js +9 -24
- package/dist/ops/cpu/gelu.js +13 -12
- package/dist/ops/cpu/matMul16.d.ts +1 -0
- package/dist/ops/cpu/matMul16.js +16 -0
- package/dist/ops/cpu/matMulGelu.js +18 -16
- package/dist/ops/cpu/matMulMul.js +8 -7
- package/dist/ops/cpu/mulDropout.js +4 -3
- package/dist/ops/cpu/normRMS.js +11 -10
- package/dist/ops/cpu/qkv.js +17 -13
- package/dist/ops/cpu/rope.js +23 -22
- package/dist/ops/cpu/scatterSub.js +16 -30
- package/dist/ops/dot16.d.ts +2 -0
- package/dist/ops/dot16.js +42 -0
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.d.ts +1 -0
- package/dist/ops/grads/add16.js +27 -0
- package/dist/ops/grads/attentionMask.js +12 -19
- package/dist/ops/grads/gelu.js +4 -3
- package/dist/ops/grads/matMul16.d.ts +2 -0
- package/dist/ops/grads/matMul16.js +9 -0
- package/dist/ops/grads/matMulGelu.js +8 -7
- package/dist/ops/grads/normRMS.js +8 -7
- package/dist/ops/grads/{fusedSoftmax.d.ts → pack16.d.ts} +1 -1
- package/dist/ops/grads/pack16.js +7 -0
- package/dist/ops/grads/qkv.d.ts +3 -1
- package/dist/ops/grads/qkv.js +28 -22
- package/dist/ops/grads/rope.d.ts +2 -1
- package/dist/ops/grads/rope.js +6 -13
- package/dist/ops/grads/softmax16.d.ts +2 -0
- package/dist/ops/grads/softmax16.js +26 -0
- package/dist/ops/grads/unpack16.d.ts +2 -0
- package/dist/ops/grads/unpack16.js +6 -0
- package/dist/ops/grads/utils.d.ts +3 -0
- package/dist/ops/grads/utils.js +10 -0
- package/dist/ops/matMul16.d.ts +15 -0
- package/dist/ops/matMul16.js +13 -0
- package/dist/ops/matMulGelu.js +1 -1
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.d.ts +2 -0
- package/dist/ops/mul16.js +8 -0
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.d.ts +2 -0
- package/dist/ops/pack16.js +6 -0
- package/dist/ops/qkv.d.ts +1 -1
- package/dist/ops/qkv.js +8 -4
- package/dist/ops/reshape16.d.ts +2 -0
- package/dist/ops/reshape16.js +43 -0
- package/dist/ops/rope.d.ts +1 -1
- package/dist/ops/rope.js +8 -10
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.d.ts +2 -0
- package/dist/ops/slice16.js +9 -0
- package/dist/ops/softmax16.d.ts +2 -0
- package/dist/ops/softmax16.js +12 -0
- package/dist/ops/sub16.d.ts +2 -0
- package/dist/ops/sub16.js +8 -0
- package/dist/ops/sum16.d.ts +2 -0
- package/dist/ops/sum16.js +13 -0
- package/dist/ops/transpose16.d.ts +3 -0
- package/dist/ops/transpose16.js +41 -0
- package/dist/ops/unpack16.d.ts +2 -0
- package/dist/ops/unpack16.js +6 -0
- package/dist/ops/webgl/adamAdjust.js +3 -2
- package/dist/ops/webgl/adamMoments.js +2 -1
- package/dist/ops/webgl/appendCache.js +2 -1
- package/dist/ops/webgl/attentionMask.js +5 -4
- package/dist/ops/webgl/fusedSoftmax.js +6 -4
- package/dist/ops/webgl/gatherSub.js +7 -6
- package/dist/ops/webgl/gelu.js +3 -2
- package/dist/ops/webgl/log.js +12 -27
- package/dist/ops/webgl/matMul16.d.ts +1 -0
- package/dist/ops/webgl/matMul16.js +37 -0
- package/dist/ops/webgl/matMulGelu.js +17 -15
- package/dist/ops/webgl/matMulMul.js +13 -12
- package/dist/ops/webgl/mulDropout.js +9 -8
- package/dist/ops/webgl/normRMS.js +8 -7
- package/dist/ops/webgl/qkv.js +6 -5
- package/dist/ops/webgl/rope.js +11 -10
- package/dist/ops/webgl/scatterSub.js +6 -5
- package/dist/ops/webgpu/adamAdjust.js +12 -10
- package/dist/ops/webgpu/adamMoments.js +27 -22
- package/dist/ops/webgpu/add16.d.ts +1 -0
- package/dist/ops/webgpu/add16.js +14 -0
- package/dist/ops/webgpu/appendCache.js +64 -17
- package/dist/ops/webgpu/attentionMask.js +19 -62
- package/dist/ops/webgpu/attentionMask32_program.d.ts +19 -0
- package/dist/ops/webgpu/attentionMask32_program.js +54 -0
- package/dist/ops/webgpu/concat16.d.ts +19 -0
- package/dist/ops/webgpu/concat16.js +128 -0
- package/dist/ops/webgpu/gatherSub.js +9 -7
- package/dist/ops/webgpu/gelu.js +78 -31
- package/dist/ops/webgpu/index.js +12 -0
- package/dist/ops/webgpu/matMul16.d.ts +1 -0
- package/dist/ops/webgpu/matMul16.js +58 -0
- package/dist/ops/webgpu/matMul16_program.d.ts +42 -0
- package/dist/ops/webgpu/matMul16_program.js +336 -0
- package/dist/ops/webgpu/mul16.d.ts +1 -0
- package/dist/ops/webgpu/mul16.js +14 -0
- package/dist/ops/webgpu/normRMS.js +21 -40
- package/dist/ops/webgpu/normRMS16_program.d.ts +9 -0
- package/dist/ops/webgpu/normRMS16_program.js +24 -0
- package/dist/ops/webgpu/normRMS32_program.d.ts +9 -0
- package/dist/ops/webgpu/normRMS32_program.js +24 -0
- package/dist/ops/webgpu/normRMSGrad.js +113 -64
- package/dist/ops/webgpu/pack16.d.ts +1 -0
- package/dist/ops/webgpu/pack16.js +19 -0
- package/dist/ops/webgpu/pack16_program.d.ts +19 -0
- package/dist/ops/webgpu/pack16_program.js +92 -0
- package/dist/ops/webgpu/qkv.js +20 -55
- package/dist/ops/webgpu/rope.js +77 -22
- package/dist/ops/webgpu/scatterSub.js +9 -7
- package/dist/ops/webgpu/slice16.d.ts +7 -0
- package/dist/ops/webgpu/slice16.js +71 -0
- package/dist/{variable-Bm2OFwGI.js → ops/webgpu/softmax16.d.ts} +2 -8
- package/dist/ops/webgpu/softmax16.js +23 -0
- package/dist/ops/webgpu/softmax16_program.d.ts +13 -0
- package/dist/ops/webgpu/softmax16_program.js +73 -0
- package/dist/ops/webgpu/softmax16_subgroup_program.d.ts +17 -0
- package/dist/ops/webgpu/softmax16_subgroup_program.js +75 -0
- package/dist/ops/webgpu/softmax16grad.d.ts +1 -0
- package/dist/ops/webgpu/softmax16grad.js +38 -0
- package/dist/ops/webgpu/sub16.d.ts +1 -0
- package/dist/ops/webgpu/sub16.js +14 -0
- package/dist/ops/webgpu/sum16.d.ts +1 -0
- package/dist/ops/webgpu/sum16.js +40 -0
- package/dist/ops/webgpu/transpose16.d.ts +1 -0
- package/dist/ops/webgpu/transpose16.js +35 -0
- package/dist/ops/webgpu/transpose16_program.d.ts +16 -0
- package/dist/ops/webgpu/transpose16_program.js +50 -0
- package/dist/ops/webgpu/transpose16_shared_program.d.ts +15 -0
- package/dist/ops/webgpu/transpose16_shared_program.js +71 -0
- package/dist/ops/webgpu/unpack16.d.ts +1 -0
- package/dist/ops/webgpu/unpack16.js +49 -0
- package/dist/ops/webgpu/utils/binary_op.d.ts +19 -0
- package/dist/ops/webgpu/utils/binary_op.js +79 -0
- package/dist/ops/webgpu/utils/deviceInfo.d.ts +7 -0
- package/dist/ops/webgpu/utils/deviceInfo.js +11 -0
- package/dist/ops/webgpu/utils/reductions.d.ts +32 -4
- package/dist/ops/webgpu/utils/reductions.js +236 -45
- package/dist/ops-CNI3TwqM.js +645 -0
- package/dist/pack16-CFUqumar.js +41 -0
- package/dist/{papaparse.min-C8l2Kvo1.js → papaparse.min-C0cScC2i.js} +2 -8
- package/dist/{parquet-C0Tlmv9c.js → parquet-BE8MU_ge.js} +201 -278
- package/dist/patches/PackedTensor.d.ts +12 -0
- package/dist/patches/PackedTensor.js +11 -0
- package/dist/patches/engine.d.ts +261 -0
- package/dist/patches/engine.js +10 -0
- package/dist/patches/tape.d.ts +12 -0
- package/dist/patches/tape.js +5 -0
- package/dist/patches/webgpu_backend.d.ts +18 -0
- package/dist/patches/webgpu_backend.js +57 -0
- package/dist/{tensor-CZr4dh61.js → patches/webgpu_base.d.ts} +5 -8
- package/dist/patches/webgpu_base.js +34 -0
- package/dist/patches/webgpu_program.d.ts +36 -0
- package/dist/patches/webgpu_program.js +401 -0
- package/dist/{pdf-kJD-f258.js → pdf-NIhmP3sq.js} +424 -428
- package/dist/random_width-DY6Kk2Dl.js +10051 -0
- package/dist/range-BMS52eQi.js +11 -0
- package/dist/reciprocal-CTmshQ9J.js +10 -0
- package/dist/{register_all_kernels-DIGpEwcf.js → register_all_kernels-Bwu1PTuU.js} +719 -9766
- package/dist/relu-yZ2-7WxU.js +10 -0
- package/dist/reshape-DevtBWtf.js +10 -0
- package/dist/rope-B5UUMsPi.js +32 -0
- package/dist/{scatter_nd_util-BQdz--Gn.js → scatter_nd_util-5EL-8VAQ.js} +1 -1
- package/dist/selu_util-D1w6yyTO.js +303 -0
- package/dist/{shared-DuP7ue-R.js → shared-BRksrJb3.js} +1 -17
- package/dist/shared-BuAXb4CI.js +2145 -0
- package/dist/sin-BGfy2HZo.js +16 -0
- package/dist/slice-D_gkkqZK.js +13 -0
- package/dist/slice_util-DtEldBfK.js +261 -0
- package/dist/softmax-ZHVebtR1.js +13 -0
- package/dist/split-DrfihRpZ.js +10 -0
- package/dist/squeeze-DZEpeblb.js +11 -0
- package/dist/stack-yOIAalTq.js +13 -0
- package/dist/sum-_fzj5ZTB.js +12 -0
- package/dist/tensor-DdQUJZlz.js +909 -0
- package/dist/tensor-f35l8Odg.js +8 -0
- package/dist/tensor1d-CeZuc-Rv.js +12 -0
- package/dist/tensor2d-G4Ys2GxX.js +15 -0
- package/dist/tensor4d-B8roDgtc.js +15 -0
- package/dist/tensor_util-DV-FP5Q3.js +523 -0
- package/dist/tfjs_backend-kNyO5L2d.js +653 -0
- package/dist/tile-BzyEiF-F.js +13 -0
- package/dist/tokeniser/CharTokeniser.js +1 -1
- package/dist/tokeniser/bpe.js +1 -1
- package/dist/training/Adam.d.ts +2 -1
- package/dist/training/Adam.js +12 -28
- package/dist/training/AdamExt.d.ts +1 -0
- package/dist/training/AdamExt.js +2 -2
- package/dist/training/DatasetBuilder.js +3 -20
- package/dist/training/FullTrainer.js +55 -48
- package/dist/training/Trainer.d.ts +11 -6
- package/dist/training/Trainer.js +51 -39
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/transpose-DKELTqhe.js +38 -0
- package/dist/utilities/arrayClose.js +7 -7
- package/dist/utilities/dummy.js +35 -27
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.d.ts +7 -0
- package/dist/utilities/packed.js +716 -0
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.d.ts +5 -0
- package/dist/utilities/sentences.js +41 -0
- package/dist/utilities/weights.js +2 -2
- package/dist/variable-Bhn5bHYv.js +7 -0
- package/dist/{webgpu_program-DkQJOJSd.js → webgpu_program-Cigz-7RF.js} +15 -44
- package/dist/webgpu_util-BBCnKm2X.js +65 -0
- package/dist/zeros-2gldETuK.js +14 -0
- package/package.json +4 -3
- package/dist/Reshape-Bowtk9BP.js +0 -127
- package/dist/Reshape-DUqYftGC.js +0 -30
- package/dist/backend_util-CJIiDoV1.js +0 -749
- package/dist/broadcast_to-DzlNweb8.js +0 -44
- package/dist/concat-B912vBbo.js +0 -33
- package/dist/dropout-C-csYCLj.js +0 -193
- package/dist/exports_initializers-B8iZMgQ0.js +0 -16
- package/dist/gather-Dnpgw-YQ.js +0 -25
- package/dist/index-BzFyqcy-.js +0 -4457
- package/dist/index-C1rx_Ajs.js +0 -12076
- package/dist/kernel_funcs_utils-DKLK0Mg3.js +0 -466
- package/dist/log_sum_exp-DO6z8tSE.js +0 -103
- package/dist/mat_mul-DzjTFx-u.js +0 -27
- package/dist/mod-Dobti4j4.js +0 -27
- package/dist/ones-tIJeHlq-.js +0 -29
- package/dist/ops/fusedSoftmax.d.ts +0 -2
- package/dist/ops/fusedSoftmax.js +0 -10
- package/dist/ops/grads/fusedSoftmax.js +0 -22
- package/dist/ops-LuCMAnmM.js +0 -1525
- package/dist/random_width-CXVRloNK.js +0 -13670
- package/dist/range-CWcz7xFA.js +0 -26
- package/dist/reciprocal-C4rNcM-S.js +0 -25
- package/dist/relu-BjCh_SYb.js +0 -25
- package/dist/reshape-CnIwVG1c.js +0 -25
- package/dist/selu_util-OtRzVwW5.js +0 -719
- package/dist/shared-DmRsFyaJ.js +0 -3134
- package/dist/sin-gpDNRxE0.js +0 -47
- package/dist/slice-d0Vo9XTN.js +0 -28
- package/dist/softmax-D7Jj3p_P.js +0 -28
- package/dist/split-DK2k5eHf.js +0 -25
- package/dist/stack-DFatutCx.js +0 -27
- package/dist/sum-CJ0ULhmt.js +0 -27
- package/dist/tensor1d-vML0r3q6.js +0 -27
- package/dist/tensor2d-D76QGjF3.js +0 -30
- package/dist/tensor4d-Df1WlVDY.js +0 -30
- package/dist/webgpu_util-pLEV9tks.js +0 -80
- package/dist/zeros-Bj5rMYA7.js +0 -52
package/dist/layers/RoPECache.js
CHANGED
@@ -1,16 +1,16 @@
-import { b as …
-import { r as c } from "../reciprocal-…
-import { c as …
-import { r as …
-class …
-constructor(…
-this.config = …
-const …
-if (this.rotaryDim = …
+import { b as n, p as a, t as p, k as r } from "../index-ZyQhjEPo.js";
+import { r as c } from "../reciprocal-CTmshQ9J.js";
+import { c as l, s as f } from "../sin-BGfy2HZo.js";
+import { r as h } from "../range-BMS52eQi.js";
+class g {
+  constructor(s) {
+    this.config = s;
+    const i = this.config.nEmbed / this.config.nHead;
+    if (this.rotaryDim = i, this.rotaryDim % 2 !== 0)
       throw new Error("rotaryDim must be even");
     this.ropeBase = 1e4;
-const …
-this.ropeInvFreq = c(…
+    const o = h(0, this.rotaryDim, 2, "float32"), e = o.div(n(this.rotaryDim, "float32")), t = a(n(this.ropeBase, "float32"), e);
+    this.ropeInvFreq = c(t), e.dispose(), t.dispose(), o.dispose(), this.config.useRope === !1 ? (this.ropeCos = null, this.ropeSin = null, this.ropeNegSin = null, this.ropeCacheLen = 0) : p(() => {
       this.ensureRopeCache(this.config.blockSize * 4);
     });
   }
@@ -21,13 +21,15 @@ class D {
   // [cacheLen, rotaryDim/2]
   ropeSin = null;
   // [cacheLen, rotaryDim/2]
+  ropeNegSin = null;
+  // [cacheLen, rotaryDim/2]
   ropeCacheLen = 0;
-ensureRopeCache(…
-…
-if (…
+  ensureRopeCache(s) {
+    p(() => {
+      if (s <= this.ropeCacheLen) return;
       this.ropeCos && this.ropeCos.dispose(), this.ropeSin && this.ropeSin.dispose();
-const …
-this.ropeCos = …
+      const i = Math.max(s, this.ropeCacheLen + this.config.blockSize * 4), e = h(0, i, 1, "float32").expandDims(1).mul(this.ropeInvFreq.expandDims(0));
+      this.ropeCos = r(l(e).expandDims(-1)), this.ropeSin = r(f(e).expandDims(-1)), this.ropeNegSin = r(this.ropeSin.neg()), this.ropeCacheLen = i;
     });
   }
   getCos() {
@@ -36,10 +38,13 @@ class D {
   getSin() {
     return this.ropeSin;
   }
+  getNegSin() {
+    return this.ropeNegSin;
+  }
   dispose() {
     this.ropeCos && this.ropeCos.dispose(), this.ropeSin && this.ropeSin.dispose(), this.ropeInvFreq.dispose();
   }
 }
 export {
-…
+  g as default
 };
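Note: the hunks above add a cached negated sine table (ropeNegSin, exposed through the new getNegSin()), wrap cache rebuilds in a tidy, and grow the cache in blockSize * 4 steps. De-minified into plain tfjs, the rotary tables are built roughly as below; the descriptive names are stand-ins, and the mapping of the chunk's single-letter imports to scalar/pow/tidy/keep is inferred from their call sites.

```ts
import * as tf from '@tensorflow/tfjs';

// Sketch of the rotary cache construction in the hunks above.
// invFreq[j] = 1 / ropeBase^(2j / rotaryDim) over the even channel indices.
function buildRopeTables(rotaryDim: number, cacheLen: number, ropeBase = 1e4) {
  return tf.tidy(() => {
    const invFreq = tf.reciprocal(
      tf.pow(
        tf.scalar(ropeBase, 'float32'),
        tf.range(0, rotaryDim, 2, 'float32').div(tf.scalar(rotaryDim, 'float32'))
      )
    );
    // angles[pos, j] = pos * invFreq[j]; tables cached as [cacheLen, rotaryDim/2, 1].
    const angles = tf.range(0, cacheLen, 1, 'float32')
      .expandDims(1)
      .mul(invFreq.expandDims(0));
    const sin = tf.sin(angles).expandDims(-1);
    return { cos: tf.cos(angles).expandDims(-1), sin, negSin: sin.neg() };
  });
}
```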
package/dist/layers/TiedEmbedding.js
CHANGED
@@ -1,31 +1,36 @@
-import …
-import "../index-…
-import { r as …
-import …
-import { …
-import { …
-…
+import "../random_width-DY6Kk2Dl.js";
+import "../index-ZyQhjEPo.js";
+import { r as s } from "../exports_initializers-xuidcwI4.js";
+import a from "./BaseLayer.js";
+import { dot16 as o } from "../ops/dot16.js";
+import { isPackedTensor as r } from "../utilities/packed.js";
+import { p as m } from "../pack16-CFUqumar.js";
+import { transpose16 as d } from "../ops/transpose16.js";
+import { v as p } from "../variable-Bhn5bHYv.js";
+import { g as h } from "../gather-DykLGqmW.js";
+class g extends a {
   vocabSize;
   embedDim;
   initializer;
   WEIGHTS;
-constructor(…
-super(…
+  constructor(t, i, e) {
+    super(t, e), this.WEIGHTS = i, this.vocabSize = t.vocabSize, this.embedDim = t.nEmbed, this.initializer = s({
       mean: 0,
       stddev: 0.02
-}), this.addVariable(this.WEIGHTS, …
+    }), this.addVariable(this.WEIGHTS, p(this.initializer.apply([this.vocabSize, this.embedDim]), !0, i));
   }
-embed(…
-return …
+  embed(t) {
+    return h(this.getVariable(this.WEIGHTS), t, 0);
   }
-project(…
-…
+  project(t) {
+    const i = r(t) ? m(this.getVariable(this.WEIGHTS), void 0, 32) : this.getVariable(this.WEIGHTS), e = d(i);
+    return r(t) && i.dispose(), o(t, e);
   }
   // Dummy, should not be used.
-forward(…
-return this.project(…
+  forward(t, i) {
+    return this.project(i);
   }
 }
 export {
-…
+  g as default
 };
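Note: project() now special-cases packed-fp16 activations, packing the shared weight matrix on the fly (pack16), transposing with transpose16, multiplying with dot16, and disposing the temporary packed copy afterwards. The weight-tying scheme itself, in plain tfjs for reference (names and shapes here are illustrative, not the chunk's identifiers):

```ts
import * as tf from '@tensorflow/tfjs';

// One [vocabSize, nEmbed] matrix serves both directions:
// embedding is a row gather, projection reuses the same matrix transposed.
const W = tf.randomNormal([256, 64], 0, 0.02);      // vocabSize=256, nEmbed=64
const tokens = tf.tensor1d([3, 17, 42], 'int32');
const embedded = tf.gather(W, tokens, 0);           // lookup: [3, 64]
const logits = tf.matMul(embedded, W, false, true); // project: [3, 256], W transposed
```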
package/dist/layers/TransformerBlock.js
CHANGED
@@ -1,32 +1,33 @@
-import …
-import …
-import …
-import …
-import { k as n, t as …
-…
+import p from "./CausalSelfAttention.js";
+import m from "./MLP.js";
+import d from "./RMSNorm.js";
+import h from "./BaseLayer.js";
+import { k as n, t as u } from "../index-ZyQhjEPo.js";
+import { add16 as l } from "../ops/add16.js";
+class g extends h {
   ln1;
   attn;
   ln2;
   mlp;
   index;
   skipped = !1;
-constructor(…
-super(s, …
+  constructor(e, s, i) {
+    super(s, i), this.index = e, this.ln1 = new d(s, `block_${this.index}_rms1`, this), this.attn = new p(this.index, s, this), this.ln2 = new d(s, `block_${this.index}_rms2`, this), this.mlp = new m(this.index, s, this);
   }
-getMLPOutput(…
-const …
-s.outputEmbeddings ? (n(…
-const …
-return …
+  getMLPOutput(e, s) {
+    const i = this.ln2.call({ training: s.training }, e), t = this.mlp.call(s, i);
+    s.outputEmbeddings ? (n(i), s.embeddings.push({ name: `block_ln2_${this.index}`, tensor: i })) : i.dispose();
+    const o = l(e, t);
+    return e.dispose(), s.outputEmbeddings ? (n(t), s.embeddings.push({ name: `block_mlp_out_${this.index}`, tensor: t })) : t.dispose(), o;
   }
-forward(…
-return …
+  forward(e, s) {
+    return u(() => {
       if (this.skipped)
         return s;
-const …
-…
-const …
-return …
+      const i = this.ln1.call(e, s), t = this.attn.call(e, i);
+      e.outputEmbeddings ? (n(i), e.embeddings.push({ name: `block_ln1_${this.index}`, tensor: i })) : i.dispose();
+      const o = l(s, t);
+      return e.outputEmbeddings ? (n(t), e.embeddings.push({ name: `block_attn_out_${this.index}`, tensor: t })) : t.dispose(), this.getMLPOutput(o, e);
     });
   }
   dispose() {
@@ -34,5 +35,5 @@ class k extends m {
   }
 }
 export {
-…
+  g as default
 };
package/dist/loader/load.js
CHANGED
@@ -1,7 +1,7 @@
 import l from "../tokeniser/CharTokeniser.js";
 import c from "../tokeniser/bpe.js";
 import { load_safetensors as b } from "../utilities/safetensors.js";
-import { …
+import { H as y } from "../index-ZyQhjEPo.js";
 import { dummyPassAsync as u } from "../utilities/dummy.js";
 import _ from "../models/factory.js";
 async function L(e, a, r, t) {
package/dist/loader/oldZipLoad.js
CHANGED
@@ -1,73 +1,79 @@
-import …
+import "../utilities/packed.js";
+import { H as y } from "../index-ZyQhjEPo.js";
 import "../ops/cpu/attentionMask.js";
 import "../ops/webgl/attentionMask.js";
 import "../ops/grads/attentionMask.js";
-import "../…
-import "../…
-import "../…
-import "../…
-import "../register_all_kernels-DIGpEwcf.js";
-import "../index-Tf7vU29b.js";
-import "../dataset-DlZtKmBq.js";
+import "../random_width-DY6Kk2Dl.js";
+import "../register_all_kernels-Bwu1PTuU.js";
+import "../index-Cp39cXWe.js";
+import "../dataset-0xP8GjwI.js";
 import "../ops/cpu/rope.js";
 import "../ops/webgl/rope.js";
-import "../…
+import "../rope-B5UUMsPi.js";
 import "../ops/cpu/appendCache.js";
 import "../ops/webgl/appendCache.js";
-import "../ops/…
-import "../…
-import "../ops/…
-import "../ops/cpu/…
-import "../…
-import "../ops/…
+import "../ops/grads/softmax16.js";
+import "../matMul16--R5hOwDG.js";
+import "../ops/webgl/matMul16.js";
+import "../ops/cpu/matMul16.js";
+import "../pack16-CFUqumar.js";
+import "../ops/transpose16.js";
+import "../ops/reshape16.js";
+import "../ops/cpu/qkv.js";
+import "../ops/webgl/qkv.js";
+import "../ops/grads/qkv.js";
 import "../ops/cpu/normRMS.js";
 import "../ops/webgl/normRMS.js";
 import "../ops/grads/normRMS.js";
+import "../ops/grads/add16.js";
 import "../ops/cpu/gatherSub.js";
 import "../ops/webgl/gatherSub.js";
 import "../ops/cpu/scatterSub.js";
 import "../ops/webgl/scatterSub.js";
-import "../jszip.min-…
+import "../jszip.min-Bz5-11Bk.js";
 import h from "../tokeniser/CharTokeniser.js";
 import k from "../tokeniser/bpe.js";
 import { dummyPassAsync as g } from "../utilities/dummy.js";
 import b from "../models/factory.js";
 import "../Generator.js";
-import "../index-…
+import "../index-DvYrXKkX.js";
 import "../ops/cpu/adamAdjust.js";
 import "../ops/webgl/adamAdjust.js";
 import "../ops/cpu/adamMoments.js";
 import "../ops/webgl/adamMoments.js";
-import "../papaparse.min-…
+import "../papaparse.min-C0cScC2i.js";
+import "../ops/cpu/matMulGelu.js";
+import "../ops/webgl/matMulGelu.js";
+import "../ops/grads/matMulGelu.js";
 import "../ops/cpu/gelu.js";
 import "../ops/webgl/gelu.js";
-import "../gelu-…
+import "../gelu-CNLFZWea.js";
 import "../ops/webgl/log.js";
 import "../checks/normRMS.js";
 import "../checks/normRMSGrad.js";
 import { importWeights as u } from "../utilities/weights.js";
-async function …
-const e = /* @__PURE__ */ new Map(), …
-if (!…
+async function Ot(r) {
+  const e = /* @__PURE__ */ new Map(), p = await r.file("manifest.json")?.async("string");
+  if (!p)
     throw new Error("Manifest file not found in the zip archive");
-const …
-for (const [t, o] of Object.entries(…
+  const s = JSON.parse(p);
+  for (const [t, o] of Object.entries(s.weightSpec))
     e.set(t, { spec: o, data: new Float32Array() });
-const …
-if (!…
+  const a = await r.file("tokeniser.json")?.async("string");
+  if (!a)
     throw new Error("Tokeniser file not found in the zip archive");
-const i = JSON.parse(…
+  const i = JSON.parse(a), f = (i.type ?? "char") === "char" ? new h(i.vocab) : new k(i.vocab, i.merges), c = /* @__PURE__ */ new Map();
   for (const t of Object.keys(r.files))
     if (t.endsWith(".bin")) {
-const o = t.replace(".bin", ""), l = await r.file(t).async("arraybuffer"), w = new Float32Array(l), …
-…
-const d = await u(…
+      const o = t.replace(".bin", ""), l = await r.file(t).async("arraybuffer"), w = new Float32Array(l), n = e.get(o) || { spec: [], data: new Float32Array() };
+      n.data = w, e.set(o, n);
+      const d = await u(n);
       c.set(o, d);
     }
   y();
-const …
-return await g(…
+  const m = b(s.config);
+  return await g(m), m.loadWeights(c), { model: m, tokeniser: f };
 }
 export {
-…
+  Ot as default
 };
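Note: the archive layout is unchanged (manifest.json, tokeniser.json, one .bin entry per weight tensor), but the tokeniser is now chosen by its type field, defaulting to "char". The flow, reduced to plain JSZip calls (a sketch; readArchive and its return shape are illustrative, while the entry names and error messages come from the hunk above):

```ts
import JSZip from 'jszip';

// Sketch of the archive walk in oldZipLoad.js.
async function readArchive(data: ArrayBuffer) {
  const zip = await JSZip.loadAsync(data);
  const manifestText = await zip.file('manifest.json')?.async('string');
  if (!manifestText) throw new Error('Manifest file not found in the zip archive');
  const tokeniserText = await zip.file('tokeniser.json')?.async('string');
  if (!tokeniserText) throw new Error('Tokeniser file not found in the zip archive');
  const tokeniser = JSON.parse(tokeniserText);
  const useCharTokeniser = (tokeniser.type ?? 'char') === 'char'; // new in 0.10.x
  const weights = new Map<string, Float32Array>();
  for (const name of Object.keys(zip.files)) {
    if (!name.endsWith('.bin')) continue;
    const buf = await zip.file(name)!.async('arraybuffer');
    weights.set(name.replace('.bin', ''), new Float32Array(buf));
  }
  return { manifest: JSON.parse(manifestText), useCharTokeniser, tokeniser, weights };
}
```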
package/dist/loader/save.js
CHANGED
package/dist/log_sum_exp-DWI-76TI.js
ADDED
@@ -0,0 +1,41 @@
+import { A as e, B as x, E as p, c as E, n as $ } from "./index-ZyQhjEPo.js";
+import { e as d } from "./axis_util-BvHEw88j.js";
+import { y as h, z as S, L as K } from "./tensor_util-DV-FP5Q3.js";
+import { r as m } from "./reshape-DevtBWtf.js";
+import { s as _ } from "./sum-_fzj5ZTB.js";
+import { p as T } from "./tensor-DdQUJZlz.js";
+function b(s, o = null, n = !1) {
+  const t = { x: x(s, "x", "max") }, r = { reductionIndices: o, keepDims: n };
+  return p.runKernel(h, t, r);
+}
+const A = /* @__PURE__ */ e({ max_: b });
+function I(s) {
+  const n = { x: x(s, "x", "exp") };
+  return p.runKernel(S, n);
+}
+const L = /* @__PURE__ */ e({ exp_: I });
+function M(s) {
+  const n = { x: x(s, "x", "log", "float32") };
+  return p.runKernel(K, n);
+}
+const N = /* @__PURE__ */ e({ log_: M });
+function v(s, o = null, n = !1) {
+  const a = x(s, "x", "logSumExp"), t = T(o, a.shape), r = A(
+    a,
+    t,
+    !0
+    /* keepDims */
+  ), i = E(a, r), l = L(i), f = _(l, t), u = N(f), c = $(m(r, u.shape), u);
+  if (n) {
+    const g = d(c.shape, t);
+    return m(c, g);
+  }
+  return c;
+}
+const P = /* @__PURE__ */ e({ logSumExp_: v });
+export {
+  N as a,
+  L as e,
+  P as l,
+  A as m
+};
package/dist/main.d.ts
CHANGED
@@ -1,3 +1,5 @@
+import { pack16 } from './ops/pack16';
+import { unpack16 } from './ops/unpack16';
 import { default as CausalSelfAttention } from './layers/CausalSelfAttention';
 import { default as MLP } from './layers/MLP';
 import { default as TransformerBlock } from './layers/TransformerBlock';
@@ -17,6 +19,11 @@ export type { TrainingProgress, TrainingLogEntry } from './training/Trainer';
 export type { GPTConfig } from './models/config';
 export { estimateParameterCount, estimateMemoryUsage, estimateTrainingMemoryUsage, estimateResources, validateConfig, } from './utilities/parameters';
 export { default as topP } from './utilities/topP';
+declare const ops: {
+    pack16: typeof pack16;
+    unpack16: typeof unpack16;
+};
+export { ops };
 export { selectBackend } from './backend';
 export { default as performanceTest } from './utilities/performance';
 export declare const layers: {
@@ -28,3 +35,4 @@ export declare const layers: {
 export { default as AdamExt } from './training/AdamExt';
 export { default as checks } from './checks';
 export type { TensorStatistics } from './checks/weights';
+export { sentenceEmbeddings, sentenceEmbeddingsTensor } from './utilities/sentences';
package/dist/main.js
CHANGED
@@ -1,14 +1,15 @@
-import …
-import { default as …
-import { default as …
-import { default as …
-import { default as …
-import { default as …
-import { default as …
-import { default as …
-import { …
-import { …
-import "./…
+import "./utilities/packed.js";
+import { default as ro } from "./models/NanoGPTV1.js";
+import { default as eo } from "./TeachableLLM.js";
+import { default as po } from "./tokeniser/CharTokeniser.js";
+import { default as ao } from "./tokeniser/bpe.js";
+import { default as fo } from "./utilities/waitForModel.js";
+import { default as no } from "./data/textLoader.js";
+import { default as uo } from "./Generator.js";
+import { default as To } from "./models/model.js";
+import { estimateMemoryUsage as go, estimateParameterCount as Mo, estimateResources as Po, estimateTrainingMemoryUsage as Co, validateConfig as Eo } from "./utilities/parameters.js";
+import { default as Bo } from "./utilities/topP.js";
+import "./index-ZyQhjEPo.js";
 import "./ops/cpu/scatterSub.js";
 import "./ops/webgl/scatterSub.js";
 import "./ops/cpu/gatherSub.js";
@@ -19,24 +20,21 @@ import "./ops/grads/attentionMask.js";
 import "./ops/cpu/qkv.js";
 import "./ops/webgl/qkv.js";
 import "./ops/grads/qkv.js";
-import "./random_width-…
-import "./register_all_kernels-…
-import "./index-…
-import "./dataset-…
+import "./random_width-DY6Kk2Dl.js";
+import "./register_all_kernels-Bwu1PTuU.js";
+import "./index-Cp39cXWe.js";
+import "./dataset-0xP8GjwI.js";
 import "./ops/cpu/rope.js";
 import "./ops/webgl/rope.js";
-import "./…
+import "./rope-B5UUMsPi.js";
 import "./ops/cpu/appendCache.js";
 import "./ops/webgl/appendCache.js";
-import "./ops/cpu/fusedSoftmax.js";
-import "./ops/webgl/fusedSoftmax.js";
-import "./ops/grads/fusedSoftmax.js";
 import "./ops/cpu/matMulGelu.js";
 import "./ops/webgl/matMulGelu.js";
 import "./ops/grads/matMulGelu.js";
 import "./ops/cpu/gelu.js";
 import "./ops/webgl/gelu.js";
-import "./gelu-…
+import "./gelu-CNLFZWea.js";
 import "./ops/cpu/normRMS.js";
 import "./ops/webgl/normRMS.js";
 import "./ops/grads/normRMS.js";
@@ -45,38 +43,51 @@ import "./ops/cpu/adamMoments.js";
 import "./ops/webgl/adamMoments.js";
 import "./ops/cpu/adamAdjust.js";
 import "./ops/webgl/adamAdjust.js";
-import { …
-import …
-import …
-import …
-import …
-import …
-import { …
-import { default as …
-…
-…
-…
-…
-…
+import { u as o, p as r } from "./pack16-CFUqumar.js";
+import "./ops/grads/softmax16.js";
+import "./matMul16--R5hOwDG.js";
+import "./ops/webgl/matMul16.js";
+import "./ops/cpu/matMul16.js";
+import "./ops/transpose16.js";
+import { selectBackend as yo } from "./backend.js";
+import { default as Ao } from "./utilities/performance.js";
+import t from "./layers/CausalSelfAttention.js";
+import e from "./layers/MLP.js";
+import m from "./layers/TransformerBlock.js";
+import p from "./layers/RoPECache.js";
+import { default as Ro } from "./training/AdamExt.js";
+import { default as vo } from "./checks/index.js";
+import { sentenceEmbeddings as Do, sentenceEmbeddingsTensor as Fo } from "./utilities/sentences.js";
+const Z = {
+  pack16: r,
+  unpack16: o
+}, _ = {
+  CausalSelfAttention: t,
+  MLP: e,
+  TransformerBlock: m,
+  RoPECache: p
 };
 export {
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
+  Ro as AdamExt,
+  ao as BPETokeniser,
+  po as CharTokeniser,
+  uo as Generator,
+  To as Model,
+  ro as NanoGPT,
+  eo as TeachableLLM,
+  vo as checks,
+  go as estimateMemoryUsage,
+  Mo as estimateParameterCount,
+  Po as estimateResources,
+  Co as estimateTrainingMemoryUsage,
+  _ as layers,
+  no as loadTextData,
+  Z as ops,
+  Ao as performanceTest,
+  yo as selectBackend,
+  Do as sentenceEmbeddings,
+  Fo as sentenceEmbeddingsTensor,
+  Bo as topP,
+  Eo as validateConfig,
+  fo as waitForModel
 };
package/dist/matMul16--R5hOwDG.js
ADDED
@@ -0,0 +1,77 @@
+import { e as y } from "./index-ZyQhjEPo.js";
+import "./ops/webgl/matMul16.js";
+import "./ops/cpu/matMul16.js";
+import { isPackedTensor as g, packTensor as k } from "./utilities/packed.js";
+import { p as v } from "./pack16-CFUqumar.js";
+import { d as h } from "./gelu-CNLFZWea.js";
+import { transpose16 as S } from "./ops/transpose16.js";
+import { reshape16 as w } from "./ops/reshape16.js";
+import { a as G } from "./tensor_util-DV-FP5Q3.js";
+const T = {
+  kernelName: "MatMul16",
+  inputsToSave: ["A", "B"],
+  outputsToSave: [],
+  gradFunc: (r, o, n) => {
+    const [s, t] = o;
+    if (Array.isArray(r))
+      throw new Error("Expected dy to be a single Tensor");
+    let e = r;
+    const { transposeA: f, transposeB: i, scale: a, activation: p, originalShape: c, perm: d } = n;
+    if (d && c) {
+      const u = new Array(d.length);
+      for (let A = 0; A < d.length; ++A)
+        u[d[A]] = A;
+      const m = e;
+      e = S(e, u), m.dispose();
+    }
+    if (c) {
+      const u = e;
+      e = w(e, c), u.dispose();
+    }
+    if (p === "gelu") {
+      const u = e, m = l(s, t, f, i);
+      e = h(u, m), u.dispose(), m.dispose();
+    }
+    if (!f && !i)
+      return {
+        A: () => a !== void 0 ? B(e, t, a, !1, !0) : l(e, t, !1, !0),
+        B: () => a !== void 0 ? M(s, e, a, !0, !1) : l(s, e, !0, !1)
+      };
+    if (!f && i)
+      return {
+        A: () => a !== void 0 ? B(e, t, a, !1, !1) : l(e, t, !1, !1),
+        B: () => a !== void 0 ? M(s, e, a, !0, !1) : l(s, e, !0, !1)
+      };
+    if (f && !i)
+      return {
+        A: () => a !== void 0 ? M(t, e, a, !1, !0) : l(t, e, !1, !0),
+        B: () => a !== void 0 ? M(s, e, a, !1, !1) : l(s, e, !1, !1)
+      };
+    throw new Error("Gradient for transposeA=true and transposeB=true is not supported yet.");
+  }
+};
+G(T);
+function l(r, o, n = !1, s = !1, t = {}) {
+  const e = g(r), f = g(o), i = e || f, a = !i || e ? r : v(r), p = !i || f ? o : v(o), c = y().runKernel("MatMul16", { A: a, B: p }, { transposeA: n, transposeB: s, ...t });
+  return i && !e && a.dispose(), i && !f && p.dispose(), i ? k(c) : c;
+}
+function j(r, o, n, s = !1, t = !1) {
+  return l(r, o, s, t, { scale: n });
+}
+function B(r, o, n, s = !1, t = !1) {
+  return l(r, o, s, t, { scaleA: n });
+}
+function M(r, o, n, s = !1, t = !1) {
+  return l(r, o, s, t, { scaleB: n });
+}
+function q(r, o, n = !1, s = !1) {
+  return l(r, o, n, s, { activation: "gelu" });
+}
+export {
+  T as a,
+  l as b,
+  q as c,
+  B as d,
+  M as e,
+  j as m
+};
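Note: the untransposed branch of gradFunc above applies the standard matmul identities (for C = A·B, dA = dY·Bᵀ and dB = Aᵀ·dY), folding any fused scale into the scaleA/scaleB variants; before dispatching, it undoes a fused output transpose/reshape on dy and multiplies through the GELU derivative when activation === "gelu". The transposeA && transposeB case throws, as the message states.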
package/dist/mat_mul-DeAh4uTH.js
ADDED
@@ -0,0 +1,12 @@
+import { A as u, B as s, E as c } from "./index-ZyQhjEPo.js";
+import { m as M, B as p } from "./tensor_util-DV-FP5Q3.js";
+function f(o, e, r = !1, m = !1) {
+  let t = s(o, "a", "matMul"), a = s(e, "b", "matMul");
+  [t, a] = M(t, a);
+  const n = { a: t, b: a }, l = { transposeA: r, transposeB: m };
+  return c.runKernel(p, n, l);
+}
+const B = /* @__PURE__ */ u({ matMul_: f });
+export {
+  B as m
+};
package/dist/mod-Gt1rMB4n.js
ADDED
@@ -0,0 +1,12 @@
+import { A as e, B as a, E as n } from "./index-ZyQhjEPo.js";
+import { m as p, M as c } from "./tensor_util-DV-FP5Q3.js";
+function d(m, r) {
+  let o = a(m, "a", "mod"), t = a(r, "b", "mod");
+  [o, t] = p(o, t);
+  const s = { a: o, b: t };
+  return n.runKernel(c, s);
+}
+const b = /* @__PURE__ */ e({ mod_: d });
+export {
+  b as m
+};