@genai-fi/nanogpt 0.9.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +352 -14
- package/dist/Generator.js +69 -78
- package/dist/{RealDiv-D4EzDsC0.js → RealDiv-DgA3z9oO.js} +32 -206
- package/dist/Reshape-CF6odzV4.js +16 -0
- package/dist/Reshape-_kILl6tK.js +81 -0
- package/dist/TeachableLLM.js +28 -22
- package/dist/Trainer.d.ts +2 -0
- package/dist/Trainer.js +3 -2
- package/dist/{axis_util-TbGYJ208.js → axis_util-BvHEw88j.js} +7 -23
- package/dist/backend.d.ts +2 -1
- package/dist/backend.js +10 -4
- package/dist/backend_util-D-rUb2ty.js +474 -0
- package/dist/backend_webgpu-B0u2ndUn.js +547 -0
- package/dist/binary_op_util-pKXltfxI.js +192 -0
- package/dist/broadcast_to-CwF7XIeu.js +30 -0
- package/dist/checks/appendCache.js +2 -2
- package/dist/checks/attentionMask.js +3 -3
- package/dist/checks/check.d.ts +1 -1
- package/dist/checks/check.js +8 -8
- package/dist/checks/gelu.js +2 -2
- package/dist/checks/index.d.ts +2 -0
- package/dist/checks/index.js +7 -5
- package/dist/checks/matMulGelu.js +6 -6
- package/dist/checks/normRMS.js +7 -7
- package/dist/checks/normRMSGrad.js +3 -3
- package/dist/checks/packUnpack.d.ts +1 -0
- package/dist/checks/packUnpack.js +18 -0
- package/dist/checks/qkv.js +12 -27
- package/dist/checks/rope.js +2 -2
- package/dist/checks/weights.js +18 -16
- package/dist/complex-CSlYz-2T.js +13 -0
- package/dist/complex_util-Yc1A_gV1.js +55 -0
- package/dist/concat-BHlIJeyT.js +19 -0
- package/dist/concat_util-DcJk7YHS.js +22 -0
- package/dist/data/docx.js +1 -1
- package/dist/data/parquet.js +2 -2
- package/dist/data/pdf.js +1 -1
- package/dist/data/textLoader.js +1 -1
- package/dist/{dataset-DlZtKmBq.js → dataset-0xP8GjwI.js} +136 -236
- package/dist/dropout-C1pM3f11.js +99 -0
- package/dist/expand_dims-BPG4fwBP.js +13 -0
- package/dist/exports_initializers-xuidcwI4.js +7 -0
- package/dist/gather-DykLGqmW.js +10 -0
- package/dist/{gelu-Bp_-935b.js → gelu-CNLFZWea.js} +11 -10
- package/dist/{gpgpu_math-CDaYiyE_.js → gpgpu_math-DDVJCn6-.js} +90 -265
- package/dist/{index-C4L8Cm77.js → index-CieiGp4Y.js} +14 -14
- package/dist/index-CjOj7j-u.js +7308 -0
- package/dist/{index-Tf7vU29b.js → index-Cp39cXWe.js} +3 -10
- package/dist/{index-Dwqa6Zy2.js → index-DvYrXKkX.js} +2 -2
- package/dist/index-ZyQhjEPo.js +2157 -0
- package/dist/{jszip.min-CjP2V1VV.js → jszip.min-Bz5-11Bk.js} +56 -57
- package/dist/kernel_funcs_utils-Dg_-E44D.js +308 -0
- package/dist/layers/BaseLayer.d.ts +1 -0
- package/dist/layers/BaseLayer.js +7 -6
- package/dist/layers/CausalSelfAttention.d.ts +0 -1
- package/dist/layers/CausalSelfAttention.js +56 -55
- package/dist/layers/MLP.js +15 -16
- package/dist/layers/PositionEmbedding.js +5 -14
- package/dist/layers/RMSNorm.js +3 -3
- package/dist/layers/RoPECache.d.ts +2 -0
- package/dist/layers/RoPECache.js +22 -17
- package/dist/layers/TiedEmbedding.js +22 -17
- package/dist/layers/TransformerBlock.js +21 -20
- package/dist/loader/load.js +1 -1
- package/dist/loader/loadTransformers.js +1 -1
- package/dist/loader/oldZipLoad.js +39 -33
- package/dist/loader/save.js +1 -1
- package/dist/log_sum_exp-DWI-76TI.js +41 -0
- package/dist/main.d.ts +8 -0
- package/dist/main.js +63 -52
- package/dist/matMul16--R5hOwDG.js +77 -0
- package/dist/mat_mul-DeAh4uTH.js +12 -0
- package/dist/mod-Gt1rMB4n.js +12 -0
- package/dist/models/NanoGPTV1.js +40 -31
- package/dist/models/model.d.ts +2 -0
- package/dist/models/model.js +37 -29
- package/dist/{mulmat_packed_gpu-BT60jmzP.js → mulmat_packed_gpu-BMFhLwta.js} +1 -17
- package/dist/{non_max_suppression_impl-CsEgBuMA.js → non_max_suppression_impl-B2W7YjZB.js} +0 -32
- package/dist/ones-CAMiP4I2.js +15 -0
- package/dist/ops/adamAdjust.js +1 -1
- package/dist/ops/adamMoments.d.ts +1 -1
- package/dist/ops/adamMoments.js +4 -4
- package/dist/ops/add16.d.ts +2 -0
- package/dist/ops/add16.js +9 -0
- package/dist/ops/appendCache.js +16 -9
- package/dist/ops/attentionMask.js +4 -4
- package/dist/ops/concat16.d.ts +2 -0
- package/dist/ops/concat16.js +9 -0
- package/dist/ops/cpu/adamAdjust.js +14 -13
- package/dist/ops/cpu/adamMoments.js +10 -9
- package/dist/ops/cpu/appendCache.js +9 -8
- package/dist/ops/cpu/attentionMask.js +15 -14
- package/dist/ops/cpu/fusedSoftmax.js +13 -12
- package/dist/ops/cpu/gatherSub.js +9 -24
- package/dist/ops/cpu/gelu.js +13 -12
- package/dist/ops/cpu/matMul16.d.ts +1 -0
- package/dist/ops/cpu/matMul16.js +16 -0
- package/dist/ops/cpu/matMulGelu.js +18 -16
- package/dist/ops/cpu/matMulMul.js +8 -7
- package/dist/ops/cpu/mulDropout.js +4 -3
- package/dist/ops/cpu/normRMS.js +11 -10
- package/dist/ops/cpu/qkv.js +17 -13
- package/dist/ops/cpu/rope.js +23 -22
- package/dist/ops/cpu/scatterSub.js +16 -30
- package/dist/ops/dot16.d.ts +2 -0
- package/dist/ops/dot16.js +42 -0
- package/dist/ops/gatherSub.js +1 -1
- package/dist/ops/gelu.js +2 -2
- package/dist/ops/grads/add16.d.ts +1 -0
- package/dist/ops/grads/add16.js +27 -0
- package/dist/ops/grads/attentionMask.js +12 -19
- package/dist/ops/grads/gelu.js +4 -3
- package/dist/ops/grads/matMul16.d.ts +2 -0
- package/dist/ops/grads/matMul16.js +9 -0
- package/dist/ops/grads/matMulGelu.js +8 -7
- package/dist/ops/grads/normRMS.js +8 -7
- package/dist/ops/grads/{fusedSoftmax.d.ts → pack16.d.ts} +1 -1
- package/dist/ops/grads/pack16.js +7 -0
- package/dist/ops/grads/qkv.d.ts +3 -1
- package/dist/ops/grads/qkv.js +28 -22
- package/dist/ops/grads/rope.d.ts +2 -1
- package/dist/ops/grads/rope.js +6 -13
- package/dist/ops/grads/softmax16.d.ts +2 -0
- package/dist/ops/grads/softmax16.js +26 -0
- package/dist/ops/grads/unpack16.d.ts +2 -0
- package/dist/ops/grads/unpack16.js +6 -0
- package/dist/ops/grads/utils.d.ts +3 -0
- package/dist/ops/grads/utils.js +10 -0
- package/dist/ops/matMul16.d.ts +15 -0
- package/dist/ops/matMul16.js +13 -0
- package/dist/ops/matMulGelu.js +1 -1
- package/dist/ops/matMulMul.js +1 -1
- package/dist/ops/mul16.d.ts +2 -0
- package/dist/ops/mul16.js +8 -0
- package/dist/ops/mulDrop.js +1 -1
- package/dist/ops/normRMS.js +1 -1
- package/dist/ops/pack16.d.ts +2 -0
- package/dist/ops/pack16.js +6 -0
- package/dist/ops/qkv.d.ts +1 -1
- package/dist/ops/qkv.js +8 -4
- package/dist/ops/reshape16.d.ts +2 -0
- package/dist/ops/reshape16.js +43 -0
- package/dist/ops/rope.d.ts +1 -1
- package/dist/ops/rope.js +8 -10
- package/dist/ops/scatterSub.js +1 -1
- package/dist/ops/slice16.d.ts +2 -0
- package/dist/ops/slice16.js +9 -0
- package/dist/ops/softmax16.d.ts +2 -0
- package/dist/ops/softmax16.js +12 -0
- package/dist/ops/sub16.d.ts +2 -0
- package/dist/ops/sub16.js +8 -0
- package/dist/ops/sum16.d.ts +2 -0
- package/dist/ops/sum16.js +13 -0
- package/dist/ops/transpose16.d.ts +3 -0
- package/dist/ops/transpose16.js +41 -0
- package/dist/ops/unpack16.d.ts +2 -0
- package/dist/ops/unpack16.js +6 -0
- package/dist/ops/webgl/adamAdjust.js +3 -2
- package/dist/ops/webgl/adamMoments.js +2 -1
- package/dist/ops/webgl/appendCache.js +2 -1
- package/dist/ops/webgl/attentionMask.js +5 -4
- package/dist/ops/webgl/fusedSoftmax.js +6 -4
- package/dist/ops/webgl/gatherSub.js +7 -6
- package/dist/ops/webgl/gelu.js +3 -2
- package/dist/ops/webgl/log.js +12 -27
- package/dist/ops/webgl/matMul16.d.ts +1 -0
- package/dist/ops/webgl/matMul16.js +37 -0
- package/dist/ops/webgl/matMulGelu.js +17 -15
- package/dist/ops/webgl/matMulMul.js +13 -12
- package/dist/ops/webgl/mulDropout.js +9 -8
- package/dist/ops/webgl/normRMS.js +8 -7
- package/dist/ops/webgl/qkv.js +6 -5
- package/dist/ops/webgl/rope.js +11 -10
- package/dist/ops/webgl/scatterSub.js +6 -5
- package/dist/ops/webgpu/adamAdjust.js +12 -10
- package/dist/ops/webgpu/adamMoments.js +27 -22
- package/dist/ops/webgpu/add16.d.ts +1 -0
- package/dist/ops/webgpu/add16.js +14 -0
- package/dist/ops/webgpu/appendCache.js +64 -17
- package/dist/ops/webgpu/attentionMask.js +19 -62
- package/dist/ops/webgpu/attentionMask32_program.d.ts +19 -0
- package/dist/ops/webgpu/attentionMask32_program.js +54 -0
- package/dist/ops/webgpu/concat16.d.ts +19 -0
- package/dist/ops/webgpu/concat16.js +128 -0
- package/dist/ops/webgpu/gatherSub.js +9 -7
- package/dist/ops/webgpu/gelu.js +78 -31
- package/dist/ops/webgpu/index.js +12 -0
- package/dist/ops/webgpu/matMul16.d.ts +1 -0
- package/dist/ops/webgpu/matMul16.js +58 -0
- package/dist/ops/webgpu/matMul16_program.d.ts +42 -0
- package/dist/ops/webgpu/matMul16_program.js +336 -0
- package/dist/ops/webgpu/mul16.d.ts +1 -0
- package/dist/ops/webgpu/mul16.js +14 -0
- package/dist/ops/webgpu/normRMS.js +21 -40
- package/dist/ops/webgpu/normRMS16_program.d.ts +9 -0
- package/dist/ops/webgpu/normRMS16_program.js +24 -0
- package/dist/ops/webgpu/normRMS32_program.d.ts +9 -0
- package/dist/ops/webgpu/normRMS32_program.js +24 -0
- package/dist/ops/webgpu/normRMSGrad.js +113 -64
- package/dist/ops/webgpu/pack16.d.ts +1 -0
- package/dist/ops/webgpu/pack16.js +19 -0
- package/dist/ops/webgpu/pack16_program.d.ts +19 -0
- package/dist/ops/webgpu/pack16_program.js +92 -0
- package/dist/ops/webgpu/qkv.js +20 -55
- package/dist/ops/webgpu/rope.js +77 -22
- package/dist/ops/webgpu/scatterSub.js +9 -7
- package/dist/ops/webgpu/slice16.d.ts +7 -0
- package/dist/ops/webgpu/slice16.js +71 -0
- package/dist/{variable-Bm2OFwGI.js → ops/webgpu/softmax16.d.ts} +2 -8
- package/dist/ops/webgpu/softmax16.js +23 -0
- package/dist/ops/webgpu/softmax16_program.d.ts +13 -0
- package/dist/ops/webgpu/softmax16_program.js +73 -0
- package/dist/ops/webgpu/softmax16_subgroup_program.d.ts +17 -0
- package/dist/ops/webgpu/softmax16_subgroup_program.js +75 -0
- package/dist/ops/webgpu/softmax16grad.d.ts +1 -0
- package/dist/ops/webgpu/softmax16grad.js +38 -0
- package/dist/ops/webgpu/sub16.d.ts +1 -0
- package/dist/ops/webgpu/sub16.js +14 -0
- package/dist/ops/webgpu/sum16.d.ts +1 -0
- package/dist/ops/webgpu/sum16.js +40 -0
- package/dist/ops/webgpu/transpose16.d.ts +1 -0
- package/dist/ops/webgpu/transpose16.js +35 -0
- package/dist/ops/webgpu/transpose16_program.d.ts +16 -0
- package/dist/ops/webgpu/transpose16_program.js +50 -0
- package/dist/ops/webgpu/transpose16_shared_program.d.ts +15 -0
- package/dist/ops/webgpu/transpose16_shared_program.js +71 -0
- package/dist/ops/webgpu/unpack16.d.ts +1 -0
- package/dist/ops/webgpu/unpack16.js +49 -0
- package/dist/ops/webgpu/utils/binary_op.d.ts +19 -0
- package/dist/ops/webgpu/utils/binary_op.js +79 -0
- package/dist/ops/webgpu/utils/deviceInfo.d.ts +7 -0
- package/dist/ops/webgpu/utils/deviceInfo.js +11 -0
- package/dist/ops/webgpu/utils/reductions.d.ts +32 -4
- package/dist/ops/webgpu/utils/reductions.js +236 -45
- package/dist/ops-CNI3TwqM.js +645 -0
- package/dist/pack16-CFUqumar.js +41 -0
- package/dist/{papaparse.min-C8l2Kvo1.js → papaparse.min-C0cScC2i.js} +2 -8
- package/dist/{parquet-C0Tlmv9c.js → parquet-BE8MU_ge.js} +201 -278
- package/dist/patches/PackedTensor.d.ts +12 -0
- package/dist/patches/PackedTensor.js +11 -0
- package/dist/patches/engine.d.ts +261 -0
- package/dist/patches/engine.js +10 -0
- package/dist/patches/tape.d.ts +12 -0
- package/dist/patches/tape.js +5 -0
- package/dist/patches/webgpu_backend.d.ts +18 -0
- package/dist/patches/webgpu_backend.js +57 -0
- package/dist/{tensor-CZr4dh61.js → patches/webgpu_base.d.ts} +5 -8
- package/dist/patches/webgpu_base.js +34 -0
- package/dist/patches/webgpu_program.d.ts +36 -0
- package/dist/patches/webgpu_program.js +401 -0
- package/dist/{pdf-kJD-f258.js → pdf-NIhmP3sq.js} +424 -428
- package/dist/random_width-DY6Kk2Dl.js +10051 -0
- package/dist/range-BMS52eQi.js +11 -0
- package/dist/reciprocal-CTmshQ9J.js +10 -0
- package/dist/{register_all_kernels-DIGpEwcf.js → register_all_kernels-Bwu1PTuU.js} +719 -9766
- package/dist/relu-yZ2-7WxU.js +10 -0
- package/dist/reshape-DevtBWtf.js +10 -0
- package/dist/rope-B5UUMsPi.js +32 -0
- package/dist/{scatter_nd_util-BQdz--Gn.js → scatter_nd_util-5EL-8VAQ.js} +1 -1
- package/dist/selu_util-D1w6yyTO.js +303 -0
- package/dist/{shared-DuP7ue-R.js → shared-BRksrJb3.js} +1 -17
- package/dist/shared-BuAXb4CI.js +2145 -0
- package/dist/sin-BGfy2HZo.js +16 -0
- package/dist/slice-D_gkkqZK.js +13 -0
- package/dist/slice_util-DtEldBfK.js +261 -0
- package/dist/softmax-ZHVebtR1.js +13 -0
- package/dist/split-DrfihRpZ.js +10 -0
- package/dist/squeeze-DZEpeblb.js +11 -0
- package/dist/stack-yOIAalTq.js +13 -0
- package/dist/sum-_fzj5ZTB.js +12 -0
- package/dist/tensor-DdQUJZlz.js +909 -0
- package/dist/tensor-f35l8Odg.js +8 -0
- package/dist/tensor1d-CeZuc-Rv.js +12 -0
- package/dist/tensor2d-G4Ys2GxX.js +15 -0
- package/dist/tensor4d-B8roDgtc.js +15 -0
- package/dist/tensor_util-DV-FP5Q3.js +523 -0
- package/dist/tfjs_backend-kNyO5L2d.js +653 -0
- package/dist/tile-BzyEiF-F.js +13 -0
- package/dist/tokeniser/CharTokeniser.js +1 -1
- package/dist/tokeniser/bpe.js +1 -1
- package/dist/training/Adam.d.ts +2 -1
- package/dist/training/Adam.js +12 -28
- package/dist/training/AdamExt.d.ts +1 -0
- package/dist/training/AdamExt.js +2 -2
- package/dist/training/DatasetBuilder.js +3 -20
- package/dist/training/FullTrainer.js +82 -64
- package/dist/training/Trainer.d.ts +11 -6
- package/dist/training/Trainer.js +51 -39
- package/dist/training/sparseCrossEntropy.js +3 -3
- package/dist/transpose-DKELTqhe.js +38 -0
- package/dist/utilities/arrayClose.js +7 -7
- package/dist/utilities/dummy.js +35 -27
- package/dist/utilities/multinomialCPU.js +2 -2
- package/dist/utilities/packed.d.ts +7 -0
- package/dist/utilities/packed.js +716 -0
- package/dist/utilities/performance.js +1 -1
- package/dist/utilities/profile.js +1 -1
- package/dist/utilities/safetensors.js +2 -2
- package/dist/utilities/sentences.d.ts +5 -0
- package/dist/utilities/sentences.js +41 -0
- package/dist/utilities/weights.js +2 -2
- package/dist/variable-Bhn5bHYv.js +7 -0
- package/dist/{webgpu_program-DkQJOJSd.js → webgpu_program-Cigz-7RF.js} +15 -44
- package/dist/webgpu_util-BBCnKm2X.js +65 -0
- package/dist/zeros-2gldETuK.js +14 -0
- package/package.json +4 -3
- package/dist/Reshape-Bowtk9BP.js +0 -127
- package/dist/Reshape-DUqYftGC.js +0 -30
- package/dist/backend_util-CJIiDoV1.js +0 -749
- package/dist/broadcast_to-DzlNweb8.js +0 -44
- package/dist/concat-B912vBbo.js +0 -33
- package/dist/dropout-C-csYCLj.js +0 -193
- package/dist/exports_initializers-B8iZMgQ0.js +0 -16
- package/dist/gather-Dnpgw-YQ.js +0 -25
- package/dist/index-BzFyqcy-.js +0 -4457
- package/dist/index-C1rx_Ajs.js +0 -12076
- package/dist/kernel_funcs_utils-DKLK0Mg3.js +0 -466
- package/dist/log_sum_exp-DO6z8tSE.js +0 -103
- package/dist/mat_mul-DzjTFx-u.js +0 -27
- package/dist/mod-Dobti4j4.js +0 -27
- package/dist/ones-tIJeHlq-.js +0 -29
- package/dist/ops/fusedSoftmax.d.ts +0 -2
- package/dist/ops/fusedSoftmax.js +0 -10
- package/dist/ops/grads/fusedSoftmax.js +0 -22
- package/dist/ops-LuCMAnmM.js +0 -1525
- package/dist/random_width-CXVRloNK.js +0 -13670
- package/dist/range-CWcz7xFA.js +0 -26
- package/dist/reciprocal-C4rNcM-S.js +0 -25
- package/dist/relu-BjCh_SYb.js +0 -25
- package/dist/reshape-CnIwVG1c.js +0 -25
- package/dist/selu_util-OtRzVwW5.js +0 -719
- package/dist/shared-DmRsFyaJ.js +0 -3134
- package/dist/sin-gpDNRxE0.js +0 -47
- package/dist/slice-d0Vo9XTN.js +0 -28
- package/dist/softmax-D7Jj3p_P.js +0 -28
- package/dist/split-DK2k5eHf.js +0 -25
- package/dist/stack-DFatutCx.js +0 -27
- package/dist/sum-CJ0ULhmt.js +0 -27
- package/dist/tensor1d-vML0r3q6.js +0 -27
- package/dist/tensor2d-D76QGjF3.js +0 -30
- package/dist/tensor4d-Df1WlVDY.js +0 -30
- package/dist/webgpu_util-pLEV9tks.js +0 -80
- package/dist/zeros-Bj5rMYA7.js +0 -52
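Most of the new surface in 0.10.0 is a packed fp16 path: `pack16`/`unpack16` convert between f32 tensors and tensors carrying two fp16 values per f32 slot, with `matMul16`, `dot16`, `softmax16`, `transpose16`, `reshape16`, and friends operating on either form. The sketch below is illustrative only — the call shapes are inferred from how `CausalSelfAttention.js` uses these ops in the diff further down, and the `declare`d signatures are my assumption, not the package's published API:

```ts
import type { Tensor } from "@tensorflow/tfjs-core";

// Assumed signatures, inferred from the CausalSelfAttention.js diff below;
// the real implementations live in dist/ops/*16.js.
declare function isPackedTensor(x: Tensor): boolean;
declare function pack16(x: Tensor): Tensor; // f32 -> packed fp16
declare function transpose16(x: Tensor, perm: number[]): Tensor;
declare function reshape16(x: Tensor, shape: number[]): Tensor;
declare function dot16(a: Tensor, b: Tensor): Tensor;

// The output-projection pattern from the attention layer: when activations
// arrive packed, pack the f32 weight on the fly, matmul in fp16, then
// dispose the temporary packed weight.
function project(x: Tensor, weight: Tensor, nEmbed: number): Tensor {
  const packed = isPackedTensor(x);
  const [b, , t] = x.shape;               // x is [B, nHead, T, headDim]
  const w = packed ? pack16(weight) : weight;
  const y = transpose16(x, [0, 2, 1, 3]); // [B, T, nHead, headDim]
  // Packed tensors hold two fp16 lanes per f32 slot, hence the halved width.
  const flat = reshape16(y, [b, t, packed ? nEmbed / 2 : nEmbed]);
  y.dispose();
  const out = dot16(flat, w);
  if (packed) w.dispose();
  flat.dispose();
  return out;
}
```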
package/dist/kernel_funcs_utils-Dg_-E44D.js
ADDED
@@ -0,0 +1,308 @@
+import { j as B } from "./index-ZyQhjEPo.js";
+import { u as N, f as G } from "./gpgpu_math-DDVJCn6-.js";
+import { s as K, I as W, e as O } from "./tensor-DdQUJZlz.js";
+import { I as z, h as V, W as F, X as Y, u as S } from "./tensor_util-DV-FP5Q3.js";
+import { f as v } from "./backend_util-D-rUb2ty.js";
+const he = `
+  if (isnan(a)) return a;
+  if (isnan(b)) return b;
+`;
+class b {
+  constructor(e, s, r) {
+    this.variableNames = ["A", "B"], this.outputShape = B(s, r), this.enableShapeUniforms = N(this.outputShape.length), this.userCode = `
+      float binaryOperation(float a, float b) {
+        ${e}
+      }
+
+      void main() {
+        float a = getAAtOutCoords();
+        float b = getBAtOutCoords();
+        setOutput(binaryOperation(a, b));
+      }
+    `;
+  }
+}
+function H(t, e) {
+  return ["x", "y", "z", "w", "u", "v"].slice(0, e).map((s) => `${t}.${s}`);
+}
+function Z(t, e) {
+  return e === 1 ? [t] : H(t, e);
+}
+function fe(t, e) {
+  if (t === 1)
+    return "rc";
+  let s = "";
+  for (let r = 0; r < t; r++)
+    s += e[r], r < t - 1 && (s += ",");
+  return s;
+}
+const xe = `
+  result.r = isNaN.r ? NAN : result.r;
+  result.g = isNaN.g ? NAN : result.g;
+  result.b = isNaN.b ? NAN : result.b;
+  result.a = isNaN.a ? NAN : result.a;
+`;
+class E {
+  constructor(e, s, r, u = !1) {
+    this.variableNames = ["A", "B"], this.supportsBroadcasting = !0, this.packedInputs = !0, this.packedOutput = !0, this.outputShape = B(s, r);
+    const n = this.outputShape.length;
+    this.enableShapeUniforms = N(n);
+    let o = "";
+    if (u)
+      if (n === 0 || K(this.outputShape) === 1)
+        o = `
+          result.y = 0.;
+          result.z = 0.;
+          result.w = 0.;
+        `;
+      else if (o = `
+        ${G(n)} coords = getOutputCoords();
+      `, n === 1)
+        this.enableShapeUniforms ? o += `
+          result.y = (coords + 1) >= outShape ? 0. : result.y;
+          result.z = 0.;
+          result.w = 0.;
+        ` : o += `
+          result.y = (coords + 1) >= ${this.outputShape[0]} ? 0. : result.y;
+          result.z = 0.;
+          result.w = 0.;
+        `;
+      else {
+        const a = Z("coords", n);
+        this.enableShapeUniforms ? o += `
+          bool nextRowOutOfBounds =
+            (${a[n - 2]} + 1) >= outShape[${n} - 2];
+          bool nextColOutOfBounds =
+            (${a[n - 1]} + 1) >= outShape[${n} - 1];
+          result.y = nextColOutOfBounds ? 0. : result.y;
+          result.z = nextRowOutOfBounds ? 0. : result.z;
+          result.w = nextColOutOfBounds || nextRowOutOfBounds ? 0. : result.w;
+        ` : o += `
+          bool nextRowOutOfBounds =
+            (${a[n - 2]} + 1) >= ${this.outputShape[n - 2]};
+          bool nextColOutOfBounds =
+            (${a[n - 1]} + 1) >= ${this.outputShape[n - 1]};
+          result.y = nextColOutOfBounds ? 0. : result.y;
+          result.z = nextRowOutOfBounds ? 0. : result.z;
+          result.w = nextColOutOfBounds || nextRowOutOfBounds ? 0. : result.w;
+        `;
+      }
+    this.userCode = `
+      vec4 binaryOperation(vec4 a, vec4 b) {
+        ${e}
+      }
+
+      void main() {
+        vec4 a = getAAtOutCoords();
+        vec4 b = getBAtOutCoords();
+
+        vec4 result = binaryOperation(a, b);
+        ${o}
+
+        setOutput(result);
+      }
+    `;
+  }
+}
+function P(t) {
+  const { inputs: e, backend: s } = t, { x: r } = e;
+  return s.incRef(r.dataId), { dataId: r.dataId, shape: r.shape, dtype: r.dtype };
+}
+const ge = {
+  kernelName: z,
+  backendName: "webgl",
+  kernelFunc: P
+};
+function T(t) {
+  const { inputs: e, backend: s } = t, { real: r, imag: u } = e, n = s.makeTensorInfo(r.shape, "complex64"), o = s.texData.get(n.dataId), i = P({ inputs: { x: r }, backend: s }), a = P({ inputs: { x: u }, backend: s });
+  return o.complexTensorInfos = { real: i, imag: a }, n;
+}
+const me = {
+  kernelName: V,
+  backendName: "webgl",
+  kernelFunc: T
+};
+const L = "return (a < 0.) ? b * a : a;", w = `
+  vec4 aLessThanZero = vec4(lessThan(a, vec4(0.)));
+  return (aLessThanZero * (b * a)) + ((vec4(1.0) - aLessThanZero) * a);
+`;
+function q(t) {
+  const { inputs: e, backend: s, attrs: r } = t, { x: u } = e, { alpha: n } = r, o = s.makeTensorInfo([], "float32", W(n, "float32")), i = O().getBool("WEBGL_PACK_BINARY_OPERATIONS") ? new E(w, u.shape, o.shape) : new b(L, u.shape, o.shape), a = s.runWebGLProgram(i, [u, o], "float32");
+  return s.disposeIntermediateTensorInfo(o), a;
+}
+const be = {
+  kernelName: F,
+  backendName: "webgl",
+  kernelFunc: q
+};
+const R = "return (a < 0.) ? b * a : a;", k = `
+  vec4 aLessThanZero = vec4(lessThan(a, vec4(0.)));
+  return (aLessThanZero * (b * a)) + ((vec4(1.0) - aLessThanZero) * a);
+`;
+function j(t) {
+  const { inputs: e, backend: s } = t, { x: r, alpha: u } = e, n = O().getBool("WEBGL_PACK_BINARY_OPERATIONS") ? new E(k, r.shape, u.shape) : new b(R, r.shape, u.shape);
+  return s.runWebGLProgram(n, [r, u], "float32");
+}
+const Ne = {
+  kernelName: Y,
+  backendName: "webgl",
+  kernelFunc: j
+};
+class M {
+  constructor(e, s) {
+    this.variableNames = ["A"], this.outputShape = e, this.enableShapeUniforms = N(this.outputShape.length), this.userCode = `
+      float unaryOperation(float x) {
+        ${s}
+      }
+
+      void main() {
+        float x = getAAtOutCoords();
+        float y = unaryOperation(x);
+
+        setOutput(y);
+      }
+    `;
+  }
+}
+const U = "if (isnan(x)) return x;", X = "return x;", Oe = "return abs(x);", J = "return (x >= 0.0) ? x : (exp(x) - 1.0);", Q = U + `
+  return (x < 0.0) ? 0.0 : x;
+`, ee = U + `
+  return (x < 0.0) ? 0.0 : min(6.0, x);
+`, ye = "return x;", te = "return 1.0 / (1.0 + exp(-1.0 * x));";
+const se = "return x;", ae = `
+  vec4 result;
+
+  result.r = (x.r >= 0.0) ? x.r : (exp(x.r) - 1.0);
+  result.g = (x.g >= 0.0) ? x.g : (exp(x.g) - 1.0);
+  result.b = (x.b >= 0.0) ? x.b : (exp(x.b) - 1.0);
+  result.a = (x.a >= 0.0) ? x.a : (exp(x.a) - 1.0);
+
+  return result;
+`, re = `
+  vec4 result = x * vec4(greaterThanEqual(x, vec4(0.0)));
+  bvec4 isNaN = isnan(x);
+
+  result.r = isNaN.r ? x.r : result.r;
+  result.g = isNaN.g ? x.g : result.g;
+  result.b = isNaN.b ? x.b : result.b;
+  result.a = isNaN.a ? x.a : result.a;
+
+  return result;
+`, ne = `
+  vec4 result = min(x, vec4(6.)) * vec4(greaterThanEqual(x, vec4(0.0)));
+  bvec4 isNaN = isnan(x);
+
+  result.r = isNaN.r ? x.r : result.r;
+  result.g = isNaN.g ? x.g : result.g;
+  result.b = isNaN.b ? x.b : result.b;
+  result.a = isNaN.a ? x.a : result.a;
+
+  return result;
+`, oe = "return 1.0 / (1.0 + exp(-1.0 * x));";
+class ue {
+  constructor(e, s) {
+    this.variableNames = ["A"], this.packedInputs = !0, this.packedOutput = !0, this.outputShape = e, this.enableShapeUniforms = N(this.outputShape.length), this.userCode = `
+      vec4 unaryOperation(vec4 x) {
+        ${s}
+      }
+
+      void main() {
+        vec4 x = getAAtOutCoords();
+        vec4 y = unaryOperation(x);
+
+        setOutput(y);
+      }
+    `;
+  }
+}
+const Ie = "if (isnan(x)) return x;";
+function Ae({ opSnippet: t, packedOpSnippet: e, cpuKernelImpl: s, dtype: r }) {
+  return ({ inputs: u, backend: n }) => {
+    const { x: o } = u, i = n, a = r || o.dtype;
+    if (i.shouldExecuteOnCPU([o]) && s != null) {
+      const d = i.texData.get(o.dataId), y = s(d.values, a);
+      return i.makeTensorInfo(o.shape, a, y);
+    }
+    const c = O().getBool("WEBGL_PACK_UNARY_OPERATIONS") && e != null;
+    let l;
+    return c ? l = new ue(o.shape, e) : l = new M(o.shape, t), i.runWebGLProgram(l, [o], a);
+  };
+}
+function Ce({ opSnippet: t, packedOpSnippet: e, checkOutOfBounds: s = !1, supportsComplex: r = !1, cpuKernelImpl: u, dtype: n }) {
+  return ({ inputs: o, backend: i }) => {
+    const { a, b: c } = o, l = i;
+    if (r && a.dtype === "complex64") {
+      const h = l.texData.get(a.dataId), f = l.texData.get(c.dataId), [g, m] = [
+        [h.complexTensorInfos.real, f.complexTensorInfos.real],
+        [h.complexTensorInfos.imag, f.complexTensorInfos.imag]
+      ].map((C) => {
+        const [p, x] = C, $ = {
+          dataId: p.dataId,
+          dtype: p.dtype,
+          shape: a.shape
+        }, _ = {
+          dataId: x.dataId,
+          dtype: x.dtype,
+          shape: c.shape
+        }, D = new b(t, a.shape, c.shape);
+        return l.runWebGLProgram(D, [$, _], S(p.dtype, x.dtype));
+      }), A = T({ inputs: { real: g, imag: m }, backend: l });
+      return l.disposeIntermediateTensorInfo(g), l.disposeIntermediateTensorInfo(m), A;
+    }
+    const d = n || S(a.dtype, c.dtype);
+    if ((a.dtype === "string" || c.dtype === "string" || l.shouldExecuteOnCPU([a, c])) && u != null) {
+      const h = l.texData.get(a.dataId).values, f = l.texData.get(c.dataId).values, g = a.dtype === "string" ? (
+        // tslint:disable-next-line: no-any
+        v(h)
+      ) : h, m = a.dtype === "string" ? (
+        // tslint:disable-next-line: no-any
+        v(f)
+      ) : f, [A, C] = u(a.shape, c.shape, g, m, d), p = l.makeTensorInfo(C, d), x = l.texData.get(p.dataId);
+      return x.values = A, p;
+    }
+    const y = O().getBool("WEBGL_PACK_BINARY_OPERATIONS") && e != null;
+    let I;
+    return y ? I = new E(e, a.shape, c.shape, s) : I = new b(t, a.shape, c.shape), l.runWebGLProgram(I, [a, c], d);
+  };
+}
+function Pe(t, e = !1) {
+  if (t === "linear")
+    return e ? se : X;
+  if (t === "relu")
+    return e ? re : Q;
+  if (t === "elu")
+    return e ? ae : J;
+  if (t === "relu6")
+    return e ? ne : ee;
+  if (t === "prelu")
+    return e ? k : R;
+  if (t === "leakyrelu")
+    return e ? w : L;
+  if (t === "sigmoid")
+    return e ? oe : te;
+  throw new Error(`Activation ${t} has not been implemented for the WebGL backend.`);
+}
+export {
+  Oe as A,
+  E as B,
+  U as C,
+  ue as U,
+  Z as a,
+  Ce as b,
+  fe as c,
+  ye as d,
+  M as e,
+  T as f,
+  H as g,
+  b as h,
+  P as i,
+  he as j,
+  xe as k,
+  Ie as l,
+  Pe as m,
+  me as n,
+  ge as o,
+  be as p,
+  Ne as q,
+  Ae as u
+};
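This chunk looks like a deminified copy of tfjs-backend-webgl's kernel helper utilities: `b`/`E` are the unpacked/packed binary-op shader programs, `M`/`ue` the unary ones, `Ce`/`Ae` the binary/unary kernel-func factories, and `Pe` maps activation names to GLSL snippets. For orientation, a kernel built with the factory looks roughly like the sketch below; `binaryKernelFunc` is my deminified name for the `Ce` export, and its exact option names are an assumption:

```ts
import { registerKernel } from "@tensorflow/tfjs-core";

// Assumed shape of the factory exported (minified) from this chunk.
declare function binaryKernelFunc(args: {
  opSnippet: string;        // GLSL body for the unpacked program
  packedOpSnippet?: string; // vec4 body for the packed program
  supportsComplex?: boolean;
}): (args: { inputs: unknown; backend: unknown }) => unknown;

// The factory wires CPU fallback, complex64 handling, and the
// WEBGL_PACK_BINARY_OPERATIONS packed/unpacked program choice.
const addKernelFunc = binaryKernelFunc({
  opSnippet: "return a + b;",
  packedOpSnippet: "return a + b;",
  supportsComplex: true,
});

registerKernel({ kernelName: "Add", backendName: "webgl", kernelFunc: addKernelFunc as never });
```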
package/dist/layers/BaseLayer.js
CHANGED
@@ -1,5 +1,6 @@
-import {
-import { v
+import { w as p, e as o, F as g } from "../index-ZyQhjEPo.js";
+import { v } from "../variable-Bhn5bHYv.js";
+import { T as _ } from "../tensor-DdQUJZlz.js";
 class T {
   parent;
   config;
@@ -79,7 +80,7 @@ class T {
     const i = t.get(e)?.[0];
     if (!i)
       throw new Error(`Weights for ${e} not found`);
-    r ? r.assign(i) : this._variables.set(e,
+    r ? r.assign(i) : this._variables.set(e, v(i, this._trainable));
   }), this.children.forEach((r) => {
     r.loadWeights(t);
   });
@@ -97,7 +98,7 @@ class T {
   call(t, ...r) {
     this.build();
     const e = this.forward(t, ...r);
-    if (t.training && e instanceof
+    if (t.training && e instanceof _) {
       const i = this.dropout(e);
       return i !== e && e.dispose(), i;
     } else
@@ -107,12 +108,12 @@ class T {
     return this.build(), this.checkpointingFn(t, ...r);
   }
   checkpointingFn(t, ...r) {
-    const e = this.trainableVariables, s =
+    const e = this.trainableVariables, s = p((...a) => {
       const l = a[a.length - 1], n = a.slice(0, r.length), h = this.forward(t, ...n);
       return l(n), { value: h, gradFunc: (c, f) => {
         const u = o().state.activeTape;
         o().state.activeTape = [];
-        const b =
+        const b = g((...d) => this.forward(t, ...d.slice(0, n.length)))([...f, ...e], c);
         return o().state.activeTape = u, b;
       } };
     })(...r, ...e);
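The rewritten `checkpointingFn` above is gradient checkpointing: `p` appears to be `tf.customGrad`, `g` `tf.grads`, and `o()` the engine whose tape is swapped out while the forward pass is re-run. A minimal standalone sketch of the same recompute-on-backward pattern, assuming stock `@tensorflow/tfjs` APIs rather than this package's internals:

```ts
import * as tf from "@tensorflow/tfjs";

// An expensive block whose activations we'd rather recompute than store.
const forward = (x: tf.Tensor) => tf.relu(tf.matMul(x, x));

// forward() runs once to produce the value; gradFunc re-runs it under
// tf.grads, so intermediates are rebuilt during backprop instead of kept.
const checkpointed = tf.customGrad((x: tf.Tensor, save: tf.GradSaveFunc) => {
  save([x]);
  return {
    value: forward(x),
    gradFunc: (dy: tf.Tensor, saved: tf.Tensor[]) => {
      const [xs] = saved;
      // Recompute the forward pass now and differentiate it.
      return tf.grads((xi: tf.Tensor) => forward(xi))([xs], dy);
    },
  };
});

// Gradients flow through `checkpointed` like any other op.
const x = tf.randomNormal([4, 4]);
const dx = tf.grad((xi: tf.Tensor) => checkpointed(xi).sum())(x);
```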
package/dist/layers/CausalSelfAttention.d.ts
CHANGED
@@ -26,7 +26,6 @@ export default class CausalSelfAttention extends BaseLayer<AttentionForwardAttri
     constructor(index: number, config: GPTConfig, parent?: BaseLayer);
     protected build(): void;
     private getAttentionScores;
-    private getAttentionScoresWithPast;
     private getQKV;
     private getOutputProjection;
     private updateCache;
package/dist/layers/CausalSelfAttention.js
CHANGED
@@ -1,83 +1,84 @@
-import { attentionMask as
-import
-import {
-import {
-import {
-import {
-import {
-import {
-import {
-import {
-import {
-import {
-
+import { attentionMask as P } from "../ops/attentionMask.js";
+import _ from "./BaseLayer.js";
+import { r as k } from "../rope-B5UUMsPi.js";
+import { appendCache as b } from "../ops/appendCache.js";
+import { k as u, t as S } from "../index-ZyQhjEPo.js";
+import { softmax16 as C } from "../ops/softmax16.js";
+import { b as R } from "../matMul16--R5hOwDG.js";
+import { p as V } from "../pack16-CFUqumar.js";
+import { transpose16 as J } from "../ops/transpose16.js";
+import { dot16 as L } from "../ops/dot16.js";
+import { reshape16 as N } from "../ops/reshape16.js";
+import { isPackedTensor as l } from "../utilities/packed.js";
+import { qkv as j } from "../ops/qkv.js";
+import { v as T } from "../variable-Bhn5bHYv.js";
+import { r as v, d as y } from "../dropout-C1pM3f11.js";
+class X extends _ {
   divisor;
   index;
   units;
   projUnits;
   ATTN;
   PROJ;
-  constructor(t,
-    super(
+  constructor(t, o, s) {
+    super(o, s), this.index = t, this.units = o.nEmbed * 3, this.projUnits = o.nEmbed, this.ATTN = `block_${this.index}_cAttn`, this.PROJ = `block_${this.index}_cProj`, this.addVariable(this.ATTN), this.addVariable(this.PROJ), this.divisor = 1 / Math.sqrt(o.nEmbed / o.nHead);
   }
   build() {
     this.hasVariable(this.ATTN) === !1 && this.setVariable(
       this.ATTN,
-
-
+      T(
+        v([this.config.nEmbed, this.units], 0, 0.02),
       !0,
       `block_${this.index}_attn_cAttn_kernel`
       )
     ), this.hasVariable(this.PROJ) === !1 && this.setVariable(
       this.PROJ,
-
-
+      T(
+        v([this.projUnits, this.config.nEmbed], 0, 0.02),
       !0,
       `block_${this.index}_attn_cProj_kernel`
       )
     );
   }
-  getAttentionScores(t,
-    const
-    return
-  }
-  // Attention with optional past. If pastLen > 0 and T_cur == 1, no mask needed.
-  getAttentionScoresWithPast(t, i, s) {
-    const o = g(t, i, this.divisor, s), e = b(o, 0, 0);
-    return o.dispose(), e;
+  getAttentionScores(t, o, s) {
+    const i = P(t, o, this.divisor, s), e = C(i);
+    return i.dispose(), e;
   }
   getQKV(t) {
-
+    const o = l(t) ? V(this.getVariable(this.ATTN)) : this.getVariable(this.ATTN), s = j(t, o, this.config.nHead);
+    return l(t) && o.dispose(), s;
   }
   getOutputProjection(t) {
-    const
-
+    const o = t.shape[0], s = t.shape[2], i = this.config.nEmbed, e = l(t), r = J(t, [0, 2, 1, 3]), n = N(r, [o, s, e ? i / 2 : i]);
+    r.dispose();
+    const a = e ? V(this.getVariable(this.PROJ)) : this.getVariable(this.PROJ), h = L(n, a);
+    return e && a.dispose(), n.dispose(), h;
   }
-  updateCache(t,
-    const
+  updateCache(t, o, s) {
+    const i = this.config.blockSize, e = t.shape[2], r = s.length || 0, n = b(t, i, r, s.k);
     t.dispose(), s.k && s.k.dispose();
-    const
-
-    const
-    s.length =
+    const a = b(o, i, r, s.v);
+    o.dispose(), s.v && s.v.dispose();
+    const h = Math.min(r + e, i), d = s.cumulativeLength + e;
+    s.length = h, s.cumulativeLength = d, s.k = u(n), s.v = u(a);
   }
-  forward(t,
-    return
+  forward(t, o) {
+    return S(() => {
       this.startMemory();
-      const [s,
-
-      const
-      t.pastKV && !t.training && this.updateCache(
-      const
-      let
-
-      const
-
-      const A = this.getOutputProjection(
-      if (
-        const
-        t.attentionScores.attentionOut?.push(
-
+      const [s, i, e] = this.getQKV(o), r = t.pastKV ? t.pastKV.cumulativeLength : 0, n = t.ropeCache, a = n ? k(s, n, r) : s, h = n ? k(i, n, r) : i;
+      n && (s.dispose(), i.dispose());
+      const d = t.pastKV ? t.pastKV.length : 0;
+      t.pastKV && !t.training && this.updateCache(h, e, t.pastKV);
+      const c = t.pastKV?.k ? t.pastKV.k : h, m = t.pastKV?.v ? t.pastKV.v : e;
+      let p;
+      d > 0 ? p = this.getAttentionScores(a, c, d) : p = this.getAttentionScores(a, c), a.dispose(), t.pastKV || c.dispose();
+      const f = R(p, m), g = t.attentionScores !== void 0 && t.attentionScores.attentionOut !== void 0;
+      g || p.dispose(), t.pastKV || m.dispose();
+      const A = this.getOutputProjection(f);
+      if (f.dispose(), g && t.attentionScores && t.attentionScores.attentionOut !== void 0) {
+        const O = p.shape[1], K = p.shape[2];
+        console.log("Outputting attention scores shape:", p.shape), t.attentionScores.attentionOut?.push(
+          u(p.slice([0, 0, 0, 0], [1, -1, -1, -1]).reshape([O, K, -1]))
        );
       }
       return this.endMemory("CausalSelfAttention"), A;
@@ -85,12 +86,12 @@ class W extends O {
   }
   dropout(t) {
     if (this.config.dropout > 0) {
-      const
-      return t.dispose(),
+      const o = y(t, this.config.dropout);
+      return t.dispose(), o;
     } else
       return t;
   }
 }
 export {
-
+  X as default
 };
package/dist/layers/MLP.js
CHANGED
@@ -1,11 +1,10 @@
-import { t as p } from "../index-
+import { t as p } from "../index-ZyQhjEPo.js";
 import u from "./BaseLayer.js";
-import {
-import {
-import {
-import { r as h } from "../
-
-class H extends u {
+import { c, b as M } from "../matMul16--R5hOwDG.js";
+import { reshape16 as o } from "../ops/reshape16.js";
+import { v as d } from "../variable-Bhn5bHYv.js";
+import { r as h, d as b } from "../dropout-C1pM3f11.js";
+class O extends u {
   index;
   hiddenUnits;
   MLPHIDDEN;
@@ -16,15 +15,15 @@ class H extends u {
   build() {
     this.hasVariable(this.MLPHIDDEN) === !1 && this.setVariable(
       this.MLPHIDDEN,
-
-
+      d(
+        h([this.config.nEmbed, this.hiddenUnits], 0, 0.02),
       !0,
       `block_${this.index}_mlpHidden_kernel`
       )
     ), this.hasVariable(this.MLPOUT) === !1 && this.setVariable(
       this.MLPOUT,
-
-
+      d(
+        h([this.hiddenUnits, this.config.nEmbed], 0, 0.02 / Math.sqrt(2 * this.config.nLayer)),
       !0,
       `block_${this.index}_mlpOut_kernel`
       )
@@ -33,20 +32,20 @@ class H extends u {
   forward(i, t) {
     return p(() => {
       this.startMemory();
-      const [s, e, r] = t.shape, n =
-
-      const m =
+      const [s, e, r] = t.shape, n = o(t, [s * e, r]), a = c(n, this.getVariable(this.MLPHIDDEN)), l = M(a, this.getVariable(this.MLPOUT));
+      a.dispose();
+      const m = o(l, [s, e, r]);
       return this.endMemory("MLP"), m;
     });
   }
   dropout(i) {
     if (this.config.dropout > 0) {
-      const t =
+      const t = b(i, this.config.dropout);
       return i.dispose(), t;
     }
     return i;
   }
 }
 export {
-
+  O as default
 };
package/dist/layers/PositionEmbedding.js
CHANGED
@@ -1,18 +1,9 @@
-import { t as c,
+import { t as c, I as u, b as i } from "../index-ZyQhjEPo.js";
 import f from "./BaseLayer.js";
-import { E as g, D as h } from "../random_width-
-import { r as b } from "../exports_initializers-
-import { m as l } from "../mod-
-import { r as w } from "../range-
-/**
- * @license
- * Copyright 2018 Google LLC
- *
- * Use of this source code is governed by an MIT-style
- * license that can be found in the LICENSE file or at
- * https://opensource.org/licenses/MIT.
- * =============================================================================
- */
+import { E as g, D as h } from "../random_width-DY6Kk2Dl.js";
+import { r as b } from "../exports_initializers-xuidcwI4.js";
+import { m as l } from "../mod-Gt1rMB4n.js";
+import { r as w } from "../range-BMS52eQi.js";
 function E(t) {
   return new h(t);
 }
package/dist/layers/RMSNorm.js
CHANGED
@@ -1,8 +1,8 @@
-import { t as s } from "../index-
+import { t as s } from "../index-ZyQhjEPo.js";
 import e from "./BaseLayer.js";
 import { normRMS as a } from "../ops/normRMS.js";
-import { v as i } from "../variable-
-import { o as m } from "../ones-
+import { v as i } from "../variable-Bhn5bHYv.js";
+import { o as m } from "../ones-CAMiP4I2.js";
 class l extends e {
   GAMMA;
   constructor(r, t = "", o) {
package/dist/layers/RoPECache.d.ts
CHANGED
@@ -7,10 +7,12 @@ export default class RoPECache {
     private ropeInvFreq;
     private ropeCos;
     private ropeSin;
+    private ropeNegSin;
     private ropeCacheLen;
     constructor(config: GPTConfig);
     ensureRopeCache(needed: number): void;
     getCos(): Tensor | null;
     getSin(): Tensor | null;
+    getNegSin(): Tensor | null;
     dispose(): void;
 }