@genai-fi/nanogpt 0.9.0 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (343)
  1. package/README.md +352 -14
  2. package/dist/Generator.js +69 -78
  3. package/dist/{RealDiv-D4EzDsC0.js → RealDiv-DgA3z9oO.js} +32 -206
  4. package/dist/Reshape-CF6odzV4.js +16 -0
  5. package/dist/Reshape-_kILl6tK.js +81 -0
  6. package/dist/TeachableLLM.js +28 -22
  7. package/dist/Trainer.d.ts +2 -0
  8. package/dist/Trainer.js +3 -2
  9. package/dist/{axis_util-TbGYJ208.js → axis_util-BvHEw88j.js} +7 -23
  10. package/dist/backend.d.ts +2 -1
  11. package/dist/backend.js +10 -4
  12. package/dist/backend_util-D-rUb2ty.js +474 -0
  13. package/dist/backend_webgpu-B0u2ndUn.js +547 -0
  14. package/dist/binary_op_util-pKXltfxI.js +192 -0
  15. package/dist/broadcast_to-CwF7XIeu.js +30 -0
  16. package/dist/checks/appendCache.js +2 -2
  17. package/dist/checks/attentionMask.js +3 -3
  18. package/dist/checks/check.d.ts +1 -1
  19. package/dist/checks/check.js +8 -8
  20. package/dist/checks/gelu.js +2 -2
  21. package/dist/checks/index.d.ts +2 -0
  22. package/dist/checks/index.js +7 -5
  23. package/dist/checks/matMulGelu.js +6 -6
  24. package/dist/checks/normRMS.js +7 -7
  25. package/dist/checks/normRMSGrad.js +3 -3
  26. package/dist/checks/packUnpack.d.ts +1 -0
  27. package/dist/checks/packUnpack.js +18 -0
  28. package/dist/checks/qkv.js +12 -27
  29. package/dist/checks/rope.js +2 -2
  30. package/dist/checks/weights.js +18 -16
  31. package/dist/complex-CSlYz-2T.js +13 -0
  32. package/dist/complex_util-Yc1A_gV1.js +55 -0
  33. package/dist/concat-BHlIJeyT.js +19 -0
  34. package/dist/concat_util-DcJk7YHS.js +22 -0
  35. package/dist/data/docx.js +1 -1
  36. package/dist/data/parquet.js +2 -2
  37. package/dist/data/pdf.js +1 -1
  38. package/dist/data/textLoader.js +1 -1
  39. package/dist/{dataset-DlZtKmBq.js → dataset-0xP8GjwI.js} +136 -236
  40. package/dist/dropout-C1pM3f11.js +99 -0
  41. package/dist/expand_dims-BPG4fwBP.js +13 -0
  42. package/dist/exports_initializers-xuidcwI4.js +7 -0
  43. package/dist/gather-DykLGqmW.js +10 -0
  44. package/dist/{gelu-Bp_-935b.js → gelu-CNLFZWea.js} +11 -10
  45. package/dist/{gpgpu_math-CDaYiyE_.js → gpgpu_math-DDVJCn6-.js} +90 -265
  46. package/dist/{index-C4L8Cm77.js → index-CieiGp4Y.js} +14 -14
  47. package/dist/index-CjOj7j-u.js +7308 -0
  48. package/dist/{index-Tf7vU29b.js → index-Cp39cXWe.js} +3 -10
  49. package/dist/{index-Dwqa6Zy2.js → index-DvYrXKkX.js} +2 -2
  50. package/dist/index-ZyQhjEPo.js +2157 -0
  51. package/dist/{jszip.min-CjP2V1VV.js → jszip.min-Bz5-11Bk.js} +56 -57
  52. package/dist/kernel_funcs_utils-Dg_-E44D.js +308 -0
  53. package/dist/layers/BaseLayer.d.ts +1 -0
  54. package/dist/layers/BaseLayer.js +7 -6
  55. package/dist/layers/CausalSelfAttention.d.ts +0 -1
  56. package/dist/layers/CausalSelfAttention.js +56 -55
  57. package/dist/layers/MLP.js +15 -16
  58. package/dist/layers/PositionEmbedding.js +5 -14
  59. package/dist/layers/RMSNorm.js +3 -3
  60. package/dist/layers/RoPECache.d.ts +2 -0
  61. package/dist/layers/RoPECache.js +22 -17
  62. package/dist/layers/TiedEmbedding.js +22 -17
  63. package/dist/layers/TransformerBlock.js +21 -20
  64. package/dist/loader/load.js +1 -1
  65. package/dist/loader/loadTransformers.js +1 -1
  66. package/dist/loader/oldZipLoad.js +39 -33
  67. package/dist/loader/save.js +1 -1
  68. package/dist/log_sum_exp-DWI-76TI.js +41 -0
  69. package/dist/main.d.ts +8 -0
  70. package/dist/main.js +63 -52
  71. package/dist/matMul16--R5hOwDG.js +77 -0
  72. package/dist/mat_mul-DeAh4uTH.js +12 -0
  73. package/dist/mod-Gt1rMB4n.js +12 -0
  74. package/dist/models/NanoGPTV1.js +40 -31
  75. package/dist/models/model.d.ts +2 -0
  76. package/dist/models/model.js +37 -29
  77. package/dist/{mulmat_packed_gpu-BT60jmzP.js → mulmat_packed_gpu-BMFhLwta.js} +1 -17
  78. package/dist/{non_max_suppression_impl-CsEgBuMA.js → non_max_suppression_impl-B2W7YjZB.js} +0 -32
  79. package/dist/ones-CAMiP4I2.js +15 -0
  80. package/dist/ops/adamAdjust.js +1 -1
  81. package/dist/ops/adamMoments.d.ts +1 -1
  82. package/dist/ops/adamMoments.js +4 -4
  83. package/dist/ops/add16.d.ts +2 -0
  84. package/dist/ops/add16.js +9 -0
  85. package/dist/ops/appendCache.js +16 -9
  86. package/dist/ops/attentionMask.js +4 -4
  87. package/dist/ops/concat16.d.ts +2 -0
  88. package/dist/ops/concat16.js +9 -0
  89. package/dist/ops/cpu/adamAdjust.js +14 -13
  90. package/dist/ops/cpu/adamMoments.js +10 -9
  91. package/dist/ops/cpu/appendCache.js +9 -8
  92. package/dist/ops/cpu/attentionMask.js +15 -14
  93. package/dist/ops/cpu/fusedSoftmax.js +13 -12
  94. package/dist/ops/cpu/gatherSub.js +9 -24
  95. package/dist/ops/cpu/gelu.js +13 -12
  96. package/dist/ops/cpu/matMul16.d.ts +1 -0
  97. package/dist/ops/cpu/matMul16.js +16 -0
  98. package/dist/ops/cpu/matMulGelu.js +18 -16
  99. package/dist/ops/cpu/matMulMul.js +8 -7
  100. package/dist/ops/cpu/mulDropout.js +4 -3
  101. package/dist/ops/cpu/normRMS.js +11 -10
  102. package/dist/ops/cpu/qkv.js +17 -13
  103. package/dist/ops/cpu/rope.js +23 -22
  104. package/dist/ops/cpu/scatterSub.js +16 -30
  105. package/dist/ops/dot16.d.ts +2 -0
  106. package/dist/ops/dot16.js +42 -0
  107. package/dist/ops/gatherSub.js +1 -1
  108. package/dist/ops/gelu.js +2 -2
  109. package/dist/ops/grads/add16.d.ts +1 -0
  110. package/dist/ops/grads/add16.js +27 -0
  111. package/dist/ops/grads/attentionMask.js +12 -19
  112. package/dist/ops/grads/gelu.js +4 -3
  113. package/dist/ops/grads/matMul16.d.ts +2 -0
  114. package/dist/ops/grads/matMul16.js +9 -0
  115. package/dist/ops/grads/matMulGelu.js +8 -7
  116. package/dist/ops/grads/normRMS.js +8 -7
  117. package/dist/ops/grads/{fusedSoftmax.d.ts → pack16.d.ts} +1 -1
  118. package/dist/ops/grads/pack16.js +7 -0
  119. package/dist/ops/grads/qkv.d.ts +3 -1
  120. package/dist/ops/grads/qkv.js +28 -22
  121. package/dist/ops/grads/rope.d.ts +2 -1
  122. package/dist/ops/grads/rope.js +6 -13
  123. package/dist/ops/grads/softmax16.d.ts +2 -0
  124. package/dist/ops/grads/softmax16.js +26 -0
  125. package/dist/ops/grads/unpack16.d.ts +2 -0
  126. package/dist/ops/grads/unpack16.js +6 -0
  127. package/dist/ops/grads/utils.d.ts +3 -0
  128. package/dist/ops/grads/utils.js +10 -0
  129. package/dist/ops/matMul16.d.ts +15 -0
  130. package/dist/ops/matMul16.js +13 -0
  131. package/dist/ops/matMulGelu.js +1 -1
  132. package/dist/ops/matMulMul.js +1 -1
  133. package/dist/ops/mul16.d.ts +2 -0
  134. package/dist/ops/mul16.js +8 -0
  135. package/dist/ops/mulDrop.js +1 -1
  136. package/dist/ops/normRMS.js +1 -1
  137. package/dist/ops/pack16.d.ts +2 -0
  138. package/dist/ops/pack16.js +6 -0
  139. package/dist/ops/qkv.d.ts +1 -1
  140. package/dist/ops/qkv.js +8 -4
  141. package/dist/ops/reshape16.d.ts +2 -0
  142. package/dist/ops/reshape16.js +43 -0
  143. package/dist/ops/rope.d.ts +1 -1
  144. package/dist/ops/rope.js +8 -10
  145. package/dist/ops/scatterSub.js +1 -1
  146. package/dist/ops/slice16.d.ts +2 -0
  147. package/dist/ops/slice16.js +9 -0
  148. package/dist/ops/softmax16.d.ts +2 -0
  149. package/dist/ops/softmax16.js +12 -0
  150. package/dist/ops/sub16.d.ts +2 -0
  151. package/dist/ops/sub16.js +8 -0
  152. package/dist/ops/sum16.d.ts +2 -0
  153. package/dist/ops/sum16.js +13 -0
  154. package/dist/ops/transpose16.d.ts +3 -0
  155. package/dist/ops/transpose16.js +41 -0
  156. package/dist/ops/unpack16.d.ts +2 -0
  157. package/dist/ops/unpack16.js +6 -0
  158. package/dist/ops/webgl/adamAdjust.js +3 -2
  159. package/dist/ops/webgl/adamMoments.js +2 -1
  160. package/dist/ops/webgl/appendCache.js +2 -1
  161. package/dist/ops/webgl/attentionMask.js +5 -4
  162. package/dist/ops/webgl/fusedSoftmax.js +6 -4
  163. package/dist/ops/webgl/gatherSub.js +7 -6
  164. package/dist/ops/webgl/gelu.js +3 -2
  165. package/dist/ops/webgl/log.js +12 -27
  166. package/dist/ops/webgl/matMul16.d.ts +1 -0
  167. package/dist/ops/webgl/matMul16.js +37 -0
  168. package/dist/ops/webgl/matMulGelu.js +17 -15
  169. package/dist/ops/webgl/matMulMul.js +13 -12
  170. package/dist/ops/webgl/mulDropout.js +9 -8
  171. package/dist/ops/webgl/normRMS.js +8 -7
  172. package/dist/ops/webgl/qkv.js +6 -5
  173. package/dist/ops/webgl/rope.js +11 -10
  174. package/dist/ops/webgl/scatterSub.js +6 -5
  175. package/dist/ops/webgpu/adamAdjust.js +12 -10
  176. package/dist/ops/webgpu/adamMoments.js +27 -22
  177. package/dist/ops/webgpu/add16.d.ts +1 -0
  178. package/dist/ops/webgpu/add16.js +14 -0
  179. package/dist/ops/webgpu/appendCache.js +64 -17
  180. package/dist/ops/webgpu/attentionMask.js +19 -62
  181. package/dist/ops/webgpu/attentionMask32_program.d.ts +19 -0
  182. package/dist/ops/webgpu/attentionMask32_program.js +54 -0
  183. package/dist/ops/webgpu/concat16.d.ts +19 -0
  184. package/dist/ops/webgpu/concat16.js +128 -0
  185. package/dist/ops/webgpu/gatherSub.js +9 -7
  186. package/dist/ops/webgpu/gelu.js +78 -31
  187. package/dist/ops/webgpu/index.js +12 -0
  188. package/dist/ops/webgpu/matMul16.d.ts +1 -0
  189. package/dist/ops/webgpu/matMul16.js +58 -0
  190. package/dist/ops/webgpu/matMul16_program.d.ts +42 -0
  191. package/dist/ops/webgpu/matMul16_program.js +336 -0
  192. package/dist/ops/webgpu/mul16.d.ts +1 -0
  193. package/dist/ops/webgpu/mul16.js +14 -0
  194. package/dist/ops/webgpu/normRMS.js +21 -40
  195. package/dist/ops/webgpu/normRMS16_program.d.ts +9 -0
  196. package/dist/ops/webgpu/normRMS16_program.js +24 -0
  197. package/dist/ops/webgpu/normRMS32_program.d.ts +9 -0
  198. package/dist/ops/webgpu/normRMS32_program.js +24 -0
  199. package/dist/ops/webgpu/normRMSGrad.js +113 -64
  200. package/dist/ops/webgpu/pack16.d.ts +1 -0
  201. package/dist/ops/webgpu/pack16.js +19 -0
  202. package/dist/ops/webgpu/pack16_program.d.ts +19 -0
  203. package/dist/ops/webgpu/pack16_program.js +92 -0
  204. package/dist/ops/webgpu/qkv.js +20 -55
  205. package/dist/ops/webgpu/rope.js +77 -22
  206. package/dist/ops/webgpu/scatterSub.js +9 -7
  207. package/dist/ops/webgpu/slice16.d.ts +7 -0
  208. package/dist/ops/webgpu/slice16.js +71 -0
  209. package/dist/{variable-Bm2OFwGI.js → ops/webgpu/softmax16.d.ts} +2 -8
  210. package/dist/ops/webgpu/softmax16.js +23 -0
  211. package/dist/ops/webgpu/softmax16_program.d.ts +13 -0
  212. package/dist/ops/webgpu/softmax16_program.js +73 -0
  213. package/dist/ops/webgpu/softmax16_subgroup_program.d.ts +17 -0
  214. package/dist/ops/webgpu/softmax16_subgroup_program.js +75 -0
  215. package/dist/ops/webgpu/softmax16grad.d.ts +1 -0
  216. package/dist/ops/webgpu/softmax16grad.js +38 -0
  217. package/dist/ops/webgpu/sub16.d.ts +1 -0
  218. package/dist/ops/webgpu/sub16.js +14 -0
  219. package/dist/ops/webgpu/sum16.d.ts +1 -0
  220. package/dist/ops/webgpu/sum16.js +40 -0
  221. package/dist/ops/webgpu/transpose16.d.ts +1 -0
  222. package/dist/ops/webgpu/transpose16.js +35 -0
  223. package/dist/ops/webgpu/transpose16_program.d.ts +16 -0
  224. package/dist/ops/webgpu/transpose16_program.js +50 -0
  225. package/dist/ops/webgpu/transpose16_shared_program.d.ts +15 -0
  226. package/dist/ops/webgpu/transpose16_shared_program.js +71 -0
  227. package/dist/ops/webgpu/unpack16.d.ts +1 -0
  228. package/dist/ops/webgpu/unpack16.js +49 -0
  229. package/dist/ops/webgpu/utils/binary_op.d.ts +19 -0
  230. package/dist/ops/webgpu/utils/binary_op.js +79 -0
  231. package/dist/ops/webgpu/utils/deviceInfo.d.ts +7 -0
  232. package/dist/ops/webgpu/utils/deviceInfo.js +11 -0
  233. package/dist/ops/webgpu/utils/reductions.d.ts +32 -4
  234. package/dist/ops/webgpu/utils/reductions.js +236 -45
  235. package/dist/ops-CNI3TwqM.js +645 -0
  236. package/dist/pack16-CFUqumar.js +41 -0
  237. package/dist/{papaparse.min-C8l2Kvo1.js → papaparse.min-C0cScC2i.js} +2 -8
  238. package/dist/{parquet-C0Tlmv9c.js → parquet-BE8MU_ge.js} +201 -278
  239. package/dist/patches/PackedTensor.d.ts +12 -0
  240. package/dist/patches/PackedTensor.js +11 -0
  241. package/dist/patches/engine.d.ts +261 -0
  242. package/dist/patches/engine.js +10 -0
  243. package/dist/patches/tape.d.ts +12 -0
  244. package/dist/patches/tape.js +5 -0
  245. package/dist/patches/webgpu_backend.d.ts +18 -0
  246. package/dist/patches/webgpu_backend.js +57 -0
  247. package/dist/{tensor-CZr4dh61.js → patches/webgpu_base.d.ts} +5 -8
  248. package/dist/patches/webgpu_base.js +34 -0
  249. package/dist/patches/webgpu_program.d.ts +36 -0
  250. package/dist/patches/webgpu_program.js +401 -0
  251. package/dist/{pdf-kJD-f258.js → pdf-NIhmP3sq.js} +424 -428
  252. package/dist/random_width-DY6Kk2Dl.js +10051 -0
  253. package/dist/range-BMS52eQi.js +11 -0
  254. package/dist/reciprocal-CTmshQ9J.js +10 -0
  255. package/dist/{register_all_kernels-DIGpEwcf.js → register_all_kernels-Bwu1PTuU.js} +719 -9766
  256. package/dist/relu-yZ2-7WxU.js +10 -0
  257. package/dist/reshape-DevtBWtf.js +10 -0
  258. package/dist/rope-B5UUMsPi.js +32 -0
  259. package/dist/{scatter_nd_util-BQdz--Gn.js → scatter_nd_util-5EL-8VAQ.js} +1 -1
  260. package/dist/selu_util-D1w6yyTO.js +303 -0
  261. package/dist/{shared-DuP7ue-R.js → shared-BRksrJb3.js} +1 -17
  262. package/dist/shared-BuAXb4CI.js +2145 -0
  263. package/dist/sin-BGfy2HZo.js +16 -0
  264. package/dist/slice-D_gkkqZK.js +13 -0
  265. package/dist/slice_util-DtEldBfK.js +261 -0
  266. package/dist/softmax-ZHVebtR1.js +13 -0
  267. package/dist/split-DrfihRpZ.js +10 -0
  268. package/dist/squeeze-DZEpeblb.js +11 -0
  269. package/dist/stack-yOIAalTq.js +13 -0
  270. package/dist/sum-_fzj5ZTB.js +12 -0
  271. package/dist/tensor-DdQUJZlz.js +909 -0
  272. package/dist/tensor-f35l8Odg.js +8 -0
  273. package/dist/tensor1d-CeZuc-Rv.js +12 -0
  274. package/dist/tensor2d-G4Ys2GxX.js +15 -0
  275. package/dist/tensor4d-B8roDgtc.js +15 -0
  276. package/dist/tensor_util-DV-FP5Q3.js +523 -0
  277. package/dist/tfjs_backend-kNyO5L2d.js +653 -0
  278. package/dist/tile-BzyEiF-F.js +13 -0
  279. package/dist/tokeniser/CharTokeniser.js +1 -1
  280. package/dist/tokeniser/bpe.js +1 -1
  281. package/dist/training/Adam.d.ts +2 -1
  282. package/dist/training/Adam.js +12 -28
  283. package/dist/training/AdamExt.d.ts +1 -0
  284. package/dist/training/AdamExt.js +2 -2
  285. package/dist/training/DatasetBuilder.js +3 -20
  286. package/dist/training/FullTrainer.js +82 -64
  287. package/dist/training/Trainer.d.ts +11 -6
  288. package/dist/training/Trainer.js +51 -39
  289. package/dist/training/sparseCrossEntropy.js +3 -3
  290. package/dist/transpose-DKELTqhe.js +38 -0
  291. package/dist/utilities/arrayClose.js +7 -7
  292. package/dist/utilities/dummy.js +35 -27
  293. package/dist/utilities/multinomialCPU.js +2 -2
  294. package/dist/utilities/packed.d.ts +7 -0
  295. package/dist/utilities/packed.js +716 -0
  296. package/dist/utilities/performance.js +1 -1
  297. package/dist/utilities/profile.js +1 -1
  298. package/dist/utilities/safetensors.js +2 -2
  299. package/dist/utilities/sentences.d.ts +5 -0
  300. package/dist/utilities/sentences.js +41 -0
  301. package/dist/utilities/weights.js +2 -2
  302. package/dist/variable-Bhn5bHYv.js +7 -0
  303. package/dist/{webgpu_program-DkQJOJSd.js → webgpu_program-Cigz-7RF.js} +15 -44
  304. package/dist/webgpu_util-BBCnKm2X.js +65 -0
  305. package/dist/zeros-2gldETuK.js +14 -0
  306. package/package.json +4 -3
  307. package/dist/Reshape-Bowtk9BP.js +0 -127
  308. package/dist/Reshape-DUqYftGC.js +0 -30
  309. package/dist/backend_util-CJIiDoV1.js +0 -749
  310. package/dist/broadcast_to-DzlNweb8.js +0 -44
  311. package/dist/concat-B912vBbo.js +0 -33
  312. package/dist/dropout-C-csYCLj.js +0 -193
  313. package/dist/exports_initializers-B8iZMgQ0.js +0 -16
  314. package/dist/gather-Dnpgw-YQ.js +0 -25
  315. package/dist/index-BzFyqcy-.js +0 -4457
  316. package/dist/index-C1rx_Ajs.js +0 -12076
  317. package/dist/kernel_funcs_utils-DKLK0Mg3.js +0 -466
  318. package/dist/log_sum_exp-DO6z8tSE.js +0 -103
  319. package/dist/mat_mul-DzjTFx-u.js +0 -27
  320. package/dist/mod-Dobti4j4.js +0 -27
  321. package/dist/ones-tIJeHlq-.js +0 -29
  322. package/dist/ops/fusedSoftmax.d.ts +0 -2
  323. package/dist/ops/fusedSoftmax.js +0 -10
  324. package/dist/ops/grads/fusedSoftmax.js +0 -22
  325. package/dist/ops-LuCMAnmM.js +0 -1525
  326. package/dist/random_width-CXVRloNK.js +0 -13670
  327. package/dist/range-CWcz7xFA.js +0 -26
  328. package/dist/reciprocal-C4rNcM-S.js +0 -25
  329. package/dist/relu-BjCh_SYb.js +0 -25
  330. package/dist/reshape-CnIwVG1c.js +0 -25
  331. package/dist/selu_util-OtRzVwW5.js +0 -719
  332. package/dist/shared-DmRsFyaJ.js +0 -3134
  333. package/dist/sin-gpDNRxE0.js +0 -47
  334. package/dist/slice-d0Vo9XTN.js +0 -28
  335. package/dist/softmax-D7Jj3p_P.js +0 -28
  336. package/dist/split-DK2k5eHf.js +0 -25
  337. package/dist/stack-DFatutCx.js +0 -27
  338. package/dist/sum-CJ0ULhmt.js +0 -27
  339. package/dist/tensor1d-vML0r3q6.js +0 -27
  340. package/dist/tensor2d-D76QGjF3.js +0 -30
  341. package/dist/tensor4d-Df1WlVDY.js +0 -30
  342. package/dist/webgpu_util-pLEV9tks.js +0 -80
  343. package/dist/zeros-Bj5rMYA7.js +0 -52
package/dist/layers/RoPECache.js CHANGED
@@ -1,16 +1,16 @@
- import { b as t, x as h, t as n, k as p } from "../index-BzFyqcy-.js";
- import { r as c } from "../reciprocal-C4rNcM-S.js";
- import { c as f, s as m } from "../sin-gpDNRxE0.js";
- import { r as a } from "../range-CWcz7xFA.js";
- class D {
- constructor(o) {
- this.config = o;
- const e = this.config.nEmbed / this.config.nHead;
- if (this.rotaryDim = e, this.rotaryDim % 2 !== 0)
+ import { b as n, p as a, t as p, k as r } from "../index-ZyQhjEPo.js";
+ import { r as c } from "../reciprocal-CTmshQ9J.js";
+ import { c as l, s as f } from "../sin-BGfy2HZo.js";
+ import { r as h } from "../range-BMS52eQi.js";
+ class g {
+ constructor(s) {
+ this.config = s;
+ const i = this.config.nEmbed / this.config.nHead;
+ if (this.rotaryDim = i, this.rotaryDim % 2 !== 0)
  throw new Error("rotaryDim must be even");
  this.ropeBase = 1e4;
- const i = a(0, this.rotaryDim, 2, "float32"), s = i.div(t(this.rotaryDim, "float32")), r = h(t(this.ropeBase, "float32"), s);
- this.ropeInvFreq = c(r), s.dispose(), r.dispose(), i.dispose(), this.config.useRope === !1 ? (this.ropeCos = null, this.ropeSin = null, this.ropeCacheLen = 0) : n(() => {
+ const o = h(0, this.rotaryDim, 2, "float32"), e = o.div(n(this.rotaryDim, "float32")), t = a(n(this.ropeBase, "float32"), e);
+ this.ropeInvFreq = c(t), e.dispose(), t.dispose(), o.dispose(), this.config.useRope === !1 ? (this.ropeCos = null, this.ropeSin = null, this.ropeNegSin = null, this.ropeCacheLen = 0) : p(() => {
  this.ensureRopeCache(this.config.blockSize * 4);
  });
  }
@@ -21,13 +21,15 @@ class D {
  // [cacheLen, rotaryDim/2]
  ropeSin = null;
  // [cacheLen, rotaryDim/2]
+ ropeNegSin = null;
+ // [cacheLen, rotaryDim/2]
  ropeCacheLen = 0;
- ensureRopeCache(o) {
- n(() => {
- if (o <= this.ropeCacheLen) return;
+ ensureRopeCache(s) {
+ p(() => {
+ if (s <= this.ropeCacheLen) return;
  this.ropeCos && this.ropeCos.dispose(), this.ropeSin && this.ropeSin.dispose();
- const e = Math.max(o, this.ropeCacheLen + this.config.blockSize * 4), s = a(0, e, 1, "float32").expandDims(1).mul(this.ropeInvFreq.expandDims(0));
- this.ropeCos = p(f(s).expandDims(-1)), this.ropeSin = p(m(s).expandDims(-1)), this.ropeCacheLen = e;
+ const i = Math.max(s, this.ropeCacheLen + this.config.blockSize * 4), e = h(0, i, 1, "float32").expandDims(1).mul(this.ropeInvFreq.expandDims(0));
+ this.ropeCos = r(l(e).expandDims(-1)), this.ropeSin = r(f(e).expandDims(-1)), this.ropeNegSin = r(this.ropeSin.neg()), this.ropeCacheLen = i;
  });
  }
  getCos() {
@@ -36,10 +38,13 @@ class D {
  getSin() {
  return this.ropeSin;
  }
+ getNegSin() {
+ return this.ropeNegSin;
+ }
  dispose() {
  this.ropeCos && this.ropeCos.dispose(), this.ropeSin && this.ropeSin.dispose(), this.ropeInvFreq.dispose();
  }
  }
  export {
- D as default
+ g as default
  };
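De-minified for readability, the cache logic above amounts to roughly the following (a sketch reconstructed from this hunk; the readable names are assumptions inferred from the import sites, not the package's actual source):

    // invFreq[j] = base^(-idx[j] / rotaryDim) with idx = [0, 2, 4, ...] and base = 10000.
    const idx = range(0, rotaryDim, 2, "float32");
    const invFreq = reciprocal(pow(scalar(10000, "float32"), idx.div(scalar(rotaryDim, "float32"))));
    // angles[t][j] = t * invFreq[j] for every cached position t.
    const angles = range(0, cacheLen, 1, "float32").expandDims(1).mul(invFreq.expandDims(0));
    this.ropeCos = keep(cos(angles).expandDims(-1));
    this.ropeSin = keep(sin(angles).expandDims(-1));
    // New in 0.10.0: a precomputed negated sine table, exposed through getNegSin().
    this.ropeNegSin = keep(this.ropeSin.neg());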
package/dist/layers/TiedEmbedding.js CHANGED
@@ -1,31 +1,36 @@
- import { d as r } from "../random_width-CXVRloNK.js";
- import "../index-BzFyqcy-.js";
- import { r as a } from "../exports_initializers-B8iZMgQ0.js";
- import s from "./BaseLayer.js";
- import { v as o } from "../variable-Bm2OFwGI.js";
- import { g as m } from "../gather-Dnpgw-YQ.js";
- class S extends s {
+ import "../random_width-DY6Kk2Dl.js";
+ import "../index-ZyQhjEPo.js";
+ import { r as s } from "../exports_initializers-xuidcwI4.js";
+ import a from "./BaseLayer.js";
+ import { dot16 as o } from "../ops/dot16.js";
+ import { isPackedTensor as r } from "../utilities/packed.js";
+ import { p as m } from "../pack16-CFUqumar.js";
+ import { transpose16 as d } from "../ops/transpose16.js";
+ import { v as p } from "../variable-Bhn5bHYv.js";
+ import { g as h } from "../gather-DykLGqmW.js";
+ class g extends a {
  vocabSize;
  embedDim;
  initializer;
  WEIGHTS;
- constructor(i, t, e) {
- super(i, e), this.WEIGHTS = t, this.vocabSize = i.vocabSize, this.embedDim = i.nEmbed, this.initializer = a({
+ constructor(t, i, e) {
+ super(t, e), this.WEIGHTS = i, this.vocabSize = t.vocabSize, this.embedDim = t.nEmbed, this.initializer = s({
  mean: 0,
  stddev: 0.02
- }), this.addVariable(this.WEIGHTS, o(this.initializer.apply([this.vocabSize, this.embedDim]), !0, t));
+ }), this.addVariable(this.WEIGHTS, p(this.initializer.apply([this.vocabSize, this.embedDim]), !0, i));
  }
- embed(i) {
- return m(this.getVariable(this.WEIGHTS), i, 0);
+ embed(t) {
+ return h(this.getVariable(this.WEIGHTS), t, 0);
  }
- project(i) {
- return r(i, this.getVariable(this.WEIGHTS).transpose());
+ project(t) {
+ const i = r(t) ? m(this.getVariable(this.WEIGHTS), void 0, 32) : this.getVariable(this.WEIGHTS), e = d(i);
+ return r(t) && i.dispose(), o(t, e);
  }
  // Dummy, should not be used.
- forward(i, t) {
- return this.project(t);
+ forward(t, i) {
+ return this.project(i);
  }
  }
  export {
- S as default
+ g as default
  };
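The substantive change here is in project: when the incoming activations are a packed fp16 tensor, the fp32 weight matrix is packed to match before the transposed matmul, and the temporary packed copy is disposed afterwards. A rough de-minified sketch (identifier names inferred from the import specifiers; not authoritative):

    project(x) {
      const W = this.getVariable(this.WEIGHTS);
      // Pack the fp32 weights only when the activations are already fp16-packed.
      const weights = isPackedTensor(x) ? pack16(W, undefined, 32) : W;
      const Wt = transpose16(weights);
      if (isPackedTensor(x)) weights.dispose(); // drop the temporary packed copy
      return dot16(x, Wt); // previously: matMul(x, W.transpose())
    }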
package/dist/layers/TransformerBlock.js CHANGED
@@ -1,32 +1,33 @@
- import l from "./CausalSelfAttention.js";
- import p from "./MLP.js";
- import o from "./RMSNorm.js";
- import m from "./BaseLayer.js";
- import { k as n, t as h } from "../index-BzFyqcy-.js";
- class k extends m {
+ import p from "./CausalSelfAttention.js";
+ import m from "./MLP.js";
+ import d from "./RMSNorm.js";
+ import h from "./BaseLayer.js";
+ import { k as n, t as u } from "../index-ZyQhjEPo.js";
+ import { add16 as l } from "../ops/add16.js";
+ class g extends h {
  ln1;
  attn;
  ln2;
  mlp;
  index;
  skipped = !1;
- constructor(i, s, e) {
- super(s, e), this.index = i, this.ln1 = new o(s, `block_${this.index}_rms1`, this), this.attn = new l(this.index, s, this), this.ln2 = new o(s, `block_${this.index}_rms2`, this), this.mlp = new p(this.index, s, this);
+ constructor(e, s, i) {
+ super(s, i), this.index = e, this.ln1 = new d(s, `block_${this.index}_rms1`, this), this.attn = new p(this.index, s, this), this.ln2 = new d(s, `block_${this.index}_rms2`, this), this.mlp = new m(this.index, s, this);
  }
- getMLPOutput(i, s) {
- const e = this.ln2.call({ training: s.training }, i), t = this.mlp.call({ training: s.training }, e);
- s.outputEmbeddings ? (n(e), s.embeddings.push({ name: `block_ln2_${this.index}`, tensor: e })) : e.dispose();
- const d = i.add(t);
- return i.dispose(), s.outputEmbeddings ? (n(t), s.embeddings.push({ name: `block_mlp_out_${this.index}`, tensor: t })) : t.dispose(), d;
+ getMLPOutput(e, s) {
+ const i = this.ln2.call({ training: s.training }, e), t = this.mlp.call(s, i);
+ s.outputEmbeddings ? (n(i), s.embeddings.push({ name: `block_ln2_${this.index}`, tensor: i })) : i.dispose();
+ const o = l(e, t);
+ return e.dispose(), s.outputEmbeddings ? (n(t), s.embeddings.push({ name: `block_mlp_out_${this.index}`, tensor: t })) : t.dispose(), o;
  }
- forward(i, s) {
- return h(() => {
+ forward(e, s) {
+ return u(() => {
  if (this.skipped)
  return s;
- const e = this.ln1.call(i, s), t = this.attn.call(i, e);
- i.outputEmbeddings ? (n(e), i.embeddings.push({ name: `block_ln1_${this.index}`, tensor: e })) : e.dispose();
- const d = s.add(t);
- return i.outputEmbeddings ? (n(t), i.embeddings.push({ name: `block_attn_out_${this.index}`, tensor: t })) : t.dispose(), this.getMLPOutput(d, i);
+ const i = this.ln1.call(e, s), t = this.attn.call(e, i);
+ e.outputEmbeddings ? (n(i), e.embeddings.push({ name: `block_ln1_${this.index}`, tensor: i })) : i.dispose();
+ const o = l(s, t);
+ return e.outputEmbeddings ? (n(t), e.embeddings.push({ name: `block_attn_out_${this.index}`, tensor: t })) : t.dispose(), this.getMLPOutput(o, e);
  });
  }
  dispose() {
@@ -34,5 +35,5 @@ class k extends m {
  }
  }
  export {
- k as default
+ g as default
  };
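Aside from re-minification churn, the behavioural change in this block is that both residual connections now go through the new add16 op (presumably packed-fp16-aware, by analogy with the other *16 ops in this release) instead of a plain tensor add:

    // Before: const d = s.add(t);    // plain fp32 tensor add
    // After:  const o = add16(s, t); // add16 from ../ops/add16.js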
package/dist/loader/load.js CHANGED
@@ -1,4 +1,4 @@
- import { j as o } from "../jszip.min-CjP2V1VV.js";
+ import { j as o } from "../jszip.min-Bz5-11Bk.js";
  import i from "./oldZipLoad.js";
  import s from "./newZipLoad.js";
  import n from "./loadHF.js";
package/dist/loader/loadTransformers.js CHANGED
@@ -1,7 +1,7 @@
  import l from "../tokeniser/CharTokeniser.js";
  import c from "../tokeniser/bpe.js";
  import { load_safetensors as b } from "../utilities/safetensors.js";
- import { a2 as y } from "../index-BzFyqcy-.js";
+ import { H as y } from "../index-ZyQhjEPo.js";
  import { dummyPassAsync as u } from "../utilities/dummy.js";
  import _ from "../models/factory.js";
  async function L(e, a, r, t) {
package/dist/loader/oldZipLoad.js CHANGED
@@ -1,73 +1,79 @@
- import { a2 as y } from "../index-BzFyqcy-.js";
+ import "../utilities/packed.js";
+ import { H as y } from "../index-ZyQhjEPo.js";
  import "../ops/cpu/attentionMask.js";
  import "../ops/webgl/attentionMask.js";
  import "../ops/grads/attentionMask.js";
- import "../ops/cpu/qkv.js";
- import "../ops/webgl/qkv.js";
- import "../ops/grads/qkv.js";
- import "../random_width-CXVRloNK.js";
- import "../register_all_kernels-DIGpEwcf.js";
- import "../index-Tf7vU29b.js";
- import "../dataset-DlZtKmBq.js";
+ import "../random_width-DY6Kk2Dl.js";
+ import "../register_all_kernels-Bwu1PTuU.js";
+ import "../index-Cp39cXWe.js";
+ import "../dataset-0xP8GjwI.js";
  import "../ops/cpu/rope.js";
  import "../ops/webgl/rope.js";
- import "../ops/grads/rope.js";
+ import "../rope-B5UUMsPi.js";
  import "../ops/cpu/appendCache.js";
  import "../ops/webgl/appendCache.js";
- import "../ops/cpu/fusedSoftmax.js";
- import "../ops/webgl/fusedSoftmax.js";
- import "../ops/grads/fusedSoftmax.js";
- import "../ops/cpu/matMulGelu.js";
- import "../ops/webgl/matMulGelu.js";
- import "../ops/grads/matMulGelu.js";
+ import "../ops/grads/softmax16.js";
+ import "../matMul16--R5hOwDG.js";
+ import "../ops/webgl/matMul16.js";
+ import "../ops/cpu/matMul16.js";
+ import "../pack16-CFUqumar.js";
+ import "../ops/transpose16.js";
+ import "../ops/reshape16.js";
+ import "../ops/cpu/qkv.js";
+ import "../ops/webgl/qkv.js";
+ import "../ops/grads/qkv.js";
  import "../ops/cpu/normRMS.js";
  import "../ops/webgl/normRMS.js";
  import "../ops/grads/normRMS.js";
+ import "../ops/grads/add16.js";
  import "../ops/cpu/gatherSub.js";
  import "../ops/webgl/gatherSub.js";
  import "../ops/cpu/scatterSub.js";
  import "../ops/webgl/scatterSub.js";
- import "../jszip.min-CjP2V1VV.js";
+ import "../jszip.min-Bz5-11Bk.js";
  import h from "../tokeniser/CharTokeniser.js";
  import k from "../tokeniser/bpe.js";
  import { dummyPassAsync as g } from "../utilities/dummy.js";
  import b from "../models/factory.js";
  import "../Generator.js";
- import "../index-Dwqa6Zy2.js";
+ import "../index-DvYrXKkX.js";
  import "../ops/cpu/adamAdjust.js";
  import "../ops/webgl/adamAdjust.js";
  import "../ops/cpu/adamMoments.js";
  import "../ops/webgl/adamMoments.js";
- import "../papaparse.min-C8l2Kvo1.js";
+ import "../papaparse.min-C0cScC2i.js";
+ import "../ops/cpu/matMulGelu.js";
+ import "../ops/webgl/matMulGelu.js";
+ import "../ops/grads/matMulGelu.js";
  import "../ops/cpu/gelu.js";
  import "../ops/webgl/gelu.js";
- import "../gelu-Bp_-935b.js";
+ import "../gelu-CNLFZWea.js";
  import "../ops/webgl/log.js";
  import "../checks/normRMS.js";
  import "../checks/normRMSGrad.js";
  import { importWeights as u } from "../utilities/weights.js";
- async function ht(r) {
- const e = /* @__PURE__ */ new Map(), a = await r.file("manifest.json")?.async("string");
- if (!a)
+ async function Ot(r) {
+ const e = /* @__PURE__ */ new Map(), p = await r.file("manifest.json")?.async("string");
+ if (!p)
  throw new Error("Manifest file not found in the zip archive");
- const m = JSON.parse(a);
- for (const [t, o] of Object.entries(m.weightSpec))
+ const s = JSON.parse(p);
+ for (const [t, o] of Object.entries(s.weightSpec))
  e.set(t, { spec: o, data: new Float32Array() });
- const p = await r.file("tokeniser.json")?.async("string");
- if (!p)
+ const a = await r.file("tokeniser.json")?.async("string");
+ if (!a)
  throw new Error("Tokeniser file not found in the zip archive");
- const i = JSON.parse(p), f = (i.type ?? "char") === "char" ? new h(i.vocab) : new k(i.vocab, i.merges), c = /* @__PURE__ */ new Map();
+ const i = JSON.parse(a), f = (i.type ?? "char") === "char" ? new h(i.vocab) : new k(i.vocab, i.merges), c = /* @__PURE__ */ new Map();
  for (const t of Object.keys(r.files))
  if (t.endsWith(".bin")) {
- const o = t.replace(".bin", ""), l = await r.file(t).async("arraybuffer"), w = new Float32Array(l), s = e.get(o) || { spec: [], data: new Float32Array() };
- s.data = w, e.set(o, s);
- const d = await u(s);
+ const o = t.replace(".bin", ""), l = await r.file(t).async("arraybuffer"), w = new Float32Array(l), n = e.get(o) || { spec: [], data: new Float32Array() };
+ n.data = w, e.set(o, n);
+ const d = await u(n);
  c.set(o, d);
  }
  y();
- const n = b(m.config);
- return await g(n), n.loadWeights(c), { model: n, tokeniser: f };
+ const m = b(s.config);
+ return await g(m), m.loadWeights(c), { model: m, tokeniser: f };
  }
  export {
- ht as default
+ Ot as default
  };
package/dist/loader/save.js CHANGED
@@ -1,4 +1,4 @@
- import { j as g } from "../jszip.min-CjP2V1VV.js";
+ import { j as g } from "../jszip.min-Bz5-11Bk.js";
  import b from "../tokeniser/CharTokeniser.js";
  import { save_safetensors as p } from "../utilities/safetensors.js";
  import { VERSION as y } from "./load.js";
package/dist/log_sum_exp-DWI-76TI.js ADDED
@@ -0,0 +1,41 @@
+ import { A as e, B as x, E as p, c as E, n as $ } from "./index-ZyQhjEPo.js";
+ import { e as d } from "./axis_util-BvHEw88j.js";
+ import { y as h, z as S, L as K } from "./tensor_util-DV-FP5Q3.js";
+ import { r as m } from "./reshape-DevtBWtf.js";
+ import { s as _ } from "./sum-_fzj5ZTB.js";
+ import { p as T } from "./tensor-DdQUJZlz.js";
+ function b(s, o = null, n = !1) {
+ const t = { x: x(s, "x", "max") }, r = { reductionIndices: o, keepDims: n };
+ return p.runKernel(h, t, r);
+ }
+ const A = /* @__PURE__ */ e({ max_: b });
+ function I(s) {
+ const n = { x: x(s, "x", "exp") };
+ return p.runKernel(S, n);
+ }
+ const L = /* @__PURE__ */ e({ exp_: I });
+ function M(s) {
+ const n = { x: x(s, "x", "log", "float32") };
+ return p.runKernel(K, n);
+ }
+ const N = /* @__PURE__ */ e({ log_: M });
+ function v(s, o = null, n = !1) {
+ const a = x(s, "x", "logSumExp"), t = T(o, a.shape), r = A(
+ a,
+ t,
+ !0
+ /* keepDims */
+ ), i = E(a, r), l = L(i), f = _(l, t), u = N(f), c = $(m(r, u.shape), u);
+ if (n) {
+ const g = d(c.shape, t);
+ return m(c, g);
+ }
+ return c;
+ }
+ const P = /* @__PURE__ */ e({ logSumExp_: v });
+ export {
+ N as a,
+ L as e,
+ P as l,
+ A as m
+ };
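For reference, the max/subtract/exp/sum/log/add chain in this new chunk is the standard numerically stable log-sum-exp:

    logSumExp(x) = m + log(Σᵢ exp(xᵢ − m)),  where m = maxᵢ xᵢ

The max is taken with keepDims so the subtraction broadcasts, and the result is expanded back to the keepDims shape only when the caller asks for it.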
package/dist/main.d.ts CHANGED
@@ -1,3 +1,5 @@
+ import { pack16 } from './ops/pack16';
+ import { unpack16 } from './ops/unpack16';
  import { default as CausalSelfAttention } from './layers/CausalSelfAttention';
  import { default as MLP } from './layers/MLP';
  import { default as TransformerBlock } from './layers/TransformerBlock';
@@ -17,6 +19,11 @@ export type { TrainingProgress, TrainingLogEntry } from './training/Trainer';
  export type { GPTConfig } from './models/config';
  export { estimateParameterCount, estimateMemoryUsage, estimateTrainingMemoryUsage, estimateResources, validateConfig, } from './utilities/parameters';
  export { default as topP } from './utilities/topP';
+ declare const ops: {
+ pack16: typeof pack16;
+ unpack16: typeof unpack16;
+ };
+ export { ops };
  export { selectBackend } from './backend';
  export { default as performanceTest } from './utilities/performance';
  export declare const layers: {
@@ -28,3 +35,4 @@ export declare const layers: {
  export { default as AdamExt } from './training/AdamExt';
  export { default as checks } from './checks';
  export type { TensorStatistics } from './checks/weights';
+ export { sentenceEmbeddings, sentenceEmbeddingsTensor } from './utilities/sentences';
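The new public API surface is small: an ops namespace exposing pack16/unpack16, plus re-exported sentence-embedding helpers. A hedged usage sketch (this diff does not show the functions' signatures, so the argument lists below are assumptions):

    import { ops, sentenceEmbeddings } from "@genai-fi/nanogpt";

    // Round-trip a tensor through the packed fp16 representation (exact options assumed).
    const packed = ops.pack16(someFloat32Tensor);
    const restored = ops.unpack16(packed);

    // Re-exported from ./utilities/sentences; argument list assumed.
    const embeddings = await sentenceEmbeddings(model, tokeniser, ["an example sentence"]);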
package/dist/main.js CHANGED
@@ -1,14 +1,15 @@
- import { default as W } from "./models/NanoGPTV1.js";
- import { default as Y } from "./TeachableLLM.js";
- import { default as _ } from "./tokeniser/CharTokeniser.js";
- import { default as oo } from "./tokeniser/bpe.js";
- import { default as to } from "./utilities/waitForModel.js";
- import { default as eo } from "./data/textLoader.js";
- import { default as io } from "./Generator.js";
- import { default as fo } from "./models/model.js";
- import { estimateMemoryUsage as lo, estimateParameterCount as xo, estimateResources as uo, estimateTrainingMemoryUsage as no, validateConfig as co } from "./utilities/parameters.js";
- import { default as ko } from "./utilities/topP.js";
- import "./index-BzFyqcy-.js";
+ import "./utilities/packed.js";
+ import { default as ro } from "./models/NanoGPTV1.js";
+ import { default as eo } from "./TeachableLLM.js";
+ import { default as po } from "./tokeniser/CharTokeniser.js";
+ import { default as ao } from "./tokeniser/bpe.js";
+ import { default as fo } from "./utilities/waitForModel.js";
+ import { default as no } from "./data/textLoader.js";
+ import { default as uo } from "./Generator.js";
+ import { default as To } from "./models/model.js";
+ import { estimateMemoryUsage as go, estimateParameterCount as Mo, estimateResources as Po, estimateTrainingMemoryUsage as Co, validateConfig as Eo } from "./utilities/parameters.js";
+ import { default as Bo } from "./utilities/topP.js";
+ import "./index-ZyQhjEPo.js";
  import "./ops/cpu/scatterSub.js";
  import "./ops/webgl/scatterSub.js";
  import "./ops/cpu/gatherSub.js";
@@ -19,24 +20,21 @@ import "./ops/grads/attentionMask.js";
  import "./ops/cpu/qkv.js";
  import "./ops/webgl/qkv.js";
  import "./ops/grads/qkv.js";
- import "./random_width-CXVRloNK.js";
- import "./register_all_kernels-DIGpEwcf.js";
- import "./index-Tf7vU29b.js";
- import "./dataset-DlZtKmBq.js";
+ import "./random_width-DY6Kk2Dl.js";
+ import "./register_all_kernels-Bwu1PTuU.js";
+ import "./index-Cp39cXWe.js";
+ import "./dataset-0xP8GjwI.js";
  import "./ops/cpu/rope.js";
  import "./ops/webgl/rope.js";
- import "./ops/grads/rope.js";
+ import "./rope-B5UUMsPi.js";
  import "./ops/cpu/appendCache.js";
  import "./ops/webgl/appendCache.js";
- import "./ops/cpu/fusedSoftmax.js";
- import "./ops/webgl/fusedSoftmax.js";
- import "./ops/grads/fusedSoftmax.js";
  import "./ops/cpu/matMulGelu.js";
  import "./ops/webgl/matMulGelu.js";
  import "./ops/grads/matMulGelu.js";
  import "./ops/cpu/gelu.js";
  import "./ops/webgl/gelu.js";
- import "./gelu-Bp_-935b.js";
+ import "./gelu-CNLFZWea.js";
  import "./ops/cpu/normRMS.js";
  import "./ops/webgl/normRMS.js";
  import "./ops/grads/normRMS.js";
@@ -45,38 +43,51 @@ import "./ops/cpu/adamMoments.js";
  import "./ops/webgl/adamMoments.js";
  import "./ops/cpu/adamAdjust.js";
  import "./ops/webgl/adamAdjust.js";
- import { selectBackend as Po } from "./backend.js";
- import { default as go } from "./utilities/performance.js";
- import o from "./layers/CausalSelfAttention.js";
- import r from "./layers/MLP.js";
- import t from "./layers/TransformerBlock.js";
- import m from "./layers/RoPECache.js";
- import { default as Bo } from "./training/AdamExt.js";
- import { default as Eo } from "./checks/index.js";
- const O = {
- CausalSelfAttention: o,
- MLP: r,
- TransformerBlock: t,
- RoPECache: m
+ import { u as o, p as r } from "./pack16-CFUqumar.js";
+ import "./ops/grads/softmax16.js";
+ import "./matMul16--R5hOwDG.js";
+ import "./ops/webgl/matMul16.js";
+ import "./ops/cpu/matMul16.js";
+ import "./ops/transpose16.js";
+ import { selectBackend as yo } from "./backend.js";
+ import { default as Ao } from "./utilities/performance.js";
+ import t from "./layers/CausalSelfAttention.js";
+ import e from "./layers/MLP.js";
+ import m from "./layers/TransformerBlock.js";
+ import p from "./layers/RoPECache.js";
+ import { default as Ro } from "./training/AdamExt.js";
+ import { default as vo } from "./checks/index.js";
+ import { sentenceEmbeddings as Do, sentenceEmbeddingsTensor as Fo } from "./utilities/sentences.js";
+ const Z = {
+ pack16: r,
+ unpack16: o
+ }, _ = {
+ CausalSelfAttention: t,
+ MLP: e,
+ TransformerBlock: m,
+ RoPECache: p
  };
  export {
- Bo as AdamExt,
- oo as BPETokeniser,
- _ as CharTokeniser,
- io as Generator,
- fo as Model,
- W as NanoGPT,
- Y as TeachableLLM,
- Eo as checks,
- lo as estimateMemoryUsage,
- xo as estimateParameterCount,
- uo as estimateResources,
- no as estimateTrainingMemoryUsage,
- O as layers,
- eo as loadTextData,
- go as performanceTest,
- Po as selectBackend,
- ko as topP,
- co as validateConfig,
- to as waitForModel
+ Ro as AdamExt,
+ ao as BPETokeniser,
+ po as CharTokeniser,
+ uo as Generator,
+ To as Model,
+ ro as NanoGPT,
+ eo as TeachableLLM,
+ vo as checks,
+ go as estimateMemoryUsage,
+ Mo as estimateParameterCount,
+ Po as estimateResources,
+ Co as estimateTrainingMemoryUsage,
+ _ as layers,
+ no as loadTextData,
+ Z as ops,
+ Ao as performanceTest,
+ yo as selectBackend,
+ Do as sentenceEmbeddings,
+ Fo as sentenceEmbeddingsTensor,
+ Bo as topP,
+ Eo as validateConfig,
+ fo as waitForModel
  };
package/dist/matMul16--R5hOwDG.js ADDED
@@ -0,0 +1,77 @@
+ import { e as y } from "./index-ZyQhjEPo.js";
+ import "./ops/webgl/matMul16.js";
+ import "./ops/cpu/matMul16.js";
+ import { isPackedTensor as g, packTensor as k } from "./utilities/packed.js";
+ import { p as v } from "./pack16-CFUqumar.js";
+ import { d as h } from "./gelu-CNLFZWea.js";
+ import { transpose16 as S } from "./ops/transpose16.js";
+ import { reshape16 as w } from "./ops/reshape16.js";
+ import { a as G } from "./tensor_util-DV-FP5Q3.js";
+ const T = {
+ kernelName: "MatMul16",
+ inputsToSave: ["A", "B"],
+ outputsToSave: [],
+ gradFunc: (r, o, n) => {
+ const [s, t] = o;
+ if (Array.isArray(r))
+ throw new Error("Expected dy to be a single Tensor");
+ let e = r;
+ const { transposeA: f, transposeB: i, scale: a, activation: p, originalShape: c, perm: d } = n;
+ if (d && c) {
+ const u = new Array(d.length);
+ for (let A = 0; A < d.length; ++A)
+ u[d[A]] = A;
+ const m = e;
+ e = S(e, u), m.dispose();
+ }
+ if (c) {
+ const u = e;
+ e = w(e, c), u.dispose();
+ }
+ if (p === "gelu") {
+ const u = e, m = l(s, t, f, i);
+ e = h(u, m), u.dispose(), m.dispose();
+ }
+ if (!f && !i)
+ return {
+ A: () => a !== void 0 ? B(e, t, a, !1, !0) : l(e, t, !1, !0),
+ B: () => a !== void 0 ? M(s, e, a, !0, !1) : l(s, e, !0, !1)
+ };
+ if (!f && i)
+ return {
+ A: () => a !== void 0 ? B(e, t, a, !1, !1) : l(e, t, !1, !1),
+ B: () => a !== void 0 ? M(s, e, a, !0, !1) : l(s, e, !0, !1)
+ };
+ if (f && !i)
+ return {
+ A: () => a !== void 0 ? M(t, e, a, !1, !0) : l(t, e, !1, !0),
+ B: () => a !== void 0 ? M(s, e, a, !1, !1) : l(s, e, !1, !1)
+ };
+ throw new Error("Gradient for transposeA=true and transposeB=true is not supported yet.");
+ }
+ };
+ G(T);
+ function l(r, o, n = !1, s = !1, t = {}) {
+ const e = g(r), f = g(o), i = e || f, a = !i || e ? r : v(r), p = !i || f ? o : v(o), c = y().runKernel("MatMul16", { A: a, B: p }, { transposeA: n, transposeB: s, ...t });
+ return i && !e && a.dispose(), i && !f && p.dispose(), i ? k(c) : c;
+ }
+ function j(r, o, n, s = !1, t = !1) {
+ return l(r, o, s, t, { scale: n });
+ }
+ function B(r, o, n, s = !1, t = !1) {
+ return l(r, o, s, t, { scaleA: n });
+ }
+ function M(r, o, n, s = !1, t = !1) {
+ return l(r, o, s, t, { scaleB: n });
+ }
+ function q(r, o, n = !1, s = !1) {
+ return l(r, o, n, s, { activation: "gelu" });
+ }
+ export {
+ T as a,
+ l as b,
+ q as c,
+ B as d,
+ M as e,
+ j as m
+ };
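The heart of this new chunk is the helper l (exported as b): it packs a lone fp32 operand when the other operand is already fp16-packed, runs the MatMul16 kernel, frees any temporary packed copies, and re-tags the output as packed. Restated without minification (a sketch; engine, isPackedTensor, pack16 and packTensor name the imported aliases):

    function matMul16(A, B, transposeA = false, transposeB = false, attrs = {}) {
      const aPacked = isPackedTensor(A);
      const bPacked = isPackedTensor(B);
      const anyPacked = aPacked || bPacked;
      // Promote an fp32 operand only when the other side is already packed.
      const a = !anyPacked || aPacked ? A : pack16(A);
      const b = !anyPacked || bPacked ? B : pack16(B);
      const out = engine().runKernel("MatMul16", { A: a, B: b },
        { transposeA, transposeB, ...attrs });
      if (anyPacked && !aPacked) a.dispose(); // free temporary packed copies
      if (anyPacked && !bPacked) b.dispose();
      return anyPacked ? packTensor(out) : out;
    }

The registered gradient (T) first undoes any recorded perm/originalShape transform on dy, applies the GELU derivative when the forward pass fused one in, and then dispatches to the usual matmul gradient pairs for each transpose combination.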
package/dist/mat_mul-DeAh4uTH.js ADDED
@@ -0,0 +1,12 @@
+ import { A as u, B as s, E as c } from "./index-ZyQhjEPo.js";
+ import { m as M, B as p } from "./tensor_util-DV-FP5Q3.js";
+ function f(o, e, r = !1, m = !1) {
+ let t = s(o, "a", "matMul"), a = s(e, "b", "matMul");
+ [t, a] = M(t, a);
+ const n = { a: t, b: a }, l = { transposeA: r, transposeB: m };
+ return c.runKernel(p, n, l);
+ }
+ const B = /* @__PURE__ */ u({ matMul_: f });
+ export {
+ B as m
+ };
package/dist/mod-Gt1rMB4n.js ADDED
@@ -0,0 +1,12 @@
+ import { A as e, B as a, E as n } from "./index-ZyQhjEPo.js";
+ import { m as p, M as c } from "./tensor_util-DV-FP5Q3.js";
+ function d(m, r) {
+ let o = a(m, "a", "mod"), t = a(r, "b", "mod");
+ [o, t] = p(o, t);
+ const s = { a: o, b: t };
+ return n.runKernel(c, s);
+ }
+ const b = /* @__PURE__ */ e({ mod_: d });
+ export {
+ b as m
+ };