@genai-fi/nanogpt 0.9.1 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (343)
  1. package/README.md +352 -14
  2. package/dist/Generator.js +69 -78
  3. package/dist/{RealDiv-D4EzDsC0.js → RealDiv-DgA3z9oO.js} +32 -206
  4. package/dist/Reshape-CF6odzV4.js +16 -0
  5. package/dist/Reshape-_kILl6tK.js +81 -0
  6. package/dist/TeachableLLM.js +28 -22
  7. package/dist/Trainer.d.ts +2 -0
  8. package/dist/Trainer.js +3 -2
  9. package/dist/{axis_util-TbGYJ208.js → axis_util-BvHEw88j.js} +7 -23
  10. package/dist/backend.d.ts +2 -1
  11. package/dist/backend.js +10 -4
  12. package/dist/backend_util-D-rUb2ty.js +474 -0
  13. package/dist/backend_webgpu-B0u2ndUn.js +547 -0
  14. package/dist/binary_op_util-pKXltfxI.js +192 -0
  15. package/dist/broadcast_to-CwF7XIeu.js +30 -0
  16. package/dist/checks/appendCache.js +2 -2
  17. package/dist/checks/attentionMask.js +3 -3
  18. package/dist/checks/check.d.ts +1 -1
  19. package/dist/checks/check.js +8 -8
  20. package/dist/checks/gelu.js +2 -2
  21. package/dist/checks/index.d.ts +2 -0
  22. package/dist/checks/index.js +7 -5
  23. package/dist/checks/matMulGelu.js +6 -6
  24. package/dist/checks/normRMS.js +7 -7
  25. package/dist/checks/normRMSGrad.js +3 -3
  26. package/dist/checks/packUnpack.d.ts +1 -0
  27. package/dist/checks/packUnpack.js +18 -0
  28. package/dist/checks/qkv.js +12 -27
  29. package/dist/checks/rope.js +2 -2
  30. package/dist/checks/weights.js +18 -16
  31. package/dist/complex-CSlYz-2T.js +13 -0
  32. package/dist/complex_util-Yc1A_gV1.js +55 -0
  33. package/dist/concat-BHlIJeyT.js +19 -0
  34. package/dist/concat_util-DcJk7YHS.js +22 -0
  35. package/dist/data/docx.js +1 -1
  36. package/dist/data/parquet.js +2 -2
  37. package/dist/data/pdf.js +1 -1
  38. package/dist/data/textLoader.js +1 -1
  39. package/dist/{dataset-DlZtKmBq.js → dataset-0xP8GjwI.js} +136 -236
  40. package/dist/dropout-C1pM3f11.js +99 -0
  41. package/dist/expand_dims-BPG4fwBP.js +13 -0
  42. package/dist/exports_initializers-xuidcwI4.js +7 -0
  43. package/dist/gather-DykLGqmW.js +10 -0
  44. package/dist/{gelu-Bp_-935b.js → gelu-CNLFZWea.js} +11 -10
  45. package/dist/{gpgpu_math-CDaYiyE_.js → gpgpu_math-DDVJCn6-.js} +90 -265
  46. package/dist/{index-C4L8Cm77.js → index-CieiGp4Y.js} +14 -14
  47. package/dist/index-CjOj7j-u.js +7308 -0
  48. package/dist/{index-Tf7vU29b.js → index-Cp39cXWe.js} +3 -10
  49. package/dist/{index-Dwqa6Zy2.js → index-DvYrXKkX.js} +2 -2
  50. package/dist/index-ZyQhjEPo.js +2157 -0
  51. package/dist/{jszip.min-CjP2V1VV.js → jszip.min-Bz5-11Bk.js} +56 -57
  52. package/dist/kernel_funcs_utils-Dg_-E44D.js +308 -0
  53. package/dist/layers/BaseLayer.d.ts +1 -0
  54. package/dist/layers/BaseLayer.js +7 -6
  55. package/dist/layers/CausalSelfAttention.d.ts +0 -1
  56. package/dist/layers/CausalSelfAttention.js +56 -55
  57. package/dist/layers/MLP.js +15 -16
  58. package/dist/layers/PositionEmbedding.js +5 -14
  59. package/dist/layers/RMSNorm.js +3 -3
  60. package/dist/layers/RoPECache.d.ts +2 -0
  61. package/dist/layers/RoPECache.js +22 -17
  62. package/dist/layers/TiedEmbedding.js +22 -17
  63. package/dist/layers/TransformerBlock.js +21 -20
  64. package/dist/loader/load.js +1 -1
  65. package/dist/loader/loadTransformers.js +1 -1
  66. package/dist/loader/oldZipLoad.js +39 -33
  67. package/dist/loader/save.js +1 -1
  68. package/dist/log_sum_exp-DWI-76TI.js +41 -0
  69. package/dist/main.d.ts +8 -0
  70. package/dist/main.js +63 -52
  71. package/dist/matMul16--R5hOwDG.js +77 -0
  72. package/dist/mat_mul-DeAh4uTH.js +12 -0
  73. package/dist/mod-Gt1rMB4n.js +12 -0
  74. package/dist/models/NanoGPTV1.js +40 -31
  75. package/dist/models/model.d.ts +2 -0
  76. package/dist/models/model.js +37 -29
  77. package/dist/{mulmat_packed_gpu-BT60jmzP.js → mulmat_packed_gpu-BMFhLwta.js} +1 -17
  78. package/dist/{non_max_suppression_impl-CsEgBuMA.js → non_max_suppression_impl-B2W7YjZB.js} +0 -32
  79. package/dist/ones-CAMiP4I2.js +15 -0
  80. package/dist/ops/adamAdjust.js +1 -1
  81. package/dist/ops/adamMoments.d.ts +1 -1
  82. package/dist/ops/adamMoments.js +4 -4
  83. package/dist/ops/add16.d.ts +2 -0
  84. package/dist/ops/add16.js +9 -0
  85. package/dist/ops/appendCache.js +16 -9
  86. package/dist/ops/attentionMask.js +4 -4
  87. package/dist/ops/concat16.d.ts +2 -0
  88. package/dist/ops/concat16.js +9 -0
  89. package/dist/ops/cpu/adamAdjust.js +14 -13
  90. package/dist/ops/cpu/adamMoments.js +10 -9
  91. package/dist/ops/cpu/appendCache.js +9 -8
  92. package/dist/ops/cpu/attentionMask.js +15 -14
  93. package/dist/ops/cpu/fusedSoftmax.js +13 -12
  94. package/dist/ops/cpu/gatherSub.js +9 -24
  95. package/dist/ops/cpu/gelu.js +13 -12
  96. package/dist/ops/cpu/matMul16.d.ts +1 -0
  97. package/dist/ops/cpu/matMul16.js +16 -0
  98. package/dist/ops/cpu/matMulGelu.js +18 -16
  99. package/dist/ops/cpu/matMulMul.js +8 -7
  100. package/dist/ops/cpu/mulDropout.js +4 -3
  101. package/dist/ops/cpu/normRMS.js +11 -10
  102. package/dist/ops/cpu/qkv.js +17 -13
  103. package/dist/ops/cpu/rope.js +23 -22
  104. package/dist/ops/cpu/scatterSub.js +16 -30
  105. package/dist/ops/dot16.d.ts +2 -0
  106. package/dist/ops/dot16.js +42 -0
  107. package/dist/ops/gatherSub.js +1 -1
  108. package/dist/ops/gelu.js +2 -2
  109. package/dist/ops/grads/add16.d.ts +1 -0
  110. package/dist/ops/grads/add16.js +27 -0
  111. package/dist/ops/grads/attentionMask.js +12 -19
  112. package/dist/ops/grads/gelu.js +4 -3
  113. package/dist/ops/grads/matMul16.d.ts +2 -0
  114. package/dist/ops/grads/matMul16.js +9 -0
  115. package/dist/ops/grads/matMulGelu.js +8 -7
  116. package/dist/ops/grads/normRMS.js +8 -7
  117. package/dist/ops/grads/{fusedSoftmax.d.ts → pack16.d.ts} +1 -1
  118. package/dist/ops/grads/pack16.js +7 -0
  119. package/dist/ops/grads/qkv.d.ts +3 -1
  120. package/dist/ops/grads/qkv.js +28 -22
  121. package/dist/ops/grads/rope.d.ts +2 -1
  122. package/dist/ops/grads/rope.js +6 -13
  123. package/dist/ops/grads/softmax16.d.ts +2 -0
  124. package/dist/ops/grads/softmax16.js +26 -0
  125. package/dist/ops/grads/unpack16.d.ts +2 -0
  126. package/dist/ops/grads/unpack16.js +6 -0
  127. package/dist/ops/grads/utils.d.ts +3 -0
  128. package/dist/ops/grads/utils.js +10 -0
  129. package/dist/ops/matMul16.d.ts +15 -0
  130. package/dist/ops/matMul16.js +13 -0
  131. package/dist/ops/matMulGelu.js +1 -1
  132. package/dist/ops/matMulMul.js +1 -1
  133. package/dist/ops/mul16.d.ts +2 -0
  134. package/dist/ops/mul16.js +8 -0
  135. package/dist/ops/mulDrop.js +1 -1
  136. package/dist/ops/normRMS.js +1 -1
  137. package/dist/ops/pack16.d.ts +2 -0
  138. package/dist/ops/pack16.js +6 -0
  139. package/dist/ops/qkv.d.ts +1 -1
  140. package/dist/ops/qkv.js +8 -4
  141. package/dist/ops/reshape16.d.ts +2 -0
  142. package/dist/ops/reshape16.js +43 -0
  143. package/dist/ops/rope.d.ts +1 -1
  144. package/dist/ops/rope.js +8 -10
  145. package/dist/ops/scatterSub.js +1 -1
  146. package/dist/ops/slice16.d.ts +2 -0
  147. package/dist/ops/slice16.js +9 -0
  148. package/dist/ops/softmax16.d.ts +2 -0
  149. package/dist/ops/softmax16.js +12 -0
  150. package/dist/ops/sub16.d.ts +2 -0
  151. package/dist/ops/sub16.js +8 -0
  152. package/dist/ops/sum16.d.ts +2 -0
  153. package/dist/ops/sum16.js +13 -0
  154. package/dist/ops/transpose16.d.ts +3 -0
  155. package/dist/ops/transpose16.js +41 -0
  156. package/dist/ops/unpack16.d.ts +2 -0
  157. package/dist/ops/unpack16.js +6 -0
  158. package/dist/ops/webgl/adamAdjust.js +3 -2
  159. package/dist/ops/webgl/adamMoments.js +2 -1
  160. package/dist/ops/webgl/appendCache.js +2 -1
  161. package/dist/ops/webgl/attentionMask.js +5 -4
  162. package/dist/ops/webgl/fusedSoftmax.js +6 -4
  163. package/dist/ops/webgl/gatherSub.js +7 -6
  164. package/dist/ops/webgl/gelu.js +3 -2
  165. package/dist/ops/webgl/log.js +12 -27
  166. package/dist/ops/webgl/matMul16.d.ts +1 -0
  167. package/dist/ops/webgl/matMul16.js +37 -0
  168. package/dist/ops/webgl/matMulGelu.js +17 -15
  169. package/dist/ops/webgl/matMulMul.js +13 -12
  170. package/dist/ops/webgl/mulDropout.js +9 -8
  171. package/dist/ops/webgl/normRMS.js +8 -7
  172. package/dist/ops/webgl/qkv.js +6 -5
  173. package/dist/ops/webgl/rope.js +11 -10
  174. package/dist/ops/webgl/scatterSub.js +6 -5
  175. package/dist/ops/webgpu/adamAdjust.js +12 -10
  176. package/dist/ops/webgpu/adamMoments.js +27 -22
  177. package/dist/ops/webgpu/add16.d.ts +1 -0
  178. package/dist/ops/webgpu/add16.js +14 -0
  179. package/dist/ops/webgpu/appendCache.js +64 -17
  180. package/dist/ops/webgpu/attentionMask.js +19 -62
  181. package/dist/ops/webgpu/attentionMask32_program.d.ts +19 -0
  182. package/dist/ops/webgpu/attentionMask32_program.js +54 -0
  183. package/dist/ops/webgpu/concat16.d.ts +19 -0
  184. package/dist/ops/webgpu/concat16.js +128 -0
  185. package/dist/ops/webgpu/gatherSub.js +9 -7
  186. package/dist/ops/webgpu/gelu.js +78 -31
  187. package/dist/ops/webgpu/index.js +12 -0
  188. package/dist/ops/webgpu/matMul16.d.ts +1 -0
  189. package/dist/ops/webgpu/matMul16.js +58 -0
  190. package/dist/ops/webgpu/matMul16_program.d.ts +42 -0
  191. package/dist/ops/webgpu/matMul16_program.js +336 -0
  192. package/dist/ops/webgpu/mul16.d.ts +1 -0
  193. package/dist/ops/webgpu/mul16.js +14 -0
  194. package/dist/ops/webgpu/normRMS.js +21 -40
  195. package/dist/ops/webgpu/normRMS16_program.d.ts +9 -0
  196. package/dist/ops/webgpu/normRMS16_program.js +24 -0
  197. package/dist/ops/webgpu/normRMS32_program.d.ts +9 -0
  198. package/dist/ops/webgpu/normRMS32_program.js +24 -0
  199. package/dist/ops/webgpu/normRMSGrad.js +113 -64
  200. package/dist/ops/webgpu/pack16.d.ts +1 -0
  201. package/dist/ops/webgpu/pack16.js +19 -0
  202. package/dist/ops/webgpu/pack16_program.d.ts +19 -0
  203. package/dist/ops/webgpu/pack16_program.js +92 -0
  204. package/dist/ops/webgpu/qkv.js +20 -55
  205. package/dist/ops/webgpu/rope.js +77 -22
  206. package/dist/ops/webgpu/scatterSub.js +9 -7
  207. package/dist/ops/webgpu/slice16.d.ts +7 -0
  208. package/dist/ops/webgpu/slice16.js +71 -0
  209. package/dist/{variable-Bm2OFwGI.js → ops/webgpu/softmax16.d.ts} +2 -8
  210. package/dist/ops/webgpu/softmax16.js +23 -0
  211. package/dist/ops/webgpu/softmax16_program.d.ts +13 -0
  212. package/dist/ops/webgpu/softmax16_program.js +73 -0
  213. package/dist/ops/webgpu/softmax16_subgroup_program.d.ts +17 -0
  214. package/dist/ops/webgpu/softmax16_subgroup_program.js +75 -0
  215. package/dist/ops/webgpu/softmax16grad.d.ts +1 -0
  216. package/dist/ops/webgpu/softmax16grad.js +38 -0
  217. package/dist/ops/webgpu/sub16.d.ts +1 -0
  218. package/dist/ops/webgpu/sub16.js +14 -0
  219. package/dist/ops/webgpu/sum16.d.ts +1 -0
  220. package/dist/ops/webgpu/sum16.js +40 -0
  221. package/dist/ops/webgpu/transpose16.d.ts +1 -0
  222. package/dist/ops/webgpu/transpose16.js +35 -0
  223. package/dist/ops/webgpu/transpose16_program.d.ts +16 -0
  224. package/dist/ops/webgpu/transpose16_program.js +50 -0
  225. package/dist/ops/webgpu/transpose16_shared_program.d.ts +15 -0
  226. package/dist/ops/webgpu/transpose16_shared_program.js +71 -0
  227. package/dist/ops/webgpu/unpack16.d.ts +1 -0
  228. package/dist/ops/webgpu/unpack16.js +49 -0
  229. package/dist/ops/webgpu/utils/binary_op.d.ts +19 -0
  230. package/dist/ops/webgpu/utils/binary_op.js +79 -0
  231. package/dist/ops/webgpu/utils/deviceInfo.d.ts +7 -0
  232. package/dist/ops/webgpu/utils/deviceInfo.js +11 -0
  233. package/dist/ops/webgpu/utils/reductions.d.ts +32 -4
  234. package/dist/ops/webgpu/utils/reductions.js +236 -45
  235. package/dist/ops-CNI3TwqM.js +645 -0
  236. package/dist/pack16-CFUqumar.js +41 -0
  237. package/dist/{papaparse.min-C8l2Kvo1.js → papaparse.min-C0cScC2i.js} +2 -8
  238. package/dist/{parquet-C0Tlmv9c.js → parquet-BE8MU_ge.js} +201 -278
  239. package/dist/patches/PackedTensor.d.ts +12 -0
  240. package/dist/patches/PackedTensor.js +11 -0
  241. package/dist/patches/engine.d.ts +261 -0
  242. package/dist/patches/engine.js +10 -0
  243. package/dist/patches/tape.d.ts +12 -0
  244. package/dist/patches/tape.js +5 -0
  245. package/dist/patches/webgpu_backend.d.ts +18 -0
  246. package/dist/patches/webgpu_backend.js +57 -0
  247. package/dist/{tensor-CZr4dh61.js → patches/webgpu_base.d.ts} +5 -8
  248. package/dist/patches/webgpu_base.js +34 -0
  249. package/dist/patches/webgpu_program.d.ts +36 -0
  250. package/dist/patches/webgpu_program.js +401 -0
  251. package/dist/{pdf-kJD-f258.js → pdf-NIhmP3sq.js} +424 -428
  252. package/dist/random_width-DY6Kk2Dl.js +10051 -0
  253. package/dist/range-BMS52eQi.js +11 -0
  254. package/dist/reciprocal-CTmshQ9J.js +10 -0
  255. package/dist/{register_all_kernels-DIGpEwcf.js → register_all_kernels-Bwu1PTuU.js} +719 -9766
  256. package/dist/relu-yZ2-7WxU.js +10 -0
  257. package/dist/reshape-DevtBWtf.js +10 -0
  258. package/dist/rope-B5UUMsPi.js +32 -0
  259. package/dist/{scatter_nd_util-BQdz--Gn.js → scatter_nd_util-5EL-8VAQ.js} +1 -1
  260. package/dist/selu_util-D1w6yyTO.js +303 -0
  261. package/dist/{shared-DuP7ue-R.js → shared-BRksrJb3.js} +1 -17
  262. package/dist/shared-BuAXb4CI.js +2145 -0
  263. package/dist/sin-BGfy2HZo.js +16 -0
  264. package/dist/slice-D_gkkqZK.js +13 -0
  265. package/dist/slice_util-DtEldBfK.js +261 -0
  266. package/dist/softmax-ZHVebtR1.js +13 -0
  267. package/dist/split-DrfihRpZ.js +10 -0
  268. package/dist/squeeze-DZEpeblb.js +11 -0
  269. package/dist/stack-yOIAalTq.js +13 -0
  270. package/dist/sum-_fzj5ZTB.js +12 -0
  271. package/dist/tensor-DdQUJZlz.js +909 -0
  272. package/dist/tensor-f35l8Odg.js +8 -0
  273. package/dist/tensor1d-CeZuc-Rv.js +12 -0
  274. package/dist/tensor2d-G4Ys2GxX.js +15 -0
  275. package/dist/tensor4d-B8roDgtc.js +15 -0
  276. package/dist/tensor_util-DV-FP5Q3.js +523 -0
  277. package/dist/tfjs_backend-kNyO5L2d.js +653 -0
  278. package/dist/tile-BzyEiF-F.js +13 -0
  279. package/dist/tokeniser/CharTokeniser.js +1 -1
  280. package/dist/tokeniser/bpe.js +1 -1
  281. package/dist/training/Adam.d.ts +2 -1
  282. package/dist/training/Adam.js +12 -28
  283. package/dist/training/AdamExt.d.ts +1 -0
  284. package/dist/training/AdamExt.js +2 -2
  285. package/dist/training/DatasetBuilder.js +3 -20
  286. package/dist/training/FullTrainer.js +55 -48
  287. package/dist/training/Trainer.d.ts +11 -6
  288. package/dist/training/Trainer.js +51 -39
  289. package/dist/training/sparseCrossEntropy.js +3 -3
  290. package/dist/transpose-DKELTqhe.js +38 -0
  291. package/dist/utilities/arrayClose.js +7 -7
  292. package/dist/utilities/dummy.js +35 -27
  293. package/dist/utilities/multinomialCPU.js +2 -2
  294. package/dist/utilities/packed.d.ts +7 -0
  295. package/dist/utilities/packed.js +716 -0
  296. package/dist/utilities/performance.js +1 -1
  297. package/dist/utilities/profile.js +1 -1
  298. package/dist/utilities/safetensors.js +2 -2
  299. package/dist/utilities/sentences.d.ts +5 -0
  300. package/dist/utilities/sentences.js +41 -0
  301. package/dist/utilities/weights.js +2 -2
  302. package/dist/variable-Bhn5bHYv.js +7 -0
  303. package/dist/{webgpu_program-DkQJOJSd.js → webgpu_program-Cigz-7RF.js} +15 -44
  304. package/dist/webgpu_util-BBCnKm2X.js +65 -0
  305. package/dist/zeros-2gldETuK.js +14 -0
  306. package/package.json +4 -3
  307. package/dist/Reshape-Bowtk9BP.js +0 -127
  308. package/dist/Reshape-DUqYftGC.js +0 -30
  309. package/dist/backend_util-CJIiDoV1.js +0 -749
  310. package/dist/broadcast_to-DzlNweb8.js +0 -44
  311. package/dist/concat-B912vBbo.js +0 -33
  312. package/dist/dropout-C-csYCLj.js +0 -193
  313. package/dist/exports_initializers-B8iZMgQ0.js +0 -16
  314. package/dist/gather-Dnpgw-YQ.js +0 -25
  315. package/dist/index-BzFyqcy-.js +0 -4457
  316. package/dist/index-C1rx_Ajs.js +0 -12076
  317. package/dist/kernel_funcs_utils-DKLK0Mg3.js +0 -466
  318. package/dist/log_sum_exp-DO6z8tSE.js +0 -103
  319. package/dist/mat_mul-DzjTFx-u.js +0 -27
  320. package/dist/mod-Dobti4j4.js +0 -27
  321. package/dist/ones-tIJeHlq-.js +0 -29
  322. package/dist/ops/fusedSoftmax.d.ts +0 -2
  323. package/dist/ops/fusedSoftmax.js +0 -10
  324. package/dist/ops/grads/fusedSoftmax.js +0 -22
  325. package/dist/ops-LuCMAnmM.js +0 -1525
  326. package/dist/random_width-CXVRloNK.js +0 -13670
  327. package/dist/range-CWcz7xFA.js +0 -26
  328. package/dist/reciprocal-C4rNcM-S.js +0 -25
  329. package/dist/relu-BjCh_SYb.js +0 -25
  330. package/dist/reshape-CnIwVG1c.js +0 -25
  331. package/dist/selu_util-OtRzVwW5.js +0 -719
  332. package/dist/shared-DmRsFyaJ.js +0 -3134
  333. package/dist/sin-gpDNRxE0.js +0 -47
  334. package/dist/slice-d0Vo9XTN.js +0 -28
  335. package/dist/softmax-D7Jj3p_P.js +0 -28
  336. package/dist/split-DK2k5eHf.js +0 -25
  337. package/dist/stack-DFatutCx.js +0 -27
  338. package/dist/sum-CJ0ULhmt.js +0 -27
  339. package/dist/tensor1d-vML0r3q6.js +0 -27
  340. package/dist/tensor2d-D76QGjF3.js +0 -30
  341. package/dist/tensor4d-Df1WlVDY.js +0 -30
  342. package/dist/webgpu_util-pLEV9tks.js +0 -80
  343. package/dist/zeros-Bj5rMYA7.js +0 -52
package/dist/tfjs_backend-kNyO5L2d.js
@@ -0,0 +1,653 @@
+ import { A as g, B as $, i as _e, E as M, n as x, j as ie, w as ue, R as Te, t as A, G as ge, m as ke, l as Ee, S as Ie } from "./index-ZyQhjEPo.js";
+ import { a as y, s as ae, x as le } from "./tensor-DdQUJZlz.js";
+ import { t as Le } from "./tensor1d-CeZuc-Rv.js";
+ import { r as Ne, d as be } from "./dropout-C1pM3f11.js";
+ import { s as C } from "./slice-D_gkkqZK.js";
+ import { r as c } from "./reshape-DevtBWtf.js";
+ import { g as Ce } from "./gather-DykLGqmW.js";
+ import { s as Fe, b as Pe, m as ve, l as je, o as Be } from "./selu_util-D1w6yyTO.js";
+ import { a1 as Me, m as fe, a2 as he } from "./tensor_util-DV-FP5Q3.js";
+ import { t as Ue } from "./tile-BzyEiF-F.js";
+ import { m as w } from "./mat_mul-DeAh4uTH.js";
+ import { t as xe } from "./transpose-DKELTqhe.js";
+ import { c as j } from "./concat-BHlIJeyT.js";
+ function Ge(e, n, t) {
+ const s = $(e, "x", "clipByValue");
+ if (y(n <= t, () => `Error in clip: min (${n}) must be less than or equal to max (${t}).`), n === t)
+ return _e(s.shape, n, s.dtype);
+ const r = { x: s }, o = { clipValueMin: n, clipValueMax: t };
+ return M.runKernel(Me, r, o);
+ }
+ const Ve = /* @__PURE__ */ g({ clipByValue_: Ge });
+ function qe(e) {
+ return j(
+ e,
+ 0
+ /* axis */
+ );
+ }
+ const Je = /* @__PURE__ */ g({ concat1d_: qe });
+ function Ke(e, n) {
+ return j(e, n);
+ }
+ const Re = /* @__PURE__ */ g({ concat2d_: Ke });
+ function Ze(e, n) {
+ return j(e, n);
+ }
+ const We = /* @__PURE__ */ g({ concat3d_: Ze });
+ function Ye(e, n) {
+ return j(e, n);
+ }
+ const He = /* @__PURE__ */ g({ concat4d_: Ye });
+ function Qe(e, n, t) {
+ const s = $(e, "x", "slice1d");
+ return y(s.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${s.rank} tensor`), C(s, [n], [t]);
+ }
+ const Q = /* @__PURE__ */ g({ slice1d_: Qe });
+ function Xe(e, n, t) {
+ const s = $(e, "x", "slice2d");
+ return y(s.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${s.rank} tensor`), C(s, n, t);
+ }
+ const we = /* @__PURE__ */ g({ slice2d_: Xe });
+ function ze(e, n, t) {
+ const s = $(e, "x", "slice3d");
+ return y(s.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${s.rank} tensor`), C(s, n, t);
+ }
+ const X = /* @__PURE__ */ g({ slice3d_: ze });
+ function en(e, n, t) {
+ const s = $(e, "x", "slice4d");
+ return y(s.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${s.rank} tensor`), C(s, n, t);
+ }
+ const U = /* @__PURE__ */ g({ slice4d_: en });
+ function nn({ a: e, b: n, transposeA: t = !1, transposeB: s = !1, bias: r, activation: o = "linear", preluActivationWeights: a, leakyreluAlpha: f = 0.2 }) {
+ if (Fe(M.state.gradientDepth, o) === !1) {
+ let D = w(e, n, t, s);
+ return r != null && (D = x(D, r)), Pe(D, o, a, f);
+ }
+ let i = $(e, "a", "fused matMul"), u = $(n, "b", "fused matMul");
+ [i, u] = fe(i, u);
+ const m = t ? i.shape[i.rank - 2] : i.shape[i.rank - 1], d = s ? u.shape[u.rank - 1] : u.shape[u.rank - 2], T = t ? i.shape[i.rank - 1] : i.shape[i.rank - 2], h = s ? u.shape[u.rank - 2] : u.shape[u.rank - 1], ee = i.shape.slice(0, -2), E = u.shape.slice(0, -2), ne = ae(ee), te = ae(E);
+ y(m === d, () => `Error in fused matMul: inner shapes (${m}) and (${d}) of Tensors with shapes ${i.shape} and ${u.shape} and transposeA=${t} and transposeB=${s} must match.`);
+ const V = ie(i.shape.slice(0, -2), u.shape.slice(0, -2)).concat([T, h]), q = t ? c(i, [ne, m, T]) : c(i, [ne, T, m]), J = s ? c(u, [te, h, d]) : c(u, [te, d, h]);
+ let I;
+ r != null && (I = $(r, "bias", "fused matMul"), [I] = fe(I, i), ie(V, I.shape));
+ let se;
+ a != null && (se = $(a, "prelu weights", "fused matMul"));
+ const re = (D, F) => {
+ const [S, O, _, B] = F, k = ve(c(D, _.shape), _, o);
+ let L, N;
+ if (!t && !s ? (L = w(k, O, !1, !0), N = w(S, k, !0, !1)) : !t && s ? (L = w(k, O, !1, !1), N = w(k, S, !0, !1)) : t && !s ? (L = w(O, k, !1, !0), N = w(S, k, !1, !1)) : (L = w(O, k, !0, !0), N = w(k, S, !0, !0)), r != null) {
+ const De = je(B, k);
+ return [L, N, De];
+ } else
+ return [L, N];
+ }, oe = {
+ a: q,
+ b: J,
+ bias: I,
+ preluActivationWeights: se
+ }, ce = { transposeA: t, transposeB: s, activation: o, leakyreluAlpha: f };
+ return r == null ? ue((F, S, O) => {
+ const _ = (
+ // tslint:disable-next-line: no-unnecessary-type-assertion
+ M.runKernel(he, oe, ce)
+ );
+ return O([F, S, _]), { value: c(_, V), gradFunc: re };
+ })(q, J) : ue((F, S, O, _) => {
+ const B = (
+ // tslint:disable-next-line: no-unnecessary-type-assertion
+ M.runKernel(he, oe, ce)
+ );
+ return _([F, S, B, O]), { value: c(B, V), gradFunc: re };
+ })(q, J, I);
+ }
+ const pe = /* @__PURE__ */ g({ fusedMatMul_: nn });
+ class $e extends Error {
+ constructor(n) {
+ super(n), Object.setPrototypeOf(this, $e.prototype);
+ }
+ }
+ class Ae extends Error {
+ constructor(n) {
+ super(n), Object.setPrototypeOf(this, Ae.prototype);
+ }
+ }
+ class l extends Error {
+ constructor(n) {
+ super(n), Object.setPrototypeOf(this, l.prototype);
+ }
+ }
+ class v extends Error {
+ constructor(n) {
+ super(n), Object.setPrototypeOf(this, v.prototype);
+ }
+ }
+ class z extends Error {
+ constructor(n) {
+ super(n), Object.setPrototypeOf(this, z.prototype);
+ }
+ }
+ function In(e, n) {
+ if (Array.isArray(e)) {
+ let t = [];
+ for (let s = 0; s < n; s++)
+ t = t.concat(e);
+ return t;
+ } else {
+ const t = new Array(n);
+ return t.fill(e), t;
+ }
+ }
+ function de(e, n) {
+ if (!e)
+ throw new z(n);
+ }
+ function Ln(e, n) {
+ let t = 0;
+ for (const s of e)
+ s === n && t++;
+ return t;
+ }
+ function Nn(e) {
+ return e.length === 1 ? e[0] : e;
+ }
+ function bn(e) {
+ return Array.isArray(e) ? e : [e];
+ }
+ function Cn(e) {
+ const t = e.replace(/(.)([A-Z][a-z0-9]+)/g, "$1_$2").replace(/([a-z])([A-Z])/g, "$1_$2").toLowerCase();
+ return t[0] !== "_" ? t : "private" + t;
+ }
+ function Fn(e) {
+ return e.length <= 1 || e.indexOf("_") === -1 ? e : e.replace(/[_]+(\w|$)/g, (n, t) => t.toUpperCase());
+ }
+ let p = {};
+ function Pn(e) {
+ if (e == null)
+ return null;
+ const n = {};
+ return n.className = e.getClassName(), n.config = e.getConfig(), n;
+ }
+ function W(e) {
+ if (!(e == null || typeof e != "object"))
+ if (Array.isArray(e))
+ e.forEach((n) => W(n));
+ else {
+ const n = Object.keys(e);
+ for (const t of n) {
+ const s = e[t];
+ s != null && typeof s == "object" && (!Array.isArray(s) && s.type === "ndarray" && typeof s.value == "number" ? e[t] = s.value : W(s));
+ }
+ }
+ }
+ function vn(e, n = {}, t = {}, s = "object", r = !1) {
+ if (typeof e == "string") {
+ const o = e;
+ let a;
+ if (o in t)
+ a = t[o];
+ else if (o in p)
+ a = p[o];
+ else if (a = n[o], a == null)
+ throw new l(`Unknown ${s}: ${e}. This may be due to one of the following reasons:
+ 1. The ${s} is defined in Python, in which case it needs to be ported to TensorFlow.js or your JavaScript code.
+ 2. The custom ${s} is defined in JavaScript, but is not registered properly with tf.serialization.registerClass().`);
+ return a;
+ } else {
+ const o = e;
+ if (o.className == null || o.config == null)
+ throw new l(`${s}: Improper config format: ${JSON.stringify(o)}.
+ 'className' and 'config' must set.`);
+ const a = o.className;
+ let f, i;
+ if (a in t ? [f, i] = t[a] : a in p ? [f, i] = p.className : a in n && ([f, i] = n[a]), f == null)
+ throw new l(`Unknown ${s}: ${a}. This may be due to one of the following reasons:
+ 1. The ${s} is defined in Python, in which case it needs to be ported to TensorFlow.js or your JavaScript code.
+ 2. The custom ${s} is defined in JavaScript, but is not registered properly with tf.serialization.registerClass().`);
+ if (i != null) {
+ const u = {};
+ for (const h of Object.keys(p))
+ u[h] = p[h];
+ for (const h of Object.keys(t))
+ u[h] = t[h];
+ const m = o.config;
+ m.customObjects = u;
+ const d = Object.assign({}, p);
+ for (const h of Object.keys(t))
+ p[h] = t[h];
+ W(o.config);
+ const T = i(f, o.config, t, r);
+ return p = Object.assign({}, d), T;
+ } else {
+ const u = Object.assign({}, p);
+ for (const d of Object.keys(t))
+ p[d] = t[d];
+ const m = new f(o.config);
+ return p = Object.assign({}, u), m;
+ }
+ }
+ }
+ function tn(e, n) {
+ return e < n ? -1 : e > n ? 1 : 0;
+ }
+ function jn(e, n) {
+ return -1 * tn(e, n);
+ }
+ function Bn(e) {
+ if (e == null)
+ return e;
+ const n = [];
+ for (const t of e)
+ n.indexOf(t) === -1 && n.push(t);
+ return n;
+ }
+ function Mn(e) {
+ if (e == null)
+ throw new l(`Invalid value in obj: ${JSON.stringify(e)}`);
+ for (const n in e)
+ if (e.hasOwnProperty(n))
+ return !1;
+ return !0;
+ }
+ function G(e, n, t) {
+ if (t != null && e.indexOf(t) < 0)
+ throw new l(`${t} is not a valid ${n}. Valid values are ${e} or null/undefined.`);
+ }
+ function Un(e, n, t = 0, s = 1 / 0) {
+ return de(t >= 0), de(s >= t), Array.isArray(e) && e.length >= t && e.length <= s && e.every((r) => typeof r === n);
+ }
+ function sn(e, n) {
+ Array.isArray(e) ? (y(e.length > 0, () => `${n} is unexpectedly an empty array.`), e.forEach((t, s) => sn(t, `element ${s + 1} of ${n}`))) : y(Number.isInteger(e) && e > 0, () => `Expected ${n} to be a positive integer, but got ${Se(e)}.`);
+ }
+ function Se(e) {
+ return e === null ? "null" : Array.isArray(e) ? "[" + e.map((n) => Se(n)).join(",") + "]" : typeof e == "string" ? `"${e}"` : `${e}`;
+ }
+ function xn(e, n, t) {
+ let s = t != null ? t() : le(), r;
+ return (...a) => {
+ const f = t != null ? t() : le();
+ return f - s < n || (s = f, r = e(...a)), r;
+ };
+ }
+ function Gn(e) {
+ return e === "relu" ? "relu" : e === "linear" ? "linear" : e === "elu" ? "elu" : null;
+ }
+ const rn = ["channelsFirst", "channelsLast"], on = ["nearest", "bilinear"], cn = ["valid", "same", "causal"], un = ["max", "avg"], Vn = ["sum", "mul", "concat", "ave"];
+ const b = /* @__PURE__ */ new Map();
+ function an(e) {
+ G(rn, "DataFormat", e);
+ }
+ function qn(e) {
+ G(on, "InterpolationFormat", e);
+ }
+ function Jn(e) {
+ G(cn, "PaddingMode", e);
+ }
+ function Kn(e) {
+ G(un, "PoolMode", e);
+ }
+ const P = [], me = "/";
+ function Rn(e, n) {
+ P.push(e);
+ try {
+ const t = n();
+ return P.pop(), t;
+ } catch (t) {
+ throw P.pop(), t;
+ }
+ }
+ function ln() {
+ return P.length === 0 ? "" : P.join(me) + me;
+ }
+ function Zn(e) {
+ if (!Oe(e))
+ throw new Error("Not a valid tensor name: '" + e + "'");
+ return ln() + e;
+ }
+ function Wn(e) {
+ if (!Oe(e))
+ throw new Error("Not a valid tensor name: '" + e + "'");
+ b.has(e) || b.set(e, 0);
+ const n = b.get(e);
+ if (b.set(e, b.get(e) + 1), n > 0) {
+ const t = `${e}_${n}`;
+ return b.set(t, 1), t;
+ } else
+ return e;
+ }
+ const fn = new RegExp(/^[A-Za-z0-9][-A-Za-z0-9\._\/]*$/);
+ function Oe(e) {
+ return !!e.match(fn);
+ }
+ function Yn(e) {
+ return e === parseInt(e.toString(), 10);
+ }
+ function ye(e, n, t) {
+ n == null && (n = 0), t == null && (t = e.length);
+ let s = 1;
+ for (let r = n; r < t; ++r)
+ s *= e[r];
+ return s;
+ }
+ function Hn(e) {
+ if (e.length === 0)
+ return Number.NaN;
+ let n = Number.POSITIVE_INFINITY;
+ for (let t = 0; t < e.length; t++) {
+ const s = e[t];
+ s < n && (n = s);
+ }
+ return n;
+ }
+ function Qn(e) {
+ if (e.length === 0)
+ return Number.NaN;
+ let n = Number.NEGATIVE_INFINITY;
+ for (let t = 0; t < e.length; t++) {
+ const s = e[t];
+ s > n && (n = s);
+ }
+ return n;
+ }
+ function Xn(e, n) {
+ if (n < e)
+ throw new l(`end (${n}) < begin (${e}) is forbidden.`);
+ const t = [];
+ for (let s = e; s < n; ++s)
+ t.push(s);
+ return t;
+ }
+ let K;
+ function zn() {
+ return K == null && (K = Te().epsilon()), K;
+ }
+ function Y() {
+ return "channelsLast";
+ }
+ function et(e, n) {
+ return ge(e, n);
+ }
+ function hn(e, n = -1) {
+ const t = e.shape.slice();
+ return n < 0 && (n = t.length + n + 1), t.splice(n, 0, 1), c(e, t);
+ }
+ function nt(e, n) {
+ return A(() => {
+ if (e.shape.length !== 2)
+ throw new l(`repeat() expects a rank-2 tensor, but received a rank-${e.shape.length} tensor.`);
+ const t = hn(e, 1);
+ return pn(t, [1, n, 1]);
+ });
+ }
+ function tt(e) {
+ const n = [ye(e.shape)];
+ return c(e, n);
+ }
+ function st(e) {
+ if (e.rank <= 1)
+ throw new l(`batchFlatten requires a minimum rank of 2. Got rank: ${e.rank}.`);
+ const n = [e.shape[0], ye(e.shape, 1)];
+ return c(e, n);
+ }
+ function R(e, n, t) {
+ return A(() => {
+ switch (e.rank) {
+ case 1:
+ return Q(e, n, t);
+ case 2:
+ return we(e, [n, 0], [t, e.shape[1]]);
+ case 3:
+ return X(e, [n, 0, 0], [t, e.shape[1], e.shape[2]]);
+ case 4:
+ return U(e, [n, 0, 0, 0], [t, e.shape[1], e.shape[2], e.shape[3]]);
+ case 5:
+ return C(e, [n, 0, 0, 0, 0], [
+ t,
+ e.shape[1],
+ e.shape[2],
+ e.shape[3],
+ e.shape[4]
+ ]);
+ case 6:
+ return C(e, [n, 0, 0, 0, 0, 0], [
+ t,
+ e.shape[1],
+ e.shape[2],
+ e.shape[3],
+ e.shape[4],
+ e.shape[5]
+ ]);
+ default:
+ throw new l(`sliceAlongFirstAxis() received an unsupported tensor rank: ${e.rank}`);
+ }
+ });
+ }
+ function Z(e, n, t) {
+ return A(() => {
+ switch (e.rank) {
+ case 1:
+ return Q(e, n, t);
+ case 2:
+ return we(e, [0, n], [e.shape[0], t]);
+ case 3:
+ return X(e, [0, 0, n], [e.shape[0], e.shape[1], t]);
+ case 4:
+ return U(e, [0, 0, 0, n], [e.shape[0], e.shape[1], e.shape[2], t]);
+ default:
+ throw new l(`sliceAlongLastAxis() received an unsupported tensor rank: ${e.rank}`);
+ }
+ });
+ }
+ function rt(e, n, t, s) {
+ return A(() => {
+ switch (e.rank) {
+ case 1:
+ return Q(e, n, t);
+ case 2:
+ switch (s) {
+ case 1:
+ return R(e, n, t);
+ case 2:
+ return Z(e, n, t);
+ default:
+ throw new l(`The axis is not within the rank of the tensor ${s}`);
+ }
+ case 3:
+ switch (s) {
+ case 1:
+ return R(e, n, t);
+ case 2:
+ return X(e, [0, n, 0], [e.shape[0], t, e.shape[2]]);
+ case 3:
+ return Z(e, n, t);
+ default:
+ throw new l(`The axis is not within the rank of the tensor ${s}`);
+ }
+ case 4:
+ switch (s) {
+ case 1:
+ return R(e, n, t);
+ case 2:
+ return U(e, [0, n, 0, 0], [e.shape[0], t, e.shape[2], e.shape[3]]);
+ case 3:
+ return U(e, [0, 0, n, 0], [e.shape[0], e.shape[1], t, e.shape[3]]);
+ case 4:
+ return Z(e, n, t);
+ default:
+ throw new l(`The axis is not within the rank of the tensor ${s}`);
+ }
+ default:
+ throw new l(`sliceAlongLastAxis() received an unsupported tensor rank: ${e.rank}`);
+ }
+ });
+ }
+ function ot(e, n = -1) {
+ let t;
+ return n < 0 && (t = e[0].rank, t !== 0 ? n = t : n = 0), n === e[0].rank && (n = -1), j(e, n);
+ }
+ function ct(e, n) {
+ switch (e.rank) {
+ case 1:
+ return Je([e, n]);
+ case 2:
+ return Re([e, n], 0);
+ case 3:
+ return We([e, n], 0);
+ case 4:
+ return He([e, n], 0);
+ default:
+ throw new l(`concatAlongFirstAxis() received an unsupported tensor rank: ${e.rank}`);
+ }
+ }
+ function pn(e, n) {
+ if (Array.isArray(n) || (n = [n]), e.rank !== n.length)
+ throw new l(`The length of input n (${n.length}) does not match the number of dimensions in input x (${e.rank})`);
+ return Ue(e, n);
+ }
+ function it(e, n = 0, t = 1, s, r) {
+ return Ne(e, n, t, s, r);
+ }
+ function ut(e, n, t, s) {
+ if (e.rank < 2 || n.rank < 2)
+ throw new v(`dot requires both inputs to be rank >= 2 but got x shape = ${e.shape} and y shape = ${n.shape}`);
+ if (n.rank >= 3) {
+ const r = e.shape.slice(-1)[0], o = n.shape.slice(-2)[0];
+ if (r !== o)
+ throw new v(`If rank y >= 3, then the second last dim of y must equal the last dim of x but got x shape = ${e.shape} and y shape = ${n.shape}`);
+ }
+ if (e.rank === 2 && n.rank === 2)
+ return pe({
+ a: e,
+ b: n,
+ transposeA: !1,
+ transposeB: !1,
+ bias: s ? H(e.rank, s, Y()) : null,
+ activation: t
+ });
+ {
+ const r = e.shape.slice(), o = r.pop();
+ e = c(e, [-1, o]);
+ const a = n.shape.slice(), f = a.pop(), i = a.pop(), u = [...a, f], m = Array.from({ length: n.rank }, (ee, E) => E === 0 ? n.rank - 2 : E <= n.rank - 2 ? E - 1 : E);
+ n = c(xe(n, m), [i, -1]);
+ const d = [...r, ...u];
+ return c(pe({
+ a: e,
+ b: n,
+ transposeA: !1,
+ transposeB: !1,
+ bias: s ? H(e.rank, s, Y()) : null,
+ activation: t
+ }), d);
+ }
+ }
+ function at(e, n, t) {
+ return A(() => (Array.isArray(n) ? n = Le(n, "int32") : n = ge(n, "int32"), Ce(e, n, t)));
+ }
+ function lt(e) {
+ return ke(e, e);
+ }
+ function H(e, n, t) {
+ const s = n.shape;
+ if (n.rank !== 1 && n.rank !== e)
+ throw new l(`Unexpected bias dimensions: ${n.rank}; expected it to be 1 or ${e}`);
+ if (e === 5) {
+ if (t === "channelsFirst")
+ return s.length === 1 ? c(n, [1, s[0], 1, 1, 1]) : c(n, [1, s[3], s[0], s[1], s[2]]);
+ if (t === "channelsLast")
+ return s.length === 1 ? c(n, [1, 1, 1, 1, s[0]]) : c(n, [1].concat(s));
+ } else if (e === 4) {
+ if (t === "channelsFirst")
+ return s.length === 1 ? c(n, [1, s[0], 1, 1]) : c(n, [1, s[2], s[0], s[1]]);
+ if (t === "channelsLast")
+ return s.length === 1 ? c(n, [1, 1, 1, s[0]]) : c(n, [1].concat(s));
+ } else if (e === 3) {
+ if (t === "channelsFirst")
+ return s.length === 1 ? c(n, [1, s[0], 1]) : c(n, [1, s[1], s[0]]);
+ if (t === "channelsLast")
+ return s.length === 1 ? c(n, [1, 1, s[0]]) : c(n, [1].concat(s));
+ } else if (e < 3)
+ return n;
+ throw new l(`Unsupported input rank by biasAdd: ${n.rank}`);
+ }
+ function ft(e, n, t) {
+ return A(() => (t == null && (t = Y()), an(t), x(e, H(e.rank, n, t))));
+ }
+ function ht(e, n = 1) {
+ if (n !== 1)
+ throw new v(`Support for alpha values other than 1 (${n}) is not implemented yet.`);
+ return Be(e);
+ }
+ function pt(e) {
+ return A(() => Ee(e, x(Ie(e), 1)));
+ }
+ function dt(e, n, t, s) {
+ return A(() => be(e, n, t, s));
+ }
+ function mt(e) {
+ return A(() => {
+ const n = x(0.5, ke(0.2, e));
+ return Ve(n, 0, 1);
+ });
+ }
+ function gt(e, n, t = !1) {
+ return t ? e() : n();
+ }
+ export {
+ st as $,
+ $e as A,
+ hn as B,
+ at as C,
+ Xn as D,
+ Ln as E,
+ ht as F,
+ mt as G,
+ pt as H,
+ Qn as I,
+ Yn as J,
+ Un as K,
+ ft as L,
+ rt as M,
+ v as N,
+ qn as O,
+ sn as P,
+ Gn as Q,
+ Ae as R,
+ Jn as S,
+ Y as T,
+ pn as U,
+ l as V,
+ Hn as W,
+ ct as X,
+ gt as Y,
+ dt as Z,
+ ot as _,
+ an as a,
+ nt as a0,
+ Kn as a1,
+ Vn as a2,
+ ye as b,
+ G as c,
+ ut as d,
+ vn as e,
+ Wn as f,
+ Zn as g,
+ Nn as h,
+ bn as i,
+ Ve as j,
+ zn as k,
+ xn as l,
+ tt as m,
+ Rn as n,
+ lt as o,
+ de as p,
+ et as q,
+ it as r,
+ Pn as s,
+ Cn as t,
+ Fn as u,
+ Bn as v,
+ jn as w,
+ In as x,
+ Mn as y,
+ R as z
+ };
package/dist/tile-BzyEiF-F.js
@@ -0,0 +1,13 @@
+ import { A as a, B as e, E as i } from "./index-ZyQhjEPo.js";
+ import { T as m } from "./tensor_util-DV-FP5Q3.js";
+ import { a as c } from "./tensor-DdQUJZlz.js";
+ function l(n, t) {
+ const r = e(n, "x", "tile", "string_or_numeric");
+ c(r.rank === t.length, () => `Error in transpose: rank of input ${r.rank} must match length of reps ${t}.`);
+ const o = { x: r }, s = { reps: t };
+ return i.runKernel(m, o, s);
+ }
+ const x = /* @__PURE__ */ a({ tile_: l });
+ export {
+ x as t
+ };
package/dist/tokeniser/CharTokeniser.js
@@ -1,4 +1,4 @@
- import { E as k } from "../index-Dwqa6Zy2.js";
+ import { E as k } from "../index-DvYrXKkX.js";
  const u = ["<eos>", "<unk>"];
  class b extends k {
  vocabSize = 0;
package/dist/tokeniser/bpe.js
@@ -1,5 +1,5 @@
  import l from "../utilities/tokenParse.js";
- import { E as f } from "../index-Dwqa6Zy2.js";
+ import { E as f } from "../index-DvYrXKkX.js";
  function u(o, e) {
  return `${o}-::-${e}`;
  }
package/dist/training/Adam.d.ts
@@ -5,13 +5,14 @@ export declare class AdamOptimizer extends Optimizer {
  protected learningRate: number;
  protected beta1: number;
  protected beta2: number;
+ protected lossScaling: number;
  protected epsilon: number | null;
  /** @nocollapse */
  static get className(): string;
  private accBeta1;
  private accBeta2;
  private accumulatedMoments;
- constructor(learningRate: number, beta1: number, beta2: number, epsilon?: number | null);
+ constructor(learningRate: number, beta1: number, beta2: number, lossScaling: number, epsilon?: number | null);
  applyGradients(variableGradients: NamedVariableMap | NamedTensor[]): void;
  dispose(): void;
  getWeights(): Promise<NamedTensor[]>;
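The 0.10.x declaration above inserts a required `lossScaling` argument before the optional `epsilon`, so 0.9.x call sites that constructed `AdamOptimizer` directly need updating. Below is a minimal migration sketch based only on the `.d.ts` signature shown above; the import subpath and all numeric values (including the loss-scaling factor of 128) are illustrative assumptions, not taken from the package.

```ts
// Hypothetical usage sketch derived from the Adam.d.ts change above.
// The import subpath and the concrete values are assumptions for illustration only.
import { AdamOptimizer } from "@genai-fi/nanogpt/dist/training/Adam.js";

// 0.9.x:  new AdamOptimizer(learningRate, beta1, beta2, epsilon?)
// 0.10.x: lossScaling is now a required fourth argument, ahead of the optional epsilon.
const optimizer = new AdamOptimizer(
  1e-3,  // learningRate
  0.9,   // beta1
  0.999, // beta2
  128,   // lossScaling (new in 0.10.x)
  1e-8   // epsilon (still optional)
);
```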