whispercpp 1.3.4 → 1.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (891)
  1. checksums.yaml +4 -4
  2. data/LICENSE +1 -1
  3. data/README.md +158 -44
  4. data/ext/extconf.rb +3 -2
  5. data/ext/ruby_whisper.c +34 -6
  6. data/ext/ruby_whisper.h +67 -0
  7. data/ext/ruby_whisper_context.c +236 -144
  8. data/ext/ruby_whisper_context_params.c +163 -0
  9. data/ext/ruby_whisper_model.c +12 -13
  10. data/ext/ruby_whisper_params.c +47 -24
  11. data/ext/ruby_whisper_segment.c +84 -20
  12. data/ext/ruby_whisper_token.c +371 -0
  13. data/ext/ruby_whisper_transcribe.cpp +5 -2
  14. data/ext/ruby_whisper_vad_context.c +122 -0
  15. data/ext/ruby_whisper_vad_context_detect.cpp +51 -0
  16. data/ext/ruby_whisper_vad_params.c +0 -1
  17. data/ext/ruby_whisper_vad_segment.c +138 -0
  18. data/ext/ruby_whisper_vad_segments.c +105 -0
  19. data/ext/sources/CMakeLists.txt +4 -1
  20. data/ext/sources/bindings/javascript/package.json +1 -1
  21. data/ext/sources/cmake/arm64-apple-clang.cmake +16 -0
  22. data/ext/sources/cmake/arm64-windows-llvm.cmake +16 -0
  23. data/ext/sources/cmake/riscv64-spacemit-linux-gnu-gcc.cmake +29 -0
  24. data/ext/sources/cmake/whisper-config.cmake.in +5 -40
  25. data/ext/sources/cmake/x64-windows-llvm.cmake +5 -0
  26. data/ext/sources/examples/addon.node/vad-example.js +2 -2
  27. data/ext/sources/examples/bench/bench.cpp +23 -18
  28. data/ext/sources/examples/cli/cli.cpp +129 -112
  29. data/ext/sources/examples/common-ggml.cpp +2 -0
  30. data/ext/sources/examples/lsp/CMakeLists.txt +2 -1
  31. data/ext/sources/examples/miniaudio.h +4507 -2131
  32. data/ext/sources/examples/quantize/CMakeLists.txt +2 -1
  33. data/ext/sources/examples/server/server.cpp +28 -15
  34. data/ext/sources/examples/talk-llama/CMakeLists.txt +8 -3
  35. data/ext/sources/examples/talk-llama/llama-adapter.cpp +5 -2
  36. data/ext/sources/examples/talk-llama/llama-adapter.h +7 -0
  37. data/ext/sources/examples/talk-llama/llama-arch.cpp +2378 -1988
  38. data/ext/sources/examples/talk-llama/llama-arch.h +109 -2
  39. data/ext/sources/examples/talk-llama/llama-batch.cpp +78 -34
  40. data/ext/sources/examples/talk-llama/llama-batch.h +17 -4
  41. data/ext/sources/examples/talk-llama/llama-chat.cpp +100 -4
  42. data/ext/sources/examples/talk-llama/llama-chat.h +5 -0
  43. data/ext/sources/examples/talk-llama/llama-context.cpp +1088 -403
  44. data/ext/sources/examples/talk-llama/llama-context.h +70 -23
  45. data/ext/sources/examples/talk-llama/llama-cparams.h +6 -0
  46. data/ext/sources/examples/talk-llama/llama-ext.h +12 -0
  47. data/ext/sources/examples/talk-llama/llama-grammar.cpp +295 -60
  48. data/ext/sources/examples/talk-llama/llama-grammar.h +22 -1
  49. data/ext/sources/examples/talk-llama/llama-graph.cpp +925 -155
  50. data/ext/sources/examples/talk-llama/llama-graph.h +234 -23
  51. data/ext/sources/examples/talk-llama/llama-hparams.cpp +79 -38
  52. data/ext/sources/examples/talk-llama/llama-hparams.h +118 -18
  53. data/ext/sources/examples/talk-llama/llama-impl.cpp +11 -7
  54. data/ext/sources/examples/talk-llama/llama-impl.h +14 -2
  55. data/ext/sources/examples/talk-llama/llama-kv-cache-iswa.cpp +8 -4
  56. data/ext/sources/examples/talk-llama/llama-kv-cache.cpp +405 -140
  57. data/ext/sources/examples/talk-llama/llama-kv-cache.h +24 -10
  58. data/ext/sources/examples/talk-llama/llama-kv-cells.h +44 -2
  59. data/ext/sources/examples/talk-llama/llama-memory-hybrid-iswa.cpp +275 -0
  60. data/ext/sources/examples/talk-llama/llama-memory-hybrid-iswa.h +140 -0
  61. data/ext/sources/examples/talk-llama/llama-memory-hybrid.cpp +12 -10
  62. data/ext/sources/examples/talk-llama/llama-memory-recurrent.cpp +42 -31
  63. data/ext/sources/examples/talk-llama/llama-memory-recurrent.h +2 -2
  64. data/ext/sources/examples/talk-llama/llama-mmap.cpp +197 -45
  65. data/ext/sources/examples/talk-llama/llama-mmap.h +8 -3
  66. data/ext/sources/examples/talk-llama/llama-model-loader.cpp +606 -116
  67. data/ext/sources/examples/talk-llama/llama-model-loader.h +41 -5
  68. data/ext/sources/examples/talk-llama/llama-model-saver.cpp +61 -44
  69. data/ext/sources/examples/talk-llama/llama-model-saver.h +5 -2
  70. data/ext/sources/examples/talk-llama/llama-model.cpp +2756 -13643
  71. data/ext/sources/examples/talk-llama/llama-model.h +112 -18
  72. data/ext/sources/examples/talk-llama/llama-quant.cpp +582 -365
  73. data/ext/sources/examples/talk-llama/{llama-sampling.cpp → llama-sampler.cpp} +1409 -199
  74. data/ext/sources/examples/talk-llama/llama-sampler.h +42 -0
  75. data/ext/sources/examples/talk-llama/llama-vocab.cpp +248 -82
  76. data/ext/sources/examples/talk-llama/llama-vocab.h +50 -40
  77. data/ext/sources/examples/talk-llama/llama.cpp +802 -21
  78. data/ext/sources/examples/talk-llama/llama.h +210 -39
  79. data/ext/sources/examples/talk-llama/models/afmoe.cpp +190 -0
  80. data/ext/sources/examples/talk-llama/models/apertus.cpp +125 -0
  81. data/ext/sources/examples/talk-llama/models/arcee.cpp +135 -0
  82. data/ext/sources/examples/talk-llama/models/arctic.cpp +137 -0
  83. data/ext/sources/examples/talk-llama/models/arwkv7.cpp +86 -0
  84. data/ext/sources/examples/talk-llama/models/baichuan.cpp +123 -0
  85. data/ext/sources/examples/talk-llama/models/bailingmoe.cpp +143 -0
  86. data/ext/sources/examples/talk-llama/models/bailingmoe2.cpp +133 -0
  87. data/ext/sources/examples/talk-llama/models/bert.cpp +184 -0
  88. data/ext/sources/examples/talk-llama/models/bitnet.cpp +145 -0
  89. data/ext/sources/examples/talk-llama/models/bloom.cpp +101 -0
  90. data/ext/sources/examples/talk-llama/models/chameleon.cpp +178 -0
  91. data/ext/sources/examples/talk-llama/models/chatglm.cpp +132 -0
  92. data/ext/sources/examples/talk-llama/models/codeshell.cpp +111 -0
  93. data/ext/sources/examples/talk-llama/models/cogvlm.cpp +102 -0
  94. data/ext/sources/examples/talk-llama/models/cohere2-iswa.cpp +134 -0
  95. data/ext/sources/examples/talk-llama/models/command-r.cpp +122 -0
  96. data/ext/sources/examples/talk-llama/models/dbrx.cpp +122 -0
  97. data/ext/sources/examples/talk-llama/models/deci.cpp +135 -0
  98. data/ext/sources/examples/talk-llama/models/deepseek.cpp +142 -0
  99. data/ext/sources/examples/talk-llama/models/deepseek2.cpp +262 -0
  100. data/ext/sources/examples/talk-llama/models/delta-net-base.cpp +445 -0
  101. data/ext/sources/examples/talk-llama/models/dots1.cpp +132 -0
  102. data/ext/sources/examples/talk-llama/models/dream.cpp +105 -0
  103. data/ext/sources/examples/talk-llama/models/ernie4-5-moe.cpp +148 -0
  104. data/ext/sources/examples/talk-llama/models/ernie4-5.cpp +110 -0
  105. data/ext/sources/examples/talk-llama/models/eurobert.cpp +97 -0
  106. data/ext/sources/examples/talk-llama/models/exaone-moe.cpp +145 -0
  107. data/ext/sources/examples/talk-llama/models/exaone.cpp +114 -0
  108. data/ext/sources/examples/talk-llama/models/exaone4.cpp +123 -0
  109. data/ext/sources/examples/talk-llama/models/falcon-h1.cpp +111 -0
  110. data/ext/sources/examples/talk-llama/models/falcon.cpp +120 -0
  111. data/ext/sources/examples/talk-llama/models/gemma-embedding.cpp +116 -0
  112. data/ext/sources/examples/talk-llama/models/gemma.cpp +112 -0
  113. data/ext/sources/examples/talk-llama/models/gemma2-iswa.cpp +128 -0
  114. data/ext/sources/examples/talk-llama/models/gemma3.cpp +155 -0
  115. data/ext/sources/examples/talk-llama/models/gemma3n-iswa.cpp +384 -0
  116. data/ext/sources/examples/talk-llama/models/glm4-moe.cpp +170 -0
  117. data/ext/sources/examples/talk-llama/models/glm4.cpp +157 -0
  118. data/ext/sources/examples/talk-llama/models/gpt2.cpp +105 -0
  119. data/ext/sources/examples/talk-llama/models/gptneox.cpp +144 -0
  120. data/ext/sources/examples/talk-llama/models/granite-hybrid.cpp +195 -0
  121. data/ext/sources/examples/talk-llama/models/granite.cpp +210 -0
  122. data/ext/sources/examples/talk-llama/models/grok.cpp +159 -0
  123. data/ext/sources/examples/talk-llama/models/grovemoe.cpp +139 -0
  124. data/ext/sources/examples/talk-llama/models/hunyuan-dense.cpp +132 -0
  125. data/ext/sources/examples/talk-llama/models/hunyuan-moe.cpp +153 -0
  126. data/ext/sources/examples/talk-llama/models/internlm2.cpp +120 -0
  127. data/ext/sources/examples/talk-llama/models/jais.cpp +86 -0
  128. data/ext/sources/examples/talk-llama/models/jais2.cpp +123 -0
  129. data/ext/sources/examples/talk-llama/models/jamba.cpp +106 -0
  130. data/ext/sources/examples/talk-llama/models/kimi-linear.cpp +381 -0
  131. data/ext/sources/examples/talk-llama/models/lfm2.cpp +196 -0
  132. data/ext/sources/examples/talk-llama/models/llada-moe.cpp +122 -0
  133. data/ext/sources/examples/talk-llama/models/llada.cpp +99 -0
  134. data/ext/sources/examples/talk-llama/models/llama-iswa.cpp +178 -0
  135. data/ext/sources/examples/talk-llama/models/llama.cpp +175 -0
  136. data/ext/sources/examples/talk-llama/models/maincoder.cpp +117 -0
  137. data/ext/sources/examples/talk-llama/models/mamba-base.cpp +289 -0
  138. data/ext/sources/examples/talk-llama/models/mamba.cpp +54 -0
  139. data/ext/sources/examples/talk-llama/models/mimo2-iswa.cpp +129 -0
  140. data/ext/sources/examples/talk-llama/models/minicpm3.cpp +200 -0
  141. data/ext/sources/examples/talk-llama/models/minimax-m2.cpp +123 -0
  142. data/ext/sources/examples/talk-llama/models/mistral3.cpp +160 -0
  143. data/ext/sources/examples/talk-llama/models/models.h +704 -0
  144. data/ext/sources/examples/talk-llama/models/modern-bert.cpp +109 -0
  145. data/ext/sources/examples/talk-llama/models/mpt.cpp +126 -0
  146. data/ext/sources/examples/talk-llama/models/nemotron-h.cpp +162 -0
  147. data/ext/sources/examples/talk-llama/models/nemotron.cpp +122 -0
  148. data/ext/sources/examples/talk-llama/models/neo-bert.cpp +104 -0
  149. data/ext/sources/examples/talk-llama/models/olmo.cpp +121 -0
  150. data/ext/sources/examples/talk-llama/models/olmo2.cpp +150 -0
  151. data/ext/sources/examples/talk-llama/models/olmoe.cpp +124 -0
  152. data/ext/sources/examples/talk-llama/models/openai-moe-iswa.cpp +127 -0
  153. data/ext/sources/examples/talk-llama/models/openelm.cpp +124 -0
  154. data/ext/sources/examples/talk-llama/models/orion.cpp +123 -0
  155. data/ext/sources/examples/talk-llama/models/paddleocr.cpp +122 -0
  156. data/ext/sources/examples/talk-llama/models/pangu-embedded.cpp +121 -0
  157. data/ext/sources/examples/talk-llama/models/phi2.cpp +121 -0
  158. data/ext/sources/examples/talk-llama/models/phi3.cpp +152 -0
  159. data/ext/sources/examples/talk-llama/models/plamo.cpp +110 -0
  160. data/ext/sources/examples/talk-llama/models/plamo2.cpp +320 -0
  161. data/ext/sources/examples/talk-llama/models/plamo3.cpp +128 -0
  162. data/ext/sources/examples/talk-llama/models/plm.cpp +169 -0
  163. data/ext/sources/examples/talk-llama/models/qwen.cpp +108 -0
  164. data/ext/sources/examples/talk-llama/models/qwen2.cpp +126 -0
  165. data/ext/sources/examples/talk-llama/models/qwen2moe.cpp +151 -0
  166. data/ext/sources/examples/talk-llama/models/qwen2vl.cpp +117 -0
  167. data/ext/sources/examples/talk-llama/models/qwen3.cpp +120 -0
  168. data/ext/sources/examples/talk-llama/models/qwen35.cpp +381 -0
  169. data/ext/sources/examples/talk-llama/models/qwen35moe.cpp +422 -0
  170. data/ext/sources/examples/talk-llama/models/qwen3moe.cpp +131 -0
  171. data/ext/sources/examples/talk-llama/models/qwen3next.cpp +525 -0
  172. data/ext/sources/examples/talk-llama/models/qwen3vl-moe.cpp +140 -0
  173. data/ext/sources/examples/talk-llama/models/qwen3vl.cpp +132 -0
  174. data/ext/sources/examples/talk-llama/models/refact.cpp +94 -0
  175. data/ext/sources/examples/talk-llama/models/rnd1.cpp +126 -0
  176. data/ext/sources/examples/talk-llama/models/rwkv6-base.cpp +164 -0
  177. data/ext/sources/examples/talk-llama/models/rwkv6.cpp +94 -0
  178. data/ext/sources/examples/talk-llama/models/rwkv6qwen2.cpp +86 -0
  179. data/ext/sources/examples/talk-llama/models/rwkv7-base.cpp +137 -0
  180. data/ext/sources/examples/talk-llama/models/rwkv7.cpp +90 -0
  181. data/ext/sources/examples/talk-llama/models/seed-oss.cpp +124 -0
  182. data/ext/sources/examples/talk-llama/models/smallthinker.cpp +126 -0
  183. data/ext/sources/examples/talk-llama/models/smollm3.cpp +128 -0
  184. data/ext/sources/examples/talk-llama/models/stablelm.cpp +146 -0
  185. data/ext/sources/examples/talk-llama/models/starcoder.cpp +100 -0
  186. data/ext/sources/examples/talk-llama/models/starcoder2.cpp +121 -0
  187. data/ext/sources/examples/talk-llama/models/step35-iswa.cpp +165 -0
  188. data/ext/sources/examples/talk-llama/models/t5-dec.cpp +166 -0
  189. data/ext/sources/examples/talk-llama/models/t5-enc.cpp +96 -0
  190. data/ext/sources/examples/talk-llama/models/wavtokenizer-dec.cpp +149 -0
  191. data/ext/sources/examples/talk-llama/models/xverse.cpp +108 -0
  192. data/ext/sources/examples/talk-llama/unicode.cpp +121 -79
  193. data/ext/sources/examples/vad-speech-segments/CMakeLists.txt +1 -1
  194. data/ext/sources/examples/whisper.wasm/index-tmpl.html +1 -1
  195. data/ext/sources/ggml/CMakeLists.txt +90 -56
  196. data/ext/sources/ggml/include/ggml-alloc.h +9 -0
  197. data/ext/sources/ggml/include/ggml-backend.h +5 -2
  198. data/ext/sources/ggml/include/ggml-cann.h +1 -1
  199. data/ext/sources/ggml/include/ggml-cpu.h +6 -0
  200. data/ext/sources/ggml/include/ggml-hexagon.h +19 -0
  201. data/ext/sources/ggml/include/ggml-openvino.h +37 -0
  202. data/ext/sources/ggml/include/ggml-opt.h +1 -1
  203. data/ext/sources/ggml/include/ggml-rpc.h +14 -12
  204. data/ext/sources/ggml/include/ggml-virtgpu.h +14 -0
  205. data/ext/sources/ggml/include/ggml-zendnn.h +22 -0
  206. data/ext/sources/ggml/include/ggml.h +246 -21
  207. data/ext/sources/ggml/src/CMakeLists.txt +85 -11
  208. data/ext/sources/ggml/src/ggml-alloc.c +128 -50
  209. data/ext/sources/ggml/src/ggml-backend-dl.cpp +48 -0
  210. data/ext/sources/ggml/src/ggml-backend-dl.h +45 -0
  211. data/ext/sources/ggml/src/ggml-backend-impl.h +1 -4
  212. data/ext/sources/ggml/src/ggml-backend-reg.cpp +54 -88
  213. data/ext/sources/ggml/src/ggml-backend.cpp +76 -23
  214. data/ext/sources/ggml/src/ggml-blas/CMakeLists.txt +18 -4
  215. data/ext/sources/ggml/src/ggml-blas/ggml-blas.cpp +11 -11
  216. data/ext/sources/ggml/src/ggml-cann/acl_tensor.cpp +58 -46
  217. data/ext/sources/ggml/src/ggml-cann/acl_tensor.h +139 -48
  218. data/ext/sources/ggml/src/ggml-cann/aclnn_ops.cpp +2427 -1785
  219. data/ext/sources/ggml/src/ggml-cann/aclnn_ops.h +238 -362
  220. data/ext/sources/ggml/src/ggml-cann/common.h +285 -211
  221. data/ext/sources/ggml/src/ggml-cann/ggml-cann.cpp +663 -831
  222. data/ext/sources/ggml/src/ggml-common.h +11 -0
  223. data/ext/sources/ggml/src/ggml-cpu/CMakeLists.txt +170 -95
  224. data/ext/sources/ggml/src/ggml-cpu/amx/amx.cpp +42 -18
  225. data/ext/sources/ggml/src/ggml-cpu/amx/common.h +34 -10
  226. data/ext/sources/ggml/src/ggml-cpu/amx/mmq.cpp +85 -85
  227. data/ext/sources/ggml/src/ggml-cpu/arch/arm/cpu-feats.cpp +4 -0
  228. data/ext/sources/ggml/src/ggml-cpu/arch/arm/quants.c +513 -27
  229. data/ext/sources/ggml/src/ggml-cpu/arch/arm/repack.cpp +4192 -992
  230. data/ext/sources/ggml/src/ggml-cpu/arch/loongarch/quants.c +4 -5
  231. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/cpu-feats.cpp +38 -0
  232. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/quants.c +1761 -49
  233. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/repack.cpp +1391 -0
  234. data/ext/sources/ggml/src/ggml-cpu/arch/s390/cpu-feats.cpp +50 -0
  235. data/ext/sources/ggml/src/ggml-cpu/arch/s390/quants.c +8 -10
  236. data/ext/sources/ggml/src/ggml-cpu/arch/x86/quants.c +9 -9
  237. data/ext/sources/ggml/src/ggml-cpu/arch/x86/repack.cpp +124 -24
  238. data/ext/sources/ggml/src/ggml-cpu/arch-fallback.h +157 -28
  239. data/ext/sources/ggml/src/ggml-cpu/binary-ops.cpp +2 -6
  240. data/ext/sources/ggml/src/ggml-cpu/common.h +8 -0
  241. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-impl.h +8 -3
  242. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.c +251 -80
  243. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.cpp +19 -0
  244. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.cpp +587 -119
  245. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.h +33 -44
  246. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp +1093 -194
  247. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.cpp +1284 -203
  248. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.h +6 -0
  249. data/ext/sources/ggml/src/ggml-cpu/ops.cpp +1519 -527
  250. data/ext/sources/ggml/src/ggml-cpu/ops.h +6 -4
  251. data/ext/sources/ggml/src/ggml-cpu/quants.c +40 -0
  252. data/ext/sources/ggml/src/ggml-cpu/quants.h +3 -0
  253. data/ext/sources/ggml/src/ggml-cpu/repack.cpp +3632 -781
  254. data/ext/sources/ggml/src/ggml-cpu/repack.h +129 -4
  255. data/ext/sources/ggml/src/ggml-cpu/simd-gemm.h +136 -0
  256. data/ext/sources/ggml/src/ggml-cpu/simd-mappings.h +152 -46
  257. data/ext/sources/ggml/src/ggml-cpu/spacemit/ime.cpp +3 -2
  258. data/ext/sources/ggml/src/ggml-cpu/unary-ops.cpp +152 -1
  259. data/ext/sources/ggml/src/ggml-cpu/unary-ops.h +7 -0
  260. data/ext/sources/ggml/src/ggml-cpu/vec.cpp +140 -0
  261. data/ext/sources/ggml/src/ggml-cpu/vec.h +261 -146
  262. data/ext/sources/ggml/src/ggml-cuda/CMakeLists.txt +72 -1
  263. data/ext/sources/ggml/src/ggml-cuda/argmax.cu +2 -2
  264. data/ext/sources/ggml/src/ggml-cuda/argsort.cu +132 -6
  265. data/ext/sources/ggml/src/ggml-cuda/argsort.cuh +16 -0
  266. data/ext/sources/ggml/src/ggml-cuda/binbcast.cu +33 -31
  267. data/ext/sources/ggml/src/ggml-cuda/common.cuh +474 -85
  268. data/ext/sources/ggml/src/ggml-cuda/convert.cu +41 -27
  269. data/ext/sources/ggml/src/ggml-cuda/convert.cuh +10 -0
  270. data/ext/sources/ggml/src/ggml-cuda/cpy-utils.cuh +1 -1
  271. data/ext/sources/ggml/src/ggml-cuda/cpy.cu +342 -246
  272. data/ext/sources/ggml/src/ggml-cuda/cpy.cuh +1 -5
  273. data/ext/sources/ggml/src/ggml-cuda/cumsum.cu +307 -0
  274. data/ext/sources/ggml/src/ggml-cuda/cumsum.cuh +5 -0
  275. data/ext/sources/ggml/src/ggml-cuda/diag.cu +77 -0
  276. data/ext/sources/ggml/src/ggml-cuda/diag.cuh +5 -0
  277. data/ext/sources/ggml/src/ggml-cuda/fattn-common.cuh +98 -74
  278. data/ext/sources/ggml/src/ggml-cuda/fattn-mma-f16.cuh +973 -665
  279. data/ext/sources/ggml/src/ggml-cuda/fattn-tile.cu +35 -741
  280. data/ext/sources/ggml/src/ggml-cuda/fattn-tile.cuh +1255 -0
  281. data/ext/sources/ggml/src/ggml-cuda/fattn-vec.cuh +33 -40
  282. data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cu +40 -18
  283. data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cuh +48 -0
  284. data/ext/sources/ggml/src/ggml-cuda/fattn.cu +206 -45
  285. data/ext/sources/ggml/src/ggml-cuda/fill.cu +37 -0
  286. data/ext/sources/ggml/src/ggml-cuda/fill.cuh +3 -0
  287. data/ext/sources/ggml/src/ggml-cuda/gated_delta_net.cu +263 -0
  288. data/ext/sources/ggml/src/ggml-cuda/gated_delta_net.cuh +4 -0
  289. data/ext/sources/ggml/src/ggml-cuda/ggml-cuda.cu +1688 -302
  290. data/ext/sources/ggml/src/ggml-cuda/mean.cu +12 -10
  291. data/ext/sources/ggml/src/ggml-cuda/mma.cuh +908 -48
  292. data/ext/sources/ggml/src/ggml-cuda/mmf.cu +88 -20
  293. data/ext/sources/ggml/src/ggml-cuda/mmf.cuh +502 -90
  294. data/ext/sources/ggml/src/ggml-cuda/mmid.cu +164 -0
  295. data/ext/sources/ggml/src/ggml-cuda/mmid.cuh +5 -0
  296. data/ext/sources/ggml/src/ggml-cuda/mmq.cu +69 -176
  297. data/ext/sources/ggml/src/ggml-cuda/mmq.cuh +532 -193
  298. data/ext/sources/ggml/src/ggml-cuda/mmvf.cu +460 -104
  299. data/ext/sources/ggml/src/ggml-cuda/mmvf.cuh +5 -2
  300. data/ext/sources/ggml/src/ggml-cuda/mmvq.cu +360 -122
  301. data/ext/sources/ggml/src/ggml-cuda/mmvq.cuh +2 -1
  302. data/ext/sources/ggml/src/ggml-cuda/norm.cu +18 -76
  303. data/ext/sources/ggml/src/ggml-cuda/pad.cu +73 -39
  304. data/ext/sources/ggml/src/ggml-cuda/quantize.cu +152 -1
  305. data/ext/sources/ggml/src/ggml-cuda/quantize.cuh +14 -0
  306. data/ext/sources/ggml/src/ggml-cuda/reduce_rows.cuh +2 -16
  307. data/ext/sources/ggml/src/ggml-cuda/rope.cu +364 -149
  308. data/ext/sources/ggml/src/ggml-cuda/rope.cuh +2 -0
  309. data/ext/sources/ggml/src/ggml-cuda/set-rows.cu +101 -47
  310. data/ext/sources/ggml/src/ggml-cuda/set.cu +39 -0
  311. data/ext/sources/ggml/src/ggml-cuda/set.cuh +7 -0
  312. data/ext/sources/ggml/src/ggml-cuda/softmax.cu +163 -41
  313. data/ext/sources/ggml/src/ggml-cuda/solve_tri.cu +275 -0
  314. data/ext/sources/ggml/src/ggml-cuda/solve_tri.cuh +3 -0
  315. data/ext/sources/ggml/src/ggml-cuda/ssm-conv.cu +68 -50
  316. data/ext/sources/ggml/src/ggml-cuda/ssm-conv.cuh +1 -1
  317. data/ext/sources/ggml/src/ggml-cuda/ssm-scan.cu +49 -84
  318. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_1-ncols2_32.cu +5 -0
  319. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_16-ncols2_4.cu +1 -0
  320. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_2-ncols2_32.cu +5 -0
  321. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_2-ncols2_4.cu +1 -0
  322. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_4-ncols2_4.cu +1 -0
  323. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-mma-f16-instance-ncols1_8-ncols2_4.cu +1 -0
  324. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq112-dv112.cu +5 -0
  325. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq128-dv128.cu +5 -0
  326. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq256-dv256.cu +5 -0
  327. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq40-dv40.cu +5 -0
  328. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq576-dv512.cu +5 -0
  329. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq64-dv64.cu +5 -0
  330. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq72-dv72.cu +5 -0
  331. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq80-dv80.cu +5 -0
  332. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq96-dv96.cu +5 -0
  333. data/ext/sources/ggml/src/ggml-cuda/template-instances/generate_cu_files.py +22 -4
  334. data/ext/sources/ggml/src/ggml-cuda/top-k.cu +95 -0
  335. data/ext/sources/ggml/src/ggml-cuda/top-k.cuh +3 -0
  336. data/ext/sources/ggml/src/ggml-cuda/topk-moe.cu +275 -119
  337. data/ext/sources/ggml/src/ggml-cuda/topk-moe.cuh +20 -7
  338. data/ext/sources/ggml/src/ggml-cuda/tri.cu +136 -0
  339. data/ext/sources/ggml/src/ggml-cuda/tri.cuh +5 -0
  340. data/ext/sources/ggml/src/ggml-cuda/unary.cu +160 -11
  341. data/ext/sources/ggml/src/ggml-cuda/unary.cuh +38 -0
  342. data/ext/sources/ggml/src/ggml-cuda/upscale.cu +163 -7
  343. data/ext/sources/ggml/src/ggml-cuda/vecdotq.cuh +31 -17
  344. data/ext/sources/ggml/src/ggml-cuda/vendors/cuda.h +4 -0
  345. data/ext/sources/ggml/src/ggml-cuda/vendors/hip.h +22 -1
  346. data/ext/sources/ggml/src/ggml-cuda/vendors/musa.h +6 -0
  347. data/ext/sources/ggml/src/ggml-hexagon/CMakeLists.txt +117 -0
  348. data/ext/sources/ggml/src/ggml-hexagon/ggml-hexagon.cpp +3325 -0
  349. data/ext/sources/ggml/src/ggml-hexagon/htp/CMakeLists.txt +46 -0
  350. data/ext/sources/ggml/src/ggml-hexagon/htp/act-ops.c +813 -0
  351. data/ext/sources/ggml/src/ggml-hexagon/htp/argsort-ops.c +281 -0
  352. data/ext/sources/ggml/src/ggml-hexagon/htp/binary-ops.c +891 -0
  353. data/ext/sources/ggml/src/ggml-hexagon/htp/cmake-toolchain.cmake +157 -0
  354. data/ext/sources/ggml/src/ggml-hexagon/htp/cpy-ops.c +252 -0
  355. data/ext/sources/ggml/src/ggml-hexagon/htp/flash-attn-ops.c +713 -0
  356. data/ext/sources/ggml/src/ggml-hexagon/htp/get-rows-ops.c +112 -0
  357. data/ext/sources/ggml/src/ggml-hexagon/htp/hex-dma.c +63 -0
  358. data/ext/sources/ggml/src/ggml-hexagon/htp/hex-dma.h +182 -0
  359. data/ext/sources/ggml/src/ggml-hexagon/htp/hex-dump.h +77 -0
  360. data/ext/sources/ggml/src/ggml-hexagon/htp/hex-fastdiv.h +37 -0
  361. data/ext/sources/ggml/src/ggml-hexagon/htp/hex-utils.h +51 -0
  362. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-ctx.h +35 -0
  363. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-msg.h +155 -0
  364. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-ops.h +63 -0
  365. data/ext/sources/ggml/src/ggml-hexagon/htp/htp_iface.idl +16 -0
  366. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-arith.h +443 -0
  367. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-base.h +240 -0
  368. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-copy.h +245 -0
  369. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-div.h +251 -0
  370. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-dump.h +129 -0
  371. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-exp.h +215 -0
  372. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-floor.h +100 -0
  373. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-inverse.h +210 -0
  374. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-reduce.h +296 -0
  375. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-scale.h +133 -0
  376. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-sigmoid.h +141 -0
  377. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-sqrt.h +126 -0
  378. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-types.h +36 -0
  379. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-utils.h +26 -0
  380. data/ext/sources/ggml/src/ggml-hexagon/htp/main.c +1199 -0
  381. data/ext/sources/ggml/src/ggml-hexagon/htp/matmul-ops.c +2670 -0
  382. data/ext/sources/ggml/src/ggml-hexagon/htp/rope-ops.c +497 -0
  383. data/ext/sources/ggml/src/ggml-hexagon/htp/set-rows-ops.c +168 -0
  384. data/ext/sources/ggml/src/ggml-hexagon/htp/softmax-ops.c +419 -0
  385. data/ext/sources/ggml/src/ggml-hexagon/htp/ssm-conv.c +339 -0
  386. data/ext/sources/ggml/src/ggml-hexagon/htp/sum-rows-ops.c +128 -0
  387. data/ext/sources/ggml/src/ggml-hexagon/htp/unary-ops.c +382 -0
  388. data/ext/sources/ggml/src/ggml-hexagon/htp/worker-pool.c +293 -0
  389. data/ext/sources/ggml/src/ggml-hexagon/htp/worker-pool.h +57 -0
  390. data/ext/sources/ggml/src/ggml-hexagon/htp-drv.cpp +418 -0
  391. data/ext/sources/ggml/src/ggml-hexagon/htp-drv.h +121 -0
  392. data/ext/sources/ggml/src/ggml-hexagon/libdl.h +79 -0
  393. data/ext/sources/ggml/src/ggml-hexagon/libggml-htp.inf +38 -0
  394. data/ext/sources/ggml/src/ggml-hexagon/op-desc.h +153 -0
  395. data/ext/sources/ggml/src/ggml-hip/CMakeLists.txt +14 -13
  396. data/ext/sources/ggml/src/ggml-impl.h +129 -6
  397. data/ext/sources/ggml/src/ggml-metal/CMakeLists.txt +10 -10
  398. data/ext/sources/ggml/src/ggml-metal/ggml-metal-common.cpp +15 -4
  399. data/ext/sources/ggml/src/ggml-metal/ggml-metal-context.h +8 -0
  400. data/ext/sources/ggml/src/ggml-metal/ggml-metal-context.m +173 -34
  401. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.cpp +912 -344
  402. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.h +124 -59
  403. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.m +588 -144
  404. data/ext/sources/ggml/src/ggml-metal/ggml-metal-impl.h +396 -23
  405. data/ext/sources/ggml/src/ggml-metal/ggml-metal-ops.cpp +1724 -421
  406. data/ext/sources/ggml/src/ggml-metal/ggml-metal-ops.h +16 -3
  407. data/ext/sources/ggml/src/ggml-metal/ggml-metal.cpp +333 -114
  408. data/ext/sources/ggml/src/ggml-metal/ggml-metal.metal +3050 -1539
  409. data/ext/sources/ggml/src/ggml-musa/CMakeLists.txt +3 -1
  410. data/ext/sources/ggml/src/ggml-opencl/CMakeLists.txt +30 -1
  411. data/ext/sources/ggml/src/ggml-opencl/ggml-opencl.cpp +4279 -497
  412. data/ext/sources/ggml/src/ggml-opencl/kernels/concat.cl +41 -99
  413. data/ext/sources/ggml/src/ggml-opencl/kernels/cpy.cl +45 -0
  414. data/ext/sources/ggml/src/ggml-opencl/kernels/cumsum.cl +139 -0
  415. data/ext/sources/ggml/src/ggml-opencl/kernels/cvt.cl +267 -0
  416. data/ext/sources/ggml/src/ggml-opencl/kernels/diag.cl +27 -0
  417. data/ext/sources/ggml/src/ggml-opencl/kernels/exp.cl +125 -0
  418. data/ext/sources/ggml/src/ggml-opencl/kernels/expm1.cl +113 -0
  419. data/ext/sources/ggml/src/ggml-opencl/kernels/fill.cl +17 -0
  420. data/ext/sources/ggml/src/ggml-opencl/kernels/flash_attn_f32.cl +4 -3
  421. data/ext/sources/ggml/src/ggml-opencl/kernels/gemm_moe_mxfp4_f32.cl +162 -0
  422. data/ext/sources/ggml/src/ggml-opencl/kernels/gemm_noshuffle_q4_1_f32.cl +132 -0
  423. data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_moe_mxfp4_f32.cl +156 -0
  424. data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_noshuffle_general_q8_0_f32.cl +195 -0
  425. data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_noshuffle_q4_1_f32.cl +283 -0
  426. data/ext/sources/ggml/src/ggml-opencl/kernels/get_rows.cl +36 -12
  427. data/ext/sources/ggml/src/ggml-opencl/kernels/l2_norm.cl +71 -0
  428. data/ext/sources/ggml/src/ggml-opencl/kernels/mean.cl +140 -0
  429. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f16_f32_kq_kqv.cl +273 -0
  430. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f16_f32_l4_lm.cl +24 -10
  431. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f32_f32_l4_lm.cl +24 -10
  432. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q4_0_f32_l4_lm.cl +163 -0
  433. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q4_1_f32_l4_lm.cl +165 -0
  434. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q6_k_f32_l4_lm.cl +158 -0
  435. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q8_0_f32_8x4.cl +129 -0
  436. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q8_0_f32_l4_lm.cl +154 -0
  437. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_1_f32.cl +219 -0
  438. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_1_f32_flat.cl +229 -0
  439. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q4_k_f32.cl +180 -0
  440. data/ext/sources/ggml/src/ggml-opencl/kernels/{mul_mv_q6_k.cl → mul_mv_q6_k_f32.cl} +4 -0
  441. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q6_k_f32_flat.cl +194 -0
  442. data/ext/sources/ggml/src/ggml-opencl/kernels/neg.cl +125 -0
  443. data/ext/sources/ggml/src/ggml-opencl/kernels/pad.cl +29 -20
  444. data/ext/sources/ggml/src/ggml-opencl/kernels/repeat.cl +31 -32
  445. data/ext/sources/ggml/src/ggml-opencl/kernels/rms_norm.cl +25 -10
  446. data/ext/sources/ggml/src/ggml-opencl/kernels/rope.cl +50 -24
  447. data/ext/sources/ggml/src/ggml-opencl/kernels/scale.cl +14 -4
  448. data/ext/sources/ggml/src/ggml-opencl/kernels/set_rows.cl +35 -16
  449. data/ext/sources/ggml/src/ggml-opencl/kernels/softplus.cl +116 -0
  450. data/ext/sources/ggml/src/ggml-opencl/kernels/solve_tri.cl +51 -0
  451. data/ext/sources/ggml/src/ggml-opencl/kernels/sqr.cl +53 -0
  452. data/ext/sources/ggml/src/ggml-opencl/kernels/sqrt.cl +53 -0
  453. data/ext/sources/ggml/src/ggml-opencl/kernels/ssm_conv.cl +77 -0
  454. data/ext/sources/ggml/src/ggml-opencl/kernels/sum_rows.cl +114 -13
  455. data/ext/sources/ggml/src/ggml-opencl/kernels/tanh.cl +94 -48
  456. data/ext/sources/ggml/src/ggml-opencl/kernels/transpose.cl +39 -0
  457. data/ext/sources/ggml/src/ggml-opencl/kernels/tri.cl +32 -0
  458. data/ext/sources/ggml/src/ggml-openvino/.clang-format +154 -0
  459. data/ext/sources/ggml/src/ggml-openvino/CMakeLists.txt +22 -0
  460. data/ext/sources/ggml/src/ggml-openvino/ggml-decoder.cpp +975 -0
  461. data/ext/sources/ggml/src/ggml-openvino/ggml-decoder.h +294 -0
  462. data/ext/sources/ggml/src/ggml-openvino/ggml-openvino-extra.cpp +373 -0
  463. data/ext/sources/ggml/src/ggml-openvino/ggml-openvino-extra.h +182 -0
  464. data/ext/sources/ggml/src/ggml-openvino/ggml-openvino.cpp +1110 -0
  465. data/ext/sources/ggml/src/ggml-openvino/ggml-quants.cpp +884 -0
  466. data/ext/sources/ggml/src/ggml-openvino/ggml-quants.h +153 -0
  467. data/ext/sources/ggml/src/ggml-openvino/openvino/decoder.h +74 -0
  468. data/ext/sources/ggml/src/ggml-openvino/openvino/frontend.cpp +27 -0
  469. data/ext/sources/ggml/src/ggml-openvino/openvino/frontend.h +23 -0
  470. data/ext/sources/ggml/src/ggml-openvino/openvino/input_model.cpp +17 -0
  471. data/ext/sources/ggml/src/ggml-openvino/openvino/input_model.h +29 -0
  472. data/ext/sources/ggml/src/ggml-openvino/openvino/node_context.h +112 -0
  473. data/ext/sources/ggml/src/ggml-openvino/openvino/op/cont.cpp +48 -0
  474. data/ext/sources/ggml/src/ggml-openvino/openvino/op/cpy.cpp +21 -0
  475. data/ext/sources/ggml/src/ggml-openvino/openvino/op/flash_attn_ext.cpp +90 -0
  476. data/ext/sources/ggml/src/ggml-openvino/openvino/op/get_rows.cpp +69 -0
  477. data/ext/sources/ggml/src/ggml-openvino/openvino/op/glu_geglu.cpp +61 -0
  478. data/ext/sources/ggml/src/ggml-openvino/openvino/op/glu_swiglu.cpp +62 -0
  479. data/ext/sources/ggml/src/ggml-openvino/openvino/op/mulmat.cpp +90 -0
  480. data/ext/sources/ggml/src/ggml-openvino/openvino/op/permute.cpp +102 -0
  481. data/ext/sources/ggml/src/ggml-openvino/openvino/op/reshape.cpp +83 -0
  482. data/ext/sources/ggml/src/ggml-openvino/openvino/op/rms_norm.cpp +46 -0
  483. data/ext/sources/ggml/src/ggml-openvino/openvino/op/rope.cpp +123 -0
  484. data/ext/sources/ggml/src/ggml-openvino/openvino/op/scale.cpp +41 -0
  485. data/ext/sources/ggml/src/ggml-openvino/openvino/op/set_rows.cpp +76 -0
  486. data/ext/sources/ggml/src/ggml-openvino/openvino/op/softmax.cpp +89 -0
  487. data/ext/sources/ggml/src/ggml-openvino/openvino/op/transpose.cpp +23 -0
  488. data/ext/sources/ggml/src/ggml-openvino/openvino/op/unary_silu.cpp +27 -0
  489. data/ext/sources/ggml/src/ggml-openvino/openvino/op/view.cpp +53 -0
  490. data/ext/sources/ggml/src/ggml-openvino/openvino/op_table.cpp +46 -0
  491. data/ext/sources/ggml/src/ggml-openvino/openvino/op_table.h +39 -0
  492. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/eliminate_zp.cpp +123 -0
  493. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/eliminate_zp.h +17 -0
  494. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/fuse_to_sdpa.cpp +60 -0
  495. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/fuse_to_sdpa.h +17 -0
  496. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/mark_decompression_convert_constant_folding.h +29 -0
  497. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/squeeze_matmul.cpp +58 -0
  498. data/ext/sources/ggml/src/ggml-openvino/openvino/pass/squeeze_matmul.h +17 -0
  499. data/ext/sources/ggml/src/ggml-openvino/openvino/translate_session.cpp +293 -0
  500. data/ext/sources/ggml/src/ggml-openvino/openvino/translate_session.h +28 -0
  501. data/ext/sources/ggml/src/ggml-openvino/openvino/utils.cpp +226 -0
  502. data/ext/sources/ggml/src/ggml-openvino/openvino/utils.h +85 -0
  503. data/ext/sources/ggml/src/ggml-openvino/utils.cpp +823 -0
  504. data/ext/sources/ggml/src/ggml-openvino/utils.h +123 -0
  505. data/ext/sources/ggml/src/ggml-quants.c +96 -5
  506. data/ext/sources/ggml/src/ggml-quants.h +3 -0
  507. data/ext/sources/ggml/src/ggml-rpc/ggml-rpc.cpp +438 -156
  508. data/ext/sources/ggml/src/ggml-sycl/CMakeLists.txt +59 -87
  509. data/ext/sources/ggml/src/ggml-sycl/add-id.cpp +81 -0
  510. data/ext/sources/ggml/src/ggml-sycl/add-id.hpp +8 -0
  511. data/ext/sources/ggml/src/ggml-sycl/backend.hpp +7 -0
  512. data/ext/sources/ggml/src/ggml-sycl/binbcast.cpp +21 -29
  513. data/ext/sources/ggml/src/ggml-sycl/binbcast.hpp +0 -6
  514. data/ext/sources/ggml/src/ggml-sycl/common.hpp +427 -20
  515. data/ext/sources/ggml/src/ggml-sycl/concat.cpp +55 -44
  516. data/ext/sources/ggml/src/ggml-sycl/convert.cpp +103 -1
  517. data/ext/sources/ggml/src/ggml-sycl/convert.hpp +22 -1
  518. data/ext/sources/ggml/src/ggml-sycl/count-equal.cpp +79 -0
  519. data/ext/sources/ggml/src/ggml-sycl/count-equal.hpp +9 -0
  520. data/ext/sources/ggml/src/ggml-sycl/cpy.cpp +0 -3
  521. data/ext/sources/ggml/src/ggml-sycl/dequantize.hpp +18 -0
  522. data/ext/sources/ggml/src/ggml-sycl/dpct/helper.hpp +867 -50
  523. data/ext/sources/ggml/src/ggml-sycl/element_wise.cpp +401 -358
  524. data/ext/sources/ggml/src/ggml-sycl/element_wise.hpp +12 -2
  525. data/ext/sources/ggml/src/ggml-sycl/fattn-common.hpp +1179 -0
  526. data/ext/sources/ggml/src/ggml-sycl/fattn-tile.cpp +55 -0
  527. data/ext/sources/ggml/src/ggml-sycl/fattn-tile.hpp +1338 -0
  528. data/ext/sources/ggml/src/ggml-sycl/fattn-vec.hpp +667 -0
  529. data/ext/sources/ggml/src/ggml-sycl/fattn.cpp +225 -0
  530. data/ext/sources/ggml/src/ggml-sycl/fattn.hpp +22 -0
  531. data/ext/sources/ggml/src/ggml-sycl/gated_delta_net.cpp +309 -0
  532. data/ext/sources/ggml/src/ggml-sycl/gated_delta_net.hpp +8 -0
  533. data/ext/sources/ggml/src/ggml-sycl/ggml-sycl.cpp +645 -155
  534. data/ext/sources/ggml/src/ggml-sycl/mmvq.cpp +22 -0
  535. data/ext/sources/ggml/src/ggml-sycl/norm.cpp +221 -66
  536. data/ext/sources/ggml/src/ggml-sycl/norm.hpp +2 -0
  537. data/ext/sources/ggml/src/ggml-sycl/outprod.cpp +3 -3
  538. data/ext/sources/ggml/src/ggml-sycl/pad.cpp +97 -0
  539. data/ext/sources/ggml/src/ggml-sycl/pad.hpp +24 -0
  540. data/ext/sources/ggml/src/ggml-sycl/pad_reflect_1d.cpp +100 -0
  541. data/ext/sources/ggml/src/ggml-sycl/pad_reflect_1d.hpp +10 -0
  542. data/ext/sources/ggml/src/ggml-sycl/presets.hpp +5 -0
  543. data/ext/sources/ggml/src/ggml-sycl/quants.hpp +1 -1
  544. data/ext/sources/ggml/src/ggml-sycl/repeat_back.cpp +76 -0
  545. data/ext/sources/ggml/src/ggml-sycl/repeat_back.hpp +8 -0
  546. data/ext/sources/ggml/src/ggml-sycl/roll.cpp +122 -0
  547. data/ext/sources/ggml/src/ggml-sycl/roll.hpp +20 -0
  548. data/ext/sources/ggml/src/ggml-sycl/rope.cpp +457 -281
  549. data/ext/sources/ggml/src/ggml-sycl/rope.hpp +6 -0
  550. data/ext/sources/ggml/src/ggml-sycl/set.cpp +73 -0
  551. data/ext/sources/ggml/src/ggml-sycl/set.hpp +5 -0
  552. data/ext/sources/ggml/src/ggml-sycl/softmax.cpp +327 -162
  553. data/ext/sources/ggml/src/ggml-sycl/softmax.hpp +4 -0
  554. data/ext/sources/ggml/src/ggml-sycl/ssm_conv.cpp +127 -0
  555. data/ext/sources/ggml/src/ggml-sycl/ssm_conv.hpp +5 -0
  556. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq112-dv112.cpp +5 -0
  557. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq128-dv128.cpp +5 -0
  558. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq256-dv256.cpp +5 -0
  559. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq40-dv40.cpp +5 -0
  560. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq576-dv512.cpp +5 -0
  561. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq64-dv64.cpp +5 -0
  562. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq72-dv72.cpp +5 -0
  563. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq80-dv80.cpp +5 -0
  564. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-tile-instance-dkq96-dv96.cpp +5 -0
  565. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-f16-f16.cpp +7 -0
  566. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-f16-q4_0.cpp +7 -0
  567. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-f16-q4_1.cpp +7 -0
  568. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-f16-q5_0.cpp +7 -0
  569. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-f16-q5_1.cpp +7 -0
  570. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-f16-q8_0.cpp +7 -0
  571. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_0-f16.cpp +7 -0
  572. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_0-q4_0.cpp +7 -0
  573. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_0-q4_1.cpp +7 -0
  574. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_0-q5_0.cpp +7 -0
  575. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_0-q5_1.cpp +7 -0
  576. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_0-q8_0.cpp +7 -0
  577. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_1-f16.cpp +7 -0
  578. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_1-q4_0.cpp +7 -0
  579. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_1-q4_1.cpp +7 -0
  580. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_1-q5_0.cpp +7 -0
  581. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_1-q5_1.cpp +7 -0
  582. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q4_1-q8_0.cpp +7 -0
  583. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_0-f16.cpp +7 -0
  584. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_0-q4_0.cpp +7 -0
  585. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_0-q4_1.cpp +7 -0
  586. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_0-q5_0.cpp +7 -0
  587. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_0-q5_1.cpp +7 -0
  588. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_0-q8_0.cpp +7 -0
  589. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_1-f16.cpp +7 -0
  590. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_1-q4_0.cpp +7 -0
  591. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_1-q4_1.cpp +7 -0
  592. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_1-q5_0.cpp +7 -0
  593. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_1-q5_1.cpp +7 -0
  594. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q5_1-q8_0.cpp +7 -0
  595. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q8_0-f16.cpp +7 -0
  596. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q8_0-q4_0.cpp +7 -0
  597. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q8_0-q4_1.cpp +7 -0
  598. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q8_0-q5_0.cpp +7 -0
  599. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q8_0-q5_1.cpp +7 -0
  600. data/ext/sources/ggml/src/ggml-sycl/template-instances/fattn-vec-instance-q8_0-q8_0.cpp +7 -0
  601. data/ext/sources/ggml/src/ggml-sycl/vecdotq.hpp +71 -0
  602. data/ext/sources/ggml/src/ggml-sycl/wkv.cpp +1 -1
  603. data/ext/sources/ggml/src/ggml-virtgpu/CMakeLists.txt +70 -0
  604. data/ext/sources/ggml/src/ggml-virtgpu/apir_cs_ggml-rpc-front.cpp +87 -0
  605. data/ext/sources/ggml/src/ggml-virtgpu/backend/CMakeLists.txt +21 -0
  606. data/ext/sources/ggml/src/ggml-virtgpu/backend/apir_cs_ggml-rpc-back.cpp +115 -0
  607. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-convert.h +13 -0
  608. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched-backend.cpp +102 -0
  609. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched-buffer-type.cpp +105 -0
  610. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched-buffer.cpp +179 -0
  611. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched-device.cpp +148 -0
  612. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched.cpp +51 -0
  613. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched.gen.h +73 -0
  614. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-dispatched.h +27 -0
  615. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend-virgl-apir.h +32 -0
  616. data/ext/sources/ggml/src/ggml-virtgpu/backend/backend.cpp +144 -0
  617. data/ext/sources/ggml/src/ggml-virtgpu/backend/shared/api_remoting.h +95 -0
  618. data/ext/sources/ggml/src/ggml-virtgpu/backend/shared/apir_backend.gen.h +94 -0
  619. data/ext/sources/ggml/src/ggml-virtgpu/backend/shared/apir_backend.h +50 -0
  620. data/ext/sources/ggml/src/ggml-virtgpu/backend/shared/apir_cs.h +378 -0
  621. data/ext/sources/ggml/src/ggml-virtgpu/backend/shared/apir_cs_ggml.h +232 -0
  622. data/ext/sources/ggml/src/ggml-virtgpu/backend/shared/apir_cs_rpc.h +58 -0
  623. data/ext/sources/ggml/src/ggml-virtgpu/ggml-backend-buffer-type.cpp +81 -0
  624. data/ext/sources/ggml/src/ggml-virtgpu/ggml-backend-buffer.cpp +119 -0
  625. data/ext/sources/ggml/src/ggml-virtgpu/ggml-backend-device.cpp +158 -0
  626. data/ext/sources/ggml/src/ggml-virtgpu/ggml-backend-reg.cpp +213 -0
  627. data/ext/sources/ggml/src/ggml-virtgpu/ggml-backend.cpp +69 -0
  628. data/ext/sources/ggml/src/ggml-virtgpu/ggml-remoting.h +71 -0
  629. data/ext/sources/ggml/src/ggml-virtgpu/ggmlremoting_functions.yaml +166 -0
  630. data/ext/sources/ggml/src/ggml-virtgpu/include/apir_hw.h +9 -0
  631. data/ext/sources/ggml/src/ggml-virtgpu/regenerate_remoting.py +333 -0
  632. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-apir.h +15 -0
  633. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-forward-backend.cpp +58 -0
  634. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-forward-buffer-type.cpp +110 -0
  635. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-forward-buffer.cpp +173 -0
  636. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-forward-device.cpp +192 -0
  637. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-forward-impl.h +36 -0
  638. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-forward.gen.h +53 -0
  639. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-shm.cpp +98 -0
  640. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-shm.h +23 -0
  641. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-utils.cpp +179 -0
  642. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu-utils.h +86 -0
  643. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu.cpp +544 -0
  644. data/ext/sources/ggml/src/ggml-virtgpu/virtgpu.h +117 -0
  645. data/ext/sources/ggml/src/ggml-vulkan/CMakeLists.txt +39 -19
  646. data/ext/sources/ggml/src/ggml-vulkan/ggml-vulkan.cpp +5994 -3055
  647. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/abs.comp +21 -0
  648. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/acc.comp +18 -10
  649. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add.comp +2 -2
  650. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add1.comp +28 -0
  651. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add_id.comp +1 -1
  652. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/arange.comp +20 -0
  653. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp +2 -2
  654. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp +33 -26
  655. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort_large.comp +114 -0
  656. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ceil.comp +22 -0
  657. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/clamp.comp +2 -2
  658. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/concat.comp +2 -2
  659. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/contig_copy.comp +2 -2
  660. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_dw.comp +1 -1
  661. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_mm.comp +47 -49
  662. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv_transpose_1d.comp +1 -1
  663. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy.comp +2 -2
  664. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_from_quant.comp +3 -3
  665. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_to_quant.comp +4 -4
  666. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_transpose.comp +67 -0
  667. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cos.comp +2 -2
  668. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_equal.comp +2 -2
  669. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_experts.comp +51 -0
  670. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum.comp +83 -0
  671. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum_multipass1.comp +60 -0
  672. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum_multipass2.comp +66 -0
  673. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_f32.comp +1 -1
  674. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_funcs.comp → dequant_funcs.glsl} +9 -21
  675. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_funcs_cm2.comp → dequant_funcs_cm2.glsl} +18 -4
  676. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_head.comp → dequant_head.glsl} +1 -1
  677. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_m.comp +1 -1
  678. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_s.comp +1 -1
  679. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_s.comp +1 -1
  680. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xs.comp +1 -1
  681. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xxs.comp +1 -1
  682. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_s.comp +1 -1
  683. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_xxs.comp +1 -1
  684. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_nl.comp +1 -1
  685. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_xs.comp +1 -1
  686. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_mxfp4.comp +3 -3
  687. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q2_k.comp +3 -3
  688. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q3_k.comp +1 -1
  689. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_0.comp +1 -1
  690. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_1.comp +1 -1
  691. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_k.comp +3 -3
  692. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_0.comp +1 -1
  693. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_1.comp +1 -1
  694. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_k.comp +3 -3
  695. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q6_k.comp +1 -1
  696. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q8_0.comp +1 -1
  697. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag.comp +29 -0
  698. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag_mask_inf.comp +1 -1
  699. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/div.comp +2 -2
  700. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/elu.comp +27 -0
  701. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/exp.comp +3 -3
  702. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/fill.comp +19 -0
  703. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn.comp +386 -160
  704. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{flash_attn_base.comp → flash_attn_base.glsl} +82 -20
  705. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp +400 -174
  706. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp +123 -37
  707. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_mask_opt.comp +162 -0
  708. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_split_k_reduce.comp +10 -9
  709. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/floor.comp +22 -0
  710. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gated_delta_net.comp +128 -0
  711. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu.comp +2 -2
  712. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu_erf.comp +2 -2
  713. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu_quick.comp +2 -2
  714. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu.comp +2 -2
  715. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_erf.comp +2 -2
  716. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_quick.comp +2 -2
  717. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_binary_head.comp → generic_binary_head.glsl} +17 -2
  718. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_head.comp → generic_head.glsl} +2 -0
  719. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_unary_head.comp → generic_unary_head.glsl} +7 -0
  720. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows.comp +4 -4
  721. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows_quant.comp +3 -3
  722. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{glu_head.comp → glu_head.glsl} +1 -1
  723. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/group_norm.comp +2 -2
  724. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/hardsigmoid.comp +2 -2
  725. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/hardswish.comp +2 -2
  726. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp +19 -7
  727. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col_3d.comp +2 -3
  728. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/l2_norm.comp +13 -10
  729. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/leaky_relu.comp +2 -2
  730. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/log.comp +18 -0
  731. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul.comp +2 -2
  732. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec.comp +2 -2
  733. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{mul_mat_vec_base.comp → mul_mat_vec_base.glsl} +77 -29
  734. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iface.glsl +35 -0
  735. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_m.comp +71 -21
  736. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_s.comp +41 -25
  737. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_s.comp +2 -2
  738. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xs.comp +44 -26
  739. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xxs.comp +2 -2
  740. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_s.comp +2 -2
  741. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_xxs.comp +2 -2
  742. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_nc.comp +9 -7
  743. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_p021.comp +9 -7
  744. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp +4 -6
  745. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp +2 -2
  746. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp +4 -6
  747. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp +4 -6
  748. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp +2 -2
  749. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vecq.comp +39 -36
  750. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vecq_funcs.glsl +494 -0
  751. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp +88 -105
  752. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp +41 -26
  753. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{mul_mm_funcs.comp → mul_mm_funcs.glsl} +69 -59
  754. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_id_funcs.glsl +74 -0
  755. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq.comp +92 -230
  756. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.glsl +454 -0
  757. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_shmem_types.glsl +78 -0
  758. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/multi_add.comp +97 -13
  759. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/neg.comp +20 -0
  760. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/norm.comp +2 -2
  761. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_adamw.comp +2 -2
  762. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_sgd.comp +1 -1
  763. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pad.comp +21 -6
  764. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pool2d.comp +1 -1
  765. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/quantize_q8_1.comp +10 -10
  766. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/reglu.comp +2 -2
  767. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/relu.comp +2 -2
  768. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat.comp +2 -2
  769. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat_back.comp +2 -2
  770. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm.comp +49 -4
  771. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_back.comp +2 -2
  772. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_partials.comp +2 -2
  773. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/roll.comp +2 -2
  774. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_funcs.glsl +207 -0
  775. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.glsl +20 -0
  776. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_multi.comp +8 -49
  777. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_neox.comp +8 -32
  778. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_norm.comp +8 -32
  779. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_params.glsl +33 -0
  780. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_vision.comp +8 -38
  781. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/round.comp +29 -0
  782. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/scale.comp +2 -2
  783. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sgn.comp +21 -0
  784. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sigmoid.comp +2 -2
  785. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu.comp +2 -2
  786. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu_back.comp +2 -2
  787. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sin.comp +2 -2
  788. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max.comp +1 -1
  789. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_back.comp +2 -2
  790. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large1.comp +62 -0
  791. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large2.comp +79 -0
  792. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large3.comp +65 -0
  793. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large_common.glsl +53 -0
  794. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/softplus.comp +23 -0
  795. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/solve_tri.comp +81 -0
  796. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sqrt.comp +2 -2
  797. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/square.comp +2 -2
  798. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ssm_conv.comp +50 -0
  799. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ssm_scan.comp +124 -0
  800. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/step.comp +22 -0
  801. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sub.comp +2 -2
  802. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.comp +2 -25
  803. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.glsl +25 -0
  804. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/swiglu.comp +2 -2
  805. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/swiglu_oai.comp +2 -2
  806. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp +2 -2
  807. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/timestep_embedding.comp +1 -1
  808. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_argsort.comp +118 -0
  809. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_moe.comp +213 -0
  810. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_nary_search.comp +246 -0
  811. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tri.comp +43 -0
  812. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/trunc.comp +22 -0
  813. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{types.comp → types.glsl} +345 -26
  814. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/upscale.comp +90 -12
  815. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +384 -180
  816. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/xielu.comp +35 -0
  817. data/ext/sources/ggml/src/ggml-webgpu/CMakeLists.txt +28 -2
  818. data/ext/sources/ggml/src/ggml-webgpu/ggml-webgpu-shader-lib.hpp +1374 -0
  819. data/ext/sources/ggml/src/ggml-webgpu/ggml-webgpu.cpp +2544 -726
  820. data/ext/sources/ggml/src/ggml-webgpu/pre_wgsl.hpp +778 -0
  821. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/argmax.wgsl +72 -0
  822. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/argsort.wgsl +106 -0
  823. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/argsort_merge.wgsl +134 -0
  824. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/binary.wgsl +141 -0
  825. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/common_decls.tmpl +65 -72
  826. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/concat.wgsl +75 -0
  827. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/cpy.tmpl.wgsl +107 -0
  828. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/cumsum.wgsl +66 -0
  829. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/embed_wgsl.py +73 -15
  830. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/flash_attn.wgsl +636 -0
  831. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/{get_rows.tmpl.wgsl → get_rows.wgsl} +53 -259
  832. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/glu.tmpl.wgsl +323 -0
  833. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/{mul_mat.tmpl.wgsl → mul_mat.wgsl} +72 -261
  834. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_decls.tmpl +766 -0
  835. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_reg_tile.wgsl +147 -0
  836. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_subgroup_matrix.wgsl +196 -0
  837. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_vec.wgsl +480 -0
  838. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/pad.wgsl +86 -0
  839. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/repeat.wgsl +67 -0
  840. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rms_norm.wgsl +83 -17
  841. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rope.tmpl.wgsl +295 -0
  842. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/scale.wgsl +63 -0
  843. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/set_rows.wgsl +40 -12
  844. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/soft_max.tmpl.wgsl +345 -0
  845. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/sum_rows.wgsl +55 -0
  846. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/unary.wgsl +193 -0
  847. data/ext/sources/ggml/src/ggml-zdnn/ggml-zdnn.cpp +6 -1
  848. data/ext/sources/ggml/src/ggml-zendnn/CMakeLists.txt +91 -0
  849. data/ext/sources/ggml/src/ggml-zendnn/ggml-zendnn.cpp +469 -0
  850. data/ext/sources/ggml/src/ggml.c +590 -64
  851. data/ext/sources/ggml/src/gguf.cpp +229 -44
  852. data/ext/sources/include/whisper.h +1 -0
  853. data/ext/sources/src/CMakeLists.txt +3 -1
  854. data/ext/sources/src/whisper.cpp +106 -62
  855. data/ext/sources/tests/CMakeLists.txt +2 -2
  856. data/ext/sources/tests/test-vad-full.cpp +4 -2
  857. data/ext/sources/tests/test-vad.cpp +1 -1
  858. data/extsources.rb +1 -0
  859. data/lib/whisper/model/uri.rb +17 -18
  860. data/sig/whisper.rbs +162 -4
  861. data/test/test_context_params.rb +82 -0
  862. data/test/test_params.rb +16 -8
  863. data/test/test_segment.rb +0 -1
  864. data/test/test_token.rb +81 -0
  865. data/test/test_vad.rb +1 -1
  866. data/test/test_vad_context.rb +100 -0
  867. data/test/test_vad_segment.rb +19 -0
  868. data/test/test_vad_segments.rb +16 -0
  869. data/test/test_whisper.rb +27 -0
  870. data/whispercpp.gemspec +1 -1
  871. metadata +502 -37
  872. data/ext/sources/build-xcframework.sh +0 -571
  873. data/ext/sources/examples/talk-llama/llama-sampling.h +0 -32
  874. data/ext/sources/ggml/cmake/BuildTypes.cmake +0 -54
  875. data/ext/sources/ggml/src/ggml-cann/Doxyfile +0 -2579
  876. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.comp +0 -105
  877. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp +0 -55
  878. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/add.tmpl.wgsl +0 -44
  879. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/add_in_place.tmpl.wgsl +0 -41
  880. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/binary_head.tmpl +0 -45
  881. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/cpy.wgsl +0 -60
  882. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul.tmpl.wgsl +0 -44
  883. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_in_place.tmpl.wgsl +0 -41
  884. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rms_norm_in_place.wgsl +0 -48
  885. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_bfloat16_support.comp → feature-tests/bfloat16.comp} +0 -0
  886. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_coopmat_support.comp → feature-tests/coopmat.comp} +0 -0
  887. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_coopmat2_support.comp → feature-tests/coopmat2.comp} +0 -0
  888. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_integer_dot_support.comp → feature-tests/integer_dot.comp} +0 -0
  889. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{glu_main.comp → glu_main.glsl} +0 -0
  890. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{rte.comp → rte.glsl} +0 -0
  891. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{utils.comp → utils.glsl} +0 -0
@@ -3,8 +3,11 @@
3
3
  #include "llama-impl.h"
4
4
 
5
5
  #include <map>
6
+ #include <set>
7
+ #include <vector>
6
8
 
7
9
  static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
10
+ { LLM_ARCH_CLIP, "clip" }, // dummy, only used by llama-quantize
8
11
  { LLM_ARCH_LLAMA, "llama" },
9
12
  { LLM_ARCH_LLAMA4, "llama4" },
10
13
  { LLM_ARCH_DECI, "deci" },
@@ -18,11 +21,13 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
18
21
  { LLM_ARCH_STARCODER, "starcoder" },
19
22
  { LLM_ARCH_REFACT, "refact" },
20
23
  { LLM_ARCH_BERT, "bert" },
24
+ { LLM_ARCH_MODERN_BERT, "modern-bert" },
21
25
  { LLM_ARCH_NOMIC_BERT, "nomic-bert" },
22
26
  { LLM_ARCH_NOMIC_BERT_MOE, "nomic-bert-moe" },
23
27
  { LLM_ARCH_NEO_BERT, "neo-bert" },
24
28
  { LLM_ARCH_JINA_BERT_V2, "jina-bert-v2" },
25
29
  { LLM_ARCH_JINA_BERT_V3, "jina-bert-v3" },
30
+ { LLM_ARCH_EUROBERT, "eurobert" },
26
31
  { LLM_ARCH_BLOOM, "bloom" },
27
32
  { LLM_ARCH_STABLELM, "stablelm" },
28
33
  { LLM_ARCH_QWEN, "qwen" },
@@ -31,11 +36,17 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
31
36
  { LLM_ARCH_QWEN2VL, "qwen2vl" },
32
37
  { LLM_ARCH_QWEN3, "qwen3" },
33
38
  { LLM_ARCH_QWEN3MOE, "qwen3moe" },
39
+ { LLM_ARCH_QWEN3NEXT, "qwen3next" },
40
+ { LLM_ARCH_QWEN3VL, "qwen3vl" },
41
+ { LLM_ARCH_QWEN3VLMOE, "qwen3vlmoe" },
42
+ { LLM_ARCH_QWEN35, "qwen35" },
43
+ { LLM_ARCH_QWEN35MOE, "qwen35moe" },
34
44
  { LLM_ARCH_PHI2, "phi2" },
35
45
  { LLM_ARCH_PHI3, "phi3" },
36
46
  { LLM_ARCH_PHIMOE, "phimoe" },
37
47
  { LLM_ARCH_PLAMO, "plamo" },
38
48
  { LLM_ARCH_PLAMO2, "plamo2" },
49
+ { LLM_ARCH_PLAMO3, "plamo3" },
39
50
  { LLM_ARCH_CODESHELL, "codeshell" },
40
51
  { LLM_ARCH_ORION, "orion" },
41
52
  { LLM_ARCH_INTERNLM2, "internlm2" },
@@ -65,14 +76,18 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
65
76
  { LLM_ARCH_CHATGLM, "chatglm" },
66
77
  { LLM_ARCH_GLM4, "glm4" },
67
78
  { LLM_ARCH_GLM4_MOE, "glm4moe" },
79
+ { LLM_ARCH_GLM_DSA, "glm-dsa" },
68
80
  { LLM_ARCH_BITNET, "bitnet" },
69
81
  { LLM_ARCH_T5, "t5" },
70
82
  { LLM_ARCH_T5ENCODER, "t5encoder" },
71
83
  { LLM_ARCH_JAIS, "jais" },
84
+ { LLM_ARCH_JAIS2, "jais2" },
72
85
  { LLM_ARCH_NEMOTRON, "nemotron" },
73
86
  { LLM_ARCH_NEMOTRON_H, "nemotron_h" },
87
+ { LLM_ARCH_NEMOTRON_H_MOE, "nemotron_h_moe" },
74
88
  { LLM_ARCH_EXAONE, "exaone" },
75
89
  { LLM_ARCH_EXAONE4, "exaone4" },
90
+ { LLM_ARCH_EXAONE_MOE, "exaone-moe" },
76
91
  { LLM_ARCH_RWKV6, "rwkv6" },
77
92
  { LLM_ARCH_RWKV6QWEN2, "rwkv6qwen2" },
78
93
  { LLM_ARCH_RWKV7, "rwkv7" },
@@ -84,8 +99,10 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
84
99
  { LLM_ARCH_WAVTOKENIZER_DEC, "wavtokenizer-dec" },
85
100
  { LLM_ARCH_PLM, "plm" },
86
101
  { LLM_ARCH_BAILINGMOE, "bailingmoe" },
102
+ { LLM_ARCH_BAILINGMOE2, "bailingmoe2" },
87
103
  { LLM_ARCH_DOTS1, "dots1" },
88
104
  { LLM_ARCH_ARCEE, "arcee" },
105
+ { LLM_ARCH_AFMOE, "afmoe" },
89
106
  { LLM_ARCH_ERNIE4_5, "ernie4_5" },
90
107
  { LLM_ARCH_ERNIE4_5_MOE, "ernie4_5-moe" },
91
108
  { LLM_ARCH_HUNYUAN_MOE, "hunyuan-moe" },
@@ -93,33 +110,59 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
93
110
  { LLM_ARCH_SMOLLM3, "smollm3" },
94
111
  { LLM_ARCH_OPENAI_MOE, "gpt-oss" },
95
112
  { LLM_ARCH_LFM2, "lfm2" },
113
+ { LLM_ARCH_LFM2MOE, "lfm2moe" },
96
114
  { LLM_ARCH_DREAM, "dream" },
97
115
  { LLM_ARCH_SMALLTHINKER, "smallthinker" },
98
116
  { LLM_ARCH_LLADA, "llada" },
99
117
  { LLM_ARCH_LLADA_MOE, "llada-moe" },
100
118
  { LLM_ARCH_SEED_OSS, "seed_oss" },
101
119
  { LLM_ARCH_GROVEMOE, "grovemoe" },
120
+ { LLM_ARCH_APERTUS, "apertus" },
121
+ { LLM_ARCH_MINIMAX_M2, "minimax-m2" },
122
+ { LLM_ARCH_COGVLM, "cogvlm" },
123
+ { LLM_ARCH_RND1, "rnd1" },
124
+ { LLM_ARCH_PANGU_EMBED, "pangu-embedded" },
125
+ { LLM_ARCH_MISTRAL3, "mistral3" },
126
+ { LLM_ARCH_PADDLEOCR, "paddleocr" },
127
+ { LLM_ARCH_MIMO2, "mimo2" },
128
+ { LLM_ARCH_STEP35, "step35" },
129
+ { LLM_ARCH_LLAMA_EMBED, "llama-embed" },
130
+ { LLM_ARCH_MAINCODER, "maincoder" },
131
+ { LLM_ARCH_KIMI_LINEAR, "kimi-linear" },
102
132
  { LLM_ARCH_UNKNOWN, "(unknown)" },
103
133
  };
104
134
 
105
135
  static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
106
- { LLM_KV_GENERAL_TYPE, "general.type" },
107
- { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
108
- { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
109
- { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
110
- { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
111
- { LLM_KV_GENERAL_NAME, "general.name" },
112
- { LLM_KV_GENERAL_AUTHOR, "general.author" },
113
- { LLM_KV_GENERAL_VERSION, "general.version" },
114
- { LLM_KV_GENERAL_URL, "general.url" },
115
- { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
116
- { LLM_KV_GENERAL_LICENSE, "general.license" },
117
- { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
118
- { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
136
+ { LLM_KV_GENERAL_TYPE, "general.type" },
137
+ { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
138
+ { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
139
+ { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
140
+ { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
141
+ { LLM_KV_GENERAL_SAMPLING_SEQUENCE, "general.sampling.sequence" },
142
+ { LLM_KV_GENERAL_SAMPLING_TOP_K, "general.sampling.top_k" },
143
+ { LLM_KV_GENERAL_SAMPLING_TOP_P, "general.sampling.top_p" },
144
+ { LLM_KV_GENERAL_SAMPLING_MIN_P, "general.sampling.min_p" },
145
+ { LLM_KV_GENERAL_SAMPLING_XTC_PROBABILITY, "general.sampling.xtc_probability" },
146
+ { LLM_KV_GENERAL_SAMPLING_XTC_THRESHOLD, "general.sampling.xtc_threshold" },
147
+ { LLM_KV_GENERAL_SAMPLING_TEMP, "general.sampling.temp" },
148
+ { LLM_KV_GENERAL_SAMPLING_PENALTY_LAST_N, "general.sampling.penalty_last_n" },
149
+ { LLM_KV_GENERAL_SAMPLING_PENALTY_REPEAT, "general.sampling.penalty_repeat" },
150
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT, "general.sampling.mirostat" },
151
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT_TAU, "general.sampling.mirostat_tau" },
152
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT_ETA, "general.sampling.mirostat_eta" },
153
+ { LLM_KV_GENERAL_NAME, "general.name" },
154
+ { LLM_KV_GENERAL_AUTHOR, "general.author" },
155
+ { LLM_KV_GENERAL_VERSION, "general.version" },
156
+ { LLM_KV_GENERAL_URL, "general.url" },
157
+ { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
158
+ { LLM_KV_GENERAL_LICENSE, "general.license" },
159
+ { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
160
+ { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
119
161
 
120
162
  { LLM_KV_VOCAB_SIZE, "%s.vocab_size" },
121
163
  { LLM_KV_CONTEXT_LENGTH, "%s.context_length" },
122
164
  { LLM_KV_EMBEDDING_LENGTH, "%s.embedding_length" },
165
+ { LLM_KV_EMBEDDING_LENGTH_OUT, "%s.embedding_length_out" },
123
166
  { LLM_KV_FEATURES_LENGTH, "%s.features_length" },
124
167
  { LLM_KV_BLOCK_COUNT, "%s.block_count" },
125
168
  { LLM_KV_LEADING_DENSE_BLOCK_COUNT, "%s.leading_dense_block_count" },
@@ -127,18 +170,24 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
127
170
  { LLM_KV_EXPERT_FEED_FORWARD_LENGTH, "%s.expert_feed_forward_length" },
128
171
  { LLM_KV_EXPERT_SHARED_FEED_FORWARD_LENGTH, "%s.expert_shared_feed_forward_length" },
129
172
  { LLM_KV_EXPERT_CHUNK_FEED_FORWARD_LENGTH, "%s.expert_chunk_feed_forward_length" },
173
+ { LLM_KV_SWIGLU_CLAMP_EXP, "%s.swiglu_clamp_exp" },
174
+ { LLM_KV_SWIGLU_CLAMP_SHEXP, "%s.swiglu_clamp_shexp" },
130
175
  { LLM_KV_USE_PARALLEL_RESIDUAL, "%s.use_parallel_residual" },
131
176
  { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" },
132
177
  { LLM_KV_EXPERT_COUNT, "%s.expert_count" },
133
178
  { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" },
134
179
  { LLM_KV_EXPERT_SHARED_COUNT, "%s.expert_shared_count" },
180
+ { LLM_KV_EXPERT_GROUP_COUNT, "%s.expert_group_count" },
181
+ { LLM_KV_EXPERT_GROUP_USED_COUNT, "%s.expert_group_used_count" },
135
182
  { LLM_KV_EXPERT_WEIGHTS_SCALE, "%s.expert_weights_scale" },
136
183
  { LLM_KV_EXPERT_WEIGHTS_NORM, "%s.expert_weights_norm" },
137
184
  { LLM_KV_EXPERT_GATING_FUNC, "%s.expert_gating_func" },
138
185
  { LLM_KV_EXPERT_GROUP_SCALE, "%s.expert_group_scale" },
139
186
  { LLM_KV_EXPERTS_PER_GROUP, "%s.experts_per_group" },
140
187
  { LLM_KV_MOE_EVERY_N_LAYERS, "%s.moe_every_n_layers" },
188
+ { LLM_KV_MOE_LATENT_SIZE, "%s.moe_latent_size" },
141
189
  { LLM_KV_NEXTN_PREDICT_LAYERS, "%s.nextn_predict_layers" },
190
+ { LLM_KV_NUM_DEEPSTACK_LAYERS, "%s.n_deepstack_layers" },
142
191
  { LLM_KV_POOLING_TYPE, "%s.pooling_type" },
143
192
  { LLM_KV_LOGIT_SCALE, "%s.logit_scale" },
144
193
  { LLM_KV_DECODER_START_TOKEN_ID, "%s.decoder_start_token_id" },
@@ -154,6 +203,7 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
154
203
  { LLM_KV_EMBEDDING_SCALE, "%s.embedding_scale" },
155
204
  { LLM_KV_TOKEN_SHIFT_COUNT, "%s.token_shift_count" },
156
205
  { LLM_KV_INTERLEAVE_MOE_LAYER_STEP, "%s.interleave_moe_layer_step" },
206
+ { LLM_KV_FULL_ATTENTION_INTERVAL, "%s.full_attention_interval" },
157
207
 
158
208
  { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" },
159
209
  { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" },
@@ -174,26 +224,35 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
174
224
  { LLM_KV_ATTENTION_GATE_LORA_RANK, "%s.attention.gate_lora_rank" },
175
225
  { LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT, "%s.attention.relative_buckets_count" },
176
226
  { LLM_KV_ATTENTION_SLIDING_WINDOW, "%s.attention.sliding_window" },
227
+ { LLM_KV_ATTENTION_SLIDING_WINDOW_PATTERN, "%s.attention.sliding_window_pattern" },
177
228
  { LLM_KV_ATTENTION_SCALE, "%s.attention.scale" },
178
229
  { LLM_KV_ATTENTION_OUTPUT_SCALE, "%s.attention.output_scale" },
179
230
  { LLM_KV_ATTENTION_TEMPERATURE_LENGTH, "%s.attention.temperature_length" },
231
+ { LLM_KV_ATTENTION_TEMPERATURE_SCALE, "%s.attention.temperature_scale" },
180
232
  { LLM_KV_ATTENTION_KEY_LENGTH_MLA, "%s.attention.key_length_mla" },
181
233
  { LLM_KV_ATTENTION_VALUE_LENGTH_MLA, "%s.attention.value_length_mla" },
234
+ { LLM_KV_ATTENTION_KEY_LENGTH_SWA, "%s.attention.key_length_swa" },
235
+ { LLM_KV_ATTENTION_VALUE_LENGTH_SWA, "%s.attention.value_length_swa" },
236
+ { LLM_KV_ATTENTION_INDEXER_HEAD_COUNT, "%s.attention.indexer.head_count" },
237
+ { LLM_KV_ATTENTION_INDEXER_KEY_LENGTH, "%s.attention.indexer.key_length" },
238
+ { LLM_KV_ATTENTION_INDEXER_TOP_K, "%s.attention.indexer.top_k" },
182
239
 
183
- { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
184
- { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
185
- { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
186
- { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
187
- { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
188
- { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
189
- { LLM_KV_ROPE_SCALING_ATTN_FACTOR, "%s.rope.scaling.attn_factor" },
190
- { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" },
191
- { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" },
192
- { LLM_KV_ROPE_SCALING_YARN_LOG_MUL, "%s.rope.scaling.yarn_log_multiplier" },
193
- { LLM_KV_ROPE_SCALING_YARN_EXT_FACTOR, "%s.rope.scaling.yarn_ext_factor" },
194
- { LLM_KV_ROPE_SCALING_YARN_ATTN_FACTOR, "%s.rope.scaling.yarn_attn_factor" },
195
- { LLM_KV_ROPE_SCALING_YARN_BETA_FAST, "%s.rope.scaling.yarn_beta_fast" },
196
- { LLM_KV_ROPE_SCALING_YARN_BETA_SLOW, "%s.rope.scaling.yarn_beta_slow" },
240
+ { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
241
+ { LLM_KV_ROPE_DIMENSION_COUNT_SWA, "%s.rope.dimension_count_swa" },
242
+ { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
243
+ { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
244
+ { LLM_KV_ROPE_FREQ_BASE_SWA, "%s.rope.freq_base_swa" },
245
+ { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
246
+ { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
247
+ { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
248
+ { LLM_KV_ROPE_SCALING_ATTN_FACTOR, "%s.rope.scaling.attn_factor" },
249
+ { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" },
250
+ { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" },
251
+ { LLM_KV_ROPE_SCALING_YARN_LOG_MUL, "%s.rope.scaling.yarn_log_multiplier" },
252
+ { LLM_KV_ROPE_SCALING_YARN_EXT_FACTOR, "%s.rope.scaling.yarn_ext_factor" },
253
+ { LLM_KV_ROPE_SCALING_YARN_ATTN_FACTOR, "%s.rope.scaling.yarn_attn_factor" },
254
+ { LLM_KV_ROPE_SCALING_YARN_BETA_FAST, "%s.rope.scaling.yarn_beta_fast" },
255
+ { LLM_KV_ROPE_SCALING_YARN_BETA_SLOW, "%s.rope.scaling.yarn_beta_slow" },
197
256
 
198
257
  { LLM_KV_SPLIT_NO, "split.no" },
199
258
  { LLM_KV_SPLIT_COUNT, "split.count" },
@@ -206,6 +265,8 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
206
265
  { LLM_KV_SSM_GROUP_COUNT, "%s.ssm.group_count" },
207
266
  { LLM_KV_SSM_DT_B_C_RMS, "%s.ssm.dt_b_c_rms" },
208
267
 
268
+ { LLM_KV_KDA_HEAD_DIM, "%s.kda.head_dim" },
269
+
209
270
  { LLM_KV_WKV_HEAD_SIZE, "%s.wkv.head_size" },
210
271
 
211
272
  { LLM_KV_POSNET_EMBEDDING_LENGTH, "%s.posnet.embedding_length" },
@@ -217,6 +278,11 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
217
278
  { LLM_KV_CLASSIFIER_OUTPUT_LABELS, "%s.classifier.output_labels" },
218
279
 
219
280
  { LLM_KV_SHORTCONV_L_CACHE, "%s.shortconv.l_cache" },
281
+ // sentence-transformers dense modules feature dims
282
+ { LLM_KV_DENSE_2_FEAT_IN, "%s.dense_2_feat_in" },
283
+ { LLM_KV_DENSE_2_FEAT_OUT, "%s.dense_2_feat_out" },
284
+ { LLM_KV_DENSE_3_FEAT_IN, "%s.dense_3_feat_in" },
285
+ { LLM_KV_DENSE_3_FEAT_OUT, "%s.dense_3_feat_out" },
220
286
 
221
287
  { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" },
222
288
  { LLM_KV_TOKENIZER_PRE, "tokenizer.ggml.pre" },
@@ -256,1980 +322,2255 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
256
322
  { LLM_KV_ADAPTER_LORA_PROMPT_PREFIX, "adapter.lora.prompt_prefix" },
257
323
  { LLM_KV_ADAPTER_ALORA_INVOCATION_TOKENS, "adapter.alora.invocation_tokens" },
258
324
 
325
+ { LLM_KV_XIELU_ALPHA_N, "xielu.alpha_n" },
326
+ { LLM_KV_XIELU_ALPHA_P, "xielu.alpha_p" },
327
+ { LLM_KV_XIELU_BETA, "xielu.beta" },
328
+ { LLM_KV_XIELU_EPS, "xielu.eps" },
329
+
259
330
  // deprecated
260
331
  { LLM_KV_TOKENIZER_PREFIX_ID, "tokenizer.ggml.prefix_token_id" },
261
332
  { LLM_KV_TOKENIZER_SUFFIX_ID, "tokenizer.ggml.suffix_token_id" },
262
333
  { LLM_KV_TOKENIZER_MIDDLE_ID, "tokenizer.ggml.middle_token_id" },
263
334
  };
264
335
 
265
- static const std::map<llm_arch, std::map<llm_tensor, const char *>> LLM_TENSOR_NAMES = {
266
- {
267
- LLM_ARCH_LLAMA,
268
- {
269
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
270
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
271
- { LLM_TENSOR_OUTPUT, "output" },
272
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
273
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
274
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
275
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
276
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
277
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
278
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
279
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
280
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
281
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
282
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
283
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
284
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
285
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
286
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
287
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
288
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
289
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
290
- },
291
- },
292
- {
293
- LLM_ARCH_ARCEE,
294
- {
295
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
296
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
297
- { LLM_TENSOR_OUTPUT, "output" },
298
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
299
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
300
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
301
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
302
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
303
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
304
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
305
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
306
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
307
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
308
- },
309
- },
310
- {
311
- LLM_ARCH_LLAMA4,
312
- {
313
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
314
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
315
- { LLM_TENSOR_OUTPUT, "output" },
316
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
317
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
318
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
319
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
320
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
321
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
322
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
323
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
324
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
325
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
326
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
327
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
328
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
329
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
330
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
331
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
332
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
333
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
334
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
335
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
336
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
337
- },
338
- },
339
- {
340
- LLM_ARCH_DECI,
341
- {
342
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
343
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
344
- { LLM_TENSOR_OUTPUT, "output" },
345
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
346
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
347
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
348
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
349
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
350
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
351
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
352
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
353
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
354
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
355
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
356
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
357
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
358
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
359
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
360
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
361
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
362
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
363
- },
364
- },
365
- {
366
- LLM_ARCH_BAICHUAN,
367
- {
368
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
369
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
370
- { LLM_TENSOR_OUTPUT, "output" },
371
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
372
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
373
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
374
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
375
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
376
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
377
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
378
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
379
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
380
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
381
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
382
- },
383
- },
384
- {
385
- LLM_ARCH_FALCON,
386
- {
387
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
388
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
389
- { LLM_TENSOR_OUTPUT, "output" },
390
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
391
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
392
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
393
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
394
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
395
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
396
- },
397
- },
398
- {
399
- LLM_ARCH_GROK,
400
- {
401
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
402
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
403
- { LLM_TENSOR_OUTPUT, "output" },
404
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
405
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
406
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
407
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
408
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
409
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
410
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
411
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
412
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
413
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
414
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
415
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
416
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
417
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
418
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
419
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
420
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
421
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
422
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
423
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
424
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
425
- },
426
- },
427
- {
428
- LLM_ARCH_GPT2,
429
- {
430
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
431
- { LLM_TENSOR_POS_EMBD, "position_embd" },
432
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
433
- { LLM_TENSOR_OUTPUT, "output" },
434
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
435
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
436
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
437
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
438
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
439
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
440
- },
441
- },
442
- {
443
- LLM_ARCH_GPTJ,
444
- {
445
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
446
- },
447
- },
448
- {
449
- LLM_ARCH_GPTNEOX,
450
- {
451
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
452
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
453
- { LLM_TENSOR_OUTPUT, "output" },
454
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
455
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
456
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
457
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
458
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
459
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
460
- },
461
- },
462
- {
463
- LLM_ARCH_MPT,
464
- {
465
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
466
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
467
- { LLM_TENSOR_OUTPUT, "output"},
468
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
469
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
470
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
471
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
472
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
473
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
474
- { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
475
- { LLM_TENSOR_POS_EMBD, "position_embd" },
476
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm"},
477
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm"},
478
- },
479
- },
480
- {
481
- LLM_ARCH_STARCODER,
482
- {
483
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
484
- { LLM_TENSOR_POS_EMBD, "position_embd" },
485
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
486
- { LLM_TENSOR_OUTPUT, "output" },
487
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
488
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
489
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
490
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
491
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
492
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
493
- },
494
- },
495
- {
496
- LLM_ARCH_REFACT,
497
- {
498
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
499
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
500
- { LLM_TENSOR_OUTPUT, "output" },
501
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
502
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
503
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
504
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
505
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
506
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
507
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
508
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
509
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
510
- },
511
- },
512
- {
513
- LLM_ARCH_BERT,
514
- {
515
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
516
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
517
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
518
- { LLM_TENSOR_POS_EMBD, "position_embd" },
519
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
520
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
521
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
522
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
523
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
524
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
525
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
526
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
527
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
528
- { LLM_TENSOR_CLS, "cls" },
529
- { LLM_TENSOR_CLS_OUT, "cls.output" },
530
- },
531
- },
532
- {
533
- LLM_ARCH_NOMIC_BERT,
534
- {
535
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
536
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
537
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
538
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
539
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
540
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
541
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
542
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
543
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
544
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
545
- },
546
- },
547
- {
548
- LLM_ARCH_NOMIC_BERT_MOE,
549
- {
550
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
551
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
552
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
553
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
554
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
555
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
556
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
557
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
558
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
559
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
560
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
561
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
562
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
563
- },
564
- },
565
- {
566
- LLM_ARCH_NEO_BERT,
567
- {
568
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
569
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
570
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
571
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
572
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
573
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
574
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
575
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
576
- { LLM_TENSOR_CLS, "cls" },
577
- { LLM_TENSOR_CLS_OUT, "cls.output" },
578
- },
579
- },
580
- {
581
- LLM_ARCH_JINA_BERT_V2,
582
- {
583
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
584
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
585
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
586
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
587
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
588
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
589
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
590
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
591
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
592
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
593
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
594
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
595
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
596
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
597
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
598
- { LLM_TENSOR_CLS, "cls" },
599
- },
600
- },
601
- {
602
- LLM_ARCH_JINA_BERT_V3,
603
- {
604
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
605
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
606
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
607
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
608
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
609
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
610
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
611
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
612
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
613
- },
614
- },
615
- {
616
- LLM_ARCH_BLOOM,
617
- {
618
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
619
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
620
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
621
- { LLM_TENSOR_OUTPUT, "output" },
622
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
623
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
624
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
625
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
626
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
627
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
628
- },
629
- },
630
- {
631
- LLM_ARCH_STABLELM,
632
- {
633
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
634
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
635
- { LLM_TENSOR_OUTPUT, "output" },
636
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
637
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
638
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
639
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
640
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
641
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
642
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
643
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
644
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
645
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
646
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
647
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
648
- },
649
- },
650
- {
651
- LLM_ARCH_QWEN,
652
- {
653
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
654
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
655
- { LLM_TENSOR_OUTPUT, "output" },
656
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
657
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
658
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
659
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
660
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
661
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
662
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
663
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
664
- },
665
- },
666
- {
667
- LLM_ARCH_QWEN2,
668
- {
669
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
670
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
671
- { LLM_TENSOR_OUTPUT, "output" },
672
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
673
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
674
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
675
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
676
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
677
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
678
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
679
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
680
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
681
- },
682
- },
683
- {
684
- LLM_ARCH_QWEN2VL,
685
- {
686
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
687
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
688
- { LLM_TENSOR_OUTPUT, "output" },
689
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
690
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
691
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
692
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
693
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
694
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
695
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
696
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
697
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
698
- },
699
- },
700
- {
701
- LLM_ARCH_QWEN2MOE,
702
- {
703
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
704
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
705
- { LLM_TENSOR_OUTPUT, "output" },
706
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
707
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
708
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
709
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
710
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
711
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
712
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
713
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
714
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
715
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
716
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
717
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
718
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
719
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
720
- },
721
- },
722
- {
723
- LLM_ARCH_QWEN3,
724
- {
725
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
726
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
727
- { LLM_TENSOR_OUTPUT, "output" },
728
- { LLM_TENSOR_CLS_OUT, "cls.output" },
729
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
730
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
731
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
732
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
733
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
734
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
735
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
736
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
737
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
738
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
739
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
740
- },
741
- },
742
- {
743
- LLM_ARCH_QWEN3MOE,
744
- {
745
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
746
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
747
- { LLM_TENSOR_OUTPUT, "output" },
748
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
749
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
750
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
751
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
752
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
753
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
754
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
755
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
756
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
757
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
758
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
759
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
760
- },
761
- },
762
- {
763
- LLM_ARCH_PHI2,
764
- {
765
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
766
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
767
- { LLM_TENSOR_OUTPUT, "output" },
768
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
769
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
770
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
771
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
772
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
773
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
774
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
775
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
776
- },
777
- },
778
- {
779
- LLM_ARCH_PHI3,
780
- {
781
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
782
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
783
- { LLM_TENSOR_OUTPUT, "output" },
784
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
785
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
786
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
787
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
788
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
789
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
790
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
791
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
792
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
793
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
794
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
795
- },
796
- },
797
- {
798
- LLM_ARCH_PHIMOE,
799
- {
800
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
801
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
802
- { LLM_TENSOR_OUTPUT, "output" },
803
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
804
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
805
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
806
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
807
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
808
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
809
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
810
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
811
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
812
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
813
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
814
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
815
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
816
- },
817
- },
818
- {
819
- LLM_ARCH_PLAMO,
820
- {
821
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
822
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
823
- { LLM_TENSOR_OUTPUT, "output" },
824
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
825
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
826
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
827
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
828
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
829
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
830
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
831
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
832
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
833
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
834
- },
835
- },
836
- {
837
- LLM_ARCH_PLAMO2,
838
- {
839
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
840
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
841
- { LLM_TENSOR_OUTPUT, "output" },
842
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
843
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
844
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
845
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
846
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
847
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
848
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
849
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
850
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
851
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
852
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
853
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
854
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
855
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
856
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
857
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
858
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
859
- { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
860
- { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
861
- { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
862
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
863
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
864
- },
865
- },
866
- {
867
- LLM_ARCH_CODESHELL,
868
- {
869
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
870
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
871
- { LLM_TENSOR_OUTPUT, "output" },
872
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
873
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
874
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
875
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
876
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
877
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
878
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
879
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
880
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
881
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
882
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
883
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
884
- },
885
- },
886
- {
887
- LLM_ARCH_ORION,
888
- {
889
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
890
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
891
- { LLM_TENSOR_OUTPUT, "output" },
892
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
893
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
894
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
895
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
896
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
897
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
898
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
899
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
900
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
901
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
902
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
903
- },
904
- },
905
- {
906
- LLM_ARCH_INTERNLM2,
907
- {
908
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
909
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
910
- { LLM_TENSOR_OUTPUT, "output" },
911
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
912
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
913
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
914
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
915
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
916
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
917
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
918
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
919
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
920
- },
921
- },
922
- {
923
- LLM_ARCH_MINICPM,
924
- {
925
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
926
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
927
- { LLM_TENSOR_OUTPUT, "output" },
928
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
929
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
930
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
931
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
932
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
933
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
934
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
935
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
936
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
937
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
938
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
939
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
940
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
941
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
942
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
943
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
944
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
945
- },
946
- },
947
- {
948
- LLM_ARCH_MINICPM3,
949
- {
950
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
951
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
952
- { LLM_TENSOR_OUTPUT, "output" },
953
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
954
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
955
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
956
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
957
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
958
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
959
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
960
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
961
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
962
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
963
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
964
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
965
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
966
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
967
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
968
- },
969
- },
970
- {
971
- LLM_ARCH_GEMMA,
972
- {
973
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
974
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
975
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
976
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
977
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
978
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
979
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
980
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
981
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
982
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
983
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
984
- },
985
- },
986
- {
987
- LLM_ARCH_GEMMA2,
988
- {
989
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
990
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
991
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
992
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
993
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
994
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
995
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
996
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
997
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
998
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
999
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1000
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1001
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1002
- },
1003
- },
1004
- {
1005
- LLM_ARCH_GEMMA3,
1006
- {
1007
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1008
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1009
- { LLM_TENSOR_OUTPUT, "output" },
1010
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1011
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1012
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1013
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1014
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1015
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1016
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1017
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1018
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1019
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1020
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1021
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1022
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1023
- },
1024
- },
1025
- {
1026
- LLM_ARCH_GEMMA3N,
1027
- {
1028
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1029
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1030
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1031
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1032
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1033
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1034
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1035
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1036
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1037
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1038
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1039
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1040
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1041
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1042
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1043
- { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
1044
- { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
1045
- { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
1046
- { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
1047
- { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
1048
- { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
1049
- { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
1050
- { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
1051
- { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
1052
- { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
1053
- { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
1054
- { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
1055
- { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
1056
- { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
1057
- { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
1058
- { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
1059
- },
1060
- },
1061
- {
1062
- LLM_ARCH_GEMMA_EMBEDDING,
1063
- {
1064
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1065
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1066
- { LLM_TENSOR_OUTPUT, "output" },
1067
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1068
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1069
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1070
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1071
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1072
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1073
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1074
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1075
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1076
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1077
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1078
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1079
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1080
- },
1081
- },
1082
- {
1083
- LLM_ARCH_STARCODER2,
1084
- {
1085
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1086
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1087
- { LLM_TENSOR_OUTPUT, "output" },
1088
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1089
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1090
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1091
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1092
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1093
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1094
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1095
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1096
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1097
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1098
- },
1099
- },
1100
- {
1101
- LLM_ARCH_MAMBA,
1102
- {
1103
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1104
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1105
- { LLM_TENSOR_OUTPUT, "output" },
1106
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1107
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1108
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1109
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
1110
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1111
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1112
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1113
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1114
- },
1115
- },
1116
- {
1117
- LLM_ARCH_MAMBA2,
1118
- {
1119
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1120
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1121
- { LLM_TENSOR_OUTPUT, "output" },
1122
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1123
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1124
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1125
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1126
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1127
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1128
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1129
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1130
- },
1131
- },
1132
- {
1133
- LLM_ARCH_JAMBA,
1134
- {
1135
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1136
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1137
- { LLM_TENSOR_OUTPUT, "output" },
1138
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1139
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1140
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1141
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
1142
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1143
- { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
1144
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1145
- { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
1146
- { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
1147
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1148
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1149
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1150
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1151
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1152
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1153
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1154
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1155
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1156
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1157
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1158
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1159
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1160
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1161
- },
1162
- },
1163
- {
1164
- LLM_ARCH_FALCON_H1,
1165
- {
1166
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1167
- { LLM_TENSOR_OUTPUT, "output" },
1168
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1169
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1170
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1171
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1172
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1173
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1174
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1175
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1176
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1177
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1178
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1179
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1180
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1181
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1182
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1183
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1184
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1185
- },
1186
- },
1187
- {
1188
- LLM_ARCH_XVERSE,
1189
- {
1190
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1191
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1192
- { LLM_TENSOR_OUTPUT, "output" },
1193
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1194
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1195
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1196
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1197
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1198
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1199
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1200
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1201
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1202
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1203
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1204
- },
1205
- },
1206
- {
1207
- LLM_ARCH_COMMAND_R,
1208
- {
1209
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1210
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1211
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1212
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1213
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1214
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1215
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1216
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1217
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1218
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1219
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1220
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1221
- },
1222
- },
1223
- {
1224
- LLM_ARCH_COHERE2,
1225
- {
1226
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1227
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1228
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1229
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1230
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1231
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1232
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1233
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1234
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1235
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1236
- },
1237
- },
1238
- {
1239
- LLM_ARCH_DBRX,
1240
- {
1241
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1242
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1243
- { LLM_TENSOR_OUTPUT, "output" },
1244
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1245
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1246
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1247
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
1248
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1249
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1250
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1251
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1252
- },
1253
- },
1254
- {
1255
- LLM_ARCH_OLMO,
1256
- {
1257
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1258
- { LLM_TENSOR_OUTPUT, "output" },
1259
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1260
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1261
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1262
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1263
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1264
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1265
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1266
- },
1267
- },
1268
- {
1269
- LLM_ARCH_OLMO2,
1270
- {
1271
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1272
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1273
- { LLM_TENSOR_OUTPUT, "output" },
1274
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1275
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1276
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1277
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1278
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1279
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1280
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1281
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1282
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1283
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1284
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1285
- },
1286
- },
1287
- {
1288
- LLM_ARCH_OLMOE,
1289
- {
1290
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1291
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1292
- { LLM_TENSOR_OUTPUT, "output" },
1293
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1294
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1295
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1296
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1297
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1298
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1299
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1300
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1301
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1302
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1303
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1304
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1305
- },
1306
- },
1307
- {
1308
- LLM_ARCH_OPENELM,
1309
- {
1310
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1311
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1312
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1313
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1314
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1315
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1316
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1317
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1318
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1319
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1320
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1321
- },
1322
- },
1323
- {
1324
- LLM_ARCH_ARCTIC,
1325
- {
1326
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1327
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1328
- { LLM_TENSOR_OUTPUT, "output" },
1329
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1330
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1331
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1332
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1333
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1334
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1335
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1336
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1337
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1338
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1339
- { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
1340
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1341
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1342
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1343
- },
1344
- },
1345
- {
1346
- LLM_ARCH_DEEPSEEK,
1347
- {
1348
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1349
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1350
- { LLM_TENSOR_OUTPUT, "output" },
1351
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1352
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1353
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1354
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1355
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1356
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1357
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1358
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1359
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1360
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1361
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1362
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1363
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1364
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1365
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1366
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1367
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1368
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1369
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1370
- },
1371
- },
1372
- {
1373
- LLM_ARCH_DEEPSEEK2,
1374
- {
1375
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1376
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1377
- { LLM_TENSOR_OUTPUT, "output" },
1378
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1379
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
1380
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
1381
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1382
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
1383
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
1384
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
1385
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
1386
- { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
1387
- { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
1388
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1389
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1390
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1391
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1392
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1393
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1394
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1395
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1396
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1397
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1398
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1399
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1400
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1401
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1402
- },
1403
- },
1404
- {
1405
- LLM_ARCH_PLM,
1406
- {
1407
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1408
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1409
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1410
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1411
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
1412
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
1413
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
1414
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1415
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1416
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1417
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1418
- },
1419
- },
1420
- {
1421
- LLM_ARCH_CHATGLM,
1422
- {
1423
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1424
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1425
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1426
- { LLM_TENSOR_OUTPUT, "output" },
1427
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1428
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1429
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1430
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1431
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1432
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1433
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1434
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1435
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1436
- },
1437
- },
1438
- {
1439
- LLM_ARCH_GLM4,
1440
- {
1441
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1442
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1443
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1444
- { LLM_TENSOR_OUTPUT, "output" },
1445
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1446
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1447
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1448
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1449
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1450
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1451
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1452
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1453
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1454
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1455
- },
1456
- },
1457
- {
1458
- LLM_ARCH_GLM4_MOE,
1459
- {
1460
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1461
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1462
- { LLM_TENSOR_OUTPUT, "output" },
1463
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1464
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1465
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1466
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1467
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1468
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1469
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1470
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1471
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1472
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1473
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1474
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1475
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1476
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1477
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1478
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1479
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1480
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1481
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1482
- // NextN/MTP tensors - preserved but unused (in final layer, dynamic layer number)
1483
- { LLM_TENSOR_NEXTN_EH_PROJ, "blk.%d.nextn.eh_proj" },
1484
- { LLM_TENSOR_NEXTN_EMBED_TOKENS, "blk.%d.nextn.embed_tokens" },
1485
- { LLM_TENSOR_NEXTN_ENORM, "blk.%d.nextn.enorm" },
1486
- { LLM_TENSOR_NEXTN_HNORM, "blk.%d.nextn.hnorm" },
1487
- { LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, "blk.%d.nextn.shared_head_head" },
1488
- { LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, "blk.%d.nextn.shared_head_norm" },
1489
- },
1490
- },
1491
- {
1492
- LLM_ARCH_BITNET,
1493
- {
1494
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1495
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1496
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1497
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1498
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1499
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1500
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1501
- { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
1502
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1503
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1504
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1505
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1506
- { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
1507
- },
1508
- },
1509
- {
1510
- LLM_ARCH_T5,
1511
- {
1512
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1513
- { LLM_TENSOR_OUTPUT, "output" },
1514
- { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
1515
- { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
1516
- { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
1517
- { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
1518
- { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
1519
- { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
1520
- { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
1521
- { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
1522
- { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
1523
- { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
1524
- { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
1525
- { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
1526
- { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
1527
- { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
1528
- { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
1529
- { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
1530
- { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
1531
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
1532
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
1533
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
1534
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
1535
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
1536
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
1537
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
1538
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
1539
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
1540
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
1541
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
1542
- },
1543
- },
1544
- {
1545
- LLM_ARCH_T5ENCODER,
1546
- {
1547
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1548
- { LLM_TENSOR_OUTPUT, "output" },
1549
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
1550
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
1551
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
1552
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
1553
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
1554
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
1555
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
1556
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
1557
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
1558
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
1559
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
1560
- },
1561
- },
1562
- {
1563
- LLM_ARCH_JAIS,
1564
- {
1565
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1566
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1567
- { LLM_TENSOR_OUTPUT, "output" },
1568
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1569
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1570
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1571
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1572
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1573
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1574
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1575
- },
1576
- },
1577
- {
1578
- LLM_ARCH_NEMOTRON,
1579
- {
1580
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1581
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1582
- { LLM_TENSOR_OUTPUT, "output" },
1583
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1584
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1585
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1586
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1587
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1588
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1589
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1590
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1591
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1592
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1593
- },
1594
- },
1595
- {
1596
- LLM_ARCH_NEMOTRON_H,
1597
- {
1598
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1599
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1600
- { LLM_TENSOR_OUTPUT, "output" },
1601
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1602
- // mamba(2) ssm layers
1603
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1604
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1605
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1606
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1607
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1608
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1609
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1610
- // attention layers
1611
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1612
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1613
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1614
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1615
- // dense FFN
1616
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1617
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1618
- },
1619
- },
1620
- {
1621
- LLM_ARCH_EXAONE,
1622
- {
1623
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1624
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1625
- { LLM_TENSOR_OUTPUT, "output" },
1626
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1627
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1628
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1629
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1630
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1631
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1632
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1633
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1634
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1635
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1636
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1637
- },
1638
- },
1639
- {
1640
- LLM_ARCH_EXAONE4,
1641
- {
1642
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1643
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1644
- { LLM_TENSOR_OUTPUT, "output" },
1645
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1646
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1647
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1648
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1649
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1650
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1651
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1652
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1653
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1654
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1655
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1656
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1657
- }
1658
- },
1659
- {
1660
- LLM_ARCH_RWKV6,
1661
- {
1662
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1663
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1664
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1665
- { LLM_TENSOR_OUTPUT, "output" },
1666
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1667
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
1668
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1669
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1670
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
1671
- { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
1672
- { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
1673
- { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
1674
- { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
1675
- { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
1676
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1677
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
1678
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
1679
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
1680
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
1681
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1682
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1683
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1684
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
1685
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1686
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1687
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
1688
- { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
1689
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
1690
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
1691
- { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
1692
- },
1693
- },
1694
- {
1695
- LLM_ARCH_RWKV6QWEN2,
1696
- {
1697
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1698
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1699
- { LLM_TENSOR_OUTPUT, "output" },
1700
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1701
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1702
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1703
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
1704
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1705
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
1706
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
1707
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
1708
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
1709
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1710
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1711
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1712
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
1713
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1714
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1715
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1716
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1717
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1718
- },
1719
- },
1720
- {
1721
- LLM_ARCH_RWKV7,
1722
- {
1723
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1724
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1725
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1726
- { LLM_TENSOR_OUTPUT, "output" },
1727
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1728
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
1729
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
1730
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1731
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1732
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
1733
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
1734
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
1735
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
1736
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
1737
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
1738
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
1739
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
1740
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
1741
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
1742
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
1743
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1744
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1745
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1746
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1747
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1748
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1749
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
1750
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
1751
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
1752
- },
1753
- },
1754
- {
1755
- LLM_ARCH_ARWKV7,
1756
- {
1757
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1758
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1759
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1760
- { LLM_TENSOR_OUTPUT, "output" },
1761
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1762
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
1763
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1764
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1765
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
1766
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
1767
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
1768
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
1769
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
1770
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
1771
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
1772
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
1773
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
1774
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
1775
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
1776
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1777
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1778
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1779
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1780
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1781
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1782
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1783
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1784
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1785
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1786
- },
1787
- },
1788
- {
1789
- LLM_ARCH_GRANITE,
1790
- {
1791
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1792
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1793
- { LLM_TENSOR_OUTPUT, "output" },
1794
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1795
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1796
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1797
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1798
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1799
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1800
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1801
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1802
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1803
- },
1804
- },
1805
- {
1806
- LLM_ARCH_GRANITE_MOE,
1807
- {
1808
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1809
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1810
- { LLM_TENSOR_OUTPUT, "output" },
1811
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1812
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1813
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1814
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1815
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1816
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1817
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1818
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1819
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1820
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1821
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1822
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1823
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1824
- },
1825
- },
1826
- {
1827
- LLM_ARCH_GRANITE_HYBRID,
1828
- {
1829
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1830
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1831
- { LLM_TENSOR_OUTPUT, "output" },
1832
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1833
- // mamba(2) ssm layers
1834
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1835
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1836
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1837
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1838
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1839
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1840
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1841
- // attention layers
1842
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1843
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1844
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1845
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1846
- // dense FFN
1847
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1848
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1849
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1850
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1851
- // moe FFN
1852
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1853
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1854
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1855
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1856
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1857
- // shared expert
1858
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1859
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1860
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1861
- },
1862
- },
1863
- {
1864
- LLM_ARCH_CHAMELEON,
1865
- {
1866
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1867
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1868
- { LLM_TENSOR_OUTPUT, "output" },
1869
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1870
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1871
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1872
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1873
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1874
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1875
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1876
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1877
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1878
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1879
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1880
- },
1881
- },
1882
- {
1883
- LLM_ARCH_WAVTOKENIZER_DEC,
1884
- {
1885
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1886
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1887
- { LLM_TENSOR_CONV1D, "conv1d" },
1888
- { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
1889
- { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
1890
- { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
1891
- { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
1892
- { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
1893
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1894
- { LLM_TENSOR_OUTPUT, "output" },
1895
- { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
1896
- { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
1897
- { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
1898
- { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
1899
- { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
1900
- { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
1901
- { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
1902
- { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
1903
- { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
1904
- { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
1905
- },
1906
- },
1907
- {
1908
- LLM_ARCH_BAILINGMOE,
1909
- {
1910
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1911
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1912
- { LLM_TENSOR_OUTPUT, "output" },
1913
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1914
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1915
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1916
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1917
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1918
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1919
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1920
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1921
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1922
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1923
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1924
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1925
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1926
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1927
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1928
- },
1929
- },
1930
- {
1931
- LLM_ARCH_DOTS1,
1932
- {
1933
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1934
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1935
- { LLM_TENSOR_OUTPUT, "output" },
1936
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1937
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1938
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1939
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1940
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1941
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1942
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1943
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1944
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1945
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1946
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1947
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1948
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1949
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1950
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1951
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1952
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1953
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1954
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1955
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1956
- }
1957
- },
1958
- {
1959
- LLM_ARCH_ERNIE4_5,
1960
- {
1961
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1962
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1963
- { LLM_TENSOR_OUTPUT, "output" },
1964
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1965
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1966
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1967
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1968
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1969
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1970
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1971
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1972
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1973
- },
1974
- },
1975
- {
1976
- LLM_ARCH_ERNIE4_5_MOE,
1977
- {
1978
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1979
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1980
- { LLM_TENSOR_OUTPUT, "output" },
1981
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1982
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1983
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1984
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1985
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1986
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1987
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1988
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1989
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1990
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1991
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1992
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1993
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1994
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1995
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1996
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1997
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1998
- },
1999
- },
2000
- {
2001
- LLM_ARCH_HUNYUAN_MOE,
2002
- {
2003
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2004
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2005
- { LLM_TENSOR_OUTPUT, "output" },
2006
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2007
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2008
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2009
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2010
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2011
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2012
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2013
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2014
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2015
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
2016
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
2017
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
2018
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2019
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2020
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2021
- },
2022
- },
2023
- {
2024
- LLM_ARCH_HUNYUAN_DENSE,
2025
- {
2026
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2027
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2028
- { LLM_TENSOR_OUTPUT, "output" },
2029
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2030
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2031
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2032
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2033
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2034
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2035
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2036
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2037
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2038
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2039
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2040
-
2041
- },
2042
- },
2043
- {
2044
- LLM_ARCH_SMOLLM3,
2045
- {
2046
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2047
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2048
- { LLM_TENSOR_OUTPUT, "output" },
2049
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2050
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2051
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2052
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2053
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2054
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2055
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2056
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2057
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2058
- },
2059
- },
2060
- {
2061
- LLM_ARCH_OPENAI_MOE,
2062
- {
2063
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2064
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2065
- { LLM_TENSOR_OUTPUT, "output" },
2066
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2067
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
2068
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2069
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2070
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2071
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2072
- { LLM_TENSOR_ATTN_SINKS, "blk.%d.attn_sinks" },
2073
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2074
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2075
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2076
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2077
- },
2078
- },
2079
- {
2080
- LLM_ARCH_LFM2,
2081
- {
2082
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2083
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2084
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2085
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2086
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2087
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2088
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2089
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2090
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2091
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2092
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2093
- { LLM_TENSOR_SHORTCONV_CONV, "blk.%d.shortconv.conv" },
2094
- { LLM_TENSOR_SHORTCONV_INPROJ, "blk.%d.shortconv.in_proj" },
2095
- { LLM_TENSOR_SHORTCONV_OUTPROJ, "blk.%d.shortconv.out_proj" },
2096
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2097
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
2098
- { LLM_TENSOR_OUTPUT, "output" },
2099
- }
2100
- },
2101
- {
2102
- LLM_ARCH_SMALLTHINKER,
2103
- {
2104
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2105
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2106
- { LLM_TENSOR_OUTPUT, "output" },
2107
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2108
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2109
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2110
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2111
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2112
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2113
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2114
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2115
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2116
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2117
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2118
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2119
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" }
2120
- },
2121
- },
2122
- {
2123
- LLM_ARCH_DREAM,
2124
- {
2125
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2126
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2127
- { LLM_TENSOR_OUTPUT, "output" },
2128
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2129
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2130
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2131
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2132
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2133
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2134
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2135
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2136
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2137
- },
2138
- },
2139
- {
2140
- LLM_ARCH_LLADA,
2141
- {
2142
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2143
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2144
- { LLM_TENSOR_OUTPUT, "output" },
2145
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2146
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2147
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2148
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2149
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2150
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2151
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2152
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2153
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2154
- },
2155
- },
2156
- {
2157
- LLM_ARCH_LLADA_MOE,
2158
- {
2159
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2160
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2161
- { LLM_TENSOR_OUTPUT, "output" },
2162
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2163
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2164
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2165
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2166
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2167
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2168
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2169
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2170
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2171
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2172
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2173
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2174
- },
2175
- },
2176
- {
2177
- LLM_ARCH_SEED_OSS,
2178
- {
2179
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2180
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2181
- { LLM_TENSOR_OUTPUT, "output" },
2182
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2183
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2184
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2185
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2186
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2187
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
2188
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2189
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2190
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2191
- },
2192
- },
2193
- {
2194
- LLM_ARCH_GROVEMOE,
2195
- {
2196
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2197
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2198
- { LLM_TENSOR_OUTPUT, "output" },
2199
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2200
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2201
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2202
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2203
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2204
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2205
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2206
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2207
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2208
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2209
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2210
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2211
- { LLM_TENSOR_FFN_GATE_CHEXPS, "blk.%d.ffn_gate_chexps" },
2212
- { LLM_TENSOR_FFN_DOWN_CHEXPS, "blk.%d.ffn_down_chexps" },
2213
- { LLM_TENSOR_FFN_UP_CHEXPS, "blk.%d.ffn_up_chexps" },
2214
- },
2215
- },
2216
- {
2217
- LLM_ARCH_UNKNOWN,
2218
- {
2219
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2220
- },
2221
- },
336
+ static const std::map<llm_tensor, const char *> LLM_TENSOR_NAMES = {
337
+ { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
338
+ { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
339
+ { LLM_TENSOR_OUTPUT_NORM_LFM2, "token_embd_norm" }, // fix for wrong tensor name
340
+ { LLM_TENSOR_OUTPUT, "output" },
341
+ { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
342
+ { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
343
+ { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
344
+ { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
345
+ { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
346
+ { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
347
+ { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
348
+ { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
349
+ { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
350
+ { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
351
+ { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
352
+ { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
353
+ { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
354
+ { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
355
+ { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
356
+ { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
357
+ { LLM_TENSOR_FFN_GATE_UP_EXPS, "blk.%d.ffn_gate_up_exps" },
358
+ { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
359
+ { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
360
+ { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
361
+ { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
362
+ { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
363
+ { LLM_TENSOR_ATTN_GATE, "blk.%d.attn_gate" },
364
+ { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
365
+ { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
366
+ { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
367
+ { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
368
+ { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
369
+ { LLM_TENSOR_FFN_LATENT_DOWN, "blk.%d.ffn_latent_down" },
370
+ { LLM_TENSOR_FFN_LATENT_UP, "blk.%d.ffn_latent_up" },
371
+ { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
372
+ { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
373
+ { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
374
+ { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
375
+ { LLM_TENSOR_POS_EMBD, "position_embd" },
376
+ { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
377
+ { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
378
+ { LLM_TENSOR_TOKEN_TYPES, "token_types" },
379
+ { LLM_TENSOR_CLS, "cls" },
380
+ { LLM_TENSOR_CLS_OUT, "cls.output" },
381
+ { LLM_TENSOR_CLS_NORM, "cls.norm" },
382
+ { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
383
+ { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
384
+ { LLM_TENSOR_SSM_A_NOSCAN, "blk.%d.ssm_a" },
385
+ { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
386
+ { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
387
+ { LLM_TENSOR_SSM_BETA_ALPHA, "blk.%d.ssm_ba" },
388
+ { LLM_TENSOR_SSM_ALPHA, "blk.%d.ssm_alpha" },
389
+ { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
390
+ { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
391
+ { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
392
+ { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
393
+ { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
394
+ { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
395
+ { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
396
+ { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
397
+ { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
398
+ { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
399
+ { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
400
+ { LLM_TENSOR_SSM_CONV1D_Q, "blk.%d.ssm_conv1d_q" },
401
+ { LLM_TENSOR_SSM_CONV1D_K, "blk.%d.ssm_conv1d_k" },
402
+ { LLM_TENSOR_SSM_CONV1D_V, "blk.%d.ssm_conv1d_v" },
403
+ { LLM_TENSOR_SSM_F_A, "blk.%d.ssm_f_a" },
404
+ { LLM_TENSOR_SSM_F_B, "blk.%d.ssm_f_b" },
405
+ { LLM_TENSOR_SSM_BETA, "blk.%d.ssm_beta" },
406
+ { LLM_TENSOR_SSM_G_A, "blk.%d.ssm_g_a" },
407
+ { LLM_TENSOR_SSM_G_B, "blk.%d.ssm_g_b" },
408
+ { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
409
+ { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
410
+ { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
411
+ { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
412
+ { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
413
+ { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
414
+ { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
415
+ { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
416
+ { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
417
+ { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
418
+ { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
419
+ { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
420
+ { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
421
+ { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
422
+ { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
423
+ { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
424
+ { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
425
+ { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
426
+ { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
427
+ { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
428
+ { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
429
+ { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
430
+ { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
431
+ { LLM_TENSOR_DENSE_2_OUT, "dense_2" },
432
+ { LLM_TENSOR_DENSE_3_OUT, "dense_3" },
433
+ { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
434
+ { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
435
+ { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
436
+ { LLM_TENSOR_NEXTN_EH_PROJ, "blk.%d.nextn.eh_proj" },
437
+ { LLM_TENSOR_NEXTN_EMBED_TOKENS, "blk.%d.nextn.embed_tokens" },
438
+ { LLM_TENSOR_NEXTN_ENORM, "blk.%d.nextn.enorm" },
439
+ { LLM_TENSOR_NEXTN_HNORM, "blk.%d.nextn.hnorm" },
440
+ { LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, "blk.%d.nextn.shared_head_head" },
441
+ { LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, "blk.%d.nextn.shared_head_norm" },
442
+ { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
443
+ { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
444
+ { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
445
+ { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
446
+ { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
447
+ { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
448
+ { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
449
+ { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
450
+ { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
451
+ { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
452
+ { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
453
+ { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
454
+ { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
455
+ { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
456
+ { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
457
+ { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
458
+ { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
459
+ { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
460
+ { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
461
+ { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
462
+ { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
463
+ { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
464
+ { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
465
+ { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
466
+ { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
467
+ { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
468
+ { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
469
+ { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
470
+ { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
471
+ { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
472
+ { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
473
+ { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
474
+ { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
475
+ { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
476
+ { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
477
+ { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
478
+ { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
479
+ { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
480
+ { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
481
+ { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
482
+ { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
483
+ { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
484
+ { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
485
+ { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
486
+ { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
487
+ { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
488
+ { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
489
+ { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
490
+ { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
491
+ { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
492
+ { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
493
+ { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
494
+ { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
495
+ { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
496
+ { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
497
+ { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
498
+ { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
499
+ { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
500
+ { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
501
+ { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
502
+ { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
503
+ { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
504
+ { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
505
+ { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
506
+ { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
507
+ { LLM_TENSOR_CONV1D, "conv1d" },
508
+ { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
509
+ { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
510
+ { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
511
+ { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
512
+ { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
513
+ { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
514
+ { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
515
+ { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
516
+ { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
517
+ { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
518
+ { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
519
+ { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
520
+ { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
521
+ { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
522
+ { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
523
+ { LLM_TENSOR_ATTN_SINKS, "blk.%d.attn_sinks" },
524
+ { LLM_TENSOR_SHORTCONV_CONV, "blk.%d.shortconv.conv" },
525
+ { LLM_TENSOR_SHORTCONV_INPROJ, "blk.%d.shortconv.in_proj" },
526
+ { LLM_TENSOR_SHORTCONV_OUTPROJ, "blk.%d.shortconv.out_proj" },
527
+ { LLM_TENSOR_FFN_GATE_CHEXPS, "blk.%d.ffn_gate_chexps" },
528
+ { LLM_TENSOR_FFN_DOWN_CHEXPS, "blk.%d.ffn_down_chexps" },
529
+ { LLM_TENSOR_FFN_UP_CHEXPS, "blk.%d.ffn_up_chexps" },
530
+ { LLM_TENSOR_VISEXP_ATTN_QKV, "blk.%d.vis_attn_qkv" },
531
+ { LLM_TENSOR_VISEXP_ATTN_OUT, "blk.%d.vis_attn_output" },
532
+ { LLM_TENSOR_VISEXP_FFN_GATE, "blk.%d.vis_gate" },
533
+ { LLM_TENSOR_VISEXP_FFN_DOWN, "blk.%d.vis_down" },
534
+ { LLM_TENSOR_VISEXP_FFN_UP, "blk.%d.vis_up" },
535
+ { LLM_TENSOR_INDEXER_K_NORM, "blk.%d.indexer.k_norm" },
536
+ { LLM_TENSOR_INDEXER_PROJ, "blk.%d.indexer.proj" },
537
+ { LLM_TENSOR_INDEXER_ATTN_K, "blk.%d.indexer.attn_k" },
538
+ { LLM_TENSOR_INDEXER_ATTN_Q_B, "blk.%d.indexer.attn_q_b" },
2222
539
  };
2223
540
 
541
+ static std::set<llm_tensor> llm_get_tensor_names(llm_arch arch) {
542
+ switch (arch) {
543
+ case LLM_ARCH_CLIP:
544
+ return {};
545
+ case LLM_ARCH_LLAMA:
546
+ case LLM_ARCH_DECI:
547
+ case LLM_ARCH_MISTRAL3:
548
+ case LLM_ARCH_LLAMA_EMBED:
549
+ return {
550
+ LLM_TENSOR_TOKEN_EMBD,
551
+ LLM_TENSOR_OUTPUT_NORM,
552
+ LLM_TENSOR_OUTPUT,
553
+ LLM_TENSOR_ROPE_FREQS,
554
+ LLM_TENSOR_ATTN_NORM,
555
+ LLM_TENSOR_ATTN_Q,
556
+ LLM_TENSOR_ATTN_K,
557
+ LLM_TENSOR_ATTN_V,
558
+ LLM_TENSOR_ATTN_OUT,
559
+ LLM_TENSOR_ATTN_ROT_EMBD,
560
+ LLM_TENSOR_FFN_GATE_INP,
561
+ LLM_TENSOR_FFN_NORM,
562
+ LLM_TENSOR_FFN_GATE,
563
+ LLM_TENSOR_FFN_DOWN,
564
+ LLM_TENSOR_FFN_UP,
565
+ LLM_TENSOR_FFN_GATE_EXP,
566
+ LLM_TENSOR_FFN_DOWN_EXP,
567
+ LLM_TENSOR_FFN_UP_EXP,
568
+ LLM_TENSOR_FFN_GATE_EXPS,
569
+ LLM_TENSOR_FFN_DOWN_EXPS,
570
+ LLM_TENSOR_FFN_UP_EXPS,
571
+ };
572
+ case LLM_ARCH_ARCEE:
573
+ case LLM_ARCH_STARCODER2:
574
+ case LLM_ARCH_NEMOTRON:
575
+ return {
576
+ LLM_TENSOR_TOKEN_EMBD,
577
+ LLM_TENSOR_OUTPUT_NORM,
578
+ LLM_TENSOR_OUTPUT,
579
+ LLM_TENSOR_ROPE_FREQS,
580
+ LLM_TENSOR_ATTN_NORM,
581
+ LLM_TENSOR_ATTN_Q,
582
+ LLM_TENSOR_ATTN_K,
583
+ LLM_TENSOR_ATTN_V,
584
+ LLM_TENSOR_ATTN_OUT,
585
+ LLM_TENSOR_ATTN_ROT_EMBD,
586
+ LLM_TENSOR_FFN_NORM,
587
+ LLM_TENSOR_FFN_DOWN,
588
+ LLM_TENSOR_FFN_UP,
589
+ };
590
+ case LLM_ARCH_AFMOE:
591
+ return {
592
+ LLM_TENSOR_TOKEN_EMBD,
593
+ LLM_TENSOR_OUTPUT_NORM,
594
+ LLM_TENSOR_OUTPUT,
595
+ LLM_TENSOR_ATTN_NORM,
596
+ LLM_TENSOR_ATTN_POST_NORM,
597
+ LLM_TENSOR_ATTN_Q,
598
+ LLM_TENSOR_ATTN_K,
599
+ LLM_TENSOR_ATTN_V,
600
+ LLM_TENSOR_ATTN_OUT,
601
+ LLM_TENSOR_ATTN_Q_NORM,
602
+ LLM_TENSOR_ATTN_K_NORM,
603
+ LLM_TENSOR_ATTN_GATE,
604
+ LLM_TENSOR_FFN_NORM,
605
+ LLM_TENSOR_FFN_POST_NORM,
606
+ LLM_TENSOR_FFN_GATE_INP,
607
+ LLM_TENSOR_FFN_GATE,
608
+ LLM_TENSOR_FFN_DOWN,
609
+ LLM_TENSOR_FFN_UP,
610
+ LLM_TENSOR_FFN_GATE_EXPS,
611
+ LLM_TENSOR_FFN_DOWN_EXPS,
612
+ LLM_TENSOR_FFN_UP_EXPS,
613
+ LLM_TENSOR_FFN_GATE_SHEXP,
614
+ LLM_TENSOR_FFN_UP_SHEXP,
615
+ LLM_TENSOR_FFN_DOWN_SHEXP,
616
+ LLM_TENSOR_FFN_EXP_PROBS_B,
617
+ };
618
+ case LLM_ARCH_LLAMA4:
619
+ return {
620
+ LLM_TENSOR_TOKEN_EMBD,
621
+ LLM_TENSOR_OUTPUT_NORM,
622
+ LLM_TENSOR_OUTPUT,
623
+ LLM_TENSOR_ROPE_FREQS,
624
+ LLM_TENSOR_ATTN_NORM,
625
+ LLM_TENSOR_ATTN_Q,
626
+ LLM_TENSOR_ATTN_K,
627
+ LLM_TENSOR_ATTN_V,
628
+ LLM_TENSOR_ATTN_OUT,
629
+ LLM_TENSOR_ATTN_ROT_EMBD,
630
+ LLM_TENSOR_FFN_GATE_INP,
631
+ LLM_TENSOR_FFN_NORM,
632
+ LLM_TENSOR_FFN_GATE,
633
+ LLM_TENSOR_FFN_DOWN,
634
+ LLM_TENSOR_FFN_UP,
635
+ LLM_TENSOR_FFN_GATE_EXP,
636
+ LLM_TENSOR_FFN_DOWN_EXP,
637
+ LLM_TENSOR_FFN_UP_EXP,
638
+ LLM_TENSOR_FFN_GATE_EXPS,
639
+ LLM_TENSOR_FFN_DOWN_EXPS,
640
+ LLM_TENSOR_FFN_UP_EXPS,
641
+ LLM_TENSOR_FFN_GATE_SHEXP,
642
+ LLM_TENSOR_FFN_DOWN_SHEXP,
643
+ LLM_TENSOR_FFN_UP_SHEXP,
644
+ };
645
+ case LLM_ARCH_BAICHUAN:
646
+ case LLM_ARCH_ORION:
647
+ case LLM_ARCH_XVERSE:
648
+ case LLM_ARCH_EXAONE:
649
+ return {
650
+ LLM_TENSOR_TOKEN_EMBD,
651
+ LLM_TENSOR_OUTPUT_NORM,
652
+ LLM_TENSOR_OUTPUT,
653
+ LLM_TENSOR_ROPE_FREQS,
654
+ LLM_TENSOR_ATTN_NORM,
655
+ LLM_TENSOR_ATTN_Q,
656
+ LLM_TENSOR_ATTN_K,
657
+ LLM_TENSOR_ATTN_V,
658
+ LLM_TENSOR_ATTN_OUT,
659
+ LLM_TENSOR_ATTN_ROT_EMBD,
660
+ LLM_TENSOR_FFN_NORM,
661
+ LLM_TENSOR_FFN_GATE,
662
+ LLM_TENSOR_FFN_DOWN,
663
+ LLM_TENSOR_FFN_UP,
664
+ };
665
+ case LLM_ARCH_FALCON:
666
+ return {
667
+ LLM_TENSOR_TOKEN_EMBD,
668
+ LLM_TENSOR_OUTPUT_NORM,
669
+ LLM_TENSOR_OUTPUT,
670
+ LLM_TENSOR_ATTN_NORM,
671
+ LLM_TENSOR_ATTN_NORM_2,
672
+ LLM_TENSOR_ATTN_QKV,
673
+ LLM_TENSOR_ATTN_OUT,
674
+ LLM_TENSOR_FFN_DOWN,
675
+ LLM_TENSOR_FFN_UP,
676
+ };
677
+ case LLM_ARCH_GROK:
678
+ return {
679
+ LLM_TENSOR_TOKEN_EMBD,
680
+ LLM_TENSOR_OUTPUT_NORM,
681
+ LLM_TENSOR_OUTPUT,
682
+ LLM_TENSOR_ROPE_FREQS,
683
+ LLM_TENSOR_ATTN_NORM,
684
+ LLM_TENSOR_ATTN_Q,
685
+ LLM_TENSOR_ATTN_K,
686
+ LLM_TENSOR_ATTN_V,
687
+ LLM_TENSOR_ATTN_OUT,
688
+ LLM_TENSOR_ATTN_ROT_EMBD,
689
+ LLM_TENSOR_FFN_GATE_INP,
690
+ LLM_TENSOR_FFN_NORM,
691
+ LLM_TENSOR_FFN_GATE,
692
+ LLM_TENSOR_FFN_DOWN,
693
+ LLM_TENSOR_FFN_UP,
694
+ LLM_TENSOR_FFN_GATE_EXP,
695
+ LLM_TENSOR_FFN_DOWN_EXP,
696
+ LLM_TENSOR_FFN_UP_EXP,
697
+ LLM_TENSOR_FFN_GATE_EXPS,
698
+ LLM_TENSOR_FFN_DOWN_EXPS,
699
+ LLM_TENSOR_FFN_UP_EXPS,
700
+ LLM_TENSOR_FFN_POST_NORM,
701
+ LLM_TENSOR_LAYER_OUT_NORM,
702
+ LLM_TENSOR_ATTN_OUT_NORM,
703
+ };
704
+ case LLM_ARCH_GPT2:
705
+ case LLM_ARCH_STARCODER:
706
+ return {
707
+ LLM_TENSOR_TOKEN_EMBD,
708
+ LLM_TENSOR_POS_EMBD,
709
+ LLM_TENSOR_OUTPUT_NORM,
710
+ LLM_TENSOR_OUTPUT,
711
+ LLM_TENSOR_ATTN_NORM,
712
+ LLM_TENSOR_ATTN_QKV,
713
+ LLM_TENSOR_ATTN_OUT,
714
+ LLM_TENSOR_FFN_NORM,
715
+ LLM_TENSOR_FFN_UP,
716
+ LLM_TENSOR_FFN_DOWN,
717
+ };
718
+ case LLM_ARCH_GPTNEOX:
719
+ return {
720
+ LLM_TENSOR_TOKEN_EMBD,
721
+ LLM_TENSOR_OUTPUT_NORM,
722
+ LLM_TENSOR_OUTPUT,
723
+ LLM_TENSOR_ATTN_NORM,
724
+ LLM_TENSOR_ATTN_QKV,
725
+ LLM_TENSOR_ATTN_OUT,
726
+ LLM_TENSOR_FFN_NORM,
727
+ LLM_TENSOR_FFN_DOWN,
728
+ LLM_TENSOR_FFN_UP,
729
+ };
730
+ case LLM_ARCH_MPT:
731
+ return {
732
+ LLM_TENSOR_TOKEN_EMBD,
733
+ LLM_TENSOR_OUTPUT_NORM,
734
+ LLM_TENSOR_OUTPUT,
735
+ LLM_TENSOR_ATTN_NORM,
736
+ LLM_TENSOR_FFN_NORM,
737
+ LLM_TENSOR_ATTN_QKV,
738
+ LLM_TENSOR_ATTN_OUT,
739
+ LLM_TENSOR_FFN_DOWN,
740
+ LLM_TENSOR_FFN_UP,
741
+ LLM_TENSOR_FFN_ACT,
742
+ LLM_TENSOR_POS_EMBD,
743
+ LLM_TENSOR_ATTN_Q_NORM,
744
+ LLM_TENSOR_ATTN_K_NORM,
745
+ };
746
+ case LLM_ARCH_REFACT:
747
+ case LLM_ARCH_QWEN2:
748
+ case LLM_ARCH_QWEN2VL:
749
+ case LLM_ARCH_INTERNLM2:
750
+ case LLM_ARCH_GRANITE:
751
+ case LLM_ARCH_ERNIE4_5:
752
+ case LLM_ARCH_PADDLEOCR:
753
+ case LLM_ARCH_SMOLLM3:
754
+ case LLM_ARCH_DREAM:
755
+ case LLM_ARCH_LLADA:
756
+ case LLM_ARCH_PANGU_EMBED:
757
+ return {
758
+ LLM_TENSOR_TOKEN_EMBD,
759
+ LLM_TENSOR_OUTPUT_NORM,
760
+ LLM_TENSOR_OUTPUT,
761
+ LLM_TENSOR_ATTN_NORM,
762
+ LLM_TENSOR_ATTN_Q,
763
+ LLM_TENSOR_ATTN_K,
764
+ LLM_TENSOR_ATTN_V,
765
+ LLM_TENSOR_ATTN_OUT,
766
+ LLM_TENSOR_FFN_NORM,
767
+ LLM_TENSOR_FFN_GATE,
768
+ LLM_TENSOR_FFN_DOWN,
769
+ LLM_TENSOR_FFN_UP,
770
+ };
771
+ case LLM_ARCH_BERT:
772
+ return {
773
+ LLM_TENSOR_TOKEN_EMBD,
774
+ LLM_TENSOR_TOKEN_EMBD_NORM,
775
+ LLM_TENSOR_TOKEN_TYPES,
776
+ LLM_TENSOR_POS_EMBD,
777
+ LLM_TENSOR_ATTN_OUT_NORM,
778
+ LLM_TENSOR_ATTN_QKV,
779
+ LLM_TENSOR_ATTN_Q,
780
+ LLM_TENSOR_ATTN_K,
781
+ LLM_TENSOR_ATTN_V,
782
+ LLM_TENSOR_ATTN_OUT,
783
+ LLM_TENSOR_LAYER_OUT_NORM,
784
+ LLM_TENSOR_FFN_DOWN,
785
+ LLM_TENSOR_FFN_UP,
786
+ LLM_TENSOR_CLS,
787
+ LLM_TENSOR_CLS_OUT,
788
+ };
789
+ case LLM_ARCH_NOMIC_BERT:
790
+ return {
791
+ LLM_TENSOR_TOKEN_EMBD,
792
+ LLM_TENSOR_TOKEN_EMBD_NORM,
793
+ LLM_TENSOR_TOKEN_TYPES,
794
+ LLM_TENSOR_ATTN_OUT_NORM,
795
+ LLM_TENSOR_ATTN_QKV,
796
+ LLM_TENSOR_ATTN_OUT,
797
+ LLM_TENSOR_LAYER_OUT_NORM,
798
+ LLM_TENSOR_FFN_GATE,
799
+ LLM_TENSOR_FFN_DOWN,
800
+ LLM_TENSOR_FFN_UP,
801
+ };
802
+ case LLM_ARCH_NOMIC_BERT_MOE:
803
+ return {
804
+ LLM_TENSOR_TOKEN_EMBD,
805
+ LLM_TENSOR_TOKEN_EMBD_NORM,
806
+ LLM_TENSOR_TOKEN_TYPES,
807
+ LLM_TENSOR_ATTN_OUT_NORM,
808
+ LLM_TENSOR_ATTN_QKV,
809
+ LLM_TENSOR_ATTN_OUT,
810
+ LLM_TENSOR_LAYER_OUT_NORM,
811
+ LLM_TENSOR_FFN_GATE,
812
+ LLM_TENSOR_FFN_DOWN,
813
+ LLM_TENSOR_FFN_UP,
814
+ LLM_TENSOR_FFN_GATE_INP,
815
+ LLM_TENSOR_FFN_DOWN_EXPS,
816
+ LLM_TENSOR_FFN_UP_EXPS,
817
+ };
818
+ case LLM_ARCH_NEO_BERT:
819
+ return {
820
+ LLM_TENSOR_TOKEN_EMBD,
821
+ LLM_TENSOR_ATTN_NORM,
822
+ LLM_TENSOR_ATTN_QKV,
823
+ LLM_TENSOR_ATTN_OUT,
824
+ LLM_TENSOR_FFN_NORM,
825
+ LLM_TENSOR_FFN_DOWN,
826
+ LLM_TENSOR_FFN_UP,
827
+ LLM_TENSOR_ENC_OUTPUT_NORM,
828
+ LLM_TENSOR_CLS,
829
+ LLM_TENSOR_CLS_OUT,
830
+ };
831
+ case LLM_ARCH_EUROBERT:
832
+ return {
833
+ LLM_TENSOR_TOKEN_EMBD,
834
+ LLM_TENSOR_OUTPUT_NORM,
835
+ LLM_TENSOR_ATTN_NORM,
836
+ LLM_TENSOR_ATTN_Q,
837
+ LLM_TENSOR_ATTN_K,
838
+ LLM_TENSOR_ATTN_V,
839
+ LLM_TENSOR_ATTN_OUT,
840
+ LLM_TENSOR_FFN_NORM,
841
+ LLM_TENSOR_FFN_GATE,
842
+ LLM_TENSOR_FFN_UP,
843
+ LLM_TENSOR_FFN_DOWN,
844
+ };
845
+ case LLM_ARCH_MODERN_BERT:
846
+ return {
847
+ LLM_TENSOR_TOKEN_EMBD,
848
+ LLM_TENSOR_TOKEN_EMBD_NORM,
849
+ LLM_TENSOR_OUTPUT_NORM,
850
+ LLM_TENSOR_ATTN_NORM,
851
+ LLM_TENSOR_ATTN_OUT,
852
+ LLM_TENSOR_ATTN_QKV,
853
+ LLM_TENSOR_FFN_DOWN,
854
+ LLM_TENSOR_FFN_UP,
855
+ LLM_TENSOR_FFN_NORM,
856
+ LLM_TENSOR_CLS,
857
+ LLM_TENSOR_CLS_OUT,
858
+ LLM_TENSOR_CLS_NORM,
859
+ };
860
+ case LLM_ARCH_JINA_BERT_V2:
861
+ return {
862
+ LLM_TENSOR_TOKEN_EMBD,
863
+ LLM_TENSOR_TOKEN_EMBD_NORM,
864
+ LLM_TENSOR_TOKEN_TYPES,
865
+ LLM_TENSOR_ATTN_NORM_2,
866
+ LLM_TENSOR_ATTN_OUT_NORM,
867
+ LLM_TENSOR_ATTN_Q,
868
+ LLM_TENSOR_ATTN_Q_NORM,
869
+ LLM_TENSOR_ATTN_K,
870
+ LLM_TENSOR_ATTN_K_NORM,
871
+ LLM_TENSOR_ATTN_V,
872
+ LLM_TENSOR_ATTN_OUT,
873
+ LLM_TENSOR_LAYER_OUT_NORM,
874
+ LLM_TENSOR_FFN_DOWN,
875
+ LLM_TENSOR_FFN_GATE,
876
+ LLM_TENSOR_FFN_UP,
877
+ LLM_TENSOR_CLS,
878
+ };
879
+ case LLM_ARCH_JINA_BERT_V3:
880
+ return {
881
+ LLM_TENSOR_TOKEN_EMBD,
882
+ LLM_TENSOR_TOKEN_EMBD_NORM,
883
+ LLM_TENSOR_TOKEN_TYPES,
884
+ LLM_TENSOR_ATTN_OUT_NORM,
885
+ LLM_TENSOR_ATTN_QKV,
886
+ LLM_TENSOR_ATTN_OUT,
887
+ LLM_TENSOR_FFN_DOWN,
888
+ LLM_TENSOR_FFN_UP,
889
+ LLM_TENSOR_LAYER_OUT_NORM,
890
+ };
891
+ case LLM_ARCH_BLOOM:
892
+ return {
893
+ LLM_TENSOR_TOKEN_EMBD,
894
+ LLM_TENSOR_TOKEN_EMBD_NORM,
895
+ LLM_TENSOR_OUTPUT_NORM,
896
+ LLM_TENSOR_OUTPUT,
897
+ LLM_TENSOR_ATTN_NORM,
898
+ LLM_TENSOR_ATTN_QKV,
899
+ LLM_TENSOR_ATTN_OUT,
900
+ LLM_TENSOR_FFN_NORM,
901
+ LLM_TENSOR_FFN_UP,
902
+ LLM_TENSOR_FFN_DOWN,
903
+ };
904
+ case LLM_ARCH_STABLELM:
905
+ return {
906
+ LLM_TENSOR_TOKEN_EMBD,
907
+ LLM_TENSOR_OUTPUT_NORM,
908
+ LLM_TENSOR_OUTPUT,
909
+ LLM_TENSOR_ROPE_FREQS,
910
+ LLM_TENSOR_ATTN_NORM,
911
+ LLM_TENSOR_ATTN_Q,
912
+ LLM_TENSOR_ATTN_K,
913
+ LLM_TENSOR_ATTN_V,
914
+ LLM_TENSOR_ATTN_OUT,
915
+ LLM_TENSOR_FFN_NORM,
916
+ LLM_TENSOR_FFN_GATE,
917
+ LLM_TENSOR_FFN_DOWN,
918
+ LLM_TENSOR_FFN_UP,
919
+ LLM_TENSOR_ATTN_Q_NORM,
920
+ LLM_TENSOR_ATTN_K_NORM,
921
+ };
922
+ case LLM_ARCH_QWEN:
923
+ return {
924
+ LLM_TENSOR_TOKEN_EMBD,
925
+ LLM_TENSOR_OUTPUT_NORM,
926
+ LLM_TENSOR_OUTPUT,
927
+ LLM_TENSOR_ROPE_FREQS,
928
+ LLM_TENSOR_ATTN_NORM,
929
+ LLM_TENSOR_ATTN_QKV,
930
+ LLM_TENSOR_ATTN_OUT,
931
+ LLM_TENSOR_FFN_NORM,
932
+ LLM_TENSOR_FFN_GATE,
933
+ LLM_TENSOR_FFN_DOWN,
934
+ LLM_TENSOR_FFN_UP,
935
+ };
936
+ case LLM_ARCH_QWEN2MOE:
937
+ return {
938
+ LLM_TENSOR_TOKEN_EMBD,
939
+ LLM_TENSOR_OUTPUT_NORM,
940
+ LLM_TENSOR_OUTPUT,
941
+ LLM_TENSOR_ATTN_NORM,
942
+ LLM_TENSOR_ATTN_Q,
943
+ LLM_TENSOR_ATTN_K,
944
+ LLM_TENSOR_ATTN_V,
945
+ LLM_TENSOR_ATTN_OUT,
946
+ LLM_TENSOR_FFN_NORM,
947
+ LLM_TENSOR_FFN_GATE_INP,
948
+ LLM_TENSOR_FFN_GATE_EXPS,
949
+ LLM_TENSOR_FFN_DOWN_EXPS,
950
+ LLM_TENSOR_FFN_UP_EXPS,
951
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
952
+ LLM_TENSOR_FFN_GATE_SHEXP,
953
+ LLM_TENSOR_FFN_DOWN_SHEXP,
954
+ LLM_TENSOR_FFN_UP_SHEXP,
955
+ };
956
+ case LLM_ARCH_QWEN3:
957
+ return {
958
+ LLM_TENSOR_TOKEN_EMBD,
959
+ LLM_TENSOR_OUTPUT_NORM,
960
+ LLM_TENSOR_OUTPUT,
961
+ LLM_TENSOR_CLS_OUT,
962
+ LLM_TENSOR_ATTN_NORM,
963
+ LLM_TENSOR_ATTN_Q,
964
+ LLM_TENSOR_ATTN_Q_NORM,
965
+ LLM_TENSOR_ATTN_K,
966
+ LLM_TENSOR_ATTN_K_NORM,
967
+ LLM_TENSOR_ATTN_V,
968
+ LLM_TENSOR_ATTN_OUT,
969
+ LLM_TENSOR_FFN_NORM,
970
+ LLM_TENSOR_FFN_GATE,
971
+ LLM_TENSOR_FFN_DOWN,
972
+ LLM_TENSOR_FFN_UP,
973
+ };
974
+ case LLM_ARCH_QWEN3MOE:
975
+ case LLM_ARCH_QWEN3VLMOE:
976
+ case LLM_ARCH_OLMOE:
977
+ case LLM_ARCH_LLADA_MOE:
978
+ case LLM_ARCH_RND1:
979
+ return {
980
+ LLM_TENSOR_TOKEN_EMBD,
981
+ LLM_TENSOR_OUTPUT_NORM,
982
+ LLM_TENSOR_OUTPUT,
983
+ LLM_TENSOR_ATTN_NORM,
984
+ LLM_TENSOR_ATTN_Q,
985
+ LLM_TENSOR_ATTN_Q_NORM,
986
+ LLM_TENSOR_ATTN_K,
987
+ LLM_TENSOR_ATTN_K_NORM,
988
+ LLM_TENSOR_ATTN_V,
989
+ LLM_TENSOR_ATTN_OUT,
990
+ LLM_TENSOR_FFN_NORM,
991
+ LLM_TENSOR_FFN_GATE_INP,
992
+ LLM_TENSOR_FFN_GATE_EXPS,
993
+ LLM_TENSOR_FFN_DOWN_EXPS,
994
+ LLM_TENSOR_FFN_UP_EXPS,
995
+ };
996
+ case LLM_ARCH_QWEN3NEXT:
997
+ return {
998
+ LLM_TENSOR_TOKEN_EMBD,
999
+ LLM_TENSOR_OUTPUT_NORM,
1000
+ LLM_TENSOR_OUTPUT,
1001
+ LLM_TENSOR_ATTN_NORM,
1002
+ LLM_TENSOR_ATTN_POST_NORM,
1003
+ LLM_TENSOR_ATTN_Q,
1004
+ LLM_TENSOR_ATTN_Q_NORM,
1005
+ LLM_TENSOR_ATTN_K,
1006
+ LLM_TENSOR_ATTN_K_NORM,
1007
+ LLM_TENSOR_ATTN_V,
1008
+ LLM_TENSOR_ATTN_OUT,
1009
+ LLM_TENSOR_ATTN_QKV,
1010
+ LLM_TENSOR_ATTN_GATE,
1011
+ LLM_TENSOR_FFN_GATE_INP,
1012
+ LLM_TENSOR_FFN_GATE_EXPS,
1013
+ LLM_TENSOR_FFN_DOWN_EXPS,
1014
+ LLM_TENSOR_FFN_UP_EXPS,
1015
+ LLM_TENSOR_FFN_GATE_UP_EXPS,
1016
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1017
+ LLM_TENSOR_FFN_GATE_SHEXP,
1018
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1019
+ LLM_TENSOR_FFN_UP_SHEXP,
1020
+ LLM_TENSOR_SSM_A_NOSCAN,
1021
+ LLM_TENSOR_SSM_CONV1D,
1022
+ LLM_TENSOR_SSM_DT,
1023
+ LLM_TENSOR_SSM_BETA_ALPHA,
1024
+ LLM_TENSOR_SSM_IN,
1025
+ LLM_TENSOR_SSM_NORM,
1026
+ LLM_TENSOR_SSM_OUT,
1027
+ };
1028
+ case LLM_ARCH_QWEN35:
1029
+ return {
1030
+ LLM_TENSOR_TOKEN_EMBD,
1031
+ LLM_TENSOR_OUTPUT_NORM,
1032
+ LLM_TENSOR_OUTPUT,
1033
+ LLM_TENSOR_ATTN_NORM,
1034
+ LLM_TENSOR_ATTN_POST_NORM,
1035
+ LLM_TENSOR_ATTN_Q,
1036
+ LLM_TENSOR_ATTN_Q_NORM,
1037
+ LLM_TENSOR_ATTN_K,
1038
+ LLM_TENSOR_ATTN_K_NORM,
1039
+ LLM_TENSOR_ATTN_V,
1040
+ LLM_TENSOR_ATTN_OUT,
1041
+ LLM_TENSOR_ATTN_QKV,
1042
+ LLM_TENSOR_ATTN_GATE,
1043
+ LLM_TENSOR_FFN_GATE,
1044
+ LLM_TENSOR_FFN_DOWN,
1045
+ LLM_TENSOR_FFN_UP,
1046
+ LLM_TENSOR_SSM_A_NOSCAN,
1047
+ LLM_TENSOR_SSM_CONV1D,
1048
+ LLM_TENSOR_SSM_DT,
1049
+ LLM_TENSOR_SSM_BETA,
1050
+ LLM_TENSOR_SSM_ALPHA,
1051
+ LLM_TENSOR_SSM_NORM,
1052
+ LLM_TENSOR_SSM_OUT,
1053
+ };
1054
+ case LLM_ARCH_QWEN35MOE:
1055
+ return {
1056
+ LLM_TENSOR_TOKEN_EMBD,
1057
+ LLM_TENSOR_OUTPUT_NORM,
1058
+ LLM_TENSOR_OUTPUT,
1059
+ LLM_TENSOR_ATTN_NORM,
1060
+ LLM_TENSOR_ATTN_POST_NORM,
1061
+ LLM_TENSOR_ATTN_Q,
1062
+ LLM_TENSOR_ATTN_Q_NORM,
1063
+ LLM_TENSOR_ATTN_K,
1064
+ LLM_TENSOR_ATTN_K_NORM,
1065
+ LLM_TENSOR_ATTN_V,
1066
+ LLM_TENSOR_ATTN_OUT,
1067
+ LLM_TENSOR_ATTN_QKV,
1068
+ LLM_TENSOR_ATTN_GATE,
1069
+ LLM_TENSOR_FFN_GATE_INP,
1070
+ LLM_TENSOR_FFN_GATE_EXPS,
1071
+ LLM_TENSOR_FFN_DOWN_EXPS,
1072
+ LLM_TENSOR_FFN_UP_EXPS,
1073
+ LLM_TENSOR_FFN_GATE_UP_EXPS,
1074
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1075
+ LLM_TENSOR_FFN_GATE_SHEXP,
1076
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1077
+ LLM_TENSOR_FFN_UP_SHEXP,
1078
+ LLM_TENSOR_SSM_A_NOSCAN,
1079
+ LLM_TENSOR_SSM_CONV1D,
1080
+ LLM_TENSOR_SSM_DT,
1081
+ LLM_TENSOR_SSM_BETA,
1082
+ LLM_TENSOR_SSM_ALPHA,
1083
+ LLM_TENSOR_SSM_NORM,
1084
+ LLM_TENSOR_SSM_OUT,
1085
+ };
1086
+ case LLM_ARCH_QWEN3VL:
1087
+ case LLM_ARCH_CHAMELEON:
1088
+ case LLM_ARCH_HUNYUAN_DENSE:
1089
+ return {
1090
+ LLM_TENSOR_TOKEN_EMBD,
1091
+ LLM_TENSOR_OUTPUT_NORM,
1092
+ LLM_TENSOR_OUTPUT,
1093
+ LLM_TENSOR_CLS_OUT,
1094
+ LLM_TENSOR_ATTN_NORM,
1095
+ LLM_TENSOR_ATTN_Q,
1096
+ LLM_TENSOR_ATTN_Q_NORM,
1097
+ LLM_TENSOR_ATTN_K,
1098
+ LLM_TENSOR_ATTN_K_NORM,
1099
+ LLM_TENSOR_ATTN_V,
1100
+ LLM_TENSOR_ATTN_OUT,
1101
+ LLM_TENSOR_FFN_NORM,
1102
+ LLM_TENSOR_FFN_GATE,
1103
+ LLM_TENSOR_FFN_DOWN,
1104
+ LLM_TENSOR_FFN_UP,
1105
+ };
1106
+ case LLM_ARCH_PHI2:
1107
+ return {
1108
+ LLM_TENSOR_TOKEN_EMBD,
1109
+ LLM_TENSOR_OUTPUT_NORM,
1110
+ LLM_TENSOR_OUTPUT,
1111
+ LLM_TENSOR_ATTN_NORM,
1112
+ LLM_TENSOR_ATTN_QKV,
1113
+ LLM_TENSOR_ATTN_Q,
1114
+ LLM_TENSOR_ATTN_K,
1115
+ LLM_TENSOR_ATTN_V,
1116
+ LLM_TENSOR_ATTN_OUT,
1117
+ LLM_TENSOR_FFN_DOWN,
1118
+ LLM_TENSOR_FFN_UP,
1119
+ };
1120
+ case LLM_ARCH_PHI3:
1121
+ return {
1122
+ LLM_TENSOR_TOKEN_EMBD,
1123
+ LLM_TENSOR_OUTPUT_NORM,
1124
+ LLM_TENSOR_OUTPUT,
1125
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1126
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1127
+ LLM_TENSOR_ATTN_NORM,
1128
+ LLM_TENSOR_ATTN_QKV,
1129
+ LLM_TENSOR_ATTN_Q,
1130
+ LLM_TENSOR_ATTN_K,
1131
+ LLM_TENSOR_ATTN_V,
1132
+ LLM_TENSOR_ATTN_OUT,
1133
+ LLM_TENSOR_FFN_NORM,
1134
+ LLM_TENSOR_FFN_DOWN,
1135
+ LLM_TENSOR_FFN_UP,
1136
+ };
1137
+ case LLM_ARCH_PHIMOE:
1138
+ return {
1139
+ LLM_TENSOR_TOKEN_EMBD,
1140
+ LLM_TENSOR_OUTPUT_NORM,
1141
+ LLM_TENSOR_OUTPUT,
1142
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1143
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1144
+ LLM_TENSOR_ATTN_NORM,
1145
+ LLM_TENSOR_ATTN_QKV,
1146
+ LLM_TENSOR_ATTN_Q,
1147
+ LLM_TENSOR_ATTN_K,
1148
+ LLM_TENSOR_ATTN_V,
1149
+ LLM_TENSOR_ATTN_OUT,
1150
+ LLM_TENSOR_FFN_NORM,
1151
+ LLM_TENSOR_FFN_GATE_INP,
1152
+ LLM_TENSOR_FFN_GATE_EXPS,
1153
+ LLM_TENSOR_FFN_DOWN_EXPS,
1154
+ LLM_TENSOR_FFN_UP_EXPS,
1155
+ };
1156
+ case LLM_ARCH_PLAMO:
1157
+ return {
1158
+ LLM_TENSOR_TOKEN_EMBD,
1159
+ LLM_TENSOR_OUTPUT_NORM,
1160
+ LLM_TENSOR_OUTPUT,
1161
+ LLM_TENSOR_ROPE_FREQS,
1162
+ LLM_TENSOR_ATTN_NORM,
1163
+ LLM_TENSOR_ATTN_Q,
1164
+ LLM_TENSOR_ATTN_K,
1165
+ LLM_TENSOR_ATTN_V,
1166
+ LLM_TENSOR_ATTN_OUT,
1167
+ LLM_TENSOR_ATTN_ROT_EMBD,
1168
+ LLM_TENSOR_FFN_GATE,
1169
+ LLM_TENSOR_FFN_DOWN,
1170
+ LLM_TENSOR_FFN_UP,
1171
+ };
1172
+ case LLM_ARCH_PLAMO2:
1173
+ return {
1174
+ LLM_TENSOR_TOKEN_EMBD,
1175
+ LLM_TENSOR_OUTPUT_NORM,
1176
+ LLM_TENSOR_OUTPUT,
1177
+ LLM_TENSOR_ROPE_FREQS,
1178
+ LLM_TENSOR_ATTN_NORM,
1179
+ LLM_TENSOR_ATTN_QKV,
1180
+ LLM_TENSOR_ATTN_Q_NORM,
1181
+ LLM_TENSOR_ATTN_K_NORM,
1182
+ LLM_TENSOR_ATTN_OUT,
1183
+ LLM_TENSOR_ATTN_ROT_EMBD,
1184
+ LLM_TENSOR_FFN_NORM,
1185
+ LLM_TENSOR_FFN_DOWN,
1186
+ LLM_TENSOR_FFN_UP,
1187
+ LLM_TENSOR_SSM_IN,
1188
+ LLM_TENSOR_SSM_CONV1D,
1189
+ LLM_TENSOR_SSM_X,
1190
+ LLM_TENSOR_SSM_DT,
1191
+ LLM_TENSOR_SSM_A,
1192
+ LLM_TENSOR_SSM_D,
1193
+ LLM_TENSOR_SSM_OUT,
1194
+ LLM_TENSOR_SSM_DT_NORM,
1195
+ LLM_TENSOR_SSM_B_NORM,
1196
+ LLM_TENSOR_SSM_C_NORM,
1197
+ LLM_TENSOR_ATTN_POST_NORM,
1198
+ LLM_TENSOR_FFN_POST_NORM,
1199
+ };
1200
+ case LLM_ARCH_PLAMO3:
1201
+ return {
1202
+ LLM_TENSOR_TOKEN_EMBD,
1203
+ LLM_TENSOR_OUTPUT_NORM,
1204
+ LLM_TENSOR_OUTPUT,
1205
+ LLM_TENSOR_ATTN_NORM,
1206
+ LLM_TENSOR_ATTN_QKV,
1207
+ LLM_TENSOR_ATTN_Q_NORM,
1208
+ LLM_TENSOR_ATTN_K_NORM,
1209
+ LLM_TENSOR_ATTN_OUT,
1210
+ LLM_TENSOR_ATTN_POST_NORM,
1211
+ LLM_TENSOR_FFN_NORM,
1212
+ LLM_TENSOR_FFN_POST_NORM,
1213
+ LLM_TENSOR_FFN_DOWN,
1214
+ LLM_TENSOR_FFN_UP,
1215
+ };
1216
+ case LLM_ARCH_CODESHELL:
1217
+ return {
1218
+ LLM_TENSOR_TOKEN_EMBD,
1219
+ LLM_TENSOR_OUTPUT_NORM,
1220
+ LLM_TENSOR_OUTPUT,
1221
+ LLM_TENSOR_ROPE_FREQS,
1222
+ LLM_TENSOR_ATTN_NORM,
1223
+ LLM_TENSOR_ATTN_Q,
1224
+ LLM_TENSOR_ATTN_K,
1225
+ LLM_TENSOR_ATTN_V,
1226
+ LLM_TENSOR_ATTN_QKV,
1227
+ LLM_TENSOR_ATTN_OUT,
1228
+ LLM_TENSOR_ATTN_ROT_EMBD,
1229
+ LLM_TENSOR_FFN_NORM,
1230
+ LLM_TENSOR_FFN_GATE,
1231
+ LLM_TENSOR_FFN_DOWN,
1232
+ LLM_TENSOR_FFN_UP,
1233
+ };
1234
+ case LLM_ARCH_MINICPM:
1235
+ return {
1236
+ LLM_TENSOR_TOKEN_EMBD,
1237
+ LLM_TENSOR_OUTPUT_NORM,
1238
+ LLM_TENSOR_OUTPUT,
1239
+ LLM_TENSOR_ROPE_FREQS,
1240
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1241
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1242
+ LLM_TENSOR_ATTN_NORM,
1243
+ LLM_TENSOR_ATTN_Q,
1244
+ LLM_TENSOR_ATTN_K,
1245
+ LLM_TENSOR_ATTN_V,
1246
+ LLM_TENSOR_ATTN_OUT,
1247
+ LLM_TENSOR_ATTN_ROT_EMBD,
1248
+ LLM_TENSOR_FFN_GATE_INP,
1249
+ LLM_TENSOR_FFN_NORM,
1250
+ LLM_TENSOR_FFN_GATE,
1251
+ LLM_TENSOR_FFN_DOWN,
1252
+ LLM_TENSOR_FFN_UP,
1253
+ LLM_TENSOR_FFN_GATE_EXP,
1254
+ LLM_TENSOR_FFN_DOWN_EXP,
1255
+ LLM_TENSOR_FFN_UP_EXP,
1256
+ };
1257
+ case LLM_ARCH_MINICPM3:
1258
+ return {
1259
+ LLM_TENSOR_TOKEN_EMBD,
1260
+ LLM_TENSOR_OUTPUT_NORM,
1261
+ LLM_TENSOR_OUTPUT,
1262
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1263
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1264
+ LLM_TENSOR_ATTN_NORM,
1265
+ LLM_TENSOR_ATTN_Q_A_NORM,
1266
+ LLM_TENSOR_ATTN_KV_A_NORM,
1267
+ LLM_TENSOR_ATTN_Q,
1268
+ LLM_TENSOR_ATTN_Q_A,
1269
+ LLM_TENSOR_ATTN_Q_B,
1270
+ LLM_TENSOR_ATTN_KV_A_MQA,
1271
+ LLM_TENSOR_ATTN_KV_B,
1272
+ LLM_TENSOR_ATTN_OUT,
1273
+ LLM_TENSOR_FFN_NORM,
1274
+ LLM_TENSOR_FFN_GATE,
1275
+ LLM_TENSOR_FFN_UP,
1276
+ LLM_TENSOR_FFN_DOWN,
1277
+ };
1278
+ case LLM_ARCH_GEMMA:
1279
+ return {
1280
+ LLM_TENSOR_TOKEN_EMBD,
1281
+ LLM_TENSOR_OUTPUT_NORM,
1282
+ LLM_TENSOR_ATTN_NORM,
1283
+ LLM_TENSOR_ATTN_Q,
1284
+ LLM_TENSOR_ATTN_K,
1285
+ LLM_TENSOR_ATTN_V,
1286
+ LLM_TENSOR_ATTN_OUT,
1287
+ LLM_TENSOR_FFN_NORM,
1288
+ LLM_TENSOR_FFN_GATE,
1289
+ LLM_TENSOR_FFN_DOWN,
1290
+ LLM_TENSOR_FFN_UP,
1291
+ };
1292
+ case LLM_ARCH_GEMMA2:
1293
+ return {
1294
+ LLM_TENSOR_TOKEN_EMBD,
1295
+ LLM_TENSOR_OUTPUT_NORM,
1296
+ LLM_TENSOR_ATTN_NORM,
1297
+ LLM_TENSOR_ATTN_Q,
1298
+ LLM_TENSOR_ATTN_K,
1299
+ LLM_TENSOR_ATTN_V,
1300
+ LLM_TENSOR_ATTN_OUT,
1301
+ LLM_TENSOR_ATTN_POST_NORM,
1302
+ LLM_TENSOR_FFN_NORM,
1303
+ LLM_TENSOR_FFN_GATE,
1304
+ LLM_TENSOR_FFN_DOWN,
1305
+ LLM_TENSOR_FFN_UP,
1306
+ LLM_TENSOR_FFN_POST_NORM,
1307
+ };
1308
+ case LLM_ARCH_GEMMA3:
1309
+ return {
1310
+ LLM_TENSOR_TOKEN_EMBD,
1311
+ LLM_TENSOR_OUTPUT_NORM,
1312
+ LLM_TENSOR_OUTPUT,
1313
+ LLM_TENSOR_ATTN_NORM,
1314
+ LLM_TENSOR_ATTN_Q,
1315
+ LLM_TENSOR_ATTN_Q_NORM,
1316
+ LLM_TENSOR_ATTN_K,
1317
+ LLM_TENSOR_ATTN_K_NORM,
1318
+ LLM_TENSOR_ATTN_V,
1319
+ LLM_TENSOR_ATTN_OUT,
1320
+ LLM_TENSOR_ATTN_POST_NORM,
1321
+ LLM_TENSOR_FFN_NORM,
1322
+ LLM_TENSOR_FFN_GATE,
1323
+ LLM_TENSOR_FFN_DOWN,
1324
+ LLM_TENSOR_FFN_UP,
1325
+ LLM_TENSOR_FFN_POST_NORM,
1326
+ };
1327
+ case LLM_ARCH_GEMMA3N:
1328
+ return {
1329
+ LLM_TENSOR_TOKEN_EMBD,
1330
+ LLM_TENSOR_OUTPUT_NORM,
1331
+ LLM_TENSOR_ATTN_NORM,
1332
+ LLM_TENSOR_ATTN_Q,
1333
+ LLM_TENSOR_ATTN_Q_NORM,
1334
+ LLM_TENSOR_ATTN_K,
1335
+ LLM_TENSOR_ATTN_K_NORM,
1336
+ LLM_TENSOR_ATTN_V,
1337
+ LLM_TENSOR_ATTN_OUT,
1338
+ LLM_TENSOR_ATTN_POST_NORM,
1339
+ LLM_TENSOR_FFN_NORM,
1340
+ LLM_TENSOR_FFN_GATE,
1341
+ LLM_TENSOR_FFN_DOWN,
1342
+ LLM_TENSOR_FFN_UP,
1343
+ LLM_TENSOR_FFN_POST_NORM,
1344
+ LLM_TENSOR_PER_LAYER_TOKEN_EMBD,
1345
+ LLM_TENSOR_PER_LAYER_MODEL_PROJ,
1346
+ LLM_TENSOR_PER_LAYER_PROJ_NORM,
1347
+ LLM_TENSOR_ALTUP_UNEMBD_PROJ,
1348
+ LLM_TENSOR_ALTUP_PROJ,
1349
+ LLM_TENSOR_PER_LAYER_INP_GATE,
1350
+ LLM_TENSOR_PER_LAYER_PROJ,
1351
+ LLM_TENSOR_PER_LAYER_POST_NORM,
1352
+ LLM_TENSOR_ALTUP_CORRECT_COEF,
1353
+ LLM_TENSOR_ALTUP_CORRECT_SCALE,
1354
+ LLM_TENSOR_ALTUP_PREDICT_COEF,
1355
+ LLM_TENSOR_ALTUP_ROUTER,
1356
+ LLM_TENSOR_ALTUP_ROUTER_NORM,
1357
+ LLM_TENSOR_LAUREL_L,
1358
+ LLM_TENSOR_LAUREL_R,
1359
+ LLM_TENSOR_LAUREL_POST_NORM,
1360
+ };
1361
+ case LLM_ARCH_GEMMA_EMBEDDING:
1362
+ return {
1363
+ LLM_TENSOR_TOKEN_EMBD,
1364
+ LLM_TENSOR_OUTPUT_NORM,
1365
+ LLM_TENSOR_OUTPUT,
1366
+ LLM_TENSOR_DENSE_2_OUT,
1367
+ LLM_TENSOR_DENSE_3_OUT,
1368
+ LLM_TENSOR_ATTN_NORM,
1369
+ LLM_TENSOR_ATTN_Q,
1370
+ LLM_TENSOR_ATTN_Q_NORM,
1371
+ LLM_TENSOR_ATTN_K,
1372
+ LLM_TENSOR_ATTN_K_NORM,
1373
+ LLM_TENSOR_ATTN_V,
1374
+ LLM_TENSOR_ATTN_OUT,
1375
+ LLM_TENSOR_ATTN_POST_NORM,
1376
+ LLM_TENSOR_FFN_NORM,
1377
+ LLM_TENSOR_FFN_GATE,
1378
+ LLM_TENSOR_FFN_DOWN,
1379
+ LLM_TENSOR_FFN_UP,
1380
+ LLM_TENSOR_FFN_POST_NORM,
1381
+ };
1382
+ case LLM_ARCH_MAMBA:
1383
+ return {
1384
+ LLM_TENSOR_TOKEN_EMBD,
1385
+ LLM_TENSOR_OUTPUT_NORM,
1386
+ LLM_TENSOR_OUTPUT,
1387
+ LLM_TENSOR_ATTN_NORM,
1388
+ LLM_TENSOR_SSM_IN,
1389
+ LLM_TENSOR_SSM_CONV1D,
1390
+ LLM_TENSOR_SSM_X,
1391
+ LLM_TENSOR_SSM_DT,
1392
+ LLM_TENSOR_SSM_A,
1393
+ LLM_TENSOR_SSM_D,
1394
+ LLM_TENSOR_SSM_OUT,
1395
+ };
1396
+ case LLM_ARCH_MAMBA2:
1397
+ return {
1398
+ LLM_TENSOR_TOKEN_EMBD,
1399
+ LLM_TENSOR_OUTPUT_NORM,
1400
+ LLM_TENSOR_OUTPUT,
1401
+ LLM_TENSOR_ATTN_NORM,
1402
+ LLM_TENSOR_SSM_IN,
1403
+ LLM_TENSOR_SSM_CONV1D,
1404
+ LLM_TENSOR_SSM_DT,
1405
+ LLM_TENSOR_SSM_A,
1406
+ LLM_TENSOR_SSM_D,
1407
+ LLM_TENSOR_SSM_NORM,
1408
+ LLM_TENSOR_SSM_OUT,
1409
+ };
1410
+ case LLM_ARCH_JAMBA:
1411
+ return {
1412
+ LLM_TENSOR_TOKEN_EMBD,
1413
+ LLM_TENSOR_OUTPUT_NORM,
1414
+ LLM_TENSOR_OUTPUT,
1415
+ LLM_TENSOR_ATTN_NORM,
1416
+ LLM_TENSOR_SSM_IN,
1417
+ LLM_TENSOR_SSM_CONV1D,
1418
+ LLM_TENSOR_SSM_X,
1419
+ LLM_TENSOR_SSM_DT,
1420
+ LLM_TENSOR_SSM_DT_NORM,
1421
+ LLM_TENSOR_SSM_A,
1422
+ LLM_TENSOR_SSM_B_NORM,
1423
+ LLM_TENSOR_SSM_C_NORM,
1424
+ LLM_TENSOR_SSM_D,
1425
+ LLM_TENSOR_SSM_OUT,
1426
+ LLM_TENSOR_ATTN_Q,
1427
+ LLM_TENSOR_ATTN_K,
1428
+ LLM_TENSOR_ATTN_V,
1429
+ LLM_TENSOR_ATTN_OUT,
1430
+ LLM_TENSOR_FFN_GATE_INP,
1431
+ LLM_TENSOR_FFN_NORM,
1432
+ LLM_TENSOR_FFN_GATE,
1433
+ LLM_TENSOR_FFN_DOWN,
1434
+ LLM_TENSOR_FFN_UP,
1435
+ LLM_TENSOR_FFN_GATE_EXPS,
1436
+ LLM_TENSOR_FFN_DOWN_EXPS,
1437
+ LLM_TENSOR_FFN_UP_EXPS,
1438
+ };
1439
+ case LLM_ARCH_FALCON_H1:
1440
+ return {
1441
+ LLM_TENSOR_TOKEN_EMBD,
1442
+ LLM_TENSOR_OUTPUT,
1443
+ LLM_TENSOR_OUTPUT_NORM,
1444
+ LLM_TENSOR_ATTN_NORM,
1445
+ LLM_TENSOR_ATTN_Q,
1446
+ LLM_TENSOR_ATTN_K,
1447
+ LLM_TENSOR_ATTN_V,
1448
+ LLM_TENSOR_ATTN_OUT,
1449
+ LLM_TENSOR_SSM_IN,
1450
+ LLM_TENSOR_SSM_CONV1D,
1451
+ LLM_TENSOR_SSM_DT,
1452
+ LLM_TENSOR_SSM_A,
1453
+ LLM_TENSOR_SSM_D,
1454
+ LLM_TENSOR_SSM_NORM,
1455
+ LLM_TENSOR_SSM_OUT,
1456
+ LLM_TENSOR_FFN_NORM,
1457
+ LLM_TENSOR_FFN_GATE,
1458
+ LLM_TENSOR_FFN_DOWN,
1459
+ LLM_TENSOR_FFN_UP,
1460
+ };
1461
+ case LLM_ARCH_COMMAND_R:
1462
+ return {
1463
+ LLM_TENSOR_TOKEN_EMBD,
1464
+ LLM_TENSOR_OUTPUT_NORM,
1465
+ LLM_TENSOR_ATTN_NORM,
1466
+ LLM_TENSOR_ATTN_Q,
1467
+ LLM_TENSOR_ATTN_K,
1468
+ LLM_TENSOR_ATTN_V,
1469
+ LLM_TENSOR_ATTN_OUT,
1470
+ LLM_TENSOR_FFN_GATE,
1471
+ LLM_TENSOR_FFN_DOWN,
1472
+ LLM_TENSOR_FFN_UP,
1473
+ LLM_TENSOR_ATTN_Q_NORM,
1474
+ LLM_TENSOR_ATTN_K_NORM,
1475
+ };
1476
+ case LLM_ARCH_COHERE2:
1477
+ return {
1478
+ LLM_TENSOR_TOKEN_EMBD,
1479
+ LLM_TENSOR_OUTPUT_NORM,
1480
+ LLM_TENSOR_ATTN_NORM,
1481
+ LLM_TENSOR_ATTN_Q,
1482
+ LLM_TENSOR_ATTN_K,
1483
+ LLM_TENSOR_ATTN_V,
1484
+ LLM_TENSOR_ATTN_OUT,
1485
+ LLM_TENSOR_FFN_GATE,
1486
+ LLM_TENSOR_FFN_DOWN,
1487
+ LLM_TENSOR_FFN_UP,
1488
+ };
1489
+ case LLM_ARCH_DBRX:
1490
+ return {
1491
+ LLM_TENSOR_TOKEN_EMBD,
1492
+ LLM_TENSOR_OUTPUT_NORM,
1493
+ LLM_TENSOR_OUTPUT,
1494
+ LLM_TENSOR_ATTN_QKV,
1495
+ LLM_TENSOR_ATTN_NORM,
1496
+ LLM_TENSOR_ATTN_OUT,
1497
+ LLM_TENSOR_ATTN_OUT_NORM,
1498
+ LLM_TENSOR_FFN_GATE_INP,
1499
+ LLM_TENSOR_FFN_GATE_EXPS,
1500
+ LLM_TENSOR_FFN_DOWN_EXPS,
1501
+ LLM_TENSOR_FFN_UP_EXPS,
1502
+ };
1503
+ case LLM_ARCH_OLMO:
1504
+ return {
1505
+ LLM_TENSOR_TOKEN_EMBD,
1506
+ LLM_TENSOR_OUTPUT,
1507
+ LLM_TENSOR_ATTN_Q,
1508
+ LLM_TENSOR_ATTN_K,
1509
+ LLM_TENSOR_ATTN_V,
1510
+ LLM_TENSOR_ATTN_OUT,
1511
+ LLM_TENSOR_FFN_GATE,
1512
+ LLM_TENSOR_FFN_DOWN,
1513
+ LLM_TENSOR_FFN_UP,
1514
+ };
1515
+ case LLM_ARCH_OLMO2:
1516
+ return {
1517
+ LLM_TENSOR_TOKEN_EMBD,
1518
+ LLM_TENSOR_OUTPUT_NORM,
1519
+ LLM_TENSOR_OUTPUT,
1520
+ LLM_TENSOR_ATTN_Q,
1521
+ LLM_TENSOR_ATTN_K,
1522
+ LLM_TENSOR_ATTN_V,
1523
+ LLM_TENSOR_ATTN_OUT,
1524
+ LLM_TENSOR_ATTN_POST_NORM,
1525
+ LLM_TENSOR_ATTN_Q_NORM,
1526
+ LLM_TENSOR_ATTN_K_NORM,
1527
+ LLM_TENSOR_FFN_POST_NORM,
1528
+ LLM_TENSOR_FFN_GATE,
1529
+ LLM_TENSOR_FFN_DOWN,
1530
+ LLM_TENSOR_FFN_UP,
1531
+ };
1532
+ case LLM_ARCH_OPENELM:
1533
+ return {
1534
+ LLM_TENSOR_TOKEN_EMBD,
1535
+ LLM_TENSOR_OUTPUT_NORM,
1536
+ LLM_TENSOR_ATTN_NORM,
1537
+ LLM_TENSOR_ATTN_QKV,
1538
+ LLM_TENSOR_ATTN_Q_NORM,
1539
+ LLM_TENSOR_ATTN_K_NORM,
1540
+ LLM_TENSOR_ATTN_OUT,
1541
+ LLM_TENSOR_FFN_NORM,
1542
+ LLM_TENSOR_FFN_GATE,
1543
+ LLM_TENSOR_FFN_DOWN,
1544
+ LLM_TENSOR_FFN_UP,
1545
+ };
1546
+ case LLM_ARCH_ARCTIC:
1547
+ return {
1548
+ LLM_TENSOR_TOKEN_EMBD,
1549
+ LLM_TENSOR_OUTPUT_NORM,
1550
+ LLM_TENSOR_OUTPUT,
1551
+ LLM_TENSOR_ATTN_NORM,
1552
+ LLM_TENSOR_ATTN_Q,
1553
+ LLM_TENSOR_ATTN_K,
1554
+ LLM_TENSOR_ATTN_V,
1555
+ LLM_TENSOR_ATTN_OUT,
1556
+ LLM_TENSOR_FFN_GATE_INP,
1557
+ LLM_TENSOR_FFN_NORM,
1558
+ LLM_TENSOR_FFN_GATE,
1559
+ LLM_TENSOR_FFN_DOWN,
1560
+ LLM_TENSOR_FFN_UP,
1561
+ LLM_TENSOR_FFN_NORM_EXPS,
1562
+ LLM_TENSOR_FFN_GATE_EXPS,
1563
+ LLM_TENSOR_FFN_DOWN_EXPS,
1564
+ LLM_TENSOR_FFN_UP_EXPS,
1565
+ };
1566
+ case LLM_ARCH_DEEPSEEK:
1567
+ return {
1568
+ LLM_TENSOR_TOKEN_EMBD,
1569
+ LLM_TENSOR_OUTPUT_NORM,
1570
+ LLM_TENSOR_OUTPUT,
1571
+ LLM_TENSOR_ROPE_FREQS,
1572
+ LLM_TENSOR_ATTN_NORM,
1573
+ LLM_TENSOR_ATTN_Q,
1574
+ LLM_TENSOR_ATTN_K,
1575
+ LLM_TENSOR_ATTN_V,
1576
+ LLM_TENSOR_ATTN_OUT,
1577
+ LLM_TENSOR_ATTN_ROT_EMBD,
1578
+ LLM_TENSOR_FFN_GATE_INP,
1579
+ LLM_TENSOR_FFN_NORM,
1580
+ LLM_TENSOR_FFN_GATE,
1581
+ LLM_TENSOR_FFN_DOWN,
1582
+ LLM_TENSOR_FFN_UP,
1583
+ LLM_TENSOR_FFN_GATE_EXPS,
1584
+ LLM_TENSOR_FFN_DOWN_EXPS,
1585
+ LLM_TENSOR_FFN_UP_EXPS,
1586
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1587
+ LLM_TENSOR_FFN_GATE_SHEXP,
1588
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1589
+ LLM_TENSOR_FFN_UP_SHEXP,
1590
+ };
1591
+ case LLM_ARCH_DEEPSEEK2:
1592
+ return {
1593
+ LLM_TENSOR_TOKEN_EMBD,
1594
+ LLM_TENSOR_OUTPUT_NORM,
1595
+ LLM_TENSOR_OUTPUT,
1596
+ LLM_TENSOR_ATTN_NORM,
1597
+ LLM_TENSOR_ATTN_Q_A_NORM,
1598
+ LLM_TENSOR_ATTN_KV_A_NORM,
1599
+ LLM_TENSOR_ATTN_Q,
1600
+ LLM_TENSOR_ATTN_Q_A,
1601
+ LLM_TENSOR_ATTN_Q_B,
1602
+ LLM_TENSOR_ATTN_KV_A_MQA,
1603
+ LLM_TENSOR_ATTN_KV_B,
1604
+ LLM_TENSOR_ATTN_K_B,
1605
+ LLM_TENSOR_ATTN_V_B,
1606
+ LLM_TENSOR_ATTN_OUT,
1607
+ LLM_TENSOR_FFN_NORM,
1608
+ LLM_TENSOR_FFN_GATE,
1609
+ LLM_TENSOR_FFN_UP,
1610
+ LLM_TENSOR_FFN_DOWN,
1611
+ LLM_TENSOR_FFN_GATE_INP,
1612
+ LLM_TENSOR_FFN_GATE_EXPS,
1613
+ LLM_TENSOR_FFN_DOWN_EXPS,
1614
+ LLM_TENSOR_FFN_UP_EXPS,
1615
+ LLM_TENSOR_FFN_GATE_UP_EXPS,
1616
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1617
+ LLM_TENSOR_FFN_GATE_SHEXP,
1618
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1619
+ LLM_TENSOR_FFN_UP_SHEXP,
1620
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1621
+ };
1622
+ case LLM_ARCH_PLM:
1623
+ return {
1624
+ LLM_TENSOR_TOKEN_EMBD,
1625
+ LLM_TENSOR_OUTPUT_NORM,
1626
+ LLM_TENSOR_ATTN_NORM,
1627
+ LLM_TENSOR_ATTN_Q,
1628
+ LLM_TENSOR_ATTN_KV_A_MQA,
1629
+ LLM_TENSOR_ATTN_KV_A_NORM,
1630
+ LLM_TENSOR_ATTN_KV_B,
1631
+ LLM_TENSOR_ATTN_OUT,
1632
+ LLM_TENSOR_FFN_NORM,
1633
+ LLM_TENSOR_FFN_DOWN,
1634
+ LLM_TENSOR_FFN_UP,
1635
+ };
1636
+ case LLM_ARCH_CHATGLM:
1637
+ return {
1638
+ LLM_TENSOR_TOKEN_EMBD,
1639
+ LLM_TENSOR_ROPE_FREQS,
1640
+ LLM_TENSOR_OUTPUT_NORM,
1641
+ LLM_TENSOR_OUTPUT,
1642
+ LLM_TENSOR_ATTN_NORM,
1643
+ LLM_TENSOR_ATTN_QKV,
1644
+ LLM_TENSOR_ATTN_Q,
1645
+ LLM_TENSOR_ATTN_K,
1646
+ LLM_TENSOR_ATTN_V,
1647
+ LLM_TENSOR_ATTN_OUT,
1648
+ LLM_TENSOR_FFN_NORM,
1649
+ LLM_TENSOR_FFN_UP,
1650
+ LLM_TENSOR_FFN_DOWN,
1651
+ };
1652
+ case LLM_ARCH_GLM4:
1653
+ return {
1654
+ LLM_TENSOR_TOKEN_EMBD,
1655
+ LLM_TENSOR_ROPE_FREQS,
1656
+ LLM_TENSOR_OUTPUT_NORM,
1657
+ LLM_TENSOR_OUTPUT,
1658
+ LLM_TENSOR_ATTN_NORM,
1659
+ LLM_TENSOR_ATTN_Q,
1660
+ LLM_TENSOR_ATTN_K,
1661
+ LLM_TENSOR_ATTN_V,
1662
+ LLM_TENSOR_ATTN_OUT,
1663
+ LLM_TENSOR_FFN_NORM,
1664
+ LLM_TENSOR_FFN_UP,
1665
+ LLM_TENSOR_FFN_DOWN,
1666
+ LLM_TENSOR_ATTN_POST_NORM,
1667
+ LLM_TENSOR_FFN_POST_NORM,
1668
+ LLM_TENSOR_NEXTN_EH_PROJ,
1669
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1670
+ LLM_TENSOR_NEXTN_ENORM,
1671
+ LLM_TENSOR_NEXTN_HNORM,
1672
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1673
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1674
+ };
1675
+ case LLM_ARCH_GLM4_MOE:
1676
+ return {
1677
+ LLM_TENSOR_TOKEN_EMBD,
1678
+ LLM_TENSOR_OUTPUT_NORM,
1679
+ LLM_TENSOR_OUTPUT,
1680
+ LLM_TENSOR_ATTN_NORM,
1681
+ LLM_TENSOR_ATTN_POST_NORM,
1682
+ LLM_TENSOR_ATTN_Q,
1683
+ LLM_TENSOR_ATTN_K,
1684
+ LLM_TENSOR_ATTN_V,
1685
+ LLM_TENSOR_ATTN_OUT,
1686
+ LLM_TENSOR_ATTN_Q_NORM,
1687
+ LLM_TENSOR_ATTN_K_NORM,
1688
+ LLM_TENSOR_FFN_GATE,
1689
+ LLM_TENSOR_FFN_DOWN,
1690
+ LLM_TENSOR_FFN_UP,
1691
+ LLM_TENSOR_FFN_GATE_INP,
1692
+ LLM_TENSOR_FFN_GATE_EXPS,
1693
+ LLM_TENSOR_FFN_DOWN_EXPS,
1694
+ LLM_TENSOR_FFN_UP_EXPS,
1695
+ LLM_TENSOR_FFN_GATE_SHEXP,
1696
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1697
+ LLM_TENSOR_FFN_UP_SHEXP,
1698
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1699
+ LLM_TENSOR_NEXTN_EH_PROJ,
1700
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1701
+ LLM_TENSOR_NEXTN_ENORM,
1702
+ LLM_TENSOR_NEXTN_HNORM,
1703
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1704
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1705
+ };
1706
+ case LLM_ARCH_GLM_DSA:
1707
+ return {
1708
+ LLM_TENSOR_TOKEN_EMBD,
1709
+ LLM_TENSOR_OUTPUT_NORM,
1710
+ LLM_TENSOR_OUTPUT,
1711
+ LLM_TENSOR_ATTN_NORM,
1712
+ LLM_TENSOR_ATTN_Q_A_NORM,
1713
+ LLM_TENSOR_ATTN_KV_A_NORM,
1714
+ LLM_TENSOR_ATTN_Q,
1715
+ LLM_TENSOR_ATTN_Q_A,
1716
+ LLM_TENSOR_ATTN_Q_B,
1717
+ LLM_TENSOR_ATTN_KV_A_MQA,
1718
+ LLM_TENSOR_ATTN_KV_B,
1719
+ LLM_TENSOR_ATTN_K_B,
1720
+ LLM_TENSOR_ATTN_V_B,
1721
+ LLM_TENSOR_ATTN_OUT,
1722
+ LLM_TENSOR_FFN_NORM,
1723
+ LLM_TENSOR_FFN_GATE,
1724
+ LLM_TENSOR_FFN_UP,
1725
+ LLM_TENSOR_FFN_DOWN,
1726
+ LLM_TENSOR_FFN_GATE_INP,
1727
+ LLM_TENSOR_FFN_GATE_EXPS,
1728
+ LLM_TENSOR_FFN_DOWN_EXPS,
1729
+ LLM_TENSOR_FFN_UP_EXPS,
1730
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1731
+ LLM_TENSOR_FFN_GATE_SHEXP,
1732
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1733
+ LLM_TENSOR_FFN_UP_SHEXP,
1734
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1735
+ LLM_TENSOR_INDEXER_K_NORM,
1736
+ LLM_TENSOR_INDEXER_PROJ,
1737
+ LLM_TENSOR_INDEXER_ATTN_K,
1738
+ LLM_TENSOR_INDEXER_ATTN_Q_B,
1739
+ LLM_TENSOR_NEXTN_EH_PROJ,
1740
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1741
+ LLM_TENSOR_NEXTN_ENORM,
1742
+ LLM_TENSOR_NEXTN_HNORM,
1743
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1744
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1745
+ };
1746
+ case LLM_ARCH_BITNET:
1747
+ return {
1748
+ LLM_TENSOR_TOKEN_EMBD,
1749
+ LLM_TENSOR_OUTPUT_NORM,
1750
+ LLM_TENSOR_ATTN_Q,
1751
+ LLM_TENSOR_ATTN_K,
1752
+ LLM_TENSOR_ATTN_V,
1753
+ LLM_TENSOR_ATTN_OUT,
1754
+ LLM_TENSOR_ATTN_NORM,
1755
+ LLM_TENSOR_ATTN_SUB_NORM,
1756
+ LLM_TENSOR_FFN_GATE,
1757
+ LLM_TENSOR_FFN_DOWN,
1758
+ LLM_TENSOR_FFN_UP,
1759
+ LLM_TENSOR_FFN_NORM,
1760
+ LLM_TENSOR_FFN_SUB_NORM,
1761
+ };
1762
+ case LLM_ARCH_T5:
1763
+ return {
1764
+ LLM_TENSOR_TOKEN_EMBD,
1765
+ LLM_TENSOR_OUTPUT,
1766
+ LLM_TENSOR_DEC_OUTPUT_NORM,
1767
+ LLM_TENSOR_DEC_ATTN_NORM,
1768
+ LLM_TENSOR_DEC_ATTN_Q,
1769
+ LLM_TENSOR_DEC_ATTN_K,
1770
+ LLM_TENSOR_DEC_ATTN_V,
1771
+ LLM_TENSOR_DEC_ATTN_OUT,
1772
+ LLM_TENSOR_DEC_ATTN_REL_B,
1773
+ LLM_TENSOR_DEC_CROSS_ATTN_NORM,
1774
+ LLM_TENSOR_DEC_CROSS_ATTN_Q,
1775
+ LLM_TENSOR_DEC_CROSS_ATTN_K,
1776
+ LLM_TENSOR_DEC_CROSS_ATTN_V,
1777
+ LLM_TENSOR_DEC_CROSS_ATTN_OUT,
1778
+ LLM_TENSOR_DEC_CROSS_ATTN_REL_B,
1779
+ LLM_TENSOR_DEC_FFN_NORM,
1780
+ LLM_TENSOR_DEC_FFN_GATE,
1781
+ LLM_TENSOR_DEC_FFN_DOWN,
1782
+ LLM_TENSOR_DEC_FFN_UP,
1783
+ LLM_TENSOR_ENC_OUTPUT_NORM,
1784
+ LLM_TENSOR_ENC_ATTN_NORM,
1785
+ LLM_TENSOR_ENC_ATTN_Q,
1786
+ LLM_TENSOR_ENC_ATTN_K,
1787
+ LLM_TENSOR_ENC_ATTN_V,
1788
+ LLM_TENSOR_ENC_ATTN_OUT,
1789
+ LLM_TENSOR_ENC_ATTN_REL_B,
1790
+ LLM_TENSOR_ENC_FFN_NORM,
1791
+ LLM_TENSOR_ENC_FFN_GATE,
1792
+ LLM_TENSOR_ENC_FFN_DOWN,
1793
+ LLM_TENSOR_ENC_FFN_UP,
1794
+ };
1795
+ case LLM_ARCH_T5ENCODER:
1796
+ return {
1797
+ LLM_TENSOR_TOKEN_EMBD,
1798
+ LLM_TENSOR_OUTPUT,
1799
+ LLM_TENSOR_ENC_OUTPUT_NORM,
1800
+ LLM_TENSOR_ENC_ATTN_NORM,
1801
+ LLM_TENSOR_ENC_ATTN_Q,
1802
+ LLM_TENSOR_ENC_ATTN_K,
1803
+ LLM_TENSOR_ENC_ATTN_V,
1804
+ LLM_TENSOR_ENC_ATTN_OUT,
1805
+ LLM_TENSOR_ENC_ATTN_REL_B,
1806
+ LLM_TENSOR_ENC_FFN_NORM,
1807
+ LLM_TENSOR_ENC_FFN_GATE,
1808
+ LLM_TENSOR_ENC_FFN_DOWN,
1809
+ LLM_TENSOR_ENC_FFN_UP,
1810
+ };
1811
+ case LLM_ARCH_JAIS:
1812
+ return {
1813
+ LLM_TENSOR_TOKEN_EMBD,
1814
+ LLM_TENSOR_OUTPUT_NORM,
1815
+ LLM_TENSOR_OUTPUT,
1816
+ LLM_TENSOR_ATTN_NORM,
1817
+ LLM_TENSOR_ATTN_QKV,
1818
+ LLM_TENSOR_ATTN_OUT,
1819
+ LLM_TENSOR_FFN_NORM,
1820
+ LLM_TENSOR_FFN_UP,
1821
+ LLM_TENSOR_FFN_GATE,
1822
+ LLM_TENSOR_FFN_DOWN,
1823
+ };
1824
+ case LLM_ARCH_JAIS2:
1825
+ return {
1826
+ LLM_TENSOR_TOKEN_EMBD,
1827
+ LLM_TENSOR_OUTPUT_NORM,
1828
+ LLM_TENSOR_OUTPUT,
1829
+ LLM_TENSOR_ATTN_NORM,
1830
+ LLM_TENSOR_ATTN_Q,
1831
+ LLM_TENSOR_ATTN_K,
1832
+ LLM_TENSOR_ATTN_V,
1833
+ LLM_TENSOR_ATTN_OUT,
1834
+ LLM_TENSOR_FFN_NORM,
1835
+ LLM_TENSOR_FFN_UP,
1836
+ LLM_TENSOR_FFN_DOWN,
1837
+ };
1838
+ case LLM_ARCH_NEMOTRON_H:
1839
+ return {
1840
+ LLM_TENSOR_TOKEN_EMBD,
1841
+ LLM_TENSOR_OUTPUT_NORM,
1842
+ LLM_TENSOR_OUTPUT,
1843
+ LLM_TENSOR_ATTN_NORM,
1844
+ LLM_TENSOR_SSM_IN,
1845
+ LLM_TENSOR_SSM_CONV1D,
1846
+ LLM_TENSOR_SSM_DT,
1847
+ LLM_TENSOR_SSM_A,
1848
+ LLM_TENSOR_SSM_D,
1849
+ LLM_TENSOR_SSM_NORM,
1850
+ LLM_TENSOR_SSM_OUT,
1851
+ LLM_TENSOR_ATTN_Q,
1852
+ LLM_TENSOR_ATTN_K,
1853
+ LLM_TENSOR_ATTN_V,
1854
+ LLM_TENSOR_ATTN_OUT,
1855
+ LLM_TENSOR_FFN_DOWN,
1856
+ LLM_TENSOR_FFN_UP,
1857
+ };
1858
+ case LLM_ARCH_NEMOTRON_H_MOE:
1859
+ return {
1860
+ LLM_TENSOR_TOKEN_EMBD,
1861
+ LLM_TENSOR_OUTPUT_NORM,
1862
+ LLM_TENSOR_OUTPUT,
1863
+ LLM_TENSOR_ATTN_NORM,
1864
+ // mamba(2) ssm layers
1865
+ LLM_TENSOR_SSM_IN,
1866
+ LLM_TENSOR_SSM_CONV1D,
1867
+ LLM_TENSOR_SSM_DT,
1868
+ LLM_TENSOR_SSM_A,
1869
+ LLM_TENSOR_SSM_D,
1870
+ LLM_TENSOR_SSM_NORM,
1871
+ LLM_TENSOR_SSM_OUT,
1872
+ // attention layers
1873
+ LLM_TENSOR_ATTN_Q,
1874
+ LLM_TENSOR_ATTN_K,
1875
+ LLM_TENSOR_ATTN_V,
1876
+ LLM_TENSOR_ATTN_OUT,
1877
+ // dense FFN
1878
+ LLM_TENSOR_FFN_DOWN,
1879
+ LLM_TENSOR_FFN_UP,
1880
+ // MoE FFN (for MoE layers)
1881
+ LLM_TENSOR_FFN_GATE_INP,
1882
+ LLM_TENSOR_FFN_UP_EXPS,
1883
+ LLM_TENSOR_FFN_DOWN_EXPS,
1884
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1885
+ LLM_TENSOR_FFN_LATENT_DOWN,
1886
+ LLM_TENSOR_FFN_LATENT_UP,
1887
+ // MoE shared expert layer
1888
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1889
+ LLM_TENSOR_FFN_UP_SHEXP,
1890
+ };
1891
+ case LLM_ARCH_EXAONE4:
1892
+ return {
1893
+ LLM_TENSOR_TOKEN_EMBD,
1894
+ LLM_TENSOR_OUTPUT_NORM,
1895
+ LLM_TENSOR_OUTPUT,
1896
+ LLM_TENSOR_ROPE_FREQS,
1897
+ LLM_TENSOR_ATTN_Q,
1898
+ LLM_TENSOR_ATTN_Q_NORM,
1899
+ LLM_TENSOR_ATTN_K,
1900
+ LLM_TENSOR_ATTN_K_NORM,
1901
+ LLM_TENSOR_ATTN_V,
1902
+ LLM_TENSOR_ATTN_OUT,
1903
+ LLM_TENSOR_ATTN_POST_NORM,
1904
+ LLM_TENSOR_FFN_GATE,
1905
+ LLM_TENSOR_FFN_DOWN,
1906
+ LLM_TENSOR_FFN_UP,
1907
+ LLM_TENSOR_FFN_POST_NORM,
1908
+ };
1909
+ case LLM_ARCH_EXAONE_MOE:
1910
+ return {
1911
+ LLM_TENSOR_TOKEN_EMBD,
1912
+ LLM_TENSOR_OUTPUT_NORM,
1913
+ LLM_TENSOR_OUTPUT,
1914
+ LLM_TENSOR_ROPE_FREQS,
1915
+ LLM_TENSOR_ATTN_NORM,
1916
+ LLM_TENSOR_ATTN_Q,
1917
+ LLM_TENSOR_ATTN_Q_NORM,
1918
+ LLM_TENSOR_ATTN_K,
1919
+ LLM_TENSOR_ATTN_K_NORM,
1920
+ LLM_TENSOR_ATTN_V,
1921
+ LLM_TENSOR_ATTN_OUT,
1922
+ LLM_TENSOR_FFN_NORM,
1923
+ LLM_TENSOR_FFN_GATE,
1924
+ LLM_TENSOR_FFN_DOWN,
1925
+ LLM_TENSOR_FFN_UP,
1926
+ LLM_TENSOR_FFN_GATE_INP,
1927
+ LLM_TENSOR_FFN_GATE_EXPS,
1928
+ LLM_TENSOR_FFN_DOWN_EXPS,
1929
+ LLM_TENSOR_FFN_UP_EXPS,
1930
+ LLM_TENSOR_FFN_GATE_SHEXP,
1931
+ LLM_TENSOR_FFN_UP_SHEXP,
1932
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1933
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1934
+ LLM_TENSOR_NEXTN_EH_PROJ,
1935
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1936
+ LLM_TENSOR_NEXTN_ENORM,
1937
+ LLM_TENSOR_NEXTN_HNORM,
1938
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1939
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1940
+ };
1941
+ case LLM_ARCH_RWKV6:
1942
+ return {
1943
+ LLM_TENSOR_TOKEN_EMBD,
1944
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1945
+ LLM_TENSOR_OUTPUT_NORM,
1946
+ LLM_TENSOR_OUTPUT,
1947
+ LLM_TENSOR_ATTN_NORM,
1948
+ LLM_TENSOR_ATTN_NORM_2,
1949
+ LLM_TENSOR_TIME_MIX_W1,
1950
+ LLM_TENSOR_TIME_MIX_W2,
1951
+ LLM_TENSOR_TIME_MIX_LERP_X,
1952
+ LLM_TENSOR_TIME_MIX_LERP_W,
1953
+ LLM_TENSOR_TIME_MIX_LERP_K,
1954
+ LLM_TENSOR_TIME_MIX_LERP_V,
1955
+ LLM_TENSOR_TIME_MIX_LERP_R,
1956
+ LLM_TENSOR_TIME_MIX_LERP_G,
1957
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1958
+ LLM_TENSOR_TIME_MIX_FIRST,
1959
+ LLM_TENSOR_TIME_MIX_DECAY,
1960
+ LLM_TENSOR_TIME_MIX_DECAY_W1,
1961
+ LLM_TENSOR_TIME_MIX_DECAY_W2,
1962
+ LLM_TENSOR_TIME_MIX_KEY,
1963
+ LLM_TENSOR_TIME_MIX_VALUE,
1964
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1965
+ LLM_TENSOR_TIME_MIX_GATE,
1966
+ LLM_TENSOR_TIME_MIX_LN,
1967
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1968
+ LLM_TENSOR_CHANNEL_MIX_LERP_K,
1969
+ LLM_TENSOR_CHANNEL_MIX_LERP_R,
1970
+ LLM_TENSOR_CHANNEL_MIX_KEY,
1971
+ LLM_TENSOR_CHANNEL_MIX_VALUE,
1972
+ LLM_TENSOR_CHANNEL_MIX_RECEPTANCE,
1973
+ };
1974
+ case LLM_ARCH_RWKV6QWEN2:
1975
+ return {
1976
+ LLM_TENSOR_TOKEN_EMBD,
1977
+ LLM_TENSOR_OUTPUT_NORM,
1978
+ LLM_TENSOR_OUTPUT,
1979
+ LLM_TENSOR_ATTN_NORM,
1980
+ LLM_TENSOR_TIME_MIX_W1,
1981
+ LLM_TENSOR_TIME_MIX_W2,
1982
+ LLM_TENSOR_TIME_MIX_LERP_X,
1983
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1984
+ LLM_TENSOR_TIME_MIX_FIRST,
1985
+ LLM_TENSOR_TIME_MIX_DECAY,
1986
+ LLM_TENSOR_TIME_MIX_DECAY_W1,
1987
+ LLM_TENSOR_TIME_MIX_DECAY_W2,
1988
+ LLM_TENSOR_TIME_MIX_KEY,
1989
+ LLM_TENSOR_TIME_MIX_VALUE,
1990
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1991
+ LLM_TENSOR_TIME_MIX_GATE,
1992
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1993
+ LLM_TENSOR_FFN_NORM,
1994
+ LLM_TENSOR_FFN_GATE,
1995
+ LLM_TENSOR_FFN_DOWN,
1996
+ LLM_TENSOR_FFN_UP,
1997
+ };
1998
+ case LLM_ARCH_RWKV7:
1999
+ return {
2000
+ LLM_TENSOR_TOKEN_EMBD,
2001
+ LLM_TENSOR_TOKEN_EMBD_NORM,
2002
+ LLM_TENSOR_OUTPUT_NORM,
2003
+ LLM_TENSOR_OUTPUT,
2004
+ LLM_TENSOR_ATTN_NORM,
2005
+ LLM_TENSOR_ATTN_NORM_2,
2006
+ LLM_TENSOR_TIME_MIX_W0,
2007
+ LLM_TENSOR_TIME_MIX_W1,
2008
+ LLM_TENSOR_TIME_MIX_W2,
2009
+ LLM_TENSOR_TIME_MIX_A0,
2010
+ LLM_TENSOR_TIME_MIX_A1,
2011
+ LLM_TENSOR_TIME_MIX_A2,
2012
+ LLM_TENSOR_TIME_MIX_V0,
2013
+ LLM_TENSOR_TIME_MIX_V1,
2014
+ LLM_TENSOR_TIME_MIX_V2,
2015
+ LLM_TENSOR_TIME_MIX_G1,
2016
+ LLM_TENSOR_TIME_MIX_G2,
2017
+ LLM_TENSOR_TIME_MIX_K_K,
2018
+ LLM_TENSOR_TIME_MIX_K_A,
2019
+ LLM_TENSOR_TIME_MIX_R_K,
2020
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
2021
+ LLM_TENSOR_TIME_MIX_KEY,
2022
+ LLM_TENSOR_TIME_MIX_VALUE,
2023
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
2024
+ LLM_TENSOR_TIME_MIX_LN,
2025
+ LLM_TENSOR_TIME_MIX_OUTPUT,
2026
+ LLM_TENSOR_CHANNEL_MIX_LERP_K,
2027
+ LLM_TENSOR_CHANNEL_MIX_KEY,
2028
+ LLM_TENSOR_CHANNEL_MIX_VALUE,
2029
+ };
2030
+ case LLM_ARCH_ARWKV7:
2031
+ return {
2032
+ LLM_TENSOR_TOKEN_EMBD,
2033
+ LLM_TENSOR_TOKEN_EMBD_NORM,
2034
+ LLM_TENSOR_OUTPUT_NORM,
2035
+ LLM_TENSOR_OUTPUT,
2036
+ LLM_TENSOR_ATTN_NORM,
2037
+ LLM_TENSOR_TIME_MIX_W0,
2038
+ LLM_TENSOR_TIME_MIX_W1,
2039
+ LLM_TENSOR_TIME_MIX_W2,
2040
+ LLM_TENSOR_TIME_MIX_A0,
2041
+ LLM_TENSOR_TIME_MIX_A1,
2042
+ LLM_TENSOR_TIME_MIX_A2,
2043
+ LLM_TENSOR_TIME_MIX_V0,
2044
+ LLM_TENSOR_TIME_MIX_V1,
2045
+ LLM_TENSOR_TIME_MIX_V2,
2046
+ LLM_TENSOR_TIME_MIX_G1,
2047
+ LLM_TENSOR_TIME_MIX_G2,
2048
+ LLM_TENSOR_TIME_MIX_K_K,
2049
+ LLM_TENSOR_TIME_MIX_K_A,
2050
+ LLM_TENSOR_TIME_MIX_R_K,
2051
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
2052
+ LLM_TENSOR_TIME_MIX_KEY,
2053
+ LLM_TENSOR_TIME_MIX_VALUE,
2054
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
2055
+ LLM_TENSOR_TIME_MIX_LN,
2056
+ LLM_TENSOR_TIME_MIX_OUTPUT,
2057
+ LLM_TENSOR_FFN_NORM,
2058
+ LLM_TENSOR_FFN_GATE,
2059
+ LLM_TENSOR_FFN_DOWN,
2060
+ LLM_TENSOR_FFN_UP,
2061
+ };
2062
+ case LLM_ARCH_GRANITE_MOE:
2063
+ return {
2064
+ LLM_TENSOR_TOKEN_EMBD,
2065
+ LLM_TENSOR_OUTPUT_NORM,
2066
+ LLM_TENSOR_OUTPUT,
2067
+ LLM_TENSOR_ATTN_NORM,
2068
+ LLM_TENSOR_ATTN_Q,
2069
+ LLM_TENSOR_ATTN_K,
2070
+ LLM_TENSOR_ATTN_V,
2071
+ LLM_TENSOR_ATTN_OUT,
2072
+ LLM_TENSOR_FFN_NORM,
2073
+ LLM_TENSOR_FFN_GATE_INP,
2074
+ LLM_TENSOR_FFN_GATE_EXPS,
2075
+ LLM_TENSOR_FFN_DOWN_EXPS,
2076
+ LLM_TENSOR_FFN_UP_EXPS,
2077
+ LLM_TENSOR_FFN_GATE_SHEXP,
2078
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2079
+ LLM_TENSOR_FFN_UP_SHEXP,
2080
+ };
2081
+ case LLM_ARCH_GRANITE_HYBRID:
2082
+ return {
2083
+ LLM_TENSOR_TOKEN_EMBD,
2084
+ LLM_TENSOR_OUTPUT_NORM,
2085
+ LLM_TENSOR_OUTPUT,
2086
+ LLM_TENSOR_ATTN_NORM,
2087
+ LLM_TENSOR_SSM_IN,
2088
+ LLM_TENSOR_SSM_CONV1D,
2089
+ LLM_TENSOR_SSM_DT,
2090
+ LLM_TENSOR_SSM_A,
2091
+ LLM_TENSOR_SSM_D,
2092
+ LLM_TENSOR_SSM_NORM,
2093
+ LLM_TENSOR_SSM_OUT,
2094
+ LLM_TENSOR_ATTN_Q,
2095
+ LLM_TENSOR_ATTN_K,
2096
+ LLM_TENSOR_ATTN_V,
2097
+ LLM_TENSOR_ATTN_OUT,
2098
+ LLM_TENSOR_FFN_NORM,
2099
+ LLM_TENSOR_FFN_GATE,
2100
+ LLM_TENSOR_FFN_DOWN,
2101
+ LLM_TENSOR_FFN_UP,
2102
+ LLM_TENSOR_FFN_NORM,
2103
+ LLM_TENSOR_FFN_GATE_INP,
2104
+ LLM_TENSOR_FFN_GATE_EXPS,
2105
+ LLM_TENSOR_FFN_DOWN_EXPS,
2106
+ LLM_TENSOR_FFN_UP_EXPS,
2107
+ LLM_TENSOR_FFN_GATE_SHEXP,
2108
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2109
+ LLM_TENSOR_FFN_UP_SHEXP,
2110
+ };
2111
+ case LLM_ARCH_WAVTOKENIZER_DEC:
2112
+ return {
2113
+ LLM_TENSOR_TOKEN_EMBD,
2114
+ LLM_TENSOR_TOKEN_EMBD_NORM,
2115
+ LLM_TENSOR_CONV1D,
2116
+ LLM_TENSOR_CONVNEXT_DW,
2117
+ LLM_TENSOR_CONVNEXT_NORM,
2118
+ LLM_TENSOR_CONVNEXT_PW1,
2119
+ LLM_TENSOR_CONVNEXT_PW2,
2120
+ LLM_TENSOR_CONVNEXT_GAMMA,
2121
+ LLM_TENSOR_OUTPUT_NORM,
2122
+ LLM_TENSOR_OUTPUT,
2123
+ LLM_TENSOR_POS_NET_CONV1,
2124
+ LLM_TENSOR_POS_NET_CONV2,
2125
+ LLM_TENSOR_POS_NET_NORM,
2126
+ LLM_TENSOR_POS_NET_NORM1,
2127
+ LLM_TENSOR_POS_NET_NORM2,
2128
+ LLM_TENSOR_POS_NET_ATTN_NORM,
2129
+ LLM_TENSOR_POS_NET_ATTN_Q,
2130
+ LLM_TENSOR_POS_NET_ATTN_K,
2131
+ LLM_TENSOR_POS_NET_ATTN_V,
2132
+ LLM_TENSOR_POS_NET_ATTN_OUT,
2133
+ };
2134
+ case LLM_ARCH_BAILINGMOE:
2135
+ return {
2136
+ LLM_TENSOR_TOKEN_EMBD,
2137
+ LLM_TENSOR_OUTPUT_NORM,
2138
+ LLM_TENSOR_OUTPUT,
2139
+ LLM_TENSOR_ROPE_FREQS,
2140
+ LLM_TENSOR_ATTN_NORM,
2141
+ LLM_TENSOR_ATTN_Q,
2142
+ LLM_TENSOR_ATTN_K,
2143
+ LLM_TENSOR_ATTN_V,
2144
+ LLM_TENSOR_ATTN_OUT,
2145
+ LLM_TENSOR_FFN_GATE_INP,
2146
+ LLM_TENSOR_FFN_NORM,
2147
+ LLM_TENSOR_FFN_GATE_EXPS,
2148
+ LLM_TENSOR_FFN_DOWN_EXPS,
2149
+ LLM_TENSOR_FFN_UP_EXPS,
2150
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
2151
+ LLM_TENSOR_FFN_GATE_SHEXP,
2152
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2153
+ LLM_TENSOR_FFN_UP_SHEXP,
2154
+ };
2155
+ case LLM_ARCH_BAILINGMOE2:
2156
+ return {
2157
+ LLM_TENSOR_TOKEN_EMBD,
2158
+ LLM_TENSOR_OUTPUT_NORM,
2159
+ LLM_TENSOR_OUTPUT,
2160
+ LLM_TENSOR_ATTN_NORM,
2161
+ LLM_TENSOR_ATTN_Q_NORM,
2162
+ LLM_TENSOR_ATTN_K_NORM,
2163
+ LLM_TENSOR_ATTN_QKV,
2164
+ LLM_TENSOR_ATTN_OUT,
2165
+ LLM_TENSOR_FFN_GATE_INP,
2166
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2167
+ LLM_TENSOR_FFN_NORM,
2168
+ LLM_TENSOR_FFN_GATE,
2169
+ LLM_TENSOR_FFN_DOWN,
2170
+ LLM_TENSOR_FFN_UP,
2171
+ LLM_TENSOR_FFN_GATE_EXPS,
2172
+ LLM_TENSOR_FFN_DOWN_EXPS,
2173
+ LLM_TENSOR_FFN_UP_EXPS,
2174
+ LLM_TENSOR_FFN_GATE_SHEXP,
2175
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2176
+ LLM_TENSOR_FFN_UP_SHEXP,
2177
+ LLM_TENSOR_NEXTN_EH_PROJ,
2178
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
2179
+ LLM_TENSOR_NEXTN_ENORM,
2180
+ LLM_TENSOR_NEXTN_HNORM,
2181
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
2182
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
2183
+ LLM_TENSOR_LAYER_OUT_NORM,
2184
+ };
2185
+ case LLM_ARCH_DOTS1:
2186
+ return {
2187
+ LLM_TENSOR_TOKEN_EMBD,
2188
+ LLM_TENSOR_OUTPUT_NORM,
2189
+ LLM_TENSOR_OUTPUT,
2190
+ LLM_TENSOR_ATTN_NORM,
2191
+ LLM_TENSOR_ATTN_Q,
2192
+ LLM_TENSOR_ATTN_Q_NORM,
2193
+ LLM_TENSOR_ATTN_K,
2194
+ LLM_TENSOR_ATTN_K_NORM,
2195
+ LLM_TENSOR_ATTN_V,
2196
+ LLM_TENSOR_ATTN_OUT,
2197
+ LLM_TENSOR_FFN_NORM,
2198
+ LLM_TENSOR_FFN_GATE,
2199
+ LLM_TENSOR_FFN_UP,
2200
+ LLM_TENSOR_FFN_DOWN,
2201
+ LLM_TENSOR_FFN_GATE_INP,
2202
+ LLM_TENSOR_FFN_GATE_EXPS,
2203
+ LLM_TENSOR_FFN_DOWN_EXPS,
2204
+ LLM_TENSOR_FFN_UP_EXPS,
2205
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
2206
+ LLM_TENSOR_FFN_GATE_SHEXP,
2207
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2208
+ LLM_TENSOR_FFN_UP_SHEXP,
2209
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2210
+ };
2211
+ case LLM_ARCH_ERNIE4_5_MOE:
2212
+ return {
2213
+ LLM_TENSOR_TOKEN_EMBD,
2214
+ LLM_TENSOR_OUTPUT_NORM,
2215
+ LLM_TENSOR_OUTPUT,
2216
+ LLM_TENSOR_ATTN_NORM,
2217
+ LLM_TENSOR_ATTN_Q,
2218
+ LLM_TENSOR_ATTN_K,
2219
+ LLM_TENSOR_ATTN_V,
2220
+ LLM_TENSOR_ATTN_OUT,
2221
+ LLM_TENSOR_FFN_NORM,
2222
+ LLM_TENSOR_FFN_GATE,
2223
+ LLM_TENSOR_FFN_DOWN,
2224
+ LLM_TENSOR_FFN_UP,
2225
+ LLM_TENSOR_FFN_GATE_INP,
2226
+ LLM_TENSOR_FFN_GATE_SHEXP,
2227
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2228
+ LLM_TENSOR_FFN_UP_SHEXP,
2229
+ LLM_TENSOR_FFN_GATE_EXPS,
2230
+ LLM_TENSOR_FFN_DOWN_EXPS,
2231
+ LLM_TENSOR_FFN_UP_EXPS,
2232
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2233
+ };
2234
+ case LLM_ARCH_HUNYUAN_MOE:
2235
+ return {
2236
+ LLM_TENSOR_TOKEN_EMBD,
2237
+ LLM_TENSOR_OUTPUT_NORM,
2238
+ LLM_TENSOR_OUTPUT,
2239
+ LLM_TENSOR_ATTN_NORM,
2240
+ LLM_TENSOR_ATTN_Q,
2241
+ LLM_TENSOR_ATTN_Q_NORM,
2242
+ LLM_TENSOR_ATTN_K,
2243
+ LLM_TENSOR_ATTN_K_NORM,
2244
+ LLM_TENSOR_ATTN_V,
2245
+ LLM_TENSOR_ATTN_OUT,
2246
+ LLM_TENSOR_FFN_GATE_INP,
2247
+ LLM_TENSOR_FFN_NORM,
2248
+ LLM_TENSOR_FFN_GATE_SHEXP,
2249
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2250
+ LLM_TENSOR_FFN_UP_SHEXP,
2251
+ LLM_TENSOR_FFN_GATE_EXPS,
2252
+ LLM_TENSOR_FFN_DOWN_EXPS,
2253
+ LLM_TENSOR_FFN_UP_EXPS,
2254
+ };
2255
+ case LLM_ARCH_OPENAI_MOE:
2256
+ return {
2257
+ LLM_TENSOR_TOKEN_EMBD,
2258
+ LLM_TENSOR_OUTPUT_NORM,
2259
+ LLM_TENSOR_OUTPUT,
2260
+ LLM_TENSOR_ATTN_NORM,
2261
+ LLM_TENSOR_ATTN_POST_NORM,
2262
+ LLM_TENSOR_ATTN_Q,
2263
+ LLM_TENSOR_ATTN_K,
2264
+ LLM_TENSOR_ATTN_V,
2265
+ LLM_TENSOR_ATTN_OUT,
2266
+ LLM_TENSOR_ATTN_SINKS,
2267
+ LLM_TENSOR_FFN_GATE_INP,
2268
+ LLM_TENSOR_FFN_GATE_EXPS,
2269
+ LLM_TENSOR_FFN_DOWN_EXPS,
2270
+ LLM_TENSOR_FFN_UP_EXPS,
2271
+ };
2272
+ case LLM_ARCH_LFM2:
2273
+ return {
2274
+ LLM_TENSOR_ATTN_NORM,
2275
+ LLM_TENSOR_ATTN_Q,
2276
+ LLM_TENSOR_ATTN_K,
2277
+ LLM_TENSOR_ATTN_V,
2278
+ LLM_TENSOR_ATTN_OUT,
2279
+ LLM_TENSOR_ATTN_K_NORM,
2280
+ LLM_TENSOR_ATTN_Q_NORM,
2281
+ LLM_TENSOR_FFN_DOWN,
2282
+ LLM_TENSOR_FFN_GATE,
2283
+ LLM_TENSOR_FFN_NORM,
2284
+ LLM_TENSOR_FFN_UP,
2285
+ LLM_TENSOR_SHORTCONV_CONV,
2286
+ LLM_TENSOR_SHORTCONV_INPROJ,
2287
+ LLM_TENSOR_SHORTCONV_OUTPROJ,
2288
+ LLM_TENSOR_TOKEN_EMBD,
2289
+ LLM_TENSOR_OUTPUT_NORM_LFM2,
2290
+ LLM_TENSOR_OUTPUT,
2291
+ LLM_TENSOR_DENSE_2_OUT,
2292
+ };
2293
+ case LLM_ARCH_LFM2MOE:
2294
+ return {
2295
+ LLM_TENSOR_ATTN_NORM,
2296
+ LLM_TENSOR_ATTN_Q,
2297
+ LLM_TENSOR_ATTN_K,
2298
+ LLM_TENSOR_ATTN_V,
2299
+ LLM_TENSOR_ATTN_OUT,
2300
+ LLM_TENSOR_ATTN_K_NORM,
2301
+ LLM_TENSOR_ATTN_Q_NORM,
2302
+ LLM_TENSOR_FFN_DOWN,
2303
+ LLM_TENSOR_FFN_GATE,
2304
+ LLM_TENSOR_FFN_NORM,
2305
+ LLM_TENSOR_FFN_UP,
2306
+ LLM_TENSOR_SHORTCONV_CONV,
2307
+ LLM_TENSOR_SHORTCONV_INPROJ,
2308
+ LLM_TENSOR_SHORTCONV_OUTPROJ,
2309
+ LLM_TENSOR_TOKEN_EMBD,
2310
+ LLM_TENSOR_OUTPUT_NORM_LFM2,
2311
+ LLM_TENSOR_FFN_GATE_INP,
2312
+ LLM_TENSOR_FFN_GATE_EXPS,
2313
+ LLM_TENSOR_FFN_DOWN_EXPS,
2314
+ LLM_TENSOR_FFN_UP_EXPS,
2315
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2316
+ };
2317
+ case LLM_ARCH_SMALLTHINKER:
2318
+ return {
2319
+ LLM_TENSOR_TOKEN_EMBD,
2320
+ LLM_TENSOR_OUTPUT_NORM,
2321
+ LLM_TENSOR_OUTPUT,
2322
+ LLM_TENSOR_ATTN_NORM,
2323
+ LLM_TENSOR_ATTN_Q,
2324
+ LLM_TENSOR_ATTN_K,
2325
+ LLM_TENSOR_ATTN_V,
2326
+ LLM_TENSOR_ATTN_OUT,
2327
+ LLM_TENSOR_FFN_NORM,
2328
+ LLM_TENSOR_FFN_GATE,
2329
+ LLM_TENSOR_FFN_DOWN,
2330
+ LLM_TENSOR_FFN_UP,
2331
+ LLM_TENSOR_FFN_GATE_INP,
2332
+ LLM_TENSOR_FFN_GATE_EXPS,
2333
+ LLM_TENSOR_FFN_DOWN_EXPS,
2334
+ LLM_TENSOR_FFN_UP_EXPS,
2335
+ };
2336
+ case LLM_ARCH_APERTUS:
2337
+ return {
2338
+ LLM_TENSOR_TOKEN_EMBD,
2339
+ LLM_TENSOR_OUTPUT_NORM,
2340
+ LLM_TENSOR_OUTPUT,
2341
+ LLM_TENSOR_ROPE_FREQS,
2342
+ LLM_TENSOR_ATTN_NORM,
2343
+ LLM_TENSOR_ATTN_Q,
2344
+ LLM_TENSOR_ATTN_K,
2345
+ LLM_TENSOR_ATTN_V,
2346
+ LLM_TENSOR_ATTN_OUT,
2347
+ LLM_TENSOR_ATTN_Q_NORM,
2348
+ LLM_TENSOR_ATTN_K_NORM,
2349
+ LLM_TENSOR_FFN_NORM,
2350
+ LLM_TENSOR_FFN_DOWN,
2351
+ LLM_TENSOR_FFN_UP,
2352
+ };
2353
+ case LLM_ARCH_SEED_OSS:
2354
+ return {
2355
+ LLM_TENSOR_TOKEN_EMBD,
2356
+ LLM_TENSOR_OUTPUT_NORM,
2357
+ LLM_TENSOR_OUTPUT,
2358
+ LLM_TENSOR_ATTN_NORM,
2359
+ LLM_TENSOR_ATTN_Q,
2360
+ LLM_TENSOR_ATTN_K,
2361
+ LLM_TENSOR_ATTN_V,
2362
+ LLM_TENSOR_ATTN_OUT,
2363
+ LLM_TENSOR_ATTN_POST_NORM,
2364
+ LLM_TENSOR_FFN_GATE,
2365
+ LLM_TENSOR_FFN_DOWN,
2366
+ LLM_TENSOR_FFN_UP,
2367
+ };
2368
+ case LLM_ARCH_GROVEMOE:
2369
+ return {
2370
+ LLM_TENSOR_TOKEN_EMBD,
2371
+ LLM_TENSOR_OUTPUT_NORM,
2372
+ LLM_TENSOR_OUTPUT,
2373
+ LLM_TENSOR_ATTN_NORM,
2374
+ LLM_TENSOR_ATTN_Q,
2375
+ LLM_TENSOR_ATTN_Q_NORM,
2376
+ LLM_TENSOR_ATTN_K,
2377
+ LLM_TENSOR_ATTN_K_NORM,
2378
+ LLM_TENSOR_ATTN_V,
2379
+ LLM_TENSOR_ATTN_OUT,
2380
+ LLM_TENSOR_FFN_NORM,
2381
+ LLM_TENSOR_FFN_GATE_INP,
2382
+ LLM_TENSOR_FFN_GATE_EXPS,
2383
+ LLM_TENSOR_FFN_DOWN_EXPS,
2384
+ LLM_TENSOR_FFN_UP_EXPS,
2385
+ LLM_TENSOR_FFN_GATE_CHEXPS,
2386
+ LLM_TENSOR_FFN_DOWN_CHEXPS,
2387
+ LLM_TENSOR_FFN_UP_CHEXPS,
2388
+ };
2389
+ case LLM_ARCH_MINIMAX_M2:
2390
+ return {
2391
+ LLM_TENSOR_TOKEN_EMBD,
2392
+ LLM_TENSOR_OUTPUT_NORM,
2393
+ LLM_TENSOR_OUTPUT,
2394
+ LLM_TENSOR_ATTN_NORM,
2395
+ LLM_TENSOR_ATTN_Q,
2396
+ LLM_TENSOR_ATTN_K,
2397
+ LLM_TENSOR_ATTN_V,
2398
+ LLM_TENSOR_ATTN_OUT,
2399
+ LLM_TENSOR_ATTN_Q_NORM,
2400
+ LLM_TENSOR_ATTN_K_NORM,
2401
+ LLM_TENSOR_FFN_NORM,
2402
+ LLM_TENSOR_FFN_GATE_INP,
2403
+ LLM_TENSOR_FFN_GATE_EXPS,
2404
+ LLM_TENSOR_FFN_DOWN_EXPS,
2405
+ LLM_TENSOR_FFN_UP_EXPS,
2406
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2407
+ };
2408
+ case LLM_ARCH_COGVLM:
2409
+ return {
2410
+ LLM_TENSOR_TOKEN_EMBD,
2411
+ LLM_TENSOR_OUTPUT_NORM,
2412
+ LLM_TENSOR_OUTPUT,
2413
+ LLM_TENSOR_ATTN_NORM,
2414
+ LLM_TENSOR_ATTN_QKV,
2415
+ LLM_TENSOR_ATTN_OUT,
2416
+ LLM_TENSOR_FFN_NORM,
2417
+ LLM_TENSOR_FFN_GATE,
2418
+ LLM_TENSOR_FFN_DOWN,
2419
+ LLM_TENSOR_FFN_UP,
2420
+ LLM_TENSOR_VISEXP_ATTN_QKV,
2421
+ LLM_TENSOR_VISEXP_ATTN_OUT,
2422
+ LLM_TENSOR_VISEXP_FFN_GATE,
2423
+ LLM_TENSOR_VISEXP_FFN_DOWN,
2424
+ LLM_TENSOR_VISEXP_FFN_UP,
2425
+ };
2426
+ case LLM_ARCH_MIMO2:
2427
+ return {
2428
+ LLM_TENSOR_TOKEN_EMBD,
2429
+ LLM_TENSOR_OUTPUT_NORM,
2430
+ LLM_TENSOR_OUTPUT,
2431
+ LLM_TENSOR_ATTN_NORM,
2432
+ LLM_TENSOR_ATTN_Q,
2433
+ LLM_TENSOR_ATTN_K,
2434
+ LLM_TENSOR_ATTN_V,
2435
+ LLM_TENSOR_ATTN_SINKS,
2436
+ LLM_TENSOR_ATTN_OUT,
2437
+ LLM_TENSOR_FFN_NORM,
2438
+ LLM_TENSOR_FFN_GATE,
2439
+ LLM_TENSOR_FFN_DOWN,
2440
+ LLM_TENSOR_FFN_UP,
2441
+ LLM_TENSOR_FFN_GATE_INP,
2442
+ LLM_TENSOR_FFN_GATE_EXPS,
2443
+ LLM_TENSOR_FFN_DOWN_EXPS,
2444
+ LLM_TENSOR_FFN_UP_EXPS,
2445
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2446
+ };
2447
+ case LLM_ARCH_STEP35:
2448
+ return {
2449
+ LLM_TENSOR_TOKEN_EMBD,
2450
+ LLM_TENSOR_OUTPUT_NORM,
2451
+ LLM_TENSOR_OUTPUT,
2452
+ LLM_TENSOR_ROPE_FREQS,
2453
+ LLM_TENSOR_ROPE_FACTORS_LONG,
2454
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
2455
+ LLM_TENSOR_ATTN_NORM,
2456
+ LLM_TENSOR_ATTN_Q,
2457
+ LLM_TENSOR_ATTN_Q_NORM,
2458
+ LLM_TENSOR_ATTN_K,
2459
+ LLM_TENSOR_ATTN_K_NORM,
2460
+ LLM_TENSOR_ATTN_V,
2461
+ LLM_TENSOR_ATTN_GATE,
2462
+ LLM_TENSOR_ATTN_OUT,
2463
+ LLM_TENSOR_FFN_NORM,
2464
+ LLM_TENSOR_FFN_GATE,
2465
+ LLM_TENSOR_FFN_DOWN,
2466
+ LLM_TENSOR_FFN_UP,
2467
+ LLM_TENSOR_FFN_GATE_INP,
2468
+ LLM_TENSOR_FFN_GATE_EXPS,
2469
+ LLM_TENSOR_FFN_DOWN_EXPS,
2470
+ LLM_TENSOR_FFN_UP_EXPS,
2471
+ LLM_TENSOR_FFN_GATE_SHEXP,
2472
+ LLM_TENSOR_FFN_UP_SHEXP,
2473
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2474
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2475
+ };
2476
+ case LLM_ARCH_GPTJ:
2477
+ case LLM_ARCH_UNKNOWN:
2478
+ return {
2479
+ LLM_TENSOR_TOKEN_EMBD,
2480
+ };
2481
+ case LLM_ARCH_MAINCODER:
2482
+ return {
2483
+ LLM_TENSOR_TOKEN_EMBD,
2484
+ LLM_TENSOR_OUTPUT_NORM,
2485
+ LLM_TENSOR_OUTPUT,
2486
+ LLM_TENSOR_ATTN_NORM,
2487
+ LLM_TENSOR_ATTN_Q,
2488
+ LLM_TENSOR_ATTN_Q_NORM,
2489
+ LLM_TENSOR_ATTN_K,
2490
+ LLM_TENSOR_ATTN_K_NORM,
2491
+ LLM_TENSOR_ATTN_V,
2492
+ LLM_TENSOR_ATTN_OUT,
2493
+ LLM_TENSOR_FFN_NORM,
2494
+ LLM_TENSOR_FFN_GATE,
2495
+ LLM_TENSOR_FFN_DOWN,
2496
+ LLM_TENSOR_FFN_UP,
2497
+ };
2498
+ case LLM_ARCH_KIMI_LINEAR:
2499
+ return {
2500
+ LLM_TENSOR_TOKEN_EMBD,
2501
+ LLM_TENSOR_OUTPUT_NORM,
2502
+ LLM_TENSOR_OUTPUT,
2503
+ LLM_TENSOR_ROPE_FREQS,
2504
+ LLM_TENSOR_ATTN_NORM,
2505
+ LLM_TENSOR_ATTN_Q,
2506
+ LLM_TENSOR_ATTN_K,
2507
+ LLM_TENSOR_ATTN_V,
2508
+ LLM_TENSOR_ATTN_OUT,
2509
+ LLM_TENSOR_FFN_NORM,
2510
+ // Dense FFN (layer 0 only)
2511
+ LLM_TENSOR_FFN_GATE,
2512
+ LLM_TENSOR_FFN_DOWN,
2513
+ LLM_TENSOR_FFN_UP,
2514
+ // MoE FFN (layers 1+)
2515
+ LLM_TENSOR_FFN_GATE_INP,
2516
+ LLM_TENSOR_FFN_GATE_EXPS,
2517
+ LLM_TENSOR_FFN_DOWN_EXPS,
2518
+ LLM_TENSOR_FFN_UP_EXPS,
2519
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2520
+ // Shared experts
2521
+ LLM_TENSOR_FFN_GATE_SHEXP,
2522
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2523
+ LLM_TENSOR_FFN_UP_SHEXP,
2524
+ // KDA (using SSM_ enum prefix, keeping GGUF names for backward compat)
2525
+ LLM_TENSOR_SSM_CONV1D_Q,
2526
+ LLM_TENSOR_SSM_CONV1D_K,
2527
+ LLM_TENSOR_SSM_CONV1D_V,
2528
+ LLM_TENSOR_SSM_F_A,
2529
+ LLM_TENSOR_SSM_F_B,
2530
+ LLM_TENSOR_SSM_BETA,
2531
+ LLM_TENSOR_SSM_A,
2532
+ LLM_TENSOR_SSM_G_A,
2533
+ LLM_TENSOR_SSM_G_B,
2534
+ LLM_TENSOR_SSM_DT,
2535
+ LLM_TENSOR_SSM_NORM,
2536
+ // MLA
2537
+ LLM_TENSOR_ATTN_Q_A,
2538
+ LLM_TENSOR_ATTN_Q_B,
2539
+ LLM_TENSOR_ATTN_Q_A_NORM,
2540
+ LLM_TENSOR_ATTN_KV_A_MQA,
2541
+ LLM_TENSOR_ATTN_KV_B,
2542
+ LLM_TENSOR_ATTN_K_B,
2543
+ LLM_TENSOR_ATTN_V_B,
2544
+ LLM_TENSOR_ATTN_KV_A_NORM,
2545
+ };
2546
+ default:
2547
+ GGML_ABORT("unknown architecture for tensor mapping");
2548
+ }
2549
+ }
2550
+
2551
+ // declare information about the model weight tensors:
2552
+ // - the layer in which the tensor is going to be used. this is needed in order to assign the correct buffer type for the weight
2553
+ // - the operator which is going to use the weight. this is needed to determine if the respective backend supports the operator
2554
+ //
2555
+ // for example, input layers are usually assigned to CPU/host buffer types
2556
+ //
2557
+ // a mismatch between the declared information and the actual layer/op in which the tensor is used can lead to sub-optimal
2558
+ // assignment of the buffer types and extra overhead during computation
2559
+ // example: https://github.com/ggml-org/llama.cpp/pull/17548
2560
+ //
2224
2561
  static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2225
2562
  {LLM_TENSOR_TOKEN_EMBD, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2226
2563
  {LLM_TENSOR_POS_EMBD, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2227
- {LLM_TENSOR_TOKEN_EMBD_NORM, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2228
2564
  {LLM_TENSOR_TOKEN_TYPES, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2565
+ {LLM_TENSOR_TOKEN_EMBD_NORM, {LLM_TENSOR_LAYER_INPUT, GGML_OP_MUL}},
2229
2566
  {LLM_TENSOR_OUTPUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2230
2567
  {LLM_TENSOR_CLS, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2231
2568
  {LLM_TENSOR_CLS_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2569
+ {LLM_TENSOR_CLS_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2570
+ {LLM_TENSOR_DENSE_2_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}}, // Dense layer output
2571
+ {LLM_TENSOR_DENSE_3_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}}, // Dense layer output
2232
2572
  {LLM_TENSOR_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2573
+ {LLM_TENSOR_OUTPUT_NORM_LFM2, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2233
2574
  {LLM_TENSOR_DEC_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2234
2575
  {LLM_TENSOR_ENC_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2235
2576
  {LLM_TENSOR_ROPE_FREQS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ROPE}},
@@ -2240,6 +2581,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2240
2581
  {LLM_TENSOR_ATTN_V, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2241
2582
  {LLM_TENSOR_ATTN_QKV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2242
2583
  {LLM_TENSOR_ATTN_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2584
+ {LLM_TENSOR_ATTN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2243
2585
  {LLM_TENSOR_FFN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2244
2586
  {LLM_TENSOR_FFN_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2245
2587
  {LLM_TENSOR_FFN_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -2277,6 +2619,8 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2277
2619
  {LLM_TENSOR_SSM_X, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2278
2620
  {LLM_TENSOR_SSM_DT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2279
2621
  {LLM_TENSOR_SSM_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2622
+ {LLM_TENSOR_SSM_ALPHA, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2623
+ {LLM_TENSOR_SSM_BETA_ALPHA, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2280
2624
  {LLM_TENSOR_TIME_MIX_W1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2281
2625
  {LLM_TENSOR_TIME_MIX_W2, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2282
2626
  {LLM_TENSOR_TIME_MIX_A1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -2298,11 +2642,21 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2298
2642
  {LLM_TENSOR_FFN_ACT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_DIV}},
2299
2643
  {LLM_TENSOR_SSM_CONV1D, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
2300
2644
  {LLM_TENSOR_SSM_A, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_SCAN}},
2645
+ {LLM_TENSOR_SSM_A_NOSCAN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}}, // a version of SSM_A used for MUL instead of SSM_SCAN
2301
2646
  {LLM_TENSOR_SSM_DT_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2302
2647
  {LLM_TENSOR_SSM_B_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2303
2648
  {LLM_TENSOR_SSM_C_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2304
2649
  {LLM_TENSOR_SSM_D, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2305
2650
  {LLM_TENSOR_SSM_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2651
+ // Kimi KDA - Conv tensors are 4D [d_conv, 1, d_inner, 1], reshaped to 2D at runtime
2652
+ {LLM_TENSOR_SSM_CONV1D_Q, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2653
+ {LLM_TENSOR_SSM_CONV1D_K, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2654
+ {LLM_TENSOR_SSM_CONV1D_V, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2655
+ {LLM_TENSOR_SSM_F_A, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2656
+ {LLM_TENSOR_SSM_F_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2657
+ {LLM_TENSOR_SSM_BETA, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2658
+ {LLM_TENSOR_SSM_G_A, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2659
+ {LLM_TENSOR_SSM_G_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2306
2660
  {LLM_TENSOR_TIME_MIX_LERP_X, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2307
2661
  {LLM_TENSOR_TIME_MIX_LN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2308
2662
  {LLM_TENSOR_CHANNEL_MIX_LERP_K, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
@@ -2345,6 +2699,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2345
2699
  {LLM_TENSOR_FFN_DOWN_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2346
2700
  {LLM_TENSOR_FFN_GATE_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2347
2701
  {LLM_TENSOR_FFN_UP_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2702
+ {LLM_TENSOR_FFN_GATE_UP_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2348
2703
  {LLM_TENSOR_FFN_DOWN_CHEXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2349
2704
  {LLM_TENSOR_FFN_GATE_CHEXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2350
2705
  {LLM_TENSOR_FFN_UP_CHEXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
@@ -2387,6 +2742,15 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2387
2742
  {LLM_TENSOR_SHORTCONV_CONV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
2388
2743
  {LLM_TENSOR_SHORTCONV_INPROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2389
2744
  {LLM_TENSOR_SHORTCONV_OUTPROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2745
+ {LLM_TENSOR_VISEXP_ATTN_QKV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2746
+ {LLM_TENSOR_VISEXP_ATTN_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2747
+ {LLM_TENSOR_VISEXP_FFN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2748
+ {LLM_TENSOR_VISEXP_FFN_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2749
+ {LLM_TENSOR_VISEXP_FFN_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2750
+ {LLM_TENSOR_INDEXER_K_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2751
+ {LLM_TENSOR_INDEXER_PROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2752
+ {LLM_TENSOR_INDEXER_ATTN_K, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2753
+ {LLM_TENSOR_INDEXER_ATTN_Q_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2390
2754
  // NextN/MTP tensors are currently ignored (reserved for future MTP support)
2391
2755
  // These tensors only exist in the last layer(s) and are treated as output tensors
2392
2756
  {LLM_TENSOR_NEXTN_EH_PROJ, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
@@ -2395,6 +2759,9 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2395
2759
  {LLM_TENSOR_NEXTN_HNORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2396
2760
  {LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2397
2761
  {LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2762
+ // Nemotron 3 Super
2763
+ {LLM_TENSOR_FFN_LATENT_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2764
+ {LLM_TENSOR_FFN_LATENT_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2398
2765
  };
2399
2766
 
2400
2767
  LLM_KV::LLM_KV(llm_arch arch, const char * suffix) : arch(arch), suffix(suffix) {}
@@ -2410,13 +2777,20 @@ std::string LLM_KV::operator()(llm_kv kv) const {
2410
2777
  return name;
2411
2778
  }
2412
2779
 
2780
+ LLM_TN_IMPL::LLM_TN_IMPL(llm_arch arch, llm_tensor tensor, const char * suffix, int bid, int xid)
2781
+ : arch(arch), tensor(tensor), suffix(suffix), bid(bid), xid(xid),
2782
+ model_tensors(llm_get_tensor_names(arch)) {}
2783
+
2413
2784
  std::string LLM_TN_IMPL::str() const {
2414
- if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) {
2415
- return "__missing__";
2785
+ if (LLM_TENSOR_NAMES.find(tensor) == LLM_TENSOR_NAMES.end()) {
2786
+ GGML_ABORT("unknown tensor name for tensor id %d", static_cast<int>(tensor));
2416
2787
  }
2417
2788
 
2418
- std::string name = ::format(LLM_TENSOR_NAMES.at(arch).at(tensor), bid, xid);
2789
+ if (model_tensors.find(tensor) == model_tensors.end()) {
2790
+ return LLM_TENSOR_NAMES.at(tensor);
2791
+ }
2419
2792
 
2793
+ std::string name = ::format(LLM_TENSOR_NAMES.at(tensor), bid, xid);
2420
2794
  if (suffix != nullptr) {
2421
2795
  name += ".";
2422
2796
  name += suffix;
@@ -2425,6 +2799,15 @@ std::string LLM_TN_IMPL::str() const {
2425
2799
  return name;
2426
2800
  }
2427
2801
 
2802
+ std::vector<llm_arch> llm_arch_all() {
2803
+ std::vector<llm_arch> ret;
2804
+ ret.reserve(LLM_ARCH_NAMES.size());
2805
+ for (const auto & [arch, _] : LLM_ARCH_NAMES) {
2806
+ ret.push_back(arch);
2807
+ }
2808
+ return ret;
2809
+ }
2810
+
2428
2811
  const char * llm_arch_name(llm_arch arch) {
2429
2812
  auto it = LLM_ARCH_NAMES.find(arch);
2430
2813
  if (it == LLM_ARCH_NAMES.end()) {
@@ -2468,7 +2851,13 @@ bool llm_arch_is_hybrid(const llm_arch & arch) {
2468
2851
  case LLM_ARCH_PLAMO2:
2469
2852
  case LLM_ARCH_GRANITE_HYBRID:
2470
2853
  case LLM_ARCH_LFM2:
2854
+ case LLM_ARCH_LFM2MOE:
2471
2855
  case LLM_ARCH_NEMOTRON_H:
2856
+ case LLM_ARCH_NEMOTRON_H_MOE:
2857
+ case LLM_ARCH_QWEN3NEXT:
2858
+ case LLM_ARCH_KIMI_LINEAR:
2859
+ case LLM_ARCH_QWEN35:
2860
+ case LLM_ARCH_QWEN35MOE:
2472
2861
  return true;
2473
2862
  default:
2474
2863
  return false;
@@ -2480,6 +2869,7 @@ bool llm_arch_is_diffusion(const llm_arch & arch) {
2480
2869
  case LLM_ARCH_DREAM:
2481
2870
  case LLM_ARCH_LLADA:
2482
2871
  case LLM_ARCH_LLADA_MOE:
2872
+ case LLM_ARCH_RND1:
2483
2873
  return true;
2484
2874
  default:
2485
2875
  return false;