whispercpp 1.3.4 → 1.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (630):
  1. checksums.yaml +4 -4
  2. data/README.md +60 -43
  3. data/ext/extconf.rb +2 -2
  4. data/ext/ruby_whisper.c +14 -2
  5. data/ext/ruby_whisper.h +39 -0
  6. data/ext/ruby_whisper_context.c +22 -22
  7. data/ext/ruby_whisper_model.c +12 -12
  8. data/ext/ruby_whisper_params.c +47 -23
  9. data/ext/ruby_whisper_segment.c +84 -19
  10. data/ext/ruby_whisper_token.c +351 -0
  11. data/ext/ruby_whisper_transcribe.cpp +1 -1
  12. data/ext/ruby_whisper_vad_context.c +75 -0
  13. data/ext/ruby_whisper_vad_context_detect.cpp +50 -0
  14. data/ext/ruby_whisper_vad_segment.c +139 -0
  15. data/ext/ruby_whisper_vad_segments.c +106 -0
  16. data/ext/sources/CMakeLists.txt +4 -1
  17. data/ext/sources/bindings/javascript/package.json +1 -1
  18. data/ext/sources/cmake/arm64-apple-clang.cmake +16 -0
  19. data/ext/sources/cmake/arm64-windows-llvm.cmake +16 -0
  20. data/ext/sources/cmake/riscv64-spacemit-linux-gnu-gcc.cmake +29 -0
  21. data/ext/sources/cmake/x64-windows-llvm.cmake +5 -0
  22. data/ext/sources/examples/addon.node/vad-example.js +2 -2
  23. data/ext/sources/examples/cli/cli.cpp +121 -112
  24. data/ext/sources/examples/lsp/CMakeLists.txt +2 -1
  25. data/ext/sources/examples/quantize/CMakeLists.txt +2 -1
  26. data/ext/sources/examples/server/server.cpp +10 -11
  27. data/ext/sources/examples/talk-llama/CMakeLists.txt +5 -1
  28. data/ext/sources/examples/talk-llama/llama-adapter.cpp +12 -3
  29. data/ext/sources/examples/talk-llama/llama-adapter.h +7 -1
  30. data/ext/sources/examples/talk-llama/llama-arch.cpp +2046 -1974
  31. data/ext/sources/examples/talk-llama/llama-arch.h +67 -2
  32. data/ext/sources/examples/talk-llama/llama-batch.cpp +75 -33
  33. data/ext/sources/examples/talk-llama/llama-batch.h +17 -4
  34. data/ext/sources/examples/talk-llama/llama-chat.cpp +79 -3
  35. data/ext/sources/examples/talk-llama/llama-chat.h +4 -0
  36. data/ext/sources/examples/talk-llama/llama-context.cpp +775 -78
  37. data/ext/sources/examples/talk-llama/llama-context.h +57 -9
  38. data/ext/sources/examples/talk-llama/llama-cparams.h +1 -0
  39. data/ext/sources/examples/talk-llama/llama-grammar.cpp +288 -53
  40. data/ext/sources/examples/talk-llama/llama-grammar.h +22 -1
  41. data/ext/sources/examples/talk-llama/llama-graph.cpp +381 -64
  42. data/ext/sources/examples/talk-llama/llama-graph.h +103 -13
  43. data/ext/sources/examples/talk-llama/llama-hparams.cpp +26 -2
  44. data/ext/sources/examples/talk-llama/llama-hparams.h +41 -10
  45. data/ext/sources/examples/talk-llama/llama-impl.cpp +7 -3
  46. data/ext/sources/examples/talk-llama/llama-impl.h +1 -1
  47. data/ext/sources/examples/talk-llama/llama-kv-cache-iswa.cpp +5 -3
  48. data/ext/sources/examples/talk-llama/llama-kv-cache.cpp +145 -65
  49. data/ext/sources/examples/talk-llama/llama-kv-cache.h +22 -7
  50. data/ext/sources/examples/talk-llama/llama-kv-cells.h +44 -2
  51. data/ext/sources/examples/talk-llama/llama-memory-hybrid.cpp +12 -10
  52. data/ext/sources/examples/talk-llama/llama-memory-recurrent.cpp +32 -19
  53. data/ext/sources/examples/talk-llama/llama-memory-recurrent.h +2 -2
  54. data/ext/sources/examples/talk-llama/llama-mmap.cpp +172 -37
  55. data/ext/sources/examples/talk-llama/llama-mmap.h +8 -3
  56. data/ext/sources/examples/talk-llama/llama-model-loader.cpp +91 -9
  57. data/ext/sources/examples/talk-llama/llama-model-loader.h +6 -0
  58. data/ext/sources/examples/talk-llama/llama-model-saver.cpp +3 -0
  59. data/ext/sources/examples/talk-llama/llama-model.cpp +1529 -13134
  60. data/ext/sources/examples/talk-llama/llama-model.h +44 -3
  61. data/ext/sources/examples/talk-llama/llama-quant.cpp +8 -23
  62. data/ext/sources/examples/talk-llama/llama-sampling.cpp +1294 -198
  63. data/ext/sources/examples/talk-llama/llama-sampling.h +19 -7
  64. data/ext/sources/examples/talk-llama/llama-vocab.cpp +133 -37
  65. data/ext/sources/examples/talk-llama/llama-vocab.h +45 -40
  66. data/ext/sources/examples/talk-llama/llama.cpp +729 -2
  67. data/ext/sources/examples/talk-llama/llama.h +152 -14
  68. data/ext/sources/examples/talk-llama/models/afmoe.cpp +191 -0
  69. data/ext/sources/examples/talk-llama/models/apertus.cpp +125 -0
  70. data/ext/sources/examples/talk-llama/models/arcee.cpp +135 -0
  71. data/ext/sources/examples/talk-llama/models/arctic.cpp +138 -0
  72. data/ext/sources/examples/talk-llama/models/arwkv7.cpp +86 -0
  73. data/ext/sources/examples/talk-llama/models/baichuan.cpp +122 -0
  74. data/ext/sources/examples/talk-llama/models/bailingmoe.cpp +144 -0
  75. data/ext/sources/examples/talk-llama/models/bailingmoe2.cpp +135 -0
  76. data/ext/sources/examples/talk-llama/models/bert.cpp +178 -0
  77. data/ext/sources/examples/talk-llama/models/bitnet.cpp +160 -0
  78. data/ext/sources/examples/talk-llama/models/bloom.cpp +101 -0
  79. data/ext/sources/examples/talk-llama/models/chameleon.cpp +178 -0
  80. data/ext/sources/examples/talk-llama/models/chatglm.cpp +132 -0
  81. data/ext/sources/examples/talk-llama/models/codeshell.cpp +111 -0
  82. data/ext/sources/examples/talk-llama/models/cogvlm.cpp +102 -0
  83. data/ext/sources/examples/talk-llama/models/cohere2-iswa.cpp +134 -0
  84. data/ext/sources/examples/talk-llama/models/command-r.cpp +122 -0
  85. data/ext/sources/examples/talk-llama/models/dbrx.cpp +123 -0
  86. data/ext/sources/examples/talk-llama/models/deci.cpp +135 -0
  87. data/ext/sources/examples/talk-llama/models/deepseek.cpp +144 -0
  88. data/ext/sources/examples/talk-llama/models/deepseek2.cpp +259 -0
  89. data/ext/sources/examples/talk-llama/models/dots1.cpp +134 -0
  90. data/ext/sources/examples/talk-llama/models/dream.cpp +105 -0
  91. data/ext/sources/examples/talk-llama/models/ernie4-5-moe.cpp +150 -0
  92. data/ext/sources/examples/talk-llama/models/ernie4-5.cpp +110 -0
  93. data/ext/sources/examples/talk-llama/models/exaone.cpp +114 -0
  94. data/ext/sources/examples/talk-llama/models/exaone4.cpp +123 -0
  95. data/ext/sources/examples/talk-llama/models/falcon-h1.cpp +113 -0
  96. data/ext/sources/examples/talk-llama/models/falcon.cpp +120 -0
  97. data/ext/sources/examples/talk-llama/models/gemma-embedding.cpp +116 -0
  98. data/ext/sources/examples/talk-llama/models/gemma.cpp +112 -0
  99. data/ext/sources/examples/talk-llama/models/gemma2-iswa.cpp +128 -0
  100. data/ext/sources/examples/talk-llama/models/gemma3.cpp +155 -0
  101. data/ext/sources/examples/talk-llama/models/gemma3n-iswa.cpp +384 -0
  102. data/ext/sources/examples/talk-llama/models/glm4-moe.cpp +170 -0
  103. data/ext/sources/examples/talk-llama/models/glm4.cpp +150 -0
  104. data/ext/sources/examples/talk-llama/models/gpt2.cpp +105 -0
  105. data/ext/sources/examples/talk-llama/models/gptneox.cpp +144 -0
  106. data/ext/sources/examples/talk-llama/models/granite-hybrid.cpp +196 -0
  107. data/ext/sources/examples/talk-llama/models/granite.cpp +211 -0
  108. data/ext/sources/examples/talk-llama/models/graph-context-mamba.cpp +283 -0
  109. data/ext/sources/examples/talk-llama/models/grok.cpp +159 -0
  110. data/ext/sources/examples/talk-llama/models/grovemoe.cpp +141 -0
  111. data/ext/sources/examples/talk-llama/models/hunyuan-dense.cpp +132 -0
  112. data/ext/sources/examples/talk-llama/models/hunyuan-moe.cpp +154 -0
  113. data/ext/sources/examples/talk-llama/models/internlm2.cpp +120 -0
  114. data/ext/sources/examples/talk-llama/models/jais.cpp +86 -0
  115. data/ext/sources/examples/talk-llama/models/jamba.cpp +106 -0
  116. data/ext/sources/examples/talk-llama/models/lfm2.cpp +175 -0
  117. data/ext/sources/examples/talk-llama/models/llada-moe.cpp +122 -0
  118. data/ext/sources/examples/talk-llama/models/llada.cpp +99 -0
  119. data/ext/sources/examples/talk-llama/models/llama-iswa.cpp +178 -0
  120. data/ext/sources/examples/talk-llama/models/llama.cpp +168 -0
  121. data/ext/sources/examples/talk-llama/models/maincoder.cpp +117 -0
  122. data/ext/sources/examples/talk-llama/models/mamba.cpp +55 -0
  123. data/ext/sources/examples/talk-llama/models/mimo2-iswa.cpp +123 -0
  124. data/ext/sources/examples/talk-llama/models/minicpm3.cpp +199 -0
  125. data/ext/sources/examples/talk-llama/models/minimax-m2.cpp +124 -0
  126. data/ext/sources/examples/talk-llama/models/mistral3.cpp +160 -0
  127. data/ext/sources/examples/talk-llama/models/models.h +569 -0
  128. data/ext/sources/examples/talk-llama/models/modern-bert.cpp +116 -0
  129. data/ext/sources/examples/talk-llama/models/mpt.cpp +126 -0
  130. data/ext/sources/examples/talk-llama/models/nemotron-h.cpp +150 -0
  131. data/ext/sources/examples/talk-llama/models/nemotron.cpp +122 -0
  132. data/ext/sources/examples/talk-llama/models/neo-bert.cpp +104 -0
  133. data/ext/sources/examples/talk-llama/models/olmo.cpp +121 -0
  134. data/ext/sources/examples/talk-llama/models/olmo2.cpp +150 -0
  135. data/ext/sources/examples/talk-llama/models/olmoe.cpp +124 -0
  136. data/ext/sources/examples/talk-llama/models/openai-moe-iswa.cpp +127 -0
  137. data/ext/sources/examples/talk-llama/models/openelm.cpp +124 -0
  138. data/ext/sources/examples/talk-llama/models/orion.cpp +123 -0
  139. data/ext/sources/examples/talk-llama/models/pangu-embedded.cpp +121 -0
  140. data/ext/sources/examples/talk-llama/models/phi2.cpp +121 -0
  141. data/ext/sources/examples/talk-llama/models/phi3.cpp +152 -0
  142. data/ext/sources/examples/talk-llama/models/plamo.cpp +110 -0
  143. data/ext/sources/examples/talk-llama/models/plamo2.cpp +316 -0
  144. data/ext/sources/examples/talk-llama/models/plamo3.cpp +128 -0
  145. data/ext/sources/examples/talk-llama/models/plm.cpp +168 -0
  146. data/ext/sources/examples/talk-llama/models/qwen.cpp +108 -0
  147. data/ext/sources/examples/talk-llama/models/qwen2.cpp +126 -0
  148. data/ext/sources/examples/talk-llama/models/qwen2moe.cpp +151 -0
  149. data/ext/sources/examples/talk-llama/models/qwen2vl.cpp +117 -0
  150. data/ext/sources/examples/talk-llama/models/qwen3.cpp +117 -0
  151. data/ext/sources/examples/talk-llama/models/qwen3moe.cpp +124 -0
  152. data/ext/sources/examples/talk-llama/models/qwen3next.cpp +873 -0
  153. data/ext/sources/examples/talk-llama/models/qwen3vl-moe.cpp +149 -0
  154. data/ext/sources/examples/talk-llama/models/qwen3vl.cpp +141 -0
  155. data/ext/sources/examples/talk-llama/models/refact.cpp +94 -0
  156. data/ext/sources/examples/talk-llama/models/rnd1.cpp +126 -0
  157. data/ext/sources/examples/talk-llama/models/rwkv6-base.cpp +162 -0
  158. data/ext/sources/examples/talk-llama/models/rwkv6.cpp +94 -0
  159. data/ext/sources/examples/talk-llama/models/rwkv6qwen2.cpp +86 -0
  160. data/ext/sources/examples/talk-llama/models/rwkv7-base.cpp +135 -0
  161. data/ext/sources/examples/talk-llama/models/rwkv7.cpp +90 -0
  162. data/ext/sources/examples/talk-llama/models/seed-oss.cpp +124 -0
  163. data/ext/sources/examples/talk-llama/models/smallthinker.cpp +126 -0
  164. data/ext/sources/examples/talk-llama/models/smollm3.cpp +128 -0
  165. data/ext/sources/examples/talk-llama/models/stablelm.cpp +146 -0
  166. data/ext/sources/examples/talk-llama/models/starcoder.cpp +100 -0
  167. data/ext/sources/examples/talk-llama/models/starcoder2.cpp +121 -0
  168. data/ext/sources/examples/talk-llama/models/t5-dec.cpp +166 -0
  169. data/ext/sources/examples/talk-llama/models/t5-enc.cpp +96 -0
  170. data/ext/sources/examples/talk-llama/models/wavtokenizer-dec.cpp +149 -0
  171. data/ext/sources/examples/talk-llama/models/xverse.cpp +108 -0
  172. data/ext/sources/examples/talk-llama/unicode.cpp +102 -16
  173. data/ext/sources/examples/vad-speech-segments/CMakeLists.txt +1 -1
  174. data/ext/sources/examples/whisper.wasm/index-tmpl.html +1 -1
  175. data/ext/sources/ggml/CMakeLists.txt +82 -54
  176. data/ext/sources/ggml/include/ggml-alloc.h +9 -0
  177. data/ext/sources/ggml/include/ggml-backend.h +4 -1
  178. data/ext/sources/ggml/include/ggml-cpu.h +1 -0
  179. data/ext/sources/ggml/include/ggml-hexagon.h +19 -0
  180. data/ext/sources/ggml/include/ggml-rpc.h +8 -11
  181. data/ext/sources/ggml/include/ggml-zendnn.h +22 -0
  182. data/ext/sources/ggml/include/ggml.h +190 -12
  183. data/ext/sources/ggml/src/CMakeLists.txt +82 -11
  184. data/ext/sources/ggml/src/ggml-alloc.c +124 -41
  185. data/ext/sources/ggml/src/ggml-backend-impl.h +1 -4
  186. data/ext/sources/ggml/src/ggml-backend-reg.cpp +27 -3
  187. data/ext/sources/ggml/src/ggml-backend.cpp +71 -21
  188. data/ext/sources/ggml/src/ggml-blas/CMakeLists.txt +17 -3
  189. data/ext/sources/ggml/src/ggml-blas/ggml-blas.cpp +5 -9
  190. data/ext/sources/ggml/src/ggml-cann/acl_tensor.cpp +57 -45
  191. data/ext/sources/ggml/src/ggml-cann/acl_tensor.h +138 -47
  192. data/ext/sources/ggml/src/ggml-cann/aclnn_ops.cpp +2179 -1696
  193. data/ext/sources/ggml/src/ggml-cann/aclnn_ops.h +238 -317
  194. data/ext/sources/ggml/src/ggml-cann/common.h +283 -208
  195. data/ext/sources/ggml/src/ggml-cann/ggml-cann.cpp +626 -776
  196. data/ext/sources/ggml/src/ggml-cpu/CMakeLists.txt +156 -86
  197. data/ext/sources/ggml/src/ggml-cpu/amx/amx.cpp +1 -0
  198. data/ext/sources/ggml/src/ggml-cpu/arch/arm/cpu-feats.cpp +4 -0
  199. data/ext/sources/ggml/src/ggml-cpu/arch/arm/quants.c +428 -26
  200. data/ext/sources/ggml/src/ggml-cpu/arch/arm/repack.cpp +1004 -0
  201. data/ext/sources/ggml/src/ggml-cpu/arch/loongarch/quants.c +4 -5
  202. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/cpu-feats.cpp +38 -0
  203. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/quants.c +108 -49
  204. data/ext/sources/ggml/src/ggml-cpu/arch/s390/cpu-feats.cpp +50 -0
  205. data/ext/sources/ggml/src/ggml-cpu/arch/x86/repack.cpp +6 -6
  206. data/ext/sources/ggml/src/ggml-cpu/arch-fallback.h +50 -2
  207. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-impl.h +5 -3
  208. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.c +195 -71
  209. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.cpp +4 -0
  210. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.cpp +573 -106
  211. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.h +33 -44
  212. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp +298 -112
  213. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm-ppc.h +333 -0
  214. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.cpp +819 -125
  215. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.h +6 -0
  216. data/ext/sources/ggml/src/ggml-cpu/ops.cpp +708 -431
  217. data/ext/sources/ggml/src/ggml-cpu/ops.h +5 -4
  218. data/ext/sources/ggml/src/ggml-cpu/repack.cpp +671 -31
  219. data/ext/sources/ggml/src/ggml-cpu/repack.h +14 -0
  220. data/ext/sources/ggml/src/ggml-cpu/simd-mappings.h +41 -43
  221. data/ext/sources/ggml/src/ggml-cpu/spacemit/ime.cpp +3 -2
  222. data/ext/sources/ggml/src/ggml-cpu/unary-ops.cpp +151 -0
  223. data/ext/sources/ggml/src/ggml-cpu/unary-ops.h +7 -0
  224. data/ext/sources/ggml/src/ggml-cpu/vec.cpp +124 -1
  225. data/ext/sources/ggml/src/ggml-cpu/vec.h +261 -146
  226. data/ext/sources/ggml/src/ggml-cuda/CMakeLists.txt +72 -1
  227. data/ext/sources/ggml/src/ggml-cuda/argmax.cu +2 -2
  228. data/ext/sources/ggml/src/ggml-cuda/argsort.cu +123 -6
  229. data/ext/sources/ggml/src/ggml-cuda/argsort.cuh +16 -0
  230. data/ext/sources/ggml/src/ggml-cuda/binbcast.cu +1 -1
  231. data/ext/sources/ggml/src/ggml-cuda/common.cuh +353 -80
  232. data/ext/sources/ggml/src/ggml-cuda/convert.cuh +10 -0
  233. data/ext/sources/ggml/src/ggml-cuda/cpy-utils.cuh +1 -1
  234. data/ext/sources/ggml/src/ggml-cuda/cpy.cu +339 -246
  235. data/ext/sources/ggml/src/ggml-cuda/cpy.cuh +1 -5
  236. data/ext/sources/ggml/src/ggml-cuda/cumsum.cu +307 -0
  237. data/ext/sources/ggml/src/ggml-cuda/cumsum.cuh +5 -0
  238. data/ext/sources/ggml/src/ggml-cuda/diag.cu +77 -0
  239. data/ext/sources/ggml/src/ggml-cuda/diag.cuh +5 -0
  240. data/ext/sources/ggml/src/ggml-cuda/fattn-common.cuh +31 -21
  241. data/ext/sources/ggml/src/ggml-cuda/fattn-mma-f16.cuh +663 -596
  242. data/ext/sources/ggml/src/ggml-cuda/fattn-tile.cu +35 -741
  243. data/ext/sources/ggml/src/ggml-cuda/fattn-tile.cuh +1241 -0
  244. data/ext/sources/ggml/src/ggml-cuda/fattn-vec.cuh +30 -37
  245. data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cu +14 -13
  246. data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cuh +48 -0
  247. data/ext/sources/ggml/src/ggml-cuda/fattn.cu +83 -37
  248. data/ext/sources/ggml/src/ggml-cuda/fill.cu +37 -0
  249. data/ext/sources/ggml/src/ggml-cuda/fill.cuh +3 -0
  250. data/ext/sources/ggml/src/ggml-cuda/ggml-cuda.cu +1155 -164
  251. data/ext/sources/ggml/src/ggml-cuda/mean.cu +5 -4
  252. data/ext/sources/ggml/src/ggml-cuda/mma.cuh +741 -48
  253. data/ext/sources/ggml/src/ggml-cuda/mmf.cu +60 -12
  254. data/ext/sources/ggml/src/ggml-cuda/mmf.cuh +381 -42
  255. data/ext/sources/ggml/src/ggml-cuda/mmid.cu +164 -0
  256. data/ext/sources/ggml/src/ggml-cuda/mmid.cuh +5 -0
  257. data/ext/sources/ggml/src/ggml-cuda/mmq.cu +69 -176
  258. data/ext/sources/ggml/src/ggml-cuda/mmq.cuh +498 -171
  259. data/ext/sources/ggml/src/ggml-cuda/mmvf.cu +375 -79
  260. data/ext/sources/ggml/src/ggml-cuda/mmvf.cuh +3 -2
  261. data/ext/sources/ggml/src/ggml-cuda/mmvq.cu +241 -95
  262. data/ext/sources/ggml/src/ggml-cuda/mmvq.cuh +1 -1
  263. data/ext/sources/ggml/src/ggml-cuda/pad.cu +64 -33
  264. data/ext/sources/ggml/src/ggml-cuda/quantize.cu +151 -0
  265. data/ext/sources/ggml/src/ggml-cuda/quantize.cuh +14 -0
  266. data/ext/sources/ggml/src/ggml-cuda/rope.cu +192 -77
  267. data/ext/sources/ggml/src/ggml-cuda/rope.cuh +2 -0
  268. data/ext/sources/ggml/src/ggml-cuda/set-rows.cu +101 -47
  269. data/ext/sources/ggml/src/ggml-cuda/set.cu +39 -0
  270. data/ext/sources/ggml/src/ggml-cuda/set.cuh +7 -0
  271. data/ext/sources/ggml/src/ggml-cuda/softmax.cu +203 -6
  272. data/ext/sources/ggml/src/ggml-cuda/solve_tri.cu +275 -0
  273. data/ext/sources/ggml/src/ggml-cuda/solve_tri.cuh +3 -0
  274. data/ext/sources/ggml/src/ggml-cuda/ssm-conv.cu +14 -20
  275. data/ext/sources/ggml/src/ggml-cuda/ssm-scan.cu +49 -84
  276. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq112-dv112.cu +5 -0
  277. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq128-dv128.cu +5 -0
  278. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq256-dv256.cu +5 -0
  279. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq40-dv40.cu +5 -0
  280. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq576-dv512.cu +5 -0
  281. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq64-dv64.cu +5 -0
  282. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq72-dv72.cu +5 -0
  283. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq80-dv80.cu +5 -0
  284. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq96-dv96.cu +5 -0
  285. data/ext/sources/ggml/src/ggml-cuda/template-instances/generate_cu_files.py +19 -1
  286. data/ext/sources/ggml/src/ggml-cuda/top-k.cu +96 -0
  287. data/ext/sources/ggml/src/ggml-cuda/top-k.cuh +3 -0
  288. data/ext/sources/ggml/src/ggml-cuda/topk-moe.cu +168 -76
  289. data/ext/sources/ggml/src/ggml-cuda/topk-moe.cuh +11 -4
  290. data/ext/sources/ggml/src/ggml-cuda/tri.cu +136 -0
  291. data/ext/sources/ggml/src/ggml-cuda/tri.cuh +5 -0
  292. data/ext/sources/ggml/src/ggml-cuda/unary.cu +105 -11
  293. data/ext/sources/ggml/src/ggml-cuda/unary.cuh +36 -0
  294. data/ext/sources/ggml/src/ggml-cuda/upscale.cu +163 -7
  295. data/ext/sources/ggml/src/ggml-cuda/vendors/cuda.h +4 -0
  296. data/ext/sources/ggml/src/ggml-cuda/vendors/hip.h +12 -1
  297. data/ext/sources/ggml/src/ggml-cuda/vendors/musa.h +6 -0
  298. data/ext/sources/ggml/src/ggml-hexagon/CMakeLists.txt +80 -0
  299. data/ext/sources/ggml/src/ggml-hexagon/ggml-hexagon.cpp +3151 -0
  300. data/ext/sources/ggml/src/ggml-hexagon/htp/CMakeLists.txt +44 -0
  301. data/ext/sources/ggml/src/ggml-hexagon/htp/act-ops.c +682 -0
  302. data/ext/sources/ggml/src/ggml-hexagon/htp/binary-ops.c +360 -0
  303. data/ext/sources/ggml/src/ggml-hexagon/htp/cmake-toolchain.cmake +157 -0
  304. data/ext/sources/ggml/src/ggml-hexagon/htp/flash-attn-ops.c +566 -0
  305. data/ext/sources/ggml/src/ggml-hexagon/htp/get-rows-ops.c +112 -0
  306. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-ctx.h +35 -0
  307. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-dma.c +63 -0
  308. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-dma.h +157 -0
  309. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-msg.h +165 -0
  310. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-ops.h +92 -0
  311. data/ext/sources/ggml/src/ggml-hexagon/htp/htp_iface.idl +16 -0
  312. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-exp.c +94 -0
  313. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-inverse.c +72 -0
  314. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-sigmoid.c +49 -0
  315. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-utils.c +1020 -0
  316. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-utils.h +1353 -0
  317. data/ext/sources/ggml/src/ggml-hexagon/htp/main.c +1001 -0
  318. data/ext/sources/ggml/src/ggml-hexagon/htp/matmul-ops.c +2503 -0
  319. data/ext/sources/ggml/src/ggml-hexagon/htp/ops-utils.h +149 -0
  320. data/ext/sources/ggml/src/ggml-hexagon/htp/rope-ops.c +487 -0
  321. data/ext/sources/ggml/src/ggml-hexagon/htp/set-rows-ops.c +168 -0
  322. data/ext/sources/ggml/src/ggml-hexagon/htp/softmax-ops.c +402 -0
  323. data/ext/sources/ggml/src/ggml-hexagon/htp/unary-ops.c +287 -0
  324. data/ext/sources/ggml/src/ggml-hexagon/htp/worker-pool.c +297 -0
  325. data/ext/sources/ggml/src/ggml-hexagon/htp/worker-pool.h +57 -0
  326. data/ext/sources/ggml/src/ggml-hexagon/htp-utils.c +454 -0
  327. data/ext/sources/ggml/src/ggml-hexagon/htp-utils.h +221 -0
  328. data/ext/sources/ggml/src/ggml-hexagon/op-desc.h +153 -0
  329. data/ext/sources/ggml/src/ggml-hip/CMakeLists.txt +8 -13
  330. data/ext/sources/ggml/src/ggml-impl.h +67 -6
  331. data/ext/sources/ggml/src/ggml-metal/ggml-metal-common.cpp +2 -2
  332. data/ext/sources/ggml/src/ggml-metal/ggml-metal-context.m +29 -20
  333. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.cpp +652 -285
  334. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.h +103 -56
  335. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.m +496 -118
  336. data/ext/sources/ggml/src/ggml-metal/ggml-metal-impl.h +231 -9
  337. data/ext/sources/ggml/src/ggml-metal/ggml-metal-ops.cpp +1227 -224
  338. data/ext/sources/ggml/src/ggml-metal/ggml-metal-ops.h +12 -0
  339. data/ext/sources/ggml/src/ggml-metal/ggml-metal.cpp +14 -8
  340. data/ext/sources/ggml/src/ggml-metal/ggml-metal.metal +1972 -704
  341. data/ext/sources/ggml/src/ggml-musa/CMakeLists.txt +3 -1
  342. data/ext/sources/ggml/src/ggml-opencl/CMakeLists.txt +11 -0
  343. data/ext/sources/ggml/src/ggml-opencl/ggml-opencl.cpp +1430 -120
  344. data/ext/sources/ggml/src/ggml-opencl/kernels/cvt.cl +63 -0
  345. data/ext/sources/ggml/src/ggml-opencl/kernels/expm1.cl +82 -0
  346. data/ext/sources/ggml/src/ggml-opencl/kernels/fill.cl +17 -0
  347. data/ext/sources/ggml/src/ggml-opencl/kernels/flash_attn_f32.cl +4 -3
  348. data/ext/sources/ggml/src/ggml-opencl/kernels/gemm_moe_mxfp4_f32.cl +162 -0
  349. data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_moe_mxfp4_f32.cl +156 -0
  350. data/ext/sources/ggml/src/ggml-opencl/kernels/get_rows.cl +36 -12
  351. data/ext/sources/ggml/src/ggml-opencl/kernels/mean.cl +39 -0
  352. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f16_f32_kq_kqv.cl +273 -0
  353. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f16_f32_l4_lm.cl +24 -10
  354. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f32_f32_l4_lm.cl +24 -10
  355. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q8_0_f32_l4_lm.cl +154 -0
  356. data/ext/sources/ggml/src/ggml-opencl/kernels/pad.cl +29 -20
  357. data/ext/sources/ggml/src/ggml-opencl/kernels/rms_norm.cl +25 -10
  358. data/ext/sources/ggml/src/ggml-opencl/kernels/rope.cl +50 -24
  359. data/ext/sources/ggml/src/ggml-opencl/kernels/set_rows.cl +35 -16
  360. data/ext/sources/ggml/src/ggml-opencl/kernels/softplus.cl +88 -0
  361. data/ext/sources/ggml/src/ggml-opencl/kernels/sqr.cl +53 -0
  362. data/ext/sources/ggml/src/ggml-opencl/kernels/sqrt.cl +53 -0
  363. data/ext/sources/ggml/src/ggml-opencl/kernels/ssm_conv.cl +77 -0
  364. data/ext/sources/ggml/src/ggml-opencl/kernels/transpose.cl +13 -0
  365. data/ext/sources/ggml/src/ggml-rpc/ggml-rpc.cpp +438 -156
  366. data/ext/sources/ggml/src/ggml-sycl/CMakeLists.txt +48 -3
  367. data/ext/sources/ggml/src/ggml-sycl/add-id.cpp +77 -0
  368. data/ext/sources/ggml/src/ggml-sycl/add-id.hpp +8 -0
  369. data/ext/sources/ggml/src/ggml-sycl/backend.hpp +6 -0
  370. data/ext/sources/ggml/src/ggml-sycl/binbcast.cpp +0 -9
  371. data/ext/sources/ggml/src/ggml-sycl/binbcast.hpp +0 -6
  372. data/ext/sources/ggml/src/ggml-sycl/common.hpp +117 -15
  373. data/ext/sources/ggml/src/ggml-sycl/concat.cpp +55 -44
  374. data/ext/sources/ggml/src/ggml-sycl/convert.cpp +34 -0
  375. data/ext/sources/ggml/src/ggml-sycl/count-equal.cpp +79 -0
  376. data/ext/sources/ggml/src/ggml-sycl/count-equal.hpp +9 -0
  377. data/ext/sources/ggml/src/ggml-sycl/cpy.cpp +0 -3
  378. data/ext/sources/ggml/src/ggml-sycl/dequantize.hpp +18 -0
  379. data/ext/sources/ggml/src/ggml-sycl/dpct/helper.hpp +76 -3
  380. data/ext/sources/ggml/src/ggml-sycl/element_wise.cpp +333 -300
  381. data/ext/sources/ggml/src/ggml-sycl/element_wise.hpp +10 -2
  382. data/ext/sources/ggml/src/ggml-sycl/ggml-sycl.cpp +335 -110
  383. data/ext/sources/ggml/src/ggml-sycl/mmvq.cpp +22 -0
  384. data/ext/sources/ggml/src/ggml-sycl/norm.cpp +156 -0
  385. data/ext/sources/ggml/src/ggml-sycl/norm.hpp +2 -0
  386. data/ext/sources/ggml/src/ggml-sycl/pad.cpp +97 -0
  387. data/ext/sources/ggml/src/ggml-sycl/pad.hpp +24 -0
  388. data/ext/sources/ggml/src/ggml-sycl/pad_reflect_1d.cpp +100 -0
  389. data/ext/sources/ggml/src/ggml-sycl/pad_reflect_1d.hpp +10 -0
  390. data/ext/sources/ggml/src/ggml-sycl/presets.hpp +2 -0
  391. data/ext/sources/ggml/src/ggml-sycl/repeat_back.cpp +76 -0
  392. data/ext/sources/ggml/src/ggml-sycl/repeat_back.hpp +8 -0
  393. data/ext/sources/ggml/src/ggml-sycl/roll.cpp +122 -0
  394. data/ext/sources/ggml/src/ggml-sycl/roll.hpp +20 -0
  395. data/ext/sources/ggml/src/ggml-sycl/rope.cpp +30 -17
  396. data/ext/sources/ggml/src/ggml-sycl/set.cpp +73 -0
  397. data/ext/sources/ggml/src/ggml-sycl/set.hpp +5 -0
  398. data/ext/sources/ggml/src/ggml-sycl/softmax.cpp +327 -162
  399. data/ext/sources/ggml/src/ggml-sycl/softmax.hpp +4 -0
  400. data/ext/sources/ggml/src/ggml-sycl/ssm_conv.cpp +127 -0
  401. data/ext/sources/ggml/src/ggml-sycl/ssm_conv.hpp +5 -0
  402. data/ext/sources/ggml/src/ggml-sycl/vecdotq.hpp +58 -0
  403. data/ext/sources/ggml/src/ggml-vulkan/CMakeLists.txt +38 -18
  404. data/ext/sources/ggml/src/ggml-vulkan/ggml-vulkan.cpp +5013 -2859
  405. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/abs.comp +21 -0
  406. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/acc.comp +2 -2
  407. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add.comp +2 -2
  408. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add1.comp +28 -0
  409. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add_id.comp +1 -1
  410. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/arange.comp +20 -0
  411. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp +2 -2
  412. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp +33 -26
  413. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort_large.comp +114 -0
  414. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ceil.comp +22 -0
  415. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/clamp.comp +2 -2
  416. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/concat.comp +2 -2
  417. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/contig_copy.comp +2 -2
  418. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_dw.comp +1 -1
  419. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_mm.comp +47 -49
  420. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv_transpose_1d.comp +1 -1
  421. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy.comp +2 -2
  422. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_from_quant.comp +3 -3
  423. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_to_quant.comp +4 -4
  424. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_transpose.comp +67 -0
  425. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cos.comp +2 -2
  426. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_equal.comp +2 -2
  427. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_experts.comp +51 -0
  428. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum.comp +83 -0
  429. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum_multipass1.comp +60 -0
  430. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum_multipass2.comp +66 -0
  431. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_f32.comp +1 -1
  432. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_funcs.comp → dequant_funcs.glsl} +9 -21
  433. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_funcs_cm2.comp → dequant_funcs_cm2.glsl} +18 -4
  434. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_head.comp → dequant_head.glsl} +1 -1
  435. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_m.comp +1 -1
  436. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_s.comp +1 -1
  437. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_s.comp +1 -1
  438. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xs.comp +1 -1
  439. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xxs.comp +1 -1
  440. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_s.comp +1 -1
  441. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_xxs.comp +1 -1
  442. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_nl.comp +1 -1
  443. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_xs.comp +1 -1
  444. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_mxfp4.comp +3 -3
  445. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q2_k.comp +3 -3
  446. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q3_k.comp +1 -1
  447. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_0.comp +1 -1
  448. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_1.comp +1 -1
  449. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_k.comp +3 -3
  450. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_0.comp +1 -1
  451. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_1.comp +1 -1
  452. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_k.comp +3 -3
  453. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q6_k.comp +1 -1
  454. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q8_0.comp +1 -1
  455. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag.comp +29 -0
  456. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag_mask_inf.comp +1 -1
  457. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/div.comp +2 -2
  458. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/exp.comp +3 -3
  459. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/fill.comp +19 -0
  460. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn.comp +39 -17
  461. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{flash_attn_base.comp → flash_attn_base.glsl} +19 -1
  462. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp +45 -7
  463. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp +50 -12
  464. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_split_k_reduce.comp +1 -1
  465. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/floor.comp +22 -0
  466. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu.comp +2 -2
  467. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu_erf.comp +2 -2
  468. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu_quick.comp +2 -2
  469. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu.comp +2 -2
  470. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_erf.comp +2 -2
  471. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_quick.comp +2 -2
  472. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_binary_head.comp → generic_binary_head.glsl} +17 -2
  473. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_head.comp → generic_head.glsl} +2 -0
  474. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_unary_head.comp → generic_unary_head.glsl} +7 -0
  475. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows.comp +4 -4
  476. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows_quant.comp +3 -3
  477. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{glu_head.comp → glu_head.glsl} +1 -1
  478. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/group_norm.comp +2 -2
  479. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/hardsigmoid.comp +2 -2
  480. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/hardswish.comp +2 -2
  481. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp +19 -7
  482. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col_3d.comp +2 -3
  483. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/l2_norm.comp +2 -2
  484. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/leaky_relu.comp +2 -2
  485. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/log.comp +18 -0
  486. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul.comp +2 -2
  487. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec.comp +2 -2
  488. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{mul_mat_vec_base.comp → mul_mat_vec_base.glsl} +70 -25
  489. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iface.glsl +35 -0
  490. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_m.comp +71 -21
  491. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_s.comp +41 -25
  492. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_s.comp +2 -2
  493. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xs.comp +44 -26
  494. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xxs.comp +2 -2
  495. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_s.comp +2 -2
  496. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_xxs.comp +2 -2
  497. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_nc.comp +9 -7
  498. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_p021.comp +9 -7
  499. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp +4 -6
  500. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp +2 -2
  501. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp +4 -6
  502. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp +4 -6
  503. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp +2 -2
  504. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vecq.comp +39 -36
  505. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vecq_funcs.glsl +494 -0
  506. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp +78 -103
  507. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp +34 -23
  508. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{mul_mm_funcs.comp → mul_mm_funcs.glsl} +69 -59
  509. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_id_funcs.glsl +72 -0
  510. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq.comp +88 -228
  511. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.glsl +454 -0
  512. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_shmem_types.glsl +78 -0
  513. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/multi_add.comp +97 -13
  514. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/neg.comp +20 -0
  515. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/norm.comp +2 -2
  516. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_adamw.comp +2 -2
  517. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_sgd.comp +1 -1
  518. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pad.comp +21 -6
  519. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pool2d.comp +1 -1
  520. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/quantize_q8_1.comp +10 -10
  521. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/reglu.comp +2 -2
  522. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/relu.comp +2 -2
  523. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat.comp +2 -2
  524. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat_back.comp +2 -2
  525. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm.comp +50 -4
  526. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_back.comp +2 -2
  527. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_partials.comp +2 -2
  528. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/roll.comp +2 -2
  529. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_funcs.glsl +234 -0
  530. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.glsl +20 -0
  531. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_multi.comp +6 -50
  532. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_neox.comp +6 -33
  533. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_norm.comp +6 -33
  534. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_params.glsl +28 -0
  535. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_vision.comp +6 -39
  536. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/round.comp +29 -0
  537. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/scale.comp +2 -2
  538. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sigmoid.comp +2 -2
  539. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu.comp +2 -2
  540. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu_back.comp +2 -2
  541. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sin.comp +2 -2
  542. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max.comp +1 -1
  543. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_back.comp +2 -2
  544. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large1.comp +62 -0
  545. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large2.comp +79 -0
  546. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large3.comp +65 -0
  547. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large_common.glsl +53 -0
  548. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/softplus.comp +23 -0
  549. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/solve_tri.comp +81 -0
  550. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sqrt.comp +2 -2
  551. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/square.comp +2 -2
  552. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ssm_conv.comp +44 -0
  553. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ssm_scan.comp +124 -0
  554. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/step.comp +22 -0
  555. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sub.comp +2 -2
  556. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.comp +2 -25
  557. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.glsl +25 -0
  558. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/swiglu.comp +2 -2
  559. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/swiglu_oai.comp +2 -2
  560. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp +2 -2
  561. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/timestep_embedding.comp +1 -1
  562. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_argsort.comp +118 -0
  563. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_moe.comp +213 -0
  564. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_nary_search.comp +246 -0
  565. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tri.comp +43 -0
  566. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/trunc.comp +22 -0
  567. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{types.comp → types.glsl} +345 -26
  568. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/upscale.comp +90 -12
  569. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +335 -151
  570. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/xielu.comp +35 -0
  571. data/ext/sources/ggml/src/ggml-webgpu/CMakeLists.txt +28 -2
  572. data/ext/sources/ggml/src/ggml-webgpu/ggml-webgpu-shader-lib.hpp +169 -0
  573. data/ext/sources/ggml/src/ggml-webgpu/ggml-webgpu.cpp +1964 -435
  574. data/ext/sources/ggml/src/ggml-webgpu/pre_wgsl.hpp +778 -0
  575. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/bin_op.tmpl.wgsl +188 -0
  576. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/cpy.tmpl.wgsl +101 -0
  577. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/embed_wgsl.py +33 -10
  578. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/flash_attn.wgsl +591 -0
  579. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/get_rows.tmpl.wgsl +1 -1
  580. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/glu.tmpl.wgsl +323 -0
  581. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat.tmpl.wgsl +6 -6
  582. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_decls.tmpl +97 -0
  583. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_reg_tile.tmpl.wgsl +247 -0
  584. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_subgroup_matrix.tmpl.wgsl +302 -0
  585. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_vec.tmpl.wgsl +267 -0
  586. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rms_norm.wgsl +83 -17
  587. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rope.tmpl.wgsl +295 -0
  588. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/scale.tmpl.wgsl +90 -0
  589. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/set_rows.tmpl.wgsl +112 -0
  590. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/soft_max.tmpl.wgsl +345 -0
  591. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/unary_op.wgsl +483 -0
  592. data/ext/sources/ggml/src/ggml-zendnn/CMakeLists.txt +92 -0
  593. data/ext/sources/ggml/src/ggml-zendnn/ggml-zendnn.cpp +466 -0
  594. data/ext/sources/ggml/src/ggml.c +425 -33
  595. data/ext/sources/include/whisper.h +1 -0
  596. data/ext/sources/src/CMakeLists.txt +3 -1
  597. data/ext/sources/src/whisper.cpp +101 -35
  598. data/ext/sources/tests/CMakeLists.txt +2 -2
  599. data/ext/sources/tests/test-vad-full.cpp +4 -2
  600. data/ext/sources/tests/test-vad.cpp +1 -1
  601. data/extsources.rb +1 -0
  602. data/lib/whisper/model/uri.rb +17 -18
  603. data/sig/whisper.rbs +119 -2
  604. data/test/test_params.rb +16 -8
  605. data/test/test_segment.rb +0 -1
  606. data/test/test_token.rb +70 -0
  607. data/test/test_vad.rb +1 -1
  608. data/test/test_vad_context.rb +50 -0
  609. data/test/test_vad_segment.rb +19 -0
  610. data/test/test_vad_segments.rb +16 -0
  611. data/test/test_whisper.rb +7 -0
  612. data/whispercpp.gemspec +1 -1
  613. metadata +287 -34
  614. data/ext/sources/build-xcframework.sh +0 -571
  615. data/ext/sources/ggml/src/ggml-cann/Doxyfile +0 -2579
  616. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.comp +0 -105
  617. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp +0 -55
  618. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/add.tmpl.wgsl +0 -44
  619. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/add_in_place.tmpl.wgsl +0 -41
  620. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/cpy.wgsl +0 -60
  621. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul.tmpl.wgsl +0 -44
  622. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_in_place.tmpl.wgsl +0 -41
  623. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rms_norm_in_place.wgsl +0 -48
  624. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_bfloat16_support.comp → feature-tests/bfloat16.comp} +0 -0
  625. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_coopmat_support.comp → feature-tests/coopmat.comp} +0 -0
  626. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_coopmat2_support.comp → feature-tests/coopmat2.comp} +0 -0
  627. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_integer_dot_support.comp → feature-tests/integer_dot.comp} +0 -0
  628. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{glu_main.comp → glu_main.glsl} +0 -0
  629. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{rte.comp → rte.glsl} +0 -0
  630. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{utils.comp → utils.glsl} +0 -0
@@ -3,8 +3,10 @@
3
3
  #include "llama-impl.h"
4
4
 
5
5
  #include <map>
6
+ #include <set>
6
7
 
7
8
  static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
9
+ { LLM_ARCH_CLIP, "clip" }, // dummy, only used by llama-quantize
8
10
  { LLM_ARCH_LLAMA, "llama" },
9
11
  { LLM_ARCH_LLAMA4, "llama4" },
10
12
  { LLM_ARCH_DECI, "deci" },
@@ -18,6 +20,7 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
18
20
  { LLM_ARCH_STARCODER, "starcoder" },
19
21
  { LLM_ARCH_REFACT, "refact" },
20
22
  { LLM_ARCH_BERT, "bert" },
23
+ { LLM_ARCH_MODERN_BERT, "modern-bert" },
21
24
  { LLM_ARCH_NOMIC_BERT, "nomic-bert" },
22
25
  { LLM_ARCH_NOMIC_BERT_MOE, "nomic-bert-moe" },
23
26
  { LLM_ARCH_NEO_BERT, "neo-bert" },
@@ -31,11 +34,15 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
31
34
  { LLM_ARCH_QWEN2VL, "qwen2vl" },
32
35
  { LLM_ARCH_QWEN3, "qwen3" },
33
36
  { LLM_ARCH_QWEN3MOE, "qwen3moe" },
37
+ { LLM_ARCH_QWEN3NEXT, "qwen3next" },
38
+ { LLM_ARCH_QWEN3VL, "qwen3vl" },
39
+ { LLM_ARCH_QWEN3VLMOE, "qwen3vlmoe" },
34
40
  { LLM_ARCH_PHI2, "phi2" },
35
41
  { LLM_ARCH_PHI3, "phi3" },
36
42
  { LLM_ARCH_PHIMOE, "phimoe" },
37
43
  { LLM_ARCH_PLAMO, "plamo" },
38
44
  { LLM_ARCH_PLAMO2, "plamo2" },
45
+ { LLM_ARCH_PLAMO3, "plamo3" },
39
46
  { LLM_ARCH_CODESHELL, "codeshell" },
40
47
  { LLM_ARCH_ORION, "orion" },
41
48
  { LLM_ARCH_INTERNLM2, "internlm2" },
@@ -71,6 +78,7 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
71
78
  { LLM_ARCH_JAIS, "jais" },
72
79
  { LLM_ARCH_NEMOTRON, "nemotron" },
73
80
  { LLM_ARCH_NEMOTRON_H, "nemotron_h" },
81
+ { LLM_ARCH_NEMOTRON_H_MOE, "nemotron_h_moe" },
74
82
  { LLM_ARCH_EXAONE, "exaone" },
75
83
  { LLM_ARCH_EXAONE4, "exaone4" },
76
84
  { LLM_ARCH_RWKV6, "rwkv6" },
@@ -84,8 +92,10 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
84
92
  { LLM_ARCH_WAVTOKENIZER_DEC, "wavtokenizer-dec" },
85
93
  { LLM_ARCH_PLM, "plm" },
86
94
  { LLM_ARCH_BAILINGMOE, "bailingmoe" },
95
+ { LLM_ARCH_BAILINGMOE2, "bailingmoe2" },
87
96
  { LLM_ARCH_DOTS1, "dots1" },
88
97
  { LLM_ARCH_ARCEE, "arcee" },
98
+ { LLM_ARCH_AFMOE, "afmoe" },
89
99
  { LLM_ARCH_ERNIE4_5, "ernie4_5" },
90
100
  { LLM_ARCH_ERNIE4_5_MOE, "ernie4_5-moe" },
91
101
  { LLM_ARCH_HUNYUAN_MOE, "hunyuan-moe" },
@@ -93,33 +103,56 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
93
103
  { LLM_ARCH_SMOLLM3, "smollm3" },
94
104
  { LLM_ARCH_OPENAI_MOE, "gpt-oss" },
95
105
  { LLM_ARCH_LFM2, "lfm2" },
106
+ { LLM_ARCH_LFM2MOE, "lfm2moe" },
96
107
  { LLM_ARCH_DREAM, "dream" },
97
108
  { LLM_ARCH_SMALLTHINKER, "smallthinker" },
98
109
  { LLM_ARCH_LLADA, "llada" },
99
110
  { LLM_ARCH_LLADA_MOE, "llada-moe" },
100
111
  { LLM_ARCH_SEED_OSS, "seed_oss" },
101
112
  { LLM_ARCH_GROVEMOE, "grovemoe" },
113
+ { LLM_ARCH_APERTUS, "apertus" },
114
+ { LLM_ARCH_MINIMAX_M2, "minimax-m2" },
115
+ { LLM_ARCH_COGVLM, "cogvlm" },
116
+ { LLM_ARCH_RND1, "rnd1" },
117
+ { LLM_ARCH_PANGU_EMBED, "pangu-embedded" },
118
+ { LLM_ARCH_MISTRAL3, "mistral3" },
119
+ { LLM_ARCH_MIMO2, "mimo2" },
120
+ { LLM_ARCH_LLAMA_EMBED, "llama-embed" },
121
+ { LLM_ARCH_MAINCODER, "maincoder" },
102
122
  { LLM_ARCH_UNKNOWN, "(unknown)" },
103
123
  };
104
124
 
105
125
  static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
106
- { LLM_KV_GENERAL_TYPE, "general.type" },
107
- { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
108
- { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
109
- { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
110
- { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
111
- { LLM_KV_GENERAL_NAME, "general.name" },
112
- { LLM_KV_GENERAL_AUTHOR, "general.author" },
113
- { LLM_KV_GENERAL_VERSION, "general.version" },
114
- { LLM_KV_GENERAL_URL, "general.url" },
115
- { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
116
- { LLM_KV_GENERAL_LICENSE, "general.license" },
117
- { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
118
- { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
126
+ { LLM_KV_GENERAL_TYPE, "general.type" },
127
+ { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
128
+ { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
129
+ { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
130
+ { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
131
+ { LLM_KV_GENERAL_SAMPLING_SEQUENCE, "general.sampling.sequence" },
132
+ { LLM_KV_GENERAL_SAMPLING_TOP_K, "general.sampling.top_k" },
133
+ { LLM_KV_GENERAL_SAMPLING_TOP_P, "general.sampling.top_p" },
134
+ { LLM_KV_GENERAL_SAMPLING_MIN_P, "general.sampling.min_p" },
135
+ { LLM_KV_GENERAL_SAMPLING_XTC_PROBABILITY, "general.sampling.xtc_probability" },
136
+ { LLM_KV_GENERAL_SAMPLING_XTC_THRESHOLD, "general.sampling.xtc_threshold" },
137
+ { LLM_KV_GENERAL_SAMPLING_TEMP, "general.sampling.temp" },
138
+ { LLM_KV_GENERAL_SAMPLING_PENALTY_LAST_N, "general.sampling.penalty_last_n" },
139
+ { LLM_KV_GENERAL_SAMPLING_PENALTY_REPEAT, "general.sampling.penalty_repeat" },
140
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT, "general.sampling.mirostat" },
141
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT_TAU, "general.sampling.mirostat_tau" },
142
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT_ETA, "general.sampling.mirostat_eta" },
143
+ { LLM_KV_GENERAL_NAME, "general.name" },
144
+ { LLM_KV_GENERAL_AUTHOR, "general.author" },
145
+ { LLM_KV_GENERAL_VERSION, "general.version" },
146
+ { LLM_KV_GENERAL_URL, "general.url" },
147
+ { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
148
+ { LLM_KV_GENERAL_LICENSE, "general.license" },
149
+ { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
150
+ { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
119
151
 
120
152
  { LLM_KV_VOCAB_SIZE, "%s.vocab_size" },
121
153
  { LLM_KV_CONTEXT_LENGTH, "%s.context_length" },
122
154
  { LLM_KV_EMBEDDING_LENGTH, "%s.embedding_length" },
155
+ { LLM_KV_EMBEDDING_LENGTH_OUT, "%s.embedding_length_out" },
123
156
  { LLM_KV_FEATURES_LENGTH, "%s.features_length" },
124
157
  { LLM_KV_BLOCK_COUNT, "%s.block_count" },
125
158
  { LLM_KV_LEADING_DENSE_BLOCK_COUNT, "%s.leading_dense_block_count" },
@@ -132,6 +165,8 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
132
165
  { LLM_KV_EXPERT_COUNT, "%s.expert_count" },
133
166
  { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" },
134
167
  { LLM_KV_EXPERT_SHARED_COUNT, "%s.expert_shared_count" },
168
+ { LLM_KV_EXPERT_GROUP_COUNT, "%s.expert_group_count" },
169
+ { LLM_KV_EXPERT_GROUP_USED_COUNT, "%s.expert_group_used_count" },
135
170
  { LLM_KV_EXPERT_WEIGHTS_SCALE, "%s.expert_weights_scale" },
136
171
  { LLM_KV_EXPERT_WEIGHTS_NORM, "%s.expert_weights_norm" },
137
172
  { LLM_KV_EXPERT_GATING_FUNC, "%s.expert_gating_func" },
@@ -139,6 +174,7 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
139
174
  { LLM_KV_EXPERTS_PER_GROUP, "%s.experts_per_group" },
140
175
  { LLM_KV_MOE_EVERY_N_LAYERS, "%s.moe_every_n_layers" },
141
176
  { LLM_KV_NEXTN_PREDICT_LAYERS, "%s.nextn_predict_layers" },
177
+ { LLM_KV_NUM_DEEPSTACK_LAYERS, "%s.n_deepstack_layers" },
142
178
  { LLM_KV_POOLING_TYPE, "%s.pooling_type" },
143
179
  { LLM_KV_LOGIT_SCALE, "%s.logit_scale" },
144
180
  { LLM_KV_DECODER_START_TOKEN_ID, "%s.decoder_start_token_id" },
@@ -174,15 +210,18 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
174
210
  { LLM_KV_ATTENTION_GATE_LORA_RANK, "%s.attention.gate_lora_rank" },
175
211
  { LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT, "%s.attention.relative_buckets_count" },
176
212
  { LLM_KV_ATTENTION_SLIDING_WINDOW, "%s.attention.sliding_window" },
213
+ { LLM_KV_ATTENTION_SLIDING_WINDOW_PATTERN, "%s.attention.sliding_window_pattern" },
177
214
  { LLM_KV_ATTENTION_SCALE, "%s.attention.scale" },
178
215
  { LLM_KV_ATTENTION_OUTPUT_SCALE, "%s.attention.output_scale" },
179
216
  { LLM_KV_ATTENTION_TEMPERATURE_LENGTH, "%s.attention.temperature_length" },
217
+ { LLM_KV_ATTENTION_TEMPERATURE_SCALE, "%s.attention.temperature_scale" },
180
218
  { LLM_KV_ATTENTION_KEY_LENGTH_MLA, "%s.attention.key_length_mla" },
181
219
  { LLM_KV_ATTENTION_VALUE_LENGTH_MLA, "%s.attention.value_length_mla" },
182
220
 
183
221
  { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
184
222
  { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
185
223
  { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
224
+ { LLM_KV_ROPE_FREQ_BASE_SWA, "%s.rope.freq_base_swa" },
186
225
  { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
187
226
  { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
188
227
  { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
@@ -217,6 +256,11 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
217
256
  { LLM_KV_CLASSIFIER_OUTPUT_LABELS, "%s.classifier.output_labels" },
218
257
 
219
258
  { LLM_KV_SHORTCONV_L_CACHE, "%s.shortconv.l_cache" },
259
+ // sentence-transformers dense modules feature dims
260
+ { LLM_KV_DENSE_2_FEAT_IN, "%s.dense_2_feat_in" },
261
+ { LLM_KV_DENSE_2_FEAT_OUT, "%s.dense_2_feat_out" },
262
+ { LLM_KV_DENSE_3_FEAT_IN, "%s.dense_3_feat_in" },
263
+ { LLM_KV_DENSE_3_FEAT_OUT, "%s.dense_3_feat_out" },
220
264
 
221
265
  { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" },
222
266
  { LLM_KV_TOKENIZER_PRE, "tokenizer.ggml.pre" },
@@ -256,1980 +300,1989 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
256
300
  { LLM_KV_ADAPTER_LORA_PROMPT_PREFIX, "adapter.lora.prompt_prefix" },
257
301
  { LLM_KV_ADAPTER_ALORA_INVOCATION_TOKENS, "adapter.alora.invocation_tokens" },
258
302
 
303
+ { LLM_KV_XIELU_ALPHA_N, "xielu.alpha_n" },
304
+ { LLM_KV_XIELU_ALPHA_P, "xielu.alpha_p" },
305
+ { LLM_KV_XIELU_BETA, "xielu.beta" },
306
+ { LLM_KV_XIELU_EPS, "xielu.eps" },
307
+
259
308
  // deprecated
260
309
  { LLM_KV_TOKENIZER_PREFIX_ID, "tokenizer.ggml.prefix_token_id" },
261
310
  { LLM_KV_TOKENIZER_SUFFIX_ID, "tokenizer.ggml.suffix_token_id" },
262
311
  { LLM_KV_TOKENIZER_MIDDLE_ID, "tokenizer.ggml.middle_token_id" },
263
312
  };
264
313
 
265
- static const std::map<llm_arch, std::map<llm_tensor, const char *>> LLM_TENSOR_NAMES = {
266
- {
267
- LLM_ARCH_LLAMA,
268
- {
269
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
270
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
271
- { LLM_TENSOR_OUTPUT, "output" },
272
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
273
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
274
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
275
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
276
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
277
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
278
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
279
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
280
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
281
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
282
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
283
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
284
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
285
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
286
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
287
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
288
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
289
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
290
- },
291
- },
292
- {
293
- LLM_ARCH_ARCEE,
294
- {
295
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
296
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
297
- { LLM_TENSOR_OUTPUT, "output" },
298
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
299
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
300
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
301
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
302
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
303
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
304
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
305
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
306
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
307
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
308
- },
309
- },
310
- {
311
- LLM_ARCH_LLAMA4,
312
- {
313
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
314
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
315
- { LLM_TENSOR_OUTPUT, "output" },
316
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
317
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
318
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
319
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
320
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
321
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
322
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
323
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
324
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
325
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
326
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
327
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
328
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
329
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
330
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
331
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
332
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
333
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
334
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
335
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
336
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
337
- },
338
- },
339
- {
340
- LLM_ARCH_DECI,
341
- {
342
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
343
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
344
- { LLM_TENSOR_OUTPUT, "output" },
345
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
346
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
347
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
348
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
349
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
350
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
351
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
352
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
353
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
354
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
355
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
356
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
357
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
358
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
359
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
360
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
361
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
362
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
363
- },
364
- },
365
- {
366
- LLM_ARCH_BAICHUAN,
367
- {
368
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
369
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
370
- { LLM_TENSOR_OUTPUT, "output" },
371
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
372
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
373
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
374
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
375
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
376
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
377
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
378
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
379
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
380
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
381
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
382
- },
383
- },
384
- {
385
- LLM_ARCH_FALCON,
386
- {
387
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
388
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
389
- { LLM_TENSOR_OUTPUT, "output" },
390
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
391
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
392
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
393
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
394
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
395
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
396
- },
397
- },
398
- {
399
- LLM_ARCH_GROK,
400
- {
401
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
402
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
403
- { LLM_TENSOR_OUTPUT, "output" },
404
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
405
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
406
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
407
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
408
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
409
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
410
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
411
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
412
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
413
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
414
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
415
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
416
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
417
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
418
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
419
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
420
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
421
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
422
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
423
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
424
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
425
- },
426
- },
427
- {
428
- LLM_ARCH_GPT2,
429
- {
430
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
431
- { LLM_TENSOR_POS_EMBD, "position_embd" },
432
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
433
- { LLM_TENSOR_OUTPUT, "output" },
434
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
435
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
436
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
437
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
438
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
439
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
440
- },
441
- },
442
- {
443
- LLM_ARCH_GPTJ,
444
- {
445
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
446
- },
447
- },
448
- {
449
- LLM_ARCH_GPTNEOX,
450
- {
451
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
452
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
453
- { LLM_TENSOR_OUTPUT, "output" },
454
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
455
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
456
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
457
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
458
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
459
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
460
- },
461
- },
462
- {
463
- LLM_ARCH_MPT,
464
- {
465
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
466
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
467
- { LLM_TENSOR_OUTPUT, "output"},
468
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
469
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
470
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
471
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
472
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
473
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
474
- { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
475
- { LLM_TENSOR_POS_EMBD, "position_embd" },
476
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm"},
477
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm"},
478
- },
479
- },
480
- {
481
- LLM_ARCH_STARCODER,
482
- {
483
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
484
- { LLM_TENSOR_POS_EMBD, "position_embd" },
485
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
486
- { LLM_TENSOR_OUTPUT, "output" },
487
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
488
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
489
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
490
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
491
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
492
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
493
- },
494
- },
495
- {
496
- LLM_ARCH_REFACT,
497
- {
498
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
499
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
500
- { LLM_TENSOR_OUTPUT, "output" },
501
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
502
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
503
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
504
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
505
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
506
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
507
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
508
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
509
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
510
- },
511
- },
512
- {
513
- LLM_ARCH_BERT,
514
- {
515
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
516
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
517
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
518
- { LLM_TENSOR_POS_EMBD, "position_embd" },
519
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
520
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
521
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
522
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
523
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
524
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
525
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
526
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
527
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
528
- { LLM_TENSOR_CLS, "cls" },
529
- { LLM_TENSOR_CLS_OUT, "cls.output" },
530
- },
531
- },
532
- {
533
- LLM_ARCH_NOMIC_BERT,
534
- {
535
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
536
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
537
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
538
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
539
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
540
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
541
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
542
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
543
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
544
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
545
- },
546
- },
547
- {
548
- LLM_ARCH_NOMIC_BERT_MOE,
549
- {
550
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
551
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
552
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
553
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
554
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
555
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
556
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
557
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
558
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
559
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
560
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
561
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
562
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
563
- },
564
- },
565
- {
566
- LLM_ARCH_NEO_BERT,
567
- {
568
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
569
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
570
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
571
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
572
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
573
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
574
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
575
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
576
- { LLM_TENSOR_CLS, "cls" },
577
- { LLM_TENSOR_CLS_OUT, "cls.output" },
578
- },
579
- },
580
- {
581
- LLM_ARCH_JINA_BERT_V2,
582
- {
583
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
584
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
585
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
586
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
587
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
588
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
589
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
590
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
591
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
592
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
593
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
594
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
595
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
596
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
597
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
598
- { LLM_TENSOR_CLS, "cls" },
599
- },
600
- },
601
- {
602
- LLM_ARCH_JINA_BERT_V3,
603
- {
604
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
605
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
606
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
607
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
608
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
609
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
610
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
611
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
612
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
613
- },
614
- },
615
- {
616
- LLM_ARCH_BLOOM,
617
- {
618
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
619
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
620
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
621
- { LLM_TENSOR_OUTPUT, "output" },
622
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
623
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
624
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
625
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
626
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
627
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
628
- },
629
- },
630
- {
631
- LLM_ARCH_STABLELM,
632
- {
633
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
634
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
635
- { LLM_TENSOR_OUTPUT, "output" },
636
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
637
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
638
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
639
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
640
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
641
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
642
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
643
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
644
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
645
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
646
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
647
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
648
- },
649
- },
650
- {
651
- LLM_ARCH_QWEN,
652
- {
653
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
654
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
655
- { LLM_TENSOR_OUTPUT, "output" },
656
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
657
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
658
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
659
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
660
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
661
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
662
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
663
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
664
- },
665
- },
666
- {
667
- LLM_ARCH_QWEN2,
668
- {
669
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
670
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
671
- { LLM_TENSOR_OUTPUT, "output" },
672
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
673
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
674
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
675
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
676
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
677
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
678
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
679
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
680
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
681
- },
682
- },
683
- {
684
- LLM_ARCH_QWEN2VL,
685
- {
686
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
687
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
688
- { LLM_TENSOR_OUTPUT, "output" },
689
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
690
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
691
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
692
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
693
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
694
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
695
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
696
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
697
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
698
- },
699
- },
700
- {
701
- LLM_ARCH_QWEN2MOE,
702
- {
703
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
704
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
705
- { LLM_TENSOR_OUTPUT, "output" },
706
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
707
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
708
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
709
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
710
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
711
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
712
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
713
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
714
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
715
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
716
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
717
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
718
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
719
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
720
- },
721
- },
722
- {
723
- LLM_ARCH_QWEN3,
724
- {
725
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
726
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
727
- { LLM_TENSOR_OUTPUT, "output" },
728
- { LLM_TENSOR_CLS_OUT, "cls.output" },
729
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
730
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
731
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
732
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
733
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
734
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
735
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
736
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
737
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
738
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
739
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
740
- },
741
- },
742
- {
743
- LLM_ARCH_QWEN3MOE,
744
- {
745
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
746
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
747
- { LLM_TENSOR_OUTPUT, "output" },
748
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
749
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
750
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
751
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
752
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
753
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
754
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
755
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
756
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
757
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
758
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
759
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
760
- },
761
- },
762
- {
763
- LLM_ARCH_PHI2,
764
- {
765
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
766
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
767
- { LLM_TENSOR_OUTPUT, "output" },
768
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
769
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
770
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
771
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
772
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
773
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
774
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
775
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
776
- },
777
- },
778
- {
779
- LLM_ARCH_PHI3,
780
- {
781
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
782
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
783
- { LLM_TENSOR_OUTPUT, "output" },
784
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
785
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
786
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
787
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
788
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
789
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
790
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
791
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
792
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
793
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
794
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
795
- },
796
- },
797
- {
798
- LLM_ARCH_PHIMOE,
799
- {
800
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
801
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
802
- { LLM_TENSOR_OUTPUT, "output" },
803
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
804
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
805
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
806
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
807
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
808
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
809
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
810
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
811
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
812
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
813
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
814
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
815
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
816
- },
817
- },
818
- {
819
- LLM_ARCH_PLAMO,
820
- {
821
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
822
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
823
- { LLM_TENSOR_OUTPUT, "output" },
824
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
825
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
826
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
827
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
828
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
829
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
830
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
831
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
832
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
833
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
834
- },
835
- },
836
- {
837
- LLM_ARCH_PLAMO2,
838
- {
839
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
840
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
841
- { LLM_TENSOR_OUTPUT, "output" },
842
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
843
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
844
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
845
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
846
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
847
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
848
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
849
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
850
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
851
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
852
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
853
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
854
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
855
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
856
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
857
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
858
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
859
- { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
860
- { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
861
- { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
862
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
863
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
864
- },
865
- },
866
- {
867
- LLM_ARCH_CODESHELL,
868
- {
869
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
870
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
871
- { LLM_TENSOR_OUTPUT, "output" },
872
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
873
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
874
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
875
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
876
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
877
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
878
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
879
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
880
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
881
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
882
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
883
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
884
- },
885
- },
886
- {
887
- LLM_ARCH_ORION,
888
- {
889
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
890
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
891
- { LLM_TENSOR_OUTPUT, "output" },
892
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
893
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
894
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
895
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
896
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
897
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
898
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
899
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
900
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
901
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
902
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
903
- },
904
- },
905
- {
906
- LLM_ARCH_INTERNLM2,
907
- {
908
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
909
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
910
- { LLM_TENSOR_OUTPUT, "output" },
911
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
912
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
913
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
914
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
915
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
916
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
917
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
918
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
919
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
920
- },
921
- },
922
- {
923
- LLM_ARCH_MINICPM,
924
- {
925
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
926
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
927
- { LLM_TENSOR_OUTPUT, "output" },
928
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
929
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
930
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
931
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
932
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
933
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
934
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
935
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
936
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
937
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
938
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
939
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
940
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
941
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
942
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
943
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
944
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
945
- },
946
- },
947
- {
948
- LLM_ARCH_MINICPM3,
949
- {
950
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
951
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
952
- { LLM_TENSOR_OUTPUT, "output" },
953
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
954
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
955
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
956
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
957
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
958
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
959
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
960
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
961
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
962
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
963
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
964
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
965
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
966
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
967
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
968
- },
969
- },
970
- {
971
- LLM_ARCH_GEMMA,
972
- {
973
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
974
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
975
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
976
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
977
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
978
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
979
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
980
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
981
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
982
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
983
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
984
- },
985
- },
986
- {
987
- LLM_ARCH_GEMMA2,
988
- {
989
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
990
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
991
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
992
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
993
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
994
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
995
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
996
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
997
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
998
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
999
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1000
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1001
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1002
- },
1003
- },
1004
- {
1005
- LLM_ARCH_GEMMA3,
1006
- {
1007
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1008
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1009
- { LLM_TENSOR_OUTPUT, "output" },
1010
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1011
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1012
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1013
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1014
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1015
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1016
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1017
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1018
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1019
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1020
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1021
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1022
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1023
- },
1024
- },
1025
- {
1026
- LLM_ARCH_GEMMA3N,
1027
- {
1028
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1029
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1030
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1031
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1032
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1033
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1034
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1035
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1036
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1037
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1038
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1039
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1040
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1041
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1042
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1043
- { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
1044
- { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
1045
- { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
1046
- { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
1047
- { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
1048
- { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
1049
- { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
1050
- { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
1051
- { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
1052
- { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
1053
- { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
1054
- { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
1055
- { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
1056
- { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
1057
- { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
1058
- { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
1059
- },
1060
- },
1061
- {
1062
- LLM_ARCH_GEMMA_EMBEDDING,
1063
- {
1064
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1065
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1066
- { LLM_TENSOR_OUTPUT, "output" },
1067
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1068
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1069
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1070
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1071
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1072
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1073
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1074
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1075
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1076
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1077
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1078
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1079
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1080
- },
1081
- },
1082
- {
1083
- LLM_ARCH_STARCODER2,
1084
- {
1085
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1086
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1087
- { LLM_TENSOR_OUTPUT, "output" },
1088
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1089
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1090
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1091
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1092
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1093
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1094
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1095
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1096
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1097
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1098
- },
1099
- },
1100
- {
1101
- LLM_ARCH_MAMBA,
1102
- {
1103
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1104
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1105
- { LLM_TENSOR_OUTPUT, "output" },
1106
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1107
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1108
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1109
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
1110
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1111
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1112
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1113
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1114
- },
1115
- },
1116
- {
1117
- LLM_ARCH_MAMBA2,
1118
- {
1119
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1120
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1121
- { LLM_TENSOR_OUTPUT, "output" },
1122
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1123
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1124
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1125
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1126
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1127
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1128
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1129
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1130
- },
1131
- },
1132
- {
1133
- LLM_ARCH_JAMBA,
1134
- {
1135
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1136
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1137
- { LLM_TENSOR_OUTPUT, "output" },
1138
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1139
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1140
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1141
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
1142
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1143
- { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
1144
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1145
- { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
1146
- { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
1147
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1148
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1149
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1150
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1151
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1152
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1153
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1154
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1155
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1156
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1157
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1158
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1159
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1160
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1161
- },
1162
- },
1163
- {
1164
- LLM_ARCH_FALCON_H1,
1165
- {
1166
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1167
- { LLM_TENSOR_OUTPUT, "output" },
1168
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1169
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1170
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1171
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1172
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1173
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1174
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1175
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1176
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1177
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1178
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1179
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1180
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1181
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1182
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1183
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1184
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1185
- },
1186
- },
1187
- {
1188
- LLM_ARCH_XVERSE,
1189
- {
1190
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1191
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1192
- { LLM_TENSOR_OUTPUT, "output" },
1193
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1194
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1195
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1196
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1197
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1198
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1199
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1200
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1201
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1202
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1203
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1204
- },
1205
- },
1206
- {
1207
- LLM_ARCH_COMMAND_R,
1208
- {
1209
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1210
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1211
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1212
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1213
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1214
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1215
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1216
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1217
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1218
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1219
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1220
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1221
- },
1222
- },
1223
- {
1224
- LLM_ARCH_COHERE2,
1225
- {
1226
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1227
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1228
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1229
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1230
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1231
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1232
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1233
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1234
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1235
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1236
- },
1237
- },
1238
- {
1239
- LLM_ARCH_DBRX,
1240
- {
1241
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1242
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1243
- { LLM_TENSOR_OUTPUT, "output" },
1244
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1245
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1246
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1247
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
1248
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1249
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1250
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1251
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1252
- },
1253
- },
1254
- {
1255
- LLM_ARCH_OLMO,
1256
- {
1257
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1258
- { LLM_TENSOR_OUTPUT, "output" },
1259
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1260
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1261
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1262
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1263
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1264
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1265
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1266
- },
1267
- },
1268
- {
1269
- LLM_ARCH_OLMO2,
1270
- {
1271
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1272
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1273
- { LLM_TENSOR_OUTPUT, "output" },
1274
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1275
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1276
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1277
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1278
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1279
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1280
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1281
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1282
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1283
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1284
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1285
- },
1286
- },
1287
- {
1288
- LLM_ARCH_OLMOE,
1289
- {
1290
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1291
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1292
- { LLM_TENSOR_OUTPUT, "output" },
1293
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1294
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1295
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1296
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1297
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1298
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1299
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1300
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1301
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1302
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1303
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1304
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1305
- },
1306
- },
1307
- {
1308
- LLM_ARCH_OPENELM,
1309
- {
1310
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1311
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1312
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1313
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1314
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1315
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1316
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1317
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1318
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1319
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1320
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1321
- },
1322
- },
1323
- {
1324
- LLM_ARCH_ARCTIC,
1325
- {
1326
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1327
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1328
- { LLM_TENSOR_OUTPUT, "output" },
1329
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1330
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1331
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1332
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1333
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1334
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1335
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1336
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1337
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1338
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1339
- { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
1340
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1341
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1342
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1343
- },
1344
- },
1345
- {
1346
- LLM_ARCH_DEEPSEEK,
1347
- {
1348
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1349
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1350
- { LLM_TENSOR_OUTPUT, "output" },
1351
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1352
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1353
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1354
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1355
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1356
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1357
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1358
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1359
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1360
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1361
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1362
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1363
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1364
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1365
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1366
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1367
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1368
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1369
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1370
- },
1371
- },
1372
- {
1373
- LLM_ARCH_DEEPSEEK2,
1374
- {
1375
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1376
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1377
- { LLM_TENSOR_OUTPUT, "output" },
1378
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1379
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
1380
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
1381
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1382
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
1383
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
1384
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
1385
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
1386
- { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
1387
- { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
1388
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1389
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1390
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1391
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1392
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1393
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1394
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1395
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1396
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1397
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1398
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1399
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1400
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1401
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1402
- },
1403
- },
1404
- {
1405
- LLM_ARCH_PLM,
1406
- {
1407
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1408
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1409
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1410
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1411
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
1412
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
1413
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
1414
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1415
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1416
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1417
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1418
- },
1419
- },
1420
- {
1421
- LLM_ARCH_CHATGLM,
1422
- {
1423
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1424
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1425
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1426
- { LLM_TENSOR_OUTPUT, "output" },
1427
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1428
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1429
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1430
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1431
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1432
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1433
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1434
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1435
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1436
- },
1437
- },
1438
- {
1439
- LLM_ARCH_GLM4,
1440
- {
1441
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1442
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1443
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1444
- { LLM_TENSOR_OUTPUT, "output" },
1445
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1446
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1447
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1448
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1449
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1450
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1451
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1452
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1453
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1454
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1455
- },
1456
- },
1457
- {
1458
- LLM_ARCH_GLM4_MOE,
1459
- {
1460
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1461
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1462
- { LLM_TENSOR_OUTPUT, "output" },
1463
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1464
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1465
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1466
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1467
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1468
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1469
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1470
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1471
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1472
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1473
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1474
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1475
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1476
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1477
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1478
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1479
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1480
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1481
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1482
- // NextN/MTP tensors - preserved but unused (in final layer, dynamic layer number)
1483
- { LLM_TENSOR_NEXTN_EH_PROJ, "blk.%d.nextn.eh_proj" },
1484
- { LLM_TENSOR_NEXTN_EMBED_TOKENS, "blk.%d.nextn.embed_tokens" },
1485
- { LLM_TENSOR_NEXTN_ENORM, "blk.%d.nextn.enorm" },
1486
- { LLM_TENSOR_NEXTN_HNORM, "blk.%d.nextn.hnorm" },
1487
- { LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, "blk.%d.nextn.shared_head_head" },
1488
- { LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, "blk.%d.nextn.shared_head_norm" },
1489
- },
1490
- },
1491
- {
1492
- LLM_ARCH_BITNET,
1493
- {
1494
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1495
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1496
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1497
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1498
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1499
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1500
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1501
- { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
1502
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1503
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1504
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1505
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1506
- { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
1507
- },
1508
- },
1509
- {
1510
- LLM_ARCH_T5,
1511
- {
1512
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1513
- { LLM_TENSOR_OUTPUT, "output" },
1514
- { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
1515
- { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
1516
- { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
1517
- { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
1518
- { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
1519
- { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
1520
- { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
1521
- { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
1522
- { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
1523
- { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
1524
- { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
1525
- { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
1526
- { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
1527
- { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
1528
- { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
1529
- { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
1530
- { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
1531
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
1532
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
1533
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
1534
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
1535
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
1536
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
1537
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
1538
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
1539
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
1540
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
1541
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
1542
- },
1543
- },
1544
- {
1545
- LLM_ARCH_T5ENCODER,
1546
- {
1547
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1548
- { LLM_TENSOR_OUTPUT, "output" },
1549
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
1550
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
1551
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
1552
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
1553
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
1554
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
1555
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
1556
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
1557
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
1558
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
1559
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
1560
- },
1561
- },
1562
- {
1563
- LLM_ARCH_JAIS,
1564
- {
1565
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1566
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1567
- { LLM_TENSOR_OUTPUT, "output" },
1568
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1569
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1570
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1571
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1572
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1573
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1574
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1575
- },
1576
- },
1577
- {
1578
- LLM_ARCH_NEMOTRON,
1579
- {
1580
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1581
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1582
- { LLM_TENSOR_OUTPUT, "output" },
1583
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1584
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1585
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1586
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1587
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1588
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1589
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1590
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1591
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1592
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1593
- },
1594
- },
1595
- {
1596
- LLM_ARCH_NEMOTRON_H,
1597
- {
1598
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1599
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1600
- { LLM_TENSOR_OUTPUT, "output" },
1601
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1602
- // mamba(2) ssm layers
1603
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1604
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1605
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1606
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1607
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1608
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1609
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1610
- // attention layers
1611
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1612
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1613
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1614
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1615
- // dense FFN
1616
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1617
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1618
- },
1619
- },
1620
- {
1621
- LLM_ARCH_EXAONE,
1622
- {
1623
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1624
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1625
- { LLM_TENSOR_OUTPUT, "output" },
1626
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1627
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1628
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1629
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1630
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1631
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1632
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1633
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1634
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1635
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1636
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1637
- },
1638
- },
1639
- {
1640
- LLM_ARCH_EXAONE4,
1641
- {
1642
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1643
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1644
- { LLM_TENSOR_OUTPUT, "output" },
1645
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1646
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1647
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1648
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1649
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1650
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1651
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1652
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1653
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1654
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1655
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1656
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1657
- }
1658
- },
1659
- {
1660
- LLM_ARCH_RWKV6,
1661
- {
1662
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1663
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1664
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1665
- { LLM_TENSOR_OUTPUT, "output" },
1666
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1667
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
1668
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1669
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1670
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
1671
- { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
1672
- { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
1673
- { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
1674
- { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
1675
- { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
1676
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1677
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
1678
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
1679
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
1680
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
1681
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1682
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1683
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1684
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
1685
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1686
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1687
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
1688
- { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
1689
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
1690
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
1691
- { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
1692
- },
1693
- },
1694
- {
1695
- LLM_ARCH_RWKV6QWEN2,
1696
- {
1697
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1698
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1699
- { LLM_TENSOR_OUTPUT, "output" },
1700
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1701
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1702
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1703
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
1704
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1705
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
1706
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
1707
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
1708
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
1709
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1710
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1711
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1712
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
1713
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1714
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1715
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1716
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1717
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1718
- },
1719
- },
1720
- {
1721
- LLM_ARCH_RWKV7,
1722
- {
1723
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1724
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1725
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1726
- { LLM_TENSOR_OUTPUT, "output" },
1727
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1728
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
1729
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
1730
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1731
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1732
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
1733
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
1734
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
1735
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
1736
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
1737
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
1738
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
1739
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
1740
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
1741
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
1742
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
1743
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1744
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1745
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1746
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1747
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1748
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1749
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
1750
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
1751
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
1752
- },
1753
- },
1754
- {
1755
- LLM_ARCH_ARWKV7,
1756
- {
1757
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1758
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1759
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1760
- { LLM_TENSOR_OUTPUT, "output" },
1761
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1762
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
1763
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1764
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1765
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
1766
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
1767
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
1768
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
1769
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
1770
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
1771
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
1772
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
1773
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
1774
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
1775
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
1776
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1777
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1778
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1779
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1780
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1781
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1782
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1783
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1784
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1785
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1786
- },
1787
- },
1788
- {
1789
- LLM_ARCH_GRANITE,
1790
- {
1791
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1792
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1793
- { LLM_TENSOR_OUTPUT, "output" },
1794
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1795
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1796
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1797
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1798
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1799
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1800
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1801
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1802
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1803
- },
1804
- },
1805
- {
1806
- LLM_ARCH_GRANITE_MOE,
1807
- {
1808
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1809
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1810
- { LLM_TENSOR_OUTPUT, "output" },
1811
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1812
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1813
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1814
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1815
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1816
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1817
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1818
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1819
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1820
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1821
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1822
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1823
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1824
- },
1825
- },
1826
- {
1827
- LLM_ARCH_GRANITE_HYBRID,
1828
- {
1829
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1830
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1831
- { LLM_TENSOR_OUTPUT, "output" },
1832
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1833
- // mamba(2) ssm layers
1834
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
1835
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1836
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1837
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1838
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1839
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
1840
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1841
- // attention layers
1842
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1843
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1844
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1845
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1846
- // dense FFN
1847
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1848
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1849
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1850
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1851
- // moe FFN
1852
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1853
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1854
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1855
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1856
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1857
- // shared expert
1858
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1859
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1860
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1861
- },
1862
- },
1863
- {
1864
- LLM_ARCH_CHAMELEON,
1865
- {
1866
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1867
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1868
- { LLM_TENSOR_OUTPUT, "output" },
1869
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1870
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1871
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1872
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1873
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1874
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1875
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1876
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1877
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1878
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1879
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1880
- },
1881
- },
1882
- {
1883
- LLM_ARCH_WAVTOKENIZER_DEC,
1884
- {
1885
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1886
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1887
- { LLM_TENSOR_CONV1D, "conv1d" },
1888
- { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
1889
- { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
1890
- { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
1891
- { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
1892
- { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
1893
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1894
- { LLM_TENSOR_OUTPUT, "output" },
1895
- { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
1896
- { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
1897
- { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
1898
- { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
1899
- { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
1900
- { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
1901
- { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
1902
- { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
1903
- { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
1904
- { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
1905
- },
1906
- },
1907
- {
1908
- LLM_ARCH_BAILINGMOE,
1909
- {
1910
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1911
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1912
- { LLM_TENSOR_OUTPUT, "output" },
1913
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1914
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1915
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1916
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1917
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1918
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1919
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1920
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1921
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1922
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1923
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1924
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1925
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1926
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1927
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1928
- },
1929
- },
1930
- {
1931
- LLM_ARCH_DOTS1,
1932
- {
1933
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1934
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1935
- { LLM_TENSOR_OUTPUT, "output" },
1936
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1937
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1938
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1939
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1940
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1941
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1942
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1943
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1944
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1945
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1946
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1947
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1948
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1949
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1950
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1951
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1952
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1953
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1954
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1955
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1956
- }
1957
- },
1958
- {
1959
- LLM_ARCH_ERNIE4_5,
1960
- {
1961
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1962
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1963
- { LLM_TENSOR_OUTPUT, "output" },
1964
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1965
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1966
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1967
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1968
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1969
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1970
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1971
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1972
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1973
- },
1974
- },
1975
- {
1976
- LLM_ARCH_ERNIE4_5_MOE,
1977
- {
1978
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1979
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1980
- { LLM_TENSOR_OUTPUT, "output" },
1981
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1982
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1983
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1984
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1985
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1986
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1987
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1988
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1989
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1990
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1991
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1992
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1993
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1994
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1995
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1996
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1997
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1998
- },
1999
- },
2000
- {
2001
- LLM_ARCH_HUNYUAN_MOE,
2002
- {
2003
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2004
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2005
- { LLM_TENSOR_OUTPUT, "output" },
2006
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2007
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2008
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2009
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2010
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2011
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2012
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2013
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2014
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2015
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
2016
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
2017
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
2018
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2019
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2020
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2021
- },
2022
- },
2023
- {
2024
- LLM_ARCH_HUNYUAN_DENSE,
2025
- {
2026
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2027
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2028
- { LLM_TENSOR_OUTPUT, "output" },
2029
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2030
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2031
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2032
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2033
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2034
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2035
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2036
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2037
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2038
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2039
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2040
-
2041
- },
2042
- },
2043
- {
2044
- LLM_ARCH_SMOLLM3,
2045
- {
2046
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2047
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2048
- { LLM_TENSOR_OUTPUT, "output" },
2049
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2050
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2051
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2052
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2053
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2054
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2055
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2056
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2057
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2058
- },
2059
- },
2060
- {
2061
- LLM_ARCH_OPENAI_MOE,
2062
- {
2063
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2064
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2065
- { LLM_TENSOR_OUTPUT, "output" },
2066
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2067
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
2068
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2069
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2070
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2071
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2072
- { LLM_TENSOR_ATTN_SINKS, "blk.%d.attn_sinks" },
2073
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2074
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2075
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2076
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2077
- },
2078
- },
2079
- {
2080
- LLM_ARCH_LFM2,
2081
- {
2082
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2083
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2084
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2085
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2086
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2087
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2088
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2089
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2090
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2091
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2092
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2093
- { LLM_TENSOR_SHORTCONV_CONV, "blk.%d.shortconv.conv" },
2094
- { LLM_TENSOR_SHORTCONV_INPROJ, "blk.%d.shortconv.in_proj" },
2095
- { LLM_TENSOR_SHORTCONV_OUTPROJ, "blk.%d.shortconv.out_proj" },
2096
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2097
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
2098
- { LLM_TENSOR_OUTPUT, "output" },
2099
- }
2100
- },
2101
- {
2102
- LLM_ARCH_SMALLTHINKER,
2103
- {
2104
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2105
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2106
- { LLM_TENSOR_OUTPUT, "output" },
2107
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2108
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2109
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2110
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2111
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2112
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2113
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2114
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2115
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2116
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2117
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2118
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2119
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" }
2120
- },
2121
- },
2122
- {
2123
- LLM_ARCH_DREAM,
2124
- {
2125
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2126
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2127
- { LLM_TENSOR_OUTPUT, "output" },
2128
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2129
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2130
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2131
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2132
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2133
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2134
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2135
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2136
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2137
- },
2138
- },
2139
- {
2140
- LLM_ARCH_LLADA,
2141
- {
2142
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2143
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2144
- { LLM_TENSOR_OUTPUT, "output" },
2145
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2146
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2147
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2148
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2149
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2150
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2151
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2152
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2153
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2154
- },
2155
- },
2156
- {
2157
- LLM_ARCH_LLADA_MOE,
2158
- {
2159
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2160
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2161
- { LLM_TENSOR_OUTPUT, "output" },
2162
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2163
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2164
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2165
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2166
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2167
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2168
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2169
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2170
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2171
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2172
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2173
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2174
- },
2175
- },
2176
- {
2177
- LLM_ARCH_SEED_OSS,
2178
- {
2179
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2180
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2181
- { LLM_TENSOR_OUTPUT, "output" },
2182
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2183
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2184
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2185
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2186
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2187
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
2188
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
2189
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
2190
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
2191
- },
2192
- },
2193
- {
2194
- LLM_ARCH_GROVEMOE,
2195
- {
2196
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2197
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
2198
- { LLM_TENSOR_OUTPUT, "output" },
2199
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
2200
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
2201
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
2202
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
2203
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
2204
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
2205
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
2206
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
2207
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
2208
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
2209
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
2210
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
2211
- { LLM_TENSOR_FFN_GATE_CHEXPS, "blk.%d.ffn_gate_chexps" },
2212
- { LLM_TENSOR_FFN_DOWN_CHEXPS, "blk.%d.ffn_down_chexps" },
2213
- { LLM_TENSOR_FFN_UP_CHEXPS, "blk.%d.ffn_up_chexps" },
2214
- },
2215
- },
2216
- {
2217
- LLM_ARCH_UNKNOWN,
2218
- {
2219
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
2220
- },
2221
- },
314
+ static const std::map<llm_tensor, const char *> LLM_TENSOR_NAMES = {
315
+ { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
316
+ { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
317
+ { LLM_TENSOR_OUTPUT_NORM_LFM2, "token_embd_norm" }, // fix for wrong tensor name
318
+ { LLM_TENSOR_OUTPUT, "output" },
319
+ { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
320
+ { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
321
+ { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
322
+ { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
323
+ { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
324
+ { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
325
+ { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
326
+ { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
327
+ { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
328
+ { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
329
+ { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
330
+ { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
331
+ { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
332
+ { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
333
+ { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
334
+ { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
335
+ { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
336
+ { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
337
+ { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
338
+ { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
339
+ { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
340
+ { LLM_TENSOR_ATTN_GATE, "blk.%d.attn_gate" },
341
+ { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
342
+ { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
343
+ { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
344
+ { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
345
+ { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
346
+ { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
347
+ { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
348
+ { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
349
+ { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
350
+ { LLM_TENSOR_POS_EMBD, "position_embd" },
351
+ { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
352
+ { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
353
+ { LLM_TENSOR_TOKEN_TYPES, "token_types" },
354
+ { LLM_TENSOR_CLS, "cls" },
355
+ { LLM_TENSOR_CLS_OUT, "cls.output" },
356
+ { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
357
+ { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
358
+ { LLM_TENSOR_SSM_A_NOSCAN, "blk.%d.ssm_a" },
359
+ { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
360
+ { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
361
+ { LLM_TENSOR_SSM_BETA_ALPHA, "blk.%d.ssm_ba" },
362
+ { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
363
+ { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
364
+ { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
365
+ { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
366
+ { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
367
+ { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
368
+ { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
369
+ { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
370
+ { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
371
+ { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
372
+ { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
373
+ { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
374
+ { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
375
+ { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
376
+ { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
377
+ { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
378
+ { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
379
+ { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
380
+ { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
381
+ { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
382
+ { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
383
+ { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
384
+ { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
385
+ { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
386
+ { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
387
+ { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
388
+ { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
389
+ { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
390
+ { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
391
+ { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
392
+ { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
393
+ { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
394
+ { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
395
+ { LLM_TENSOR_DENSE_2_OUT, "dense_2" },
396
+ { LLM_TENSOR_DENSE_3_OUT, "dense_3" },
397
+ { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
398
+ { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
399
+ { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
400
+ { LLM_TENSOR_NEXTN_EH_PROJ, "blk.%d.nextn.eh_proj" },
401
+ { LLM_TENSOR_NEXTN_EMBED_TOKENS, "blk.%d.nextn.embed_tokens" },
402
+ { LLM_TENSOR_NEXTN_ENORM, "blk.%d.nextn.enorm" },
403
+ { LLM_TENSOR_NEXTN_HNORM, "blk.%d.nextn.hnorm" },
404
+ { LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, "blk.%d.nextn.shared_head_head" },
405
+ { LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, "blk.%d.nextn.shared_head_norm" },
406
+ { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
407
+ { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
408
+ { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
409
+ { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
410
+ { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
411
+ { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
412
+ { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
413
+ { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
414
+ { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
415
+ { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
416
+ { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
417
+ { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
418
+ { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
419
+ { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
420
+ { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
421
+ { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
422
+ { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
423
+ { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
424
+ { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
425
+ { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
426
+ { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
427
+ { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
428
+ { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
429
+ { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
430
+ { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
431
+ { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
432
+ { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
433
+ { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
434
+ { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
435
+ { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
436
+ { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
437
+ { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
438
+ { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
439
+ { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
440
+ { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
441
+ { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
442
+ { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
443
+ { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
444
+ { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
445
+ { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
446
+ { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
447
+ { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
448
+ { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
449
+ { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
450
+ { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
451
+ { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
452
+ { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
453
+ { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
454
+ { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
455
+ { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
456
+ { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
457
+ { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
458
+ { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
459
+ { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
460
+ { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
461
+ { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
462
+ { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
463
+ { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
464
+ { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
465
+ { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
466
+ { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
467
+ { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
468
+ { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
469
+ { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
470
+ { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
471
+ { LLM_TENSOR_CONV1D, "conv1d" },
472
+ { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
473
+ { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
474
+ { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
475
+ { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
476
+ { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
477
+ { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
478
+ { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
479
+ { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
480
+ { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
481
+ { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
482
+ { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
483
+ { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
484
+ { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
485
+ { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
486
+ { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
487
+ { LLM_TENSOR_ATTN_SINKS, "blk.%d.attn_sinks" },
488
+ { LLM_TENSOR_SHORTCONV_CONV, "blk.%d.shortconv.conv" },
489
+ { LLM_TENSOR_SHORTCONV_INPROJ, "blk.%d.shortconv.in_proj" },
490
+ { LLM_TENSOR_SHORTCONV_OUTPROJ, "blk.%d.shortconv.out_proj" },
491
+ { LLM_TENSOR_FFN_GATE_CHEXPS, "blk.%d.ffn_gate_chexps" },
492
+ { LLM_TENSOR_FFN_DOWN_CHEXPS, "blk.%d.ffn_down_chexps" },
493
+ { LLM_TENSOR_FFN_UP_CHEXPS, "blk.%d.ffn_up_chexps" },
494
+ { LLM_TENSOR_VISEXP_ATTN_QKV, "blk.%d.vis_attn_qkv" },
495
+ { LLM_TENSOR_VISEXP_ATTN_OUT, "blk.%d.vis_attn_output" },
496
+ { LLM_TENSOR_VISEXP_FFN_GATE, "blk.%d.vis_gate" },
497
+ { LLM_TENSOR_VISEXP_FFN_DOWN, "blk.%d.vis_down" },
498
+ { LLM_TENSOR_VISEXP_FFN_UP, "blk.%d.vis_up" },
2222
499
  };
2223
500
 
501
+ static std::set<llm_tensor> llm_get_tensor_names(llm_arch arch) {
502
+ switch (arch) {
503
+ case LLM_ARCH_CLIP:
504
+ return {};
505
+ case LLM_ARCH_LLAMA:
506
+ case LLM_ARCH_DECI:
507
+ case LLM_ARCH_MISTRAL3:
508
+ case LLM_ARCH_LLAMA_EMBED:
509
+ return {
510
+ LLM_TENSOR_TOKEN_EMBD,
511
+ LLM_TENSOR_OUTPUT_NORM,
512
+ LLM_TENSOR_OUTPUT,
513
+ LLM_TENSOR_ROPE_FREQS,
514
+ LLM_TENSOR_ATTN_NORM,
515
+ LLM_TENSOR_ATTN_Q,
516
+ LLM_TENSOR_ATTN_K,
517
+ LLM_TENSOR_ATTN_V,
518
+ LLM_TENSOR_ATTN_OUT,
519
+ LLM_TENSOR_ATTN_ROT_EMBD,
520
+ LLM_TENSOR_FFN_GATE_INP,
521
+ LLM_TENSOR_FFN_NORM,
522
+ LLM_TENSOR_FFN_GATE,
523
+ LLM_TENSOR_FFN_DOWN,
524
+ LLM_TENSOR_FFN_UP,
525
+ LLM_TENSOR_FFN_GATE_EXP,
526
+ LLM_TENSOR_FFN_DOWN_EXP,
527
+ LLM_TENSOR_FFN_UP_EXP,
528
+ LLM_TENSOR_FFN_GATE_EXPS,
529
+ LLM_TENSOR_FFN_DOWN_EXPS,
530
+ LLM_TENSOR_FFN_UP_EXPS,
531
+ };
532
+ case LLM_ARCH_ARCEE:
533
+ case LLM_ARCH_STARCODER2:
534
+ case LLM_ARCH_NEMOTRON:
535
+ return {
536
+ LLM_TENSOR_TOKEN_EMBD,
537
+ LLM_TENSOR_OUTPUT_NORM,
538
+ LLM_TENSOR_OUTPUT,
539
+ LLM_TENSOR_ROPE_FREQS,
540
+ LLM_TENSOR_ATTN_NORM,
541
+ LLM_TENSOR_ATTN_Q,
542
+ LLM_TENSOR_ATTN_K,
543
+ LLM_TENSOR_ATTN_V,
544
+ LLM_TENSOR_ATTN_OUT,
545
+ LLM_TENSOR_ATTN_ROT_EMBD,
546
+ LLM_TENSOR_FFN_NORM,
547
+ LLM_TENSOR_FFN_DOWN,
548
+ LLM_TENSOR_FFN_UP,
549
+ };
550
+ case LLM_ARCH_AFMOE:
551
+ return {
552
+ LLM_TENSOR_TOKEN_EMBD,
553
+ LLM_TENSOR_OUTPUT_NORM,
554
+ LLM_TENSOR_OUTPUT,
555
+ LLM_TENSOR_ATTN_NORM,
556
+ LLM_TENSOR_ATTN_POST_NORM,
557
+ LLM_TENSOR_ATTN_Q,
558
+ LLM_TENSOR_ATTN_K,
559
+ LLM_TENSOR_ATTN_V,
560
+ LLM_TENSOR_ATTN_OUT,
561
+ LLM_TENSOR_ATTN_Q_NORM,
562
+ LLM_TENSOR_ATTN_K_NORM,
563
+ LLM_TENSOR_ATTN_GATE,
564
+ LLM_TENSOR_FFN_NORM,
565
+ LLM_TENSOR_FFN_POST_NORM,
566
+ LLM_TENSOR_FFN_GATE_INP,
567
+ LLM_TENSOR_FFN_GATE,
568
+ LLM_TENSOR_FFN_DOWN,
569
+ LLM_TENSOR_FFN_UP,
570
+ LLM_TENSOR_FFN_GATE_EXPS,
571
+ LLM_TENSOR_FFN_DOWN_EXPS,
572
+ LLM_TENSOR_FFN_UP_EXPS,
573
+ LLM_TENSOR_FFN_GATE_SHEXP,
574
+ LLM_TENSOR_FFN_UP_SHEXP,
575
+ LLM_TENSOR_FFN_DOWN_SHEXP,
576
+ LLM_TENSOR_FFN_EXP_PROBS_B,
577
+ };
578
+ case LLM_ARCH_LLAMA4:
579
+ return {
580
+ LLM_TENSOR_TOKEN_EMBD,
581
+ LLM_TENSOR_OUTPUT_NORM,
582
+ LLM_TENSOR_OUTPUT,
583
+ LLM_TENSOR_ROPE_FREQS,
584
+ LLM_TENSOR_ATTN_NORM,
585
+ LLM_TENSOR_ATTN_Q,
586
+ LLM_TENSOR_ATTN_K,
587
+ LLM_TENSOR_ATTN_V,
588
+ LLM_TENSOR_ATTN_OUT,
589
+ LLM_TENSOR_ATTN_ROT_EMBD,
590
+ LLM_TENSOR_FFN_GATE_INP,
591
+ LLM_TENSOR_FFN_NORM,
592
+ LLM_TENSOR_FFN_GATE,
593
+ LLM_TENSOR_FFN_DOWN,
594
+ LLM_TENSOR_FFN_UP,
595
+ LLM_TENSOR_FFN_GATE_EXP,
596
+ LLM_TENSOR_FFN_DOWN_EXP,
597
+ LLM_TENSOR_FFN_UP_EXP,
598
+ LLM_TENSOR_FFN_GATE_EXPS,
599
+ LLM_TENSOR_FFN_DOWN_EXPS,
600
+ LLM_TENSOR_FFN_UP_EXPS,
601
+ LLM_TENSOR_FFN_GATE_SHEXP,
602
+ LLM_TENSOR_FFN_DOWN_SHEXP,
603
+ LLM_TENSOR_FFN_UP_SHEXP,
604
+ };
605
+ case LLM_ARCH_BAICHUAN:
606
+ case LLM_ARCH_ORION:
607
+ case LLM_ARCH_XVERSE:
608
+ case LLM_ARCH_EXAONE:
609
+ return {
610
+ LLM_TENSOR_TOKEN_EMBD,
611
+ LLM_TENSOR_OUTPUT_NORM,
612
+ LLM_TENSOR_OUTPUT,
613
+ LLM_TENSOR_ROPE_FREQS,
614
+ LLM_TENSOR_ATTN_NORM,
615
+ LLM_TENSOR_ATTN_Q,
616
+ LLM_TENSOR_ATTN_K,
617
+ LLM_TENSOR_ATTN_V,
618
+ LLM_TENSOR_ATTN_OUT,
619
+ LLM_TENSOR_ATTN_ROT_EMBD,
620
+ LLM_TENSOR_FFN_NORM,
621
+ LLM_TENSOR_FFN_GATE,
622
+ LLM_TENSOR_FFN_DOWN,
623
+ LLM_TENSOR_FFN_UP,
624
+ };
625
+ case LLM_ARCH_FALCON:
626
+ return {
627
+ LLM_TENSOR_TOKEN_EMBD,
628
+ LLM_TENSOR_OUTPUT_NORM,
629
+ LLM_TENSOR_OUTPUT,
630
+ LLM_TENSOR_ATTN_NORM,
631
+ LLM_TENSOR_ATTN_NORM_2,
632
+ LLM_TENSOR_ATTN_QKV,
633
+ LLM_TENSOR_ATTN_OUT,
634
+ LLM_TENSOR_FFN_DOWN,
635
+ LLM_TENSOR_FFN_UP,
636
+ };
637
+ case LLM_ARCH_GROK:
638
+ return {
639
+ LLM_TENSOR_TOKEN_EMBD,
640
+ LLM_TENSOR_OUTPUT_NORM,
641
+ LLM_TENSOR_OUTPUT,
642
+ LLM_TENSOR_ROPE_FREQS,
643
+ LLM_TENSOR_ATTN_NORM,
644
+ LLM_TENSOR_ATTN_Q,
645
+ LLM_TENSOR_ATTN_K,
646
+ LLM_TENSOR_ATTN_V,
647
+ LLM_TENSOR_ATTN_OUT,
648
+ LLM_TENSOR_ATTN_ROT_EMBD,
649
+ LLM_TENSOR_FFN_GATE_INP,
650
+ LLM_TENSOR_FFN_NORM,
651
+ LLM_TENSOR_FFN_GATE,
652
+ LLM_TENSOR_FFN_DOWN,
653
+ LLM_TENSOR_FFN_UP,
654
+ LLM_TENSOR_FFN_GATE_EXP,
655
+ LLM_TENSOR_FFN_DOWN_EXP,
656
+ LLM_TENSOR_FFN_UP_EXP,
657
+ LLM_TENSOR_FFN_GATE_EXPS,
658
+ LLM_TENSOR_FFN_DOWN_EXPS,
659
+ LLM_TENSOR_FFN_UP_EXPS,
660
+ LLM_TENSOR_FFN_POST_NORM,
661
+ LLM_TENSOR_LAYER_OUT_NORM,
662
+ LLM_TENSOR_ATTN_OUT_NORM,
663
+ };
664
+ case LLM_ARCH_GPT2:
665
+ case LLM_ARCH_STARCODER:
666
+ return {
667
+ LLM_TENSOR_TOKEN_EMBD,
668
+ LLM_TENSOR_POS_EMBD,
669
+ LLM_TENSOR_OUTPUT_NORM,
670
+ LLM_TENSOR_OUTPUT,
671
+ LLM_TENSOR_ATTN_NORM,
672
+ LLM_TENSOR_ATTN_QKV,
673
+ LLM_TENSOR_ATTN_OUT,
674
+ LLM_TENSOR_FFN_NORM,
675
+ LLM_TENSOR_FFN_UP,
676
+ LLM_TENSOR_FFN_DOWN,
677
+ };
678
+ case LLM_ARCH_GPTNEOX:
679
+ return {
680
+ LLM_TENSOR_TOKEN_EMBD,
681
+ LLM_TENSOR_OUTPUT_NORM,
682
+ LLM_TENSOR_OUTPUT,
683
+ LLM_TENSOR_ATTN_NORM,
684
+ LLM_TENSOR_ATTN_QKV,
685
+ LLM_TENSOR_ATTN_OUT,
686
+ LLM_TENSOR_FFN_NORM,
687
+ LLM_TENSOR_FFN_DOWN,
688
+ LLM_TENSOR_FFN_UP,
689
+ };
690
+ case LLM_ARCH_MPT:
691
+ return {
692
+ LLM_TENSOR_TOKEN_EMBD,
693
+ LLM_TENSOR_OUTPUT_NORM,
694
+ LLM_TENSOR_OUTPUT,
695
+ LLM_TENSOR_ATTN_NORM,
696
+ LLM_TENSOR_FFN_NORM,
697
+ LLM_TENSOR_ATTN_QKV,
698
+ LLM_TENSOR_ATTN_OUT,
699
+ LLM_TENSOR_FFN_DOWN,
700
+ LLM_TENSOR_FFN_UP,
701
+ LLM_TENSOR_FFN_ACT,
702
+ LLM_TENSOR_POS_EMBD,
703
+ LLM_TENSOR_ATTN_Q_NORM,
704
+ LLM_TENSOR_ATTN_K_NORM,
705
+ };
706
+ case LLM_ARCH_REFACT:
707
+ case LLM_ARCH_QWEN2:
708
+ case LLM_ARCH_QWEN2VL:
709
+ case LLM_ARCH_INTERNLM2:
710
+ case LLM_ARCH_GRANITE:
711
+ case LLM_ARCH_ERNIE4_5:
712
+ case LLM_ARCH_SMOLLM3:
713
+ case LLM_ARCH_DREAM:
714
+ case LLM_ARCH_LLADA:
715
+ case LLM_ARCH_PANGU_EMBED:
716
+ return {
717
+ LLM_TENSOR_TOKEN_EMBD,
718
+ LLM_TENSOR_OUTPUT_NORM,
719
+ LLM_TENSOR_OUTPUT,
720
+ LLM_TENSOR_ATTN_NORM,
721
+ LLM_TENSOR_ATTN_Q,
722
+ LLM_TENSOR_ATTN_K,
723
+ LLM_TENSOR_ATTN_V,
724
+ LLM_TENSOR_ATTN_OUT,
725
+ LLM_TENSOR_FFN_NORM,
726
+ LLM_TENSOR_FFN_GATE,
727
+ LLM_TENSOR_FFN_DOWN,
728
+ LLM_TENSOR_FFN_UP,
729
+ };
730
+ case LLM_ARCH_BERT:
731
+ return {
732
+ LLM_TENSOR_TOKEN_EMBD,
733
+ LLM_TENSOR_TOKEN_EMBD_NORM,
734
+ LLM_TENSOR_TOKEN_TYPES,
735
+ LLM_TENSOR_POS_EMBD,
736
+ LLM_TENSOR_ATTN_OUT_NORM,
737
+ LLM_TENSOR_ATTN_QKV,
738
+ LLM_TENSOR_ATTN_Q,
739
+ LLM_TENSOR_ATTN_K,
740
+ LLM_TENSOR_ATTN_V,
741
+ LLM_TENSOR_ATTN_OUT,
742
+ LLM_TENSOR_LAYER_OUT_NORM,
743
+ LLM_TENSOR_FFN_DOWN,
744
+ LLM_TENSOR_FFN_UP,
745
+ LLM_TENSOR_CLS,
746
+ LLM_TENSOR_CLS_OUT,
747
+ };
748
+ case LLM_ARCH_NOMIC_BERT:
749
+ return {
750
+ LLM_TENSOR_TOKEN_EMBD,
751
+ LLM_TENSOR_TOKEN_EMBD_NORM,
752
+ LLM_TENSOR_TOKEN_TYPES,
753
+ LLM_TENSOR_ATTN_OUT_NORM,
754
+ LLM_TENSOR_ATTN_QKV,
755
+ LLM_TENSOR_ATTN_OUT,
756
+ LLM_TENSOR_LAYER_OUT_NORM,
757
+ LLM_TENSOR_FFN_GATE,
758
+ LLM_TENSOR_FFN_DOWN,
759
+ LLM_TENSOR_FFN_UP,
760
+ };
761
+ case LLM_ARCH_NOMIC_BERT_MOE:
762
+ return {
763
+ LLM_TENSOR_TOKEN_EMBD,
764
+ LLM_TENSOR_TOKEN_EMBD_NORM,
765
+ LLM_TENSOR_TOKEN_TYPES,
766
+ LLM_TENSOR_ATTN_OUT_NORM,
767
+ LLM_TENSOR_ATTN_QKV,
768
+ LLM_TENSOR_ATTN_OUT,
769
+ LLM_TENSOR_LAYER_OUT_NORM,
770
+ LLM_TENSOR_FFN_GATE,
771
+ LLM_TENSOR_FFN_DOWN,
772
+ LLM_TENSOR_FFN_UP,
773
+ LLM_TENSOR_FFN_GATE_INP,
774
+ LLM_TENSOR_FFN_DOWN_EXPS,
775
+ LLM_TENSOR_FFN_UP_EXPS,
776
+ };
777
+ case LLM_ARCH_NEO_BERT:
778
+ return {
779
+ LLM_TENSOR_TOKEN_EMBD,
780
+ LLM_TENSOR_ATTN_NORM,
781
+ LLM_TENSOR_ATTN_QKV,
782
+ LLM_TENSOR_ATTN_OUT,
783
+ LLM_TENSOR_FFN_NORM,
784
+ LLM_TENSOR_FFN_DOWN,
785
+ LLM_TENSOR_FFN_UP,
786
+ LLM_TENSOR_ENC_OUTPUT_NORM,
787
+ LLM_TENSOR_CLS,
788
+ LLM_TENSOR_CLS_OUT,
789
+ };
790
+ case LLM_ARCH_MODERN_BERT:
791
+ return {
792
+ LLM_TENSOR_TOKEN_EMBD,
793
+ LLM_TENSOR_TOKEN_EMBD_NORM,
794
+ LLM_TENSOR_OUTPUT_NORM,
795
+ LLM_TENSOR_ATTN_NORM,
796
+ LLM_TENSOR_ATTN_OUT,
797
+ LLM_TENSOR_ATTN_QKV,
798
+ LLM_TENSOR_FFN_DOWN,
799
+ LLM_TENSOR_FFN_UP,
800
+ LLM_TENSOR_FFN_NORM,
801
+ LLM_TENSOR_CLS,
802
+ LLM_TENSOR_CLS_OUT,
803
+ };
804
+ case LLM_ARCH_JINA_BERT_V2:
805
+ return {
806
+ LLM_TENSOR_TOKEN_EMBD,
807
+ LLM_TENSOR_TOKEN_EMBD_NORM,
808
+ LLM_TENSOR_TOKEN_TYPES,
809
+ LLM_TENSOR_ATTN_NORM_2,
810
+ LLM_TENSOR_ATTN_OUT_NORM,
811
+ LLM_TENSOR_ATTN_Q,
812
+ LLM_TENSOR_ATTN_Q_NORM,
813
+ LLM_TENSOR_ATTN_K,
814
+ LLM_TENSOR_ATTN_K_NORM,
815
+ LLM_TENSOR_ATTN_V,
816
+ LLM_TENSOR_ATTN_OUT,
817
+ LLM_TENSOR_LAYER_OUT_NORM,
818
+ LLM_TENSOR_FFN_DOWN,
819
+ LLM_TENSOR_FFN_GATE,
820
+ LLM_TENSOR_FFN_UP,
821
+ LLM_TENSOR_CLS,
822
+ };
823
+ case LLM_ARCH_JINA_BERT_V3:
824
+ return {
825
+ LLM_TENSOR_TOKEN_EMBD,
826
+ LLM_TENSOR_TOKEN_EMBD_NORM,
827
+ LLM_TENSOR_TOKEN_TYPES,
828
+ LLM_TENSOR_ATTN_OUT_NORM,
829
+ LLM_TENSOR_ATTN_QKV,
830
+ LLM_TENSOR_ATTN_OUT,
831
+ LLM_TENSOR_FFN_DOWN,
832
+ LLM_TENSOR_FFN_UP,
833
+ LLM_TENSOR_LAYER_OUT_NORM,
834
+ };
835
+ case LLM_ARCH_BLOOM:
836
+ return {
837
+ LLM_TENSOR_TOKEN_EMBD,
838
+ LLM_TENSOR_TOKEN_EMBD_NORM,
839
+ LLM_TENSOR_OUTPUT_NORM,
840
+ LLM_TENSOR_OUTPUT,
841
+ LLM_TENSOR_ATTN_NORM,
842
+ LLM_TENSOR_ATTN_QKV,
843
+ LLM_TENSOR_ATTN_OUT,
844
+ LLM_TENSOR_FFN_NORM,
845
+ LLM_TENSOR_FFN_UP,
846
+ LLM_TENSOR_FFN_DOWN,
847
+ };
848
+ case LLM_ARCH_STABLELM:
849
+ return {
850
+ LLM_TENSOR_TOKEN_EMBD,
851
+ LLM_TENSOR_OUTPUT_NORM,
852
+ LLM_TENSOR_OUTPUT,
853
+ LLM_TENSOR_ROPE_FREQS,
854
+ LLM_TENSOR_ATTN_NORM,
855
+ LLM_TENSOR_ATTN_Q,
856
+ LLM_TENSOR_ATTN_K,
857
+ LLM_TENSOR_ATTN_V,
858
+ LLM_TENSOR_ATTN_OUT,
859
+ LLM_TENSOR_FFN_NORM,
860
+ LLM_TENSOR_FFN_GATE,
861
+ LLM_TENSOR_FFN_DOWN,
862
+ LLM_TENSOR_FFN_UP,
863
+ LLM_TENSOR_ATTN_Q_NORM,
864
+ LLM_TENSOR_ATTN_K_NORM,
865
+ };
866
+ case LLM_ARCH_QWEN:
867
+ return {
868
+ LLM_TENSOR_TOKEN_EMBD,
869
+ LLM_TENSOR_OUTPUT_NORM,
870
+ LLM_TENSOR_OUTPUT,
871
+ LLM_TENSOR_ROPE_FREQS,
872
+ LLM_TENSOR_ATTN_NORM,
873
+ LLM_TENSOR_ATTN_QKV,
874
+ LLM_TENSOR_ATTN_OUT,
875
+ LLM_TENSOR_FFN_NORM,
876
+ LLM_TENSOR_FFN_GATE,
877
+ LLM_TENSOR_FFN_DOWN,
878
+ LLM_TENSOR_FFN_UP,
879
+ };
880
+ case LLM_ARCH_QWEN2MOE:
881
+ return {
882
+ LLM_TENSOR_TOKEN_EMBD,
883
+ LLM_TENSOR_OUTPUT_NORM,
884
+ LLM_TENSOR_OUTPUT,
885
+ LLM_TENSOR_ATTN_NORM,
886
+ LLM_TENSOR_ATTN_Q,
887
+ LLM_TENSOR_ATTN_K,
888
+ LLM_TENSOR_ATTN_V,
889
+ LLM_TENSOR_ATTN_OUT,
890
+ LLM_TENSOR_FFN_NORM,
891
+ LLM_TENSOR_FFN_GATE_INP,
892
+ LLM_TENSOR_FFN_GATE_EXPS,
893
+ LLM_TENSOR_FFN_DOWN_EXPS,
894
+ LLM_TENSOR_FFN_UP_EXPS,
895
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
896
+ LLM_TENSOR_FFN_GATE_SHEXP,
897
+ LLM_TENSOR_FFN_DOWN_SHEXP,
898
+ LLM_TENSOR_FFN_UP_SHEXP,
899
+ };
900
+ case LLM_ARCH_QWEN3:
901
+ return {
902
+ LLM_TENSOR_TOKEN_EMBD,
903
+ LLM_TENSOR_OUTPUT_NORM,
904
+ LLM_TENSOR_OUTPUT,
905
+ LLM_TENSOR_CLS_OUT,
906
+ LLM_TENSOR_ATTN_NORM,
907
+ LLM_TENSOR_ATTN_Q,
908
+ LLM_TENSOR_ATTN_Q_NORM,
909
+ LLM_TENSOR_ATTN_K,
910
+ LLM_TENSOR_ATTN_K_NORM,
911
+ LLM_TENSOR_ATTN_V,
912
+ LLM_TENSOR_ATTN_OUT,
913
+ LLM_TENSOR_FFN_NORM,
914
+ LLM_TENSOR_FFN_GATE,
915
+ LLM_TENSOR_FFN_DOWN,
916
+ LLM_TENSOR_FFN_UP,
917
+ };
918
+ case LLM_ARCH_QWEN3MOE:
919
+ case LLM_ARCH_QWEN3VLMOE:
920
+ case LLM_ARCH_OLMOE:
921
+ case LLM_ARCH_LLADA_MOE:
922
+ case LLM_ARCH_RND1:
923
+ return {
924
+ LLM_TENSOR_TOKEN_EMBD,
925
+ LLM_TENSOR_OUTPUT_NORM,
926
+ LLM_TENSOR_OUTPUT,
927
+ LLM_TENSOR_ATTN_NORM,
928
+ LLM_TENSOR_ATTN_Q,
929
+ LLM_TENSOR_ATTN_Q_NORM,
930
+ LLM_TENSOR_ATTN_K,
931
+ LLM_TENSOR_ATTN_K_NORM,
932
+ LLM_TENSOR_ATTN_V,
933
+ LLM_TENSOR_ATTN_OUT,
934
+ LLM_TENSOR_FFN_NORM,
935
+ LLM_TENSOR_FFN_GATE_INP,
936
+ LLM_TENSOR_FFN_GATE_EXPS,
937
+ LLM_TENSOR_FFN_DOWN_EXPS,
938
+ LLM_TENSOR_FFN_UP_EXPS,
939
+ };
940
+ case LLM_ARCH_QWEN3NEXT:
941
+ return {
942
+ LLM_TENSOR_TOKEN_EMBD,
943
+ LLM_TENSOR_OUTPUT_NORM,
944
+ LLM_TENSOR_OUTPUT,
945
+ LLM_TENSOR_ATTN_NORM,
946
+ LLM_TENSOR_ATTN_POST_NORM,
947
+ LLM_TENSOR_ATTN_Q,
948
+ LLM_TENSOR_ATTN_Q_NORM,
949
+ LLM_TENSOR_ATTN_K,
950
+ LLM_TENSOR_ATTN_K_NORM,
951
+ LLM_TENSOR_ATTN_V,
952
+ LLM_TENSOR_ATTN_OUT,
953
+ LLM_TENSOR_ATTN_QKV,
954
+ LLM_TENSOR_ATTN_GATE,
955
+ LLM_TENSOR_FFN_NORM,
956
+ LLM_TENSOR_FFN_GATE_INP,
957
+ LLM_TENSOR_FFN_GATE_EXPS,
958
+ LLM_TENSOR_FFN_DOWN_EXPS,
959
+ LLM_TENSOR_FFN_UP_EXPS,
960
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
961
+ LLM_TENSOR_FFN_GATE_SHEXP,
962
+ LLM_TENSOR_FFN_DOWN_SHEXP,
963
+ LLM_TENSOR_FFN_UP_SHEXP,
964
+ LLM_TENSOR_SSM_A_NOSCAN,
965
+ LLM_TENSOR_SSM_CONV1D,
966
+ LLM_TENSOR_SSM_DT,
967
+ LLM_TENSOR_SSM_BETA_ALPHA,
968
+ LLM_TENSOR_SSM_IN,
969
+ LLM_TENSOR_SSM_NORM,
970
+ LLM_TENSOR_SSM_OUT,
971
+ };
972
+ case LLM_ARCH_QWEN3VL:
973
+ case LLM_ARCH_CHAMELEON:
974
+ case LLM_ARCH_HUNYUAN_DENSE:
975
+ return {
976
+ LLM_TENSOR_TOKEN_EMBD,
977
+ LLM_TENSOR_OUTPUT_NORM,
978
+ LLM_TENSOR_OUTPUT,
979
+ LLM_TENSOR_ATTN_NORM,
980
+ LLM_TENSOR_ATTN_Q,
981
+ LLM_TENSOR_ATTN_Q_NORM,
982
+ LLM_TENSOR_ATTN_K,
983
+ LLM_TENSOR_ATTN_K_NORM,
984
+ LLM_TENSOR_ATTN_V,
985
+ LLM_TENSOR_ATTN_OUT,
986
+ LLM_TENSOR_FFN_NORM,
987
+ LLM_TENSOR_FFN_GATE,
988
+ LLM_TENSOR_FFN_DOWN,
989
+ LLM_TENSOR_FFN_UP,
990
+ };
991
+ case LLM_ARCH_PHI2:
992
+ return {
993
+ LLM_TENSOR_TOKEN_EMBD,
994
+ LLM_TENSOR_OUTPUT_NORM,
995
+ LLM_TENSOR_OUTPUT,
996
+ LLM_TENSOR_ATTN_NORM,
997
+ LLM_TENSOR_ATTN_QKV,
998
+ LLM_TENSOR_ATTN_Q,
999
+ LLM_TENSOR_ATTN_K,
1000
+ LLM_TENSOR_ATTN_V,
1001
+ LLM_TENSOR_ATTN_OUT,
1002
+ LLM_TENSOR_FFN_DOWN,
1003
+ LLM_TENSOR_FFN_UP,
1004
+ };
1005
+ case LLM_ARCH_PHI3:
1006
+ return {
1007
+ LLM_TENSOR_TOKEN_EMBD,
1008
+ LLM_TENSOR_OUTPUT_NORM,
1009
+ LLM_TENSOR_OUTPUT,
1010
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1011
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1012
+ LLM_TENSOR_ATTN_NORM,
1013
+ LLM_TENSOR_ATTN_QKV,
1014
+ LLM_TENSOR_ATTN_Q,
1015
+ LLM_TENSOR_ATTN_K,
1016
+ LLM_TENSOR_ATTN_V,
1017
+ LLM_TENSOR_ATTN_OUT,
1018
+ LLM_TENSOR_FFN_NORM,
1019
+ LLM_TENSOR_FFN_DOWN,
1020
+ LLM_TENSOR_FFN_UP,
1021
+ };
1022
+ case LLM_ARCH_PHIMOE:
1023
+ return {
1024
+ LLM_TENSOR_TOKEN_EMBD,
1025
+ LLM_TENSOR_OUTPUT_NORM,
1026
+ LLM_TENSOR_OUTPUT,
1027
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1028
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1029
+ LLM_TENSOR_ATTN_NORM,
1030
+ LLM_TENSOR_ATTN_QKV,
1031
+ LLM_TENSOR_ATTN_Q,
1032
+ LLM_TENSOR_ATTN_K,
1033
+ LLM_TENSOR_ATTN_V,
1034
+ LLM_TENSOR_ATTN_OUT,
1035
+ LLM_TENSOR_FFN_NORM,
1036
+ LLM_TENSOR_FFN_GATE_INP,
1037
+ LLM_TENSOR_FFN_GATE_EXPS,
1038
+ LLM_TENSOR_FFN_DOWN_EXPS,
1039
+ LLM_TENSOR_FFN_UP_EXPS,
1040
+ };
1041
+ case LLM_ARCH_PLAMO:
1042
+ return {
1043
+ LLM_TENSOR_TOKEN_EMBD,
1044
+ LLM_TENSOR_OUTPUT_NORM,
1045
+ LLM_TENSOR_OUTPUT,
1046
+ LLM_TENSOR_ROPE_FREQS,
1047
+ LLM_TENSOR_ATTN_NORM,
1048
+ LLM_TENSOR_ATTN_Q,
1049
+ LLM_TENSOR_ATTN_K,
1050
+ LLM_TENSOR_ATTN_V,
1051
+ LLM_TENSOR_ATTN_OUT,
1052
+ LLM_TENSOR_ATTN_ROT_EMBD,
1053
+ LLM_TENSOR_FFN_GATE,
1054
+ LLM_TENSOR_FFN_DOWN,
1055
+ LLM_TENSOR_FFN_UP,
1056
+ };
1057
+ case LLM_ARCH_PLAMO2:
1058
+ return {
1059
+ LLM_TENSOR_TOKEN_EMBD,
1060
+ LLM_TENSOR_OUTPUT_NORM,
1061
+ LLM_TENSOR_OUTPUT,
1062
+ LLM_TENSOR_ROPE_FREQS,
1063
+ LLM_TENSOR_ATTN_NORM,
1064
+ LLM_TENSOR_ATTN_QKV,
1065
+ LLM_TENSOR_ATTN_Q_NORM,
1066
+ LLM_TENSOR_ATTN_K_NORM,
1067
+ LLM_TENSOR_ATTN_OUT,
1068
+ LLM_TENSOR_ATTN_ROT_EMBD,
1069
+ LLM_TENSOR_FFN_NORM,
1070
+ LLM_TENSOR_FFN_DOWN,
1071
+ LLM_TENSOR_FFN_UP,
1072
+ LLM_TENSOR_SSM_IN,
1073
+ LLM_TENSOR_SSM_CONV1D,
1074
+ LLM_TENSOR_SSM_X,
1075
+ LLM_TENSOR_SSM_DT,
1076
+ LLM_TENSOR_SSM_A,
1077
+ LLM_TENSOR_SSM_D,
1078
+ LLM_TENSOR_SSM_OUT,
1079
+ LLM_TENSOR_SSM_DT_NORM,
1080
+ LLM_TENSOR_SSM_B_NORM,
1081
+ LLM_TENSOR_SSM_C_NORM,
1082
+ LLM_TENSOR_ATTN_POST_NORM,
1083
+ LLM_TENSOR_FFN_POST_NORM,
1084
+ };
1085
+ case LLM_ARCH_PLAMO3:
1086
+ return {
1087
+ LLM_TENSOR_TOKEN_EMBD,
1088
+ LLM_TENSOR_OUTPUT_NORM,
1089
+ LLM_TENSOR_OUTPUT,
1090
+ LLM_TENSOR_ATTN_NORM,
1091
+ LLM_TENSOR_ATTN_QKV,
1092
+ LLM_TENSOR_ATTN_Q_NORM,
1093
+ LLM_TENSOR_ATTN_K_NORM,
1094
+ LLM_TENSOR_ATTN_OUT,
1095
+ LLM_TENSOR_ATTN_POST_NORM,
1096
+ LLM_TENSOR_FFN_NORM,
1097
+ LLM_TENSOR_FFN_POST_NORM,
1098
+ LLM_TENSOR_FFN_DOWN,
1099
+ LLM_TENSOR_FFN_UP,
1100
+ };
1101
+ case LLM_ARCH_CODESHELL:
1102
+ return {
1103
+ LLM_TENSOR_TOKEN_EMBD,
1104
+ LLM_TENSOR_OUTPUT_NORM,
1105
+ LLM_TENSOR_OUTPUT,
1106
+ LLM_TENSOR_ROPE_FREQS,
1107
+ LLM_TENSOR_ATTN_NORM,
1108
+ LLM_TENSOR_ATTN_Q,
1109
+ LLM_TENSOR_ATTN_K,
1110
+ LLM_TENSOR_ATTN_V,
1111
+ LLM_TENSOR_ATTN_QKV,
1112
+ LLM_TENSOR_ATTN_OUT,
1113
+ LLM_TENSOR_ATTN_ROT_EMBD,
1114
+ LLM_TENSOR_FFN_NORM,
1115
+ LLM_TENSOR_FFN_GATE,
1116
+ LLM_TENSOR_FFN_DOWN,
1117
+ LLM_TENSOR_FFN_UP,
1118
+ };
1119
+ case LLM_ARCH_MINICPM:
1120
+ return {
1121
+ LLM_TENSOR_TOKEN_EMBD,
1122
+ LLM_TENSOR_OUTPUT_NORM,
1123
+ LLM_TENSOR_OUTPUT,
1124
+ LLM_TENSOR_ROPE_FREQS,
1125
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1126
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1127
+ LLM_TENSOR_ATTN_NORM,
1128
+ LLM_TENSOR_ATTN_Q,
1129
+ LLM_TENSOR_ATTN_K,
1130
+ LLM_TENSOR_ATTN_V,
1131
+ LLM_TENSOR_ATTN_OUT,
1132
+ LLM_TENSOR_ATTN_ROT_EMBD,
1133
+ LLM_TENSOR_FFN_GATE_INP,
1134
+ LLM_TENSOR_FFN_NORM,
1135
+ LLM_TENSOR_FFN_GATE,
1136
+ LLM_TENSOR_FFN_DOWN,
1137
+ LLM_TENSOR_FFN_UP,
1138
+ LLM_TENSOR_FFN_GATE_EXP,
1139
+ LLM_TENSOR_FFN_DOWN_EXP,
1140
+ LLM_TENSOR_FFN_UP_EXP,
1141
+ };
1142
+ case LLM_ARCH_MINICPM3:
1143
+ return {
1144
+ LLM_TENSOR_TOKEN_EMBD,
1145
+ LLM_TENSOR_OUTPUT_NORM,
1146
+ LLM_TENSOR_OUTPUT,
1147
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1148
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1149
+ LLM_TENSOR_ATTN_NORM,
1150
+ LLM_TENSOR_ATTN_Q_A_NORM,
1151
+ LLM_TENSOR_ATTN_KV_A_NORM,
1152
+ LLM_TENSOR_ATTN_Q,
1153
+ LLM_TENSOR_ATTN_Q_A,
1154
+ LLM_TENSOR_ATTN_Q_B,
1155
+ LLM_TENSOR_ATTN_KV_A_MQA,
1156
+ LLM_TENSOR_ATTN_KV_B,
1157
+ LLM_TENSOR_ATTN_OUT,
1158
+ LLM_TENSOR_FFN_NORM,
1159
+ LLM_TENSOR_FFN_GATE,
1160
+ LLM_TENSOR_FFN_UP,
1161
+ LLM_TENSOR_FFN_DOWN,
1162
+ };
1163
+ case LLM_ARCH_GEMMA:
1164
+ return {
1165
+ LLM_TENSOR_TOKEN_EMBD,
1166
+ LLM_TENSOR_OUTPUT_NORM,
1167
+ LLM_TENSOR_ATTN_NORM,
1168
+ LLM_TENSOR_ATTN_Q,
1169
+ LLM_TENSOR_ATTN_K,
1170
+ LLM_TENSOR_ATTN_V,
1171
+ LLM_TENSOR_ATTN_OUT,
1172
+ LLM_TENSOR_FFN_NORM,
1173
+ LLM_TENSOR_FFN_GATE,
1174
+ LLM_TENSOR_FFN_DOWN,
1175
+ LLM_TENSOR_FFN_UP,
1176
+ };
1177
+ case LLM_ARCH_GEMMA2:
1178
+ return {
1179
+ LLM_TENSOR_TOKEN_EMBD,
1180
+ LLM_TENSOR_OUTPUT_NORM,
1181
+ LLM_TENSOR_ATTN_NORM,
1182
+ LLM_TENSOR_ATTN_Q,
1183
+ LLM_TENSOR_ATTN_K,
1184
+ LLM_TENSOR_ATTN_V,
1185
+ LLM_TENSOR_ATTN_OUT,
1186
+ LLM_TENSOR_ATTN_POST_NORM,
1187
+ LLM_TENSOR_FFN_NORM,
1188
+ LLM_TENSOR_FFN_GATE,
1189
+ LLM_TENSOR_FFN_DOWN,
1190
+ LLM_TENSOR_FFN_UP,
1191
+ LLM_TENSOR_FFN_POST_NORM,
1192
+ };
1193
+ case LLM_ARCH_GEMMA3:
1194
+ return {
1195
+ LLM_TENSOR_TOKEN_EMBD,
1196
+ LLM_TENSOR_OUTPUT_NORM,
1197
+ LLM_TENSOR_OUTPUT,
1198
+ LLM_TENSOR_ATTN_NORM,
1199
+ LLM_TENSOR_ATTN_Q,
1200
+ LLM_TENSOR_ATTN_Q_NORM,
1201
+ LLM_TENSOR_ATTN_K,
1202
+ LLM_TENSOR_ATTN_K_NORM,
1203
+ LLM_TENSOR_ATTN_V,
1204
+ LLM_TENSOR_ATTN_OUT,
1205
+ LLM_TENSOR_ATTN_POST_NORM,
1206
+ LLM_TENSOR_FFN_NORM,
1207
+ LLM_TENSOR_FFN_GATE,
1208
+ LLM_TENSOR_FFN_DOWN,
1209
+ LLM_TENSOR_FFN_UP,
1210
+ LLM_TENSOR_FFN_POST_NORM,
1211
+ };
1212
+ case LLM_ARCH_GEMMA3N:
1213
+ return {
1214
+ LLM_TENSOR_TOKEN_EMBD,
1215
+ LLM_TENSOR_OUTPUT_NORM,
1216
+ LLM_TENSOR_ATTN_NORM,
1217
+ LLM_TENSOR_ATTN_Q,
1218
+ LLM_TENSOR_ATTN_Q_NORM,
1219
+ LLM_TENSOR_ATTN_K,
1220
+ LLM_TENSOR_ATTN_K_NORM,
1221
+ LLM_TENSOR_ATTN_V,
1222
+ LLM_TENSOR_ATTN_OUT,
1223
+ LLM_TENSOR_ATTN_POST_NORM,
1224
+ LLM_TENSOR_FFN_NORM,
1225
+ LLM_TENSOR_FFN_GATE,
1226
+ LLM_TENSOR_FFN_DOWN,
1227
+ LLM_TENSOR_FFN_UP,
1228
+ LLM_TENSOR_FFN_POST_NORM,
1229
+ LLM_TENSOR_PER_LAYER_TOKEN_EMBD,
1230
+ LLM_TENSOR_PER_LAYER_MODEL_PROJ,
1231
+ LLM_TENSOR_PER_LAYER_PROJ_NORM,
1232
+ LLM_TENSOR_ALTUP_UNEMBD_PROJ,
1233
+ LLM_TENSOR_ALTUP_PROJ,
1234
+ LLM_TENSOR_PER_LAYER_INP_GATE,
1235
+ LLM_TENSOR_PER_LAYER_PROJ,
1236
+ LLM_TENSOR_PER_LAYER_POST_NORM,
1237
+ LLM_TENSOR_ALTUP_CORRECT_COEF,
1238
+ LLM_TENSOR_ALTUP_CORRECT_SCALE,
1239
+ LLM_TENSOR_ALTUP_PREDICT_COEF,
1240
+ LLM_TENSOR_ALTUP_ROUTER,
1241
+ LLM_TENSOR_ALTUP_ROUTER_NORM,
1242
+ LLM_TENSOR_LAUREL_L,
1243
+ LLM_TENSOR_LAUREL_R,
1244
+ LLM_TENSOR_LAUREL_POST_NORM,
1245
+ };
1246
+ case LLM_ARCH_GEMMA_EMBEDDING:
1247
+ return {
1248
+ LLM_TENSOR_TOKEN_EMBD,
1249
+ LLM_TENSOR_OUTPUT_NORM,
1250
+ LLM_TENSOR_OUTPUT,
1251
+ LLM_TENSOR_DENSE_2_OUT,
1252
+ LLM_TENSOR_DENSE_3_OUT,
1253
+ LLM_TENSOR_ATTN_NORM,
1254
+ LLM_TENSOR_ATTN_Q,
1255
+ LLM_TENSOR_ATTN_Q_NORM,
1256
+ LLM_TENSOR_ATTN_K,
1257
+ LLM_TENSOR_ATTN_K_NORM,
1258
+ LLM_TENSOR_ATTN_V,
1259
+ LLM_TENSOR_ATTN_OUT,
1260
+ LLM_TENSOR_ATTN_POST_NORM,
1261
+ LLM_TENSOR_FFN_NORM,
1262
+ LLM_TENSOR_FFN_GATE,
1263
+ LLM_TENSOR_FFN_DOWN,
1264
+ LLM_TENSOR_FFN_UP,
1265
+ LLM_TENSOR_FFN_POST_NORM,
1266
+ };
1267
+ case LLM_ARCH_MAMBA:
1268
+ return {
1269
+ LLM_TENSOR_TOKEN_EMBD,
1270
+ LLM_TENSOR_OUTPUT_NORM,
1271
+ LLM_TENSOR_OUTPUT,
1272
+ LLM_TENSOR_ATTN_NORM,
1273
+ LLM_TENSOR_SSM_IN,
1274
+ LLM_TENSOR_SSM_CONV1D,
1275
+ LLM_TENSOR_SSM_X,
1276
+ LLM_TENSOR_SSM_DT,
1277
+ LLM_TENSOR_SSM_A,
1278
+ LLM_TENSOR_SSM_D,
1279
+ LLM_TENSOR_SSM_OUT,
1280
+ };
1281
+ case LLM_ARCH_MAMBA2:
1282
+ return {
1283
+ LLM_TENSOR_TOKEN_EMBD,
1284
+ LLM_TENSOR_OUTPUT_NORM,
1285
+ LLM_TENSOR_OUTPUT,
1286
+ LLM_TENSOR_ATTN_NORM,
1287
+ LLM_TENSOR_SSM_IN,
1288
+ LLM_TENSOR_SSM_CONV1D,
1289
+ LLM_TENSOR_SSM_DT,
1290
+ LLM_TENSOR_SSM_A,
1291
+ LLM_TENSOR_SSM_D,
1292
+ LLM_TENSOR_SSM_NORM,
1293
+ LLM_TENSOR_SSM_OUT,
1294
+ };
1295
+ case LLM_ARCH_JAMBA:
1296
+ return {
1297
+ LLM_TENSOR_TOKEN_EMBD,
1298
+ LLM_TENSOR_OUTPUT_NORM,
1299
+ LLM_TENSOR_OUTPUT,
1300
+ LLM_TENSOR_ATTN_NORM,
1301
+ LLM_TENSOR_SSM_IN,
1302
+ LLM_TENSOR_SSM_CONV1D,
1303
+ LLM_TENSOR_SSM_X,
1304
+ LLM_TENSOR_SSM_DT,
1305
+ LLM_TENSOR_SSM_DT_NORM,
1306
+ LLM_TENSOR_SSM_A,
1307
+ LLM_TENSOR_SSM_B_NORM,
1308
+ LLM_TENSOR_SSM_C_NORM,
1309
+ LLM_TENSOR_SSM_D,
1310
+ LLM_TENSOR_SSM_OUT,
1311
+ LLM_TENSOR_ATTN_Q,
1312
+ LLM_TENSOR_ATTN_K,
1313
+ LLM_TENSOR_ATTN_V,
1314
+ LLM_TENSOR_ATTN_OUT,
1315
+ LLM_TENSOR_FFN_GATE_INP,
1316
+ LLM_TENSOR_FFN_NORM,
1317
+ LLM_TENSOR_FFN_GATE,
1318
+ LLM_TENSOR_FFN_DOWN,
1319
+ LLM_TENSOR_FFN_UP,
1320
+ LLM_TENSOR_FFN_GATE_EXPS,
1321
+ LLM_TENSOR_FFN_DOWN_EXPS,
1322
+ LLM_TENSOR_FFN_UP_EXPS,
1323
+ };
1324
+ case LLM_ARCH_FALCON_H1:
1325
+ return {
1326
+ LLM_TENSOR_TOKEN_EMBD,
1327
+ LLM_TENSOR_OUTPUT,
1328
+ LLM_TENSOR_OUTPUT_NORM,
1329
+ LLM_TENSOR_ATTN_NORM,
1330
+ LLM_TENSOR_ATTN_Q,
1331
+ LLM_TENSOR_ATTN_K,
1332
+ LLM_TENSOR_ATTN_V,
1333
+ LLM_TENSOR_ATTN_OUT,
1334
+ LLM_TENSOR_SSM_IN,
1335
+ LLM_TENSOR_SSM_CONV1D,
1336
+ LLM_TENSOR_SSM_DT,
1337
+ LLM_TENSOR_SSM_A,
1338
+ LLM_TENSOR_SSM_D,
1339
+ LLM_TENSOR_SSM_NORM,
1340
+ LLM_TENSOR_SSM_OUT,
1341
+ LLM_TENSOR_FFN_NORM,
1342
+ LLM_TENSOR_FFN_GATE,
1343
+ LLM_TENSOR_FFN_DOWN,
1344
+ LLM_TENSOR_FFN_UP,
1345
+ };
1346
+ case LLM_ARCH_COMMAND_R:
1347
+ return {
1348
+ LLM_TENSOR_TOKEN_EMBD,
1349
+ LLM_TENSOR_OUTPUT_NORM,
1350
+ LLM_TENSOR_ATTN_NORM,
1351
+ LLM_TENSOR_ATTN_Q,
1352
+ LLM_TENSOR_ATTN_K,
1353
+ LLM_TENSOR_ATTN_V,
1354
+ LLM_TENSOR_ATTN_OUT,
1355
+ LLM_TENSOR_FFN_GATE,
1356
+ LLM_TENSOR_FFN_DOWN,
1357
+ LLM_TENSOR_FFN_UP,
1358
+ LLM_TENSOR_ATTN_Q_NORM,
1359
+ LLM_TENSOR_ATTN_K_NORM,
1360
+ };
1361
+ case LLM_ARCH_COHERE2:
1362
+ return {
1363
+ LLM_TENSOR_TOKEN_EMBD,
1364
+ LLM_TENSOR_OUTPUT_NORM,
1365
+ LLM_TENSOR_ATTN_NORM,
1366
+ LLM_TENSOR_ATTN_Q,
1367
+ LLM_TENSOR_ATTN_K,
1368
+ LLM_TENSOR_ATTN_V,
1369
+ LLM_TENSOR_ATTN_OUT,
1370
+ LLM_TENSOR_FFN_GATE,
1371
+ LLM_TENSOR_FFN_DOWN,
1372
+ LLM_TENSOR_FFN_UP,
1373
+ };
1374
+ case LLM_ARCH_DBRX:
1375
+ return {
1376
+ LLM_TENSOR_TOKEN_EMBD,
1377
+ LLM_TENSOR_OUTPUT_NORM,
1378
+ LLM_TENSOR_OUTPUT,
1379
+ LLM_TENSOR_ATTN_QKV,
1380
+ LLM_TENSOR_ATTN_NORM,
1381
+ LLM_TENSOR_ATTN_OUT,
1382
+ LLM_TENSOR_ATTN_OUT_NORM,
1383
+ LLM_TENSOR_FFN_GATE_INP,
1384
+ LLM_TENSOR_FFN_GATE_EXPS,
1385
+ LLM_TENSOR_FFN_DOWN_EXPS,
1386
+ LLM_TENSOR_FFN_UP_EXPS,
1387
+ };
1388
+ case LLM_ARCH_OLMO:
1389
+ return {
1390
+ LLM_TENSOR_TOKEN_EMBD,
1391
+ LLM_TENSOR_OUTPUT,
1392
+ LLM_TENSOR_ATTN_Q,
1393
+ LLM_TENSOR_ATTN_K,
1394
+ LLM_TENSOR_ATTN_V,
1395
+ LLM_TENSOR_ATTN_OUT,
1396
+ LLM_TENSOR_FFN_GATE,
1397
+ LLM_TENSOR_FFN_DOWN,
1398
+ LLM_TENSOR_FFN_UP,
1399
+ };
1400
+ case LLM_ARCH_OLMO2:
1401
+ return {
1402
+ LLM_TENSOR_TOKEN_EMBD,
1403
+ LLM_TENSOR_OUTPUT_NORM,
1404
+ LLM_TENSOR_OUTPUT,
1405
+ LLM_TENSOR_ATTN_Q,
1406
+ LLM_TENSOR_ATTN_K,
1407
+ LLM_TENSOR_ATTN_V,
1408
+ LLM_TENSOR_ATTN_OUT,
1409
+ LLM_TENSOR_ATTN_POST_NORM,
1410
+ LLM_TENSOR_ATTN_Q_NORM,
1411
+ LLM_TENSOR_ATTN_K_NORM,
1412
+ LLM_TENSOR_FFN_POST_NORM,
1413
+ LLM_TENSOR_FFN_GATE,
1414
+ LLM_TENSOR_FFN_DOWN,
1415
+ LLM_TENSOR_FFN_UP,
1416
+ };
1417
+ case LLM_ARCH_OPENELM:
1418
+ return {
1419
+ LLM_TENSOR_TOKEN_EMBD,
1420
+ LLM_TENSOR_OUTPUT_NORM,
1421
+ LLM_TENSOR_ATTN_NORM,
1422
+ LLM_TENSOR_ATTN_QKV,
1423
+ LLM_TENSOR_ATTN_Q_NORM,
1424
+ LLM_TENSOR_ATTN_K_NORM,
1425
+ LLM_TENSOR_ATTN_OUT,
1426
+ LLM_TENSOR_FFN_NORM,
1427
+ LLM_TENSOR_FFN_GATE,
1428
+ LLM_TENSOR_FFN_DOWN,
1429
+ LLM_TENSOR_FFN_UP,
1430
+ };
1431
+ case LLM_ARCH_ARCTIC:
1432
+ return {
1433
+ LLM_TENSOR_TOKEN_EMBD,
1434
+ LLM_TENSOR_OUTPUT_NORM,
1435
+ LLM_TENSOR_OUTPUT,
1436
+ LLM_TENSOR_ATTN_NORM,
1437
+ LLM_TENSOR_ATTN_Q,
1438
+ LLM_TENSOR_ATTN_K,
1439
+ LLM_TENSOR_ATTN_V,
1440
+ LLM_TENSOR_ATTN_OUT,
1441
+ LLM_TENSOR_FFN_GATE_INP,
1442
+ LLM_TENSOR_FFN_NORM,
1443
+ LLM_TENSOR_FFN_GATE,
1444
+ LLM_TENSOR_FFN_DOWN,
1445
+ LLM_TENSOR_FFN_UP,
1446
+ LLM_TENSOR_FFN_NORM_EXPS,
1447
+ LLM_TENSOR_FFN_GATE_EXPS,
1448
+ LLM_TENSOR_FFN_DOWN_EXPS,
1449
+ LLM_TENSOR_FFN_UP_EXPS,
1450
+ };
1451
+ case LLM_ARCH_DEEPSEEK:
1452
+ return {
1453
+ LLM_TENSOR_TOKEN_EMBD,
1454
+ LLM_TENSOR_OUTPUT_NORM,
1455
+ LLM_TENSOR_OUTPUT,
1456
+ LLM_TENSOR_ROPE_FREQS,
1457
+ LLM_TENSOR_ATTN_NORM,
1458
+ LLM_TENSOR_ATTN_Q,
1459
+ LLM_TENSOR_ATTN_K,
1460
+ LLM_TENSOR_ATTN_V,
1461
+ LLM_TENSOR_ATTN_OUT,
1462
+ LLM_TENSOR_ATTN_ROT_EMBD,
1463
+ LLM_TENSOR_FFN_GATE_INP,
1464
+ LLM_TENSOR_FFN_NORM,
1465
+ LLM_TENSOR_FFN_GATE,
1466
+ LLM_TENSOR_FFN_DOWN,
1467
+ LLM_TENSOR_FFN_UP,
1468
+ LLM_TENSOR_FFN_GATE_EXPS,
1469
+ LLM_TENSOR_FFN_DOWN_EXPS,
1470
+ LLM_TENSOR_FFN_UP_EXPS,
1471
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1472
+ LLM_TENSOR_FFN_GATE_SHEXP,
1473
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1474
+ LLM_TENSOR_FFN_UP_SHEXP,
1475
+ };
1476
+ case LLM_ARCH_DEEPSEEK2:
1477
+ return {
1478
+ LLM_TENSOR_TOKEN_EMBD,
1479
+ LLM_TENSOR_OUTPUT_NORM,
1480
+ LLM_TENSOR_OUTPUT,
1481
+ LLM_TENSOR_ATTN_NORM,
1482
+ LLM_TENSOR_ATTN_Q_A_NORM,
1483
+ LLM_TENSOR_ATTN_KV_A_NORM,
1484
+ LLM_TENSOR_ATTN_Q,
1485
+ LLM_TENSOR_ATTN_Q_A,
1486
+ LLM_TENSOR_ATTN_Q_B,
1487
+ LLM_TENSOR_ATTN_KV_A_MQA,
1488
+ LLM_TENSOR_ATTN_KV_B,
1489
+ LLM_TENSOR_ATTN_K_B,
1490
+ LLM_TENSOR_ATTN_V_B,
1491
+ LLM_TENSOR_ATTN_OUT,
1492
+ LLM_TENSOR_FFN_NORM,
1493
+ LLM_TENSOR_FFN_GATE,
1494
+ LLM_TENSOR_FFN_UP,
1495
+ LLM_TENSOR_FFN_DOWN,
1496
+ LLM_TENSOR_FFN_GATE_INP,
1497
+ LLM_TENSOR_FFN_GATE_EXPS,
1498
+ LLM_TENSOR_FFN_DOWN_EXPS,
1499
+ LLM_TENSOR_FFN_UP_EXPS,
1500
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1501
+ LLM_TENSOR_FFN_GATE_SHEXP,
1502
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1503
+ LLM_TENSOR_FFN_UP_SHEXP,
1504
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1505
+ };
1506
+ case LLM_ARCH_PLM:
1507
+ return {
1508
+ LLM_TENSOR_TOKEN_EMBD,
1509
+ LLM_TENSOR_OUTPUT_NORM,
1510
+ LLM_TENSOR_ATTN_NORM,
1511
+ LLM_TENSOR_ATTN_Q,
1512
+ LLM_TENSOR_ATTN_KV_A_MQA,
1513
+ LLM_TENSOR_ATTN_KV_A_NORM,
1514
+ LLM_TENSOR_ATTN_KV_B,
1515
+ LLM_TENSOR_ATTN_OUT,
1516
+ LLM_TENSOR_FFN_NORM,
1517
+ LLM_TENSOR_FFN_DOWN,
1518
+ LLM_TENSOR_FFN_UP,
1519
+ };
1520
+ case LLM_ARCH_CHATGLM:
1521
+ return {
1522
+ LLM_TENSOR_TOKEN_EMBD,
1523
+ LLM_TENSOR_ROPE_FREQS,
1524
+ LLM_TENSOR_OUTPUT_NORM,
1525
+ LLM_TENSOR_OUTPUT,
1526
+ LLM_TENSOR_ATTN_NORM,
1527
+ LLM_TENSOR_ATTN_QKV,
1528
+ LLM_TENSOR_ATTN_Q,
1529
+ LLM_TENSOR_ATTN_K,
1530
+ LLM_TENSOR_ATTN_V,
1531
+ LLM_TENSOR_ATTN_OUT,
1532
+ LLM_TENSOR_FFN_NORM,
1533
+ LLM_TENSOR_FFN_UP,
1534
+ LLM_TENSOR_FFN_DOWN,
1535
+ };
1536
+ case LLM_ARCH_GLM4:
1537
+ return {
1538
+ LLM_TENSOR_TOKEN_EMBD,
1539
+ LLM_TENSOR_ROPE_FREQS,
1540
+ LLM_TENSOR_OUTPUT_NORM,
1541
+ LLM_TENSOR_OUTPUT,
1542
+ LLM_TENSOR_ATTN_NORM,
1543
+ LLM_TENSOR_ATTN_Q,
1544
+ LLM_TENSOR_ATTN_K,
1545
+ LLM_TENSOR_ATTN_V,
1546
+ LLM_TENSOR_ATTN_OUT,
1547
+ LLM_TENSOR_FFN_NORM,
1548
+ LLM_TENSOR_FFN_UP,
1549
+ LLM_TENSOR_FFN_DOWN,
1550
+ LLM_TENSOR_ATTN_POST_NORM,
1551
+ LLM_TENSOR_FFN_POST_NORM,
1552
+ };
1553
+ case LLM_ARCH_GLM4_MOE:
1554
+ return {
1555
+ LLM_TENSOR_TOKEN_EMBD,
1556
+ LLM_TENSOR_OUTPUT_NORM,
1557
+ LLM_TENSOR_OUTPUT,
1558
+ LLM_TENSOR_ATTN_NORM,
1559
+ LLM_TENSOR_ATTN_POST_NORM,
1560
+ LLM_TENSOR_ATTN_Q,
1561
+ LLM_TENSOR_ATTN_K,
1562
+ LLM_TENSOR_ATTN_V,
1563
+ LLM_TENSOR_ATTN_OUT,
1564
+ LLM_TENSOR_ATTN_Q_NORM,
1565
+ LLM_TENSOR_ATTN_K_NORM,
1566
+ LLM_TENSOR_FFN_GATE,
1567
+ LLM_TENSOR_FFN_DOWN,
1568
+ LLM_TENSOR_FFN_UP,
1569
+ LLM_TENSOR_FFN_GATE_INP,
1570
+ LLM_TENSOR_FFN_GATE_EXPS,
1571
+ LLM_TENSOR_FFN_DOWN_EXPS,
1572
+ LLM_TENSOR_FFN_UP_EXPS,
1573
+ LLM_TENSOR_FFN_GATE_SHEXP,
1574
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1575
+ LLM_TENSOR_FFN_UP_SHEXP,
1576
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1577
+ LLM_TENSOR_NEXTN_EH_PROJ,
1578
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1579
+ LLM_TENSOR_NEXTN_ENORM,
1580
+ LLM_TENSOR_NEXTN_HNORM,
1581
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1582
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1583
+ };
1584
+ case LLM_ARCH_BITNET:
1585
+ return {
1586
+ LLM_TENSOR_TOKEN_EMBD,
1587
+ LLM_TENSOR_OUTPUT_NORM,
1588
+ LLM_TENSOR_ATTN_Q,
1589
+ LLM_TENSOR_ATTN_K,
1590
+ LLM_TENSOR_ATTN_V,
1591
+ LLM_TENSOR_ATTN_OUT,
1592
+ LLM_TENSOR_ATTN_NORM,
1593
+ LLM_TENSOR_ATTN_SUB_NORM,
1594
+ LLM_TENSOR_FFN_GATE,
1595
+ LLM_TENSOR_FFN_DOWN,
1596
+ LLM_TENSOR_FFN_UP,
1597
+ LLM_TENSOR_FFN_NORM,
1598
+ LLM_TENSOR_FFN_SUB_NORM,
1599
+ };
1600
+ case LLM_ARCH_T5:
1601
+ return {
1602
+ LLM_TENSOR_TOKEN_EMBD,
1603
+ LLM_TENSOR_OUTPUT,
1604
+ LLM_TENSOR_DEC_OUTPUT_NORM,
1605
+ LLM_TENSOR_DEC_ATTN_NORM,
1606
+ LLM_TENSOR_DEC_ATTN_Q,
1607
+ LLM_TENSOR_DEC_ATTN_K,
1608
+ LLM_TENSOR_DEC_ATTN_V,
1609
+ LLM_TENSOR_DEC_ATTN_OUT,
1610
+ LLM_TENSOR_DEC_ATTN_REL_B,
1611
+ LLM_TENSOR_DEC_CROSS_ATTN_NORM,
1612
+ LLM_TENSOR_DEC_CROSS_ATTN_Q,
1613
+ LLM_TENSOR_DEC_CROSS_ATTN_K,
1614
+ LLM_TENSOR_DEC_CROSS_ATTN_V,
1615
+ LLM_TENSOR_DEC_CROSS_ATTN_OUT,
1616
+ LLM_TENSOR_DEC_CROSS_ATTN_REL_B,
1617
+ LLM_TENSOR_DEC_FFN_NORM,
1618
+ LLM_TENSOR_DEC_FFN_GATE,
1619
+ LLM_TENSOR_DEC_FFN_DOWN,
1620
+ LLM_TENSOR_DEC_FFN_UP,
1621
+ LLM_TENSOR_ENC_OUTPUT_NORM,
1622
+ LLM_TENSOR_ENC_ATTN_NORM,
1623
+ LLM_TENSOR_ENC_ATTN_Q,
1624
+ LLM_TENSOR_ENC_ATTN_K,
1625
+ LLM_TENSOR_ENC_ATTN_V,
1626
+ LLM_TENSOR_ENC_ATTN_OUT,
1627
+ LLM_TENSOR_ENC_ATTN_REL_B,
1628
+ LLM_TENSOR_ENC_FFN_NORM,
1629
+ LLM_TENSOR_ENC_FFN_GATE,
1630
+ LLM_TENSOR_ENC_FFN_DOWN,
1631
+ LLM_TENSOR_ENC_FFN_UP,
1632
+ };
1633
+ case LLM_ARCH_T5ENCODER:
1634
+ return {
1635
+ LLM_TENSOR_TOKEN_EMBD,
1636
+ LLM_TENSOR_OUTPUT,
1637
+ LLM_TENSOR_ENC_OUTPUT_NORM,
1638
+ LLM_TENSOR_ENC_ATTN_NORM,
1639
+ LLM_TENSOR_ENC_ATTN_Q,
1640
+ LLM_TENSOR_ENC_ATTN_K,
1641
+ LLM_TENSOR_ENC_ATTN_V,
1642
+ LLM_TENSOR_ENC_ATTN_OUT,
1643
+ LLM_TENSOR_ENC_ATTN_REL_B,
1644
+ LLM_TENSOR_ENC_FFN_NORM,
1645
+ LLM_TENSOR_ENC_FFN_GATE,
1646
+ LLM_TENSOR_ENC_FFN_DOWN,
1647
+ LLM_TENSOR_ENC_FFN_UP,
1648
+ };
1649
+ case LLM_ARCH_JAIS:
1650
+ return {
1651
+ LLM_TENSOR_TOKEN_EMBD,
1652
+ LLM_TENSOR_OUTPUT_NORM,
1653
+ LLM_TENSOR_OUTPUT,
1654
+ LLM_TENSOR_ATTN_NORM,
1655
+ LLM_TENSOR_ATTN_QKV,
1656
+ LLM_TENSOR_ATTN_OUT,
1657
+ LLM_TENSOR_FFN_NORM,
1658
+ LLM_TENSOR_FFN_UP,
1659
+ LLM_TENSOR_FFN_GATE,
1660
+ LLM_TENSOR_FFN_DOWN,
1661
+ };
1662
+ case LLM_ARCH_NEMOTRON_H:
1663
+ return {
1664
+ LLM_TENSOR_TOKEN_EMBD,
1665
+ LLM_TENSOR_OUTPUT_NORM,
1666
+ LLM_TENSOR_OUTPUT,
1667
+ LLM_TENSOR_ATTN_NORM,
1668
+ LLM_TENSOR_SSM_IN,
1669
+ LLM_TENSOR_SSM_CONV1D,
1670
+ LLM_TENSOR_SSM_DT,
1671
+ LLM_TENSOR_SSM_A,
1672
+ LLM_TENSOR_SSM_D,
1673
+ LLM_TENSOR_SSM_NORM,
1674
+ LLM_TENSOR_SSM_OUT,
1675
+ LLM_TENSOR_ATTN_Q,
1676
+ LLM_TENSOR_ATTN_K,
1677
+ LLM_TENSOR_ATTN_V,
1678
+ LLM_TENSOR_ATTN_OUT,
1679
+ LLM_TENSOR_FFN_DOWN,
1680
+ LLM_TENSOR_FFN_UP,
1681
+ };
1682
+ case LLM_ARCH_NEMOTRON_H_MOE:
1683
+ return {
1684
+ LLM_TENSOR_TOKEN_EMBD,
1685
+ LLM_TENSOR_OUTPUT_NORM,
1686
+ LLM_TENSOR_OUTPUT,
1687
+ LLM_TENSOR_ATTN_NORM,
1688
+ // mamba(2) ssm layers
1689
+ LLM_TENSOR_SSM_IN,
1690
+ LLM_TENSOR_SSM_CONV1D,
1691
+ LLM_TENSOR_SSM_DT,
1692
+ LLM_TENSOR_SSM_A,
1693
+ LLM_TENSOR_SSM_D,
1694
+ LLM_TENSOR_SSM_NORM,
1695
+ LLM_TENSOR_SSM_OUT,
1696
+ // attention layers
1697
+ LLM_TENSOR_ATTN_Q,
1698
+ LLM_TENSOR_ATTN_K,
1699
+ LLM_TENSOR_ATTN_V,
1700
+ LLM_TENSOR_ATTN_OUT,
1701
+ // dense FFN
1702
+ LLM_TENSOR_FFN_DOWN,
1703
+ LLM_TENSOR_FFN_UP,
1704
+ // MoE FFN (for MoE layers)
1705
+ LLM_TENSOR_FFN_GATE_INP,
1706
+ LLM_TENSOR_FFN_UP_EXPS,
1707
+ LLM_TENSOR_FFN_DOWN_EXPS,
1708
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1709
+ // MoE shared expert layer
1710
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1711
+ LLM_TENSOR_FFN_UP_SHEXP,
1712
+ };
1713
+ case LLM_ARCH_EXAONE4:
1714
+ return {
1715
+ LLM_TENSOR_TOKEN_EMBD,
1716
+ LLM_TENSOR_OUTPUT_NORM,
1717
+ LLM_TENSOR_OUTPUT,
1718
+ LLM_TENSOR_ROPE_FREQS,
1719
+ LLM_TENSOR_ATTN_Q,
1720
+ LLM_TENSOR_ATTN_Q_NORM,
1721
+ LLM_TENSOR_ATTN_K,
1722
+ LLM_TENSOR_ATTN_K_NORM,
1723
+ LLM_TENSOR_ATTN_V,
1724
+ LLM_TENSOR_ATTN_OUT,
1725
+ LLM_TENSOR_ATTN_POST_NORM,
1726
+ LLM_TENSOR_FFN_GATE,
1727
+ LLM_TENSOR_FFN_DOWN,
1728
+ LLM_TENSOR_FFN_UP,
1729
+ LLM_TENSOR_FFN_POST_NORM,
1730
+ };
1731
+ case LLM_ARCH_RWKV6:
1732
+ return {
1733
+ LLM_TENSOR_TOKEN_EMBD,
1734
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1735
+ LLM_TENSOR_OUTPUT_NORM,
1736
+ LLM_TENSOR_OUTPUT,
1737
+ LLM_TENSOR_ATTN_NORM,
1738
+ LLM_TENSOR_ATTN_NORM_2,
1739
+ LLM_TENSOR_TIME_MIX_W1,
1740
+ LLM_TENSOR_TIME_MIX_W2,
1741
+ LLM_TENSOR_TIME_MIX_LERP_X,
1742
+ LLM_TENSOR_TIME_MIX_LERP_W,
1743
+ LLM_TENSOR_TIME_MIX_LERP_K,
1744
+ LLM_TENSOR_TIME_MIX_LERP_V,
1745
+ LLM_TENSOR_TIME_MIX_LERP_R,
1746
+ LLM_TENSOR_TIME_MIX_LERP_G,
1747
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1748
+ LLM_TENSOR_TIME_MIX_FIRST,
1749
+ LLM_TENSOR_TIME_MIX_DECAY,
1750
+ LLM_TENSOR_TIME_MIX_DECAY_W1,
1751
+ LLM_TENSOR_TIME_MIX_DECAY_W2,
1752
+ LLM_TENSOR_TIME_MIX_KEY,
1753
+ LLM_TENSOR_TIME_MIX_VALUE,
1754
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1755
+ LLM_TENSOR_TIME_MIX_GATE,
1756
+ LLM_TENSOR_TIME_MIX_LN,
1757
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1758
+ LLM_TENSOR_CHANNEL_MIX_LERP_K,
1759
+ LLM_TENSOR_CHANNEL_MIX_LERP_R,
1760
+ LLM_TENSOR_CHANNEL_MIX_KEY,
1761
+ LLM_TENSOR_CHANNEL_MIX_VALUE,
1762
+ LLM_TENSOR_CHANNEL_MIX_RECEPTANCE,
1763
+ };
1764
+ case LLM_ARCH_RWKV6QWEN2:
1765
+ return {
1766
+ LLM_TENSOR_TOKEN_EMBD,
1767
+ LLM_TENSOR_OUTPUT_NORM,
1768
+ LLM_TENSOR_OUTPUT,
1769
+ LLM_TENSOR_ATTN_NORM,
1770
+ LLM_TENSOR_TIME_MIX_W1,
1771
+ LLM_TENSOR_TIME_MIX_W2,
1772
+ LLM_TENSOR_TIME_MIX_LERP_X,
1773
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1774
+ LLM_TENSOR_TIME_MIX_FIRST,
1775
+ LLM_TENSOR_TIME_MIX_DECAY,
1776
+ LLM_TENSOR_TIME_MIX_DECAY_W1,
1777
+ LLM_TENSOR_TIME_MIX_DECAY_W2,
1778
+ LLM_TENSOR_TIME_MIX_KEY,
1779
+ LLM_TENSOR_TIME_MIX_VALUE,
1780
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1781
+ LLM_TENSOR_TIME_MIX_GATE,
1782
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1783
+ LLM_TENSOR_FFN_NORM,
1784
+ LLM_TENSOR_FFN_GATE,
1785
+ LLM_TENSOR_FFN_DOWN,
1786
+ LLM_TENSOR_FFN_UP,
1787
+ };
1788
+ case LLM_ARCH_RWKV7:
1789
+ return {
1790
+ LLM_TENSOR_TOKEN_EMBD,
1791
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1792
+ LLM_TENSOR_OUTPUT_NORM,
1793
+ LLM_TENSOR_OUTPUT,
1794
+ LLM_TENSOR_ATTN_NORM,
1795
+ LLM_TENSOR_ATTN_NORM_2,
1796
+ LLM_TENSOR_TIME_MIX_W0,
1797
+ LLM_TENSOR_TIME_MIX_W1,
1798
+ LLM_TENSOR_TIME_MIX_W2,
1799
+ LLM_TENSOR_TIME_MIX_A0,
1800
+ LLM_TENSOR_TIME_MIX_A1,
1801
+ LLM_TENSOR_TIME_MIX_A2,
1802
+ LLM_TENSOR_TIME_MIX_V0,
1803
+ LLM_TENSOR_TIME_MIX_V1,
1804
+ LLM_TENSOR_TIME_MIX_V2,
1805
+ LLM_TENSOR_TIME_MIX_G1,
1806
+ LLM_TENSOR_TIME_MIX_G2,
1807
+ LLM_TENSOR_TIME_MIX_K_K,
1808
+ LLM_TENSOR_TIME_MIX_K_A,
1809
+ LLM_TENSOR_TIME_MIX_R_K,
1810
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1811
+ LLM_TENSOR_TIME_MIX_KEY,
1812
+ LLM_TENSOR_TIME_MIX_VALUE,
1813
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1814
+ LLM_TENSOR_TIME_MIX_LN,
1815
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1816
+ LLM_TENSOR_CHANNEL_MIX_LERP_K,
1817
+ LLM_TENSOR_CHANNEL_MIX_KEY,
1818
+ LLM_TENSOR_CHANNEL_MIX_VALUE,
1819
+ };
1820
+ case LLM_ARCH_ARWKV7:
1821
+ return {
1822
+ LLM_TENSOR_TOKEN_EMBD,
1823
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1824
+ LLM_TENSOR_OUTPUT_NORM,
1825
+ LLM_TENSOR_OUTPUT,
1826
+ LLM_TENSOR_ATTN_NORM,
1827
+ LLM_TENSOR_TIME_MIX_W0,
1828
+ LLM_TENSOR_TIME_MIX_W1,
1829
+ LLM_TENSOR_TIME_MIX_W2,
1830
+ LLM_TENSOR_TIME_MIX_A0,
1831
+ LLM_TENSOR_TIME_MIX_A1,
1832
+ LLM_TENSOR_TIME_MIX_A2,
1833
+ LLM_TENSOR_TIME_MIX_V0,
1834
+ LLM_TENSOR_TIME_MIX_V1,
1835
+ LLM_TENSOR_TIME_MIX_V2,
1836
+ LLM_TENSOR_TIME_MIX_G1,
1837
+ LLM_TENSOR_TIME_MIX_G2,
1838
+ LLM_TENSOR_TIME_MIX_K_K,
1839
+ LLM_TENSOR_TIME_MIX_K_A,
1840
+ LLM_TENSOR_TIME_MIX_R_K,
1841
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1842
+ LLM_TENSOR_TIME_MIX_KEY,
1843
+ LLM_TENSOR_TIME_MIX_VALUE,
1844
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1845
+ LLM_TENSOR_TIME_MIX_LN,
1846
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1847
+ LLM_TENSOR_FFN_NORM,
1848
+ LLM_TENSOR_FFN_GATE,
1849
+ LLM_TENSOR_FFN_DOWN,
1850
+ LLM_TENSOR_FFN_UP,
1851
+ };
1852
+ case LLM_ARCH_GRANITE_MOE:
1853
+ return {
1854
+ LLM_TENSOR_TOKEN_EMBD,
1855
+ LLM_TENSOR_OUTPUT_NORM,
1856
+ LLM_TENSOR_OUTPUT,
1857
+ LLM_TENSOR_ATTN_NORM,
1858
+ LLM_TENSOR_ATTN_Q,
1859
+ LLM_TENSOR_ATTN_K,
1860
+ LLM_TENSOR_ATTN_V,
1861
+ LLM_TENSOR_ATTN_OUT,
1862
+ LLM_TENSOR_FFN_NORM,
1863
+ LLM_TENSOR_FFN_GATE_INP,
1864
+ LLM_TENSOR_FFN_GATE_EXPS,
1865
+ LLM_TENSOR_FFN_DOWN_EXPS,
1866
+ LLM_TENSOR_FFN_UP_EXPS,
1867
+ LLM_TENSOR_FFN_GATE_SHEXP,
1868
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1869
+ LLM_TENSOR_FFN_UP_SHEXP,
1870
+ };
1871
+ case LLM_ARCH_GRANITE_HYBRID:
1872
+ return {
1873
+ LLM_TENSOR_TOKEN_EMBD,
1874
+ LLM_TENSOR_OUTPUT_NORM,
1875
+ LLM_TENSOR_OUTPUT,
1876
+ LLM_TENSOR_ATTN_NORM,
1877
+ LLM_TENSOR_SSM_IN,
1878
+ LLM_TENSOR_SSM_CONV1D,
1879
+ LLM_TENSOR_SSM_DT,
1880
+ LLM_TENSOR_SSM_A,
1881
+ LLM_TENSOR_SSM_D,
1882
+ LLM_TENSOR_SSM_NORM,
1883
+ LLM_TENSOR_SSM_OUT,
1884
+ LLM_TENSOR_ATTN_Q,
1885
+ LLM_TENSOR_ATTN_K,
1886
+ LLM_TENSOR_ATTN_V,
1887
+ LLM_TENSOR_ATTN_OUT,
1888
+ LLM_TENSOR_FFN_NORM,
1889
+ LLM_TENSOR_FFN_GATE,
1890
+ LLM_TENSOR_FFN_DOWN,
1891
+ LLM_TENSOR_FFN_UP,
1892
+ LLM_TENSOR_FFN_NORM,
1893
+ LLM_TENSOR_FFN_GATE_INP,
1894
+ LLM_TENSOR_FFN_GATE_EXPS,
1895
+ LLM_TENSOR_FFN_DOWN_EXPS,
1896
+ LLM_TENSOR_FFN_UP_EXPS,
1897
+ LLM_TENSOR_FFN_GATE_SHEXP,
1898
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1899
+ LLM_TENSOR_FFN_UP_SHEXP,
1900
+ };
1901
+ case LLM_ARCH_WAVTOKENIZER_DEC:
1902
+ return {
1903
+ LLM_TENSOR_TOKEN_EMBD,
1904
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1905
+ LLM_TENSOR_CONV1D,
1906
+ LLM_TENSOR_CONVNEXT_DW,
1907
+ LLM_TENSOR_CONVNEXT_NORM,
1908
+ LLM_TENSOR_CONVNEXT_PW1,
1909
+ LLM_TENSOR_CONVNEXT_PW2,
1910
+ LLM_TENSOR_CONVNEXT_GAMMA,
1911
+ LLM_TENSOR_OUTPUT_NORM,
1912
+ LLM_TENSOR_OUTPUT,
1913
+ LLM_TENSOR_POS_NET_CONV1,
1914
+ LLM_TENSOR_POS_NET_CONV2,
1915
+ LLM_TENSOR_POS_NET_NORM,
1916
+ LLM_TENSOR_POS_NET_NORM1,
1917
+ LLM_TENSOR_POS_NET_NORM2,
1918
+ LLM_TENSOR_POS_NET_ATTN_NORM,
1919
+ LLM_TENSOR_POS_NET_ATTN_Q,
1920
+ LLM_TENSOR_POS_NET_ATTN_K,
1921
+ LLM_TENSOR_POS_NET_ATTN_V,
1922
+ LLM_TENSOR_POS_NET_ATTN_OUT,
1923
+ };
1924
+ case LLM_ARCH_BAILINGMOE:
1925
+ return {
1926
+ LLM_TENSOR_TOKEN_EMBD,
1927
+ LLM_TENSOR_OUTPUT_NORM,
1928
+ LLM_TENSOR_OUTPUT,
1929
+ LLM_TENSOR_ROPE_FREQS,
1930
+ LLM_TENSOR_ATTN_NORM,
1931
+ LLM_TENSOR_ATTN_Q,
1932
+ LLM_TENSOR_ATTN_K,
1933
+ LLM_TENSOR_ATTN_V,
1934
+ LLM_TENSOR_ATTN_OUT,
1935
+ LLM_TENSOR_FFN_GATE_INP,
1936
+ LLM_TENSOR_FFN_NORM,
1937
+ LLM_TENSOR_FFN_GATE_EXPS,
1938
+ LLM_TENSOR_FFN_DOWN_EXPS,
1939
+ LLM_TENSOR_FFN_UP_EXPS,
1940
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1941
+ LLM_TENSOR_FFN_GATE_SHEXP,
1942
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1943
+ LLM_TENSOR_FFN_UP_SHEXP,
1944
+ };
1945
+ case LLM_ARCH_BAILINGMOE2:
1946
+ return {
1947
+ LLM_TENSOR_TOKEN_EMBD,
1948
+ LLM_TENSOR_OUTPUT_NORM,
1949
+ LLM_TENSOR_OUTPUT,
1950
+ LLM_TENSOR_ATTN_NORM,
1951
+ LLM_TENSOR_ATTN_Q_NORM,
1952
+ LLM_TENSOR_ATTN_K_NORM,
1953
+ LLM_TENSOR_ATTN_QKV,
1954
+ LLM_TENSOR_ATTN_OUT,
1955
+ LLM_TENSOR_FFN_GATE_INP,
1956
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1957
+ LLM_TENSOR_FFN_NORM,
1958
+ LLM_TENSOR_FFN_GATE,
1959
+ LLM_TENSOR_FFN_DOWN,
1960
+ LLM_TENSOR_FFN_UP,
1961
+ LLM_TENSOR_FFN_GATE_EXPS,
1962
+ LLM_TENSOR_FFN_DOWN_EXPS,
1963
+ LLM_TENSOR_FFN_UP_EXPS,
1964
+ LLM_TENSOR_FFN_GATE_SHEXP,
1965
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1966
+ LLM_TENSOR_FFN_UP_SHEXP,
1967
+ LLM_TENSOR_NEXTN_EH_PROJ,
1968
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1969
+ LLM_TENSOR_NEXTN_ENORM,
1970
+ LLM_TENSOR_NEXTN_HNORM,
1971
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1972
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1973
+ LLM_TENSOR_LAYER_OUT_NORM,
1974
+ };
1975
+ case LLM_ARCH_DOTS1:
1976
+ return {
1977
+ LLM_TENSOR_TOKEN_EMBD,
1978
+ LLM_TENSOR_OUTPUT_NORM,
1979
+ LLM_TENSOR_OUTPUT,
1980
+ LLM_TENSOR_ATTN_NORM,
1981
+ LLM_TENSOR_ATTN_Q,
1982
+ LLM_TENSOR_ATTN_Q_NORM,
1983
+ LLM_TENSOR_ATTN_K,
1984
+ LLM_TENSOR_ATTN_K_NORM,
1985
+ LLM_TENSOR_ATTN_V,
1986
+ LLM_TENSOR_ATTN_OUT,
1987
+ LLM_TENSOR_FFN_NORM,
1988
+ LLM_TENSOR_FFN_GATE,
1989
+ LLM_TENSOR_FFN_UP,
1990
+ LLM_TENSOR_FFN_DOWN,
1991
+ LLM_TENSOR_FFN_GATE_INP,
1992
+ LLM_TENSOR_FFN_GATE_EXPS,
1993
+ LLM_TENSOR_FFN_DOWN_EXPS,
1994
+ LLM_TENSOR_FFN_UP_EXPS,
1995
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1996
+ LLM_TENSOR_FFN_GATE_SHEXP,
1997
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1998
+ LLM_TENSOR_FFN_UP_SHEXP,
1999
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2000
+ };
2001
+ case LLM_ARCH_ERNIE4_5_MOE:
2002
+ return {
2003
+ LLM_TENSOR_TOKEN_EMBD,
2004
+ LLM_TENSOR_OUTPUT_NORM,
2005
+ LLM_TENSOR_OUTPUT,
2006
+ LLM_TENSOR_ATTN_NORM,
2007
+ LLM_TENSOR_ATTN_Q,
2008
+ LLM_TENSOR_ATTN_K,
2009
+ LLM_TENSOR_ATTN_V,
2010
+ LLM_TENSOR_ATTN_OUT,
2011
+ LLM_TENSOR_FFN_NORM,
2012
+ LLM_TENSOR_FFN_GATE,
2013
+ LLM_TENSOR_FFN_DOWN,
2014
+ LLM_TENSOR_FFN_UP,
2015
+ LLM_TENSOR_FFN_GATE_INP,
2016
+ LLM_TENSOR_FFN_GATE_SHEXP,
2017
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2018
+ LLM_TENSOR_FFN_UP_SHEXP,
2019
+ LLM_TENSOR_FFN_GATE_EXPS,
2020
+ LLM_TENSOR_FFN_DOWN_EXPS,
2021
+ LLM_TENSOR_FFN_UP_EXPS,
2022
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2023
+ };
2024
+ case LLM_ARCH_HUNYUAN_MOE:
2025
+ return {
2026
+ LLM_TENSOR_TOKEN_EMBD,
2027
+ LLM_TENSOR_OUTPUT_NORM,
2028
+ LLM_TENSOR_OUTPUT,
2029
+ LLM_TENSOR_ATTN_NORM,
2030
+ LLM_TENSOR_ATTN_Q,
2031
+ LLM_TENSOR_ATTN_Q_NORM,
2032
+ LLM_TENSOR_ATTN_K,
2033
+ LLM_TENSOR_ATTN_K_NORM,
2034
+ LLM_TENSOR_ATTN_V,
2035
+ LLM_TENSOR_ATTN_OUT,
2036
+ LLM_TENSOR_FFN_GATE_INP,
2037
+ LLM_TENSOR_FFN_NORM,
2038
+ LLM_TENSOR_FFN_GATE_SHEXP,
2039
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2040
+ LLM_TENSOR_FFN_UP_SHEXP,
2041
+ LLM_TENSOR_FFN_GATE_EXPS,
2042
+ LLM_TENSOR_FFN_DOWN_EXPS,
2043
+ LLM_TENSOR_FFN_UP_EXPS,
2044
+ };
2045
+ case LLM_ARCH_OPENAI_MOE:
2046
+ return {
2047
+ LLM_TENSOR_TOKEN_EMBD,
2048
+ LLM_TENSOR_OUTPUT_NORM,
2049
+ LLM_TENSOR_OUTPUT,
2050
+ LLM_TENSOR_ATTN_NORM,
2051
+ LLM_TENSOR_ATTN_POST_NORM,
2052
+ LLM_TENSOR_ATTN_Q,
2053
+ LLM_TENSOR_ATTN_K,
2054
+ LLM_TENSOR_ATTN_V,
2055
+ LLM_TENSOR_ATTN_OUT,
2056
+ LLM_TENSOR_ATTN_SINKS,
2057
+ LLM_TENSOR_FFN_GATE_INP,
2058
+ LLM_TENSOR_FFN_GATE_EXPS,
2059
+ LLM_TENSOR_FFN_DOWN_EXPS,
2060
+ LLM_TENSOR_FFN_UP_EXPS,
2061
+ };
2062
+ case LLM_ARCH_LFM2:
2063
+ return {
2064
+ LLM_TENSOR_ATTN_NORM,
2065
+ LLM_TENSOR_ATTN_Q,
2066
+ LLM_TENSOR_ATTN_K,
2067
+ LLM_TENSOR_ATTN_V,
2068
+ LLM_TENSOR_ATTN_OUT,
2069
+ LLM_TENSOR_ATTN_K_NORM,
2070
+ LLM_TENSOR_ATTN_Q_NORM,
2071
+ LLM_TENSOR_FFN_DOWN,
2072
+ LLM_TENSOR_FFN_GATE,
2073
+ LLM_TENSOR_FFN_NORM,
2074
+ LLM_TENSOR_FFN_UP,
2075
+ LLM_TENSOR_SHORTCONV_CONV,
2076
+ LLM_TENSOR_SHORTCONV_INPROJ,
2077
+ LLM_TENSOR_SHORTCONV_OUTPROJ,
2078
+ LLM_TENSOR_TOKEN_EMBD,
2079
+ LLM_TENSOR_OUTPUT_NORM_LFM2,
2080
+ LLM_TENSOR_OUTPUT,
2081
+ LLM_TENSOR_DENSE_2_OUT,
2082
+ };
2083
+ case LLM_ARCH_LFM2MOE:
2084
+ return {
2085
+ LLM_TENSOR_ATTN_NORM,
2086
+ LLM_TENSOR_ATTN_Q,
2087
+ LLM_TENSOR_ATTN_K,
2088
+ LLM_TENSOR_ATTN_V,
2089
+ LLM_TENSOR_ATTN_OUT,
2090
+ LLM_TENSOR_ATTN_K_NORM,
2091
+ LLM_TENSOR_ATTN_Q_NORM,
2092
+ LLM_TENSOR_FFN_DOWN,
2093
+ LLM_TENSOR_FFN_GATE,
2094
+ LLM_TENSOR_FFN_NORM,
2095
+ LLM_TENSOR_FFN_UP,
2096
+ LLM_TENSOR_SHORTCONV_CONV,
2097
+ LLM_TENSOR_SHORTCONV_INPROJ,
2098
+ LLM_TENSOR_SHORTCONV_OUTPROJ,
2099
+ LLM_TENSOR_TOKEN_EMBD,
2100
+ LLM_TENSOR_OUTPUT_NORM_LFM2,
2101
+ LLM_TENSOR_FFN_GATE_INP,
2102
+ LLM_TENSOR_FFN_GATE_EXPS,
2103
+ LLM_TENSOR_FFN_DOWN_EXPS,
2104
+ LLM_TENSOR_FFN_UP_EXPS,
2105
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2106
+ };
2107
+ case LLM_ARCH_SMALLTHINKER:
2108
+ return {
2109
+ LLM_TENSOR_TOKEN_EMBD,
2110
+ LLM_TENSOR_OUTPUT_NORM,
2111
+ LLM_TENSOR_OUTPUT,
2112
+ LLM_TENSOR_ATTN_NORM,
2113
+ LLM_TENSOR_ATTN_Q,
2114
+ LLM_TENSOR_ATTN_K,
2115
+ LLM_TENSOR_ATTN_V,
2116
+ LLM_TENSOR_ATTN_OUT,
2117
+ LLM_TENSOR_FFN_NORM,
2118
+ LLM_TENSOR_FFN_GATE,
2119
+ LLM_TENSOR_FFN_DOWN,
2120
+ LLM_TENSOR_FFN_UP,
2121
+ LLM_TENSOR_FFN_GATE_INP,
2122
+ LLM_TENSOR_FFN_GATE_EXPS,
2123
+ LLM_TENSOR_FFN_DOWN_EXPS,
2124
+ LLM_TENSOR_FFN_UP_EXPS,
2125
+ };
2126
+ case LLM_ARCH_APERTUS:
2127
+ return {
2128
+ LLM_TENSOR_TOKEN_EMBD,
2129
+ LLM_TENSOR_OUTPUT_NORM,
2130
+ LLM_TENSOR_OUTPUT,
2131
+ LLM_TENSOR_ROPE_FREQS,
2132
+ LLM_TENSOR_ATTN_NORM,
2133
+ LLM_TENSOR_ATTN_Q,
2134
+ LLM_TENSOR_ATTN_K,
2135
+ LLM_TENSOR_ATTN_V,
2136
+ LLM_TENSOR_ATTN_OUT,
2137
+ LLM_TENSOR_ATTN_Q_NORM,
2138
+ LLM_TENSOR_ATTN_K_NORM,
2139
+ LLM_TENSOR_FFN_NORM,
2140
+ LLM_TENSOR_FFN_DOWN,
2141
+ LLM_TENSOR_FFN_UP,
2142
+ };
2143
+ case LLM_ARCH_SEED_OSS:
2144
+ return {
2145
+ LLM_TENSOR_TOKEN_EMBD,
2146
+ LLM_TENSOR_OUTPUT_NORM,
2147
+ LLM_TENSOR_OUTPUT,
2148
+ LLM_TENSOR_ATTN_NORM,
2149
+ LLM_TENSOR_ATTN_Q,
2150
+ LLM_TENSOR_ATTN_K,
2151
+ LLM_TENSOR_ATTN_V,
2152
+ LLM_TENSOR_ATTN_OUT,
2153
+ LLM_TENSOR_ATTN_POST_NORM,
2154
+ LLM_TENSOR_FFN_GATE,
2155
+ LLM_TENSOR_FFN_DOWN,
2156
+ LLM_TENSOR_FFN_UP,
2157
+ };
2158
+ case LLM_ARCH_GROVEMOE:
2159
+ return {
2160
+ LLM_TENSOR_TOKEN_EMBD,
2161
+ LLM_TENSOR_OUTPUT_NORM,
2162
+ LLM_TENSOR_OUTPUT,
2163
+ LLM_TENSOR_ATTN_NORM,
2164
+ LLM_TENSOR_ATTN_Q,
2165
+ LLM_TENSOR_ATTN_Q_NORM,
2166
+ LLM_TENSOR_ATTN_K,
2167
+ LLM_TENSOR_ATTN_K_NORM,
2168
+ LLM_TENSOR_ATTN_V,
2169
+ LLM_TENSOR_ATTN_OUT,
2170
+ LLM_TENSOR_FFN_NORM,
2171
+ LLM_TENSOR_FFN_GATE_INP,
2172
+ LLM_TENSOR_FFN_GATE_EXPS,
2173
+ LLM_TENSOR_FFN_DOWN_EXPS,
2174
+ LLM_TENSOR_FFN_UP_EXPS,
2175
+ LLM_TENSOR_FFN_GATE_CHEXPS,
2176
+ LLM_TENSOR_FFN_DOWN_CHEXPS,
2177
+ LLM_TENSOR_FFN_UP_CHEXPS,
2178
+ };
2179
+ case LLM_ARCH_MINIMAX_M2:
2180
+ return {
2181
+ LLM_TENSOR_TOKEN_EMBD,
2182
+ LLM_TENSOR_OUTPUT_NORM,
2183
+ LLM_TENSOR_OUTPUT,
2184
+ LLM_TENSOR_ATTN_NORM,
2185
+ LLM_TENSOR_ATTN_Q,
2186
+ LLM_TENSOR_ATTN_K,
2187
+ LLM_TENSOR_ATTN_V,
2188
+ LLM_TENSOR_ATTN_OUT,
2189
+ LLM_TENSOR_ATTN_Q_NORM,
2190
+ LLM_TENSOR_ATTN_K_NORM,
2191
+ LLM_TENSOR_FFN_NORM,
2192
+ LLM_TENSOR_FFN_GATE_INP,
2193
+ LLM_TENSOR_FFN_GATE_EXPS,
2194
+ LLM_TENSOR_FFN_DOWN_EXPS,
2195
+ LLM_TENSOR_FFN_UP_EXPS,
2196
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2197
+ };
2198
+ case LLM_ARCH_COGVLM:
2199
+ return {
2200
+ LLM_TENSOR_TOKEN_EMBD,
2201
+ LLM_TENSOR_OUTPUT_NORM,
2202
+ LLM_TENSOR_OUTPUT,
2203
+ LLM_TENSOR_ATTN_NORM,
2204
+ LLM_TENSOR_ATTN_QKV,
2205
+ LLM_TENSOR_ATTN_OUT,
2206
+ LLM_TENSOR_FFN_NORM,
2207
+ LLM_TENSOR_FFN_GATE,
2208
+ LLM_TENSOR_FFN_DOWN,
2209
+ LLM_TENSOR_FFN_UP,
2210
+ LLM_TENSOR_VISEXP_ATTN_QKV,
2211
+ LLM_TENSOR_VISEXP_ATTN_OUT,
2212
+ LLM_TENSOR_VISEXP_FFN_GATE,
2213
+ LLM_TENSOR_VISEXP_FFN_DOWN,
2214
+ LLM_TENSOR_VISEXP_FFN_UP,
2215
+ };
2216
+ case LLM_ARCH_MIMO2:
2217
+ return {
2218
+ LLM_TENSOR_TOKEN_EMBD,
2219
+ LLM_TENSOR_OUTPUT_NORM,
2220
+ LLM_TENSOR_OUTPUT,
2221
+ LLM_TENSOR_ATTN_NORM,
2222
+ LLM_TENSOR_ATTN_Q,
2223
+ LLM_TENSOR_ATTN_K,
2224
+ LLM_TENSOR_ATTN_V,
2225
+ LLM_TENSOR_ATTN_SINKS,
2226
+ LLM_TENSOR_ATTN_OUT,
2227
+ LLM_TENSOR_FFN_NORM,
2228
+ LLM_TENSOR_FFN_GATE,
2229
+ LLM_TENSOR_FFN_DOWN,
2230
+ LLM_TENSOR_FFN_UP,
2231
+ LLM_TENSOR_FFN_GATE_INP,
2232
+ LLM_TENSOR_FFN_GATE_EXPS,
2233
+ LLM_TENSOR_FFN_DOWN_EXPS,
2234
+ LLM_TENSOR_FFN_UP_EXPS,
2235
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2236
+ };
2237
+ case LLM_ARCH_GPTJ:
2238
+ case LLM_ARCH_UNKNOWN:
2239
+ return {
2240
+ LLM_TENSOR_TOKEN_EMBD,
2241
+ };
2242
+ case LLM_ARCH_MAINCODER:
2243
+ return {
2244
+ LLM_TENSOR_TOKEN_EMBD,
2245
+ LLM_TENSOR_OUTPUT_NORM,
2246
+ LLM_TENSOR_OUTPUT,
2247
+ LLM_TENSOR_ATTN_NORM,
2248
+ LLM_TENSOR_ATTN_Q,
2249
+ LLM_TENSOR_ATTN_Q_NORM,
2250
+ LLM_TENSOR_ATTN_K,
2251
+ LLM_TENSOR_ATTN_K_NORM,
2252
+ LLM_TENSOR_ATTN_V,
2253
+ LLM_TENSOR_ATTN_OUT,
2254
+ LLM_TENSOR_FFN_NORM,
2255
+ LLM_TENSOR_FFN_GATE,
2256
+ LLM_TENSOR_FFN_DOWN,
2257
+ LLM_TENSOR_FFN_UP,
2258
+ };
2259
+ default:
2260
+ GGML_ABORT("unknown architecture for tensor mapping");
2261
+ }
2262
+ }
2263
+
2264
+ // declare information about the model weight tensors:
2265
+ // - the layer in which the tensor is going to be used. this is needed in order to assign the correct buffer type for the weight
2266
+ // - the operator which is going to use the weight. this is needed to determine if the respective backend supports the operator
2267
+ //
2268
+ // for example, input layers are usually assigned to CPU/host buffer types
2269
+ //
2270
+ // a mismatch between the declared information and the actual layer/op in which the tensor is used can lead to sub-optimal
2271
+ // assignment of the buffer types and extra overhead during computation
2272
+ // example: https://github.com/ggml-org/llama.cpp/pull/17548
2273
+ //
2224
2274
  static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2225
2275
  {LLM_TENSOR_TOKEN_EMBD, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2226
2276
  {LLM_TENSOR_POS_EMBD, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2227
- {LLM_TENSOR_TOKEN_EMBD_NORM, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2228
2277
  {LLM_TENSOR_TOKEN_TYPES, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2278
+ {LLM_TENSOR_TOKEN_EMBD_NORM, {LLM_TENSOR_LAYER_INPUT, GGML_OP_MUL}},
2229
2279
  {LLM_TENSOR_OUTPUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2230
2280
  {LLM_TENSOR_CLS, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2231
2281
  {LLM_TENSOR_CLS_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2282
+ {LLM_TENSOR_DENSE_2_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}}, // Dense layer output
2283
+ {LLM_TENSOR_DENSE_3_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}}, // Dense layer output
2232
2284
  {LLM_TENSOR_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2285
+ {LLM_TENSOR_OUTPUT_NORM_LFM2, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2233
2286
  {LLM_TENSOR_DEC_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2234
2287
  {LLM_TENSOR_ENC_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2235
2288
  {LLM_TENSOR_ROPE_FREQS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ROPE}},
@@ -2240,6 +2293,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2240
2293
  {LLM_TENSOR_ATTN_V, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2241
2294
  {LLM_TENSOR_ATTN_QKV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2242
2295
  {LLM_TENSOR_ATTN_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2296
+ {LLM_TENSOR_ATTN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2243
2297
  {LLM_TENSOR_FFN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2244
2298
  {LLM_TENSOR_FFN_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2245
2299
  {LLM_TENSOR_FFN_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -2277,6 +2331,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2277
2331
  {LLM_TENSOR_SSM_X, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2278
2332
  {LLM_TENSOR_SSM_DT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2279
2333
  {LLM_TENSOR_SSM_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2334
+ {LLM_TENSOR_SSM_BETA_ALPHA, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2280
2335
  {LLM_TENSOR_TIME_MIX_W1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2281
2336
  {LLM_TENSOR_TIME_MIX_W2, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2282
2337
  {LLM_TENSOR_TIME_MIX_A1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -2298,6 +2353,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2298
2353
  {LLM_TENSOR_FFN_ACT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_DIV}},
2299
2354
  {LLM_TENSOR_SSM_CONV1D, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
2300
2355
  {LLM_TENSOR_SSM_A, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_SCAN}},
2356
+ {LLM_TENSOR_SSM_A_NOSCAN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}}, // a version of SSM_A used for MUL instead of SSM_SCAN
2301
2357
  {LLM_TENSOR_SSM_DT_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2302
2358
  {LLM_TENSOR_SSM_B_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2303
2359
  {LLM_TENSOR_SSM_C_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
@@ -2387,6 +2443,11 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
2387
2443
  {LLM_TENSOR_SHORTCONV_CONV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
2388
2444
  {LLM_TENSOR_SHORTCONV_INPROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2389
2445
  {LLM_TENSOR_SHORTCONV_OUTPROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2446
+ {LLM_TENSOR_VISEXP_ATTN_QKV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2447
+ {LLM_TENSOR_VISEXP_ATTN_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2448
+ {LLM_TENSOR_VISEXP_FFN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2449
+ {LLM_TENSOR_VISEXP_FFN_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2450
+ {LLM_TENSOR_VISEXP_FFN_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2390
2451
  // NextN/MTP tensors are currently ignored (reserved for future MTP support)
2391
2452
  // These tensors only exist in the last layer(s) and are treated as output tensors
2392
2453
  {LLM_TENSOR_NEXTN_EH_PROJ, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
@@ -2410,13 +2471,20 @@ std::string LLM_KV::operator()(llm_kv kv) const {
2410
2471
  return name;
2411
2472
  }
2412
2473
 
2474
+ LLM_TN_IMPL::LLM_TN_IMPL(llm_arch arch, llm_tensor tensor, const char * suffix, int bid, int xid)
2475
+ : arch(arch), tensor(tensor), suffix(suffix), bid(bid), xid(xid),
2476
+ model_tensors(llm_get_tensor_names(arch)) {}
2477
+
2413
2478
  std::string LLM_TN_IMPL::str() const {
2414
- if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) {
2415
- return "__missing__";
2479
+ if (LLM_TENSOR_NAMES.find(tensor) == LLM_TENSOR_NAMES.end()) {
2480
+ GGML_ABORT("unknown tensor name for tensor id %d", static_cast<int>(tensor));
2416
2481
  }
2417
2482
 
2418
- std::string name = ::format(LLM_TENSOR_NAMES.at(arch).at(tensor), bid, xid);
2483
+ if (model_tensors.find(tensor) == model_tensors.end()) {
2484
+ return LLM_TENSOR_NAMES.at(tensor);
2485
+ }
2419
2486
 
2487
+ std::string name = ::format(LLM_TENSOR_NAMES.at(tensor), bid, xid);
2420
2488
  if (suffix != nullptr) {
2421
2489
  name += ".";
2422
2490
  name += suffix;
@@ -2468,7 +2536,10 @@ bool llm_arch_is_hybrid(const llm_arch & arch) {
2468
2536
  case LLM_ARCH_PLAMO2:
2469
2537
  case LLM_ARCH_GRANITE_HYBRID:
2470
2538
  case LLM_ARCH_LFM2:
2539
+ case LLM_ARCH_LFM2MOE:
2471
2540
  case LLM_ARCH_NEMOTRON_H:
2541
+ case LLM_ARCH_NEMOTRON_H_MOE:
2542
+ case LLM_ARCH_QWEN3NEXT:
2472
2543
  return true;
2473
2544
  default:
2474
2545
  return false;
@@ -2480,6 +2551,7 @@ bool llm_arch_is_diffusion(const llm_arch & arch) {
2480
2551
  case LLM_ARCH_DREAM:
2481
2552
  case LLM_ARCH_LLADA:
2482
2553
  case LLM_ARCH_LLADA_MOE:
2554
+ case LLM_ARCH_RND1:
2483
2555
  return true;
2484
2556
  default:
2485
2557
  return false;