whispercpp 1.3.3 → 1.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (963)
  1. checksums.yaml +4 -4
  2. data/README.md +60 -43
  3. data/ext/extconf.rb +2 -2
  4. data/ext/ruby_whisper.c +14 -2
  5. data/ext/ruby_whisper.h +39 -0
  6. data/ext/ruby_whisper_context.c +22 -22
  7. data/ext/ruby_whisper_model.c +12 -12
  8. data/ext/ruby_whisper_params.c +79 -25
  9. data/ext/ruby_whisper_segment.c +84 -19
  10. data/ext/ruby_whisper_token.c +351 -0
  11. data/ext/ruby_whisper_transcribe.cpp +1 -1
  12. data/ext/ruby_whisper_vad_context.c +75 -0
  13. data/ext/ruby_whisper_vad_context_detect.cpp +50 -0
  14. data/ext/ruby_whisper_vad_segment.c +139 -0
  15. data/ext/ruby_whisper_vad_segments.c +106 -0
  16. data/ext/sources/CMakeLists.txt +4 -1
  17. data/ext/sources/bindings/javascript/package.json +1 -1
  18. data/ext/sources/cmake/arm64-apple-clang.cmake +16 -0
  19. data/ext/sources/cmake/arm64-windows-llvm.cmake +16 -0
  20. data/ext/sources/cmake/riscv64-spacemit-linux-gnu-gcc.cmake +29 -0
  21. data/ext/sources/cmake/x64-windows-llvm.cmake +5 -0
  22. data/ext/sources/examples/CMakeLists.txt +1 -0
  23. data/ext/sources/examples/addon.node/addon.cpp +19 -19
  24. data/ext/sources/examples/addon.node/index.js +7 -5
  25. data/ext/sources/examples/addon.node/vad-example.js +2 -2
  26. data/ext/sources/examples/bench/bench.cpp +26 -16
  27. data/ext/sources/examples/bench.wasm/index-tmpl.html +10 -9
  28. data/ext/sources/examples/cli/cli.cpp +122 -111
  29. data/ext/sources/examples/command/command.cpp +26 -24
  30. data/ext/sources/examples/command.wasm/index-tmpl.html +5 -4
  31. data/ext/sources/examples/common-ggml.cpp +2 -0
  32. data/ext/sources/examples/lsp/CMakeLists.txt +2 -1
  33. data/ext/sources/examples/lsp/lsp.cpp +19 -17
  34. data/ext/sources/examples/quantize/CMakeLists.txt +2 -1
  35. data/ext/sources/examples/server/server.cpp +34 -24
  36. data/ext/sources/examples/server.py +6 -1
  37. data/ext/sources/examples/stream/stream.cpp +4 -2
  38. data/ext/sources/examples/stream.wasm/emscripten.cpp +6 -6
  39. data/ext/sources/examples/stream.wasm/index-tmpl.html +82 -5
  40. data/ext/sources/examples/talk-llama/CMakeLists.txt +7 -3
  41. data/ext/sources/examples/talk-llama/llama-adapter.cpp +113 -7
  42. data/ext/sources/examples/talk-llama/llama-adapter.h +13 -1
  43. data/ext/sources/examples/talk-llama/llama-arch.cpp +2136 -1491
  44. data/ext/sources/examples/talk-llama/llama-arch.h +125 -3
  45. data/ext/sources/examples/talk-llama/llama-batch.cpp +174 -100
  46. data/ext/sources/examples/talk-llama/llama-batch.h +46 -20
  47. data/ext/sources/examples/talk-llama/llama-chat.cpp +199 -8
  48. data/ext/sources/examples/talk-llama/llama-chat.h +11 -0
  49. data/ext/sources/examples/talk-llama/llama-context.cpp +1213 -413
  50. data/ext/sources/examples/talk-llama/llama-context.h +99 -36
  51. data/ext/sources/examples/talk-llama/llama-cparams.h +5 -4
  52. data/ext/sources/examples/talk-llama/llama-grammar.cpp +288 -53
  53. data/ext/sources/examples/talk-llama/llama-grammar.h +22 -1
  54. data/ext/sources/examples/talk-llama/llama-graph.cpp +883 -294
  55. data/ext/sources/examples/talk-llama/llama-graph.h +361 -161
  56. data/ext/sources/examples/talk-llama/llama-hparams.cpp +144 -6
  57. data/ext/sources/examples/talk-llama/llama-hparams.h +100 -23
  58. data/ext/sources/examples/talk-llama/llama-impl.cpp +7 -3
  59. data/ext/sources/examples/talk-llama/llama-impl.h +3 -1
  60. data/ext/sources/examples/talk-llama/llama-kv-cache-iswa.cpp +328 -0
  61. data/ext/sources/examples/talk-llama/{llama-kv-cache-unified-iswa.h → llama-kv-cache-iswa.h} +38 -29
  62. data/ext/sources/examples/talk-llama/llama-kv-cache.cpp +2100 -0
  63. data/ext/sources/examples/talk-llama/llama-kv-cache.h +373 -27
  64. data/ext/sources/examples/talk-llama/llama-kv-cells.h +124 -30
  65. data/ext/sources/examples/talk-llama/llama-memory-hybrid.cpp +63 -41
  66. data/ext/sources/examples/talk-llama/llama-memory-hybrid.h +30 -29
  67. data/ext/sources/examples/talk-llama/llama-memory-recurrent.cpp +77 -35
  68. data/ext/sources/examples/talk-llama/llama-memory-recurrent.h +15 -16
  69. data/ext/sources/examples/talk-llama/llama-memory.h +16 -10
  70. data/ext/sources/examples/talk-llama/llama-mmap.cpp +172 -37
  71. data/ext/sources/examples/talk-llama/llama-mmap.h +8 -3
  72. data/ext/sources/examples/talk-llama/llama-model-loader.cpp +93 -9
  73. data/ext/sources/examples/talk-llama/llama-model-loader.h +9 -2
  74. data/ext/sources/examples/talk-llama/llama-model-saver.cpp +3 -0
  75. data/ext/sources/examples/talk-llama/llama-model.cpp +3369 -10145
  76. data/ext/sources/examples/talk-llama/llama-model.h +104 -12
  77. data/ext/sources/examples/talk-llama/llama-quant.cpp +53 -30
  78. data/ext/sources/examples/talk-llama/llama-sampling.cpp +1520 -324
  79. data/ext/sources/examples/talk-llama/llama-sampling.h +19 -7
  80. data/ext/sources/examples/talk-llama/llama-vocab.cpp +562 -39
  81. data/ext/sources/examples/talk-llama/llama-vocab.h +50 -0
  82. data/ext/sources/examples/talk-llama/llama.cpp +794 -12
  83. data/ext/sources/examples/talk-llama/llama.h +246 -190
  84. data/ext/sources/examples/talk-llama/models/afmoe.cpp +191 -0
  85. data/ext/sources/examples/talk-llama/models/apertus.cpp +125 -0
  86. data/ext/sources/examples/talk-llama/models/arcee.cpp +135 -0
  87. data/ext/sources/examples/talk-llama/models/arctic.cpp +138 -0
  88. data/ext/sources/examples/talk-llama/models/arwkv7.cpp +86 -0
  89. data/ext/sources/examples/talk-llama/models/baichuan.cpp +122 -0
  90. data/ext/sources/examples/talk-llama/models/bailingmoe.cpp +144 -0
  91. data/ext/sources/examples/talk-llama/models/bailingmoe2.cpp +135 -0
  92. data/ext/sources/examples/talk-llama/models/bert.cpp +178 -0
  93. data/ext/sources/examples/talk-llama/models/bitnet.cpp +160 -0
  94. data/ext/sources/examples/talk-llama/models/bloom.cpp +101 -0
  95. data/ext/sources/examples/talk-llama/models/chameleon.cpp +178 -0
  96. data/ext/sources/examples/talk-llama/models/chatglm.cpp +132 -0
  97. data/ext/sources/examples/talk-llama/models/codeshell.cpp +111 -0
  98. data/ext/sources/examples/talk-llama/models/cogvlm.cpp +102 -0
  99. data/ext/sources/examples/talk-llama/models/cohere2-iswa.cpp +134 -0
  100. data/ext/sources/examples/talk-llama/models/command-r.cpp +122 -0
  101. data/ext/sources/examples/talk-llama/models/dbrx.cpp +123 -0
  102. data/ext/sources/examples/talk-llama/models/deci.cpp +135 -0
  103. data/ext/sources/examples/talk-llama/models/deepseek.cpp +144 -0
  104. data/ext/sources/examples/talk-llama/models/deepseek2.cpp +259 -0
  105. data/ext/sources/examples/talk-llama/models/dots1.cpp +134 -0
  106. data/ext/sources/examples/talk-llama/models/dream.cpp +105 -0
  107. data/ext/sources/examples/talk-llama/models/ernie4-5-moe.cpp +150 -0
  108. data/ext/sources/examples/talk-llama/models/ernie4-5.cpp +110 -0
  109. data/ext/sources/examples/talk-llama/models/exaone.cpp +114 -0
  110. data/ext/sources/examples/talk-llama/models/exaone4.cpp +123 -0
  111. data/ext/sources/examples/talk-llama/models/falcon-h1.cpp +113 -0
  112. data/ext/sources/examples/talk-llama/models/falcon.cpp +120 -0
  113. data/ext/sources/examples/talk-llama/models/gemma-embedding.cpp +116 -0
  114. data/ext/sources/examples/talk-llama/models/gemma.cpp +112 -0
  115. data/ext/sources/examples/talk-llama/models/gemma2-iswa.cpp +128 -0
  116. data/ext/sources/examples/talk-llama/models/gemma3.cpp +155 -0
  117. data/ext/sources/examples/talk-llama/models/gemma3n-iswa.cpp +384 -0
  118. data/ext/sources/examples/talk-llama/models/glm4-moe.cpp +170 -0
  119. data/ext/sources/examples/talk-llama/models/glm4.cpp +150 -0
  120. data/ext/sources/examples/talk-llama/models/gpt2.cpp +105 -0
  121. data/ext/sources/examples/talk-llama/models/gptneox.cpp +144 -0
  122. data/ext/sources/examples/talk-llama/models/granite-hybrid.cpp +196 -0
  123. data/ext/sources/examples/talk-llama/models/granite.cpp +211 -0
  124. data/ext/sources/examples/talk-llama/models/graph-context-mamba.cpp +283 -0
  125. data/ext/sources/examples/talk-llama/models/grok.cpp +159 -0
  126. data/ext/sources/examples/talk-llama/models/grovemoe.cpp +141 -0
  127. data/ext/sources/examples/talk-llama/models/hunyuan-dense.cpp +132 -0
  128. data/ext/sources/examples/talk-llama/models/hunyuan-moe.cpp +154 -0
  129. data/ext/sources/examples/talk-llama/models/internlm2.cpp +120 -0
  130. data/ext/sources/examples/talk-llama/models/jais.cpp +86 -0
  131. data/ext/sources/examples/talk-llama/models/jamba.cpp +106 -0
  132. data/ext/sources/examples/talk-llama/models/lfm2.cpp +175 -0
  133. data/ext/sources/examples/talk-llama/models/llada-moe.cpp +122 -0
  134. data/ext/sources/examples/talk-llama/models/llada.cpp +99 -0
  135. data/ext/sources/examples/talk-llama/models/llama-iswa.cpp +178 -0
  136. data/ext/sources/examples/talk-llama/models/llama.cpp +168 -0
  137. data/ext/sources/examples/talk-llama/models/maincoder.cpp +117 -0
  138. data/ext/sources/examples/talk-llama/models/mamba.cpp +55 -0
  139. data/ext/sources/examples/talk-llama/models/mimo2-iswa.cpp +123 -0
  140. data/ext/sources/examples/talk-llama/models/minicpm3.cpp +199 -0
  141. data/ext/sources/examples/talk-llama/models/minimax-m2.cpp +124 -0
  142. data/ext/sources/examples/talk-llama/models/mistral3.cpp +160 -0
  143. data/ext/sources/examples/talk-llama/models/models.h +569 -0
  144. data/ext/sources/examples/talk-llama/models/modern-bert.cpp +116 -0
  145. data/ext/sources/examples/talk-llama/models/mpt.cpp +126 -0
  146. data/ext/sources/examples/talk-llama/models/nemotron-h.cpp +150 -0
  147. data/ext/sources/examples/talk-llama/models/nemotron.cpp +122 -0
  148. data/ext/sources/examples/talk-llama/models/neo-bert.cpp +104 -0
  149. data/ext/sources/examples/talk-llama/models/olmo.cpp +121 -0
  150. data/ext/sources/examples/talk-llama/models/olmo2.cpp +150 -0
  151. data/ext/sources/examples/talk-llama/models/olmoe.cpp +124 -0
  152. data/ext/sources/examples/talk-llama/models/openai-moe-iswa.cpp +127 -0
  153. data/ext/sources/examples/talk-llama/models/openelm.cpp +124 -0
  154. data/ext/sources/examples/talk-llama/models/orion.cpp +123 -0
  155. data/ext/sources/examples/talk-llama/models/pangu-embedded.cpp +121 -0
  156. data/ext/sources/examples/talk-llama/models/phi2.cpp +121 -0
  157. data/ext/sources/examples/talk-llama/models/phi3.cpp +152 -0
  158. data/ext/sources/examples/talk-llama/models/plamo.cpp +110 -0
  159. data/ext/sources/examples/talk-llama/models/plamo2.cpp +316 -0
  160. data/ext/sources/examples/talk-llama/models/plamo3.cpp +128 -0
  161. data/ext/sources/examples/talk-llama/models/plm.cpp +168 -0
  162. data/ext/sources/examples/talk-llama/models/qwen.cpp +108 -0
  163. data/ext/sources/examples/talk-llama/models/qwen2.cpp +126 -0
  164. data/ext/sources/examples/talk-llama/models/qwen2moe.cpp +151 -0
  165. data/ext/sources/examples/talk-llama/models/qwen2vl.cpp +117 -0
  166. data/ext/sources/examples/talk-llama/models/qwen3.cpp +117 -0
  167. data/ext/sources/examples/talk-llama/models/qwen3moe.cpp +124 -0
  168. data/ext/sources/examples/talk-llama/models/qwen3next.cpp +873 -0
  169. data/ext/sources/examples/talk-llama/models/qwen3vl-moe.cpp +149 -0
  170. data/ext/sources/examples/talk-llama/models/qwen3vl.cpp +141 -0
  171. data/ext/sources/examples/talk-llama/models/refact.cpp +94 -0
  172. data/ext/sources/examples/talk-llama/models/rnd1.cpp +126 -0
  173. data/ext/sources/examples/talk-llama/models/rwkv6-base.cpp +162 -0
  174. data/ext/sources/examples/talk-llama/models/rwkv6.cpp +94 -0
  175. data/ext/sources/examples/talk-llama/models/rwkv6qwen2.cpp +86 -0
  176. data/ext/sources/examples/talk-llama/models/rwkv7-base.cpp +135 -0
  177. data/ext/sources/examples/talk-llama/models/rwkv7.cpp +90 -0
  178. data/ext/sources/examples/talk-llama/models/seed-oss.cpp +124 -0
  179. data/ext/sources/examples/talk-llama/models/smallthinker.cpp +126 -0
  180. data/ext/sources/examples/talk-llama/models/smollm3.cpp +128 -0
  181. data/ext/sources/examples/talk-llama/models/stablelm.cpp +146 -0
  182. data/ext/sources/examples/talk-llama/models/starcoder.cpp +100 -0
  183. data/ext/sources/examples/talk-llama/models/starcoder2.cpp +121 -0
  184. data/ext/sources/examples/talk-llama/models/t5-dec.cpp +166 -0
  185. data/ext/sources/examples/talk-llama/models/t5-enc.cpp +96 -0
  186. data/ext/sources/examples/talk-llama/models/wavtokenizer-dec.cpp +149 -0
  187. data/ext/sources/examples/talk-llama/models/xverse.cpp +108 -0
  188. data/ext/sources/examples/talk-llama/talk-llama.cpp +9 -6
  189. data/ext/sources/examples/talk-llama/unicode.cpp +309 -16
  190. data/ext/sources/examples/talk-llama/unicode.h +45 -0
  191. data/ext/sources/examples/vad-speech-segments/CMakeLists.txt +1 -1
  192. data/ext/sources/examples/wchess/wchess.cmd/wchess.cmd.cpp +4 -2
  193. data/ext/sources/examples/whisper.wasm/index-tmpl.html +18 -17
  194. data/ext/sources/ggml/CMakeLists.txt +135 -79
  195. data/ext/sources/ggml/cmake/ggml-config.cmake.in +132 -93
  196. data/ext/sources/ggml/include/ggml-alloc.h +9 -0
  197. data/ext/sources/ggml/include/ggml-backend.h +21 -2
  198. data/ext/sources/ggml/include/ggml-cpu.h +2 -1
  199. data/ext/sources/ggml/include/ggml-hexagon.h +19 -0
  200. data/ext/sources/ggml/include/ggml-metal.h +1 -6
  201. data/ext/sources/ggml/include/ggml-opt.h +25 -6
  202. data/ext/sources/ggml/include/ggml-rpc.h +8 -11
  203. data/ext/sources/ggml/include/ggml-webgpu.h +19 -0
  204. data/ext/sources/ggml/include/ggml-zdnn.h +17 -0
  205. data/ext/sources/ggml/include/ggml-zendnn.h +22 -0
  206. data/ext/sources/ggml/include/ggml.h +406 -23
  207. data/ext/sources/ggml/src/CMakeLists.txt +99 -13
  208. data/ext/sources/ggml/src/ggml-alloc.c +368 -161
  209. data/ext/sources/ggml/src/ggml-backend-impl.h +5 -5
  210. data/ext/sources/ggml/src/ggml-backend-reg.cpp +55 -14
  211. data/ext/sources/ggml/src/ggml-backend.cpp +290 -57
  212. data/ext/sources/ggml/src/ggml-blas/CMakeLists.txt +17 -3
  213. data/ext/sources/ggml/src/ggml-blas/ggml-blas.cpp +10 -13
  214. data/ext/sources/ggml/src/ggml-cann/CMakeLists.txt +14 -0
  215. data/ext/sources/ggml/src/ggml-cann/acl_tensor.cpp +59 -45
  216. data/ext/sources/ggml/src/ggml-cann/acl_tensor.h +138 -47
  217. data/ext/sources/ggml/src/ggml-cann/aclnn_ops.cpp +2586 -1917
  218. data/ext/sources/ggml/src/ggml-cann/aclnn_ops.h +348 -309
  219. data/ext/sources/ggml/src/ggml-cann/common.h +350 -133
  220. data/ext/sources/ggml/src/ggml-cann/ggml-cann.cpp +894 -625
  221. data/ext/sources/ggml/src/ggml-common.h +17 -0
  222. data/ext/sources/ggml/src/ggml-cpu/CMakeLists.txt +167 -75
  223. data/ext/sources/ggml/src/ggml-cpu/amx/amx.cpp +5 -2
  224. data/ext/sources/ggml/src/ggml-cpu/arch/arm/cpu-feats.cpp +4 -0
  225. data/ext/sources/ggml/src/ggml-cpu/arch/arm/quants.c +560 -622
  226. data/ext/sources/ggml/src/ggml-cpu/arch/arm/repack.cpp +1002 -270
  227. data/ext/sources/ggml/src/ggml-cpu/arch/loongarch/quants.c +107 -587
  228. data/ext/sources/ggml/src/ggml-cpu/arch/powerpc/quants.c +162 -589
  229. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/cpu-feats.cpp +38 -0
  230. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/quants.c +373 -486
  231. data/ext/sources/ggml/src/ggml-cpu/arch/riscv/repack.cpp +3 -58
  232. data/ext/sources/ggml/src/ggml-cpu/arch/s390/cpu-feats.cpp +50 -0
  233. data/ext/sources/ggml/src/ggml-cpu/arch/s390/quants.c +521 -353
  234. data/ext/sources/ggml/src/ggml-cpu/arch/wasm/quants.c +54 -314
  235. data/ext/sources/ggml/src/ggml-cpu/arch/x86/quants.c +184 -675
  236. data/ext/sources/ggml/src/ggml-cpu/arch/x86/repack.cpp +4682 -1660
  237. data/ext/sources/ggml/src/ggml-cpu/arch-fallback.h +82 -4
  238. data/ext/sources/ggml/src/ggml-cpu/common.h +14 -0
  239. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu-impl.h +18 -9
  240. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.c +263 -111
  241. data/ext/sources/ggml/src/ggml-cpu/ggml-cpu.cpp +39 -28
  242. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.cpp +683 -82
  243. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kernels.h +38 -43
  244. data/ext/sources/ggml/src/ggml-cpu/kleidiai/kleidiai.cpp +435 -119
  245. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm-ppc.h +333 -0
  246. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.cpp +1234 -1182
  247. data/ext/sources/ggml/src/ggml-cpu/llamafile/sgemm.h +6 -0
  248. data/ext/sources/ggml/src/ggml-cpu/ops.cpp +2167 -1480
  249. data/ext/sources/ggml/src/ggml-cpu/ops.h +10 -12
  250. data/ext/sources/ggml/src/ggml-cpu/quants.c +35 -0
  251. data/ext/sources/ggml/src/ggml-cpu/quants.h +8 -0
  252. data/ext/sources/ggml/src/ggml-cpu/repack.cpp +1132 -81
  253. data/ext/sources/ggml/src/ggml-cpu/repack.h +36 -0
  254. data/ext/sources/ggml/src/ggml-cpu/simd-mappings.h +120 -93
  255. data/ext/sources/ggml/src/ggml-cpu/spacemit/ime.cpp +1025 -0
  256. data/ext/sources/ggml/src/ggml-cpu/spacemit/ime.h +13 -0
  257. data/ext/sources/ggml/src/ggml-cpu/spacemit/ime1_kernels.cpp +3196 -0
  258. data/ext/sources/ggml/src/ggml-cpu/spacemit/ime_kernels.h +26 -0
  259. data/ext/sources/ggml/src/ggml-cpu/traits.cpp +2 -2
  260. data/ext/sources/ggml/src/ggml-cpu/traits.h +1 -1
  261. data/ext/sources/ggml/src/ggml-cpu/unary-ops.cpp +151 -0
  262. data/ext/sources/ggml/src/ggml-cpu/unary-ops.h +7 -0
  263. data/ext/sources/ggml/src/ggml-cpu/vec.cpp +294 -27
  264. data/ext/sources/ggml/src/ggml-cpu/vec.h +606 -48
  265. data/ext/sources/ggml/src/ggml-cuda/CMakeLists.txt +92 -17
  266. data/ext/sources/ggml/src/ggml-cuda/add-id.cu +58 -0
  267. data/ext/sources/ggml/src/ggml-cuda/add-id.cuh +3 -0
  268. data/ext/sources/ggml/src/ggml-cuda/argmax.cu +2 -2
  269. data/ext/sources/ggml/src/ggml-cuda/argsort.cu +123 -6
  270. data/ext/sources/ggml/src/ggml-cuda/argsort.cuh +16 -0
  271. data/ext/sources/ggml/src/ggml-cuda/binbcast.cu +330 -191
  272. data/ext/sources/ggml/src/ggml-cuda/binbcast.cuh +2 -0
  273. data/ext/sources/ggml/src/ggml-cuda/common.cuh +588 -128
  274. data/ext/sources/ggml/src/ggml-cuda/conv-transpose-1d.cu +1 -4
  275. data/ext/sources/ggml/src/ggml-cuda/conv2d.cu +166 -0
  276. data/ext/sources/ggml/src/ggml-cuda/conv2d.cuh +5 -0
  277. data/ext/sources/ggml/src/ggml-cuda/convert.cu +95 -22
  278. data/ext/sources/ggml/src/ggml-cuda/convert.cuh +25 -0
  279. data/ext/sources/ggml/src/ggml-cuda/cpy-utils.cuh +217 -0
  280. data/ext/sources/ggml/src/ggml-cuda/cpy.cu +335 -485
  281. data/ext/sources/ggml/src/ggml-cuda/cpy.cuh +1 -5
  282. data/ext/sources/ggml/src/ggml-cuda/cross-entropy-loss.cu +2 -14
  283. data/ext/sources/ggml/src/ggml-cuda/cumsum.cu +307 -0
  284. data/ext/sources/ggml/src/ggml-cuda/cumsum.cuh +5 -0
  285. data/ext/sources/ggml/src/ggml-cuda/dequantize.cuh +14 -40
  286. data/ext/sources/ggml/src/ggml-cuda/diag.cu +77 -0
  287. data/ext/sources/ggml/src/ggml-cuda/diag.cuh +5 -0
  288. data/ext/sources/ggml/src/ggml-cuda/fattn-common.cuh +519 -378
  289. data/ext/sources/ggml/src/ggml-cuda/fattn-mma-f16.cuh +750 -637
  290. data/ext/sources/ggml/src/ggml-cuda/fattn-tile.cu +49 -0
  291. data/ext/sources/ggml/src/ggml-cuda/fattn-tile.cuh +1244 -0
  292. data/ext/sources/ggml/src/ggml-cuda/fattn-vec.cuh +586 -0
  293. data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cu +98 -61
  294. data/ext/sources/ggml/src/ggml-cuda/fattn-wmma-f16.cuh +48 -0
  295. data/ext/sources/ggml/src/ggml-cuda/fattn.cu +230 -197
  296. data/ext/sources/ggml/src/ggml-cuda/fattn.cuh +2 -0
  297. data/ext/sources/ggml/src/ggml-cuda/fill.cu +37 -0
  298. data/ext/sources/ggml/src/ggml-cuda/fill.cuh +3 -0
  299. data/ext/sources/ggml/src/ggml-cuda/getrows.cu +50 -39
  300. data/ext/sources/ggml/src/ggml-cuda/ggml-cuda.cu +1557 -294
  301. data/ext/sources/ggml/src/ggml-cuda/im2col.cu +196 -35
  302. data/ext/sources/ggml/src/ggml-cuda/im2col.cuh +1 -0
  303. data/ext/sources/ggml/src/ggml-cuda/mean.cu +57 -2
  304. data/ext/sources/ggml/src/ggml-cuda/mma.cuh +915 -69
  305. data/ext/sources/ggml/src/ggml-cuda/mmf.cu +171 -0
  306. data/ext/sources/ggml/src/ggml-cuda/mmf.cuh +835 -0
  307. data/ext/sources/ggml/src/ggml-cuda/mmid.cu +164 -0
  308. data/ext/sources/ggml/src/ggml-cuda/mmid.cuh +5 -0
  309. data/ext/sources/ggml/src/ggml-cuda/mmq.cu +109 -67
  310. data/ext/sources/ggml/src/ggml-cuda/mmq.cuh +1601 -733
  311. data/ext/sources/ggml/src/ggml-cuda/mmvf.cu +802 -0
  312. data/ext/sources/ggml/src/ggml-cuda/mmvf.cuh +12 -0
  313. data/ext/sources/ggml/src/ggml-cuda/mmvq.cu +286 -149
  314. data/ext/sources/ggml/src/ggml-cuda/mmvq.cuh +1 -1
  315. data/ext/sources/ggml/src/ggml-cuda/norm.cu +284 -12
  316. data/ext/sources/ggml/src/ggml-cuda/norm.cuh +7 -0
  317. data/ext/sources/ggml/src/ggml-cuda/opt-step-sgd.cu +49 -0
  318. data/ext/sources/ggml/src/ggml-cuda/opt-step-sgd.cuh +5 -0
  319. data/ext/sources/ggml/src/ggml-cuda/pad.cu +86 -32
  320. data/ext/sources/ggml/src/ggml-cuda/pad_reflect_1d.cu +91 -0
  321. data/ext/sources/ggml/src/ggml-cuda/pad_reflect_1d.cuh +5 -0
  322. data/ext/sources/ggml/src/ggml-cuda/quantize.cu +163 -10
  323. data/ext/sources/ggml/src/ggml-cuda/quantize.cuh +14 -0
  324. data/ext/sources/ggml/src/ggml-cuda/reduce_rows.cuh +53 -0
  325. data/ext/sources/ggml/src/ggml-cuda/roll.cu +67 -0
  326. data/ext/sources/ggml/src/ggml-cuda/roll.cuh +5 -0
  327. data/ext/sources/ggml/src/ggml-cuda/rope.cu +207 -98
  328. data/ext/sources/ggml/src/ggml-cuda/rope.cuh +2 -0
  329. data/ext/sources/ggml/src/ggml-cuda/scale.cu +14 -11
  330. data/ext/sources/ggml/src/ggml-cuda/set-rows.cu +330 -0
  331. data/ext/sources/ggml/src/ggml-cuda/set-rows.cuh +7 -0
  332. data/ext/sources/ggml/src/ggml-cuda/set.cu +39 -0
  333. data/ext/sources/ggml/src/ggml-cuda/set.cuh +7 -0
  334. data/ext/sources/ggml/src/ggml-cuda/softcap.cu +34 -0
  335. data/ext/sources/ggml/src/ggml-cuda/softcap.cuh +5 -0
  336. data/ext/sources/ggml/src/ggml-cuda/softmax.cu +325 -61
  337. data/ext/sources/ggml/src/ggml-cuda/solve_tri.cu +275 -0
  338. data/ext/sources/ggml/src/ggml-cuda/solve_tri.cuh +3 -0
  339. data/ext/sources/ggml/src/ggml-cuda/ssm-conv.cu +14 -12
  340. data/ext/sources/ggml/src/ggml-cuda/ssm-scan.cu +291 -104
  341. data/ext/sources/ggml/src/ggml-cuda/sum.cu +6 -10
  342. data/ext/sources/ggml/src/ggml-cuda/sumrows.cu +21 -4
  343. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq112-dv112.cu +5 -0
  344. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq128-dv128.cu +5 -0
  345. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq256-dv256.cu +5 -0
  346. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq40-dv40.cu +5 -0
  347. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq576-dv512.cu +5 -0
  348. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq64-dv64.cu +5 -0
  349. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq72-dv72.cu +5 -0
  350. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq80-dv80.cu +5 -0
  351. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-tile-instance-dkq96-dv96.cu +5 -0
  352. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-f16-f16.cu +7 -0
  353. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-f16-q4_0.cu +7 -0
  354. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-f16-q4_1.cu +7 -0
  355. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-f16-q5_0.cu +7 -0
  356. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-f16-q5_1.cu +7 -0
  357. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-f16-q8_0.cu +7 -0
  358. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_0-f16.cu +7 -0
  359. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_0-q4_0.cu +7 -0
  360. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_0-q4_1.cu +7 -0
  361. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_0-q5_0.cu +7 -0
  362. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_0-q5_1.cu +7 -0
  363. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_0-q8_0.cu +7 -0
  364. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_1-f16.cu +7 -0
  365. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_1-q4_0.cu +7 -0
  366. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_1-q4_1.cu +7 -0
  367. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_1-q5_0.cu +7 -0
  368. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_1-q5_1.cu +7 -0
  369. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q4_1-q8_0.cu +7 -0
  370. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_0-f16.cu +7 -0
  371. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_0-q4_0.cu +7 -0
  372. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_0-q4_1.cu +7 -0
  373. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_0-q5_0.cu +7 -0
  374. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_0-q5_1.cu +7 -0
  375. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_0-q8_0.cu +7 -0
  376. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_1-f16.cu +7 -0
  377. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_1-q4_0.cu +7 -0
  378. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_1-q4_1.cu +7 -0
  379. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_1-q5_0.cu +7 -0
  380. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_1-q5_1.cu +7 -0
  381. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q5_1-q8_0.cu +7 -0
  382. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q8_0-f16.cu +7 -0
  383. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q8_0-q4_0.cu +7 -0
  384. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q8_0-q4_1.cu +7 -0
  385. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q8_0-q5_0.cu +7 -0
  386. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q8_0-q5_1.cu +7 -0
  387. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-instance-q8_0-q8_0.cu +7 -0
  388. data/ext/sources/ggml/src/ggml-cuda/template-instances/generate_cu_files.py +40 -19
  389. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_1.cu +5 -0
  390. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_10.cu +5 -0
  391. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_11.cu +5 -0
  392. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_12.cu +5 -0
  393. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_13.cu +5 -0
  394. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_14.cu +5 -0
  395. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_15.cu +5 -0
  396. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_16.cu +5 -0
  397. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_2.cu +5 -0
  398. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_3.cu +5 -0
  399. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_4.cu +5 -0
  400. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_5.cu +5 -0
  401. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_6.cu +5 -0
  402. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_7.cu +5 -0
  403. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_8.cu +5 -0
  404. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmf-instance-ncols_9.cu +5 -0
  405. data/ext/sources/ggml/src/ggml-cuda/template-instances/mmq-instance-mxfp4.cu +5 -0
  406. data/ext/sources/ggml/src/ggml-cuda/top-k.cu +96 -0
  407. data/ext/sources/ggml/src/ggml-cuda/top-k.cuh +3 -0
  408. data/ext/sources/ggml/src/ggml-cuda/topk-moe.cu +351 -0
  409. data/ext/sources/ggml/src/ggml-cuda/topk-moe.cuh +21 -0
  410. data/ext/sources/ggml/src/ggml-cuda/tri.cu +136 -0
  411. data/ext/sources/ggml/src/ggml-cuda/tri.cuh +5 -0
  412. data/ext/sources/ggml/src/ggml-cuda/tsembd.cu +3 -3
  413. data/ext/sources/ggml/src/ggml-cuda/unary.cu +189 -5
  414. data/ext/sources/ggml/src/ggml-cuda/unary.cuh +44 -0
  415. data/ext/sources/ggml/src/ggml-cuda/upscale.cu +248 -6
  416. data/ext/sources/ggml/src/ggml-cuda/vecdotq.cuh +110 -22
  417. data/ext/sources/ggml/src/ggml-cuda/vendors/cuda.h +8 -0
  418. data/ext/sources/ggml/src/ggml-cuda/vendors/hip.h +70 -37
  419. data/ext/sources/ggml/src/ggml-cuda/vendors/musa.h +10 -3
  420. data/ext/sources/ggml/src/ggml-hexagon/CMakeLists.txt +80 -0
  421. data/ext/sources/ggml/src/ggml-hexagon/ggml-hexagon.cpp +3151 -0
  422. data/ext/sources/ggml/src/ggml-hexagon/htp/CMakeLists.txt +44 -0
  423. data/ext/sources/ggml/src/ggml-hexagon/htp/act-ops.c +682 -0
  424. data/ext/sources/ggml/src/ggml-hexagon/htp/binary-ops.c +360 -0
  425. data/ext/sources/ggml/src/ggml-hexagon/htp/cmake-toolchain.cmake +157 -0
  426. data/ext/sources/ggml/src/ggml-hexagon/htp/flash-attn-ops.c +566 -0
  427. data/ext/sources/ggml/src/ggml-hexagon/htp/get-rows-ops.c +112 -0
  428. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-ctx.h +35 -0
  429. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-dma.c +63 -0
  430. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-dma.h +157 -0
  431. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-msg.h +165 -0
  432. data/ext/sources/ggml/src/ggml-hexagon/htp/htp-ops.h +92 -0
  433. data/ext/sources/ggml/src/ggml-hexagon/htp/htp_iface.idl +16 -0
  434. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-exp.c +94 -0
  435. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-inverse.c +72 -0
  436. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-sigmoid.c +49 -0
  437. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-utils.c +1020 -0
  438. data/ext/sources/ggml/src/ggml-hexagon/htp/hvx-utils.h +1353 -0
  439. data/ext/sources/ggml/src/ggml-hexagon/htp/main.c +1001 -0
  440. data/ext/sources/ggml/src/ggml-hexagon/htp/matmul-ops.c +2503 -0
  441. data/ext/sources/ggml/src/ggml-hexagon/htp/ops-utils.h +149 -0
  442. data/ext/sources/ggml/src/ggml-hexagon/htp/rope-ops.c +487 -0
  443. data/ext/sources/ggml/src/ggml-hexagon/htp/set-rows-ops.c +168 -0
  444. data/ext/sources/ggml/src/ggml-hexagon/htp/softmax-ops.c +402 -0
  445. data/ext/sources/ggml/src/ggml-hexagon/htp/unary-ops.c +287 -0
  446. data/ext/sources/ggml/src/ggml-hexagon/htp/worker-pool.c +297 -0
  447. data/ext/sources/ggml/src/ggml-hexagon/htp/worker-pool.h +57 -0
  448. data/ext/sources/ggml/src/ggml-hexagon/htp-utils.c +454 -0
  449. data/ext/sources/ggml/src/ggml-hexagon/htp-utils.h +221 -0
  450. data/ext/sources/ggml/src/ggml-hexagon/op-desc.h +153 -0
  451. data/ext/sources/ggml/src/ggml-hip/CMakeLists.txt +16 -13
  452. data/ext/sources/ggml/src/ggml-impl.h +186 -15
  453. data/ext/sources/ggml/src/ggml-metal/CMakeLists.txt +10 -7
  454. data/ext/sources/ggml/src/ggml-metal/ggml-metal-common.cpp +446 -0
  455. data/ext/sources/ggml/src/ggml-metal/ggml-metal-common.h +52 -0
  456. data/ext/sources/ggml/src/ggml-metal/ggml-metal-context.h +33 -0
  457. data/ext/sources/ggml/src/ggml-metal/ggml-metal-context.m +609 -0
  458. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.cpp +1743 -0
  459. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.h +273 -0
  460. data/ext/sources/ggml/src/ggml-metal/ggml-metal-device.m +1686 -0
  461. data/ext/sources/ggml/src/ggml-metal/ggml-metal-impl.h +356 -61
  462. data/ext/sources/ggml/src/ggml-metal/ggml-metal-ops.cpp +4161 -0
  463. data/ext/sources/ggml/src/ggml-metal/ggml-metal-ops.h +94 -0
  464. data/ext/sources/ggml/src/ggml-metal/ggml-metal.cpp +724 -0
  465. data/ext/sources/ggml/src/ggml-metal/ggml-metal.metal +4495 -1876
  466. data/ext/sources/ggml/src/ggml-musa/CMakeLists.txt +21 -9
  467. data/ext/sources/ggml/src/ggml-opencl/CMakeLists.txt +29 -0
  468. data/ext/sources/ggml/src/ggml-opencl/ggml-opencl.cpp +4005 -427
  469. data/ext/sources/ggml/src/ggml-opencl/kernels/add.cl +107 -0
  470. data/ext/sources/ggml/src/ggml-opencl/kernels/add_id.cl +42 -0
  471. data/ext/sources/ggml/src/ggml-opencl/kernels/conv2d.cl +185 -0
  472. data/ext/sources/ggml/src/ggml-opencl/kernels/conv2d_f16_f32.cl +176 -0
  473. data/ext/sources/ggml/src/ggml-opencl/kernels/cvt.cl +147 -0
  474. data/ext/sources/ggml/src/ggml-opencl/kernels/div.cl +66 -0
  475. data/ext/sources/ggml/src/ggml-opencl/kernels/expm1.cl +82 -0
  476. data/ext/sources/ggml/src/ggml-opencl/kernels/fill.cl +17 -0
  477. data/ext/sources/ggml/src/ggml-opencl/kernels/flash_attn_f16.cl +370 -0
  478. data/ext/sources/ggml/src/ggml-opencl/kernels/flash_attn_f32.cl +371 -0
  479. data/ext/sources/ggml/src/ggml-opencl/kernels/flash_attn_f32_f16.cl +373 -0
  480. data/ext/sources/ggml/src/ggml-opencl/kernels/gelu.cl +27 -0
  481. data/ext/sources/ggml/src/ggml-opencl/kernels/gemm_moe_mxfp4_f32.cl +162 -0
  482. data/ext/sources/ggml/src/ggml-opencl/kernels/gemv_moe_mxfp4_f32.cl +156 -0
  483. data/ext/sources/ggml/src/ggml-opencl/kernels/get_rows.cl +36 -12
  484. data/ext/sources/ggml/src/ggml-opencl/kernels/glu.cl +177 -0
  485. data/ext/sources/ggml/src/ggml-opencl/kernels/group_norm.cl +49 -0
  486. data/ext/sources/ggml/src/ggml-opencl/kernels/im2col_f16.cl +1 -1
  487. data/ext/sources/ggml/src/ggml-opencl/kernels/im2col_f32.cl +1 -1
  488. data/ext/sources/ggml/src/ggml-opencl/kernels/mean.cl +39 -0
  489. data/ext/sources/ggml/src/ggml-opencl/kernels/mul.cl +73 -0
  490. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mat_f16_f32.cl +130 -0
  491. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f16_f32_kq_kqv.cl +273 -0
  492. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f16_f32_l4_lm.cl +146 -0
  493. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_f32_f32_l4_lm.cl +147 -0
  494. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mm_q8_0_f32_l4_lm.cl +154 -0
  495. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_id_mxfp4_f32.cl +189 -0
  496. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_id_mxfp4_f32_flat.cl +176 -0
  497. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_id_q8_0_f32.cl +140 -0
  498. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_id_q8_0_f32_flat.cl +222 -0
  499. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_mxfp4_f32.cl +144 -0
  500. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_mxfp4_f32_flat.cl +167 -0
  501. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q8_0_f32.cl +125 -0
  502. data/ext/sources/ggml/src/ggml-opencl/kernels/mul_mv_q8_0_f32_flat.cl +202 -0
  503. data/ext/sources/ggml/src/ggml-opencl/kernels/norm.cl +80 -0
  504. data/ext/sources/ggml/src/ggml-opencl/kernels/pad.cl +29 -20
  505. data/ext/sources/ggml/src/ggml-opencl/kernels/rms_norm.cl +94 -0
  506. data/ext/sources/ggml/src/ggml-opencl/kernels/rope.cl +50 -24
  507. data/ext/sources/ggml/src/ggml-opencl/kernels/scale.cl +3 -2
  508. data/ext/sources/ggml/src/ggml-opencl/kernels/set_rows.cl +208 -0
  509. data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_4_f16.cl +34 -13
  510. data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_4_f32.cl +34 -13
  511. data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_f16.cl +34 -13
  512. data/ext/sources/ggml/src/ggml-opencl/kernels/softmax_f32.cl +34 -13
  513. data/ext/sources/ggml/src/ggml-opencl/kernels/softplus.cl +88 -0
  514. data/ext/sources/ggml/src/ggml-opencl/kernels/sqr.cl +53 -0
  515. data/ext/sources/ggml/src/ggml-opencl/kernels/sqrt.cl +53 -0
  516. data/ext/sources/ggml/src/ggml-opencl/kernels/ssm_conv.cl +77 -0
  517. data/ext/sources/ggml/src/ggml-opencl/kernels/sub.cl +66 -0
  518. data/ext/sources/ggml/src/ggml-opencl/kernels/transpose.cl +33 -0
  519. data/ext/sources/ggml/src/ggml-opencl/kernels/tsembd.cl +2 -2
  520. data/ext/sources/ggml/src/ggml-opencl/kernels/upscale.cl +2 -3
  521. data/ext/sources/ggml/src/ggml-opt.cpp +97 -41
  522. data/ext/sources/ggml/src/ggml-quants.c +111 -16
  523. data/ext/sources/ggml/src/ggml-quants.h +6 -0
  524. data/ext/sources/ggml/src/ggml-rpc/ggml-rpc.cpp +497 -195
  525. data/ext/sources/ggml/src/ggml-sycl/CMakeLists.txt +48 -3
  526. data/ext/sources/ggml/src/ggml-sycl/add-id.cpp +77 -0
  527. data/ext/sources/ggml/src/ggml-sycl/add-id.hpp +8 -0
  528. data/ext/sources/ggml/src/ggml-sycl/backend.hpp +8 -0
  529. data/ext/sources/ggml/src/ggml-sycl/binbcast.cpp +6 -5
  530. data/ext/sources/ggml/src/ggml-sycl/common.hpp +117 -15
  531. data/ext/sources/ggml/src/ggml-sycl/concat.cpp +50 -30
  532. data/ext/sources/ggml/src/ggml-sycl/conv.cpp +10 -4
  533. data/ext/sources/ggml/src/ggml-sycl/convert.cpp +200 -99
  534. data/ext/sources/ggml/src/ggml-sycl/count-equal.cpp +79 -0
  535. data/ext/sources/ggml/src/ggml-sycl/count-equal.hpp +9 -0
  536. data/ext/sources/ggml/src/ggml-sycl/cpy.cpp +72 -309
  537. data/ext/sources/ggml/src/ggml-sycl/cpy.hpp +213 -1
  538. data/ext/sources/ggml/src/ggml-sycl/dequantize.hpp +18 -0
  539. data/ext/sources/ggml/src/ggml-sycl/dmmv.cpp +67 -49
  540. data/ext/sources/ggml/src/ggml-sycl/dpct/helper.hpp +77 -34
  541. data/ext/sources/ggml/src/ggml-sycl/element_wise.cpp +397 -314
  542. data/ext/sources/ggml/src/ggml-sycl/element_wise.hpp +12 -2
  543. data/ext/sources/ggml/src/ggml-sycl/gemm.hpp +14 -26
  544. data/ext/sources/ggml/src/ggml-sycl/getrows.cpp +9 -6
  545. data/ext/sources/ggml/src/ggml-sycl/ggml-sycl.cpp +643 -413
  546. data/ext/sources/ggml/src/ggml-sycl/gla.cpp +2 -2
  547. data/ext/sources/ggml/src/ggml-sycl/im2col.cpp +2 -2
  548. data/ext/sources/ggml/src/ggml-sycl/mmq.cpp +80 -60
  549. data/ext/sources/ggml/src/ggml-sycl/mmvq.cpp +223 -132
  550. data/ext/sources/ggml/src/ggml-sycl/norm.cpp +230 -55
  551. data/ext/sources/ggml/src/ggml-sycl/norm.hpp +2 -0
  552. data/ext/sources/ggml/src/ggml-sycl/pad.cpp +97 -0
  553. data/ext/sources/ggml/src/ggml-sycl/pad.hpp +24 -0
  554. data/ext/sources/ggml/src/ggml-sycl/pad_reflect_1d.cpp +100 -0
  555. data/ext/sources/ggml/src/ggml-sycl/pad_reflect_1d.hpp +10 -0
  556. data/ext/sources/ggml/src/ggml-sycl/presets.hpp +2 -0
  557. data/ext/sources/ggml/src/ggml-sycl/quantize.hpp +133 -0
  558. data/ext/sources/ggml/src/ggml-sycl/quants.hpp +8 -9
  559. data/ext/sources/ggml/src/ggml-sycl/repeat_back.cpp +76 -0
  560. data/ext/sources/ggml/src/ggml-sycl/repeat_back.hpp +8 -0
  561. data/ext/sources/ggml/src/ggml-sycl/roll.cpp +122 -0
  562. data/ext/sources/ggml/src/ggml-sycl/roll.hpp +20 -0
  563. data/ext/sources/ggml/src/ggml-sycl/rope.cpp +65 -59
  564. data/ext/sources/ggml/src/ggml-sycl/set.cpp +73 -0
  565. data/ext/sources/ggml/src/ggml-sycl/set.hpp +5 -0
  566. data/ext/sources/ggml/src/ggml-sycl/set_rows.cpp +234 -0
  567. data/ext/sources/ggml/src/ggml-sycl/set_rows.hpp +8 -0
  568. data/ext/sources/ggml/src/ggml-sycl/softmax.cpp +330 -165
  569. data/ext/sources/ggml/src/ggml-sycl/softmax.hpp +4 -0
  570. data/ext/sources/ggml/src/ggml-sycl/ssm_conv.cpp +127 -0
  571. data/ext/sources/ggml/src/ggml-sycl/ssm_conv.hpp +5 -0
  572. data/ext/sources/ggml/src/ggml-sycl/tsembd.cpp +12 -6
  573. data/ext/sources/ggml/src/ggml-sycl/vecdotq.hpp +60 -6
  574. data/ext/sources/ggml/src/ggml-sycl/wkv.cpp +16 -12
  575. data/ext/sources/ggml/src/ggml-vulkan/CMakeLists.txt +38 -18
  576. data/ext/sources/ggml/src/ggml-vulkan/ggml-vulkan.cpp +7398 -2635
  577. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/abs.comp +21 -0
  578. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/acc.comp +2 -2
  579. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add.comp +43 -3
  580. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add1.comp +28 -0
  581. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/add_id.comp +42 -0
  582. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/arange.comp +20 -0
  583. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argmax.comp +15 -6
  584. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort.comp +56 -39
  585. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/argsort_large.comp +114 -0
  586. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ceil.comp +22 -0
  587. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/clamp.comp +2 -2
  588. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/concat.comp +2 -2
  589. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/contig_copy.comp +2 -2
  590. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_dw.comp +1 -1
  591. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv2d_mm.comp +347 -0
  592. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/conv_transpose_1d.comp +1 -1
  593. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy.comp +2 -2
  594. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_from_quant.comp +5 -5
  595. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_to_quant.comp +67 -13
  596. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/copy_transpose.comp +67 -0
  597. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cos.comp +2 -2
  598. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_equal.comp +2 -2
  599. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/count_experts.comp +51 -0
  600. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum.comp +83 -0
  601. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum_multipass1.comp +60 -0
  602. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/cumsum_multipass2.comp +66 -0
  603. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_f32.comp +1 -1
  604. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_funcs.comp → dequant_funcs.glsl} +158 -16
  605. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_funcs_cm2.comp → dequant_funcs_cm2.glsl} +38 -3
  606. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{dequant_head.comp → dequant_head.glsl} +1 -1
  607. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_m.comp +1 -1
  608. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq1_s.comp +1 -1
  609. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_s.comp +2 -2
  610. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xs.comp +1 -1
  611. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq2_xxs.comp +3 -2
  612. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_s.comp +7 -6
  613. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq3_xxs.comp +5 -3
  614. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_nl.comp +1 -1
  615. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_iq4_xs.comp +1 -1
  616. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_mxfp4.comp +32 -0
  617. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q2_k.comp +4 -4
  618. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q3_k.comp +2 -2
  619. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_0.comp +1 -1
  620. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_1.comp +1 -1
  621. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q4_k.comp +4 -4
  622. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_0.comp +1 -1
  623. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_1.comp +1 -1
  624. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q5_k.comp +4 -4
  625. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q6_k.comp +2 -2
  626. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/dequant_q8_0.comp +1 -1
  627. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag.comp +29 -0
  628. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/diag_mask_inf.comp +1 -1
  629. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/div.comp +2 -2
  630. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/exp.comp +21 -0
  631. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/fill.comp +19 -0
  632. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn.comp +103 -36
  633. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_base.glsl +220 -0
  634. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm1.comp +139 -45
  635. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_cm2.comp +113 -38
  636. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_split_k_reduce.comp +75 -14
  637. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/floor.comp +22 -0
  638. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu.comp +2 -2
  639. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu_erf.comp +27 -0
  640. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/geglu_quick.comp +11 -0
  641. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu.comp +2 -2
  642. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_erf.comp +39 -0
  643. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/gelu_quick.comp +2 -2
  644. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_binary_head.comp → generic_binary_head.glsl} +19 -17
  645. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_head.comp → generic_head.glsl} +2 -0
  646. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{generic_unary_head.comp → generic_unary_head.glsl} +7 -0
  647. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows.comp +21 -12
  648. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/get_rows_quant.comp +28 -18
  649. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{glu_head.comp → glu_head.glsl} +4 -0
  650. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/group_norm.comp +2 -2
  651. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/hardsigmoid.comp +22 -0
  652. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/hardswish.comp +22 -0
  653. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col.comp +33 -17
  654. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/im2col_3d.comp +125 -0
  655. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/l2_norm.comp +2 -2
  656. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/leaky_relu.comp +2 -2
  657. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/log.comp +18 -0
  658. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul.comp +2 -2
  659. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec.comp +2 -2
  660. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.glsl +227 -0
  661. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iface.glsl +35 -0
  662. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_m.comp +71 -21
  663. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq1_s.comp +41 -25
  664. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_s.comp +2 -2
  665. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xs.comp +44 -26
  666. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq2_xxs.comp +2 -2
  667. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_s.comp +2 -2
  668. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_iq3_xxs.comp +2 -2
  669. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_nc.comp +20 -14
  670. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_p021.comp +9 -7
  671. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q2_k.comp +4 -6
  672. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q3_k.comp +2 -2
  673. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q4_k.comp +4 -6
  674. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q5_k.comp +4 -6
  675. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_q6_k.comp +2 -2
  676. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vecq.comp +143 -0
  677. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vecq_funcs.glsl +494 -0
  678. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm.comp +144 -556
  679. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_cm2.comp +230 -51
  680. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_funcs.glsl +566 -0
  681. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mm_id_funcs.glsl +72 -0
  682. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq.comp +90 -223
  683. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.glsl +454 -0
  684. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_shmem_types.glsl +78 -0
  685. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/multi_add.comp +195 -0
  686. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/neg.comp +20 -0
  687. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/norm.comp +2 -2
  688. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_adamw.comp +2 -2
  689. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/opt_step_sgd.comp +22 -0
  690. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pad.comp +41 -5
  691. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/pool2d.comp +1 -1
  692. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/quantize_q8_1.comp +59 -9
  693. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/reglu.comp +2 -2
  694. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/relu.comp +2 -2
  695. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat.comp +2 -2
  696. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/repeat_back.comp +2 -2
  697. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm.comp +104 -14
  698. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_back.comp +2 -2
  699. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rms_norm_partials.comp +65 -0
  700. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/roll.comp +46 -0
  701. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_funcs.glsl +234 -0
  702. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.glsl +20 -0
  703. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_multi.comp +6 -52
  704. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_neox.comp +6 -35
  705. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_norm.comp +6 -35
  706. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_params.glsl +28 -0
  707. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_vision.comp +6 -39
  708. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/round.comp +29 -0
  709. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rte.glsl +5 -0
  710. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/scale.comp +3 -3
  711. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sigmoid.comp +2 -2
  712. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu.comp +2 -2
  713. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/silu_back.comp +2 -2
  714. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sin.comp +2 -2
  715. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max.comp +30 -8
  716. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_back.comp +6 -2
  717. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large1.comp +62 -0
  718. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large2.comp +79 -0
  719. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large3.comp +65 -0
  720. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/soft_max_large_common.glsl +53 -0
  721. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/softplus.comp +23 -0
  722. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/solve_tri.comp +81 -0
  723. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sqrt.comp +17 -0
  724. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/square.comp +2 -2
  725. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ssm_conv.comp +44 -0
  726. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/ssm_scan.comp +124 -0
  727. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/step.comp +22 -0
  728. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sub.comp +2 -2
  729. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.comp +16 -6
  730. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/sum_rows.glsl +25 -0
  731. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/swiglu.comp +2 -2
  732. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/swiglu_oai.comp +14 -0
  733. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tanh.comp +2 -2
  734. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/timestep_embedding.comp +5 -4
  735. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_argsort.comp +118 -0
  736. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_moe.comp +213 -0
  737. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/topk_nary_search.comp +246 -0
  738. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/tri.comp +43 -0
  739. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/trunc.comp +22 -0
  740. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{types.comp → types.glsl} +435 -24
  741. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/upscale.comp +148 -6
  742. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/utils.glsl +25 -0
  743. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/vulkan-shaders-gen.cpp +619 -177
  744. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/xielu.comp +35 -0
  745. data/ext/sources/ggml/src/ggml-webgpu/CMakeLists.txt +80 -0
  746. data/ext/sources/ggml/src/ggml-webgpu/ggml-webgpu-shader-lib.hpp +169 -0
  747. data/ext/sources/ggml/src/ggml-webgpu/ggml-webgpu.cpp +3087 -0
  748. data/ext/sources/ggml/src/ggml-webgpu/pre_wgsl.hpp +778 -0
  749. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/bin_op.tmpl.wgsl +188 -0
  750. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/binary_head.tmpl +45 -0
  751. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/common_decls.tmpl +930 -0
  752. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/cpy.tmpl.wgsl +101 -0
  753. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/embed_wgsl.py +147 -0
  754. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/flash_attn.wgsl +591 -0
  755. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/get_rows.tmpl.wgsl +874 -0
  756. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/glu.tmpl.wgsl +323 -0
  757. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/memset.wgsl +40 -0
  758. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat.tmpl.wgsl +907 -0
  759. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_decls.tmpl +97 -0
  760. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_reg_tile.tmpl.wgsl +247 -0
  761. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_subgroup_matrix.tmpl.wgsl +302 -0
  762. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/mul_mat_vec.tmpl.wgsl +267 -0
  763. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rms_norm.wgsl +123 -0
  764. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/rope.tmpl.wgsl +295 -0
  765. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/scale.tmpl.wgsl +90 -0
  766. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/set_rows.tmpl.wgsl +112 -0
  767. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/set_rows.wgsl +81 -0
  768. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/soft_max.tmpl.wgsl +345 -0
  769. data/ext/sources/ggml/src/ggml-webgpu/wgsl-shaders/unary_op.wgsl +483 -0
  770. data/ext/sources/ggml/src/ggml-zdnn/CMakeLists.txt +36 -0
  771. data/ext/sources/ggml/src/ggml-zdnn/common.hpp +59 -0
  772. data/ext/sources/ggml/src/ggml-zdnn/ggml-zdnn.cpp +628 -0
  773. data/ext/sources/ggml/src/ggml-zdnn/mmf.cpp +80 -0
  774. data/ext/sources/ggml/src/ggml-zdnn/mmf.hpp +12 -0
  775. data/ext/sources/ggml/src/ggml-zdnn/utils.cpp +79 -0
  776. data/ext/sources/ggml/src/ggml-zdnn/utils.hpp +19 -0
  777. data/ext/sources/ggml/src/ggml-zendnn/CMakeLists.txt +92 -0
  778. data/ext/sources/ggml/src/ggml-zendnn/ggml-zendnn.cpp +466 -0
  779. data/ext/sources/ggml/src/ggml.c +901 -129
  780. data/ext/sources/ggml/src/gguf.cpp +8 -1
  781. data/ext/sources/include/whisper.h +1 -0
  782. data/ext/sources/src/CMakeLists.txt +3 -1
  783. data/ext/sources/src/whisper.cpp +124 -81
  784. data/ext/sources/tests/CMakeLists.txt +8 -1
  785. data/ext/sources/tests/test-vad-full.cpp +7 -5
  786. data/ext/sources/tests/test-vad.cpp +3 -3
  787. data/extsources.rb +1 -0
  788. data/lib/whisper/model/uri.rb +17 -18
  789. data/sig/whisper.rbs +126 -2
  790. data/test/test_params.rb +24 -8
  791. data/test/test_segment.rb +0 -1
  792. data/test/test_token.rb +70 -0
  793. data/test/test_vad.rb +1 -1
  794. data/test/test_vad_context.rb +50 -0
  795. data/test/test_vad_segment.rb +19 -0
  796. data/test/test_vad_segments.rb +16 -0
  797. data/test/test_whisper.rb +8 -1
  798. data/whispercpp.gemspec +1 -1
  799. metadata +439 -179
  800. data/ext/sources/build-xcframework.sh +0 -547
  801. data/ext/sources/examples/talk-llama/llama-kv-cache-unified-iswa.cpp +0 -279
  802. data/ext/sources/examples/talk-llama/llama-kv-cache-unified.cpp +0 -1841
  803. data/ext/sources/examples/talk-llama/llama-kv-cache-unified.h +0 -303
  804. data/ext/sources/ggml/include/ggml-kompute.h +0 -50
  805. data/ext/sources/ggml/src/ggml-amx/CMakeLists.txt +0 -107
  806. data/ext/sources/ggml/src/ggml-amx/common.h +0 -94
  807. data/ext/sources/ggml/src/ggml-amx/ggml-amx.cpp +0 -446
  808. data/ext/sources/ggml/src/ggml-amx/mmq.cpp +0 -2510
  809. data/ext/sources/ggml/src/ggml-amx/mmq.h +0 -17
  810. data/ext/sources/ggml/src/ggml-cann/Doxyfile +0 -2579
  811. data/ext/sources/ggml/src/ggml-cann/kernels/CMakeLists.txt +0 -30
  812. data/ext/sources/ggml/src/ggml-cann/kernels/ascendc_kernels.h +0 -19
  813. data/ext/sources/ggml/src/ggml-cann/kernels/dup.cpp +0 -234
  814. data/ext/sources/ggml/src/ggml-cann/kernels/get_row_f16.cpp +0 -197
  815. data/ext/sources/ggml/src/ggml-cann/kernels/get_row_f32.cpp +0 -190
  816. data/ext/sources/ggml/src/ggml-cann/kernels/get_row_q4_0.cpp +0 -204
  817. data/ext/sources/ggml/src/ggml-cann/kernels/get_row_q8_0.cpp +0 -191
  818. data/ext/sources/ggml/src/ggml-cann/kernels/quantize_f16_q8_0.cpp +0 -218
  819. data/ext/sources/ggml/src/ggml-cann/kernels/quantize_f32_q8_0.cpp +0 -216
  820. data/ext/sources/ggml/src/ggml-cann/kernels/quantize_float_to_q4_0.cpp +0 -295
  821. data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f16.cu +0 -357
  822. data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f16.cuh +0 -3
  823. data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f32.cu +0 -365
  824. data/ext/sources/ggml/src/ggml-cuda/fattn-tile-f32.cuh +0 -3
  825. data/ext/sources/ggml/src/ggml-cuda/fattn-vec-f16.cuh +0 -482
  826. data/ext/sources/ggml/src/ggml-cuda/fattn-vec-f32.cuh +0 -472
  827. data/ext/sources/ggml/src/ggml-cuda/mmv.cu +0 -506
  828. data/ext/sources/ggml/src/ggml-cuda/mmv.cuh +0 -11
  829. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-f16.cu +0 -5
  830. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q4_0.cu +0 -5
  831. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q4_1.cu +0 -5
  832. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q5_0.cu +0 -5
  833. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q5_1.cu +0 -5
  834. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-f16-q8_0.cu +0 -5
  835. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-f16.cu +0 -5
  836. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q4_0.cu +0 -5
  837. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q4_1.cu +0 -5
  838. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q5_0.cu +0 -5
  839. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q5_1.cu +0 -5
  840. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_0-q8_0.cu +0 -5
  841. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-f16.cu +0 -5
  842. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q4_0.cu +0 -5
  843. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q4_1.cu +0 -5
  844. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q5_0.cu +0 -5
  845. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q5_1.cu +0 -5
  846. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q4_1-q8_0.cu +0 -5
  847. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-f16.cu +0 -5
  848. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q4_0.cu +0 -5
  849. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q4_1.cu +0 -5
  850. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q5_0.cu +0 -5
  851. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q5_1.cu +0 -5
  852. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_0-q8_0.cu +0 -5
  853. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-f16.cu +0 -5
  854. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q4_0.cu +0 -5
  855. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q4_1.cu +0 -5
  856. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q5_0.cu +0 -5
  857. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q5_1.cu +0 -5
  858. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q5_1-q8_0.cu +0 -5
  859. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-f16.cu +0 -5
  860. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q4_0.cu +0 -5
  861. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q4_1.cu +0 -5
  862. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q5_0.cu +0 -5
  863. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q5_1.cu +0 -5
  864. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs128-q8_0-q8_0.cu +0 -5
  865. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs256-f16-f16.cu +0 -5
  866. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-f16.cu +0 -5
  867. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q4_0.cu +0 -5
  868. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q4_1.cu +0 -5
  869. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q5_0.cu +0 -5
  870. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q5_1.cu +0 -5
  871. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f16-instance-hs64-f16-q8_0.cu +0 -5
  872. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-f16.cu +0 -5
  873. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q4_0.cu +0 -5
  874. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q4_1.cu +0 -5
  875. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q5_0.cu +0 -5
  876. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q5_1.cu +0 -5
  877. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-f16-q8_0.cu +0 -5
  878. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-f16.cu +0 -5
  879. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q4_0.cu +0 -5
  880. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q4_1.cu +0 -5
  881. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q5_0.cu +0 -5
  882. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q5_1.cu +0 -5
  883. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_0-q8_0.cu +0 -5
  884. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-f16.cu +0 -5
  885. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q4_0.cu +0 -5
  886. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q4_1.cu +0 -5
  887. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q5_0.cu +0 -5
  888. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q5_1.cu +0 -5
  889. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q4_1-q8_0.cu +0 -5
  890. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-f16.cu +0 -5
  891. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q4_0.cu +0 -5
  892. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q4_1.cu +0 -5
  893. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q5_0.cu +0 -5
  894. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q5_1.cu +0 -5
  895. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_0-q8_0.cu +0 -5
  896. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-f16.cu +0 -5
  897. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q4_0.cu +0 -5
  898. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q4_1.cu +0 -5
  899. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q5_0.cu +0 -5
  900. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q5_1.cu +0 -5
  901. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q5_1-q8_0.cu +0 -5
  902. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-f16.cu +0 -5
  903. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q4_0.cu +0 -5
  904. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q4_1.cu +0 -5
  905. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q5_0.cu +0 -5
  906. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q5_1.cu +0 -5
  907. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs128-q8_0-q8_0.cu +0 -5
  908. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs256-f16-f16.cu +0 -5
  909. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-f16.cu +0 -5
  910. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q4_0.cu +0 -5
  911. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q4_1.cu +0 -5
  912. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q5_0.cu +0 -5
  913. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q5_1.cu +0 -5
  914. data/ext/sources/ggml/src/ggml-cuda/template-instances/fattn-vec-f32-instance-hs64-f16-q8_0.cu +0 -5
  915. data/ext/sources/ggml/src/ggml-kompute/CMakeLists.txt +0 -166
  916. data/ext/sources/ggml/src/ggml-kompute/ggml-kompute.cpp +0 -2251
  917. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/common.comp +0 -112
  918. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_add.comp +0 -58
  919. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_addrow.comp +0 -25
  920. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f16_f16.comp +0 -52
  921. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f16_f32.comp +0 -52
  922. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f32_f16.comp +0 -52
  923. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_cpy_f32_f32.comp +0 -52
  924. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_diagmask.comp +0 -30
  925. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_gelu.comp +0 -22
  926. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows.comp +0 -17
  927. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_f16.comp +0 -31
  928. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_f32.comp +0 -31
  929. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_q4_0.comp +0 -38
  930. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_q4_1.comp +0 -39
  931. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_getrows_q6_k.comp +0 -44
  932. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul.comp +0 -52
  933. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_f16.comp +0 -69
  934. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_mat_f32.comp +0 -51
  935. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q4_0.comp +0 -33
  936. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q4_1.comp +0 -35
  937. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q4_k.comp +0 -140
  938. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q6_k.comp +0 -106
  939. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mat_q8_0.comp +0 -73
  940. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mv_q_n.comp +0 -52
  941. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_mul_mv_q_n_pre.comp +0 -28
  942. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_norm.comp +0 -84
  943. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_relu.comp +0 -21
  944. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rmsnorm.comp +0 -53
  945. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_neox_f16.comp +0 -52
  946. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_neox_f32.comp +0 -52
  947. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_norm_f16.comp +0 -52
  948. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_rope_norm_f32.comp +0 -52
  949. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_scale.comp +0 -19
  950. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_scale_8.comp +0 -23
  951. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_silu.comp +0 -22
  952. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/op_softmax.comp +0 -72
  953. data/ext/sources/ggml/src/ggml-kompute/kompute-shaders/rope_common.comp +0 -71
  954. data/ext/sources/ggml/src/ggml-metal/ggml-metal.m +0 -6280
  955. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/flash_attn_base.comp +0 -162
  956. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mat_vec_base.comp +0 -118
  957. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/mul_mmq_funcs.comp +0 -99
  958. data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/rope_head.comp +0 -58
  959. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_bfloat16_support.comp → feature-tests/bfloat16.comp} +0 -0
  960. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_coopmat_support.comp → feature-tests/coopmat.comp} +0 -0
  961. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_coopmat2_support.comp → feature-tests/coopmat2.comp} +0 -0
  962. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{test_integer_dot_support.comp → feature-tests/integer_dot.comp} +0 -0
  963. /data/ext/sources/ggml/src/ggml-vulkan/vulkan-shaders/{glu_main.comp → glu_main.glsl} +0 -0
@@ -3,8 +3,10 @@
3
3
  #include "llama-impl.h"
4
4
 
5
5
  #include <map>
6
+ #include <set>
6
7
 
7
8
  static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
9
+ { LLM_ARCH_CLIP, "clip" }, // dummy, only used by llama-quantize
8
10
  { LLM_ARCH_LLAMA, "llama" },
9
11
  { LLM_ARCH_LLAMA4, "llama4" },
10
12
  { LLM_ARCH_DECI, "deci" },
@@ -18,10 +20,12 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
18
20
  { LLM_ARCH_STARCODER, "starcoder" },
19
21
  { LLM_ARCH_REFACT, "refact" },
20
22
  { LLM_ARCH_BERT, "bert" },
23
+ { LLM_ARCH_MODERN_BERT, "modern-bert" },
21
24
  { LLM_ARCH_NOMIC_BERT, "nomic-bert" },
22
25
  { LLM_ARCH_NOMIC_BERT_MOE, "nomic-bert-moe" },
23
26
  { LLM_ARCH_NEO_BERT, "neo-bert" },
24
27
  { LLM_ARCH_JINA_BERT_V2, "jina-bert-v2" },
28
+ { LLM_ARCH_JINA_BERT_V3, "jina-bert-v3" },
25
29
  { LLM_ARCH_BLOOM, "bloom" },
26
30
  { LLM_ARCH_STABLELM, "stablelm" },
27
31
  { LLM_ARCH_QWEN, "qwen" },
@@ -30,10 +34,15 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
30
34
  { LLM_ARCH_QWEN2VL, "qwen2vl" },
31
35
  { LLM_ARCH_QWEN3, "qwen3" },
32
36
  { LLM_ARCH_QWEN3MOE, "qwen3moe" },
37
+ { LLM_ARCH_QWEN3NEXT, "qwen3next" },
38
+ { LLM_ARCH_QWEN3VL, "qwen3vl" },
39
+ { LLM_ARCH_QWEN3VLMOE, "qwen3vlmoe" },
33
40
  { LLM_ARCH_PHI2, "phi2" },
34
41
  { LLM_ARCH_PHI3, "phi3" },
35
42
  { LLM_ARCH_PHIMOE, "phimoe" },
36
43
  { LLM_ARCH_PLAMO, "plamo" },
44
+ { LLM_ARCH_PLAMO2, "plamo2" },
45
+ { LLM_ARCH_PLAMO3, "plamo3" },
37
46
  { LLM_ARCH_CODESHELL, "codeshell" },
38
47
  { LLM_ARCH_ORION, "orion" },
39
48
  { LLM_ARCH_INTERNLM2, "internlm2" },
@@ -43,8 +52,12 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
43
52
  { LLM_ARCH_GEMMA2, "gemma2" },
44
53
  { LLM_ARCH_GEMMA3, "gemma3" },
45
54
  { LLM_ARCH_GEMMA3N, "gemma3n" },
55
+ { LLM_ARCH_GEMMA_EMBEDDING, "gemma-embedding" },
46
56
  { LLM_ARCH_STARCODER2, "starcoder2" },
47
57
  { LLM_ARCH_MAMBA, "mamba" },
58
+ { LLM_ARCH_MAMBA2, "mamba2" },
59
+ { LLM_ARCH_JAMBA, "jamba" },
60
+ { LLM_ARCH_FALCON_H1, "falcon-h1" },
48
61
  { LLM_ARCH_XVERSE, "xverse" },
49
62
  { LLM_ARCH_COMMAND_R, "command-r" },
50
63
  { LLM_ARCH_COHERE2, "cohere2" },
@@ -58,65 +71,116 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
58
71
  { LLM_ARCH_DEEPSEEK2, "deepseek2" },
59
72
  { LLM_ARCH_CHATGLM, "chatglm" },
60
73
  { LLM_ARCH_GLM4, "glm4" },
74
+ { LLM_ARCH_GLM4_MOE, "glm4moe" },
61
75
  { LLM_ARCH_BITNET, "bitnet" },
62
76
  { LLM_ARCH_T5, "t5" },
63
77
  { LLM_ARCH_T5ENCODER, "t5encoder" },
64
78
  { LLM_ARCH_JAIS, "jais" },
65
79
  { LLM_ARCH_NEMOTRON, "nemotron" },
80
+ { LLM_ARCH_NEMOTRON_H, "nemotron_h" },
81
+ { LLM_ARCH_NEMOTRON_H_MOE, "nemotron_h_moe" },
66
82
  { LLM_ARCH_EXAONE, "exaone" },
83
+ { LLM_ARCH_EXAONE4, "exaone4" },
67
84
  { LLM_ARCH_RWKV6, "rwkv6" },
68
85
  { LLM_ARCH_RWKV6QWEN2, "rwkv6qwen2" },
69
86
  { LLM_ARCH_RWKV7, "rwkv7" },
70
87
  { LLM_ARCH_ARWKV7, "arwkv7" },
71
88
  { LLM_ARCH_GRANITE, "granite" },
72
89
  { LLM_ARCH_GRANITE_MOE, "granitemoe" },
90
+ { LLM_ARCH_GRANITE_HYBRID, "granitehybrid" },
73
91
  { LLM_ARCH_CHAMELEON, "chameleon" },
74
92
  { LLM_ARCH_WAVTOKENIZER_DEC, "wavtokenizer-dec" },
75
93
  { LLM_ARCH_PLM, "plm" },
76
94
  { LLM_ARCH_BAILINGMOE, "bailingmoe" },
95
+ { LLM_ARCH_BAILINGMOE2, "bailingmoe2" },
77
96
  { LLM_ARCH_DOTS1, "dots1" },
78
97
  { LLM_ARCH_ARCEE, "arcee" },
98
+ { LLM_ARCH_AFMOE, "afmoe" },
79
99
  { LLM_ARCH_ERNIE4_5, "ernie4_5" },
100
+ { LLM_ARCH_ERNIE4_5_MOE, "ernie4_5-moe" },
101
+ { LLM_ARCH_HUNYUAN_MOE, "hunyuan-moe" },
102
+ { LLM_ARCH_HUNYUAN_DENSE, "hunyuan-dense" },
103
+ { LLM_ARCH_SMOLLM3, "smollm3" },
104
+ { LLM_ARCH_OPENAI_MOE, "gpt-oss" },
105
+ { LLM_ARCH_LFM2, "lfm2" },
106
+ { LLM_ARCH_LFM2MOE, "lfm2moe" },
107
+ { LLM_ARCH_DREAM, "dream" },
108
+ { LLM_ARCH_SMALLTHINKER, "smallthinker" },
109
+ { LLM_ARCH_LLADA, "llada" },
110
+ { LLM_ARCH_LLADA_MOE, "llada-moe" },
111
+ { LLM_ARCH_SEED_OSS, "seed_oss" },
112
+ { LLM_ARCH_GROVEMOE, "grovemoe" },
113
+ { LLM_ARCH_APERTUS, "apertus" },
114
+ { LLM_ARCH_MINIMAX_M2, "minimax-m2" },
115
+ { LLM_ARCH_COGVLM, "cogvlm" },
116
+ { LLM_ARCH_RND1, "rnd1" },
117
+ { LLM_ARCH_PANGU_EMBED, "pangu-embedded" },
118
+ { LLM_ARCH_MISTRAL3, "mistral3" },
119
+ { LLM_ARCH_MIMO2, "mimo2" },
120
+ { LLM_ARCH_LLAMA_EMBED, "llama-embed" },
121
+ { LLM_ARCH_MAINCODER, "maincoder" },
80
122
  { LLM_ARCH_UNKNOWN, "(unknown)" },
81
123
  };
82
124
 
83
125
  static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
84
- { LLM_KV_GENERAL_TYPE, "general.type" },
85
- { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
86
- { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
87
- { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
88
- { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
89
- { LLM_KV_GENERAL_NAME, "general.name" },
90
- { LLM_KV_GENERAL_AUTHOR, "general.author" },
91
- { LLM_KV_GENERAL_VERSION, "general.version" },
92
- { LLM_KV_GENERAL_URL, "general.url" },
93
- { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
94
- { LLM_KV_GENERAL_LICENSE, "general.license" },
95
- { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
96
- { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
126
+ { LLM_KV_GENERAL_TYPE, "general.type" },
127
+ { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
128
+ { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
129
+ { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
130
+ { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
131
+ { LLM_KV_GENERAL_SAMPLING_SEQUENCE, "general.sampling.sequence" },
132
+ { LLM_KV_GENERAL_SAMPLING_TOP_K, "general.sampling.top_k" },
133
+ { LLM_KV_GENERAL_SAMPLING_TOP_P, "general.sampling.top_p" },
134
+ { LLM_KV_GENERAL_SAMPLING_MIN_P, "general.sampling.min_p" },
135
+ { LLM_KV_GENERAL_SAMPLING_XTC_PROBABILITY, "general.sampling.xtc_probability" },
136
+ { LLM_KV_GENERAL_SAMPLING_XTC_THRESHOLD, "general.sampling.xtc_threshold" },
137
+ { LLM_KV_GENERAL_SAMPLING_TEMP, "general.sampling.temp" },
138
+ { LLM_KV_GENERAL_SAMPLING_PENALTY_LAST_N, "general.sampling.penalty_last_n" },
139
+ { LLM_KV_GENERAL_SAMPLING_PENALTY_REPEAT, "general.sampling.penalty_repeat" },
140
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT, "general.sampling.mirostat" },
141
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT_TAU, "general.sampling.mirostat_tau" },
142
+ { LLM_KV_GENERAL_SAMPLING_MIROSTAT_ETA, "general.sampling.mirostat_eta" },
143
+ { LLM_KV_GENERAL_NAME, "general.name" },
144
+ { LLM_KV_GENERAL_AUTHOR, "general.author" },
145
+ { LLM_KV_GENERAL_VERSION, "general.version" },
146
+ { LLM_KV_GENERAL_URL, "general.url" },
147
+ { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
148
+ { LLM_KV_GENERAL_LICENSE, "general.license" },
149
+ { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
150
+ { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
97
151
 
98
152
  { LLM_KV_VOCAB_SIZE, "%s.vocab_size" },
99
153
  { LLM_KV_CONTEXT_LENGTH, "%s.context_length" },
100
154
  { LLM_KV_EMBEDDING_LENGTH, "%s.embedding_length" },
155
+ { LLM_KV_EMBEDDING_LENGTH_OUT, "%s.embedding_length_out" },
101
156
  { LLM_KV_FEATURES_LENGTH, "%s.features_length" },
102
157
  { LLM_KV_BLOCK_COUNT, "%s.block_count" },
103
158
  { LLM_KV_LEADING_DENSE_BLOCK_COUNT, "%s.leading_dense_block_count" },
104
159
  { LLM_KV_FEED_FORWARD_LENGTH, "%s.feed_forward_length" },
105
160
  { LLM_KV_EXPERT_FEED_FORWARD_LENGTH, "%s.expert_feed_forward_length" },
106
161
  { LLM_KV_EXPERT_SHARED_FEED_FORWARD_LENGTH, "%s.expert_shared_feed_forward_length" },
162
+ { LLM_KV_EXPERT_CHUNK_FEED_FORWARD_LENGTH, "%s.expert_chunk_feed_forward_length" },
107
163
  { LLM_KV_USE_PARALLEL_RESIDUAL, "%s.use_parallel_residual" },
108
164
  { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" },
109
165
  { LLM_KV_EXPERT_COUNT, "%s.expert_count" },
110
166
  { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" },
111
167
  { LLM_KV_EXPERT_SHARED_COUNT, "%s.expert_shared_count" },
168
+ { LLM_KV_EXPERT_GROUP_COUNT, "%s.expert_group_count" },
169
+ { LLM_KV_EXPERT_GROUP_USED_COUNT, "%s.expert_group_used_count" },
112
170
  { LLM_KV_EXPERT_WEIGHTS_SCALE, "%s.expert_weights_scale" },
113
171
  { LLM_KV_EXPERT_WEIGHTS_NORM, "%s.expert_weights_norm" },
114
172
  { LLM_KV_EXPERT_GATING_FUNC, "%s.expert_gating_func" },
173
+ { LLM_KV_EXPERT_GROUP_SCALE, "%s.expert_group_scale" },
174
+ { LLM_KV_EXPERTS_PER_GROUP, "%s.experts_per_group" },
115
175
  { LLM_KV_MOE_EVERY_N_LAYERS, "%s.moe_every_n_layers" },
176
+ { LLM_KV_NEXTN_PREDICT_LAYERS, "%s.nextn_predict_layers" },
177
+ { LLM_KV_NUM_DEEPSTACK_LAYERS, "%s.n_deepstack_layers" },
116
178
  { LLM_KV_POOLING_TYPE, "%s.pooling_type" },
117
179
  { LLM_KV_LOGIT_SCALE, "%s.logit_scale" },
118
180
  { LLM_KV_DECODER_START_TOKEN_ID, "%s.decoder_start_token_id" },
181
+ { LLM_KV_DECODER_BLOCK_COUNT, "%s.decoder_block_count" },
119
182
  { LLM_KV_ATTN_LOGIT_SOFTCAPPING, "%s.attn_logit_softcapping" },
183
+ { LLM_KV_ROUTER_LOGIT_SOFTCAPPING, "%s.router_logit_softcapping" },
120
184
  { LLM_KV_FINAL_LOGIT_SOFTCAPPING, "%s.final_logit_softcapping" },
121
185
  { LLM_KV_SWIN_NORM, "%s.swin_norm" },
122
186
  { LLM_KV_RESCALE_EVERY_N_LAYERS, "%s.rescale_every_n_layers" },
@@ -146,21 +210,29 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
146
210
  { LLM_KV_ATTENTION_GATE_LORA_RANK, "%s.attention.gate_lora_rank" },
147
211
  { LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT, "%s.attention.relative_buckets_count" },
148
212
  { LLM_KV_ATTENTION_SLIDING_WINDOW, "%s.attention.sliding_window" },
213
+ { LLM_KV_ATTENTION_SLIDING_WINDOW_PATTERN, "%s.attention.sliding_window_pattern" },
149
214
  { LLM_KV_ATTENTION_SCALE, "%s.attention.scale" },
215
+ { LLM_KV_ATTENTION_OUTPUT_SCALE, "%s.attention.output_scale" },
216
+ { LLM_KV_ATTENTION_TEMPERATURE_LENGTH, "%s.attention.temperature_length" },
217
+ { LLM_KV_ATTENTION_TEMPERATURE_SCALE, "%s.attention.temperature_scale" },
150
218
  { LLM_KV_ATTENTION_KEY_LENGTH_MLA, "%s.attention.key_length_mla" },
151
219
  { LLM_KV_ATTENTION_VALUE_LENGTH_MLA, "%s.attention.value_length_mla" },
152
- { LLM_KV_ATTENTION_LAYER_INDICES, "%s.attention.layer_indices" },
153
220
 
154
- { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
155
- { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
156
- { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
157
- { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
158
- { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
159
- { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
160
- { LLM_KV_ROPE_SCALING_ATTN_FACTOR, "%s.rope.scaling.attn_factor" },
161
- { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" },
162
- { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" },
163
- { LLM_KV_ROPE_SCALING_YARN_LOG_MUL, "%s.rope.scaling.yarn_log_multiplier" },
221
+ { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
222
+ { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
223
+ { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
224
+ { LLM_KV_ROPE_FREQ_BASE_SWA, "%s.rope.freq_base_swa" },
225
+ { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
226
+ { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
227
+ { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
228
+ { LLM_KV_ROPE_SCALING_ATTN_FACTOR, "%s.rope.scaling.attn_factor" },
229
+ { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" },
230
+ { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" },
231
+ { LLM_KV_ROPE_SCALING_YARN_LOG_MUL, "%s.rope.scaling.yarn_log_multiplier" },
232
+ { LLM_KV_ROPE_SCALING_YARN_EXT_FACTOR, "%s.rope.scaling.yarn_ext_factor" },
233
+ { LLM_KV_ROPE_SCALING_YARN_ATTN_FACTOR, "%s.rope.scaling.yarn_attn_factor" },
234
+ { LLM_KV_ROPE_SCALING_YARN_BETA_FAST, "%s.rope.scaling.yarn_beta_fast" },
235
+ { LLM_KV_ROPE_SCALING_YARN_BETA_SLOW, "%s.rope.scaling.yarn_beta_slow" },
164
236
 
165
237
  { LLM_KV_SPLIT_NO, "split.no" },
166
238
  { LLM_KV_SPLIT_COUNT, "split.count" },
@@ -170,6 +242,7 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
170
242
  { LLM_KV_SSM_INNER_SIZE, "%s.ssm.inner_size" },
171
243
  { LLM_KV_SSM_STATE_SIZE, "%s.ssm.state_size" },
172
244
  { LLM_KV_SSM_TIME_STEP_RANK, "%s.ssm.time_step_rank" },
245
+ { LLM_KV_SSM_GROUP_COUNT, "%s.ssm.group_count" },
173
246
  { LLM_KV_SSM_DT_B_C_RMS, "%s.ssm.dt_b_c_rms" },
174
247
 
175
248
  { LLM_KV_WKV_HEAD_SIZE, "%s.wkv.head_size" },
@@ -182,6 +255,13 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
182
255
 
183
256
  { LLM_KV_CLASSIFIER_OUTPUT_LABELS, "%s.classifier.output_labels" },
184
257
 
258
+ { LLM_KV_SHORTCONV_L_CACHE, "%s.shortconv.l_cache" },
259
+ // sentence-transformers dense modules feature dims
260
+ { LLM_KV_DENSE_2_FEAT_IN, "%s.dense_2_feat_in" },
261
+ { LLM_KV_DENSE_2_FEAT_OUT, "%s.dense_2_feat_out" },
262
+ { LLM_KV_DENSE_3_FEAT_IN, "%s.dense_3_feat_in" },
263
+ { LLM_KV_DENSE_3_FEAT_OUT, "%s.dense_3_feat_out" },
264
+
185
265
  { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" },
186
266
  { LLM_KV_TOKENIZER_PRE, "tokenizer.ggml.pre" },
187
267
  { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" },
@@ -214,8 +294,16 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
214
294
  { LLM_KV_TOKENIZER_FIM_REP_ID, "tokenizer.ggml.fim_rep_token_id" },
215
295
  { LLM_KV_TOKENIZER_FIM_SEP_ID, "tokenizer.ggml.fim_sep_token_id" },
216
296
 
217
- { LLM_KV_ADAPTER_TYPE, "adapter.type" },
218
- { LLM_KV_ADAPTER_LORA_ALPHA, "adapter.lora.alpha" },
297
+ { LLM_KV_ADAPTER_TYPE, "adapter.type" },
298
+ { LLM_KV_ADAPTER_LORA_ALPHA, "adapter.lora.alpha" },
299
+ { LLM_KV_ADAPTER_LORA_TASK_NAME, "adapter.lora.task_name" },
300
+ { LLM_KV_ADAPTER_LORA_PROMPT_PREFIX, "adapter.lora.prompt_prefix" },
301
+ { LLM_KV_ADAPTER_ALORA_INVOCATION_TOKENS, "adapter.alora.invocation_tokens" },
302
+
303
+ { LLM_KV_XIELU_ALPHA_N, "xielu.alpha_n" },
304
+ { LLM_KV_XIELU_ALPHA_P, "xielu.alpha_p" },
305
+ { LLM_KV_XIELU_BETA, "xielu.beta" },
306
+ { LLM_KV_XIELU_EPS, "xielu.eps" },
219
307
 
220
308
  // deprecated
221
309
  { LLM_KV_TOKENIZER_PREFIX_ID, "tokenizer.ggml.prefix_token_id" },
@@ -223,1476 +311,1978 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
223
311
  { LLM_KV_TOKENIZER_MIDDLE_ID, "tokenizer.ggml.middle_token_id" },
224
312
  };
225
313
 
226
- static const std::map<llm_arch, std::map<llm_tensor, const char *>> LLM_TENSOR_NAMES = {
227
- {
228
- LLM_ARCH_LLAMA,
229
- {
230
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
231
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
232
- { LLM_TENSOR_OUTPUT, "output" },
233
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
234
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
235
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
236
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
237
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
238
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
239
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
240
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
241
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
242
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
243
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
244
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
245
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
246
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
247
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
248
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
249
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
250
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
251
- },
252
- },
253
- {
254
- LLM_ARCH_ARCEE,
255
- {
256
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
257
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
258
- { LLM_TENSOR_OUTPUT, "output" },
259
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
260
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
261
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
262
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
263
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
264
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
265
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
266
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
267
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
268
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
269
- },
270
- },
271
- {
272
- LLM_ARCH_LLAMA4,
273
- {
274
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
275
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
276
- { LLM_TENSOR_OUTPUT, "output" },
277
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
278
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
279
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
280
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
281
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
282
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
283
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
284
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
285
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
286
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
287
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
288
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
289
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
290
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
291
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
292
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
293
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
294
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
295
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
296
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
297
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
298
- },
299
- },
300
- {
301
- LLM_ARCH_DECI,
302
- {
303
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
304
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
305
- { LLM_TENSOR_OUTPUT, "output" },
306
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
307
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
308
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
309
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
310
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
311
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
312
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
313
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
314
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
315
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
316
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
317
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
318
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
319
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
320
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
321
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
322
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
323
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
324
- },
325
- },
326
- {
327
- LLM_ARCH_BAICHUAN,
328
- {
329
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
330
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
331
- { LLM_TENSOR_OUTPUT, "output" },
332
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
333
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
334
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
335
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
336
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
337
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
338
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
339
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
340
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
341
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
342
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
343
- },
344
- },
345
- {
346
- LLM_ARCH_FALCON,
347
- {
348
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
349
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
350
- { LLM_TENSOR_OUTPUT, "output" },
351
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
352
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
353
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
354
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
355
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
356
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
357
- },
358
- },
359
- {
360
- LLM_ARCH_GROK,
361
- {
362
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
363
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
364
- { LLM_TENSOR_OUTPUT, "output" },
365
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
366
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
367
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
368
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
369
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
370
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
371
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
372
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
373
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
374
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
375
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
376
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
377
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
378
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
379
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
380
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
381
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
382
- },
383
- },
384
- {
385
- LLM_ARCH_GPT2,
386
- {
387
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
388
- { LLM_TENSOR_POS_EMBD, "position_embd" },
389
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
390
- { LLM_TENSOR_OUTPUT, "output" },
391
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
392
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
393
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
394
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
395
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
396
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
397
- },
398
- },
399
- {
400
- LLM_ARCH_GPTJ,
401
- {
402
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
403
- },
404
- },
405
- {
406
- LLM_ARCH_GPTNEOX,
407
- {
408
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
409
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
410
- { LLM_TENSOR_OUTPUT, "output" },
411
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
412
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
413
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
414
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
415
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
416
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
417
- },
418
- },
419
- {
420
- LLM_ARCH_MPT,
421
- {
422
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
423
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
424
- { LLM_TENSOR_OUTPUT, "output"},
425
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
426
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
427
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
428
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
429
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
430
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
431
- { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
432
- { LLM_TENSOR_POS_EMBD, "position_embd" },
433
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm"},
434
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm"},
435
- },
436
- },
437
- {
438
- LLM_ARCH_STARCODER,
439
- {
440
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
441
- { LLM_TENSOR_POS_EMBD, "position_embd" },
442
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
443
- { LLM_TENSOR_OUTPUT, "output" },
444
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
445
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
446
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
447
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
448
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
449
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
450
- },
451
- },
452
- {
453
- LLM_ARCH_REFACT,
454
- {
455
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
456
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
457
- { LLM_TENSOR_OUTPUT, "output" },
458
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
459
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
460
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
461
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
462
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
463
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
464
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
465
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
466
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
467
- },
468
- },
469
- {
470
- LLM_ARCH_BERT,
471
- {
472
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
473
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
474
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
475
- { LLM_TENSOR_POS_EMBD, "position_embd" },
476
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
477
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
478
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
479
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
480
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
481
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
482
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
483
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
484
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
485
- { LLM_TENSOR_CLS, "cls" },
486
- { LLM_TENSOR_CLS_OUT, "cls.output" },
487
- },
488
- },
489
- {
490
- LLM_ARCH_NOMIC_BERT,
491
- {
492
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
493
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
494
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
495
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
496
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
497
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
498
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
499
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
500
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
501
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
502
- },
503
- },
504
- {
505
- LLM_ARCH_NOMIC_BERT_MOE,
506
- {
507
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
508
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
509
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
510
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
511
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
512
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
513
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
514
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
515
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
516
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
517
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
518
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
519
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
520
- },
521
- },
522
- {
523
- LLM_ARCH_NEO_BERT,
524
- {
525
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
526
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
527
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
528
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
529
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
530
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
531
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
532
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
533
- { LLM_TENSOR_CLS, "cls" },
534
- { LLM_TENSOR_CLS_OUT, "cls.output" },
535
- },
536
- },
537
- {
538
- LLM_ARCH_JINA_BERT_V2,
539
- {
540
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
541
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
542
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
543
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
544
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
545
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
546
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
547
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
548
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
549
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
550
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
551
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
552
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
553
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
554
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
555
- { LLM_TENSOR_CLS, "cls" },
556
- },
557
- },
558
- {
559
- LLM_ARCH_BLOOM,
560
- {
561
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
562
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
563
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
564
- { LLM_TENSOR_OUTPUT, "output" },
565
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
566
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
567
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
568
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
569
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
570
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
571
- },
572
- },
573
- {
574
- LLM_ARCH_STABLELM,
575
- {
576
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
577
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
578
- { LLM_TENSOR_OUTPUT, "output" },
579
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
580
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
581
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
582
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
583
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
584
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
585
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
586
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
587
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
588
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
589
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
590
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
591
- },
592
- },
593
- {
594
- LLM_ARCH_QWEN,
595
- {
596
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
597
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
598
- { LLM_TENSOR_OUTPUT, "output" },
599
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
600
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
601
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
602
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
603
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
604
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
605
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
606
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
607
- },
608
- },
609
- {
610
- LLM_ARCH_QWEN2,
611
- {
612
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
613
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
614
- { LLM_TENSOR_OUTPUT, "output" },
615
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
616
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
617
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
618
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
619
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
620
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
621
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
622
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
623
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
624
- },
625
- },
626
- {
627
- LLM_ARCH_QWEN2VL,
628
- {
629
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
630
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
631
- { LLM_TENSOR_OUTPUT, "output" },
632
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
633
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
634
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
635
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
636
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
637
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
638
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
639
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
640
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
641
- },
642
- },
643
- {
644
- LLM_ARCH_QWEN2MOE,
645
- {
646
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
647
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
648
- { LLM_TENSOR_OUTPUT, "output" },
649
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
650
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
651
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
652
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
653
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
654
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
655
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
656
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
657
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
658
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
659
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
660
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
661
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
662
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
663
- },
664
- },
665
- {
666
- LLM_ARCH_QWEN3,
667
- {
668
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
669
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
670
- { LLM_TENSOR_OUTPUT, "output" },
671
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
672
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
673
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
674
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
675
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
676
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
677
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
678
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
679
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
680
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
681
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
682
- },
683
- },
684
- {
685
- LLM_ARCH_QWEN3MOE,
686
- {
687
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
688
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
689
- { LLM_TENSOR_OUTPUT, "output" },
690
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
691
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
692
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
693
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
694
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
695
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
696
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
697
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
698
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
699
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
700
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
701
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
702
- },
703
- },
704
- {
705
- LLM_ARCH_PHI2,
706
- {
707
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
708
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
709
- { LLM_TENSOR_OUTPUT, "output" },
710
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
711
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
712
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
713
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
714
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
715
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
716
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
717
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
718
- },
719
- },
720
- {
721
- LLM_ARCH_PHI3,
722
- {
723
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
724
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
725
- { LLM_TENSOR_OUTPUT, "output" },
726
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
727
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
728
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
729
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
730
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
731
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
732
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
733
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
734
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
735
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
736
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
737
- },
738
- },
739
- {
740
- LLM_ARCH_PHIMOE,
741
- {
742
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
743
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
744
- { LLM_TENSOR_OUTPUT, "output" },
745
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
746
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
747
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
748
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
749
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
750
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
751
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
752
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
753
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
754
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
755
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
756
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
757
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
758
- },
759
- },
760
- {
761
- LLM_ARCH_PLAMO,
762
- {
763
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
764
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
765
- { LLM_TENSOR_OUTPUT, "output" },
766
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
767
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
768
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
769
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
770
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
771
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
772
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
773
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
774
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
775
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
776
- },
777
- },
778
- {
779
- LLM_ARCH_CODESHELL,
780
- {
781
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
782
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
783
- { LLM_TENSOR_OUTPUT, "output" },
784
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
785
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
786
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
787
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
788
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
789
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
790
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
791
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
792
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
793
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
794
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
795
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
796
- },
797
- },
798
- {
799
- LLM_ARCH_ORION,
800
- {
801
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
802
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
803
- { LLM_TENSOR_OUTPUT, "output" },
804
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
805
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
806
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
807
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
808
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
809
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
810
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
811
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
812
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
813
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
814
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
815
- },
816
- },
817
- {
818
- LLM_ARCH_INTERNLM2,
819
- {
820
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
821
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
822
- { LLM_TENSOR_OUTPUT, "output" },
823
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
824
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
825
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
826
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
827
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
828
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
829
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
830
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
831
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
832
- },
833
- },
834
- {
835
- LLM_ARCH_MINICPM,
836
- {
837
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
838
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
839
- { LLM_TENSOR_OUTPUT, "output" },
840
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
841
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
842
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
843
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
844
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
845
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
846
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
847
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
848
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
849
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
850
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
851
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
852
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
853
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
854
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
855
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
856
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
857
- },
858
- },
859
- {
860
- LLM_ARCH_MINICPM3,
861
- {
862
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
863
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
864
- { LLM_TENSOR_OUTPUT, "output" },
865
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
866
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
867
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
868
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
869
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
870
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
871
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
872
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
873
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
874
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
875
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
876
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
877
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
878
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
879
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
880
- },
881
- },
882
- {
883
- LLM_ARCH_GEMMA,
884
- {
885
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
886
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
887
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
888
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
889
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
890
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
891
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
892
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
893
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
894
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
895
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
896
- },
897
- },
898
- {
899
- LLM_ARCH_GEMMA2,
900
- {
901
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
902
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
903
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
904
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
905
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
906
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
907
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
908
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
909
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
910
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
911
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
912
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
913
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
914
- },
915
- },
916
- {
917
- LLM_ARCH_GEMMA3,
918
- {
919
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
920
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
921
- { LLM_TENSOR_OUTPUT, "output" },
922
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
923
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
924
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
925
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
926
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
927
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
928
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
929
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
930
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
931
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
932
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
933
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
934
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
935
- },
936
- },
937
- {
938
- LLM_ARCH_GEMMA3N,
939
- {
940
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
941
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
942
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
943
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
944
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
945
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
946
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
947
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
948
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
949
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
950
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
951
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
952
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
953
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
954
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
955
- { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
956
- { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
957
- { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
958
- { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
959
- { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
960
- { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
961
- { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
962
- { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
963
- { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
964
- { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
965
- { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
966
- { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
967
- { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
968
- { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
969
- { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
970
- { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
971
- },
972
- },
973
- {
974
- LLM_ARCH_STARCODER2,
975
- {
976
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
977
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
978
- { LLM_TENSOR_OUTPUT, "output" },
979
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
980
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
981
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
982
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
983
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
984
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
985
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
986
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
987
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
988
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
989
- },
990
- },
991
- {
992
- LLM_ARCH_MAMBA,
993
- {
994
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
995
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
996
- { LLM_TENSOR_OUTPUT, "output" },
997
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
998
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
999
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
1000
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
1001
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
1002
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
1003
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
1004
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
1005
- },
1006
- },
1007
- {
1008
- LLM_ARCH_XVERSE,
1009
- {
1010
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1011
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1012
- { LLM_TENSOR_OUTPUT, "output" },
1013
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1014
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1015
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1016
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1017
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1018
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1019
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1020
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1021
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1022
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1023
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1024
- },
1025
- },
1026
- {
1027
- LLM_ARCH_COMMAND_R,
1028
- {
1029
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1030
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1031
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1032
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1033
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1034
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1035
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1036
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1037
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1038
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1039
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1040
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1041
- },
1042
- },
1043
- {
1044
- LLM_ARCH_COHERE2,
1045
- {
1046
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1047
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1048
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1049
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1050
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1051
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1052
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1053
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1054
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1055
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1056
- },
1057
- },
1058
- {
1059
- LLM_ARCH_DBRX,
1060
- {
1061
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1062
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1063
- { LLM_TENSOR_OUTPUT, "output" },
1064
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1065
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1066
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1067
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
1068
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1069
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1070
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1071
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1072
- },
1073
- },
1074
- {
1075
- LLM_ARCH_OLMO,
1076
- {
1077
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1078
- { LLM_TENSOR_OUTPUT, "output" },
1079
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1080
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1081
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1082
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1083
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1084
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1085
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1086
- },
1087
- },
1088
- {
1089
- LLM_ARCH_OLMO2,
1090
- {
1091
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1092
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1093
- { LLM_TENSOR_OUTPUT, "output" },
1094
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1095
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1096
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1097
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1098
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1099
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1100
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1101
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1102
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1103
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1104
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1105
- },
1106
- },
1107
- {
1108
- LLM_ARCH_OLMOE,
1109
- {
1110
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1111
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1112
- { LLM_TENSOR_OUTPUT, "output" },
1113
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1114
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1115
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1116
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1117
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1118
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1119
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1120
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1121
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1122
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1123
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1124
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1125
- },
1126
- },
1127
- {
1128
- LLM_ARCH_OPENELM,
1129
- {
1130
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1131
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1132
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1133
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1134
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1135
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1136
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1137
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1138
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1139
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1140
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1141
- },
1142
- },
1143
- {
1144
- LLM_ARCH_ARCTIC,
1145
- {
1146
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1147
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1148
- { LLM_TENSOR_OUTPUT, "output" },
1149
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1150
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1151
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1152
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1153
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1154
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1155
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1156
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1157
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1158
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1159
- { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
1160
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1161
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1162
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1163
- },
1164
- },
1165
- {
1166
- LLM_ARCH_DEEPSEEK,
1167
- {
1168
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1169
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1170
- { LLM_TENSOR_OUTPUT, "output" },
1171
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1172
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1173
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1174
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1175
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1176
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1177
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1178
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1179
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1180
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1181
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1182
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1183
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1184
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1185
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1186
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1187
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1188
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1189
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1190
- },
1191
- },
1192
- {
1193
- LLM_ARCH_DEEPSEEK2,
1194
- {
1195
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1196
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1197
- { LLM_TENSOR_OUTPUT, "output" },
1198
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1199
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
1200
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
1201
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1202
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
1203
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
1204
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
1205
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
1206
- { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
1207
- { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
1208
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1209
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1210
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1211
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1212
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1213
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1214
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1215
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1216
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1217
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1218
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1219
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1220
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1221
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1222
- },
1223
- },
1224
- {
1225
- LLM_ARCH_PLM,
1226
- {
1227
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1228
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1229
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1230
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1231
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
1232
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
1233
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
1234
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1235
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1236
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1237
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1238
- },
1239
- },
1240
- {
1241
- LLM_ARCH_CHATGLM,
1242
- {
1243
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1244
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1245
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1246
- { LLM_TENSOR_OUTPUT, "output" },
1247
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1248
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1249
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1250
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1251
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1252
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1253
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1254
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1255
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1256
- },
1257
- },
1258
- {
1259
- LLM_ARCH_GLM4,
1260
- {
1261
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1262
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1263
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1264
- { LLM_TENSOR_OUTPUT, "output" },
1265
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1266
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1267
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1268
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1269
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1270
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1271
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1272
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1273
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
1274
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
1275
- },
1276
- },
1277
- {
1278
- LLM_ARCH_BITNET,
1279
- {
1280
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1281
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1282
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1283
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1284
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1285
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1286
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1287
- { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
1288
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1289
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1290
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1291
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1292
- { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
1293
- },
1294
- },
1295
- {
1296
- LLM_ARCH_T5,
1297
- {
1298
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1299
- { LLM_TENSOR_OUTPUT, "output" },
1300
- { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
1301
- { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
1302
- { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
1303
- { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
1304
- { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
1305
- { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
1306
- { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
1307
- { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
1308
- { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
1309
- { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
1310
- { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
1311
- { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
1312
- { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
1313
- { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
1314
- { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
1315
- { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
1316
- { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
1317
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
1318
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
1319
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
1320
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
1321
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
1322
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
1323
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
1324
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
1325
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
1326
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
1327
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
1328
- },
1329
- },
1330
- {
1331
- LLM_ARCH_T5ENCODER,
1332
- {
1333
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1334
- { LLM_TENSOR_OUTPUT, "output" },
1335
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
1336
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
1337
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
1338
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
1339
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
1340
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
1341
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
1342
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
1343
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
1344
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
1345
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
1346
- },
1347
- },
1348
- {
1349
- LLM_ARCH_JAIS,
1350
- {
1351
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1352
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1353
- { LLM_TENSOR_OUTPUT, "output" },
1354
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1355
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
1356
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1357
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1358
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1359
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1360
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1361
- },
1362
- },
1363
- {
1364
- LLM_ARCH_NEMOTRON,
1365
- {
1366
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1367
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1368
- { LLM_TENSOR_OUTPUT, "output" },
1369
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1370
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1371
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1372
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1373
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1374
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1375
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1376
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1377
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1378
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1379
- },
1380
- },
1381
- {
1382
- LLM_ARCH_EXAONE,
1383
- {
1384
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1385
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1386
- { LLM_TENSOR_OUTPUT, "output" },
1387
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1388
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1389
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1390
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1391
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1392
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1393
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
1394
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1395
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1396
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1397
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1398
- },
1399
- },
1400
- {
1401
- LLM_ARCH_RWKV6,
1402
- {
1403
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1404
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1405
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1406
- { LLM_TENSOR_OUTPUT, "output" },
1407
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1408
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
1409
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1410
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1411
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
1412
- { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
1413
- { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
1414
- { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
1415
- { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
1416
- { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
1417
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1418
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
1419
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
1420
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
1421
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
1422
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1423
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1424
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1425
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
1426
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1427
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1428
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
1429
- { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
1430
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
1431
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
1432
- { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
1433
- },
1434
- },
1435
- {
1436
- LLM_ARCH_RWKV6QWEN2,
1437
- {
1438
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1439
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1440
- { LLM_TENSOR_OUTPUT, "output" },
1441
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1442
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1443
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1444
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
1445
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1446
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
1447
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
1448
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
1449
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
1450
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1451
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1452
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1453
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
1454
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1455
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1456
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1457
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1458
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1459
- },
1460
- },
1461
- {
1462
- LLM_ARCH_RWKV7,
1463
- {
1464
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1465
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1466
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1467
- { LLM_TENSOR_OUTPUT, "output" },
1468
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1469
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
1470
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
1471
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1472
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1473
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
1474
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
1475
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
1476
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
1477
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
1478
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
1479
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
1480
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
1481
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
1482
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
1483
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
1484
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1485
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1486
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1487
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1488
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1489
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1490
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
1491
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
1492
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
1493
- },
1494
- },
1495
- {
1496
- LLM_ARCH_ARWKV7,
1497
- {
1498
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1499
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1500
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1501
- { LLM_TENSOR_OUTPUT, "output" },
1502
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1503
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
1504
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
1505
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
1506
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
1507
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
1508
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
1509
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
1510
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
1511
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
1512
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
1513
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
1514
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
1515
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
1516
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
1517
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
1518
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
1519
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
1520
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
1521
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
1522
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
1523
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1524
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1525
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1526
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1527
- },
1528
- },
1529
- {
1530
- LLM_ARCH_GRANITE,
1531
- {
1532
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1533
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1534
- { LLM_TENSOR_OUTPUT, "output" },
1535
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1536
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1537
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1538
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1539
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1540
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1541
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1542
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1543
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1544
- },
1545
- },
1546
- {
1547
- LLM_ARCH_GRANITE_MOE,
1548
- {
1549
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1550
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1551
- { LLM_TENSOR_OUTPUT, "output" },
1552
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1553
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1554
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1555
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1556
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1557
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1558
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1559
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1560
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1561
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1562
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1563
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1564
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1565
- },
1566
- },
1567
- {
1568
- LLM_ARCH_CHAMELEON,
1569
- {
1570
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1571
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1572
- { LLM_TENSOR_OUTPUT, "output" },
1573
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1574
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1575
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1576
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1577
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1578
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1579
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1580
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1581
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1582
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1583
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1584
- },
1585
- },
1586
- {
1587
- LLM_ARCH_WAVTOKENIZER_DEC,
1588
- {
1589
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1590
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
1591
- { LLM_TENSOR_CONV1D, "conv1d" },
1592
- { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
1593
- { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
1594
- { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
1595
- { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
1596
- { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
1597
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1598
- { LLM_TENSOR_OUTPUT, "output" },
1599
- { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
1600
- { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
1601
- { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
1602
- { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
1603
- { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
1604
- { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
1605
- { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
1606
- { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
1607
- { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
1608
- { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
1609
- },
1610
- },
1611
- {
1612
- LLM_ARCH_BAILINGMOE,
1613
- {
1614
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1615
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1616
- { LLM_TENSOR_OUTPUT, "output" },
1617
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
1618
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1619
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1620
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1621
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1622
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1623
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1624
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1625
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1626
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1627
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1628
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1629
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1630
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1631
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1632
- },
1633
- },
1634
- {
1635
- LLM_ARCH_DOTS1,
1636
- {
1637
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1638
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1639
- { LLM_TENSOR_OUTPUT, "output" },
1640
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1641
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1642
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
1643
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1644
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
1645
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1646
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1647
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1648
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1649
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1650
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1651
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
1652
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
1653
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
1654
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
1655
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
1656
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
1657
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
1658
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
1659
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
1660
- }
1661
- },
1662
- {
1663
- LLM_ARCH_ERNIE4_5,
1664
- {
1665
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1666
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
1667
- { LLM_TENSOR_OUTPUT, "output" },
1668
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
1669
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
1670
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
1671
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
1672
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
1673
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
1674
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
1675
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
1676
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
1677
- },
1678
- },
1679
- {
1680
- LLM_ARCH_UNKNOWN,
1681
- {
1682
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
1683
- },
1684
- },
314
+ static const std::map<llm_tensor, const char *> LLM_TENSOR_NAMES = {
315
+ { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
316
+ { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
317
+ { LLM_TENSOR_OUTPUT_NORM_LFM2, "token_embd_norm" }, // fix for wrong tensor name
318
+ { LLM_TENSOR_OUTPUT, "output" },
319
+ { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
320
+ { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
321
+ { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
322
+ { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
323
+ { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
324
+ { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
325
+ { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
326
+ { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
327
+ { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
328
+ { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
329
+ { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
330
+ { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
331
+ { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
332
+ { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
333
+ { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
334
+ { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
335
+ { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
336
+ { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
337
+ { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
338
+ { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
339
+ { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
340
+ { LLM_TENSOR_ATTN_GATE, "blk.%d.attn_gate" },
341
+ { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
342
+ { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
343
+ { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
344
+ { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
345
+ { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
346
+ { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
347
+ { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
348
+ { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
349
+ { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
350
+ { LLM_TENSOR_POS_EMBD, "position_embd" },
351
+ { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
352
+ { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
353
+ { LLM_TENSOR_TOKEN_TYPES, "token_types" },
354
+ { LLM_TENSOR_CLS, "cls" },
355
+ { LLM_TENSOR_CLS_OUT, "cls.output" },
356
+ { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
357
+ { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
358
+ { LLM_TENSOR_SSM_A_NOSCAN, "blk.%d.ssm_a" },
359
+ { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
360
+ { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
361
+ { LLM_TENSOR_SSM_BETA_ALPHA, "blk.%d.ssm_ba" },
362
+ { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
363
+ { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
364
+ { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
365
+ { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
366
+ { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
367
+ { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
368
+ { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
369
+ { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
370
+ { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
371
+ { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
372
+ { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
373
+ { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
374
+ { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
375
+ { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
376
+ { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
377
+ { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
378
+ { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
379
+ { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
380
+ { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
381
+ { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
382
+ { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
383
+ { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
384
+ { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
385
+ { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
386
+ { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
387
+ { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
388
+ { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
389
+ { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
390
+ { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
391
+ { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
392
+ { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
393
+ { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
394
+ { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
395
+ { LLM_TENSOR_DENSE_2_OUT, "dense_2" },
396
+ { LLM_TENSOR_DENSE_3_OUT, "dense_3" },
397
+ { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
398
+ { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
399
+ { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
400
+ { LLM_TENSOR_NEXTN_EH_PROJ, "blk.%d.nextn.eh_proj" },
401
+ { LLM_TENSOR_NEXTN_EMBED_TOKENS, "blk.%d.nextn.embed_tokens" },
402
+ { LLM_TENSOR_NEXTN_ENORM, "blk.%d.nextn.enorm" },
403
+ { LLM_TENSOR_NEXTN_HNORM, "blk.%d.nextn.hnorm" },
404
+ { LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, "blk.%d.nextn.shared_head_head" },
405
+ { LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, "blk.%d.nextn.shared_head_norm" },
406
+ { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
407
+ { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
408
+ { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
409
+ { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
410
+ { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
411
+ { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
412
+ { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
413
+ { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
414
+ { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
415
+ { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
416
+ { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
417
+ { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
418
+ { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
419
+ { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
420
+ { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
421
+ { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
422
+ { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
423
+ { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
424
+ { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
425
+ { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
426
+ { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
427
+ { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
428
+ { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
429
+ { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
430
+ { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
431
+ { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
432
+ { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
433
+ { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
434
+ { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
435
+ { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
436
+ { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
437
+ { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
438
+ { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
439
+ { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
440
+ { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
441
+ { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
442
+ { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
443
+ { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
444
+ { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
445
+ { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
446
+ { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
447
+ { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
448
+ { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
449
+ { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
450
+ { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
451
+ { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
452
+ { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
453
+ { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
454
+ { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
455
+ { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
456
+ { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
457
+ { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
458
+ { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
459
+ { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
460
+ { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
461
+ { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
462
+ { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
463
+ { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
464
+ { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
465
+ { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
466
+ { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
467
+ { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
468
+ { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
469
+ { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
470
+ { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
471
+ { LLM_TENSOR_CONV1D, "conv1d" },
472
+ { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
473
+ { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
474
+ { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
475
+ { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
476
+ { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
477
+ { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
478
+ { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
479
+ { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
480
+ { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
481
+ { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
482
+ { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
483
+ { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
484
+ { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
485
+ { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
486
+ { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
487
+ { LLM_TENSOR_ATTN_SINKS, "blk.%d.attn_sinks" },
488
+ { LLM_TENSOR_SHORTCONV_CONV, "blk.%d.shortconv.conv" },
489
+ { LLM_TENSOR_SHORTCONV_INPROJ, "blk.%d.shortconv.in_proj" },
490
+ { LLM_TENSOR_SHORTCONV_OUTPROJ, "blk.%d.shortconv.out_proj" },
491
+ { LLM_TENSOR_FFN_GATE_CHEXPS, "blk.%d.ffn_gate_chexps" },
492
+ { LLM_TENSOR_FFN_DOWN_CHEXPS, "blk.%d.ffn_down_chexps" },
493
+ { LLM_TENSOR_FFN_UP_CHEXPS, "blk.%d.ffn_up_chexps" },
494
+ { LLM_TENSOR_VISEXP_ATTN_QKV, "blk.%d.vis_attn_qkv" },
495
+ { LLM_TENSOR_VISEXP_ATTN_OUT, "blk.%d.vis_attn_output" },
496
+ { LLM_TENSOR_VISEXP_FFN_GATE, "blk.%d.vis_gate" },
497
+ { LLM_TENSOR_VISEXP_FFN_DOWN, "blk.%d.vis_down" },
498
+ { LLM_TENSOR_VISEXP_FFN_UP, "blk.%d.vis_up" },
1685
499
  };
1686
500
 
501
+ static std::set<llm_tensor> llm_get_tensor_names(llm_arch arch) {
502
+ switch (arch) {
503
+ case LLM_ARCH_CLIP:
504
+ return {};
505
+ case LLM_ARCH_LLAMA:
506
+ case LLM_ARCH_DECI:
507
+ case LLM_ARCH_MISTRAL3:
508
+ case LLM_ARCH_LLAMA_EMBED:
509
+ return {
510
+ LLM_TENSOR_TOKEN_EMBD,
511
+ LLM_TENSOR_OUTPUT_NORM,
512
+ LLM_TENSOR_OUTPUT,
513
+ LLM_TENSOR_ROPE_FREQS,
514
+ LLM_TENSOR_ATTN_NORM,
515
+ LLM_TENSOR_ATTN_Q,
516
+ LLM_TENSOR_ATTN_K,
517
+ LLM_TENSOR_ATTN_V,
518
+ LLM_TENSOR_ATTN_OUT,
519
+ LLM_TENSOR_ATTN_ROT_EMBD,
520
+ LLM_TENSOR_FFN_GATE_INP,
521
+ LLM_TENSOR_FFN_NORM,
522
+ LLM_TENSOR_FFN_GATE,
523
+ LLM_TENSOR_FFN_DOWN,
524
+ LLM_TENSOR_FFN_UP,
525
+ LLM_TENSOR_FFN_GATE_EXP,
526
+ LLM_TENSOR_FFN_DOWN_EXP,
527
+ LLM_TENSOR_FFN_UP_EXP,
528
+ LLM_TENSOR_FFN_GATE_EXPS,
529
+ LLM_TENSOR_FFN_DOWN_EXPS,
530
+ LLM_TENSOR_FFN_UP_EXPS,
531
+ };
532
+ case LLM_ARCH_ARCEE:
533
+ case LLM_ARCH_STARCODER2:
534
+ case LLM_ARCH_NEMOTRON:
535
+ return {
536
+ LLM_TENSOR_TOKEN_EMBD,
537
+ LLM_TENSOR_OUTPUT_NORM,
538
+ LLM_TENSOR_OUTPUT,
539
+ LLM_TENSOR_ROPE_FREQS,
540
+ LLM_TENSOR_ATTN_NORM,
541
+ LLM_TENSOR_ATTN_Q,
542
+ LLM_TENSOR_ATTN_K,
543
+ LLM_TENSOR_ATTN_V,
544
+ LLM_TENSOR_ATTN_OUT,
545
+ LLM_TENSOR_ATTN_ROT_EMBD,
546
+ LLM_TENSOR_FFN_NORM,
547
+ LLM_TENSOR_FFN_DOWN,
548
+ LLM_TENSOR_FFN_UP,
549
+ };
550
+ case LLM_ARCH_AFMOE:
551
+ return {
552
+ LLM_TENSOR_TOKEN_EMBD,
553
+ LLM_TENSOR_OUTPUT_NORM,
554
+ LLM_TENSOR_OUTPUT,
555
+ LLM_TENSOR_ATTN_NORM,
556
+ LLM_TENSOR_ATTN_POST_NORM,
557
+ LLM_TENSOR_ATTN_Q,
558
+ LLM_TENSOR_ATTN_K,
559
+ LLM_TENSOR_ATTN_V,
560
+ LLM_TENSOR_ATTN_OUT,
561
+ LLM_TENSOR_ATTN_Q_NORM,
562
+ LLM_TENSOR_ATTN_K_NORM,
563
+ LLM_TENSOR_ATTN_GATE,
564
+ LLM_TENSOR_FFN_NORM,
565
+ LLM_TENSOR_FFN_POST_NORM,
566
+ LLM_TENSOR_FFN_GATE_INP,
567
+ LLM_TENSOR_FFN_GATE,
568
+ LLM_TENSOR_FFN_DOWN,
569
+ LLM_TENSOR_FFN_UP,
570
+ LLM_TENSOR_FFN_GATE_EXPS,
571
+ LLM_TENSOR_FFN_DOWN_EXPS,
572
+ LLM_TENSOR_FFN_UP_EXPS,
573
+ LLM_TENSOR_FFN_GATE_SHEXP,
574
+ LLM_TENSOR_FFN_UP_SHEXP,
575
+ LLM_TENSOR_FFN_DOWN_SHEXP,
576
+ LLM_TENSOR_FFN_EXP_PROBS_B,
577
+ };
578
+ case LLM_ARCH_LLAMA4:
579
+ return {
580
+ LLM_TENSOR_TOKEN_EMBD,
581
+ LLM_TENSOR_OUTPUT_NORM,
582
+ LLM_TENSOR_OUTPUT,
583
+ LLM_TENSOR_ROPE_FREQS,
584
+ LLM_TENSOR_ATTN_NORM,
585
+ LLM_TENSOR_ATTN_Q,
586
+ LLM_TENSOR_ATTN_K,
587
+ LLM_TENSOR_ATTN_V,
588
+ LLM_TENSOR_ATTN_OUT,
589
+ LLM_TENSOR_ATTN_ROT_EMBD,
590
+ LLM_TENSOR_FFN_GATE_INP,
591
+ LLM_TENSOR_FFN_NORM,
592
+ LLM_TENSOR_FFN_GATE,
593
+ LLM_TENSOR_FFN_DOWN,
594
+ LLM_TENSOR_FFN_UP,
595
+ LLM_TENSOR_FFN_GATE_EXP,
596
+ LLM_TENSOR_FFN_DOWN_EXP,
597
+ LLM_TENSOR_FFN_UP_EXP,
598
+ LLM_TENSOR_FFN_GATE_EXPS,
599
+ LLM_TENSOR_FFN_DOWN_EXPS,
600
+ LLM_TENSOR_FFN_UP_EXPS,
601
+ LLM_TENSOR_FFN_GATE_SHEXP,
602
+ LLM_TENSOR_FFN_DOWN_SHEXP,
603
+ LLM_TENSOR_FFN_UP_SHEXP,
604
+ };
605
+ case LLM_ARCH_BAICHUAN:
606
+ case LLM_ARCH_ORION:
607
+ case LLM_ARCH_XVERSE:
608
+ case LLM_ARCH_EXAONE:
609
+ return {
610
+ LLM_TENSOR_TOKEN_EMBD,
611
+ LLM_TENSOR_OUTPUT_NORM,
612
+ LLM_TENSOR_OUTPUT,
613
+ LLM_TENSOR_ROPE_FREQS,
614
+ LLM_TENSOR_ATTN_NORM,
615
+ LLM_TENSOR_ATTN_Q,
616
+ LLM_TENSOR_ATTN_K,
617
+ LLM_TENSOR_ATTN_V,
618
+ LLM_TENSOR_ATTN_OUT,
619
+ LLM_TENSOR_ATTN_ROT_EMBD,
620
+ LLM_TENSOR_FFN_NORM,
621
+ LLM_TENSOR_FFN_GATE,
622
+ LLM_TENSOR_FFN_DOWN,
623
+ LLM_TENSOR_FFN_UP,
624
+ };
625
+ case LLM_ARCH_FALCON:
626
+ return {
627
+ LLM_TENSOR_TOKEN_EMBD,
628
+ LLM_TENSOR_OUTPUT_NORM,
629
+ LLM_TENSOR_OUTPUT,
630
+ LLM_TENSOR_ATTN_NORM,
631
+ LLM_TENSOR_ATTN_NORM_2,
632
+ LLM_TENSOR_ATTN_QKV,
633
+ LLM_TENSOR_ATTN_OUT,
634
+ LLM_TENSOR_FFN_DOWN,
635
+ LLM_TENSOR_FFN_UP,
636
+ };
637
+ case LLM_ARCH_GROK:
638
+ return {
639
+ LLM_TENSOR_TOKEN_EMBD,
640
+ LLM_TENSOR_OUTPUT_NORM,
641
+ LLM_TENSOR_OUTPUT,
642
+ LLM_TENSOR_ROPE_FREQS,
643
+ LLM_TENSOR_ATTN_NORM,
644
+ LLM_TENSOR_ATTN_Q,
645
+ LLM_TENSOR_ATTN_K,
646
+ LLM_TENSOR_ATTN_V,
647
+ LLM_TENSOR_ATTN_OUT,
648
+ LLM_TENSOR_ATTN_ROT_EMBD,
649
+ LLM_TENSOR_FFN_GATE_INP,
650
+ LLM_TENSOR_FFN_NORM,
651
+ LLM_TENSOR_FFN_GATE,
652
+ LLM_TENSOR_FFN_DOWN,
653
+ LLM_TENSOR_FFN_UP,
654
+ LLM_TENSOR_FFN_GATE_EXP,
655
+ LLM_TENSOR_FFN_DOWN_EXP,
656
+ LLM_TENSOR_FFN_UP_EXP,
657
+ LLM_TENSOR_FFN_GATE_EXPS,
658
+ LLM_TENSOR_FFN_DOWN_EXPS,
659
+ LLM_TENSOR_FFN_UP_EXPS,
660
+ LLM_TENSOR_FFN_POST_NORM,
661
+ LLM_TENSOR_LAYER_OUT_NORM,
662
+ LLM_TENSOR_ATTN_OUT_NORM,
663
+ };
664
+ case LLM_ARCH_GPT2:
665
+ case LLM_ARCH_STARCODER:
666
+ return {
667
+ LLM_TENSOR_TOKEN_EMBD,
668
+ LLM_TENSOR_POS_EMBD,
669
+ LLM_TENSOR_OUTPUT_NORM,
670
+ LLM_TENSOR_OUTPUT,
671
+ LLM_TENSOR_ATTN_NORM,
672
+ LLM_TENSOR_ATTN_QKV,
673
+ LLM_TENSOR_ATTN_OUT,
674
+ LLM_TENSOR_FFN_NORM,
675
+ LLM_TENSOR_FFN_UP,
676
+ LLM_TENSOR_FFN_DOWN,
677
+ };
678
+ case LLM_ARCH_GPTNEOX:
679
+ return {
680
+ LLM_TENSOR_TOKEN_EMBD,
681
+ LLM_TENSOR_OUTPUT_NORM,
682
+ LLM_TENSOR_OUTPUT,
683
+ LLM_TENSOR_ATTN_NORM,
684
+ LLM_TENSOR_ATTN_QKV,
685
+ LLM_TENSOR_ATTN_OUT,
686
+ LLM_TENSOR_FFN_NORM,
687
+ LLM_TENSOR_FFN_DOWN,
688
+ LLM_TENSOR_FFN_UP,
689
+ };
690
+ case LLM_ARCH_MPT:
691
+ return {
692
+ LLM_TENSOR_TOKEN_EMBD,
693
+ LLM_TENSOR_OUTPUT_NORM,
694
+ LLM_TENSOR_OUTPUT,
695
+ LLM_TENSOR_ATTN_NORM,
696
+ LLM_TENSOR_FFN_NORM,
697
+ LLM_TENSOR_ATTN_QKV,
698
+ LLM_TENSOR_ATTN_OUT,
699
+ LLM_TENSOR_FFN_DOWN,
700
+ LLM_TENSOR_FFN_UP,
701
+ LLM_TENSOR_FFN_ACT,
702
+ LLM_TENSOR_POS_EMBD,
703
+ LLM_TENSOR_ATTN_Q_NORM,
704
+ LLM_TENSOR_ATTN_K_NORM,
705
+ };
706
+ case LLM_ARCH_REFACT:
707
+ case LLM_ARCH_QWEN2:
708
+ case LLM_ARCH_QWEN2VL:
709
+ case LLM_ARCH_INTERNLM2:
710
+ case LLM_ARCH_GRANITE:
711
+ case LLM_ARCH_ERNIE4_5:
712
+ case LLM_ARCH_SMOLLM3:
713
+ case LLM_ARCH_DREAM:
714
+ case LLM_ARCH_LLADA:
715
+ case LLM_ARCH_PANGU_EMBED:
716
+ return {
717
+ LLM_TENSOR_TOKEN_EMBD,
718
+ LLM_TENSOR_OUTPUT_NORM,
719
+ LLM_TENSOR_OUTPUT,
720
+ LLM_TENSOR_ATTN_NORM,
721
+ LLM_TENSOR_ATTN_Q,
722
+ LLM_TENSOR_ATTN_K,
723
+ LLM_TENSOR_ATTN_V,
724
+ LLM_TENSOR_ATTN_OUT,
725
+ LLM_TENSOR_FFN_NORM,
726
+ LLM_TENSOR_FFN_GATE,
727
+ LLM_TENSOR_FFN_DOWN,
728
+ LLM_TENSOR_FFN_UP,
729
+ };
730
+ case LLM_ARCH_BERT:
731
+ return {
732
+ LLM_TENSOR_TOKEN_EMBD,
733
+ LLM_TENSOR_TOKEN_EMBD_NORM,
734
+ LLM_TENSOR_TOKEN_TYPES,
735
+ LLM_TENSOR_POS_EMBD,
736
+ LLM_TENSOR_ATTN_OUT_NORM,
737
+ LLM_TENSOR_ATTN_QKV,
738
+ LLM_TENSOR_ATTN_Q,
739
+ LLM_TENSOR_ATTN_K,
740
+ LLM_TENSOR_ATTN_V,
741
+ LLM_TENSOR_ATTN_OUT,
742
+ LLM_TENSOR_LAYER_OUT_NORM,
743
+ LLM_TENSOR_FFN_DOWN,
744
+ LLM_TENSOR_FFN_UP,
745
+ LLM_TENSOR_CLS,
746
+ LLM_TENSOR_CLS_OUT,
747
+ };
748
+ case LLM_ARCH_NOMIC_BERT:
749
+ return {
750
+ LLM_TENSOR_TOKEN_EMBD,
751
+ LLM_TENSOR_TOKEN_EMBD_NORM,
752
+ LLM_TENSOR_TOKEN_TYPES,
753
+ LLM_TENSOR_ATTN_OUT_NORM,
754
+ LLM_TENSOR_ATTN_QKV,
755
+ LLM_TENSOR_ATTN_OUT,
756
+ LLM_TENSOR_LAYER_OUT_NORM,
757
+ LLM_TENSOR_FFN_GATE,
758
+ LLM_TENSOR_FFN_DOWN,
759
+ LLM_TENSOR_FFN_UP,
760
+ };
761
+ case LLM_ARCH_NOMIC_BERT_MOE:
762
+ return {
763
+ LLM_TENSOR_TOKEN_EMBD,
764
+ LLM_TENSOR_TOKEN_EMBD_NORM,
765
+ LLM_TENSOR_TOKEN_TYPES,
766
+ LLM_TENSOR_ATTN_OUT_NORM,
767
+ LLM_TENSOR_ATTN_QKV,
768
+ LLM_TENSOR_ATTN_OUT,
769
+ LLM_TENSOR_LAYER_OUT_NORM,
770
+ LLM_TENSOR_FFN_GATE,
771
+ LLM_TENSOR_FFN_DOWN,
772
+ LLM_TENSOR_FFN_UP,
773
+ LLM_TENSOR_FFN_GATE_INP,
774
+ LLM_TENSOR_FFN_DOWN_EXPS,
775
+ LLM_TENSOR_FFN_UP_EXPS,
776
+ };
777
+ case LLM_ARCH_NEO_BERT:
778
+ return {
779
+ LLM_TENSOR_TOKEN_EMBD,
780
+ LLM_TENSOR_ATTN_NORM,
781
+ LLM_TENSOR_ATTN_QKV,
782
+ LLM_TENSOR_ATTN_OUT,
783
+ LLM_TENSOR_FFN_NORM,
784
+ LLM_TENSOR_FFN_DOWN,
785
+ LLM_TENSOR_FFN_UP,
786
+ LLM_TENSOR_ENC_OUTPUT_NORM,
787
+ LLM_TENSOR_CLS,
788
+ LLM_TENSOR_CLS_OUT,
789
+ };
790
+ case LLM_ARCH_MODERN_BERT:
791
+ return {
792
+ LLM_TENSOR_TOKEN_EMBD,
793
+ LLM_TENSOR_TOKEN_EMBD_NORM,
794
+ LLM_TENSOR_OUTPUT_NORM,
795
+ LLM_TENSOR_ATTN_NORM,
796
+ LLM_TENSOR_ATTN_OUT,
797
+ LLM_TENSOR_ATTN_QKV,
798
+ LLM_TENSOR_FFN_DOWN,
799
+ LLM_TENSOR_FFN_UP,
800
+ LLM_TENSOR_FFN_NORM,
801
+ LLM_TENSOR_CLS,
802
+ LLM_TENSOR_CLS_OUT,
803
+ };
804
+ case LLM_ARCH_JINA_BERT_V2:
805
+ return {
806
+ LLM_TENSOR_TOKEN_EMBD,
807
+ LLM_TENSOR_TOKEN_EMBD_NORM,
808
+ LLM_TENSOR_TOKEN_TYPES,
809
+ LLM_TENSOR_ATTN_NORM_2,
810
+ LLM_TENSOR_ATTN_OUT_NORM,
811
+ LLM_TENSOR_ATTN_Q,
812
+ LLM_TENSOR_ATTN_Q_NORM,
813
+ LLM_TENSOR_ATTN_K,
814
+ LLM_TENSOR_ATTN_K_NORM,
815
+ LLM_TENSOR_ATTN_V,
816
+ LLM_TENSOR_ATTN_OUT,
817
+ LLM_TENSOR_LAYER_OUT_NORM,
818
+ LLM_TENSOR_FFN_DOWN,
819
+ LLM_TENSOR_FFN_GATE,
820
+ LLM_TENSOR_FFN_UP,
821
+ LLM_TENSOR_CLS,
822
+ };
823
+ case LLM_ARCH_JINA_BERT_V3:
824
+ return {
825
+ LLM_TENSOR_TOKEN_EMBD,
826
+ LLM_TENSOR_TOKEN_EMBD_NORM,
827
+ LLM_TENSOR_TOKEN_TYPES,
828
+ LLM_TENSOR_ATTN_OUT_NORM,
829
+ LLM_TENSOR_ATTN_QKV,
830
+ LLM_TENSOR_ATTN_OUT,
831
+ LLM_TENSOR_FFN_DOWN,
832
+ LLM_TENSOR_FFN_UP,
833
+ LLM_TENSOR_LAYER_OUT_NORM,
834
+ };
835
+ case LLM_ARCH_BLOOM:
836
+ return {
837
+ LLM_TENSOR_TOKEN_EMBD,
838
+ LLM_TENSOR_TOKEN_EMBD_NORM,
839
+ LLM_TENSOR_OUTPUT_NORM,
840
+ LLM_TENSOR_OUTPUT,
841
+ LLM_TENSOR_ATTN_NORM,
842
+ LLM_TENSOR_ATTN_QKV,
843
+ LLM_TENSOR_ATTN_OUT,
844
+ LLM_TENSOR_FFN_NORM,
845
+ LLM_TENSOR_FFN_UP,
846
+ LLM_TENSOR_FFN_DOWN,
847
+ };
848
+ case LLM_ARCH_STABLELM:
849
+ return {
850
+ LLM_TENSOR_TOKEN_EMBD,
851
+ LLM_TENSOR_OUTPUT_NORM,
852
+ LLM_TENSOR_OUTPUT,
853
+ LLM_TENSOR_ROPE_FREQS,
854
+ LLM_TENSOR_ATTN_NORM,
855
+ LLM_TENSOR_ATTN_Q,
856
+ LLM_TENSOR_ATTN_K,
857
+ LLM_TENSOR_ATTN_V,
858
+ LLM_TENSOR_ATTN_OUT,
859
+ LLM_TENSOR_FFN_NORM,
860
+ LLM_TENSOR_FFN_GATE,
861
+ LLM_TENSOR_FFN_DOWN,
862
+ LLM_TENSOR_FFN_UP,
863
+ LLM_TENSOR_ATTN_Q_NORM,
864
+ LLM_TENSOR_ATTN_K_NORM,
865
+ };
866
+ case LLM_ARCH_QWEN:
867
+ return {
868
+ LLM_TENSOR_TOKEN_EMBD,
869
+ LLM_TENSOR_OUTPUT_NORM,
870
+ LLM_TENSOR_OUTPUT,
871
+ LLM_TENSOR_ROPE_FREQS,
872
+ LLM_TENSOR_ATTN_NORM,
873
+ LLM_TENSOR_ATTN_QKV,
874
+ LLM_TENSOR_ATTN_OUT,
875
+ LLM_TENSOR_FFN_NORM,
876
+ LLM_TENSOR_FFN_GATE,
877
+ LLM_TENSOR_FFN_DOWN,
878
+ LLM_TENSOR_FFN_UP,
879
+ };
880
+ case LLM_ARCH_QWEN2MOE:
881
+ return {
882
+ LLM_TENSOR_TOKEN_EMBD,
883
+ LLM_TENSOR_OUTPUT_NORM,
884
+ LLM_TENSOR_OUTPUT,
885
+ LLM_TENSOR_ATTN_NORM,
886
+ LLM_TENSOR_ATTN_Q,
887
+ LLM_TENSOR_ATTN_K,
888
+ LLM_TENSOR_ATTN_V,
889
+ LLM_TENSOR_ATTN_OUT,
890
+ LLM_TENSOR_FFN_NORM,
891
+ LLM_TENSOR_FFN_GATE_INP,
892
+ LLM_TENSOR_FFN_GATE_EXPS,
893
+ LLM_TENSOR_FFN_DOWN_EXPS,
894
+ LLM_TENSOR_FFN_UP_EXPS,
895
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
896
+ LLM_TENSOR_FFN_GATE_SHEXP,
897
+ LLM_TENSOR_FFN_DOWN_SHEXP,
898
+ LLM_TENSOR_FFN_UP_SHEXP,
899
+ };
900
+ case LLM_ARCH_QWEN3:
901
+ return {
902
+ LLM_TENSOR_TOKEN_EMBD,
903
+ LLM_TENSOR_OUTPUT_NORM,
904
+ LLM_TENSOR_OUTPUT,
905
+ LLM_TENSOR_CLS_OUT,
906
+ LLM_TENSOR_ATTN_NORM,
907
+ LLM_TENSOR_ATTN_Q,
908
+ LLM_TENSOR_ATTN_Q_NORM,
909
+ LLM_TENSOR_ATTN_K,
910
+ LLM_TENSOR_ATTN_K_NORM,
911
+ LLM_TENSOR_ATTN_V,
912
+ LLM_TENSOR_ATTN_OUT,
913
+ LLM_TENSOR_FFN_NORM,
914
+ LLM_TENSOR_FFN_GATE,
915
+ LLM_TENSOR_FFN_DOWN,
916
+ LLM_TENSOR_FFN_UP,
917
+ };
918
+ case LLM_ARCH_QWEN3MOE:
919
+ case LLM_ARCH_QWEN3VLMOE:
920
+ case LLM_ARCH_OLMOE:
921
+ case LLM_ARCH_LLADA_MOE:
922
+ case LLM_ARCH_RND1:
923
+ return {
924
+ LLM_TENSOR_TOKEN_EMBD,
925
+ LLM_TENSOR_OUTPUT_NORM,
926
+ LLM_TENSOR_OUTPUT,
927
+ LLM_TENSOR_ATTN_NORM,
928
+ LLM_TENSOR_ATTN_Q,
929
+ LLM_TENSOR_ATTN_Q_NORM,
930
+ LLM_TENSOR_ATTN_K,
931
+ LLM_TENSOR_ATTN_K_NORM,
932
+ LLM_TENSOR_ATTN_V,
933
+ LLM_TENSOR_ATTN_OUT,
934
+ LLM_TENSOR_FFN_NORM,
935
+ LLM_TENSOR_FFN_GATE_INP,
936
+ LLM_TENSOR_FFN_GATE_EXPS,
937
+ LLM_TENSOR_FFN_DOWN_EXPS,
938
+ LLM_TENSOR_FFN_UP_EXPS,
939
+ };
940
+ case LLM_ARCH_QWEN3NEXT:
941
+ return {
942
+ LLM_TENSOR_TOKEN_EMBD,
943
+ LLM_TENSOR_OUTPUT_NORM,
944
+ LLM_TENSOR_OUTPUT,
945
+ LLM_TENSOR_ATTN_NORM,
946
+ LLM_TENSOR_ATTN_POST_NORM,
947
+ LLM_TENSOR_ATTN_Q,
948
+ LLM_TENSOR_ATTN_Q_NORM,
949
+ LLM_TENSOR_ATTN_K,
950
+ LLM_TENSOR_ATTN_K_NORM,
951
+ LLM_TENSOR_ATTN_V,
952
+ LLM_TENSOR_ATTN_OUT,
953
+ LLM_TENSOR_ATTN_QKV,
954
+ LLM_TENSOR_ATTN_GATE,
955
+ LLM_TENSOR_FFN_NORM,
956
+ LLM_TENSOR_FFN_GATE_INP,
957
+ LLM_TENSOR_FFN_GATE_EXPS,
958
+ LLM_TENSOR_FFN_DOWN_EXPS,
959
+ LLM_TENSOR_FFN_UP_EXPS,
960
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
961
+ LLM_TENSOR_FFN_GATE_SHEXP,
962
+ LLM_TENSOR_FFN_DOWN_SHEXP,
963
+ LLM_TENSOR_FFN_UP_SHEXP,
964
+ LLM_TENSOR_SSM_A_NOSCAN,
965
+ LLM_TENSOR_SSM_CONV1D,
966
+ LLM_TENSOR_SSM_DT,
967
+ LLM_TENSOR_SSM_BETA_ALPHA,
968
+ LLM_TENSOR_SSM_IN,
969
+ LLM_TENSOR_SSM_NORM,
970
+ LLM_TENSOR_SSM_OUT,
971
+ };
972
+ case LLM_ARCH_QWEN3VL:
973
+ case LLM_ARCH_CHAMELEON:
974
+ case LLM_ARCH_HUNYUAN_DENSE:
975
+ return {
976
+ LLM_TENSOR_TOKEN_EMBD,
977
+ LLM_TENSOR_OUTPUT_NORM,
978
+ LLM_TENSOR_OUTPUT,
979
+ LLM_TENSOR_ATTN_NORM,
980
+ LLM_TENSOR_ATTN_Q,
981
+ LLM_TENSOR_ATTN_Q_NORM,
982
+ LLM_TENSOR_ATTN_K,
983
+ LLM_TENSOR_ATTN_K_NORM,
984
+ LLM_TENSOR_ATTN_V,
985
+ LLM_TENSOR_ATTN_OUT,
986
+ LLM_TENSOR_FFN_NORM,
987
+ LLM_TENSOR_FFN_GATE,
988
+ LLM_TENSOR_FFN_DOWN,
989
+ LLM_TENSOR_FFN_UP,
990
+ };
991
+ case LLM_ARCH_PHI2:
992
+ return {
993
+ LLM_TENSOR_TOKEN_EMBD,
994
+ LLM_TENSOR_OUTPUT_NORM,
995
+ LLM_TENSOR_OUTPUT,
996
+ LLM_TENSOR_ATTN_NORM,
997
+ LLM_TENSOR_ATTN_QKV,
998
+ LLM_TENSOR_ATTN_Q,
999
+ LLM_TENSOR_ATTN_K,
1000
+ LLM_TENSOR_ATTN_V,
1001
+ LLM_TENSOR_ATTN_OUT,
1002
+ LLM_TENSOR_FFN_DOWN,
1003
+ LLM_TENSOR_FFN_UP,
1004
+ };
1005
+ case LLM_ARCH_PHI3:
1006
+ return {
1007
+ LLM_TENSOR_TOKEN_EMBD,
1008
+ LLM_TENSOR_OUTPUT_NORM,
1009
+ LLM_TENSOR_OUTPUT,
1010
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1011
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1012
+ LLM_TENSOR_ATTN_NORM,
1013
+ LLM_TENSOR_ATTN_QKV,
1014
+ LLM_TENSOR_ATTN_Q,
1015
+ LLM_TENSOR_ATTN_K,
1016
+ LLM_TENSOR_ATTN_V,
1017
+ LLM_TENSOR_ATTN_OUT,
1018
+ LLM_TENSOR_FFN_NORM,
1019
+ LLM_TENSOR_FFN_DOWN,
1020
+ LLM_TENSOR_FFN_UP,
1021
+ };
1022
+ case LLM_ARCH_PHIMOE:
1023
+ return {
1024
+ LLM_TENSOR_TOKEN_EMBD,
1025
+ LLM_TENSOR_OUTPUT_NORM,
1026
+ LLM_TENSOR_OUTPUT,
1027
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1028
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1029
+ LLM_TENSOR_ATTN_NORM,
1030
+ LLM_TENSOR_ATTN_QKV,
1031
+ LLM_TENSOR_ATTN_Q,
1032
+ LLM_TENSOR_ATTN_K,
1033
+ LLM_TENSOR_ATTN_V,
1034
+ LLM_TENSOR_ATTN_OUT,
1035
+ LLM_TENSOR_FFN_NORM,
1036
+ LLM_TENSOR_FFN_GATE_INP,
1037
+ LLM_TENSOR_FFN_GATE_EXPS,
1038
+ LLM_TENSOR_FFN_DOWN_EXPS,
1039
+ LLM_TENSOR_FFN_UP_EXPS,
1040
+ };
1041
+ case LLM_ARCH_PLAMO:
1042
+ return {
1043
+ LLM_TENSOR_TOKEN_EMBD,
1044
+ LLM_TENSOR_OUTPUT_NORM,
1045
+ LLM_TENSOR_OUTPUT,
1046
+ LLM_TENSOR_ROPE_FREQS,
1047
+ LLM_TENSOR_ATTN_NORM,
1048
+ LLM_TENSOR_ATTN_Q,
1049
+ LLM_TENSOR_ATTN_K,
1050
+ LLM_TENSOR_ATTN_V,
1051
+ LLM_TENSOR_ATTN_OUT,
1052
+ LLM_TENSOR_ATTN_ROT_EMBD,
1053
+ LLM_TENSOR_FFN_GATE,
1054
+ LLM_TENSOR_FFN_DOWN,
1055
+ LLM_TENSOR_FFN_UP,
1056
+ };
1057
+ case LLM_ARCH_PLAMO2:
1058
+ return {
1059
+ LLM_TENSOR_TOKEN_EMBD,
1060
+ LLM_TENSOR_OUTPUT_NORM,
1061
+ LLM_TENSOR_OUTPUT,
1062
+ LLM_TENSOR_ROPE_FREQS,
1063
+ LLM_TENSOR_ATTN_NORM,
1064
+ LLM_TENSOR_ATTN_QKV,
1065
+ LLM_TENSOR_ATTN_Q_NORM,
1066
+ LLM_TENSOR_ATTN_K_NORM,
1067
+ LLM_TENSOR_ATTN_OUT,
1068
+ LLM_TENSOR_ATTN_ROT_EMBD,
1069
+ LLM_TENSOR_FFN_NORM,
1070
+ LLM_TENSOR_FFN_DOWN,
1071
+ LLM_TENSOR_FFN_UP,
1072
+ LLM_TENSOR_SSM_IN,
1073
+ LLM_TENSOR_SSM_CONV1D,
1074
+ LLM_TENSOR_SSM_X,
1075
+ LLM_TENSOR_SSM_DT,
1076
+ LLM_TENSOR_SSM_A,
1077
+ LLM_TENSOR_SSM_D,
1078
+ LLM_TENSOR_SSM_OUT,
1079
+ LLM_TENSOR_SSM_DT_NORM,
1080
+ LLM_TENSOR_SSM_B_NORM,
1081
+ LLM_TENSOR_SSM_C_NORM,
1082
+ LLM_TENSOR_ATTN_POST_NORM,
1083
+ LLM_TENSOR_FFN_POST_NORM,
1084
+ };
1085
+ case LLM_ARCH_PLAMO3:
1086
+ return {
1087
+ LLM_TENSOR_TOKEN_EMBD,
1088
+ LLM_TENSOR_OUTPUT_NORM,
1089
+ LLM_TENSOR_OUTPUT,
1090
+ LLM_TENSOR_ATTN_NORM,
1091
+ LLM_TENSOR_ATTN_QKV,
1092
+ LLM_TENSOR_ATTN_Q_NORM,
1093
+ LLM_TENSOR_ATTN_K_NORM,
1094
+ LLM_TENSOR_ATTN_OUT,
1095
+ LLM_TENSOR_ATTN_POST_NORM,
1096
+ LLM_TENSOR_FFN_NORM,
1097
+ LLM_TENSOR_FFN_POST_NORM,
1098
+ LLM_TENSOR_FFN_DOWN,
1099
+ LLM_TENSOR_FFN_UP,
1100
+ };
1101
+ case LLM_ARCH_CODESHELL:
1102
+ return {
1103
+ LLM_TENSOR_TOKEN_EMBD,
1104
+ LLM_TENSOR_OUTPUT_NORM,
1105
+ LLM_TENSOR_OUTPUT,
1106
+ LLM_TENSOR_ROPE_FREQS,
1107
+ LLM_TENSOR_ATTN_NORM,
1108
+ LLM_TENSOR_ATTN_Q,
1109
+ LLM_TENSOR_ATTN_K,
1110
+ LLM_TENSOR_ATTN_V,
1111
+ LLM_TENSOR_ATTN_QKV,
1112
+ LLM_TENSOR_ATTN_OUT,
1113
+ LLM_TENSOR_ATTN_ROT_EMBD,
1114
+ LLM_TENSOR_FFN_NORM,
1115
+ LLM_TENSOR_FFN_GATE,
1116
+ LLM_TENSOR_FFN_DOWN,
1117
+ LLM_TENSOR_FFN_UP,
1118
+ };
1119
+ case LLM_ARCH_MINICPM:
1120
+ return {
1121
+ LLM_TENSOR_TOKEN_EMBD,
1122
+ LLM_TENSOR_OUTPUT_NORM,
1123
+ LLM_TENSOR_OUTPUT,
1124
+ LLM_TENSOR_ROPE_FREQS,
1125
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1126
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1127
+ LLM_TENSOR_ATTN_NORM,
1128
+ LLM_TENSOR_ATTN_Q,
1129
+ LLM_TENSOR_ATTN_K,
1130
+ LLM_TENSOR_ATTN_V,
1131
+ LLM_TENSOR_ATTN_OUT,
1132
+ LLM_TENSOR_ATTN_ROT_EMBD,
1133
+ LLM_TENSOR_FFN_GATE_INP,
1134
+ LLM_TENSOR_FFN_NORM,
1135
+ LLM_TENSOR_FFN_GATE,
1136
+ LLM_TENSOR_FFN_DOWN,
1137
+ LLM_TENSOR_FFN_UP,
1138
+ LLM_TENSOR_FFN_GATE_EXP,
1139
+ LLM_TENSOR_FFN_DOWN_EXP,
1140
+ LLM_TENSOR_FFN_UP_EXP,
1141
+ };
1142
+ case LLM_ARCH_MINICPM3:
1143
+ return {
1144
+ LLM_TENSOR_TOKEN_EMBD,
1145
+ LLM_TENSOR_OUTPUT_NORM,
1146
+ LLM_TENSOR_OUTPUT,
1147
+ LLM_TENSOR_ROPE_FACTORS_LONG,
1148
+ LLM_TENSOR_ROPE_FACTORS_SHORT,
1149
+ LLM_TENSOR_ATTN_NORM,
1150
+ LLM_TENSOR_ATTN_Q_A_NORM,
1151
+ LLM_TENSOR_ATTN_KV_A_NORM,
1152
+ LLM_TENSOR_ATTN_Q,
1153
+ LLM_TENSOR_ATTN_Q_A,
1154
+ LLM_TENSOR_ATTN_Q_B,
1155
+ LLM_TENSOR_ATTN_KV_A_MQA,
1156
+ LLM_TENSOR_ATTN_KV_B,
1157
+ LLM_TENSOR_ATTN_OUT,
1158
+ LLM_TENSOR_FFN_NORM,
1159
+ LLM_TENSOR_FFN_GATE,
1160
+ LLM_TENSOR_FFN_UP,
1161
+ LLM_TENSOR_FFN_DOWN,
1162
+ };
1163
+ case LLM_ARCH_GEMMA:
1164
+ return {
1165
+ LLM_TENSOR_TOKEN_EMBD,
1166
+ LLM_TENSOR_OUTPUT_NORM,
1167
+ LLM_TENSOR_ATTN_NORM,
1168
+ LLM_TENSOR_ATTN_Q,
1169
+ LLM_TENSOR_ATTN_K,
1170
+ LLM_TENSOR_ATTN_V,
1171
+ LLM_TENSOR_ATTN_OUT,
1172
+ LLM_TENSOR_FFN_NORM,
1173
+ LLM_TENSOR_FFN_GATE,
1174
+ LLM_TENSOR_FFN_DOWN,
1175
+ LLM_TENSOR_FFN_UP,
1176
+ };
1177
+ case LLM_ARCH_GEMMA2:
1178
+ return {
1179
+ LLM_TENSOR_TOKEN_EMBD,
1180
+ LLM_TENSOR_OUTPUT_NORM,
1181
+ LLM_TENSOR_ATTN_NORM,
1182
+ LLM_TENSOR_ATTN_Q,
1183
+ LLM_TENSOR_ATTN_K,
1184
+ LLM_TENSOR_ATTN_V,
1185
+ LLM_TENSOR_ATTN_OUT,
1186
+ LLM_TENSOR_ATTN_POST_NORM,
1187
+ LLM_TENSOR_FFN_NORM,
1188
+ LLM_TENSOR_FFN_GATE,
1189
+ LLM_TENSOR_FFN_DOWN,
1190
+ LLM_TENSOR_FFN_UP,
1191
+ LLM_TENSOR_FFN_POST_NORM,
1192
+ };
1193
+ case LLM_ARCH_GEMMA3:
1194
+ return {
1195
+ LLM_TENSOR_TOKEN_EMBD,
1196
+ LLM_TENSOR_OUTPUT_NORM,
1197
+ LLM_TENSOR_OUTPUT,
1198
+ LLM_TENSOR_ATTN_NORM,
1199
+ LLM_TENSOR_ATTN_Q,
1200
+ LLM_TENSOR_ATTN_Q_NORM,
1201
+ LLM_TENSOR_ATTN_K,
1202
+ LLM_TENSOR_ATTN_K_NORM,
1203
+ LLM_TENSOR_ATTN_V,
1204
+ LLM_TENSOR_ATTN_OUT,
1205
+ LLM_TENSOR_ATTN_POST_NORM,
1206
+ LLM_TENSOR_FFN_NORM,
1207
+ LLM_TENSOR_FFN_GATE,
1208
+ LLM_TENSOR_FFN_DOWN,
1209
+ LLM_TENSOR_FFN_UP,
1210
+ LLM_TENSOR_FFN_POST_NORM,
1211
+ };
1212
+ case LLM_ARCH_GEMMA3N:
1213
+ return {
1214
+ LLM_TENSOR_TOKEN_EMBD,
1215
+ LLM_TENSOR_OUTPUT_NORM,
1216
+ LLM_TENSOR_ATTN_NORM,
1217
+ LLM_TENSOR_ATTN_Q,
1218
+ LLM_TENSOR_ATTN_Q_NORM,
1219
+ LLM_TENSOR_ATTN_K,
1220
+ LLM_TENSOR_ATTN_K_NORM,
1221
+ LLM_TENSOR_ATTN_V,
1222
+ LLM_TENSOR_ATTN_OUT,
1223
+ LLM_TENSOR_ATTN_POST_NORM,
1224
+ LLM_TENSOR_FFN_NORM,
1225
+ LLM_TENSOR_FFN_GATE,
1226
+ LLM_TENSOR_FFN_DOWN,
1227
+ LLM_TENSOR_FFN_UP,
1228
+ LLM_TENSOR_FFN_POST_NORM,
1229
+ LLM_TENSOR_PER_LAYER_TOKEN_EMBD,
1230
+ LLM_TENSOR_PER_LAYER_MODEL_PROJ,
1231
+ LLM_TENSOR_PER_LAYER_PROJ_NORM,
1232
+ LLM_TENSOR_ALTUP_UNEMBD_PROJ,
1233
+ LLM_TENSOR_ALTUP_PROJ,
1234
+ LLM_TENSOR_PER_LAYER_INP_GATE,
1235
+ LLM_TENSOR_PER_LAYER_PROJ,
1236
+ LLM_TENSOR_PER_LAYER_POST_NORM,
1237
+ LLM_TENSOR_ALTUP_CORRECT_COEF,
1238
+ LLM_TENSOR_ALTUP_CORRECT_SCALE,
1239
+ LLM_TENSOR_ALTUP_PREDICT_COEF,
1240
+ LLM_TENSOR_ALTUP_ROUTER,
1241
+ LLM_TENSOR_ALTUP_ROUTER_NORM,
1242
+ LLM_TENSOR_LAUREL_L,
1243
+ LLM_TENSOR_LAUREL_R,
1244
+ LLM_TENSOR_LAUREL_POST_NORM,
1245
+ };
1246
+ case LLM_ARCH_GEMMA_EMBEDDING:
1247
+ return {
1248
+ LLM_TENSOR_TOKEN_EMBD,
1249
+ LLM_TENSOR_OUTPUT_NORM,
1250
+ LLM_TENSOR_OUTPUT,
1251
+ LLM_TENSOR_DENSE_2_OUT,
1252
+ LLM_TENSOR_DENSE_3_OUT,
1253
+ LLM_TENSOR_ATTN_NORM,
1254
+ LLM_TENSOR_ATTN_Q,
1255
+ LLM_TENSOR_ATTN_Q_NORM,
1256
+ LLM_TENSOR_ATTN_K,
1257
+ LLM_TENSOR_ATTN_K_NORM,
1258
+ LLM_TENSOR_ATTN_V,
1259
+ LLM_TENSOR_ATTN_OUT,
1260
+ LLM_TENSOR_ATTN_POST_NORM,
1261
+ LLM_TENSOR_FFN_NORM,
1262
+ LLM_TENSOR_FFN_GATE,
1263
+ LLM_TENSOR_FFN_DOWN,
1264
+ LLM_TENSOR_FFN_UP,
1265
+ LLM_TENSOR_FFN_POST_NORM,
1266
+ };
1267
+ case LLM_ARCH_MAMBA:
1268
+ return {
1269
+ LLM_TENSOR_TOKEN_EMBD,
1270
+ LLM_TENSOR_OUTPUT_NORM,
1271
+ LLM_TENSOR_OUTPUT,
1272
+ LLM_TENSOR_ATTN_NORM,
1273
+ LLM_TENSOR_SSM_IN,
1274
+ LLM_TENSOR_SSM_CONV1D,
1275
+ LLM_TENSOR_SSM_X,
1276
+ LLM_TENSOR_SSM_DT,
1277
+ LLM_TENSOR_SSM_A,
1278
+ LLM_TENSOR_SSM_D,
1279
+ LLM_TENSOR_SSM_OUT,
1280
+ };
1281
+ case LLM_ARCH_MAMBA2:
1282
+ return {
1283
+ LLM_TENSOR_TOKEN_EMBD,
1284
+ LLM_TENSOR_OUTPUT_NORM,
1285
+ LLM_TENSOR_OUTPUT,
1286
+ LLM_TENSOR_ATTN_NORM,
1287
+ LLM_TENSOR_SSM_IN,
1288
+ LLM_TENSOR_SSM_CONV1D,
1289
+ LLM_TENSOR_SSM_DT,
1290
+ LLM_TENSOR_SSM_A,
1291
+ LLM_TENSOR_SSM_D,
1292
+ LLM_TENSOR_SSM_NORM,
1293
+ LLM_TENSOR_SSM_OUT,
1294
+ };
1295
+ case LLM_ARCH_JAMBA:
1296
+ return {
1297
+ LLM_TENSOR_TOKEN_EMBD,
1298
+ LLM_TENSOR_OUTPUT_NORM,
1299
+ LLM_TENSOR_OUTPUT,
1300
+ LLM_TENSOR_ATTN_NORM,
1301
+ LLM_TENSOR_SSM_IN,
1302
+ LLM_TENSOR_SSM_CONV1D,
1303
+ LLM_TENSOR_SSM_X,
1304
+ LLM_TENSOR_SSM_DT,
1305
+ LLM_TENSOR_SSM_DT_NORM,
1306
+ LLM_TENSOR_SSM_A,
1307
+ LLM_TENSOR_SSM_B_NORM,
1308
+ LLM_TENSOR_SSM_C_NORM,
1309
+ LLM_TENSOR_SSM_D,
1310
+ LLM_TENSOR_SSM_OUT,
1311
+ LLM_TENSOR_ATTN_Q,
1312
+ LLM_TENSOR_ATTN_K,
1313
+ LLM_TENSOR_ATTN_V,
1314
+ LLM_TENSOR_ATTN_OUT,
1315
+ LLM_TENSOR_FFN_GATE_INP,
1316
+ LLM_TENSOR_FFN_NORM,
1317
+ LLM_TENSOR_FFN_GATE,
1318
+ LLM_TENSOR_FFN_DOWN,
1319
+ LLM_TENSOR_FFN_UP,
1320
+ LLM_TENSOR_FFN_GATE_EXPS,
1321
+ LLM_TENSOR_FFN_DOWN_EXPS,
1322
+ LLM_TENSOR_FFN_UP_EXPS,
1323
+ };
1324
+ case LLM_ARCH_FALCON_H1:
1325
+ return {
1326
+ LLM_TENSOR_TOKEN_EMBD,
1327
+ LLM_TENSOR_OUTPUT,
1328
+ LLM_TENSOR_OUTPUT_NORM,
1329
+ LLM_TENSOR_ATTN_NORM,
1330
+ LLM_TENSOR_ATTN_Q,
1331
+ LLM_TENSOR_ATTN_K,
1332
+ LLM_TENSOR_ATTN_V,
1333
+ LLM_TENSOR_ATTN_OUT,
1334
+ LLM_TENSOR_SSM_IN,
1335
+ LLM_TENSOR_SSM_CONV1D,
1336
+ LLM_TENSOR_SSM_DT,
1337
+ LLM_TENSOR_SSM_A,
1338
+ LLM_TENSOR_SSM_D,
1339
+ LLM_TENSOR_SSM_NORM,
1340
+ LLM_TENSOR_SSM_OUT,
1341
+ LLM_TENSOR_FFN_NORM,
1342
+ LLM_TENSOR_FFN_GATE,
1343
+ LLM_TENSOR_FFN_DOWN,
1344
+ LLM_TENSOR_FFN_UP,
1345
+ };
1346
+ case LLM_ARCH_COMMAND_R:
1347
+ return {
1348
+ LLM_TENSOR_TOKEN_EMBD,
1349
+ LLM_TENSOR_OUTPUT_NORM,
1350
+ LLM_TENSOR_ATTN_NORM,
1351
+ LLM_TENSOR_ATTN_Q,
1352
+ LLM_TENSOR_ATTN_K,
1353
+ LLM_TENSOR_ATTN_V,
1354
+ LLM_TENSOR_ATTN_OUT,
1355
+ LLM_TENSOR_FFN_GATE,
1356
+ LLM_TENSOR_FFN_DOWN,
1357
+ LLM_TENSOR_FFN_UP,
1358
+ LLM_TENSOR_ATTN_Q_NORM,
1359
+ LLM_TENSOR_ATTN_K_NORM,
1360
+ };
1361
+ case LLM_ARCH_COHERE2:
1362
+ return {
1363
+ LLM_TENSOR_TOKEN_EMBD,
1364
+ LLM_TENSOR_OUTPUT_NORM,
1365
+ LLM_TENSOR_ATTN_NORM,
1366
+ LLM_TENSOR_ATTN_Q,
1367
+ LLM_TENSOR_ATTN_K,
1368
+ LLM_TENSOR_ATTN_V,
1369
+ LLM_TENSOR_ATTN_OUT,
1370
+ LLM_TENSOR_FFN_GATE,
1371
+ LLM_TENSOR_FFN_DOWN,
1372
+ LLM_TENSOR_FFN_UP,
1373
+ };
1374
+ case LLM_ARCH_DBRX:
1375
+ return {
1376
+ LLM_TENSOR_TOKEN_EMBD,
1377
+ LLM_TENSOR_OUTPUT_NORM,
1378
+ LLM_TENSOR_OUTPUT,
1379
+ LLM_TENSOR_ATTN_QKV,
1380
+ LLM_TENSOR_ATTN_NORM,
1381
+ LLM_TENSOR_ATTN_OUT,
1382
+ LLM_TENSOR_ATTN_OUT_NORM,
1383
+ LLM_TENSOR_FFN_GATE_INP,
1384
+ LLM_TENSOR_FFN_GATE_EXPS,
1385
+ LLM_TENSOR_FFN_DOWN_EXPS,
1386
+ LLM_TENSOR_FFN_UP_EXPS,
1387
+ };
1388
+ case LLM_ARCH_OLMO:
1389
+ return {
1390
+ LLM_TENSOR_TOKEN_EMBD,
1391
+ LLM_TENSOR_OUTPUT,
1392
+ LLM_TENSOR_ATTN_Q,
1393
+ LLM_TENSOR_ATTN_K,
1394
+ LLM_TENSOR_ATTN_V,
1395
+ LLM_TENSOR_ATTN_OUT,
1396
+ LLM_TENSOR_FFN_GATE,
1397
+ LLM_TENSOR_FFN_DOWN,
1398
+ LLM_TENSOR_FFN_UP,
1399
+ };
1400
+ case LLM_ARCH_OLMO2:
1401
+ return {
1402
+ LLM_TENSOR_TOKEN_EMBD,
1403
+ LLM_TENSOR_OUTPUT_NORM,
1404
+ LLM_TENSOR_OUTPUT,
1405
+ LLM_TENSOR_ATTN_Q,
1406
+ LLM_TENSOR_ATTN_K,
1407
+ LLM_TENSOR_ATTN_V,
1408
+ LLM_TENSOR_ATTN_OUT,
1409
+ LLM_TENSOR_ATTN_POST_NORM,
1410
+ LLM_TENSOR_ATTN_Q_NORM,
1411
+ LLM_TENSOR_ATTN_K_NORM,
1412
+ LLM_TENSOR_FFN_POST_NORM,
1413
+ LLM_TENSOR_FFN_GATE,
1414
+ LLM_TENSOR_FFN_DOWN,
1415
+ LLM_TENSOR_FFN_UP,
1416
+ };
1417
+ case LLM_ARCH_OPENELM:
1418
+ return {
1419
+ LLM_TENSOR_TOKEN_EMBD,
1420
+ LLM_TENSOR_OUTPUT_NORM,
1421
+ LLM_TENSOR_ATTN_NORM,
1422
+ LLM_TENSOR_ATTN_QKV,
1423
+ LLM_TENSOR_ATTN_Q_NORM,
1424
+ LLM_TENSOR_ATTN_K_NORM,
1425
+ LLM_TENSOR_ATTN_OUT,
1426
+ LLM_TENSOR_FFN_NORM,
1427
+ LLM_TENSOR_FFN_GATE,
1428
+ LLM_TENSOR_FFN_DOWN,
1429
+ LLM_TENSOR_FFN_UP,
1430
+ };
1431
+ case LLM_ARCH_ARCTIC:
1432
+ return {
1433
+ LLM_TENSOR_TOKEN_EMBD,
1434
+ LLM_TENSOR_OUTPUT_NORM,
1435
+ LLM_TENSOR_OUTPUT,
1436
+ LLM_TENSOR_ATTN_NORM,
1437
+ LLM_TENSOR_ATTN_Q,
1438
+ LLM_TENSOR_ATTN_K,
1439
+ LLM_TENSOR_ATTN_V,
1440
+ LLM_TENSOR_ATTN_OUT,
1441
+ LLM_TENSOR_FFN_GATE_INP,
1442
+ LLM_TENSOR_FFN_NORM,
1443
+ LLM_TENSOR_FFN_GATE,
1444
+ LLM_TENSOR_FFN_DOWN,
1445
+ LLM_TENSOR_FFN_UP,
1446
+ LLM_TENSOR_FFN_NORM_EXPS,
1447
+ LLM_TENSOR_FFN_GATE_EXPS,
1448
+ LLM_TENSOR_FFN_DOWN_EXPS,
1449
+ LLM_TENSOR_FFN_UP_EXPS,
1450
+ };
1451
+ case LLM_ARCH_DEEPSEEK:
1452
+ return {
1453
+ LLM_TENSOR_TOKEN_EMBD,
1454
+ LLM_TENSOR_OUTPUT_NORM,
1455
+ LLM_TENSOR_OUTPUT,
1456
+ LLM_TENSOR_ROPE_FREQS,
1457
+ LLM_TENSOR_ATTN_NORM,
1458
+ LLM_TENSOR_ATTN_Q,
1459
+ LLM_TENSOR_ATTN_K,
1460
+ LLM_TENSOR_ATTN_V,
1461
+ LLM_TENSOR_ATTN_OUT,
1462
+ LLM_TENSOR_ATTN_ROT_EMBD,
1463
+ LLM_TENSOR_FFN_GATE_INP,
1464
+ LLM_TENSOR_FFN_NORM,
1465
+ LLM_TENSOR_FFN_GATE,
1466
+ LLM_TENSOR_FFN_DOWN,
1467
+ LLM_TENSOR_FFN_UP,
1468
+ LLM_TENSOR_FFN_GATE_EXPS,
1469
+ LLM_TENSOR_FFN_DOWN_EXPS,
1470
+ LLM_TENSOR_FFN_UP_EXPS,
1471
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1472
+ LLM_TENSOR_FFN_GATE_SHEXP,
1473
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1474
+ LLM_TENSOR_FFN_UP_SHEXP,
1475
+ };
1476
+ case LLM_ARCH_DEEPSEEK2:
1477
+ return {
1478
+ LLM_TENSOR_TOKEN_EMBD,
1479
+ LLM_TENSOR_OUTPUT_NORM,
1480
+ LLM_TENSOR_OUTPUT,
1481
+ LLM_TENSOR_ATTN_NORM,
1482
+ LLM_TENSOR_ATTN_Q_A_NORM,
1483
+ LLM_TENSOR_ATTN_KV_A_NORM,
1484
+ LLM_TENSOR_ATTN_Q,
1485
+ LLM_TENSOR_ATTN_Q_A,
1486
+ LLM_TENSOR_ATTN_Q_B,
1487
+ LLM_TENSOR_ATTN_KV_A_MQA,
1488
+ LLM_TENSOR_ATTN_KV_B,
1489
+ LLM_TENSOR_ATTN_K_B,
1490
+ LLM_TENSOR_ATTN_V_B,
1491
+ LLM_TENSOR_ATTN_OUT,
1492
+ LLM_TENSOR_FFN_NORM,
1493
+ LLM_TENSOR_FFN_GATE,
1494
+ LLM_TENSOR_FFN_UP,
1495
+ LLM_TENSOR_FFN_DOWN,
1496
+ LLM_TENSOR_FFN_GATE_INP,
1497
+ LLM_TENSOR_FFN_GATE_EXPS,
1498
+ LLM_TENSOR_FFN_DOWN_EXPS,
1499
+ LLM_TENSOR_FFN_UP_EXPS,
1500
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1501
+ LLM_TENSOR_FFN_GATE_SHEXP,
1502
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1503
+ LLM_TENSOR_FFN_UP_SHEXP,
1504
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1505
+ };
1506
+ case LLM_ARCH_PLM:
1507
+ return {
1508
+ LLM_TENSOR_TOKEN_EMBD,
1509
+ LLM_TENSOR_OUTPUT_NORM,
1510
+ LLM_TENSOR_ATTN_NORM,
1511
+ LLM_TENSOR_ATTN_Q,
1512
+ LLM_TENSOR_ATTN_KV_A_MQA,
1513
+ LLM_TENSOR_ATTN_KV_A_NORM,
1514
+ LLM_TENSOR_ATTN_KV_B,
1515
+ LLM_TENSOR_ATTN_OUT,
1516
+ LLM_TENSOR_FFN_NORM,
1517
+ LLM_TENSOR_FFN_DOWN,
1518
+ LLM_TENSOR_FFN_UP,
1519
+ };
1520
+ case LLM_ARCH_CHATGLM:
1521
+ return {
1522
+ LLM_TENSOR_TOKEN_EMBD,
1523
+ LLM_TENSOR_ROPE_FREQS,
1524
+ LLM_TENSOR_OUTPUT_NORM,
1525
+ LLM_TENSOR_OUTPUT,
1526
+ LLM_TENSOR_ATTN_NORM,
1527
+ LLM_TENSOR_ATTN_QKV,
1528
+ LLM_TENSOR_ATTN_Q,
1529
+ LLM_TENSOR_ATTN_K,
1530
+ LLM_TENSOR_ATTN_V,
1531
+ LLM_TENSOR_ATTN_OUT,
1532
+ LLM_TENSOR_FFN_NORM,
1533
+ LLM_TENSOR_FFN_UP,
1534
+ LLM_TENSOR_FFN_DOWN,
1535
+ };
1536
+ case LLM_ARCH_GLM4:
1537
+ return {
1538
+ LLM_TENSOR_TOKEN_EMBD,
1539
+ LLM_TENSOR_ROPE_FREQS,
1540
+ LLM_TENSOR_OUTPUT_NORM,
1541
+ LLM_TENSOR_OUTPUT,
1542
+ LLM_TENSOR_ATTN_NORM,
1543
+ LLM_TENSOR_ATTN_Q,
1544
+ LLM_TENSOR_ATTN_K,
1545
+ LLM_TENSOR_ATTN_V,
1546
+ LLM_TENSOR_ATTN_OUT,
1547
+ LLM_TENSOR_FFN_NORM,
1548
+ LLM_TENSOR_FFN_UP,
1549
+ LLM_TENSOR_FFN_DOWN,
1550
+ LLM_TENSOR_ATTN_POST_NORM,
1551
+ LLM_TENSOR_FFN_POST_NORM,
1552
+ };
1553
+ case LLM_ARCH_GLM4_MOE:
1554
+ return {
1555
+ LLM_TENSOR_TOKEN_EMBD,
1556
+ LLM_TENSOR_OUTPUT_NORM,
1557
+ LLM_TENSOR_OUTPUT,
1558
+ LLM_TENSOR_ATTN_NORM,
1559
+ LLM_TENSOR_ATTN_POST_NORM,
1560
+ LLM_TENSOR_ATTN_Q,
1561
+ LLM_TENSOR_ATTN_K,
1562
+ LLM_TENSOR_ATTN_V,
1563
+ LLM_TENSOR_ATTN_OUT,
1564
+ LLM_TENSOR_ATTN_Q_NORM,
1565
+ LLM_TENSOR_ATTN_K_NORM,
1566
+ LLM_TENSOR_FFN_GATE,
1567
+ LLM_TENSOR_FFN_DOWN,
1568
+ LLM_TENSOR_FFN_UP,
1569
+ LLM_TENSOR_FFN_GATE_INP,
1570
+ LLM_TENSOR_FFN_GATE_EXPS,
1571
+ LLM_TENSOR_FFN_DOWN_EXPS,
1572
+ LLM_TENSOR_FFN_UP_EXPS,
1573
+ LLM_TENSOR_FFN_GATE_SHEXP,
1574
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1575
+ LLM_TENSOR_FFN_UP_SHEXP,
1576
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1577
+ LLM_TENSOR_NEXTN_EH_PROJ,
1578
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1579
+ LLM_TENSOR_NEXTN_ENORM,
1580
+ LLM_TENSOR_NEXTN_HNORM,
1581
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1582
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1583
+ };
1584
+ case LLM_ARCH_BITNET:
1585
+ return {
1586
+ LLM_TENSOR_TOKEN_EMBD,
1587
+ LLM_TENSOR_OUTPUT_NORM,
1588
+ LLM_TENSOR_ATTN_Q,
1589
+ LLM_TENSOR_ATTN_K,
1590
+ LLM_TENSOR_ATTN_V,
1591
+ LLM_TENSOR_ATTN_OUT,
1592
+ LLM_TENSOR_ATTN_NORM,
1593
+ LLM_TENSOR_ATTN_SUB_NORM,
1594
+ LLM_TENSOR_FFN_GATE,
1595
+ LLM_TENSOR_FFN_DOWN,
1596
+ LLM_TENSOR_FFN_UP,
1597
+ LLM_TENSOR_FFN_NORM,
1598
+ LLM_TENSOR_FFN_SUB_NORM,
1599
+ };
1600
+ case LLM_ARCH_T5:
1601
+ return {
1602
+ LLM_TENSOR_TOKEN_EMBD,
1603
+ LLM_TENSOR_OUTPUT,
1604
+ LLM_TENSOR_DEC_OUTPUT_NORM,
1605
+ LLM_TENSOR_DEC_ATTN_NORM,
1606
+ LLM_TENSOR_DEC_ATTN_Q,
1607
+ LLM_TENSOR_DEC_ATTN_K,
1608
+ LLM_TENSOR_DEC_ATTN_V,
1609
+ LLM_TENSOR_DEC_ATTN_OUT,
1610
+ LLM_TENSOR_DEC_ATTN_REL_B,
1611
+ LLM_TENSOR_DEC_CROSS_ATTN_NORM,
1612
+ LLM_TENSOR_DEC_CROSS_ATTN_Q,
1613
+ LLM_TENSOR_DEC_CROSS_ATTN_K,
1614
+ LLM_TENSOR_DEC_CROSS_ATTN_V,
1615
+ LLM_TENSOR_DEC_CROSS_ATTN_OUT,
1616
+ LLM_TENSOR_DEC_CROSS_ATTN_REL_B,
1617
+ LLM_TENSOR_DEC_FFN_NORM,
1618
+ LLM_TENSOR_DEC_FFN_GATE,
1619
+ LLM_TENSOR_DEC_FFN_DOWN,
1620
+ LLM_TENSOR_DEC_FFN_UP,
1621
+ LLM_TENSOR_ENC_OUTPUT_NORM,
1622
+ LLM_TENSOR_ENC_ATTN_NORM,
1623
+ LLM_TENSOR_ENC_ATTN_Q,
1624
+ LLM_TENSOR_ENC_ATTN_K,
1625
+ LLM_TENSOR_ENC_ATTN_V,
1626
+ LLM_TENSOR_ENC_ATTN_OUT,
1627
+ LLM_TENSOR_ENC_ATTN_REL_B,
1628
+ LLM_TENSOR_ENC_FFN_NORM,
1629
+ LLM_TENSOR_ENC_FFN_GATE,
1630
+ LLM_TENSOR_ENC_FFN_DOWN,
1631
+ LLM_TENSOR_ENC_FFN_UP,
1632
+ };
1633
+ case LLM_ARCH_T5ENCODER:
1634
+ return {
1635
+ LLM_TENSOR_TOKEN_EMBD,
1636
+ LLM_TENSOR_OUTPUT,
1637
+ LLM_TENSOR_ENC_OUTPUT_NORM,
1638
+ LLM_TENSOR_ENC_ATTN_NORM,
1639
+ LLM_TENSOR_ENC_ATTN_Q,
1640
+ LLM_TENSOR_ENC_ATTN_K,
1641
+ LLM_TENSOR_ENC_ATTN_V,
1642
+ LLM_TENSOR_ENC_ATTN_OUT,
1643
+ LLM_TENSOR_ENC_ATTN_REL_B,
1644
+ LLM_TENSOR_ENC_FFN_NORM,
1645
+ LLM_TENSOR_ENC_FFN_GATE,
1646
+ LLM_TENSOR_ENC_FFN_DOWN,
1647
+ LLM_TENSOR_ENC_FFN_UP,
1648
+ };
1649
+ case LLM_ARCH_JAIS:
1650
+ return {
1651
+ LLM_TENSOR_TOKEN_EMBD,
1652
+ LLM_TENSOR_OUTPUT_NORM,
1653
+ LLM_TENSOR_OUTPUT,
1654
+ LLM_TENSOR_ATTN_NORM,
1655
+ LLM_TENSOR_ATTN_QKV,
1656
+ LLM_TENSOR_ATTN_OUT,
1657
+ LLM_TENSOR_FFN_NORM,
1658
+ LLM_TENSOR_FFN_UP,
1659
+ LLM_TENSOR_FFN_GATE,
1660
+ LLM_TENSOR_FFN_DOWN,
1661
+ };
1662
+ case LLM_ARCH_NEMOTRON_H:
1663
+ return {
1664
+ LLM_TENSOR_TOKEN_EMBD,
1665
+ LLM_TENSOR_OUTPUT_NORM,
1666
+ LLM_TENSOR_OUTPUT,
1667
+ LLM_TENSOR_ATTN_NORM,
1668
+ LLM_TENSOR_SSM_IN,
1669
+ LLM_TENSOR_SSM_CONV1D,
1670
+ LLM_TENSOR_SSM_DT,
1671
+ LLM_TENSOR_SSM_A,
1672
+ LLM_TENSOR_SSM_D,
1673
+ LLM_TENSOR_SSM_NORM,
1674
+ LLM_TENSOR_SSM_OUT,
1675
+ LLM_TENSOR_ATTN_Q,
1676
+ LLM_TENSOR_ATTN_K,
1677
+ LLM_TENSOR_ATTN_V,
1678
+ LLM_TENSOR_ATTN_OUT,
1679
+ LLM_TENSOR_FFN_DOWN,
1680
+ LLM_TENSOR_FFN_UP,
1681
+ };
1682
+ case LLM_ARCH_NEMOTRON_H_MOE:
1683
+ return {
1684
+ LLM_TENSOR_TOKEN_EMBD,
1685
+ LLM_TENSOR_OUTPUT_NORM,
1686
+ LLM_TENSOR_OUTPUT,
1687
+ LLM_TENSOR_ATTN_NORM,
1688
+ // mamba(2) ssm layers
1689
+ LLM_TENSOR_SSM_IN,
1690
+ LLM_TENSOR_SSM_CONV1D,
1691
+ LLM_TENSOR_SSM_DT,
1692
+ LLM_TENSOR_SSM_A,
1693
+ LLM_TENSOR_SSM_D,
1694
+ LLM_TENSOR_SSM_NORM,
1695
+ LLM_TENSOR_SSM_OUT,
1696
+ // attention layers
1697
+ LLM_TENSOR_ATTN_Q,
1698
+ LLM_TENSOR_ATTN_K,
1699
+ LLM_TENSOR_ATTN_V,
1700
+ LLM_TENSOR_ATTN_OUT,
1701
+ // dense FFN
1702
+ LLM_TENSOR_FFN_DOWN,
1703
+ LLM_TENSOR_FFN_UP,
1704
+ // MoE FFN (for MoE layers)
1705
+ LLM_TENSOR_FFN_GATE_INP,
1706
+ LLM_TENSOR_FFN_UP_EXPS,
1707
+ LLM_TENSOR_FFN_DOWN_EXPS,
1708
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1709
+ // MoE shared expert layer
1710
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1711
+ LLM_TENSOR_FFN_UP_SHEXP,
1712
+ };
1713
+ case LLM_ARCH_EXAONE4:
1714
+ return {
1715
+ LLM_TENSOR_TOKEN_EMBD,
1716
+ LLM_TENSOR_OUTPUT_NORM,
1717
+ LLM_TENSOR_OUTPUT,
1718
+ LLM_TENSOR_ROPE_FREQS,
1719
+ LLM_TENSOR_ATTN_Q,
1720
+ LLM_TENSOR_ATTN_Q_NORM,
1721
+ LLM_TENSOR_ATTN_K,
1722
+ LLM_TENSOR_ATTN_K_NORM,
1723
+ LLM_TENSOR_ATTN_V,
1724
+ LLM_TENSOR_ATTN_OUT,
1725
+ LLM_TENSOR_ATTN_POST_NORM,
1726
+ LLM_TENSOR_FFN_GATE,
1727
+ LLM_TENSOR_FFN_DOWN,
1728
+ LLM_TENSOR_FFN_UP,
1729
+ LLM_TENSOR_FFN_POST_NORM,
1730
+ };
1731
+ case LLM_ARCH_RWKV6:
1732
+ return {
1733
+ LLM_TENSOR_TOKEN_EMBD,
1734
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1735
+ LLM_TENSOR_OUTPUT_NORM,
1736
+ LLM_TENSOR_OUTPUT,
1737
+ LLM_TENSOR_ATTN_NORM,
1738
+ LLM_TENSOR_ATTN_NORM_2,
1739
+ LLM_TENSOR_TIME_MIX_W1,
1740
+ LLM_TENSOR_TIME_MIX_W2,
1741
+ LLM_TENSOR_TIME_MIX_LERP_X,
1742
+ LLM_TENSOR_TIME_MIX_LERP_W,
1743
+ LLM_TENSOR_TIME_MIX_LERP_K,
1744
+ LLM_TENSOR_TIME_MIX_LERP_V,
1745
+ LLM_TENSOR_TIME_MIX_LERP_R,
1746
+ LLM_TENSOR_TIME_MIX_LERP_G,
1747
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1748
+ LLM_TENSOR_TIME_MIX_FIRST,
1749
+ LLM_TENSOR_TIME_MIX_DECAY,
1750
+ LLM_TENSOR_TIME_MIX_DECAY_W1,
1751
+ LLM_TENSOR_TIME_MIX_DECAY_W2,
1752
+ LLM_TENSOR_TIME_MIX_KEY,
1753
+ LLM_TENSOR_TIME_MIX_VALUE,
1754
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1755
+ LLM_TENSOR_TIME_MIX_GATE,
1756
+ LLM_TENSOR_TIME_MIX_LN,
1757
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1758
+ LLM_TENSOR_CHANNEL_MIX_LERP_K,
1759
+ LLM_TENSOR_CHANNEL_MIX_LERP_R,
1760
+ LLM_TENSOR_CHANNEL_MIX_KEY,
1761
+ LLM_TENSOR_CHANNEL_MIX_VALUE,
1762
+ LLM_TENSOR_CHANNEL_MIX_RECEPTANCE,
1763
+ };
1764
+ case LLM_ARCH_RWKV6QWEN2:
1765
+ return {
1766
+ LLM_TENSOR_TOKEN_EMBD,
1767
+ LLM_TENSOR_OUTPUT_NORM,
1768
+ LLM_TENSOR_OUTPUT,
1769
+ LLM_TENSOR_ATTN_NORM,
1770
+ LLM_TENSOR_TIME_MIX_W1,
1771
+ LLM_TENSOR_TIME_MIX_W2,
1772
+ LLM_TENSOR_TIME_MIX_LERP_X,
1773
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1774
+ LLM_TENSOR_TIME_MIX_FIRST,
1775
+ LLM_TENSOR_TIME_MIX_DECAY,
1776
+ LLM_TENSOR_TIME_MIX_DECAY_W1,
1777
+ LLM_TENSOR_TIME_MIX_DECAY_W2,
1778
+ LLM_TENSOR_TIME_MIX_KEY,
1779
+ LLM_TENSOR_TIME_MIX_VALUE,
1780
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1781
+ LLM_TENSOR_TIME_MIX_GATE,
1782
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1783
+ LLM_TENSOR_FFN_NORM,
1784
+ LLM_TENSOR_FFN_GATE,
1785
+ LLM_TENSOR_FFN_DOWN,
1786
+ LLM_TENSOR_FFN_UP,
1787
+ };
1788
+ case LLM_ARCH_RWKV7:
1789
+ return {
1790
+ LLM_TENSOR_TOKEN_EMBD,
1791
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1792
+ LLM_TENSOR_OUTPUT_NORM,
1793
+ LLM_TENSOR_OUTPUT,
1794
+ LLM_TENSOR_ATTN_NORM,
1795
+ LLM_TENSOR_ATTN_NORM_2,
1796
+ LLM_TENSOR_TIME_MIX_W0,
1797
+ LLM_TENSOR_TIME_MIX_W1,
1798
+ LLM_TENSOR_TIME_MIX_W2,
1799
+ LLM_TENSOR_TIME_MIX_A0,
1800
+ LLM_TENSOR_TIME_MIX_A1,
1801
+ LLM_TENSOR_TIME_MIX_A2,
1802
+ LLM_TENSOR_TIME_MIX_V0,
1803
+ LLM_TENSOR_TIME_MIX_V1,
1804
+ LLM_TENSOR_TIME_MIX_V2,
1805
+ LLM_TENSOR_TIME_MIX_G1,
1806
+ LLM_TENSOR_TIME_MIX_G2,
1807
+ LLM_TENSOR_TIME_MIX_K_K,
1808
+ LLM_TENSOR_TIME_MIX_K_A,
1809
+ LLM_TENSOR_TIME_MIX_R_K,
1810
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1811
+ LLM_TENSOR_TIME_MIX_KEY,
1812
+ LLM_TENSOR_TIME_MIX_VALUE,
1813
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1814
+ LLM_TENSOR_TIME_MIX_LN,
1815
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1816
+ LLM_TENSOR_CHANNEL_MIX_LERP_K,
1817
+ LLM_TENSOR_CHANNEL_MIX_KEY,
1818
+ LLM_TENSOR_CHANNEL_MIX_VALUE,
1819
+ };
1820
+ case LLM_ARCH_ARWKV7:
1821
+ return {
1822
+ LLM_TENSOR_TOKEN_EMBD,
1823
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1824
+ LLM_TENSOR_OUTPUT_NORM,
1825
+ LLM_TENSOR_OUTPUT,
1826
+ LLM_TENSOR_ATTN_NORM,
1827
+ LLM_TENSOR_TIME_MIX_W0,
1828
+ LLM_TENSOR_TIME_MIX_W1,
1829
+ LLM_TENSOR_TIME_MIX_W2,
1830
+ LLM_TENSOR_TIME_MIX_A0,
1831
+ LLM_TENSOR_TIME_MIX_A1,
1832
+ LLM_TENSOR_TIME_MIX_A2,
1833
+ LLM_TENSOR_TIME_MIX_V0,
1834
+ LLM_TENSOR_TIME_MIX_V1,
1835
+ LLM_TENSOR_TIME_MIX_V2,
1836
+ LLM_TENSOR_TIME_MIX_G1,
1837
+ LLM_TENSOR_TIME_MIX_G2,
1838
+ LLM_TENSOR_TIME_MIX_K_K,
1839
+ LLM_TENSOR_TIME_MIX_K_A,
1840
+ LLM_TENSOR_TIME_MIX_R_K,
1841
+ LLM_TENSOR_TIME_MIX_LERP_FUSED,
1842
+ LLM_TENSOR_TIME_MIX_KEY,
1843
+ LLM_TENSOR_TIME_MIX_VALUE,
1844
+ LLM_TENSOR_TIME_MIX_RECEPTANCE,
1845
+ LLM_TENSOR_TIME_MIX_LN,
1846
+ LLM_TENSOR_TIME_MIX_OUTPUT,
1847
+ LLM_TENSOR_FFN_NORM,
1848
+ LLM_TENSOR_FFN_GATE,
1849
+ LLM_TENSOR_FFN_DOWN,
1850
+ LLM_TENSOR_FFN_UP,
1851
+ };
1852
+ case LLM_ARCH_GRANITE_MOE:
1853
+ return {
1854
+ LLM_TENSOR_TOKEN_EMBD,
1855
+ LLM_TENSOR_OUTPUT_NORM,
1856
+ LLM_TENSOR_OUTPUT,
1857
+ LLM_TENSOR_ATTN_NORM,
1858
+ LLM_TENSOR_ATTN_Q,
1859
+ LLM_TENSOR_ATTN_K,
1860
+ LLM_TENSOR_ATTN_V,
1861
+ LLM_TENSOR_ATTN_OUT,
1862
+ LLM_TENSOR_FFN_NORM,
1863
+ LLM_TENSOR_FFN_GATE_INP,
1864
+ LLM_TENSOR_FFN_GATE_EXPS,
1865
+ LLM_TENSOR_FFN_DOWN_EXPS,
1866
+ LLM_TENSOR_FFN_UP_EXPS,
1867
+ LLM_TENSOR_FFN_GATE_SHEXP,
1868
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1869
+ LLM_TENSOR_FFN_UP_SHEXP,
1870
+ };
1871
+ case LLM_ARCH_GRANITE_HYBRID:
1872
+ return {
1873
+ LLM_TENSOR_TOKEN_EMBD,
1874
+ LLM_TENSOR_OUTPUT_NORM,
1875
+ LLM_TENSOR_OUTPUT,
1876
+ LLM_TENSOR_ATTN_NORM,
1877
+ LLM_TENSOR_SSM_IN,
1878
+ LLM_TENSOR_SSM_CONV1D,
1879
+ LLM_TENSOR_SSM_DT,
1880
+ LLM_TENSOR_SSM_A,
1881
+ LLM_TENSOR_SSM_D,
1882
+ LLM_TENSOR_SSM_NORM,
1883
+ LLM_TENSOR_SSM_OUT,
1884
+ LLM_TENSOR_ATTN_Q,
1885
+ LLM_TENSOR_ATTN_K,
1886
+ LLM_TENSOR_ATTN_V,
1887
+ LLM_TENSOR_ATTN_OUT,
1888
+ LLM_TENSOR_FFN_NORM,
1889
+ LLM_TENSOR_FFN_GATE,
1890
+ LLM_TENSOR_FFN_DOWN,
1891
+ LLM_TENSOR_FFN_UP,
1892
+ LLM_TENSOR_FFN_NORM,
1893
+ LLM_TENSOR_FFN_GATE_INP,
1894
+ LLM_TENSOR_FFN_GATE_EXPS,
1895
+ LLM_TENSOR_FFN_DOWN_EXPS,
1896
+ LLM_TENSOR_FFN_UP_EXPS,
1897
+ LLM_TENSOR_FFN_GATE_SHEXP,
1898
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1899
+ LLM_TENSOR_FFN_UP_SHEXP,
1900
+ };
1901
+ case LLM_ARCH_WAVTOKENIZER_DEC:
1902
+ return {
1903
+ LLM_TENSOR_TOKEN_EMBD,
1904
+ LLM_TENSOR_TOKEN_EMBD_NORM,
1905
+ LLM_TENSOR_CONV1D,
1906
+ LLM_TENSOR_CONVNEXT_DW,
1907
+ LLM_TENSOR_CONVNEXT_NORM,
1908
+ LLM_TENSOR_CONVNEXT_PW1,
1909
+ LLM_TENSOR_CONVNEXT_PW2,
1910
+ LLM_TENSOR_CONVNEXT_GAMMA,
1911
+ LLM_TENSOR_OUTPUT_NORM,
1912
+ LLM_TENSOR_OUTPUT,
1913
+ LLM_TENSOR_POS_NET_CONV1,
1914
+ LLM_TENSOR_POS_NET_CONV2,
1915
+ LLM_TENSOR_POS_NET_NORM,
1916
+ LLM_TENSOR_POS_NET_NORM1,
1917
+ LLM_TENSOR_POS_NET_NORM2,
1918
+ LLM_TENSOR_POS_NET_ATTN_NORM,
1919
+ LLM_TENSOR_POS_NET_ATTN_Q,
1920
+ LLM_TENSOR_POS_NET_ATTN_K,
1921
+ LLM_TENSOR_POS_NET_ATTN_V,
1922
+ LLM_TENSOR_POS_NET_ATTN_OUT,
1923
+ };
1924
+ case LLM_ARCH_BAILINGMOE:
1925
+ return {
1926
+ LLM_TENSOR_TOKEN_EMBD,
1927
+ LLM_TENSOR_OUTPUT_NORM,
1928
+ LLM_TENSOR_OUTPUT,
1929
+ LLM_TENSOR_ROPE_FREQS,
1930
+ LLM_TENSOR_ATTN_NORM,
1931
+ LLM_TENSOR_ATTN_Q,
1932
+ LLM_TENSOR_ATTN_K,
1933
+ LLM_TENSOR_ATTN_V,
1934
+ LLM_TENSOR_ATTN_OUT,
1935
+ LLM_TENSOR_FFN_GATE_INP,
1936
+ LLM_TENSOR_FFN_NORM,
1937
+ LLM_TENSOR_FFN_GATE_EXPS,
1938
+ LLM_TENSOR_FFN_DOWN_EXPS,
1939
+ LLM_TENSOR_FFN_UP_EXPS,
1940
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1941
+ LLM_TENSOR_FFN_GATE_SHEXP,
1942
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1943
+ LLM_TENSOR_FFN_UP_SHEXP,
1944
+ };
1945
+ case LLM_ARCH_BAILINGMOE2:
1946
+ return {
1947
+ LLM_TENSOR_TOKEN_EMBD,
1948
+ LLM_TENSOR_OUTPUT_NORM,
1949
+ LLM_TENSOR_OUTPUT,
1950
+ LLM_TENSOR_ATTN_NORM,
1951
+ LLM_TENSOR_ATTN_Q_NORM,
1952
+ LLM_TENSOR_ATTN_K_NORM,
1953
+ LLM_TENSOR_ATTN_QKV,
1954
+ LLM_TENSOR_ATTN_OUT,
1955
+ LLM_TENSOR_FFN_GATE_INP,
1956
+ LLM_TENSOR_FFN_EXP_PROBS_B,
1957
+ LLM_TENSOR_FFN_NORM,
1958
+ LLM_TENSOR_FFN_GATE,
1959
+ LLM_TENSOR_FFN_DOWN,
1960
+ LLM_TENSOR_FFN_UP,
1961
+ LLM_TENSOR_FFN_GATE_EXPS,
1962
+ LLM_TENSOR_FFN_DOWN_EXPS,
1963
+ LLM_TENSOR_FFN_UP_EXPS,
1964
+ LLM_TENSOR_FFN_GATE_SHEXP,
1965
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1966
+ LLM_TENSOR_FFN_UP_SHEXP,
1967
+ LLM_TENSOR_NEXTN_EH_PROJ,
1968
+ LLM_TENSOR_NEXTN_EMBED_TOKENS,
1969
+ LLM_TENSOR_NEXTN_ENORM,
1970
+ LLM_TENSOR_NEXTN_HNORM,
1971
+ LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
1972
+ LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
1973
+ LLM_TENSOR_LAYER_OUT_NORM,
1974
+ };
1975
+ case LLM_ARCH_DOTS1:
1976
+ return {
1977
+ LLM_TENSOR_TOKEN_EMBD,
1978
+ LLM_TENSOR_OUTPUT_NORM,
1979
+ LLM_TENSOR_OUTPUT,
1980
+ LLM_TENSOR_ATTN_NORM,
1981
+ LLM_TENSOR_ATTN_Q,
1982
+ LLM_TENSOR_ATTN_Q_NORM,
1983
+ LLM_TENSOR_ATTN_K,
1984
+ LLM_TENSOR_ATTN_K_NORM,
1985
+ LLM_TENSOR_ATTN_V,
1986
+ LLM_TENSOR_ATTN_OUT,
1987
+ LLM_TENSOR_FFN_NORM,
1988
+ LLM_TENSOR_FFN_GATE,
1989
+ LLM_TENSOR_FFN_UP,
1990
+ LLM_TENSOR_FFN_DOWN,
1991
+ LLM_TENSOR_FFN_GATE_INP,
1992
+ LLM_TENSOR_FFN_GATE_EXPS,
1993
+ LLM_TENSOR_FFN_DOWN_EXPS,
1994
+ LLM_TENSOR_FFN_UP_EXPS,
1995
+ LLM_TENSOR_FFN_GATE_INP_SHEXP,
1996
+ LLM_TENSOR_FFN_GATE_SHEXP,
1997
+ LLM_TENSOR_FFN_DOWN_SHEXP,
1998
+ LLM_TENSOR_FFN_UP_SHEXP,
1999
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2000
+ };
2001
+ case LLM_ARCH_ERNIE4_5_MOE:
2002
+ return {
2003
+ LLM_TENSOR_TOKEN_EMBD,
2004
+ LLM_TENSOR_OUTPUT_NORM,
2005
+ LLM_TENSOR_OUTPUT,
2006
+ LLM_TENSOR_ATTN_NORM,
2007
+ LLM_TENSOR_ATTN_Q,
2008
+ LLM_TENSOR_ATTN_K,
2009
+ LLM_TENSOR_ATTN_V,
2010
+ LLM_TENSOR_ATTN_OUT,
2011
+ LLM_TENSOR_FFN_NORM,
2012
+ LLM_TENSOR_FFN_GATE,
2013
+ LLM_TENSOR_FFN_DOWN,
2014
+ LLM_TENSOR_FFN_UP,
2015
+ LLM_TENSOR_FFN_GATE_INP,
2016
+ LLM_TENSOR_FFN_GATE_SHEXP,
2017
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2018
+ LLM_TENSOR_FFN_UP_SHEXP,
2019
+ LLM_TENSOR_FFN_GATE_EXPS,
2020
+ LLM_TENSOR_FFN_DOWN_EXPS,
2021
+ LLM_TENSOR_FFN_UP_EXPS,
2022
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2023
+ };
2024
+ case LLM_ARCH_HUNYUAN_MOE:
2025
+ return {
2026
+ LLM_TENSOR_TOKEN_EMBD,
2027
+ LLM_TENSOR_OUTPUT_NORM,
2028
+ LLM_TENSOR_OUTPUT,
2029
+ LLM_TENSOR_ATTN_NORM,
2030
+ LLM_TENSOR_ATTN_Q,
2031
+ LLM_TENSOR_ATTN_Q_NORM,
2032
+ LLM_TENSOR_ATTN_K,
2033
+ LLM_TENSOR_ATTN_K_NORM,
2034
+ LLM_TENSOR_ATTN_V,
2035
+ LLM_TENSOR_ATTN_OUT,
2036
+ LLM_TENSOR_FFN_GATE_INP,
2037
+ LLM_TENSOR_FFN_NORM,
2038
+ LLM_TENSOR_FFN_GATE_SHEXP,
2039
+ LLM_TENSOR_FFN_DOWN_SHEXP,
2040
+ LLM_TENSOR_FFN_UP_SHEXP,
2041
+ LLM_TENSOR_FFN_GATE_EXPS,
2042
+ LLM_TENSOR_FFN_DOWN_EXPS,
2043
+ LLM_TENSOR_FFN_UP_EXPS,
2044
+ };
2045
+ case LLM_ARCH_OPENAI_MOE:
2046
+ return {
2047
+ LLM_TENSOR_TOKEN_EMBD,
2048
+ LLM_TENSOR_OUTPUT_NORM,
2049
+ LLM_TENSOR_OUTPUT,
2050
+ LLM_TENSOR_ATTN_NORM,
2051
+ LLM_TENSOR_ATTN_POST_NORM,
2052
+ LLM_TENSOR_ATTN_Q,
2053
+ LLM_TENSOR_ATTN_K,
2054
+ LLM_TENSOR_ATTN_V,
2055
+ LLM_TENSOR_ATTN_OUT,
2056
+ LLM_TENSOR_ATTN_SINKS,
2057
+ LLM_TENSOR_FFN_GATE_INP,
2058
+ LLM_TENSOR_FFN_GATE_EXPS,
2059
+ LLM_TENSOR_FFN_DOWN_EXPS,
2060
+ LLM_TENSOR_FFN_UP_EXPS,
2061
+ };
2062
+ case LLM_ARCH_LFM2:
2063
+ return {
2064
+ LLM_TENSOR_ATTN_NORM,
2065
+ LLM_TENSOR_ATTN_Q,
2066
+ LLM_TENSOR_ATTN_K,
2067
+ LLM_TENSOR_ATTN_V,
2068
+ LLM_TENSOR_ATTN_OUT,
2069
+ LLM_TENSOR_ATTN_K_NORM,
2070
+ LLM_TENSOR_ATTN_Q_NORM,
2071
+ LLM_TENSOR_FFN_DOWN,
2072
+ LLM_TENSOR_FFN_GATE,
2073
+ LLM_TENSOR_FFN_NORM,
2074
+ LLM_TENSOR_FFN_UP,
2075
+ LLM_TENSOR_SHORTCONV_CONV,
2076
+ LLM_TENSOR_SHORTCONV_INPROJ,
2077
+ LLM_TENSOR_SHORTCONV_OUTPROJ,
2078
+ LLM_TENSOR_TOKEN_EMBD,
2079
+ LLM_TENSOR_OUTPUT_NORM_LFM2,
2080
+ LLM_TENSOR_OUTPUT,
2081
+ LLM_TENSOR_DENSE_2_OUT,
2082
+ };
2083
+ case LLM_ARCH_LFM2MOE:
2084
+ return {
2085
+ LLM_TENSOR_ATTN_NORM,
2086
+ LLM_TENSOR_ATTN_Q,
2087
+ LLM_TENSOR_ATTN_K,
2088
+ LLM_TENSOR_ATTN_V,
2089
+ LLM_TENSOR_ATTN_OUT,
2090
+ LLM_TENSOR_ATTN_K_NORM,
2091
+ LLM_TENSOR_ATTN_Q_NORM,
2092
+ LLM_TENSOR_FFN_DOWN,
2093
+ LLM_TENSOR_FFN_GATE,
2094
+ LLM_TENSOR_FFN_NORM,
2095
+ LLM_TENSOR_FFN_UP,
2096
+ LLM_TENSOR_SHORTCONV_CONV,
2097
+ LLM_TENSOR_SHORTCONV_INPROJ,
2098
+ LLM_TENSOR_SHORTCONV_OUTPROJ,
2099
+ LLM_TENSOR_TOKEN_EMBD,
2100
+ LLM_TENSOR_OUTPUT_NORM_LFM2,
2101
+ LLM_TENSOR_FFN_GATE_INP,
2102
+ LLM_TENSOR_FFN_GATE_EXPS,
2103
+ LLM_TENSOR_FFN_DOWN_EXPS,
2104
+ LLM_TENSOR_FFN_UP_EXPS,
2105
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2106
+ };
2107
+ case LLM_ARCH_SMALLTHINKER:
2108
+ return {
2109
+ LLM_TENSOR_TOKEN_EMBD,
2110
+ LLM_TENSOR_OUTPUT_NORM,
2111
+ LLM_TENSOR_OUTPUT,
2112
+ LLM_TENSOR_ATTN_NORM,
2113
+ LLM_TENSOR_ATTN_Q,
2114
+ LLM_TENSOR_ATTN_K,
2115
+ LLM_TENSOR_ATTN_V,
2116
+ LLM_TENSOR_ATTN_OUT,
2117
+ LLM_TENSOR_FFN_NORM,
2118
+ LLM_TENSOR_FFN_GATE,
2119
+ LLM_TENSOR_FFN_DOWN,
2120
+ LLM_TENSOR_FFN_UP,
2121
+ LLM_TENSOR_FFN_GATE_INP,
2122
+ LLM_TENSOR_FFN_GATE_EXPS,
2123
+ LLM_TENSOR_FFN_DOWN_EXPS,
2124
+ LLM_TENSOR_FFN_UP_EXPS,
2125
+ };
2126
+ case LLM_ARCH_APERTUS:
2127
+ return {
2128
+ LLM_TENSOR_TOKEN_EMBD,
2129
+ LLM_TENSOR_OUTPUT_NORM,
2130
+ LLM_TENSOR_OUTPUT,
2131
+ LLM_TENSOR_ROPE_FREQS,
2132
+ LLM_TENSOR_ATTN_NORM,
2133
+ LLM_TENSOR_ATTN_Q,
2134
+ LLM_TENSOR_ATTN_K,
2135
+ LLM_TENSOR_ATTN_V,
2136
+ LLM_TENSOR_ATTN_OUT,
2137
+ LLM_TENSOR_ATTN_Q_NORM,
2138
+ LLM_TENSOR_ATTN_K_NORM,
2139
+ LLM_TENSOR_FFN_NORM,
2140
+ LLM_TENSOR_FFN_DOWN,
2141
+ LLM_TENSOR_FFN_UP,
2142
+ };
2143
+ case LLM_ARCH_SEED_OSS:
2144
+ return {
2145
+ LLM_TENSOR_TOKEN_EMBD,
2146
+ LLM_TENSOR_OUTPUT_NORM,
2147
+ LLM_TENSOR_OUTPUT,
2148
+ LLM_TENSOR_ATTN_NORM,
2149
+ LLM_TENSOR_ATTN_Q,
2150
+ LLM_TENSOR_ATTN_K,
2151
+ LLM_TENSOR_ATTN_V,
2152
+ LLM_TENSOR_ATTN_OUT,
2153
+ LLM_TENSOR_ATTN_POST_NORM,
2154
+ LLM_TENSOR_FFN_GATE,
2155
+ LLM_TENSOR_FFN_DOWN,
2156
+ LLM_TENSOR_FFN_UP,
2157
+ };
2158
+ case LLM_ARCH_GROVEMOE:
2159
+ return {
2160
+ LLM_TENSOR_TOKEN_EMBD,
2161
+ LLM_TENSOR_OUTPUT_NORM,
2162
+ LLM_TENSOR_OUTPUT,
2163
+ LLM_TENSOR_ATTN_NORM,
2164
+ LLM_TENSOR_ATTN_Q,
2165
+ LLM_TENSOR_ATTN_Q_NORM,
2166
+ LLM_TENSOR_ATTN_K,
2167
+ LLM_TENSOR_ATTN_K_NORM,
2168
+ LLM_TENSOR_ATTN_V,
2169
+ LLM_TENSOR_ATTN_OUT,
2170
+ LLM_TENSOR_FFN_NORM,
2171
+ LLM_TENSOR_FFN_GATE_INP,
2172
+ LLM_TENSOR_FFN_GATE_EXPS,
2173
+ LLM_TENSOR_FFN_DOWN_EXPS,
2174
+ LLM_TENSOR_FFN_UP_EXPS,
2175
+ LLM_TENSOR_FFN_GATE_CHEXPS,
2176
+ LLM_TENSOR_FFN_DOWN_CHEXPS,
2177
+ LLM_TENSOR_FFN_UP_CHEXPS,
2178
+ };
2179
+ case LLM_ARCH_MINIMAX_M2:
2180
+ return {
2181
+ LLM_TENSOR_TOKEN_EMBD,
2182
+ LLM_TENSOR_OUTPUT_NORM,
2183
+ LLM_TENSOR_OUTPUT,
2184
+ LLM_TENSOR_ATTN_NORM,
2185
+ LLM_TENSOR_ATTN_Q,
2186
+ LLM_TENSOR_ATTN_K,
2187
+ LLM_TENSOR_ATTN_V,
2188
+ LLM_TENSOR_ATTN_OUT,
2189
+ LLM_TENSOR_ATTN_Q_NORM,
2190
+ LLM_TENSOR_ATTN_K_NORM,
2191
+ LLM_TENSOR_FFN_NORM,
2192
+ LLM_TENSOR_FFN_GATE_INP,
2193
+ LLM_TENSOR_FFN_GATE_EXPS,
2194
+ LLM_TENSOR_FFN_DOWN_EXPS,
2195
+ LLM_TENSOR_FFN_UP_EXPS,
2196
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2197
+ };
2198
+ case LLM_ARCH_COGVLM:
2199
+ return {
2200
+ LLM_TENSOR_TOKEN_EMBD,
2201
+ LLM_TENSOR_OUTPUT_NORM,
2202
+ LLM_TENSOR_OUTPUT,
2203
+ LLM_TENSOR_ATTN_NORM,
2204
+ LLM_TENSOR_ATTN_QKV,
2205
+ LLM_TENSOR_ATTN_OUT,
2206
+ LLM_TENSOR_FFN_NORM,
2207
+ LLM_TENSOR_FFN_GATE,
2208
+ LLM_TENSOR_FFN_DOWN,
2209
+ LLM_TENSOR_FFN_UP,
2210
+ LLM_TENSOR_VISEXP_ATTN_QKV,
2211
+ LLM_TENSOR_VISEXP_ATTN_OUT,
2212
+ LLM_TENSOR_VISEXP_FFN_GATE,
2213
+ LLM_TENSOR_VISEXP_FFN_DOWN,
2214
+ LLM_TENSOR_VISEXP_FFN_UP,
2215
+ };
2216
+ case LLM_ARCH_MIMO2:
2217
+ return {
2218
+ LLM_TENSOR_TOKEN_EMBD,
2219
+ LLM_TENSOR_OUTPUT_NORM,
2220
+ LLM_TENSOR_OUTPUT,
2221
+ LLM_TENSOR_ATTN_NORM,
2222
+ LLM_TENSOR_ATTN_Q,
2223
+ LLM_TENSOR_ATTN_K,
2224
+ LLM_TENSOR_ATTN_V,
2225
+ LLM_TENSOR_ATTN_SINKS,
2226
+ LLM_TENSOR_ATTN_OUT,
2227
+ LLM_TENSOR_FFN_NORM,
2228
+ LLM_TENSOR_FFN_GATE,
2229
+ LLM_TENSOR_FFN_DOWN,
2230
+ LLM_TENSOR_FFN_UP,
2231
+ LLM_TENSOR_FFN_GATE_INP,
2232
+ LLM_TENSOR_FFN_GATE_EXPS,
2233
+ LLM_TENSOR_FFN_DOWN_EXPS,
2234
+ LLM_TENSOR_FFN_UP_EXPS,
2235
+ LLM_TENSOR_FFN_EXP_PROBS_B,
2236
+ };
2237
+ case LLM_ARCH_GPTJ:
2238
+ case LLM_ARCH_UNKNOWN:
2239
+ return {
2240
+ LLM_TENSOR_TOKEN_EMBD,
2241
+ };
2242
+ case LLM_ARCH_MAINCODER:
2243
+ return {
2244
+ LLM_TENSOR_TOKEN_EMBD,
2245
+ LLM_TENSOR_OUTPUT_NORM,
2246
+ LLM_TENSOR_OUTPUT,
2247
+ LLM_TENSOR_ATTN_NORM,
2248
+ LLM_TENSOR_ATTN_Q,
2249
+ LLM_TENSOR_ATTN_Q_NORM,
2250
+ LLM_TENSOR_ATTN_K,
2251
+ LLM_TENSOR_ATTN_K_NORM,
2252
+ LLM_TENSOR_ATTN_V,
2253
+ LLM_TENSOR_ATTN_OUT,
2254
+ LLM_TENSOR_FFN_NORM,
2255
+ LLM_TENSOR_FFN_GATE,
2256
+ LLM_TENSOR_FFN_DOWN,
2257
+ LLM_TENSOR_FFN_UP,
2258
+ };
2259
+ default:
2260
+ GGML_ABORT("unknown architecture for tensor mapping");
2261
+ }
2262
+ }
2263
+
2264
+ // declare information about the model weight tensors:
2265
+ // - the layer in which the tensor is going to be used. this is needed in order to assign the correct buffer type for the weight
2266
+ // - the operator which is going to use the weight. this is needed to determine if the respective backend supports the operator
2267
+ //
2268
+ // for example, input layers are usually assigned to CPU/host buffer types
2269
+ //
2270
+ // a mismatch between the declared information and the actual layer/op in which the tensor is used can lead to sub-optimal
2271
+ // assignment of the buffer types and extra overhead during computation
2272
+ // example: https://github.com/ggml-org/llama.cpp/pull/17548
2273
+ //
1687
2274
  static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1688
2275
  {LLM_TENSOR_TOKEN_EMBD, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
1689
2276
  {LLM_TENSOR_POS_EMBD, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
1690
- {LLM_TENSOR_TOKEN_EMBD_NORM, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
1691
2277
  {LLM_TENSOR_TOKEN_TYPES, {LLM_TENSOR_LAYER_INPUT, GGML_OP_GET_ROWS}},
2278
+ {LLM_TENSOR_TOKEN_EMBD_NORM, {LLM_TENSOR_LAYER_INPUT, GGML_OP_MUL}},
1692
2279
  {LLM_TENSOR_OUTPUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
1693
2280
  {LLM_TENSOR_CLS, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
1694
2281
  {LLM_TENSOR_CLS_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2282
+ {LLM_TENSOR_DENSE_2_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}}, // Dense layer output
2283
+ {LLM_TENSOR_DENSE_3_OUT, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}}, // Dense layer output
1695
2284
  {LLM_TENSOR_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2285
+ {LLM_TENSOR_OUTPUT_NORM_LFM2, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
1696
2286
  {LLM_TENSOR_DEC_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
1697
2287
  {LLM_TENSOR_ENC_OUTPUT_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
1698
2288
  {LLM_TENSOR_ROPE_FREQS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ROPE}},
@@ -1703,6 +2293,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1703
2293
  {LLM_TENSOR_ATTN_V, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1704
2294
  {LLM_TENSOR_ATTN_QKV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1705
2295
  {LLM_TENSOR_ATTN_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2296
+ {LLM_TENSOR_ATTN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1706
2297
  {LLM_TENSOR_FFN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1707
2298
  {LLM_TENSOR_FFN_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1708
2299
  {LLM_TENSOR_FFN_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -1715,6 +2306,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1715
2306
  {LLM_TENSOR_ATTN_KV_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1716
2307
  {LLM_TENSOR_ATTN_K_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1717
2308
  {LLM_TENSOR_ATTN_V_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2309
+ {LLM_TENSOR_ATTN_SINKS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SCALE}},
1718
2310
  {LLM_TENSOR_DEC_ATTN_Q, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1719
2311
  {LLM_TENSOR_DEC_ATTN_K, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1720
2312
  {LLM_TENSOR_DEC_ATTN_V, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -1739,6 +2331,7 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1739
2331
  {LLM_TENSOR_SSM_X, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1740
2332
  {LLM_TENSOR_SSM_DT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1741
2333
  {LLM_TENSOR_SSM_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2334
+ {LLM_TENSOR_SSM_BETA_ALPHA, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1742
2335
  {LLM_TENSOR_TIME_MIX_W1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1743
2336
  {LLM_TENSOR_TIME_MIX_W2, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1744
2337
  {LLM_TENSOR_TIME_MIX_A1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
@@ -1760,7 +2353,12 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1760
2353
  {LLM_TENSOR_FFN_ACT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_DIV}},
1761
2354
  {LLM_TENSOR_SSM_CONV1D, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
1762
2355
  {LLM_TENSOR_SSM_A, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_SCAN}},
2356
+ {LLM_TENSOR_SSM_A_NOSCAN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}}, // a version of SSM_A used for MUL instead of SSM_SCAN
2357
+ {LLM_TENSOR_SSM_DT_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2358
+ {LLM_TENSOR_SSM_B_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2359
+ {LLM_TENSOR_SSM_C_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
1763
2360
  {LLM_TENSOR_SSM_D, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2361
+ {LLM_TENSOR_SSM_NORM, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
1764
2362
  {LLM_TENSOR_TIME_MIX_LERP_X, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
1765
2363
  {LLM_TENSOR_TIME_MIX_LN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
1766
2364
  {LLM_TENSOR_CHANNEL_MIX_LERP_K, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
@@ -1803,6 +2401,9 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1803
2401
  {LLM_TENSOR_FFN_DOWN_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
1804
2402
  {LLM_TENSOR_FFN_GATE_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
1805
2403
  {LLM_TENSOR_FFN_UP_EXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2404
+ {LLM_TENSOR_FFN_DOWN_CHEXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2405
+ {LLM_TENSOR_FFN_GATE_CHEXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
2406
+ {LLM_TENSOR_FFN_UP_CHEXPS, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
1806
2407
  {LLM_TENSOR_FFN_EXP_PROBS_B, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
1807
2408
  // altup / laurel (gemma 3n)
1808
2409
  {LLM_TENSOR_PER_LAYER_TOKEN_EMBD, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_GET_ROWS}},
@@ -1839,6 +2440,22 @@ static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
1839
2440
  {LLM_TENSOR_CONVNEXT_PW1, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1840
2441
  {LLM_TENSOR_CONVNEXT_PW2, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
1841
2442
  {LLM_TENSOR_CONVNEXT_GAMMA, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
2443
+ {LLM_TENSOR_SHORTCONV_CONV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
2444
+ {LLM_TENSOR_SHORTCONV_INPROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2445
+ {LLM_TENSOR_SHORTCONV_OUTPROJ, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2446
+ {LLM_TENSOR_VISEXP_ATTN_QKV, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2447
+ {LLM_TENSOR_VISEXP_ATTN_OUT, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2448
+ {LLM_TENSOR_VISEXP_FFN_GATE, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2449
+ {LLM_TENSOR_VISEXP_FFN_DOWN, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2450
+ {LLM_TENSOR_VISEXP_FFN_UP, {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
2451
+ // NextN/MTP tensors are currently ignored (reserved for future MTP support)
2452
+ // These tensors only exist in the last layer(s) and are treated as output tensors
2453
+ {LLM_TENSOR_NEXTN_EH_PROJ, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2454
+ {LLM_TENSOR_NEXTN_EMBED_TOKENS, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_GET_ROWS}},
2455
+ {LLM_TENSOR_NEXTN_ENORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_GET_ROWS}},
2456
+ {LLM_TENSOR_NEXTN_HNORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
2457
+ {LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL_MAT}},
2458
+ {LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, {LLM_TENSOR_LAYER_OUTPUT, GGML_OP_MUL}},
1842
2459
  };
1843
2460
 
1844
2461
  LLM_KV::LLM_KV(llm_arch arch, const char * suffix) : arch(arch), suffix(suffix) {}
@@ -1854,13 +2471,20 @@ std::string LLM_KV::operator()(llm_kv kv) const {
1854
2471
  return name;
1855
2472
  }
1856
2473
 
2474
+ LLM_TN_IMPL::LLM_TN_IMPL(llm_arch arch, llm_tensor tensor, const char * suffix, int bid, int xid)
2475
+ : arch(arch), tensor(tensor), suffix(suffix), bid(bid), xid(xid),
2476
+ model_tensors(llm_get_tensor_names(arch)) {}
2477
+
1857
2478
  std::string LLM_TN_IMPL::str() const {
1858
- if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) {
1859
- return "__missing__";
2479
+ if (LLM_TENSOR_NAMES.find(tensor) == LLM_TENSOR_NAMES.end()) {
2480
+ GGML_ABORT("unknown tensor name for tensor id %d", static_cast<int>(tensor));
1860
2481
  }
1861
2482
 
1862
- std::string name = ::format(LLM_TENSOR_NAMES.at(arch).at(tensor), bid, xid);
2483
+ if (model_tensors.find(tensor) == model_tensors.end()) {
2484
+ return LLM_TENSOR_NAMES.at(tensor);
2485
+ }
1863
2486
 
2487
+ std::string name = ::format(LLM_TENSOR_NAMES.at(tensor), bid, xid);
1864
2488
  if (suffix != nullptr) {
1865
2489
  name += ".";
1866
2490
  name += suffix;
@@ -1894,6 +2518,7 @@ const llm_tensor_info & llm_tensor_info_for(llm_tensor tensor) {
1894
2518
  bool llm_arch_is_recurrent(const llm_arch & arch) {
1895
2519
  switch (arch) {
1896
2520
  case LLM_ARCH_MAMBA:
2521
+ case LLM_ARCH_MAMBA2:
1897
2522
  case LLM_ARCH_RWKV6:
1898
2523
  case LLM_ARCH_RWKV6QWEN2:
1899
2524
  case LLM_ARCH_RWKV7:
@@ -1905,9 +2530,29 @@ bool llm_arch_is_recurrent(const llm_arch & arch) {
1905
2530
  }
1906
2531
 
1907
2532
  bool llm_arch_is_hybrid(const llm_arch & arch) {
1908
- // TODO: There are currently no hybrid models! Once there are, this will be
1909
- // the place to identify them
1910
2533
  switch (arch) {
2534
+ case LLM_ARCH_JAMBA:
2535
+ case LLM_ARCH_FALCON_H1:
2536
+ case LLM_ARCH_PLAMO2:
2537
+ case LLM_ARCH_GRANITE_HYBRID:
2538
+ case LLM_ARCH_LFM2:
2539
+ case LLM_ARCH_LFM2MOE:
2540
+ case LLM_ARCH_NEMOTRON_H:
2541
+ case LLM_ARCH_NEMOTRON_H_MOE:
2542
+ case LLM_ARCH_QWEN3NEXT:
2543
+ return true;
2544
+ default:
2545
+ return false;
2546
+ }
2547
+ }
2548
+
2549
+ bool llm_arch_is_diffusion(const llm_arch & arch) {
2550
+ switch (arch) {
2551
+ case LLM_ARCH_DREAM:
2552
+ case LLM_ARCH_LLADA:
2553
+ case LLM_ARCH_LLADA_MOE:
2554
+ case LLM_ARCH_RND1:
2555
+ return true;
1911
2556
  default:
1912
2557
  return false;
1913
2558
  }