@simulatte/doppler 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (355)
  1. package/CHANGELOG.md +145 -0
  2. package/README.md +16 -23
  3. package/package.json +30 -32
  4. package/src/adapters/adapter-registry.js +12 -1
  5. package/src/adapters/lora-loader.js +23 -6
  6. package/src/bridge/extension-client.d.ts +5 -0
  7. package/src/bridge/extension-client.js +40 -0
  8. package/src/bridge/index.d.ts +2 -1
  9. package/src/bridge/index.js +6 -4
  10. package/src/browser/browser-converter.js +31 -1
  11. package/src/browser/file-picker.js +6 -0
  12. package/src/browser/safetensors-parser-browser.js +84 -1
  13. package/src/browser/shard-io-browser.js +2 -2
  14. package/src/browser/tensor-source-download.js +8 -2
  15. package/src/browser/tensor-source-http.d.ts +1 -0
  16. package/src/browser/tensor-source-http.js +5 -1
  17. package/src/client/doppler-api.browser.js +20 -4
  18. package/src/client/doppler-api.js +19 -3
  19. package/src/client/doppler-provider/generation.js +12 -0
  20. package/src/client/doppler-provider/model-manager.d.ts +10 -0
  21. package/src/client/doppler-provider/model-manager.js +91 -19
  22. package/src/client/doppler-provider/source-runtime.d.ts +2 -1
  23. package/src/client/doppler-provider/source-runtime.js +132 -13
  24. package/src/client/doppler-registry.json +5 -20
  25. package/src/config/backward-registry-loader.js +17 -2
  26. package/src/config/execution-v0-contract-check.js +113 -15
  27. package/src/config/kernel-path-contract-check.js +57 -29
  28. package/src/config/kernel-path-loader.d.ts +5 -0
  29. package/src/config/kernel-path-loader.js +18 -36
  30. package/src/config/kernels/kernel-ref-digests.js +1 -1
  31. package/src/config/kernels/registry.js +14 -1
  32. package/src/config/kernels/registry.json +81 -5
  33. package/src/config/loader.d.ts +1 -1
  34. package/src/config/loader.js +15 -2
  35. package/src/config/merge-contract-check.js +66 -4
  36. package/src/config/merge-helpers.js +128 -7
  37. package/src/config/merge.d.ts +1 -0
  38. package/src/config/merge.js +10 -0
  39. package/src/config/param-validator.js +47 -2
  40. package/src/config/presets/kernel-paths/{gemma2-q4k-dequant-f32a.json → gemma2-q4k-dequant-f32a-nosubgroups.json} +3 -3
  41. package/src/config/presets/kernel-paths/gemma3-f16-fused-f32a-online-streamingprefill.json +223 -0
  42. package/src/config/presets/kernel-paths/{gemma3-q4k-dequant-f32a.json → gemma3-q4k-dequant-f32a-nosubgroups.json} +3 -3
  43. package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32w-f32a-online.json +56 -0
  44. package/src/config/presets/kernel-paths/lfm2-q4k-dequant-f32a-nosubgroups.json +61 -0
  45. package/src/config/presets/kernel-paths/registry.json +43 -8
  46. package/src/config/presets/models/gemma2.json +3 -2
  47. package/src/config/presets/models/gemma3.json +2 -0
  48. package/src/config/presets/models/qwen3.json +4 -3
  49. package/src/config/presets/models/qwen3_5.json +16 -0
  50. package/src/config/presets/runtime/experiments/bench/gemma3-bench-q4k.json +1 -1
  51. package/src/config/presets/runtime/experiments/debug/gemma3-debug-q4k.json +1 -1
  52. package/src/config/presets/runtime/experiments/verify/gemma3-verify.json +1 -1
  53. package/src/config/presets/runtime/kernels/dequant-f16-q4k.json +6 -13
  54. package/src/config/presets/runtime/kernels/dequant-f32-q4k.json +6 -13
  55. package/src/config/presets/runtime/kernels/embeddinggemma-q4k-dequant-f32a.json +37 -0
  56. package/src/config/presets/runtime/kernels/fused-q4k.json +6 -13
  57. package/src/config/presets/runtime/kernels/gemma2-q4k-dequant-f16a.json +33 -0
  58. package/src/config/presets/runtime/kernels/gemma2-q4k-dequant-f32a-nosubgroups.json +33 -0
  59. package/src/config/presets/runtime/kernels/gemma2-q4k-fused-f32a.json +33 -0
  60. package/src/config/presets/runtime/kernels/safe-q4k.json +6 -13
  61. package/src/config/presets/runtime/model/qwen3-5-layer-probe.json +52 -0
  62. package/src/config/presets/runtime/model/qwen3-5-linear-attn-debug.json +90 -0
  63. package/src/config/presets/runtime/platform/metal-apple-q4k.json +1 -1
  64. package/src/config/runtime.js +6 -1
  65. package/src/config/schema/conversion.schema.d.ts +1 -0
  66. package/src/config/schema/debug.schema.d.ts +5 -0
  67. package/src/config/schema/doppler.schema.js +16 -21
  68. package/src/config/schema/inference-defaults.schema.js +3 -3
  69. package/src/config/schema/kernel-path.schema.d.ts +5 -1
  70. package/src/config/schema/kernel-thresholds.schema.js +12 -4
  71. package/src/config/schema/manifest.schema.d.ts +3 -2
  72. package/src/config/schema/manifest.schema.js +17 -4
  73. package/src/config/schema/storage.schema.js +1 -1
  74. package/src/config/training-defaults.js +30 -22
  75. package/src/converter/conversion-plan.js +104 -11
  76. package/src/converter/core.d.ts +7 -0
  77. package/src/converter/core.js +16 -9
  78. package/src/converter/execution-v0-manifest.js +4 -1
  79. package/src/converter/index.d.ts +1 -0
  80. package/src/converter/index.js +1 -0
  81. package/src/converter/manifest-inference.js +50 -29
  82. package/src/converter/parsers/diffusion.js +0 -3
  83. package/src/converter/parsers/transformer.js +4 -0
  84. package/src/converter/quantization-info.js +40 -16
  85. package/src/converter/quantizer.js +19 -12
  86. package/src/converter/rope-config.js +8 -6
  87. package/src/converter/shard-packer.d.ts +1 -1
  88. package/src/converter/shard-packer.js +4 -1
  89. package/src/converter/tokenizer-utils.d.ts +1 -0
  90. package/src/converter/tokenizer-utils.js +4 -1
  91. package/src/debug/config.js +123 -11
  92. package/src/debug/reference/hf_qwen35_linear_attn_debug.py +268 -0
  93. package/src/debug/signals.js +7 -1
  94. package/src/debug/tensor.d.ts +2 -0
  95. package/src/debug/tensor.js +13 -2
  96. package/src/distribution/p2p-control-plane.js +52 -12
  97. package/src/distribution/p2p-observability.js +43 -7
  98. package/src/distribution/p2p-webrtc-browser.js +20 -0
  99. package/src/distribution/shard-delivery.js +83 -27
  100. package/src/formats/gguf/types.js +33 -16
  101. package/src/formats/rdrr/groups.d.ts +12 -4
  102. package/src/formats/rdrr/groups.js +3 -6
  103. package/src/formats/rdrr/parsing.d.ts +4 -0
  104. package/src/formats/rdrr/parsing.js +53 -3
  105. package/src/formats/rdrr/types.d.ts +2 -1
  106. package/src/gpu/command-recorder.js +86 -61
  107. package/src/gpu/device.d.ts +1 -0
  108. package/src/gpu/device.js +73 -19
  109. package/src/gpu/kernel-tuner/benchmarks.js +326 -316
  110. package/src/gpu/kernel-tuner/cache.js +71 -4
  111. package/src/gpu/kernel-tuner/tuner.js +22 -4
  112. package/src/gpu/kernels/attention.js +15 -34
  113. package/src/gpu/kernels/backward/adam.js +62 -58
  114. package/src/gpu/kernels/backward/attention_backward.js +257 -169
  115. package/src/gpu/kernels/backward/conv2d_backward.js +14 -1
  116. package/src/gpu/kernels/cast.js +191 -149
  117. package/src/gpu/kernels/check-stop.js +33 -44
  118. package/src/gpu/kernels/conv2d.js +27 -17
  119. package/src/gpu/kernels/cross_entropy_loss.js +21 -15
  120. package/src/gpu/kernels/depthwise_conv2d.js +36 -26
  121. package/src/gpu/kernels/dequant.js +178 -126
  122. package/src/gpu/kernels/energy.d.ts +3 -21
  123. package/src/gpu/kernels/energy.js +111 -88
  124. package/src/gpu/kernels/feature-check.js +1 -1
  125. package/src/gpu/kernels/fused_ffn.js +84 -65
  126. package/src/gpu/kernels/fused_matmul_residual.js +56 -33
  127. package/src/gpu/kernels/fused_matmul_rmsnorm.js +62 -45
  128. package/src/gpu/kernels/gather.js +33 -15
  129. package/src/gpu/kernels/gelu.js +19 -11
  130. package/src/gpu/kernels/grouped_pointwise_conv2d.js +33 -23
  131. package/src/gpu/kernels/groupnorm.js +34 -23
  132. package/src/gpu/kernels/index.d.ts +8 -0
  133. package/src/gpu/kernels/index.js +6 -0
  134. package/src/gpu/kernels/kv-quantize.js +5 -2
  135. package/src/gpu/kernels/layernorm.js +35 -19
  136. package/src/gpu/kernels/logit-merge.js +5 -3
  137. package/src/gpu/kernels/matmul-selection.js +47 -4
  138. package/src/gpu/kernels/matmul.d.ts +2 -0
  139. package/src/gpu/kernels/matmul.js +59 -40
  140. package/src/gpu/kernels/modulate.js +23 -15
  141. package/src/gpu/kernels/moe.js +221 -175
  142. package/src/gpu/kernels/pixel_shuffle.js +22 -14
  143. package/src/gpu/kernels/relu.js +18 -10
  144. package/src/gpu/kernels/repeat_channels.js +25 -17
  145. package/src/gpu/kernels/residual.js +37 -27
  146. package/src/gpu/kernels/rmsnorm.js +66 -43
  147. package/src/gpu/kernels/rope.js +3 -0
  148. package/src/gpu/kernels/sample.js +27 -38
  149. package/src/gpu/kernels/sana_linear_attention.js +18 -10
  150. package/src/gpu/kernels/scale.js +18 -11
  151. package/src/gpu/kernels/shader-cache.js +4 -2
  152. package/src/gpu/kernels/silu.js +120 -72
  153. package/src/gpu/kernels/softmax.js +44 -25
  154. package/src/gpu/kernels/split_qg.d.ts +50 -0
  155. package/src/gpu/kernels/split_qg.js +46 -0
  156. package/src/gpu/kernels/split_qg.wgsl +58 -0
  157. package/src/gpu/kernels/split_qg_f16.wgsl +62 -0
  158. package/src/gpu/kernels/split_qkv.js +23 -13
  159. package/src/gpu/kernels/transpose.js +18 -10
  160. package/src/gpu/kernels/transpose.wgsl +5 -3
  161. package/src/gpu/kernels/upsample2d.js +21 -13
  162. package/src/gpu/kernels/utils.js +20 -13
  163. package/src/gpu/partitioned-buffer-pool.js +10 -2
  164. package/src/gpu/perf-guards.js +2 -9
  165. package/src/gpu/profiler.js +27 -22
  166. package/src/gpu/readback-utils.d.ts +16 -0
  167. package/src/gpu/readback-utils.js +41 -0
  168. package/src/gpu/submit-tracker.js +13 -0
  169. package/src/gpu/uniform-cache.d.ts +1 -0
  170. package/src/gpu/uniform-cache.js +30 -9
  171. package/src/gpu/weight-buffer.d.ts +1 -1
  172. package/src/gpu/weight-buffer.js +1 -1
  173. package/src/hotswap/intent-bundle.js +6 -0
  174. package/src/hotswap/manifest.d.ts +10 -1
  175. package/src/hotswap/manifest.js +12 -2
  176. package/src/hotswap/runtime.js +30 -8
  177. package/src/index-browser.d.ts +44 -0
  178. package/src/index-browser.js +14 -0
  179. package/src/inference/browser-harness-contract-helpers.d.ts +5 -0
  180. package/src/inference/browser-harness-contract-helpers.js +28 -0
  181. package/src/inference/browser-harness-diffusion-energy-suites.d.ts +2 -0
  182. package/src/inference/browser-harness-diffusion-energy-suites.js +269 -0
  183. package/src/inference/browser-harness-model-helpers.d.ts +16 -0
  184. package/src/inference/browser-harness-model-helpers.js +217 -0
  185. package/src/inference/browser-harness-report-helpers.d.ts +7 -0
  186. package/src/inference/browser-harness-report-helpers.js +42 -0
  187. package/src/inference/browser-harness-runtime-helpers.d.ts +61 -0
  188. package/src/inference/browser-harness-runtime-helpers.js +415 -0
  189. package/src/inference/browser-harness-suite-helpers.d.ts +28 -0
  190. package/src/inference/browser-harness-suite-helpers.js +268 -0
  191. package/src/inference/browser-harness-text-helpers.d.ts +27 -0
  192. package/src/inference/browser-harness-text-helpers.js +788 -0
  193. package/src/inference/browser-harness.d.ts +8 -0
  194. package/src/inference/browser-harness.js +149 -1996
  195. package/src/inference/kv-cache/base.js +140 -94
  196. package/src/inference/kv-cache/tiered.js +5 -3
  197. package/src/inference/moe-router.js +88 -56
  198. package/src/inference/multi-model-network.js +5 -3
  199. package/src/inference/network-evolution.d.ts +11 -2
  200. package/src/inference/network-evolution.js +20 -21
  201. package/src/inference/pipelines/context.d.ts +3 -0
  202. package/src/inference/pipelines/context.js +142 -2
  203. package/src/inference/pipelines/diffusion/helpers.js +10 -2
  204. package/src/inference/pipelines/diffusion/pipeline.js +2 -1
  205. package/src/inference/pipelines/diffusion/sd3-transformer.js +10 -10
  206. package/src/inference/pipelines/diffusion/text-encoder-gpu.js +8 -2
  207. package/src/inference/pipelines/diffusion/vae.js +3 -7
  208. package/src/inference/pipelines/energy/pipeline.js +27 -21
  209. package/src/inference/pipelines/energy/quintel.d.ts +5 -0
  210. package/src/inference/pipelines/energy/quintel.js +11 -0
  211. package/src/inference/pipelines/energy-head/row-head-pipeline.js +17 -13
  212. package/src/inference/pipelines/structured/json-head-pipeline.js +26 -11
  213. package/src/inference/pipelines/text/attention/output-projection.d.ts +12 -0
  214. package/src/inference/pipelines/text/attention/output-projection.js +8 -0
  215. package/src/inference/pipelines/text/attention/projections.d.ts +10 -1
  216. package/src/inference/pipelines/text/attention/projections.js +192 -112
  217. package/src/inference/pipelines/text/attention/record.js +77 -14
  218. package/src/inference/pipelines/text/attention/run.js +112 -14
  219. package/src/inference/pipelines/text/config.js +17 -4
  220. package/src/inference/pipelines/text/embed.js +2 -8
  221. package/src/inference/pipelines/text/execution-plan.js +46 -23
  222. package/src/inference/pipelines/text/execution-v0-contract-helpers.d.ts +59 -0
  223. package/src/inference/pipelines/text/execution-v0-contract-helpers.js +937 -0
  224. package/src/inference/pipelines/text/execution-v0-runtime-builders.d.ts +15 -0
  225. package/src/inference/pipelines/text/execution-v0-runtime-builders.js +279 -0
  226. package/src/inference/pipelines/text/execution-v0.js +62 -1013
  227. package/src/inference/pipelines/text/generator-runtime.js +5 -0
  228. package/src/inference/pipelines/text/generator-steps.d.ts +52 -0
  229. package/src/inference/pipelines/text/generator-steps.js +340 -221
  230. package/src/inference/pipelines/text/generator.js +56 -40
  231. package/src/inference/pipelines/text/init.d.ts +13 -0
  232. package/src/inference/pipelines/text/init.js +94 -25
  233. package/src/inference/pipelines/text/kernel-path-auto-select.js +2 -0
  234. package/src/inference/pipelines/text/kernel-trace.d.ts +2 -0
  235. package/src/inference/pipelines/text/kernel-trace.js +6 -0
  236. package/src/inference/pipelines/text/layer.js +4 -9
  237. package/src/inference/pipelines/text/linear-attention.d.ts +15 -0
  238. package/src/inference/pipelines/text/linear-attention.js +113 -9
  239. package/src/inference/pipelines/text/logits/gpu.js +12 -7
  240. package/src/inference/pipelines/text/logits/index.d.ts +6 -1
  241. package/src/inference/pipelines/text/logits/index.js +13 -12
  242. package/src/inference/pipelines/text/logits/utils.d.ts +7 -0
  243. package/src/inference/pipelines/text/logits/utils.js +9 -0
  244. package/src/inference/pipelines/text/lora-apply.js +50 -32
  245. package/src/inference/pipelines/text/model-load.js +282 -104
  246. package/src/inference/pipelines/text/moe-cache.js +5 -4
  247. package/src/inference/pipelines/text/moe-cpu-gptoss.js +74 -69
  248. package/src/inference/pipelines/text/moe-cpu.js +42 -38
  249. package/src/inference/pipelines/text/moe-gpu.js +110 -86
  250. package/src/inference/pipelines/text/ops.js +90 -90
  251. package/src/inference/pipelines/text/probes.js +9 -9
  252. package/src/inference/pipelines/text/sampling.js +52 -6
  253. package/src/inference/pipelines/text/weights.js +17 -7
  254. package/src/inference/pipelines/text.js +13 -1
  255. package/src/inference/speculative.d.ts +2 -2
  256. package/src/inference/speculative.js +4 -18
  257. package/src/inference/test-harness.d.ts +1 -1
  258. package/src/inference/test-harness.js +17 -7
  259. package/src/inference/tokenizer.d.ts +0 -5
  260. package/src/inference/tokenizer.js +4 -23
  261. package/src/inference/tokenizers/bpe.js +9 -0
  262. package/src/inference/tokenizers/bundled.js +20 -0
  263. package/src/inference/tokenizers/sentencepiece.js +12 -0
  264. package/src/loader/doppler-loader.js +38 -22
  265. package/src/loader/dtype-utils.js +3 -44
  266. package/src/loader/embedding-loader.js +7 -3
  267. package/src/loader/experts/expert-cache.js +13 -6
  268. package/src/loader/experts/expert-loader.js +10 -6
  269. package/src/loader/final-weights-loader.js +10 -4
  270. package/src/loader/layer-loader.js +2 -1
  271. package/src/loader/loader-state.js +2 -2
  272. package/src/loader/memory-monitor.js +8 -0
  273. package/src/loader/multi-model-loader.d.ts +14 -0
  274. package/src/loader/multi-model-loader.js +70 -24
  275. package/src/loader/shard-cache.js +84 -14
  276. package/src/loader/shard-resolver.js +25 -3
  277. package/src/loader/tensors/tensor-loader.js +214 -144
  278. package/src/loader/tensors/tensor-reader.js +76 -19
  279. package/src/loader/weight-downcast.js +1 -1
  280. package/src/memory/buffer-pool.d.ts +9 -1
  281. package/src/memory/buffer-pool.js +109 -44
  282. package/src/memory/unified-detect.js +1 -1
  283. package/src/rules/inference/dtype.rules.json +5 -0
  284. package/src/rules/inference/kernel-path.rules.json +24 -8
  285. package/src/rules/kernels/split-qg.rules.json +6 -0
  286. package/src/rules/rule-registry.js +27 -1
  287. package/src/storage/backends/opfs-store.js +68 -24
  288. package/src/storage/downloader.js +365 -83
  289. package/src/storage/index.d.ts +3 -0
  290. package/src/storage/index.js +3 -0
  291. package/src/storage/preflight.d.ts +2 -2
  292. package/src/storage/preflight.js +24 -2
  293. package/src/storage/quickstart-downloader.js +11 -5
  294. package/src/storage/registry.js +10 -4
  295. package/src/storage/reports.js +1 -1
  296. package/src/storage/shard-manager.d.ts +15 -1
  297. package/src/storage/shard-manager.js +55 -6
  298. package/src/storage/source-artifact-store.d.ts +52 -0
  299. package/src/storage/source-artifact-store.js +234 -0
  300. package/src/tooling/command-api-constants.d.ts +9 -0
  301. package/src/tooling/command-api-constants.js +9 -0
  302. package/src/tooling/command-api-family-normalizers.d.ts +9 -0
  303. package/src/tooling/command-api-family-normalizers.js +343 -0
  304. package/src/tooling/command-api-helpers.d.ts +25 -0
  305. package/src/tooling/command-api-helpers.js +262 -0
  306. package/src/tooling/command-api.js +16 -602
  307. package/src/tooling/command-envelope.js +4 -1
  308. package/src/tooling/command-runner-shared.js +52 -18
  309. package/src/tooling/conversion-config-materializer.js +3 -5
  310. package/src/tooling/lean-execution-contract.js +150 -3
  311. package/src/tooling/node-browser-command-runner.js +161 -271
  312. package/src/tooling/node-command-runner.js +29 -3
  313. package/src/tooling/node-converter.js +30 -1
  314. package/src/tooling/node-source-runtime.d.ts +1 -1
  315. package/src/tooling/node-source-runtime.js +120 -3
  316. package/src/tooling/node-webgpu.js +24 -21
  317. package/src/tooling/opfs-cache.js +21 -4
  318. package/src/tooling/runtime-input-composition.d.ts +38 -0
  319. package/src/tooling/runtime-input-composition.js +86 -0
  320. package/src/tooling/source-runtime-bundle.d.ts +40 -5
  321. package/src/tooling/source-runtime-bundle.js +261 -34
  322. package/src/tooling/source-runtime-materializer.d.ts +6 -0
  323. package/src/tooling/source-runtime-materializer.js +93 -0
  324. package/src/training/attention-backward.js +32 -17
  325. package/src/training/autograd.js +80 -52
  326. package/src/training/checkpoint-watch.d.ts +2 -1
  327. package/src/training/checkpoint-watch.js +39 -6
  328. package/src/training/checkpoint.js +40 -11
  329. package/src/training/clip.js +2 -1
  330. package/src/training/datasets/token-batch.js +20 -8
  331. package/src/training/distillation/checkpoint-watch.js +1 -0
  332. package/src/training/distillation/student-fixture.d.ts +22 -0
  333. package/src/training/distillation/student-fixture.js +846 -0
  334. package/src/training/distillation/suite-data.d.ts +45 -0
  335. package/src/training/distillation/suite-data.js +189 -0
  336. package/src/training/lora-pipeline.js +4 -7
  337. package/src/training/lora.js +26 -12
  338. package/src/training/loss.js +5 -6
  339. package/src/training/objectives/cross_entropy.js +2 -5
  340. package/src/training/objectives/distill_kd.js +4 -8
  341. package/src/training/objectives/distill_triplet.js +4 -8
  342. package/src/training/objectives/ul_stage2_base.js +4 -8
  343. package/src/training/operator-command.js +2 -0
  344. package/src/training/optimizer.js +19 -7
  345. package/src/training/runner.js +2 -1
  346. package/src/training/suite.js +18 -978
  347. package/src/training/tensor-factory.d.ts +9 -0
  348. package/src/training/tensor-factory.js +13 -0
  349. package/src/training/trainer.js +3 -5
  350. package/src/training/ul_dataset.js +3 -5
  351. package/src/training/workloads.js +70 -79
  352. package/src/types/model.d.ts +5 -0
  353. package/src/version.js +1 -1
  354. package/tools/convert-safetensors-node.js +22 -16
  355. package/tools/doppler-cli.js +50 -26
@@ -2,6 +2,7 @@ import { log } from '../debug/index.js';
2
2
  import { PARAM_CATEGORIES, CategoryRules } from './param-categories.js';
3
3
  import { TOOLING_INTENTS, TOOLING_DIAGNOSTICS } from './schema/tooling.schema.js';
4
4
  import { validateEcosystemConfig } from './schema/ecosystem.schema.js';
5
+ import { isPlainObject } from '../utils/plain-object.js';
5
6
 
6
7
  export function validateCallTimeOptions(options) {
7
8
  if (!options) return;
@@ -33,7 +34,23 @@ export function validateCallTimeOptions(options) {
33
34
  }
34
35
 
35
36
  export function validateRuntimeOverrides(overrides) {
37
+ if (!isPlainObject(overrides)) {
38
+ throw new Error('DopplerConfigError: runtime overrides must be an object when provided.');
39
+ }
40
+
41
+ assertRequiredRuntimeOverrideNotNull(overrides, 'shared');
42
+ assertRequiredRuntimeOverrideNotNull(overrides, 'loading');
43
+ assertRequiredRuntimeOverrideNotNull(overrides, 'inference');
44
+ assertRequiredRuntimeOverrideNotNull(overrides, 'emulation');
45
+ assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'batching', 'runtime.inference');
46
+ assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'compute', 'runtime.inference');
47
+ assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'generation', 'runtime.inference');
48
+ assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'kernelPathPolicy', 'runtime.inference');
49
+
36
50
  const modelOverrides = overrides?.inference?.modelOverrides;
51
+ if (modelOverrides !== undefined && modelOverrides !== null && !isPlainObject(modelOverrides)) {
52
+ throw new Error('DopplerConfigError: runtime.inference.modelOverrides must be an object when provided.');
53
+ }
37
54
  if (!modelOverrides) return;
38
55
 
39
56
  const params = flattenObject(modelOverrides);
@@ -214,6 +231,15 @@ function validateKernelPathPolicy(label, value) {
214
231
  if (!value || typeof value !== 'object' || Array.isArray(value)) {
215
232
  throw new Error(`DopplerConfigError: ${label} must be an object.`);
216
233
  }
234
+ if (
235
+ value.sourceScope !== undefined
236
+ && value.allowSources !== undefined
237
+ && !arraysEqual(value.sourceScope, value.allowSources)
238
+ ) {
239
+ throw new Error(
240
+ `DopplerConfigError: ${label}.sourceScope and ${label}.allowSources must match exactly when both are provided.`
241
+ );
242
+ }
217
243
  if (value.mode !== 'locked' && value.mode !== 'capability-aware') {
218
244
  throw new Error(`DopplerConfigError: ${label}.mode must be "locked" or "capability-aware".`);
219
245
  }
@@ -224,12 +250,31 @@ function validateKernelPathPolicy(label, value) {
224
250
  if (value.onIncompatible !== 'error' && value.onIncompatible !== 'remap') {
225
251
  throw new Error(`DopplerConfigError: ${label}.onIncompatible must be "error" or "remap".`);
226
252
  }
227
- const validSources = new Set(['model', 'manifest', 'config', 'runtime', 'execution-v0']);
253
+ const validSources = new Set(['model', 'manifest', 'config', 'execution-v0']);
228
254
  for (const source of sourceScope) {
229
255
  if (!validSources.has(source)) {
230
256
  throw new Error(
231
- `DopplerConfigError: ${label}.sourceScope entries must be model|manifest|config|runtime|execution-v0.`
257
+ `DopplerConfigError: ${label}.sourceScope entries must be model|manifest|config|execution-v0.`
232
258
  );
233
259
  }
234
260
  }
235
261
  }
262
+
263
+ function assertRequiredRuntimeOverrideNotNull(container, key, prefix = 'runtime') {
264
+ if (!isPlainObject(container) || !Object.prototype.hasOwnProperty.call(container, key)) {
265
+ return;
266
+ }
267
+ if (container[key] === null) {
268
+ throw new Error(`DopplerConfigError: ${prefix}.${key} must not be null.`);
269
+ }
270
+ }
271
+
272
+ function arraysEqual(left, right) {
273
+ if (!Array.isArray(left) || !Array.isArray(right)) {
274
+ return false;
275
+ }
276
+ if (left.length !== right.length) {
277
+ return false;
278
+ }
279
+ return left.every((value, index) => value === right[index]);
280
+ }
@@ -1,7 +1,7 @@
1
1
  {
2
- "id": "gemma2-q4k-dequant-f32a",
3
- "name": "Gemma 2 Q4K Dequant (F32 activations)",
4
- "description": "Q4K weights dequantized to F16 with F32 activations. Non-fused compatibility path with no subgroup requirement.",
2
+ "id": "gemma2-q4k-dequant-f32a-nosubgroups",
3
+ "name": "Gemma 2 Q4K Dequant (F32 activations, no subgroups)",
4
+ "description": "Q4K weights dequantized to F16 with F32 activations. Non-fused path with no subgroup requirement that still requires shader-f16 kernels.",
5
5
  "activationDtype": "f32",
6
6
 
7
7
  "decode": {
@@ -0,0 +1,223 @@
1
+ {
2
+ "id": "gemma3-f16-fused-f32a-online-streamingprefill",
3
+ "name": "Gemma 3 F16 (F32 activations, online, streaming prefill)",
4
+ "description": "F16 weights with F32 activations, online decode attention, and streaming prefill attention for Gemma 3 stability-sensitive runs.",
5
+ "activationDtype": "f32",
6
+ "kvDtype": "f16",
7
+ "decode": {
8
+ "steps": [
9
+ {
10
+ "op": "input_norm",
11
+ "kernel": "rmsnorm.wgsl",
12
+ "entry": "main"
13
+ },
14
+ {
15
+ "op": "q_proj",
16
+ "kernel": "matmul_gemv_subgroup.wgsl",
17
+ "entry": "main_vec4",
18
+ "weights": "layer.{L}.self_attn.q_proj"
19
+ },
20
+ {
21
+ "op": "k_proj",
22
+ "kernel": "matmul_gemv_subgroup.wgsl",
23
+ "entry": "main_vec4",
24
+ "weights": "layer.{L}.self_attn.k_proj"
25
+ },
26
+ {
27
+ "op": "v_proj",
28
+ "kernel": "matmul_gemv_subgroup.wgsl",
29
+ "entry": "main_vec4",
30
+ "weights": "layer.{L}.self_attn.v_proj"
31
+ },
32
+ {
33
+ "op": "rope_q",
34
+ "kernel": "rope.wgsl",
35
+ "entry": "main"
36
+ },
37
+ {
38
+ "op": "rope_k",
39
+ "kernel": "rope.wgsl",
40
+ "entry": "main"
41
+ },
42
+ {
43
+ "op": "attention",
44
+ "kernel": "attention_decode_online_f16kv.wgsl",
45
+ "entry": "main"
46
+ },
47
+ {
48
+ "op": "o_proj",
49
+ "kernel": "matmul_gemv_subgroup.wgsl",
50
+ "entry": "main_vec4",
51
+ "weights": "layer.{L}.self_attn.o_proj"
52
+ },
53
+ {
54
+ "op": "attn_residual",
55
+ "kernel": "residual.wgsl",
56
+ "entry": "main"
57
+ },
58
+ {
59
+ "op": "post_attn_norm",
60
+ "kernel": "rmsnorm.wgsl",
61
+ "entry": "main"
62
+ },
63
+ {
64
+ "op": "gate_proj",
65
+ "kernel": "matmul_gemv_subgroup.wgsl",
66
+ "entry": "main_vec4",
67
+ "weights": "layer.{L}.mlp.gate_proj"
68
+ },
69
+ {
70
+ "op": "up_proj",
71
+ "kernel": "matmul_gemv_subgroup.wgsl",
72
+ "entry": "main_vec4",
73
+ "weights": "layer.{L}.mlp.up_proj"
74
+ },
75
+ {
76
+ "op": "activation",
77
+ "kernel": "gelu.wgsl",
78
+ "entry": "main",
79
+ "constants": {
80
+ "HAS_GATE": true
81
+ }
82
+ },
83
+ {
84
+ "op": "down_proj",
85
+ "kernel": "matmul_gemv_subgroup.wgsl",
86
+ "entry": "main_vec4",
87
+ "weights": "layer.{L}.mlp.down_proj"
88
+ },
89
+ {
90
+ "op": "ffn_residual",
91
+ "kernel": "residual.wgsl",
92
+ "entry": "main"
93
+ }
94
+ ]
95
+ },
96
+ "prefill": {
97
+ "steps": [
98
+ {
99
+ "op": "input_norm",
100
+ "kernel": "rmsnorm.wgsl",
101
+ "entry": "main"
102
+ },
103
+ {
104
+ "op": "q_proj",
105
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
106
+ "entry": "main",
107
+ "weights": "layer.{L}.self_attn.q_proj"
108
+ },
109
+ {
110
+ "op": "k_proj",
111
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
112
+ "entry": "main",
113
+ "weights": "layer.{L}.self_attn.k_proj"
114
+ },
115
+ {
116
+ "op": "v_proj",
117
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
118
+ "entry": "main",
119
+ "weights": "layer.{L}.self_attn.v_proj"
120
+ },
121
+ {
122
+ "op": "rope_q",
123
+ "kernel": "rope.wgsl",
124
+ "entry": "main"
125
+ },
126
+ {
127
+ "op": "rope_k",
128
+ "kernel": "rope.wgsl",
129
+ "entry": "main"
130
+ },
131
+ {
132
+ "op": "attention",
133
+ "kernel": "attention_streaming_f16kv.wgsl",
134
+ "entry": "main"
135
+ },
136
+ {
137
+ "op": "o_proj",
138
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
139
+ "entry": "main",
140
+ "weights": "layer.{L}.self_attn.o_proj"
141
+ },
142
+ {
143
+ "op": "attn_residual",
144
+ "kernel": "residual.wgsl",
145
+ "entry": "main"
146
+ },
147
+ {
148
+ "op": "post_attn_norm",
149
+ "kernel": "rmsnorm.wgsl",
150
+ "entry": "main"
151
+ },
152
+ {
153
+ "op": "gate_proj",
154
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
155
+ "entry": "main",
156
+ "weights": "layer.{L}.mlp.gate_proj"
157
+ },
158
+ {
159
+ "op": "up_proj",
160
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
161
+ "entry": "main",
162
+ "weights": "layer.{L}.mlp.up_proj"
163
+ },
164
+ {
165
+ "op": "activation",
166
+ "kernel": "gelu.wgsl",
167
+ "entry": "main",
168
+ "constants": {
169
+ "HAS_GATE": true
170
+ }
171
+ },
172
+ {
173
+ "op": "down_proj",
174
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
175
+ "entry": "main",
176
+ "weights": "layer.{L}.mlp.down_proj"
177
+ },
178
+ {
179
+ "op": "ffn_residual",
180
+ "kernel": "residual.wgsl",
181
+ "entry": "main"
182
+ }
183
+ ]
184
+ },
185
+ "preLayer": [
186
+ {
187
+ "op": "embed",
188
+ "kernel": "gather.wgsl",
189
+ "entry": "main",
190
+ "weights": "embed_tokens"
191
+ }
192
+ ],
193
+ "postLayer": [
194
+ {
195
+ "op": "final_norm",
196
+ "kernel": "rmsnorm.wgsl",
197
+ "entry": "main"
198
+ },
199
+ {
200
+ "op": "lm_head",
201
+ "kernel": "matmul_gemv_subgroup.wgsl",
202
+ "entry": "main_multicol",
203
+ "weights": "lm_head",
204
+ "constants": {
205
+ "MULTICOL_COLS_PER_WG": 64,
206
+ "MULTICOL_THREADS_PER_COL": 4
207
+ }
208
+ },
209
+ {
210
+ "op": "lm_head_prefill",
211
+ "kernel": "matmul_f16w_f32a_tiled.wgsl",
212
+ "entry": "main",
213
+ "weights": "lm_head"
214
+ }
215
+ ],
216
+ "sampling": [
217
+ {
218
+ "op": "sample",
219
+ "kernel": "sample.wgsl",
220
+ "entry": "sample_single_pass"
221
+ }
222
+ ]
223
+ }
@@ -1,7 +1,7 @@
1
1
  {
2
- "id": "gemma3-q4k-dequant-f32a",
3
- "name": "Gemma 3 Q4K Dequant (F32 activations)",
4
- "description": "Q4K weights dequantized to F16 with F32 activations for improved stability on small Gemma 3 checkpoints.",
2
+ "id": "gemma3-q4k-dequant-f32a-nosubgroups",
3
+ "name": "Gemma 3 Q4K Dequant (F32 activations, no subgroups)",
4
+ "description": "Q4K weights dequantized to F16 with F32 activations. Subgroup-free non-online path for Gemma 3 that still requires shader-f16 kernels.",
5
5
  "activationDtype": "f32",
6
6
  "kvDtype": "f16",
7
7
 
@@ -0,0 +1,56 @@
1
+ {
2
+ "id": "gemma3-q4k-dequant-f32w-f32a-online",
3
+ "name": "Gemma 3 Q4K Dequant (F32 projection weights, F32 activations, online decode)",
4
+ "description": "Q4K projection weights dequantized to F32 with F32 activations. Tied embeddings and LM head stay on the native F16 path. Decode uses online attention; prefill uses streaming attention.",
5
+ "activationDtype": "f32",
6
+ "kvDtype": "f16",
7
+ "decode": {
8
+ "steps": [
9
+ { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
10
+ { "op": "q_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
11
+ { "op": "k_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
12
+ { "op": "v_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
13
+ { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
14
+ { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
15
+ { "op": "attention", "kernel": "attention_decode_online_f16kv.wgsl", "entry": "main" },
16
+ { "op": "o_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
17
+ { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
18
+ { "op": "post_attn_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
19
+ { "op": "gate_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
20
+ { "op": "up_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
21
+ { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
22
+ { "op": "down_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
23
+ { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
24
+ ]
25
+ },
26
+ "prefill": {
27
+ "steps": [
28
+ { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
29
+ { "op": "q_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
30
+ { "op": "k_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
31
+ { "op": "v_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
32
+ { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
33
+ { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
34
+ { "op": "attention", "kernel": "attention_streaming_f16kv.wgsl", "entry": "main" },
35
+ { "op": "o_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
36
+ { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
37
+ { "op": "post_attn_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
38
+ { "op": "gate_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
39
+ { "op": "up_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
40
+ { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
41
+ { "op": "down_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
42
+ { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
43
+ ]
44
+ },
45
+ "preLayer": [
46
+ { "op": "embed", "kernel": "gather_f16.wgsl", "entry": "main", "weights": "embed_tokens" }
47
+ ],
48
+ "postLayer": [
49
+ { "op": "final_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
50
+ { "op": "lm_head", "kernel": "matmul_gemv_subgroup.wgsl", "entry": "main_multicol", "weights": "lm_head", "constants": { "MULTICOL_COLS_PER_WG": 64, "MULTICOL_THREADS_PER_COL": 4 } },
51
+ { "op": "lm_head_prefill", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "lm_head" }
52
+ ],
53
+ "sampling": [
54
+ { "op": "sample", "kernel": "sample.wgsl", "entry": "sample_single_pass" }
55
+ ]
56
+ }
@@ -0,0 +1,61 @@
1
+ {
2
+ "id": "lfm2-q4k-dequant-f32a-nosubgroups",
3
+ "name": "LFM2 Q4K Dequant (F32 activations, no subgroups)",
4
+ "description": "Subgroup-free LFM2 Q4K path: F32 activations with tiled prefill matmul and small-kernel prefill attention. Still requires shader-f16 kernels.",
5
+ "activationDtype": "f32",
6
+ "kvDtype": "f16",
7
+
8
+ "decode": {
9
+ "steps": [
10
+ { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
11
+ { "op": "q_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
12
+ { "op": "k_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
13
+ { "op": "v_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
14
+ { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
15
+ { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
16
+ { "op": "attention", "kernel": "attention_decode_chunked_f16kv.wgsl", "entry": "main" },
17
+ { "op": "o_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
18
+ { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
19
+ { "op": "post_attn_norm","kernel": "rmsnorm.wgsl", "entry": "main" },
20
+ { "op": "gate_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
21
+ { "op": "up_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
22
+ { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
23
+ { "op": "down_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
24
+ { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
25
+ ]
26
+ },
27
+
28
+ "prefill": {
29
+ "steps": [
30
+ { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
31
+ { "op": "q_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
32
+ { "op": "k_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
33
+ { "op": "v_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
34
+ { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
35
+ { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
36
+ { "op": "attention", "kernel": "attention_small_f16kv.wgsl", "entry": "main" },
37
+ { "op": "o_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
38
+ { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
39
+ { "op": "post_attn_norm","kernel": "rmsnorm.wgsl", "entry": "main" },
40
+ { "op": "gate_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
41
+ { "op": "up_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
42
+ { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
43
+ { "op": "down_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
44
+ { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
45
+ ]
46
+ },
47
+
48
+ "preLayer": [
49
+ { "op": "embed", "kernel": "gather_f16.wgsl", "entry": "main", "weights": "embed_tokens" }
50
+ ],
51
+
52
+ "postLayer": [
53
+ { "op": "final_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
54
+ { "op": "lm_head", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "lm_head" },
55
+ { "op": "lm_head_prefill", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "lm_head" }
56
+ ],
57
+
58
+ "sampling": [
59
+ { "op": "sample", "kernel": "sample.wgsl", "entry": "sample_single_pass" }
60
+ ]
61
+ }
@@ -16,11 +16,18 @@
16
16
  "notes": "Default Gemma 2 Q4K dequant path for baseline F16 activation workflows."
17
17
  },
18
18
  {
19
- "id": "gemma2-q4k-dequant-f32a",
20
- "file": "gemma2-q4k-dequant-f32a.json",
19
+ "id": "gemma2-q4k-dequant-f32a-nosubgroups",
20
+ "file": "gemma2-q4k-dequant-f32a-nosubgroups.json",
21
21
  "status": "canonical",
22
- "statusReason": "compatibility",
23
- "notes": "Subgroup-free Gemma 2 Q4K dequant path with F32 activations."
22
+ "statusReason": "subgroup-free",
23
+ "notes": "Subgroup-free Gemma 2 Q4K dequant path with F32 activations. Still requires shader-f16 kernels."
24
+ },
25
+ {
26
+ "id": "gemma2-q4k-dequant-f32a",
27
+ "aliasOf": "gemma2-q4k-dequant-f32a-nosubgroups",
28
+ "status": "legacy",
29
+ "statusReason": "compatibility-alias",
30
+ "notes": "Legacy alias for gemma2-q4k-dequant-f32a-nosubgroups."
24
31
  },
25
32
  {
26
33
  "id": "gemma2-f16-f16a",
@@ -50,6 +57,13 @@
50
57
  "statusReason": "benchmark-probe",
51
58
  "notes": "Experimental fused F32 pipeline variant for fast decode."
52
59
  },
60
+ {
61
+ "id": "gemma3-f16-fused-f32a-online-streamingprefill",
62
+ "file": "gemma3-f16-fused-f32a-online-streamingprefill.json",
63
+ "status": "experimental",
64
+ "statusReason": "stability-probe",
65
+ "notes": "Gemma 3 F16/F32 online path with streaming prefill attention instead of the small-tile prefill kernel."
66
+ },
53
67
  {
54
68
  "id": "gemma3-q4k-dequant-f16a-online",
55
69
  "file": "gemma3-q4k-dequant-f16a-online.json",
@@ -58,11 +72,18 @@
58
72
  "notes": "Experimental Gemma 3 Q4K path using online decode attention on subgroup-capable GPUs."
59
73
  },
60
74
  {
61
- "id": "gemma3-q4k-dequant-f32a",
62
- "file": "gemma3-q4k-dequant-f32a.json",
75
+ "id": "gemma3-q4k-dequant-f32a-nosubgroups",
76
+ "file": "gemma3-q4k-dequant-f32a-nosubgroups.json",
63
77
  "status": "canonical",
64
- "statusReason": "accuracy-first",
65
- "notes": "Gemma 3 Q4K dequant path with F32 activation for quality-first runs."
78
+ "statusReason": "subgroup-free",
79
+ "notes": "Subgroup-free Gemma 3 Q4K dequant path with F32 activations. Still requires shader-f16 kernels."
80
+ },
81
+ {
82
+ "id": "gemma3-q4k-dequant-f32a",
83
+ "aliasOf": "gemma3-q4k-dequant-f32a-nosubgroups",
84
+ "status": "legacy",
85
+ "statusReason": "compatibility-alias",
86
+ "notes": "Legacy alias for gemma3-q4k-dequant-f32a-nosubgroups."
66
87
  },
67
88
  {
68
89
  "id": "gemma3-q4k-dequant-f32a-online",
@@ -71,6 +92,13 @@
71
92
  "statusReason": "default",
72
93
  "notes": "Gemma 3 Q4K dequant default: subgroup GEMV + online attention + tuned lm_head multicol, F32 activations."
73
94
  },
95
+ {
96
+ "id": "gemma3-q4k-dequant-f32w-f32a-online",
97
+ "file": "gemma3-q4k-dequant-f32w-f32a-online.json",
98
+ "status": "experimental",
99
+ "statusReason": "accuracy-probe",
100
+ "notes": "Gemma 3 Q4K dequant path that keeps matmul weights in F32 and runs F32 matmul kernels for numeric-sensitivity debugging."
101
+ },
74
102
  {
75
103
  "id": "lfm2-q4k-dequant-f32a-online",
76
104
  "file": "lfm2-q4k-dequant-f32a-online.json",
@@ -78,6 +106,13 @@
78
106
  "statusReason": "default",
79
107
  "notes": "LFM2 Q4K default: subgroup GEMV decode with tiled fast-prefill path and F32 activations."
80
108
  },
109
+ {
110
+ "id": "lfm2-q4k-dequant-f32a-nosubgroups",
111
+ "file": "lfm2-q4k-dequant-f32a-nosubgroups.json",
112
+ "status": "canonical",
113
+ "statusReason": "subgroup-free",
114
+ "notes": "Subgroup-free LFM2 Q4K dequant path with F32 activations and tiled prefill. Still requires shader-f16 kernels."
115
+ },
81
116
  {
82
117
  "id": "embeddinggemma-f16-f32a",
83
118
  "file": "embeddinggemma-f16-f32a.json",
@@ -9,6 +9,7 @@
9
9
 
10
10
  "inference": {
11
11
  "attention": {
12
+ "queryPreAttnScalar": 256,
12
13
  "slidingWindow": 4096,
13
14
  "attnLogitSoftcapping": 50.0,
14
15
  "queryKeyNorm": false
@@ -40,8 +41,8 @@
40
41
  "f32": "gemma2-f16-f32a"
41
42
  },
42
43
  "q4k": {
43
- "f16": "gemma2-q4k-dequant-f32a",
44
- "f32": "gemma2-q4k-dequant-f32a"
44
+ "f16": "gemma2-q4k-dequant-f16a",
45
+ "f32": "gemma2-q4k-dequant-f32a-nosubgroups"
45
46
  }
46
47
  }
47
48
  },
@@ -8,7 +8,9 @@
8
8
  },
9
9
  "inference": {
10
10
  "attention": {
11
+ "queryPreAttnScalar": 256,
11
12
  "attnLogitSoftcapping": null,
13
+ "slidingWindow": 512,
12
14
  "queryKeyNorm": true
13
15
  },
14
16
  "normalization": {
@@ -10,7 +10,8 @@
10
10
  "inference": {
11
11
  "attention": {
12
12
  "slidingWindow": null,
13
- "queryKeyNorm": true
13
+ "queryKeyNorm": true,
14
+ "attentionOutputGate": true
14
15
  },
15
16
  "output": {
16
17
  "scaleEmbeddings": false
@@ -39,8 +40,8 @@
39
40
  },
40
41
 
41
42
  "detection": {
42
- "architecturePatterns": ["qwen3", "qwen3_5", "Qwen3ForCausalLM", "Qwen3_5ForCausalLM", "Qwen2ForCausalLM"],
43
- "modelTypePatterns": ["qwen3_5", "qwen3_5_text", "qwen2"],
43
+ "architecturePatterns": ["qwen3", "Qwen3ForCausalLM", "Qwen2ForCausalLM"],
44
+ "modelTypePatterns": ["qwen3", "qwen2"],
44
45
  "configPatterns": {
45
46
  "model_type": "qwen2"
46
47
  }
@@ -0,0 +1,16 @@
1
+ {
2
+ "id": "qwen3_5",
3
+ "name": "Qwen 3.5",
4
+ "extends": "qwen3",
5
+
6
+ "inference": {
7
+ "normalization": {
8
+ "rmsNormWeightOffset": true
9
+ }
10
+ },
11
+
12
+ "detection": {
13
+ "architecturePatterns": ["qwen3_5", "Qwen3_5ForCausalLM", "Qwen3_5ForConditionalGeneration"],
14
+ "modelTypePatterns": ["qwen3_5", "qwen3_5_text"]
15
+ }
16
+ }
@@ -6,7 +6,7 @@
6
6
  "owner": "doppler-core",
7
7
  "createdAtUtc": "2026-02-25T00:00:00Z",
8
8
  "extends": "modes/bench",
9
- "model": "gemma-3-1b-it-wq4k-ef16-hf16",
9
+ "model": "gemma-3-1b-it-q4k-ehf16-af32",
10
10
  "runtime": {
11
11
  "shared": {
12
12
  "benchmark": {
@@ -7,7 +7,7 @@
7
7
  "owner": "doppler-core",
8
8
  "createdAtUtc": "2026-02-25T00:00:00Z",
9
9
  "extends": "default",
10
- "model": "gemma-3-1b-it-wq4k-ef16-hf16",
10
+ "model": "gemma-3-1b-it-q4k-ehf16-af32",
11
11
  "runtime": {
12
12
  "shared": {
13
13
  "tooling": {
@@ -6,7 +6,7 @@
6
6
  "owner": "doppler-core",
7
7
  "createdAtUtc": "2026-02-25T00:00:00Z",
8
8
  "extends": "modes/bench",
9
- "model": "gemma-3-1b-it-wf16-ef16-hf16",
9
+ "model": "gemma-3-1b-it-f16-af32",
10
10
  "runtime": {
11
11
  "shared": {
12
12
  "tooling": {