@simulatte/doppler 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +145 -0
- package/README.md +16 -23
- package/package.json +30 -32
- package/src/adapters/adapter-registry.js +12 -1
- package/src/adapters/lora-loader.js +23 -6
- package/src/bridge/extension-client.d.ts +5 -0
- package/src/bridge/extension-client.js +40 -0
- package/src/bridge/index.d.ts +2 -1
- package/src/bridge/index.js +6 -4
- package/src/browser/browser-converter.js +31 -1
- package/src/browser/file-picker.js +6 -0
- package/src/browser/safetensors-parser-browser.js +84 -1
- package/src/browser/shard-io-browser.js +2 -2
- package/src/browser/tensor-source-download.js +8 -2
- package/src/browser/tensor-source-http.d.ts +1 -0
- package/src/browser/tensor-source-http.js +5 -1
- package/src/client/doppler-api.browser.js +20 -4
- package/src/client/doppler-api.js +19 -3
- package/src/client/doppler-provider/generation.js +12 -0
- package/src/client/doppler-provider/model-manager.d.ts +10 -0
- package/src/client/doppler-provider/model-manager.js +91 -19
- package/src/client/doppler-provider/source-runtime.d.ts +2 -1
- package/src/client/doppler-provider/source-runtime.js +132 -13
- package/src/client/doppler-registry.json +5 -20
- package/src/config/backward-registry-loader.js +17 -2
- package/src/config/execution-v0-contract-check.js +113 -15
- package/src/config/kernel-path-contract-check.js +57 -29
- package/src/config/kernel-path-loader.d.ts +5 -0
- package/src/config/kernel-path-loader.js +18 -36
- package/src/config/kernels/kernel-ref-digests.js +1 -1
- package/src/config/kernels/registry.js +14 -1
- package/src/config/kernels/registry.json +81 -5
- package/src/config/loader.d.ts +1 -1
- package/src/config/loader.js +15 -2
- package/src/config/merge-contract-check.js +66 -4
- package/src/config/merge-helpers.js +128 -7
- package/src/config/merge.d.ts +1 -0
- package/src/config/merge.js +10 -0
- package/src/config/param-validator.js +47 -2
- package/src/config/presets/kernel-paths/{gemma2-q4k-dequant-f32a.json → gemma2-q4k-dequant-f32a-nosubgroups.json} +3 -3
- package/src/config/presets/kernel-paths/gemma3-f16-fused-f32a-online-streamingprefill.json +223 -0
- package/src/config/presets/kernel-paths/{gemma3-q4k-dequant-f32a.json → gemma3-q4k-dequant-f32a-nosubgroups.json} +3 -3
- package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32w-f32a-online.json +56 -0
- package/src/config/presets/kernel-paths/lfm2-q4k-dequant-f32a-nosubgroups.json +61 -0
- package/src/config/presets/kernel-paths/registry.json +43 -8
- package/src/config/presets/models/gemma2.json +3 -2
- package/src/config/presets/models/gemma3.json +2 -0
- package/src/config/presets/models/qwen3.json +4 -3
- package/src/config/presets/models/qwen3_5.json +16 -0
- package/src/config/presets/runtime/experiments/bench/gemma3-bench-q4k.json +1 -1
- package/src/config/presets/runtime/experiments/debug/gemma3-debug-q4k.json +1 -1
- package/src/config/presets/runtime/experiments/verify/gemma3-verify.json +1 -1
- package/src/config/presets/runtime/kernels/dequant-f16-q4k.json +6 -13
- package/src/config/presets/runtime/kernels/dequant-f32-q4k.json +6 -13
- package/src/config/presets/runtime/kernels/embeddinggemma-q4k-dequant-f32a.json +37 -0
- package/src/config/presets/runtime/kernels/fused-q4k.json +6 -13
- package/src/config/presets/runtime/kernels/gemma2-q4k-dequant-f16a.json +33 -0
- package/src/config/presets/runtime/kernels/gemma2-q4k-dequant-f32a-nosubgroups.json +33 -0
- package/src/config/presets/runtime/kernels/gemma2-q4k-fused-f32a.json +33 -0
- package/src/config/presets/runtime/kernels/safe-q4k.json +6 -13
- package/src/config/presets/runtime/model/qwen3-5-layer-probe.json +52 -0
- package/src/config/presets/runtime/model/qwen3-5-linear-attn-debug.json +90 -0
- package/src/config/presets/runtime/platform/metal-apple-q4k.json +1 -1
- package/src/config/runtime.js +6 -1
- package/src/config/schema/conversion.schema.d.ts +1 -0
- package/src/config/schema/debug.schema.d.ts +5 -0
- package/src/config/schema/doppler.schema.js +16 -21
- package/src/config/schema/inference-defaults.schema.js +3 -3
- package/src/config/schema/kernel-path.schema.d.ts +5 -1
- package/src/config/schema/kernel-thresholds.schema.js +12 -4
- package/src/config/schema/manifest.schema.d.ts +3 -2
- package/src/config/schema/manifest.schema.js +17 -4
- package/src/config/schema/storage.schema.js +1 -1
- package/src/config/training-defaults.js +30 -22
- package/src/converter/conversion-plan.js +104 -11
- package/src/converter/core.d.ts +7 -0
- package/src/converter/core.js +16 -9
- package/src/converter/execution-v0-manifest.js +4 -1
- package/src/converter/index.d.ts +1 -0
- package/src/converter/index.js +1 -0
- package/src/converter/manifest-inference.js +50 -29
- package/src/converter/parsers/diffusion.js +0 -3
- package/src/converter/parsers/transformer.js +4 -0
- package/src/converter/quantization-info.js +40 -16
- package/src/converter/quantizer.js +19 -12
- package/src/converter/rope-config.js +8 -6
- package/src/converter/shard-packer.d.ts +1 -1
- package/src/converter/shard-packer.js +4 -1
- package/src/converter/tokenizer-utils.d.ts +1 -0
- package/src/converter/tokenizer-utils.js +4 -1
- package/src/debug/config.js +123 -11
- package/src/debug/reference/hf_qwen35_linear_attn_debug.py +268 -0
- package/src/debug/signals.js +7 -1
- package/src/debug/tensor.d.ts +2 -0
- package/src/debug/tensor.js +13 -2
- package/src/distribution/p2p-control-plane.js +52 -12
- package/src/distribution/p2p-observability.js +43 -7
- package/src/distribution/p2p-webrtc-browser.js +20 -0
- package/src/distribution/shard-delivery.js +83 -27
- package/src/formats/gguf/types.js +33 -16
- package/src/formats/rdrr/groups.d.ts +12 -4
- package/src/formats/rdrr/groups.js +3 -6
- package/src/formats/rdrr/parsing.d.ts +4 -0
- package/src/formats/rdrr/parsing.js +53 -3
- package/src/formats/rdrr/types.d.ts +2 -1
- package/src/gpu/command-recorder.js +86 -61
- package/src/gpu/device.d.ts +1 -0
- package/src/gpu/device.js +73 -19
- package/src/gpu/kernel-tuner/benchmarks.js +326 -316
- package/src/gpu/kernel-tuner/cache.js +71 -4
- package/src/gpu/kernel-tuner/tuner.js +22 -4
- package/src/gpu/kernels/attention.js +15 -34
- package/src/gpu/kernels/backward/adam.js +62 -58
- package/src/gpu/kernels/backward/attention_backward.js +257 -169
- package/src/gpu/kernels/backward/conv2d_backward.js +14 -1
- package/src/gpu/kernels/cast.js +191 -149
- package/src/gpu/kernels/check-stop.js +33 -44
- package/src/gpu/kernels/conv2d.js +27 -17
- package/src/gpu/kernels/cross_entropy_loss.js +21 -15
- package/src/gpu/kernels/depthwise_conv2d.js +36 -26
- package/src/gpu/kernels/dequant.js +178 -126
- package/src/gpu/kernels/energy.d.ts +3 -21
- package/src/gpu/kernels/energy.js +111 -88
- package/src/gpu/kernels/feature-check.js +1 -1
- package/src/gpu/kernels/fused_ffn.js +84 -65
- package/src/gpu/kernels/fused_matmul_residual.js +56 -33
- package/src/gpu/kernels/fused_matmul_rmsnorm.js +62 -45
- package/src/gpu/kernels/gather.js +33 -15
- package/src/gpu/kernels/gelu.js +19 -11
- package/src/gpu/kernels/grouped_pointwise_conv2d.js +33 -23
- package/src/gpu/kernels/groupnorm.js +34 -23
- package/src/gpu/kernels/index.d.ts +8 -0
- package/src/gpu/kernels/index.js +6 -0
- package/src/gpu/kernels/kv-quantize.js +5 -2
- package/src/gpu/kernels/layernorm.js +35 -19
- package/src/gpu/kernels/logit-merge.js +5 -3
- package/src/gpu/kernels/matmul-selection.js +47 -4
- package/src/gpu/kernels/matmul.d.ts +2 -0
- package/src/gpu/kernels/matmul.js +59 -40
- package/src/gpu/kernels/modulate.js +23 -15
- package/src/gpu/kernels/moe.js +221 -175
- package/src/gpu/kernels/pixel_shuffle.js +22 -14
- package/src/gpu/kernels/relu.js +18 -10
- package/src/gpu/kernels/repeat_channels.js +25 -17
- package/src/gpu/kernels/residual.js +37 -27
- package/src/gpu/kernels/rmsnorm.js +66 -43
- package/src/gpu/kernels/rope.js +3 -0
- package/src/gpu/kernels/sample.js +27 -38
- package/src/gpu/kernels/sana_linear_attention.js +18 -10
- package/src/gpu/kernels/scale.js +18 -11
- package/src/gpu/kernels/shader-cache.js +4 -2
- package/src/gpu/kernels/silu.js +120 -72
- package/src/gpu/kernels/softmax.js +44 -25
- package/src/gpu/kernels/split_qg.d.ts +50 -0
- package/src/gpu/kernels/split_qg.js +46 -0
- package/src/gpu/kernels/split_qg.wgsl +58 -0
- package/src/gpu/kernels/split_qg_f16.wgsl +62 -0
- package/src/gpu/kernels/split_qkv.js +23 -13
- package/src/gpu/kernels/transpose.js +18 -10
- package/src/gpu/kernels/transpose.wgsl +5 -3
- package/src/gpu/kernels/upsample2d.js +21 -13
- package/src/gpu/kernels/utils.js +20 -13
- package/src/gpu/partitioned-buffer-pool.js +10 -2
- package/src/gpu/perf-guards.js +2 -9
- package/src/gpu/profiler.js +27 -22
- package/src/gpu/readback-utils.d.ts +16 -0
- package/src/gpu/readback-utils.js +41 -0
- package/src/gpu/submit-tracker.js +13 -0
- package/src/gpu/uniform-cache.d.ts +1 -0
- package/src/gpu/uniform-cache.js +30 -9
- package/src/gpu/weight-buffer.d.ts +1 -1
- package/src/gpu/weight-buffer.js +1 -1
- package/src/hotswap/intent-bundle.js +6 -0
- package/src/hotswap/manifest.d.ts +10 -1
- package/src/hotswap/manifest.js +12 -2
- package/src/hotswap/runtime.js +30 -8
- package/src/index-browser.d.ts +44 -0
- package/src/index-browser.js +14 -0
- package/src/inference/browser-harness-contract-helpers.d.ts +5 -0
- package/src/inference/browser-harness-contract-helpers.js +28 -0
- package/src/inference/browser-harness-diffusion-energy-suites.d.ts +2 -0
- package/src/inference/browser-harness-diffusion-energy-suites.js +269 -0
- package/src/inference/browser-harness-model-helpers.d.ts +16 -0
- package/src/inference/browser-harness-model-helpers.js +217 -0
- package/src/inference/browser-harness-report-helpers.d.ts +7 -0
- package/src/inference/browser-harness-report-helpers.js +42 -0
- package/src/inference/browser-harness-runtime-helpers.d.ts +61 -0
- package/src/inference/browser-harness-runtime-helpers.js +415 -0
- package/src/inference/browser-harness-suite-helpers.d.ts +28 -0
- package/src/inference/browser-harness-suite-helpers.js +268 -0
- package/src/inference/browser-harness-text-helpers.d.ts +27 -0
- package/src/inference/browser-harness-text-helpers.js +788 -0
- package/src/inference/browser-harness.d.ts +8 -0
- package/src/inference/browser-harness.js +149 -1996
- package/src/inference/kv-cache/base.js +140 -94
- package/src/inference/kv-cache/tiered.js +5 -3
- package/src/inference/moe-router.js +88 -56
- package/src/inference/multi-model-network.js +5 -3
- package/src/inference/network-evolution.d.ts +11 -2
- package/src/inference/network-evolution.js +20 -21
- package/src/inference/pipelines/context.d.ts +3 -0
- package/src/inference/pipelines/context.js +142 -2
- package/src/inference/pipelines/diffusion/helpers.js +10 -2
- package/src/inference/pipelines/diffusion/pipeline.js +2 -1
- package/src/inference/pipelines/diffusion/sd3-transformer.js +10 -10
- package/src/inference/pipelines/diffusion/text-encoder-gpu.js +8 -2
- package/src/inference/pipelines/diffusion/vae.js +3 -7
- package/src/inference/pipelines/energy/pipeline.js +27 -21
- package/src/inference/pipelines/energy/quintel.d.ts +5 -0
- package/src/inference/pipelines/energy/quintel.js +11 -0
- package/src/inference/pipelines/energy-head/row-head-pipeline.js +17 -13
- package/src/inference/pipelines/structured/json-head-pipeline.js +26 -11
- package/src/inference/pipelines/text/attention/output-projection.d.ts +12 -0
- package/src/inference/pipelines/text/attention/output-projection.js +8 -0
- package/src/inference/pipelines/text/attention/projections.d.ts +10 -1
- package/src/inference/pipelines/text/attention/projections.js +192 -112
- package/src/inference/pipelines/text/attention/record.js +77 -14
- package/src/inference/pipelines/text/attention/run.js +112 -14
- package/src/inference/pipelines/text/config.js +17 -4
- package/src/inference/pipelines/text/embed.js +2 -8
- package/src/inference/pipelines/text/execution-plan.js +46 -23
- package/src/inference/pipelines/text/execution-v0-contract-helpers.d.ts +59 -0
- package/src/inference/pipelines/text/execution-v0-contract-helpers.js +937 -0
- package/src/inference/pipelines/text/execution-v0-runtime-builders.d.ts +15 -0
- package/src/inference/pipelines/text/execution-v0-runtime-builders.js +279 -0
- package/src/inference/pipelines/text/execution-v0.js +62 -1013
- package/src/inference/pipelines/text/generator-runtime.js +5 -0
- package/src/inference/pipelines/text/generator-steps.d.ts +52 -0
- package/src/inference/pipelines/text/generator-steps.js +340 -221
- package/src/inference/pipelines/text/generator.js +56 -40
- package/src/inference/pipelines/text/init.d.ts +13 -0
- package/src/inference/pipelines/text/init.js +94 -25
- package/src/inference/pipelines/text/kernel-path-auto-select.js +2 -0
- package/src/inference/pipelines/text/kernel-trace.d.ts +2 -0
- package/src/inference/pipelines/text/kernel-trace.js +6 -0
- package/src/inference/pipelines/text/layer.js +4 -9
- package/src/inference/pipelines/text/linear-attention.d.ts +15 -0
- package/src/inference/pipelines/text/linear-attention.js +113 -9
- package/src/inference/pipelines/text/logits/gpu.js +12 -7
- package/src/inference/pipelines/text/logits/index.d.ts +6 -1
- package/src/inference/pipelines/text/logits/index.js +13 -12
- package/src/inference/pipelines/text/logits/utils.d.ts +7 -0
- package/src/inference/pipelines/text/logits/utils.js +9 -0
- package/src/inference/pipelines/text/lora-apply.js +50 -32
- package/src/inference/pipelines/text/model-load.js +282 -104
- package/src/inference/pipelines/text/moe-cache.js +5 -4
- package/src/inference/pipelines/text/moe-cpu-gptoss.js +74 -69
- package/src/inference/pipelines/text/moe-cpu.js +42 -38
- package/src/inference/pipelines/text/moe-gpu.js +110 -86
- package/src/inference/pipelines/text/ops.js +90 -90
- package/src/inference/pipelines/text/probes.js +9 -9
- package/src/inference/pipelines/text/sampling.js +52 -6
- package/src/inference/pipelines/text/weights.js +17 -7
- package/src/inference/pipelines/text.js +13 -1
- package/src/inference/speculative.d.ts +2 -2
- package/src/inference/speculative.js +4 -18
- package/src/inference/test-harness.d.ts +1 -1
- package/src/inference/test-harness.js +17 -7
- package/src/inference/tokenizer.d.ts +0 -5
- package/src/inference/tokenizer.js +4 -23
- package/src/inference/tokenizers/bpe.js +9 -0
- package/src/inference/tokenizers/bundled.js +20 -0
- package/src/inference/tokenizers/sentencepiece.js +12 -0
- package/src/loader/doppler-loader.js +38 -22
- package/src/loader/dtype-utils.js +3 -44
- package/src/loader/embedding-loader.js +7 -3
- package/src/loader/experts/expert-cache.js +13 -6
- package/src/loader/experts/expert-loader.js +10 -6
- package/src/loader/final-weights-loader.js +10 -4
- package/src/loader/layer-loader.js +2 -1
- package/src/loader/loader-state.js +2 -2
- package/src/loader/memory-monitor.js +8 -0
- package/src/loader/multi-model-loader.d.ts +14 -0
- package/src/loader/multi-model-loader.js +70 -24
- package/src/loader/shard-cache.js +84 -14
- package/src/loader/shard-resolver.js +25 -3
- package/src/loader/tensors/tensor-loader.js +214 -144
- package/src/loader/tensors/tensor-reader.js +76 -19
- package/src/loader/weight-downcast.js +1 -1
- package/src/memory/buffer-pool.d.ts +9 -1
- package/src/memory/buffer-pool.js +109 -44
- package/src/memory/unified-detect.js +1 -1
- package/src/rules/inference/dtype.rules.json +5 -0
- package/src/rules/inference/kernel-path.rules.json +24 -8
- package/src/rules/kernels/split-qg.rules.json +6 -0
- package/src/rules/rule-registry.js +27 -1
- package/src/storage/backends/opfs-store.js +68 -24
- package/src/storage/downloader.js +365 -83
- package/src/storage/index.d.ts +3 -0
- package/src/storage/index.js +3 -0
- package/src/storage/preflight.d.ts +2 -2
- package/src/storage/preflight.js +24 -2
- package/src/storage/quickstart-downloader.js +11 -5
- package/src/storage/registry.js +10 -4
- package/src/storage/reports.js +1 -1
- package/src/storage/shard-manager.d.ts +15 -1
- package/src/storage/shard-manager.js +55 -6
- package/src/storage/source-artifact-store.d.ts +52 -0
- package/src/storage/source-artifact-store.js +234 -0
- package/src/tooling/command-api-constants.d.ts +9 -0
- package/src/tooling/command-api-constants.js +9 -0
- package/src/tooling/command-api-family-normalizers.d.ts +9 -0
- package/src/tooling/command-api-family-normalizers.js +343 -0
- package/src/tooling/command-api-helpers.d.ts +25 -0
- package/src/tooling/command-api-helpers.js +262 -0
- package/src/tooling/command-api.js +16 -602
- package/src/tooling/command-envelope.js +4 -1
- package/src/tooling/command-runner-shared.js +52 -18
- package/src/tooling/conversion-config-materializer.js +3 -5
- package/src/tooling/lean-execution-contract.js +150 -3
- package/src/tooling/node-browser-command-runner.js +161 -271
- package/src/tooling/node-command-runner.js +29 -3
- package/src/tooling/node-converter.js +30 -1
- package/src/tooling/node-source-runtime.d.ts +1 -1
- package/src/tooling/node-source-runtime.js +120 -3
- package/src/tooling/node-webgpu.js +24 -21
- package/src/tooling/opfs-cache.js +21 -4
- package/src/tooling/runtime-input-composition.d.ts +38 -0
- package/src/tooling/runtime-input-composition.js +86 -0
- package/src/tooling/source-runtime-bundle.d.ts +40 -5
- package/src/tooling/source-runtime-bundle.js +261 -34
- package/src/tooling/source-runtime-materializer.d.ts +6 -0
- package/src/tooling/source-runtime-materializer.js +93 -0
- package/src/training/attention-backward.js +32 -17
- package/src/training/autograd.js +80 -52
- package/src/training/checkpoint-watch.d.ts +2 -1
- package/src/training/checkpoint-watch.js +39 -6
- package/src/training/checkpoint.js +40 -11
- package/src/training/clip.js +2 -1
- package/src/training/datasets/token-batch.js +20 -8
- package/src/training/distillation/checkpoint-watch.js +1 -0
- package/src/training/distillation/student-fixture.d.ts +22 -0
- package/src/training/distillation/student-fixture.js +846 -0
- package/src/training/distillation/suite-data.d.ts +45 -0
- package/src/training/distillation/suite-data.js +189 -0
- package/src/training/lora-pipeline.js +4 -7
- package/src/training/lora.js +26 -12
- package/src/training/loss.js +5 -6
- package/src/training/objectives/cross_entropy.js +2 -5
- package/src/training/objectives/distill_kd.js +4 -8
- package/src/training/objectives/distill_triplet.js +4 -8
- package/src/training/objectives/ul_stage2_base.js +4 -8
- package/src/training/operator-command.js +2 -0
- package/src/training/optimizer.js +19 -7
- package/src/training/runner.js +2 -1
- package/src/training/suite.js +18 -978
- package/src/training/tensor-factory.d.ts +9 -0
- package/src/training/tensor-factory.js +13 -0
- package/src/training/trainer.js +3 -5
- package/src/training/ul_dataset.js +3 -5
- package/src/training/workloads.js +70 -79
- package/src/types/model.d.ts +5 -0
- package/src/version.js +1 -1
- package/tools/convert-safetensors-node.js +22 -16
- package/tools/doppler-cli.js +50 -26
package/src/config/param-validator.js

@@ -2,6 +2,7 @@ import { log } from '../debug/index.js';
 import { PARAM_CATEGORIES, CategoryRules } from './param-categories.js';
 import { TOOLING_INTENTS, TOOLING_DIAGNOSTICS } from './schema/tooling.schema.js';
 import { validateEcosystemConfig } from './schema/ecosystem.schema.js';
+import { isPlainObject } from '../utils/plain-object.js';
 
 export function validateCallTimeOptions(options) {
   if (!options) return;
@@ -33,7 +34,23 @@ export function validateCallTimeOptions(options) {
 }
 
 export function validateRuntimeOverrides(overrides) {
+  if (!isPlainObject(overrides)) {
+    throw new Error('DopplerConfigError: runtime overrides must be an object when provided.');
+  }
+
+  assertRequiredRuntimeOverrideNotNull(overrides, 'shared');
+  assertRequiredRuntimeOverrideNotNull(overrides, 'loading');
+  assertRequiredRuntimeOverrideNotNull(overrides, 'inference');
+  assertRequiredRuntimeOverrideNotNull(overrides, 'emulation');
+  assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'batching', 'runtime.inference');
+  assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'compute', 'runtime.inference');
+  assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'generation', 'runtime.inference');
+  assertRequiredRuntimeOverrideNotNull(overrides?.inference, 'kernelPathPolicy', 'runtime.inference');
+
   const modelOverrides = overrides?.inference?.modelOverrides;
+  if (modelOverrides !== undefined && modelOverrides !== null && !isPlainObject(modelOverrides)) {
+    throw new Error('DopplerConfigError: runtime.inference.modelOverrides must be an object when provided.');
+  }
   if (!modelOverrides) return;
 
   const params = flattenObject(modelOverrides);
@@ -214,6 +231,15 @@ function validateKernelPathPolicy(label, value) {
   if (!value || typeof value !== 'object' || Array.isArray(value)) {
     throw new Error(`DopplerConfigError: ${label} must be an object.`);
   }
+  if (
+    value.sourceScope !== undefined
+    && value.allowSources !== undefined
+    && !arraysEqual(value.sourceScope, value.allowSources)
+  ) {
+    throw new Error(
+      `DopplerConfigError: ${label}.sourceScope and ${label}.allowSources must match exactly when both are provided.`
+    );
+  }
   if (value.mode !== 'locked' && value.mode !== 'capability-aware') {
     throw new Error(`DopplerConfigError: ${label}.mode must be "locked" or "capability-aware".`);
   }
@@ -224,12 +250,31 @@ function validateKernelPathPolicy(label, value) {
   if (value.onIncompatible !== 'error' && value.onIncompatible !== 'remap') {
     throw new Error(`DopplerConfigError: ${label}.onIncompatible must be "error" or "remap".`);
   }
-  const validSources = new Set(['model', 'manifest', 'config', '
+  const validSources = new Set(['model', 'manifest', 'config', 'execution-v0']);
   for (const source of sourceScope) {
     if (!validSources.has(source)) {
       throw new Error(
-        `DopplerConfigError: ${label}.sourceScope entries must be model|manifest|config|
+        `DopplerConfigError: ${label}.sourceScope entries must be model|manifest|config|execution-v0.`
       );
     }
   }
 }
+
+function assertRequiredRuntimeOverrideNotNull(container, key, prefix = 'runtime') {
+  if (!isPlainObject(container) || !Object.prototype.hasOwnProperty.call(container, key)) {
+    return;
+  }
+  if (container[key] === null) {
+    throw new Error(`DopplerConfigError: ${prefix}.${key} must not be null.`);
+  }
+}
+
+function arraysEqual(left, right) {
+  if (!Array.isArray(left) || !Array.isArray(right)) {
+    return false;
+  }
+  if (left.length !== right.length) {
+    return false;
+  }
+  return left.every((value, index) => value === right[index]);
+}

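For context, a minimal sketch of what the tightened validation above rejects; the deep import path is an assumption (the diff only shows the module internally as src/config/param-validator.js), but each failing case maps directly to a check added in this release.

// Hypothetical import path; adjust to however the package exposes its config validators.
import { validateRuntimeOverrides } from '@simulatte/doppler/src/config/param-validator.js';

const cases = [
  [],                                     // not a plain object → "runtime overrides must be an object"
  { inference: null },                    // required section explicitly null → "runtime.inference must not be null"
  { inference: { modelOverrides: 42 } },  // modelOverrides present but not an object
];

for (const overrides of cases) {
  try {
    validateRuntimeOverrides(overrides);
  } catch (error) {
    console.log(error.message); // each case logs a DopplerConfigError message
  }
}

// Separately, validateKernelPathPolicy now insists that sourceScope and allowSources
// match exactly when both are supplied, and accepts 'execution-v0' as a source entry.
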
package/src/config/presets/kernel-paths/{gemma2-q4k-dequant-f32a.json → gemma2-q4k-dequant-f32a-nosubgroups.json}

@@ -1,7 +1,7 @@
 {
-  "id": "gemma2-q4k-dequant-f32a",
-  "name": "Gemma 2 Q4K Dequant (F32 activations)",
-  "description": "Q4K weights dequantized to F16 with F32 activations. Non-fused
+  "id": "gemma2-q4k-dequant-f32a-nosubgroups",
+  "name": "Gemma 2 Q4K Dequant (F32 activations, no subgroups)",
+  "description": "Q4K weights dequantized to F16 with F32 activations. Non-fused path with no subgroup requirement that still requires shader-f16 kernels.",
   "activationDtype": "f32",
 
   "decode": {

package/src/config/presets/kernel-paths/gemma3-f16-fused-f32a-online-streamingprefill.json

@@ -0,0 +1,223 @@
+{
+  "id": "gemma3-f16-fused-f32a-online-streamingprefill",
+  "name": "Gemma 3 F16 (F32 activations, online, streaming prefill)",
+  "description": "F16 weights with F32 activations, online decode attention, and streaming prefill attention for Gemma 3 stability-sensitive runs.",
+  "activationDtype": "f32",
+  "kvDtype": "f16",
+  "decode": {
+    "steps": [
+      {
+        "op": "input_norm",
+        "kernel": "rmsnorm.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "q_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.self_attn.q_proj"
+      },
+      {
+        "op": "k_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.self_attn.k_proj"
+      },
+      {
+        "op": "v_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.self_attn.v_proj"
+      },
+      {
+        "op": "rope_q",
+        "kernel": "rope.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "rope_k",
+        "kernel": "rope.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "attention",
+        "kernel": "attention_decode_online_f16kv.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "o_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.self_attn.o_proj"
+      },
+      {
+        "op": "attn_residual",
+        "kernel": "residual.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "post_attn_norm",
+        "kernel": "rmsnorm.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "gate_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.mlp.gate_proj"
+      },
+      {
+        "op": "up_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.mlp.up_proj"
+      },
+      {
+        "op": "activation",
+        "kernel": "gelu.wgsl",
+        "entry": "main",
+        "constants": {
+          "HAS_GATE": true
+        }
+      },
+      {
+        "op": "down_proj",
+        "kernel": "matmul_gemv_subgroup.wgsl",
+        "entry": "main_vec4",
+        "weights": "layer.{L}.mlp.down_proj"
+      },
+      {
+        "op": "ffn_residual",
+        "kernel": "residual.wgsl",
+        "entry": "main"
+      }
+    ]
+  },
+  "prefill": {
+    "steps": [
+      {
+        "op": "input_norm",
+        "kernel": "rmsnorm.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "q_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.self_attn.q_proj"
+      },
+      {
+        "op": "k_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.self_attn.k_proj"
+      },
+      {
+        "op": "v_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.self_attn.v_proj"
+      },
+      {
+        "op": "rope_q",
+        "kernel": "rope.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "rope_k",
+        "kernel": "rope.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "attention",
+        "kernel": "attention_streaming_f16kv.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "o_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.self_attn.o_proj"
+      },
+      {
+        "op": "attn_residual",
+        "kernel": "residual.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "post_attn_norm",
+        "kernel": "rmsnorm.wgsl",
+        "entry": "main"
+      },
+      {
+        "op": "gate_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.mlp.gate_proj"
+      },
+      {
+        "op": "up_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.mlp.up_proj"
+      },
+      {
+        "op": "activation",
+        "kernel": "gelu.wgsl",
+        "entry": "main",
+        "constants": {
+          "HAS_GATE": true
+        }
+      },
+      {
+        "op": "down_proj",
+        "kernel": "matmul_f16w_f32a_tiled.wgsl",
+        "entry": "main",
+        "weights": "layer.{L}.mlp.down_proj"
+      },
+      {
+        "op": "ffn_residual",
+        "kernel": "residual.wgsl",
+        "entry": "main"
+      }
+    ]
+  },
+  "preLayer": [
+    {
+      "op": "embed",
+      "kernel": "gather.wgsl",
+      "entry": "main",
+      "weights": "embed_tokens"
+    }
+  ],
+  "postLayer": [
+    {
+      "op": "final_norm",
+      "kernel": "rmsnorm.wgsl",
+      "entry": "main"
+    },
+    {
+      "op": "lm_head",
+      "kernel": "matmul_gemv_subgroup.wgsl",
+      "entry": "main_multicol",
+      "weights": "lm_head",
+      "constants": {
+        "MULTICOL_COLS_PER_WG": 64,
+        "MULTICOL_THREADS_PER_COL": 4
+      }
+    },
+    {
+      "op": "lm_head_prefill",
+      "kernel": "matmul_f16w_f32a_tiled.wgsl",
+      "entry": "main",
+      "weights": "lm_head"
+    }
+  ],
+  "sampling": [
+    {
+      "op": "sample",
+      "kernel": "sample.wgsl",
+      "entry": "sample_single_pass"
+    }
+  ]
+}

package/src/config/presets/kernel-paths/{gemma3-q4k-dequant-f32a.json → gemma3-q4k-dequant-f32a-nosubgroups.json}

@@ -1,7 +1,7 @@
 {
-  "id": "gemma3-q4k-dequant-f32a",
-  "name": "Gemma 3 Q4K Dequant (F32 activations)",
-  "description": "Q4K weights dequantized to F16 with F32 activations
+  "id": "gemma3-q4k-dequant-f32a-nosubgroups",
+  "name": "Gemma 3 Q4K Dequant (F32 activations, no subgroups)",
+  "description": "Q4K weights dequantized to F16 with F32 activations. Subgroup-free non-online path for Gemma 3 that still requires shader-f16 kernels.",
   "activationDtype": "f32",
   "kvDtype": "f16",
 

package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32w-f32a-online.json

@@ -0,0 +1,56 @@
+{
+  "id": "gemma3-q4k-dequant-f32w-f32a-online",
+  "name": "Gemma 3 Q4K Dequant (F32 projection weights, F32 activations, online decode)",
+  "description": "Q4K projection weights dequantized to F32 with F32 activations. Tied embeddings and LM head stay on the native F16 path. Decode uses online attention; prefill uses streaming attention.",
+  "activationDtype": "f32",
+  "kvDtype": "f16",
+  "decode": {
+    "steps": [
+      { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "q_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
+      { "op": "k_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
+      { "op": "v_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
+      { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "attention", "kernel": "attention_decode_online_f16kv.wgsl", "entry": "main" },
+      { "op": "o_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
+      { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
+      { "op": "post_attn_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "gate_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
+      { "op": "up_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
+      { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
+      { "op": "down_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
+      { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
+    ]
+  },
+  "prefill": {
+    "steps": [
+      { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "q_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
+      { "op": "k_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
+      { "op": "v_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
+      { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "attention", "kernel": "attention_streaming_f16kv.wgsl", "entry": "main" },
+      { "op": "o_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
+      { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
+      { "op": "post_attn_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "gate_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
+      { "op": "up_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
+      { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
+      { "op": "down_proj", "kernel": "matmul_f32.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
+      { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
+    ]
+  },
+  "preLayer": [
+    { "op": "embed", "kernel": "gather_f16.wgsl", "entry": "main", "weights": "embed_tokens" }
+  ],
+  "postLayer": [
+    { "op": "final_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+    { "op": "lm_head", "kernel": "matmul_gemv_subgroup.wgsl", "entry": "main_multicol", "weights": "lm_head", "constants": { "MULTICOL_COLS_PER_WG": 64, "MULTICOL_THREADS_PER_COL": 4 } },
+    { "op": "lm_head_prefill", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "lm_head" }
+  ],
+  "sampling": [
+    { "op": "sample", "kernel": "sample.wgsl", "entry": "sample_single_pass" }
+  ]
+}

package/src/config/presets/kernel-paths/lfm2-q4k-dequant-f32a-nosubgroups.json

@@ -0,0 +1,61 @@
+{
+  "id": "lfm2-q4k-dequant-f32a-nosubgroups",
+  "name": "LFM2 Q4K Dequant (F32 activations, no subgroups)",
+  "description": "Subgroup-free LFM2 Q4K path: F32 activations with tiled prefill matmul and small-kernel prefill attention. Still requires shader-f16 kernels.",
+  "activationDtype": "f32",
+  "kvDtype": "f16",
+
+  "decode": {
+    "steps": [
+      { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "q_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
+      { "op": "k_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
+      { "op": "v_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
+      { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "attention", "kernel": "attention_decode_chunked_f16kv.wgsl", "entry": "main" },
+      { "op": "o_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
+      { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
+      { "op": "post_attn_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "gate_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
+      { "op": "up_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
+      { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
+      { "op": "down_proj", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
+      { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
+    ]
+  },
+
+  "prefill": {
+    "steps": [
+      { "op": "input_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "q_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.q_proj" },
+      { "op": "k_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.k_proj" },
+      { "op": "v_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.v_proj" },
+      { "op": "rope_q", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "rope_k", "kernel": "rope.wgsl", "entry": "main" },
+      { "op": "attention", "kernel": "attention_small_f16kv.wgsl", "entry": "main" },
+      { "op": "o_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.self_attn.o_proj" },
+      { "op": "attn_residual", "kernel": "residual.wgsl", "entry": "main" },
+      { "op": "post_attn_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+      { "op": "gate_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.mlp.gate_proj" },
+      { "op": "up_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.mlp.up_proj" },
+      { "op": "activation", "kernel": "gelu.wgsl", "entry": "main", "constants": { "HAS_GATE": true } },
+      { "op": "down_proj", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "layer.{L}.mlp.down_proj" },
+      { "op": "ffn_residual", "kernel": "residual.wgsl", "entry": "main" }
+    ]
+  },
+
+  "preLayer": [
+    { "op": "embed", "kernel": "gather_f16.wgsl", "entry": "main", "weights": "embed_tokens" }
+  ],
+
+  "postLayer": [
+    { "op": "final_norm", "kernel": "rmsnorm.wgsl", "entry": "main" },
+    { "op": "lm_head", "kernel": "matmul_f16w_f32a.wgsl", "entry": "main", "weights": "lm_head" },
+    { "op": "lm_head_prefill", "kernel": "matmul_f16w_f32a_tiled.wgsl", "entry": "main", "weights": "lm_head" }
+  ],
+
+  "sampling": [
+    { "op": "sample", "kernel": "sample.wgsl", "entry": "sample_single_pass" }
+  ]
+}

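To make the preset structure above concrete, here is a small illustrative helper (not part of the package) that walks a kernel-path preset shaped like the JSON above and reports which WGSL kernels each phase dispatches; the field names (decode.steps, prefill.steps, preLayer, postLayer, sampling, and per-step op/kernel) follow the presets shown in this diff.

// Illustrative only: summarize which WGSL kernels a kernel-path preset references per phase.
function listKernelsByPhase(preset) {
  const phases = {
    decode: preset.decode?.steps ?? [],
    prefill: preset.prefill?.steps ?? [],
    preLayer: preset.preLayer ?? [],
    postLayer: preset.postLayer ?? [],
    sampling: preset.sampling ?? [],
  };
  const summary = {};
  for (const [phase, steps] of Object.entries(phases)) {
    summary[phase] = [...new Set(steps.map((step) => step.kernel))];
  }
  return summary;
}

// Applied to lfm2-q4k-dequant-f32a-nosubgroups above, decode reports matmul_f16w_f32a.wgsl and
// prefill reports matmul_f16w_f32a_tiled.wgsl, with no *_subgroup.wgsl kernels in any phase —
// which is exactly what the "no subgroups" variants are for.
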
package/src/config/presets/kernel-paths/registry.json

@@ -16,11 +16,18 @@
     "notes": "Default Gemma 2 Q4K dequant path for baseline F16 activation workflows."
   },
   {
-    "id": "gemma2-q4k-dequant-f32a",
-    "file": "gemma2-q4k-dequant-f32a.json",
+    "id": "gemma2-q4k-dequant-f32a-nosubgroups",
+    "file": "gemma2-q4k-dequant-f32a-nosubgroups.json",
     "status": "canonical",
-    "statusReason": "
-    "notes": "Subgroup-free Gemma 2 Q4K dequant path with F32 activations."
+    "statusReason": "subgroup-free",
+    "notes": "Subgroup-free Gemma 2 Q4K dequant path with F32 activations. Still requires shader-f16 kernels."
+  },
+  {
+    "id": "gemma2-q4k-dequant-f32a",
+    "aliasOf": "gemma2-q4k-dequant-f32a-nosubgroups",
+    "status": "legacy",
+    "statusReason": "compatibility-alias",
+    "notes": "Legacy alias for gemma2-q4k-dequant-f32a-nosubgroups."
   },
   {
     "id": "gemma2-f16-f16a",
@@ -50,6 +57,13 @@
     "statusReason": "benchmark-probe",
     "notes": "Experimental fused F32 pipeline variant for fast decode."
   },
+  {
+    "id": "gemma3-f16-fused-f32a-online-streamingprefill",
+    "file": "gemma3-f16-fused-f32a-online-streamingprefill.json",
+    "status": "experimental",
+    "statusReason": "stability-probe",
+    "notes": "Gemma 3 F16/F32 online path with streaming prefill attention instead of the small-tile prefill kernel."
+  },
   {
     "id": "gemma3-q4k-dequant-f16a-online",
     "file": "gemma3-q4k-dequant-f16a-online.json",
@@ -58,11 +72,18 @@
     "notes": "Experimental Gemma 3 Q4K path using online decode attention on subgroup-capable GPUs."
   },
   {
-    "id": "gemma3-q4k-dequant-f32a",
-    "file": "gemma3-q4k-dequant-f32a.json",
+    "id": "gemma3-q4k-dequant-f32a-nosubgroups",
+    "file": "gemma3-q4k-dequant-f32a-nosubgroups.json",
     "status": "canonical",
-    "statusReason": "
-    "notes": "Gemma 3 Q4K dequant path with F32
+    "statusReason": "subgroup-free",
+    "notes": "Subgroup-free Gemma 3 Q4K dequant path with F32 activations. Still requires shader-f16 kernels."
+  },
+  {
+    "id": "gemma3-q4k-dequant-f32a",
+    "aliasOf": "gemma3-q4k-dequant-f32a-nosubgroups",
+    "status": "legacy",
+    "statusReason": "compatibility-alias",
+    "notes": "Legacy alias for gemma3-q4k-dequant-f32a-nosubgroups."
   },
   {
     "id": "gemma3-q4k-dequant-f32a-online",
@@ -71,6 +92,13 @@
     "statusReason": "default",
     "notes": "Gemma 3 Q4K dequant default: subgroup GEMV + online attention + tuned lm_head multicol, F32 activations."
   },
+  {
+    "id": "gemma3-q4k-dequant-f32w-f32a-online",
+    "file": "gemma3-q4k-dequant-f32w-f32a-online.json",
+    "status": "experimental",
+    "statusReason": "accuracy-probe",
+    "notes": "Gemma 3 Q4K dequant path that keeps matmul weights in F32 and runs F32 matmul kernels for numeric-sensitivity debugging."
+  },
   {
     "id": "lfm2-q4k-dequant-f32a-online",
     "file": "lfm2-q4k-dequant-f32a-online.json",
@@ -78,6 +106,13 @@
     "statusReason": "default",
     "notes": "LFM2 Q4K default: subgroup GEMV decode with tiled fast-prefill path and F32 activations."
   },
+  {
+    "id": "lfm2-q4k-dequant-f32a-nosubgroups",
+    "file": "lfm2-q4k-dequant-f32a-nosubgroups.json",
+    "status": "canonical",
+    "statusReason": "subgroup-free",
+    "notes": "Subgroup-free LFM2 Q4K dequant path with F32 activations and tiled prefill. Still requires shader-f16 kernels."
+  },
   {
     "id": "embeddinggemma-f16-f32a",
     "file": "embeddinggemma-f16-f32a.json",

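The registry entries above keep the old ids alive as aliasOf records. The package's actual resolver is not part of this diff, so the following is only a sketch of how such an alias could be chased to its canonical entry:

// Illustrative only: resolve a kernel-path id through any aliasOf indirection.
function resolveKernelPathEntry(entries, id, seen = new Set()) {
  const entry = entries.find((candidate) => candidate.id === id);
  if (!entry) {
    throw new Error(`Unknown kernel path id: ${id}`);
  }
  if (!entry.aliasOf) {
    return entry;
  }
  if (seen.has(entry.id)) {
    throw new Error(`Alias cycle detected at: ${entry.id}`);
  }
  seen.add(entry.id);
  return resolveKernelPathEntry(entries, entry.aliasOf, seen);
}

// resolveKernelPathEntry(registryEntries, 'gemma3-q4k-dequant-f32a') returns the canonical
// gemma3-q4k-dequant-f32a-nosubgroups entry, so configs written against the old id keep working.
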
package/src/config/presets/models/gemma2.json

@@ -9,6 +9,7 @@
 
   "inference": {
     "attention": {
+      "queryPreAttnScalar": 256,
       "slidingWindow": 4096,
       "attnLogitSoftcapping": 50.0,
       "queryKeyNorm": false
@@ -40,8 +41,8 @@
       "f32": "gemma2-f16-f32a"
     },
     "q4k": {
-      "f16": "gemma2-q4k-dequant-
-      "f32": "gemma2-q4k-dequant-f32a"
+      "f16": "gemma2-q4k-dequant-f16a",
+      "f32": "gemma2-q4k-dequant-f32a-nosubgroups"
     }
   }
 },

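Assuming queryPreAttnScalar mirrors the upstream Gemma 2 query_pre_attn_scalar setting, attention queries are scaled by 1/sqrt(256) = 1/16 rather than by 1/sqrt(head_dim), which is why the value is pinned in the preset instead of being derived from the head dimension.
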
package/src/config/presets/models/qwen3.json

@@ -10,7 +10,8 @@
   "inference": {
     "attention": {
       "slidingWindow": null,
-      "queryKeyNorm": true
+      "queryKeyNorm": true,
+      "attentionOutputGate": true
     },
     "output": {
       "scaleEmbeddings": false
@@ -39,8 +40,8 @@
   },
 
   "detection": {
-    "architecturePatterns": ["qwen3", "
-    "modelTypePatterns": ["
+    "architecturePatterns": ["qwen3", "Qwen3ForCausalLM", "Qwen2ForCausalLM"],
+    "modelTypePatterns": ["qwen3", "qwen2"],
     "configPatterns": {
       "model_type": "qwen2"
     }

package/src/config/presets/models/qwen3_5.json

@@ -0,0 +1,16 @@
+{
+  "id": "qwen3_5",
+  "name": "Qwen 3.5",
+  "extends": "qwen3",
+
+  "inference": {
+    "normalization": {
+      "rmsNormWeightOffset": true
+    }
+  },
+
+  "detection": {
+    "architecturePatterns": ["qwen3_5", "Qwen3_5ForCausalLM", "Qwen3_5ForConditionalGeneration"],
+    "modelTypePatterns": ["qwen3_5", "qwen3_5_text"]
+  }
+}

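The new qwen3_5 preset layers on top of qwen3 via extends. How that composition is implemented is not shown in this diff; as a rough sketch under the assumption of a child-wins recursive merge, it would behave like this:

// Illustrative only: child preset values override the base preset named by "extends".
function mergePresets(base, child) {
  const merged = { ...base };
  for (const [key, value] of Object.entries(child)) {
    const isNestedObject = value && typeof value === 'object' && !Array.isArray(value);
    merged[key] = isNestedObject ? mergePresets(base[key] ?? {}, value) : value;
  }
  return merged;
}

// mergePresets(qwen3Preset, qwen3_5Preset) keeps qwen3's attention settings
// (queryKeyNorm, attentionOutputGate) while adding normalization.rmsNormWeightOffset: true
// and swapping in the qwen3_5 detection patterns.
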