@simulatte/doppler 0.1.9 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -116
- package/package.json +5 -162
- package/BRANDING.md +0 -14
- package/CHANGELOG.md +0 -158
- package/LICENSE +0 -201
- package/NOTICE +0 -5
- package/SECURITY.md +0 -19
- package/src/adapters/adapter-manager.d.ts +0 -200
- package/src/adapters/adapter-manager.js +0 -509
- package/src/adapters/adapter-manifest.d.ts +0 -290
- package/src/adapters/adapter-manifest.js +0 -320
- package/src/adapters/adapter-registry.d.ts +0 -192
- package/src/adapters/adapter-registry.js +0 -477
- package/src/adapters/index.d.ts +0 -89
- package/src/adapters/index.js +0 -42
- package/src/adapters/lora-loader.d.ts +0 -105
- package/src/adapters/lora-loader.js +0 -414
- package/src/bootstrap.d.ts +0 -1
- package/src/bootstrap.js +0 -30
- package/src/bridge/extension/background.d.ts +0 -14
- package/src/bridge/extension/background.js +0 -168
- package/src/bridge/extension/manifest.json +0 -34
- package/src/bridge/extension-client.d.ts +0 -114
- package/src/bridge/extension-client.js +0 -409
- package/src/bridge/index.d.ts +0 -69
- package/src/bridge/index.js +0 -53
- package/src/bridge/protocol.d.ts +0 -96
- package/src/bridge/protocol.js +0 -130
- package/src/browser/browser-converter.d.ts +0 -71
- package/src/browser/browser-converter.js +0 -977
- package/src/browser/file-picker.d.ts +0 -63
- package/src/browser/file-picker.js +0 -281
- package/src/browser/gguf-importer.d.ts +0 -136
- package/src/browser/gguf-importer.js +0 -532
- package/src/browser/gguf-parser-browser.d.ts +0 -14
- package/src/browser/gguf-parser-browser.js +0 -17
- package/src/browser/quantization.d.ts +0 -69
- package/src/browser/quantization.js +0 -328
- package/src/browser/safetensors-parser-browser.d.ts +0 -193
- package/src/browser/safetensors-parser-browser.js +0 -347
- package/src/browser/shard-io-browser.d.ts +0 -57
- package/src/browser/shard-io-browser.js +0 -89
- package/src/browser/tensor-source-download.d.ts +0 -27
- package/src/browser/tensor-source-download.js +0 -245
- package/src/browser/tensor-source-file.d.ts +0 -26
- package/src/browser/tensor-source-file.js +0 -53
- package/src/browser/tensor-source-http.d.ts +0 -29
- package/src/browser/tensor-source-http.js +0 -130
- package/src/client/doppler-api.browser.d.ts +0 -1
- package/src/client/doppler-api.browser.js +0 -310
- package/src/client/doppler-api.d.ts +0 -83
- package/src/client/doppler-api.js +0 -323
- package/src/client/doppler-provider/generation.d.ts +0 -25
- package/src/client/doppler-provider/generation.js +0 -126
- package/src/client/doppler-provider/index.d.ts +0 -2
- package/src/client/doppler-provider/index.js +0 -3
- package/src/client/doppler-provider/model-manager.d.ts +0 -71
- package/src/client/doppler-provider/model-manager.js +0 -739
- package/src/client/doppler-provider/provider.d.ts +0 -5
- package/src/client/doppler-provider/provider.js +0 -102
- package/src/client/doppler-provider/source-runtime.d.ts +0 -23
- package/src/client/doppler-provider/source-runtime.js +0 -641
- package/src/client/doppler-provider/types.d.ts +0 -127
- package/src/client/doppler-provider/types.js +0 -17
- package/src/client/doppler-provider.d.ts +0 -46
- package/src/client/doppler-provider.js +0 -36
- package/src/client/doppler-registry.d.ts +0 -23
- package/src/client/doppler-registry.js +0 -86
- package/src/client/doppler-registry.json +0 -40
- package/src/config/README.md +0 -69
- package/src/config/backward-registry-loader.d.ts +0 -3
- package/src/config/backward-registry-loader.js +0 -23
- package/src/config/execution-contract-check.d.ts +0 -82
- package/src/config/execution-contract-check.js +0 -317
- package/src/config/execution-v0-contract-check.d.ts +0 -94
- package/src/config/execution-v0-contract-check.js +0 -349
- package/src/config/execution-v0-graph-contract-check.d.ts +0 -20
- package/src/config/execution-v0-graph-contract-check.js +0 -64
- package/src/config/index.d.ts +0 -63
- package/src/config/index.js +0 -31
- package/src/config/kernel-path-contract-check.d.ts +0 -76
- package/src/config/kernel-path-contract-check.js +0 -507
- package/src/config/kernel-path-loader.d.ts +0 -170
- package/src/config/kernel-path-loader.js +0 -570
- package/src/config/kernels/backward-registry.json +0 -99
- package/src/config/kernels/kernel-ref-digests.d.ts +0 -1
- package/src/config/kernels/kernel-ref-digests.js +0 -228
- package/src/config/kernels/kernel-ref.d.ts +0 -17
- package/src/config/kernels/kernel-ref.js +0 -75
- package/src/config/kernels/moe/gpt-oss.paths.json +0 -49
- package/src/config/kernels/moe/mixtral.paths.json +0 -46
- package/src/config/kernels/registry.d.ts +0 -86
- package/src/config/kernels/registry.js +0 -116
- package/src/config/kernels/registry.json +0 -7443
- package/src/config/loader.d.ts +0 -57
- package/src/config/loader.js +0 -584
- package/src/config/merge-contract-check.d.ts +0 -16
- package/src/config/merge-contract-check.js +0 -383
- package/src/config/merge-helpers.d.ts +0 -58
- package/src/config/merge-helpers.js +0 -175
- package/src/config/merge.d.ts +0 -143
- package/src/config/merge.js +0 -414
- package/src/config/param-categories.d.ts +0 -17
- package/src/config/param-categories.js +0 -72
- package/src/config/param-validator.d.ts +0 -26
- package/src/config/param-validator.js +0 -280
- package/src/config/platforms/amd-rdna3.json +0 -16
- package/src/config/platforms/apple-m1.json +0 -16
- package/src/config/platforms/apple-m2.json +0 -16
- package/src/config/platforms/apple-m3.json +0 -16
- package/src/config/platforms/generic.json +0 -14
- package/src/config/platforms/loader.d.ts +0 -65
- package/src/config/platforms/loader.js +0 -155
- package/src/config/platforms/nvidia-rtx30.json +0 -16
- package/src/config/platforms/nvidia-rtx40.json +0 -16
- package/src/config/presets/kernel-paths/embeddinggemma-f16-f32a.json +0 -60
- package/src/config/presets/kernel-paths/embeddinggemma-f32-f32a.json +0 -60
- package/src/config/presets/kernel-paths/embeddinggemma-q4k-dequant-f32a.json +0 -60
- package/src/config/presets/kernel-paths/gemma2-f16-f16a.json +0 -61
- package/src/config/presets/kernel-paths/gemma2-f16-f32a.json +0 -60
- package/src/config/presets/kernel-paths/gemma2-q4k-dequant-f16a.json +0 -61
- package/src/config/presets/kernel-paths/gemma2-q4k-dequant-f32a-nosubgroups.json +0 -60
- package/src/config/presets/kernel-paths/gemma2-q4k-fused-f32a.json +0 -57
- package/src/config/presets/kernel-paths/gemma3-f16-fused-f16a-online.json +0 -200
- package/src/config/presets/kernel-paths/gemma3-f16-fused-f32a-online-streamingprefill.json +0 -223
- package/src/config/presets/kernel-paths/gemma3-f16-fused-f32a-online.json +0 -223
- package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f16a-online.json +0 -60
- package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32a-nosubgroups.json +0 -61
- package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32a-online.json +0 -61
- package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32a-small-attn.json +0 -61
- package/src/config/presets/kernel-paths/gemma3-q4k-dequant-f32w-f32a-online.json +0 -56
- package/src/config/presets/kernel-paths/lfm2-q4k-dequant-f32a-nosubgroups.json +0 -61
- package/src/config/presets/kernel-paths/lfm2-q4k-dequant-f32a-online.json +0 -61
- package/src/config/presets/kernel-paths/registry.json +0 -145
- package/src/config/presets/models/deepseek.json +0 -20
- package/src/config/presets/models/diffusion.json +0 -10
- package/src/config/presets/models/embeddinggemma.json +0 -74
- package/src/config/presets/models/functiongemma.json +0 -31
- package/src/config/presets/models/gemma2.json +0 -60
- package/src/config/presets/models/gemma3.json +0 -78
- package/src/config/presets/models/gemma4.json +0 -61
- package/src/config/presets/models/gpt-oss.json +0 -68
- package/src/config/presets/models/granite-docling.json +0 -70
- package/src/config/presets/models/janus-text.json +0 -27
- package/src/config/presets/models/kimi-k2.json +0 -25
- package/src/config/presets/models/lfm2.json +0 -88
- package/src/config/presets/models/llama3.json +0 -40
- package/src/config/presets/models/mamba.json +0 -34
- package/src/config/presets/models/mixtral.json +0 -37
- package/src/config/presets/models/modernbert.json +0 -32
- package/src/config/presets/models/qwen3.json +0 -49
- package/src/config/presets/models/qwen3_5.json +0 -16
- package/src/config/presets/models/qwen3_vl.json +0 -40
- package/src/config/presets/models/transformer.json +0 -78
- package/src/config/presets/models/translategemma.json +0 -30
- package/src/config/presets/platforms/nvidia-gb200-8gpu.json +0 -45
- package/src/config/presets/platforms/nvidia-gb200-nvl72.json +0 -45
- package/src/config/presets/platforms/nvidia-gh200-nvl2.json +0 -44
- package/src/config/presets/platforms/nvidia-gh200.json +0 -44
- package/src/config/presets/runtime/compute/f16-activations.json +0 -30
- package/src/config/presets/runtime/compute/f16-batched.json +0 -32
- package/src/config/presets/runtime/default.json +0 -101
- package/src/config/presets/runtime/diagnostics/debug-logits.json +0 -53
- package/src/config/presets/runtime/experiments/bench/gemma3-bench-q4k.json +0 -54
- package/src/config/presets/runtime/experiments/debug/gemma3-debug-q4k.json +0 -210
- package/src/config/presets/runtime/experiments/verify/gemma3-verify.json +0 -39
- package/src/config/presets/runtime/experiments/verify/lfm2-verify.json +0 -46
- package/src/config/presets/runtime/experiments/verify/translategemma-verify.json +0 -39
- package/src/config/presets/runtime/kernels/dequant-f16-q4k.json +0 -13
- package/src/config/presets/runtime/kernels/dequant-f32-q4k.json +0 -13
- package/src/config/presets/runtime/kernels/embeddinggemma-q4k-dequant-f32a.json +0 -37
- package/src/config/presets/runtime/kernels/fused-q4k.json +0 -13
- package/src/config/presets/runtime/kernels/gemma2-q4k-dequant-f16a.json +0 -33
- package/src/config/presets/runtime/kernels/gemma2-q4k-dequant-f32a-nosubgroups.json +0 -33
- package/src/config/presets/runtime/kernels/gemma2-q4k-fused-f32a.json +0 -33
- package/src/config/presets/runtime/kernels/safe-q4k.json +0 -13
- package/src/config/presets/runtime/model/gemma2-debug.json +0 -77
- package/src/config/presets/runtime/model/gemma2-pipeline-debug.json +0 -66
- package/src/config/presets/runtime/model/gemma2-pipeline.json +0 -75
- package/src/config/presets/runtime/model/gemma3-layer-probe.json +0 -85
- package/src/config/presets/runtime/model/qwen3-5-layer-probe.json +0 -52
- package/src/config/presets/runtime/model/qwen3-5-linear-attn-debug.json +0 -90
- package/src/config/presets/runtime/modes/bench.json +0 -37
- package/src/config/presets/runtime/modes/debug.json +0 -39
- package/src/config/presets/runtime/modes/default.json +0 -10
- package/src/config/presets/runtime/modes/embedding-bench.json +0 -28
- package/src/config/presets/runtime/modes/embedding.json +0 -54
- package/src/config/presets/runtime/modes/low-memory.json +0 -40
- package/src/config/presets/runtime/modes/production.json +0 -48
- package/src/config/presets/runtime/modes/simulation.json +0 -30
- package/src/config/presets/runtime/modes/trace-layers.json +0 -127
- package/src/config/presets/runtime/platform/metal-apple-q4k.json +0 -11
- package/src/config/presets/runtime/tiers/gemma4-16gb.json +0 -69
- package/src/config/presets/runtime/tiers/gemma4-24gb.json +0 -66
- package/src/config/presets/runtime/tiers/gemma4-32gb.json +0 -66
- package/src/config/quantization-contract-check.d.ts +0 -12
- package/src/config/quantization-contract-check.js +0 -91
- package/src/config/required-inference-fields-contract-check.d.ts +0 -24
- package/src/config/required-inference-fields-contract-check.js +0 -237
- package/src/config/runtime-merge.d.ts +0 -5
- package/src/config/runtime-merge.js +0 -21
- package/src/config/runtime.d.ts +0 -28
- package/src/config/runtime.js +0 -64
- package/src/config/schema/adapter.schema.d.ts +0 -53
- package/src/config/schema/adapter.schema.js +0 -60
- package/src/config/schema/backward-registry.schema.d.ts +0 -14
- package/src/config/schema/backward-registry.schema.js +0 -46
- package/src/config/schema/benchmark.schema.d.ts +0 -54
- package/src/config/schema/benchmark.schema.js +0 -74
- package/src/config/schema/bridge.schema.d.ts +0 -25
- package/src/config/schema/bridge.schema.js +0 -22
- package/src/config/schema/browser-suite-metrics.schema.d.ts +0 -17
- package/src/config/schema/browser-suite-metrics.schema.js +0 -46
- package/src/config/schema/buffer-pool.schema.d.ts +0 -92
- package/src/config/schema/buffer-pool.schema.js +0 -50
- package/src/config/schema/conversion-report.schema.d.ts +0 -40
- package/src/config/schema/conversion-report.schema.js +0 -108
- package/src/config/schema/conversion.schema.d.ts +0 -184
- package/src/config/schema/conversion.schema.js +0 -13
- package/src/config/schema/converter.schema.d.ts +0 -123
- package/src/config/schema/converter.schema.js +0 -136
- package/src/config/schema/debug.schema.d.ts +0 -290
- package/src/config/schema/debug.schema.js +0 -134
- package/src/config/schema/diffusion.schema.d.ts +0 -88
- package/src/config/schema/diffusion.schema.js +0 -62
- package/src/config/schema/distill-training.schema.d.ts +0 -48
- package/src/config/schema/distill-training.schema.js +0 -139
- package/src/config/schema/distribution.schema.d.ts +0 -155
- package/src/config/schema/distribution.schema.js +0 -81
- package/src/config/schema/doppler.schema.d.ts +0 -75
- package/src/config/schema/doppler.schema.js +0 -341
- package/src/config/schema/ecosystem.schema.d.ts +0 -255
- package/src/config/schema/ecosystem.schema.js +0 -534
- package/src/config/schema/emulation.schema.d.ts +0 -351
- package/src/config/schema/emulation.schema.js +0 -299
- package/src/config/schema/energy.schema.d.ts +0 -102
- package/src/config/schema/energy.schema.js +0 -72
- package/src/config/schema/execution-v0.schema.d.ts +0 -187
- package/src/config/schema/execution-v0.schema.js +0 -55
- package/src/config/schema/gpu-cache.schema.d.ts +0 -26
- package/src/config/schema/gpu-cache.schema.js +0 -8
- package/src/config/schema/harness.schema.d.ts +0 -32
- package/src/config/schema/harness.schema.js +0 -20
- package/src/config/schema/hotswap.schema.d.ts +0 -55
- package/src/config/schema/hotswap.schema.js +0 -18
- package/src/config/schema/index.d.ts +0 -885
- package/src/config/schema/index.js +0 -491
- package/src/config/schema/inference-defaults.schema.d.ts +0 -276
- package/src/config/schema/inference-defaults.schema.js +0 -188
- package/src/config/schema/inference.schema.d.ts +0 -298
- package/src/config/schema/inference.schema.js +0 -39
- package/src/config/schema/intent-bundle.schema.d.ts +0 -28
- package/src/config/schema/intent-bundle.schema.js +0 -12
- package/src/config/schema/kernel-path.schema.d.ts +0 -184
- package/src/config/schema/kernel-path.schema.js +0 -9
- package/src/config/schema/kernel-registry.schema.d.ts +0 -199
- package/src/config/schema/kernel-registry.schema.js +0 -46
- package/src/config/schema/kernel-thresholds.schema.d.ts +0 -302
- package/src/config/schema/kernel-thresholds.schema.js +0 -195
- package/src/config/schema/kernel-warmup.schema.d.ts +0 -19
- package/src/config/schema/kernel-warmup.schema.js +0 -5
- package/src/config/schema/kvcache.schema.d.ts +0 -131
- package/src/config/schema/kvcache.schema.js +0 -31
- package/src/config/schema/loading.schema.d.ts +0 -153
- package/src/config/schema/loading.schema.js +0 -84
- package/src/config/schema/lora.schema.d.ts +0 -12
- package/src/config/schema/lora.schema.js +0 -12
- package/src/config/schema/manifest.schema.d.ts +0 -507
- package/src/config/schema/manifest.schema.js +0 -146
- package/src/config/schema/memory-limits.schema.d.ts +0 -107
- package/src/config/schema/memory-limits.schema.js +0 -57
- package/src/config/schema/moe.schema.d.ts +0 -78
- package/src/config/schema/moe.schema.js +0 -31
- package/src/config/schema/platform.schema.d.ts +0 -121
- package/src/config/schema/platform.schema.js +0 -1
- package/src/config/schema/preset.schema.d.ts +0 -124
- package/src/config/schema/preset.schema.js +0 -1
- package/src/config/schema/quantization-defaults.schema.d.ts +0 -34
- package/src/config/schema/quantization-defaults.schema.js +0 -5
- package/src/config/schema/quantization.schema.d.ts +0 -10
- package/src/config/schema/quantization.schema.js +0 -33
- package/src/config/schema/shared-runtime.schema.d.ts +0 -75
- package/src/config/schema/shared-runtime.schema.js +0 -45
- package/src/config/schema/speculative.schema.d.ts +0 -21
- package/src/config/schema/speculative.schema.js +0 -11
- package/src/config/schema/storage.schema.d.ts +0 -123
- package/src/config/schema/storage.schema.js +0 -66
- package/src/config/schema/tooling.schema.d.ts +0 -29
- package/src/config/schema/tooling.schema.js +0 -12
- package/src/config/schema/training-metrics.schema.d.ts +0 -89
- package/src/config/schema/training-metrics.schema.js +0 -374
- package/src/config/schema/training.schema.d.ts +0 -88
- package/src/config/schema/training.schema.js +0 -106
- package/src/config/schema/tuner.schema.d.ts +0 -39
- package/src/config/schema/tuner.schema.js +0 -13
- package/src/config/schema/ul-training.schema.d.ts +0 -61
- package/src/config/schema/ul-training.schema.js +0 -140
- package/src/config/schema/units.schema.d.ts +0 -27
- package/src/config/schema/units.schema.js +0 -26
- package/src/config/training-defaults.d.ts +0 -24
- package/src/config/training-defaults.js +0 -99
- package/src/converter/conversion-plan.d.ts +0 -64
- package/src/converter/conversion-plan.js +0 -565
- package/src/converter/core.d.ts +0 -264
- package/src/converter/core.js +0 -1383
- package/src/converter/execution-v0-manifest.d.ts +0 -15
- package/src/converter/execution-v0-manifest.js +0 -149
- package/src/converter/index.d.ts +0 -99
- package/src/converter/index.js +0 -60
- package/src/converter/manifest-inference.d.ts +0 -20
- package/src/converter/manifest-inference.js +0 -513
- package/src/converter/parsers/diffusion.d.ts +0 -50
- package/src/converter/parsers/diffusion.js +0 -327
- package/src/converter/parsers/gguf.d.ts +0 -22
- package/src/converter/parsers/gguf.js +0 -46
- package/src/converter/parsers/index.d.ts +0 -21
- package/src/converter/parsers/index.js +0 -12
- package/src/converter/parsers/transformer.d.ts +0 -16
- package/src/converter/parsers/transformer.js +0 -29
- package/src/converter/quantization-info.d.ts +0 -37
- package/src/converter/quantization-info.js +0 -422
- package/src/converter/quantizer.d.ts +0 -101
- package/src/converter/quantizer.js +0 -444
- package/src/converter/rope-config.d.ts +0 -15
- package/src/converter/rope-config.js +0 -262
- package/src/converter/shard-packer.d.ts +0 -138
- package/src/converter/shard-packer.js +0 -425
- package/src/converter/tokenizer-utils.d.ts +0 -12
- package/src/converter/tokenizer-utils.js +0 -104
- package/src/debug/config.d.ts +0 -78
- package/src/debug/config.js +0 -347
- package/src/debug/history.d.ts +0 -65
- package/src/debug/history.js +0 -71
- package/src/debug/index.d.ts +0 -268
- package/src/debug/index.js +0 -192
- package/src/debug/log.d.ts +0 -46
- package/src/debug/log.js +0 -132
- package/src/debug/perf.d.ts +0 -33
- package/src/debug/perf.js +0 -51
- package/src/debug/reference/README.md +0 -114
- package/src/debug/reference/hf_attn_debug.py +0 -114
- package/src/debug/reference/hf_embed_check.py +0 -89
- package/src/debug/reference/hf_layer_out.py +0 -100
- package/src/debug/reference/hf_qwen35_linear_attn_debug.py +0 -268
- package/src/debug/reference/hf_rope_check.py +0 -116
- package/src/debug/reference/hf_weights.py +0 -75
- package/src/debug/signals.d.ts +0 -63
- package/src/debug/signals.js +0 -39
- package/src/debug/stats.d.ts +0 -47
- package/src/debug/stats.js +0 -160
- package/src/debug/tensor.d.ts +0 -125
- package/src/debug/tensor.js +0 -268
- package/src/debug/trace.d.ts +0 -17
- package/src/debug/trace.js +0 -167
- package/src/diffusion/image-regression.d.ts +0 -31
- package/src/diffusion/image-regression.js +0 -107
- package/src/diffusion/index.d.ts +0 -8
- package/src/diffusion/index.js +0 -8
- package/src/distribution/p2p-control-plane.d.ts +0 -52
- package/src/distribution/p2p-control-plane.js +0 -272
- package/src/distribution/p2p-observability.d.ts +0 -116
- package/src/distribution/p2p-observability.js +0 -303
- package/src/distribution/p2p-transport-contract.d.ts +0 -57
- package/src/distribution/p2p-transport-contract.js +0 -310
- package/src/distribution/p2p-webrtc-browser.d.ts +0 -37
- package/src/distribution/p2p-webrtc-browser.js +0 -454
- package/src/distribution/shard-delivery.d.ts +0 -251
- package/src/distribution/shard-delivery.js +0 -2186
- package/src/energy/index.d.ts +0 -2
- package/src/energy/index.js +0 -2
- package/src/errors/doppler-error.d.ts +0 -21
- package/src/errors/doppler-error.js +0 -25
- package/src/errors/index.d.ts +0 -1
- package/src/errors/index.js +0 -1
- package/src/formats/gguf/index.d.ts +0 -8
- package/src/formats/gguf/index.js +0 -4
- package/src/formats/gguf/types.d.ts +0 -137
- package/src/formats/gguf/types.js +0 -460
- package/src/formats/index.d.ts +0 -51
- package/src/formats/index.js +0 -13
- package/src/formats/rdrr/classification.d.ts +0 -39
- package/src/formats/rdrr/classification.js +0 -307
- package/src/formats/rdrr/groups.d.ts +0 -35
- package/src/formats/rdrr/groups.js +0 -73
- package/src/formats/rdrr/index.d.ts +0 -25
- package/src/formats/rdrr/index.js +0 -19
- package/src/formats/rdrr/manifest.d.ts +0 -32
- package/src/formats/rdrr/manifest.js +0 -108
- package/src/formats/rdrr/parsing.d.ts +0 -27
- package/src/formats/rdrr/parsing.js +0 -151
- package/src/formats/rdrr/tensor-config-validator.d.ts +0 -42
- package/src/formats/rdrr/tensor-config-validator.js +0 -156
- package/src/formats/rdrr/types.d.ts +0 -201
- package/src/formats/rdrr/types.js +0 -16
- package/src/formats/rdrr/validation.d.ts +0 -9
- package/src/formats/rdrr/validation.js +0 -213
- package/src/formats/safetensors/index.d.ts +0 -8
- package/src/formats/safetensors/index.js +0 -4
- package/src/formats/safetensors/types.d.ts +0 -67
- package/src/formats/safetensors/types.js +0 -102
- package/src/formats/tokenizer/index.d.ts +0 -5
- package/src/formats/tokenizer/index.js +0 -3
- package/src/formats/tokenizer/types.d.ts +0 -9
- package/src/formats/tokenizer/types.js +0 -22
- package/src/generation/index.d.ts +0 -18
- package/src/generation/index.js +0 -12
- package/src/gpu/command-recorder.d.ts +0 -175
- package/src/gpu/command-recorder.js +0 -498
- package/src/gpu/device.d.ts +0 -142
- package/src/gpu/device.js +0 -462
- package/src/gpu/kernel-runtime.d.ts +0 -20
- package/src/gpu/kernel-runtime.js +0 -39
- package/src/gpu/kernel-selection-cache.d.ts +0 -13
- package/src/gpu/kernel-selection-cache.js +0 -13
- package/src/gpu/kernel-selection-log.d.ts +0 -12
- package/src/gpu/kernel-selection-log.js +0 -28
- package/src/gpu/kernel-selector.d.ts +0 -11
- package/src/gpu/kernel-selector.js +0 -10
- package/src/gpu/kernel-tuner/benchmarks.d.ts +0 -144
- package/src/gpu/kernel-tuner/benchmarks.js +0 -902
- package/src/gpu/kernel-tuner/cache.d.ts +0 -55
- package/src/gpu/kernel-tuner/cache.js +0 -133
- package/src/gpu/kernel-tuner/index.d.ts +0 -59
- package/src/gpu/kernel-tuner/index.js +0 -38
- package/src/gpu/kernel-tuner/tuner.d.ts +0 -82
- package/src/gpu/kernel-tuner/tuner.js +0 -247
- package/src/gpu/kernel-tuner/types.d.ts +0 -101
- package/src/gpu/kernel-tuner/types.js +0 -4
- package/src/gpu/kernel-tuner.d.ts +0 -33
- package/src/gpu/kernel-tuner.js +0 -12
- package/src/gpu/kernels/README.md +0 -127
- package/src/gpu/kernels/attention.d.ts +0 -236
- package/src/gpu/kernels/attention.js +0 -1439
- package/src/gpu/kernels/attention.wgsl +0 -249
- package/src/gpu/kernels/attention_bdpa_decode_f16.wgsl +0 -246
- package/src/gpu/kernels/attention_decode.wgsl +0 -233
- package/src/gpu/kernels/attention_decode_chunked_f16.wgsl +0 -183
- package/src/gpu/kernels/attention_decode_chunked_f16kv.wgsl +0 -208
- package/src/gpu/kernels/attention_decode_f16.wgsl +0 -202
- package/src/gpu/kernels/attention_decode_f16kv.wgsl +0 -224
- package/src/gpu/kernels/attention_decode_online_f16.wgsl +0 -223
- package/src/gpu/kernels/attention_decode_online_f16kv.wgsl +0 -225
- package/src/gpu/kernels/attention_decode_optimized.wgsl +0 -445
- package/src/gpu/kernels/attention_decode_paged_f16.wgsl +0 -172
- package/src/gpu/kernels/attention_decode_paged_f16kv.wgsl +0 -174
- package/src/gpu/kernels/attention_decode_subgroup.wgsl +0 -233
- package/src/gpu/kernels/attention_decode_tiered_f16.wgsl +0 -218
- package/src/gpu/kernels/attention_decode_tiered_f16kv.wgsl +0 -220
- package/src/gpu/kernels/attention_decode_tiered_int4_f16kv.wgsl +0 -242
- package/src/gpu/kernels/attention_decode_tiered_int8_f16kv.wgsl +0 -242
- package/src/gpu/kernels/attention_f16.wgsl +0 -214
- package/src/gpu/kernels/attention_f16kv.wgsl +0 -242
- package/src/gpu/kernels/attention_small.wgsl +0 -260
- package/src/gpu/kernels/attention_small_f16.wgsl +0 -240
- package/src/gpu/kernels/attention_small_f16kv.wgsl +0 -266
- package/src/gpu/kernels/attention_streaming.wgsl +0 -149
- package/src/gpu/kernels/attention_streaming_f16.wgsl +0 -147
- package/src/gpu/kernels/attention_streaming_f16kv.wgsl +0 -151
- package/src/gpu/kernels/backward/adam.d.ts +0 -28
- package/src/gpu/kernels/backward/adam.js +0 -203
- package/src/gpu/kernels/backward/adam.wgsl +0 -50
- package/src/gpu/kernels/backward/attention_backward.d.ts +0 -22
- package/src/gpu/kernels/backward/attention_backward.js +0 -364
- package/src/gpu/kernels/backward/attention_backward.wgsl +0 -49
- package/src/gpu/kernels/backward/bias_add_backward.d.ts +0 -17
- package/src/gpu/kernels/backward/bias_add_backward.js +0 -24
- package/src/gpu/kernels/backward/bias_add_backward.wgsl +0 -33
- package/src/gpu/kernels/backward/conv2d_backward.d.ts +0 -31
- package/src/gpu/kernels/backward/conv2d_backward.js +0 -148
- package/src/gpu/kernels/backward/conv2d_backward_input.wgsl +0 -83
- package/src/gpu/kernels/backward/conv2d_backward_weight.wgsl +0 -70
- package/src/gpu/kernels/backward/cross_entropy_backward.d.ts +0 -23
- package/src/gpu/kernels/backward/cross_entropy_backward.js +0 -29
- package/src/gpu/kernels/backward/cross_entropy_backward.wgsl +0 -39
- package/src/gpu/kernels/backward/embed_backward.d.ts +0 -29
- package/src/gpu/kernels/backward/embed_backward.js +0 -118
- package/src/gpu/kernels/backward/embed_backward.wgsl +0 -73
- package/src/gpu/kernels/backward/gelu_backward.d.ts +0 -16
- package/src/gpu/kernels/backward/gelu_backward.js +0 -39
- package/src/gpu/kernels/backward/gelu_backward.wgsl +0 -38
- package/src/gpu/kernels/backward/groupnorm_backward.d.ts +0 -24
- package/src/gpu/kernels/backward/groupnorm_backward.js +0 -29
- package/src/gpu/kernels/backward/groupnorm_backward.wgsl +0 -143
- package/src/gpu/kernels/backward/index.d.ts +0 -17
- package/src/gpu/kernels/backward/index.js +0 -23
- package/src/gpu/kernels/backward/layernorm_backward.d.ts +0 -22
- package/src/gpu/kernels/backward/layernorm_backward.js +0 -135
- package/src/gpu/kernels/backward/layernorm_backward.wgsl +0 -194
- package/src/gpu/kernels/backward/matmul_backward.d.ts +0 -32
- package/src/gpu/kernels/backward/matmul_backward.js +0 -124
- package/src/gpu/kernels/backward/matmul_backward.wgsl +0 -90
- package/src/gpu/kernels/backward/matmul_transpose_a.wgsl +0 -84
- package/src/gpu/kernels/backward/pixel_shuffle_backward.d.ts +0 -22
- package/src/gpu/kernels/backward/pixel_shuffle_backward.js +0 -30
- package/src/gpu/kernels/backward/pixel_shuffle_backward.wgsl +0 -54
- package/src/gpu/kernels/backward/rmsnorm_backward.d.ts +0 -24
- package/src/gpu/kernels/backward/rmsnorm_backward.js +0 -101
- package/src/gpu/kernels/backward/rmsnorm_backward.wgsl +0 -78
- package/src/gpu/kernels/backward/rope_backward.d.ts +0 -25
- package/src/gpu/kernels/backward/rope_backward.js +0 -109
- package/src/gpu/kernels/backward/rope_backward.wgsl +0 -59
- package/src/gpu/kernels/backward/scale_backward.d.ts +0 -16
- package/src/gpu/kernels/backward/scale_backward.js +0 -84
- package/src/gpu/kernels/backward/scale_backward.wgsl +0 -27
- package/src/gpu/kernels/backward/silu_backward.d.ts +0 -16
- package/src/gpu/kernels/backward/silu_backward.js +0 -39
- package/src/gpu/kernels/backward/silu_backward.wgsl +0 -31
- package/src/gpu/kernels/backward/softmax_backward.d.ts +0 -16
- package/src/gpu/kernels/backward/softmax_backward.js +0 -43
- package/src/gpu/kernels/backward/softmax_backward.wgsl +0 -44
- package/src/gpu/kernels/backward/upsample2d_backward.d.ts +0 -21
- package/src/gpu/kernels/backward/upsample2d_backward.js +0 -30
- package/src/gpu/kernels/backward/upsample2d_backward.wgsl +0 -59
- package/src/gpu/kernels/backward/utils.d.ts +0 -45
- package/src/gpu/kernels/backward/utils.js +0 -371
- package/src/gpu/kernels/bf16_to_f16.wgsl +0 -54
- package/src/gpu/kernels/bf16_to_f32.wgsl +0 -70
- package/src/gpu/kernels/bias_add.wgsl +0 -42
- package/src/gpu/kernels/bias_add_f16.wgsl +0 -47
- package/src/gpu/kernels/cast.d.ts +0 -67
- package/src/gpu/kernels/cast.js +0 -464
- package/src/gpu/kernels/cast_f16_to_f32.wgsl +0 -31
- package/src/gpu/kernels/cast_f32_to_f16.wgsl +0 -36
- package/src/gpu/kernels/check-finiteness.d.ts +0 -15
- package/src/gpu/kernels/check-finiteness.js +0 -149
- package/src/gpu/kernels/check-stop.d.ts +0 -31
- package/src/gpu/kernels/check-stop.js +0 -170
- package/src/gpu/kernels/clamp.d.ts +0 -22
- package/src/gpu/kernels/clamp.js +0 -42
- package/src/gpu/kernels/clamp.wgsl +0 -24
- package/src/gpu/kernels/constants.d.ts +0 -168
- package/src/gpu/kernels/constants.js +0 -129
- package/src/gpu/kernels/conv2d.d.ts +0 -34
- package/src/gpu/kernels/conv2d.js +0 -91
- package/src/gpu/kernels/conv2d.wgsl +0 -70
- package/src/gpu/kernels/conv2d_f16.wgsl +0 -72
- package/src/gpu/kernels/cross_entropy_loss.d.ts +0 -21
- package/src/gpu/kernels/cross_entropy_loss.js +0 -60
- package/src/gpu/kernels/cross_entropy_loss.wgsl +0 -39
- package/src/gpu/kernels/depthwise_conv2d.d.ts +0 -29
- package/src/gpu/kernels/depthwise_conv2d.js +0 -109
- package/src/gpu/kernels/depthwise_conv2d.wgsl +0 -55
- package/src/gpu/kernels/depthwise_conv2d_f16.wgsl +0 -59
- package/src/gpu/kernels/dequant.d.ts +0 -108
- package/src/gpu/kernels/dequant.js +0 -576
- package/src/gpu/kernels/dequant_f16_out.wgsl +0 -153
- package/src/gpu/kernels/dequant_f16_out_vec4.wgsl +0 -152
- package/src/gpu/kernels/dequant_f16_rowwise.wgsl +0 -139
- package/src/gpu/kernels/dequant_f32_rowwise.wgsl +0 -133
- package/src/gpu/kernels/dequant_mxfp4.wgsl +0 -120
- package/src/gpu/kernels/dequant_mxfp4_expert.wgsl +0 -129
- package/src/gpu/kernels/dequant_mxfp4_expert_f16.wgsl +0 -105
- package/src/gpu/kernels/dequant_mxfp4_vec4.wgsl +0 -116
- package/src/gpu/kernels/dequant_q6k.wgsl +0 -140
- package/src/gpu/kernels/dequant_q8_0.wgsl +0 -98
- package/src/gpu/kernels/dequant_shared.wgsl +0 -204
- package/src/gpu/kernels/dequant_shared_vec4.wgsl +0 -155
- package/src/gpu/kernels/dequant_subgroup.wgsl +0 -206
- package/src/gpu/kernels/dispatch.d.ts +0 -157
- package/src/gpu/kernels/dispatch.js +0 -235
- package/src/gpu/kernels/energy.d.ts +0 -113
- package/src/gpu/kernels/energy.js +0 -448
- package/src/gpu/kernels/energy_eval.wgsl +0 -26
- package/src/gpu/kernels/energy_eval_f16.wgsl +0 -30
- package/src/gpu/kernels/energy_quintel_grad.wgsl +0 -92
- package/src/gpu/kernels/energy_quintel_grad_f16.wgsl +0 -96
- package/src/gpu/kernels/energy_quintel_reduce.wgsl +0 -112
- package/src/gpu/kernels/energy_quintel_reduce_f16.wgsl +0 -116
- package/src/gpu/kernels/energy_quintel_update.wgsl +0 -92
- package/src/gpu/kernels/energy_quintel_update_f16.wgsl +0 -96
- package/src/gpu/kernels/energy_update.wgsl +0 -25
- package/src/gpu/kernels/energy_update_f16.wgsl +0 -30
- package/src/gpu/kernels/feature-check.d.ts +0 -42
- package/src/gpu/kernels/feature-check.js +0 -70
- package/src/gpu/kernels/fused_ffn.d.ts +0 -65
- package/src/gpu/kernels/fused_ffn.js +0 -337
- package/src/gpu/kernels/fused_ffn.wgsl +0 -420
- package/src/gpu/kernels/fused_ffn_f16.wgsl +0 -213
- package/src/gpu/kernels/fused_ffn_q4k.wgsl +0 -375
- package/src/gpu/kernels/fused_matmul_q4.wgsl +0 -404
- package/src/gpu/kernels/fused_matmul_q4_batched.wgsl +0 -194
- package/src/gpu/kernels/fused_matmul_q4_batched_f16.wgsl +0 -170
- package/src/gpu/kernels/fused_matmul_q4_batched_f16a.wgsl +0 -154
- package/src/gpu/kernels/fused_matmul_q4_f16a.wgsl +0 -219
- package/src/gpu/kernels/fused_matmul_q4_multicol_f16.wgsl +0 -216
- package/src/gpu/kernels/fused_matmul_q4_multicol_f16a.wgsl +0 -204
- package/src/gpu/kernels/fused_matmul_residual.d.ts +0 -46
- package/src/gpu/kernels/fused_matmul_residual.js +0 -175
- package/src/gpu/kernels/fused_matmul_rmsnorm.d.ts +0 -64
- package/src/gpu/kernels/fused_matmul_rmsnorm.js +0 -290
- package/src/gpu/kernels/fused_matmul_rmsnorm.wgsl +0 -324
- package/src/gpu/kernels/fused_matmul_rmsnorm_f16.wgsl +0 -303
- package/src/gpu/kernels/fused_swiglu.wgsl +0 -63
- package/src/gpu/kernels/fused_swiglu_f16.wgsl +0 -57
- package/src/gpu/kernels/gated-short-conv.d.ts +0 -63
- package/src/gpu/kernels/gated-short-conv.js +0 -284
- package/src/gpu/kernels/gather.d.ts +0 -64
- package/src/gpu/kernels/gather.js +0 -137
- package/src/gpu/kernels/gather.wgsl +0 -61
- package/src/gpu/kernels/gather_f16.wgsl +0 -65
- package/src/gpu/kernels/gather_f16_f16_out.wgsl +0 -55
- package/src/gpu/kernels/gather_f16_out.wgsl +0 -55
- package/src/gpu/kernels/gather_f16_vec4.wgsl +0 -76
- package/src/gpu/kernels/gather_f16_vec4_f16_out.wgsl +0 -68
- package/src/gpu/kernels/gather_vec4.wgsl +0 -74
- package/src/gpu/kernels/gather_vec4_f16_out.wgsl +0 -68
- package/src/gpu/kernels/gelu.d.ts +0 -33
- package/src/gpu/kernels/gelu.js +0 -55
- package/src/gpu/kernels/gelu.wgsl +0 -64
- package/src/gpu/kernels/gelu_f16.wgsl +0 -66
- package/src/gpu/kernels/gptoss_mxfp4_expert_fused.wgsl +0 -127
- package/src/gpu/kernels/gptoss_router_topk.wgsl +0 -119
- package/src/gpu/kernels/grouped_pointwise_conv2d.d.ts +0 -27
- package/src/gpu/kernels/grouped_pointwise_conv2d.js +0 -103
- package/src/gpu/kernels/grouped_pointwise_conv2d.wgsl +0 -44
- package/src/gpu/kernels/grouped_pointwise_conv2d_f16.wgsl +0 -48
- package/src/gpu/kernels/groupnorm.d.ts +0 -31
- package/src/gpu/kernels/groupnorm.js +0 -102
- package/src/gpu/kernels/groupnorm_apply.wgsl +0 -41
- package/src/gpu/kernels/groupnorm_apply_f16.wgsl +0 -46
- package/src/gpu/kernels/groupnorm_stats.wgsl +0 -76
- package/src/gpu/kernels/groupnorm_stats_f16.wgsl +0 -79
- package/src/gpu/kernels/index.d.ts +0 -374
- package/src/gpu/kernels/index.js +0 -315
- package/src/gpu/kernels/kernel-base.d.ts +0 -33
- package/src/gpu/kernels/kernel-base.js +0 -46
- package/src/gpu/kernels/kernel-configs.d.ts +0 -65
- package/src/gpu/kernels/kernel-configs.js +0 -50
- package/src/gpu/kernels/kernel-tuning.d.ts +0 -42
- package/src/gpu/kernels/kernel-tuning.js +0 -149
- package/src/gpu/kernels/kv-quantize.d.ts +0 -37
- package/src/gpu/kernels/kv-quantize.js +0 -141
- package/src/gpu/kernels/kv_quantize_int4.wgsl +0 -119
- package/src/gpu/kernels/kv_quantize_int8.wgsl +0 -119
- package/src/gpu/kernels/layernorm.d.ts +0 -37
- package/src/gpu/kernels/layernorm.js +0 -96
- package/src/gpu/kernels/layernorm.wgsl +0 -121
- package/src/gpu/kernels/layernorm_f16.wgsl +0 -103
- package/src/gpu/kernels/linear-attention-core.d.ts +0 -39
- package/src/gpu/kernels/linear-attention-core.js +0 -555
- package/src/gpu/kernels/logit-merge.d.ts +0 -110
- package/src/gpu/kernels/logit-merge.js +0 -394
- package/src/gpu/kernels/matmul-dispatch.d.ts +0 -38
- package/src/gpu/kernels/matmul-dispatch.js +0 -155
- package/src/gpu/kernels/matmul-selection.d.ts +0 -87
- package/src/gpu/kernels/matmul-selection.js +0 -518
- package/src/gpu/kernels/matmul.d.ts +0 -114
- package/src/gpu/kernels/matmul.js +0 -384
- package/src/gpu/kernels/matmul_f16.wgsl +0 -170
- package/src/gpu/kernels/matmul_f16_tiled.wgsl +0 -165
- package/src/gpu/kernels/matmul_f16w_f32a.wgsl +0 -89
- package/src/gpu/kernels/matmul_f16w_f32a_tiled.wgsl +0 -154
- package/src/gpu/kernels/matmul_f32.wgsl +0 -100
- package/src/gpu/kernels/matmul_gemv.wgsl +0 -80
- package/src/gpu/kernels/matmul_gemv_f16a.wgsl +0 -81
- package/src/gpu/kernels/matmul_gemv_residual.wgsl +0 -119
- package/src/gpu/kernels/matmul_gemv_residual_f16.wgsl +0 -78
- package/src/gpu/kernels/matmul_gemv_subgroup.wgsl +0 -343
- package/src/gpu/kernels/matmul_gemv_subgroup_f16a.wgsl +0 -514
- package/src/gpu/kernels/modulate.d.ts +0 -29
- package/src/gpu/kernels/modulate.js +0 -57
- package/src/gpu/kernels/modulate.wgsl +0 -40
- package/src/gpu/kernels/modulate_f16.wgsl +0 -43
- package/src/gpu/kernels/moe.d.ts +0 -164
- package/src/gpu/kernels/moe.js +0 -542
- package/src/gpu/kernels/moe_gather.wgsl +0 -170
- package/src/gpu/kernels/moe_gather_f16.wgsl +0 -82
- package/src/gpu/kernels/moe_gather_vec4.wgsl +0 -74
- package/src/gpu/kernels/moe_offsets.wgsl +0 -48
- package/src/gpu/kernels/pipeline-cache.d.ts +0 -88
- package/src/gpu/kernels/pipeline-cache.js +0 -305
- package/src/gpu/kernels/pixel_shuffle.d.ts +0 -27
- package/src/gpu/kernels/pixel_shuffle.js +0 -57
- package/src/gpu/kernels/pixel_shuffle.wgsl +0 -43
- package/src/gpu/kernels/pixel_shuffle_f16.wgsl +0 -46
- package/src/gpu/kernels/relu.d.ts +0 -18
- package/src/gpu/kernels/relu.js +0 -66
- package/src/gpu/kernels/relu.wgsl +0 -22
- package/src/gpu/kernels/relu_f16.wgsl +0 -24
- package/src/gpu/kernels/repeat_channels.d.ts +0 -21
- package/src/gpu/kernels/repeat_channels.js +0 -68
- package/src/gpu/kernels/repeat_channels.wgsl +0 -28
- package/src/gpu/kernels/repeat_channels_f16.wgsl +0 -30
- package/src/gpu/kernels/residual.d.ts +0 -74
- package/src/gpu/kernels/residual.js +0 -173
- package/src/gpu/kernels/residual.wgsl +0 -56
- package/src/gpu/kernels/residual_f16.wgsl +0 -36
- package/src/gpu/kernels/residual_f16_vec4.wgsl +0 -48
- package/src/gpu/kernels/residual_vec4.wgsl +0 -47
- package/src/gpu/kernels/rmsnorm.d.ts +0 -53
- package/src/gpu/kernels/rmsnorm.js +0 -215
- package/src/gpu/kernels/rmsnorm.wgsl +0 -425
- package/src/gpu/kernels/rmsnorm_f16.wgsl +0 -172
- package/src/gpu/kernels/rope.d.ts +0 -50
- package/src/gpu/kernels/rope.js +0 -66
- package/src/gpu/kernels/rope.wgsl +0 -344
- package/src/gpu/kernels/rope_f16.wgsl +0 -271
- package/src/gpu/kernels/rule-matcher.d.ts +0 -30
- package/src/gpu/kernels/rule-matcher.js +0 -42
- package/src/gpu/kernels/rule-registry.d.ts +0 -7
- package/src/gpu/kernels/rule-registry.js +0 -41
- package/src/gpu/kernels/sample.d.ts +0 -75
- package/src/gpu/kernels/sample.js +0 -565
- package/src/gpu/kernels/sample.wgsl +0 -407
- package/src/gpu/kernels/sample_f16.wgsl +0 -361
- package/src/gpu/kernels/sana_linear_attention.d.ts +0 -27
- package/src/gpu/kernels/sana_linear_attention.js +0 -129
- package/src/gpu/kernels/sana_linear_attention_apply.wgsl +0 -43
- package/src/gpu/kernels/sana_linear_attention_apply_f16.wgsl +0 -46
- package/src/gpu/kernels/sana_linear_attention_summary.wgsl +0 -51
- package/src/gpu/kernels/sana_linear_attention_summary_f16.wgsl +0 -53
- package/src/gpu/kernels/scale.d.ts +0 -35
- package/src/gpu/kernels/scale.js +0 -44
- package/src/gpu/kernels/scale.wgsl +0 -38
- package/src/gpu/kernels/scatter_add.wgsl +0 -88
- package/src/gpu/kernels/scatter_add_dynamic.wgsl +0 -59
- package/src/gpu/kernels/scatter_add_dynamic_f16.wgsl +0 -52
- package/src/gpu/kernels/scatter_add_dynamic_f16_weights.wgsl +0 -50
- package/src/gpu/kernels/scatter_add_vec4.wgsl +0 -70
- package/src/gpu/kernels/shader-cache.d.ts +0 -56
- package/src/gpu/kernels/shader-cache.js +0 -213
- package/src/gpu/kernels/silu.d.ts +0 -76
- package/src/gpu/kernels/silu.js +0 -406
- package/src/gpu/kernels/silu.wgsl +0 -109
- package/src/gpu/kernels/silu_f16.wgsl +0 -108
- package/src/gpu/kernels/softmax.d.ts +0 -57
- package/src/gpu/kernels/softmax.js +0 -125
- package/src/gpu/kernels/softmax.wgsl +0 -388
- package/src/gpu/kernels/softmax_subgroup.wgsl +0 -175
- package/src/gpu/kernels/split_qg.d.ts +0 -50
- package/src/gpu/kernels/split_qg.js +0 -46
- package/src/gpu/kernels/split_qg.wgsl +0 -58
- package/src/gpu/kernels/split_qg_f16.wgsl +0 -62
- package/src/gpu/kernels/split_qkv.d.ts +0 -51
- package/src/gpu/kernels/split_qkv.js +0 -51
- package/src/gpu/kernels/split_qkv.wgsl +0 -71
- package/src/gpu/kernels/split_qkv_f16.wgsl +0 -75
- package/src/gpu/kernels/topk.wgsl +0 -243
- package/src/gpu/kernels/topk_f16.wgsl +0 -108
- package/src/gpu/kernels/topk_f16_weights.wgsl +0 -101
- package/src/gpu/kernels/transpose.d.ts +0 -21
- package/src/gpu/kernels/transpose.js +0 -51
- package/src/gpu/kernels/transpose.wgsl +0 -33
- package/src/gpu/kernels/types.d.ts +0 -21
- package/src/gpu/kernels/types.js +0 -4
- package/src/gpu/kernels/uniform-utils.d.ts +0 -48
- package/src/gpu/kernels/uniform-utils.js +0 -94
- package/src/gpu/kernels/upsample2d.d.ts +0 -25
- package/src/gpu/kernels/upsample2d.js +0 -67
- package/src/gpu/kernels/upsample2d.wgsl +0 -34
- package/src/gpu/kernels/upsample2d_f16.wgsl +0 -38
- package/src/gpu/kernels/utils.d.ts +0 -106
- package/src/gpu/kernels/utils.js +0 -246
- package/src/gpu/multi-model-recorder.d.ts +0 -21
- package/src/gpu/multi-model-recorder.js +0 -31
- package/src/gpu/partitioned-buffer-pool.d.ts +0 -28
- package/src/gpu/partitioned-buffer-pool.js +0 -57
- package/src/gpu/perf-guards.d.ts +0 -25
- package/src/gpu/perf-guards.js +0 -133
- package/src/gpu/profiler.d.ts +0 -114
- package/src/gpu/profiler.js +0 -396
- package/src/gpu/readback-utils.d.ts +0 -16
- package/src/gpu/readback-utils.js +0 -41
- package/src/gpu/submit-tracker.d.ts +0 -111
- package/src/gpu/submit-tracker.js +0 -242
- package/src/gpu/tensor.d.ts +0 -69
- package/src/gpu/tensor.js +0 -75
- package/src/gpu/uniform-cache.d.ts +0 -109
- package/src/gpu/uniform-cache.js +0 -263
- package/src/gpu/weight-buffer.d.ts +0 -115
- package/src/gpu/weight-buffer.js +0 -118
- package/src/hotswap/intent-bundle.d.ts +0 -37
- package/src/hotswap/intent-bundle.js +0 -129
- package/src/hotswap/manifest.d.ts +0 -42
- package/src/hotswap/manifest.js +0 -124
- package/src/hotswap/runtime.d.ts +0 -31
- package/src/hotswap/runtime.js +0 -150
- package/src/index-browser.d.ts +0 -92
- package/src/index-browser.js +0 -68
- package/src/index-internal.d.ts +0 -2
- package/src/index-internal.js +0 -2
- package/src/index.d.ts +0 -103
- package/src/index.js +0 -76
- package/src/inference/README.md +0 -593
- package/src/inference/browser-harness-contract-helpers.d.ts +0 -5
- package/src/inference/browser-harness-contract-helpers.js +0 -28
- package/src/inference/browser-harness-diffusion-energy-suites.d.ts +0 -2
- package/src/inference/browser-harness-diffusion-energy-suites.js +0 -269
- package/src/inference/browser-harness-model-helpers.d.ts +0 -16
- package/src/inference/browser-harness-model-helpers.js +0 -217
- package/src/inference/browser-harness-report-helpers.d.ts +0 -7
- package/src/inference/browser-harness-report-helpers.js +0 -42
- package/src/inference/browser-harness-runtime-helpers.d.ts +0 -61
- package/src/inference/browser-harness-runtime-helpers.js +0 -415
- package/src/inference/browser-harness-suite-helpers.d.ts +0 -28
- package/src/inference/browser-harness-suite-helpers.js +0 -268
- package/src/inference/browser-harness-text-helpers.d.ts +0 -27
- package/src/inference/browser-harness-text-helpers.js +0 -788
- package/src/inference/browser-harness.d.ts +0 -242
- package/src/inference/browser-harness.js +0 -990
- package/src/inference/decode-buffers.d.ts +0 -108
- package/src/inference/decode-buffers.js +0 -181
- package/src/inference/decode-ring.d.ts +0 -52
- package/src/inference/decode-ring.js +0 -273
- package/src/inference/expert-router.d.ts +0 -27
- package/src/inference/expert-router.js +0 -55
- package/src/inference/functiongemma.d.ts +0 -15
- package/src/inference/functiongemma.js +0 -1
- package/src/inference/kv-cache/base.d.ts +0 -150
- package/src/inference/kv-cache/base.js +0 -1076
- package/src/inference/kv-cache/basis-decomposed-paged.d.ts +0 -50
- package/src/inference/kv-cache/basis-decomposed-paged.js +0 -276
- package/src/inference/kv-cache/index.d.ts +0 -35
- package/src/inference/kv-cache/index.js +0 -20
- package/src/inference/kv-cache/sliding-window.d.ts +0 -72
- package/src/inference/kv-cache/sliding-window.js +0 -243
- package/src/inference/kv-cache/tiered.d.ts +0 -89
- package/src/inference/kv-cache/tiered.js +0 -576
- package/src/inference/kv-cache/types.d.ts +0 -188
- package/src/inference/kv-cache/types.js +0 -80
- package/src/inference/kv-cache.d.ts +0 -36
- package/src/inference/kv-cache.js +0 -18
- package/src/inference/moe-router.d.ts +0 -212
- package/src/inference/moe-router.js +0 -585
- package/src/inference/multi-model-network.d.ts +0 -139
- package/src/inference/multi-model-network.js +0 -771
- package/src/inference/multi-pipeline-pool.d.ts +0 -62
- package/src/inference/multi-pipeline-pool.js +0 -161
- package/src/inference/network-evolution.d.ts +0 -55
- package/src/inference/network-evolution.js +0 -79
- package/src/inference/pipelines/context.d.ts +0 -21
- package/src/inference/pipelines/context.js +0 -184
- package/src/inference/pipelines/diffusion/helpers.d.ts +0 -29
- package/src/inference/pipelines/diffusion/helpers.js +0 -120
- package/src/inference/pipelines/diffusion/index.d.ts +0 -3
- package/src/inference/pipelines/diffusion/index.js +0 -3
- package/src/inference/pipelines/diffusion/init.d.ts +0 -24
- package/src/inference/pipelines/diffusion/init.js +0 -138
- package/src/inference/pipelines/diffusion/pipeline.d.ts +0 -38
- package/src/inference/pipelines/diffusion/pipeline.js +0 -772
- package/src/inference/pipelines/diffusion/sana-transformer.d.ts +0 -53
- package/src/inference/pipelines/diffusion/sana-transformer.js +0 -738
- package/src/inference/pipelines/diffusion/scheduler.d.ts +0 -35
- package/src/inference/pipelines/diffusion/scheduler.js +0 -153
- package/src/inference/pipelines/diffusion/sd3-transformer.d.ts +0 -20
- package/src/inference/pipelines/diffusion/sd3-transformer.js +0 -1194
- package/src/inference/pipelines/diffusion/sd3-weights.d.ts +0 -21
- package/src/inference/pipelines/diffusion/sd3-weights.js +0 -287
- package/src/inference/pipelines/diffusion/text-encoder-gpu.d.ts +0 -87
- package/src/inference/pipelines/diffusion/text-encoder-gpu.js +0 -1224
- package/src/inference/pipelines/diffusion/text-encoder.d.ts +0 -29
- package/src/inference/pipelines/diffusion/text-encoder.js +0 -195
- package/src/inference/pipelines/diffusion/types.d.ts +0 -116
- package/src/inference/pipelines/diffusion/types.js +0 -1
- package/src/inference/pipelines/diffusion/vae.d.ts +0 -20
- package/src/inference/pipelines/diffusion/vae.js +0 -1375
- package/src/inference/pipelines/diffusion/weights.d.ts +0 -40
- package/src/inference/pipelines/diffusion/weights.js +0 -150
- package/src/inference/pipelines/dream/energy-head-pipeline.d.ts +0 -29
- package/src/inference/pipelines/dream/energy-head-pipeline.js +0 -6
- package/src/inference/pipelines/dream/pipeline.d.ts +0 -17
- package/src/inference/pipelines/dream/pipeline.js +0 -8
- package/src/inference/pipelines/energy/index.d.ts +0 -1
- package/src/inference/pipelines/energy/index.js +0 -1
- package/src/inference/pipelines/energy/pipeline.d.ts +0 -27
- package/src/inference/pipelines/energy/pipeline.js +0 -686
- package/src/inference/pipelines/energy/quintel.d.ts +0 -92
- package/src/inference/pipelines/energy/quintel.js +0 -218
- package/src/inference/pipelines/energy/types.d.ts +0 -63
- package/src/inference/pipelines/energy/types.js +0 -1
- package/src/inference/pipelines/energy-head/index.d.ts +0 -6
- package/src/inference/pipelines/energy-head/index.js +0 -6
- package/src/inference/pipelines/energy-head/row-head-pipeline.d.ts +0 -103
- package/src/inference/pipelines/energy-head/row-head-pipeline.js +0 -491
- package/src/inference/pipelines/factory.d.ts +0 -10
- package/src/inference/pipelines/factory.js +0 -6
- package/src/inference/pipelines/index.d.ts +0 -22
- package/src/inference/pipelines/index.js +0 -19
- package/src/inference/pipelines/registry.d.ts +0 -15
- package/src/inference/pipelines/registry.js +0 -23
- package/src/inference/pipelines/rng.d.ts +0 -2
- package/src/inference/pipelines/rng.js +0 -17
- package/src/inference/pipelines/structured/index.d.ts +0 -8
- package/src/inference/pipelines/structured/index.js +0 -8
- package/src/inference/pipelines/structured/json-head-pipeline.d.ts +0 -58
- package/src/inference/pipelines/structured/json-head-pipeline.js +0 -196
- package/src/inference/pipelines/text/attention/index.d.ts +0 -24
- package/src/inference/pipelines/text/attention/index.js +0 -17
- package/src/inference/pipelines/text/attention/output-projection.d.ts +0 -12
- package/src/inference/pipelines/text/attention/output-projection.js +0 -8
- package/src/inference/pipelines/text/attention/projections.d.ts +0 -113
- package/src/inference/pipelines/text/attention/projections.js +0 -526
- package/src/inference/pipelines/text/attention/record.d.ts +0 -36
- package/src/inference/pipelines/text/attention/record.js +0 -686
- package/src/inference/pipelines/text/attention/run.d.ts +0 -38
- package/src/inference/pipelines/text/attention/run.js +0 -942
- package/src/inference/pipelines/text/attention/types.d.ts +0 -98
- package/src/inference/pipelines/text/attention/types.js +0 -67
- package/src/inference/pipelines/text/attention.d.ts +0 -23
- package/src/inference/pipelines/text/attention.js +0 -12
- package/src/inference/pipelines/text/bdpa-steamroller.d.ts +0 -22
- package/src/inference/pipelines/text/bdpa-steamroller.js +0 -158
- package/src/inference/pipelines/text/buffer-types.d.ts +0 -7
- package/src/inference/pipelines/text/buffer-types.js +0 -4
- package/src/inference/pipelines/text/chat-format.d.ts +0 -46
- package/src/inference/pipelines/text/chat-format.js +0 -390
- package/src/inference/pipelines/text/config.d.ts +0 -245
- package/src/inference/pipelines/text/config.js +0 -731
- package/src/inference/pipelines/text/debug-utils/config.d.ts +0 -144
- package/src/inference/pipelines/text/debug-utils/config.js +0 -156
- package/src/inference/pipelines/text/debug-utils/index.d.ts +0 -53
- package/src/inference/pipelines/text/debug-utils/index.js +0 -44
- package/src/inference/pipelines/text/debug-utils/logging.d.ts +0 -106
- package/src/inference/pipelines/text/debug-utils/logging.js +0 -152
- package/src/inference/pipelines/text/debug-utils/tensor.d.ts +0 -119
- package/src/inference/pipelines/text/debug-utils/tensor.js +0 -268
- package/src/inference/pipelines/text/debug-utils/utils.d.ts +0 -77
- package/src/inference/pipelines/text/debug-utils/utils.js +0 -139
- package/src/inference/pipelines/text/debug-utils.d.ts +0 -42
- package/src/inference/pipelines/text/debug-utils.js +0 -34
- package/src/inference/pipelines/text/embed.d.ts +0 -67
- package/src/inference/pipelines/text/embed.js +0 -474
- package/src/inference/pipelines/text/execution-plan.d.ts +0 -116
- package/src/inference/pipelines/text/execution-plan.js +0 -329
- package/src/inference/pipelines/text/execution-v0-contract-helpers.d.ts +0 -59
- package/src/inference/pipelines/text/execution-v0-contract-helpers.js +0 -937
- package/src/inference/pipelines/text/execution-v0-runtime-builders.d.ts +0 -15
- package/src/inference/pipelines/text/execution-v0-runtime-builders.js +0 -286
- package/src/inference/pipelines/text/execution-v0.d.ts +0 -66
- package/src/inference/pipelines/text/execution-v0.js +0 -266
- package/src/inference/pipelines/text/ffn/dense.d.ts +0 -40
- package/src/inference/pipelines/text/ffn/dense.js +0 -759
- package/src/inference/pipelines/text/ffn/index.d.ts +0 -23
- package/src/inference/pipelines/text/ffn/index.js +0 -16
- package/src/inference/pipelines/text/ffn/moe.d.ts +0 -21
- package/src/inference/pipelines/text/ffn/moe.js +0 -49
- package/src/inference/pipelines/text/ffn/sandwich.d.ts +0 -25
- package/src/inference/pipelines/text/ffn/sandwich.js +0 -196
- package/src/inference/pipelines/text/ffn/standard.d.ts +0 -23
- package/src/inference/pipelines/text/ffn/standard.js +0 -87
- package/src/inference/pipelines/text/ffn/types.d.ts +0 -30
- package/src/inference/pipelines/text/ffn/types.js +0 -25
- package/src/inference/pipelines/text/ffn.d.ts +0 -31
- package/src/inference/pipelines/text/ffn.js +0 -18
- package/src/inference/pipelines/text/finiteness-guard-status.d.ts +0 -11
- package/src/inference/pipelines/text/finiteness-guard-status.js +0 -21
- package/src/inference/pipelines/text/finiteness-policy.d.ts +0 -35
- package/src/inference/pipelines/text/finiteness-policy.js +0 -45
- package/src/inference/pipelines/text/generator-helpers.d.ts +0 -34
- package/src/inference/pipelines/text/generator-helpers.js +0 -176
- package/src/inference/pipelines/text/generator-runtime.d.ts +0 -93
- package/src/inference/pipelines/text/generator-runtime.js +0 -392
- package/src/inference/pipelines/text/generator-steps.d.ts +0 -136
- package/src/inference/pipelines/text/generator-steps.js +0 -1214
- package/src/inference/pipelines/text/generator.d.ts +0 -46
- package/src/inference/pipelines/text/generator.js +0 -1515
- package/src/inference/pipelines/text/index.d.ts +0 -5
- package/src/inference/pipelines/text/index.js +0 -6
- package/src/inference/pipelines/text/init.d.ts +0 -314
- package/src/inference/pipelines/text/init.js +0 -1126
- package/src/inference/pipelines/text/kernel-path-auto-select.d.ts +0 -12
- package/src/inference/pipelines/text/kernel-path-auto-select.js +0 -92
- package/src/inference/pipelines/text/kernel-trace.d.ts +0 -152
- package/src/inference/pipelines/text/kernel-trace.js +0 -330
- package/src/inference/pipelines/text/layer-plan.d.ts +0 -65
- package/src/inference/pipelines/text/layer-plan.js +0 -249
- package/src/inference/pipelines/text/layer.d.ts +0 -56
- package/src/inference/pipelines/text/layer.js +0 -951
- package/src/inference/pipelines/text/linear-attention.d.ts +0 -109
- package/src/inference/pipelines/text/linear-attention.js +0 -907
- package/src/inference/pipelines/text/logits/cpu.d.ts +0 -81
- package/src/inference/pipelines/text/logits/cpu.js +0 -91
- package/src/inference/pipelines/text/logits/gpu.d.ts +0 -113
- package/src/inference/pipelines/text/logits/gpu.js +0 -411
- package/src/inference/pipelines/text/logits/index.d.ts +0 -62
- package/src/inference/pipelines/text/logits/index.js +0 -306
- package/src/inference/pipelines/text/logits/types.d.ts +0 -46
- package/src/inference/pipelines/text/logits/types.js +0 -4
- package/src/inference/pipelines/text/logits/utils.d.ts +0 -56
- package/src/inference/pipelines/text/logits/utils.js +0 -68
- package/src/inference/pipelines/text/logits.d.ts +0 -27
- package/src/inference/pipelines/text/logits.js +0 -16
- package/src/inference/pipelines/text/lora-apply.d.ts +0 -28
- package/src/inference/pipelines/text/lora-apply.js +0 -76
- package/src/inference/pipelines/text/lora-types.d.ts +0 -39
- package/src/inference/pipelines/text/lora-types.js +0 -18
- package/src/inference/pipelines/text/lora.d.ts +0 -18
- package/src/inference/pipelines/text/lora.js +0 -12
- package/src/inference/pipelines/text/model-load.d.ts +0 -58
- package/src/inference/pipelines/text/model-load.js +0 -739
- package/src/inference/pipelines/text/moe-cache.d.ts +0 -32
- package/src/inference/pipelines/text/moe-cache.js +0 -108
- package/src/inference/pipelines/text/moe-cpu-gptoss.d.ts +0 -9
- package/src/inference/pipelines/text/moe-cpu-gptoss.js +0 -115
- package/src/inference/pipelines/text/moe-cpu.d.ts +0 -13
- package/src/inference/pipelines/text/moe-cpu.js +0 -120
- package/src/inference/pipelines/text/moe-gpu.d.ts +0 -13
- package/src/inference/pipelines/text/moe-gpu.js +0 -653
- package/src/inference/pipelines/text/moe-helpers.d.ts +0 -12
- package/src/inference/pipelines/text/moe-helpers.js +0 -21
- package/src/inference/pipelines/text/moe-impl.d.ts +0 -117
- package/src/inference/pipelines/text/moe-impl.js +0 -9
- package/src/inference/pipelines/text/moe-shape-validator.d.ts +0 -40
- package/src/inference/pipelines/text/moe-shape-validator.js +0 -98
- package/src/inference/pipelines/text/ops.d.ts +0 -167
- package/src/inference/pipelines/text/ops.js +0 -437
- package/src/inference/pipelines/text/probes.d.ts +0 -31
- package/src/inference/pipelines/text/probes.js +0 -171
- package/src/inference/pipelines/text/sampling.d.ts +0 -54
- package/src/inference/pipelines/text/sampling.js +0 -249
- package/src/inference/pipelines/text/state.d.ts +0 -112
- package/src/inference/pipelines/text/state.js +0 -154
- package/src/inference/pipelines/text/types.d.ts +0 -627
- package/src/inference/pipelines/text/types.js +0 -4
- package/src/inference/pipelines/text/weights.d.ts +0 -110
- package/src/inference/pipelines/text/weights.js +0 -173
- package/src/inference/pipelines/text.d.ts +0 -162
- package/src/inference/pipelines/text.js +0 -666
- package/src/inference/pipelines/vision/encoder.js +0 -386
- package/src/inference/pipelines/vision/image-preprocess.js +0 -151
- package/src/inference/pipelines/vision/index.js +0 -173
- package/src/inference/pipelines/vision/ops.js +0 -78
- package/src/inference/pipelines/vision/patch-embed.js +0 -151
- package/src/inference/speculative.d.ts +0 -239
- package/src/inference/speculative.js +0 -402
- package/src/inference/test-harness.d.ts +0 -178
- package/src/inference/test-harness.js +0 -361
- package/src/inference/tokenizer.d.ts +0 -72
- package/src/inference/tokenizer.js +0 -239
- package/src/inference/tokenizers/base.d.ts +0 -39
- package/src/inference/tokenizers/base.js +0 -69
- package/src/inference/tokenizers/bpe.d.ts +0 -27
- package/src/inference/tokenizers/bpe.js +0 -180
- package/src/inference/tokenizers/bundled.d.ts +0 -63
- package/src/inference/tokenizers/bundled.js +0 -1009
- package/src/inference/tokenizers/sentencepiece.d.ts +0 -28
- package/src/inference/tokenizers/sentencepiece.js +0 -401
- package/src/inference/tokenizers/types.d.ts +0 -166
- package/src/inference/tokenizers/types.js +0 -7
- package/src/loader/doppler-loader.d.ts +0 -137
- package/src/loader/doppler-loader.js +0 -1069
- package/src/loader/dtype-utils.d.ts +0 -40
- package/src/loader/dtype-utils.js +0 -61
- package/src/loader/embedding-loader.d.ts +0 -56
- package/src/loader/embedding-loader.js +0 -211
- package/src/loader/experts/expert-cache.d.ts +0 -156
- package/src/loader/experts/expert-cache.js +0 -386
- package/src/loader/experts/expert-loader.d.ts +0 -108
- package/src/loader/experts/expert-loader.js +0 -392
- package/src/loader/final-weights-loader.d.ts +0 -68
- package/src/loader/final-weights-loader.js +0 -268
- package/src/loader/index.d.ts +0 -150
- package/src/loader/index.js +0 -124
- package/src/loader/layer-loader.d.ts +0 -63
- package/src/loader/layer-loader.js +0 -457
- package/src/loader/loader-state.d.ts +0 -51
- package/src/loader/loader-state.js +0 -142
- package/src/loader/loader-types.d.ts +0 -236
- package/src/loader/loader-types.js +0 -4
- package/src/loader/manifest-config.d.ts +0 -97
- package/src/loader/manifest-config.js +0 -134
- package/src/loader/memory-monitor.d.ts +0 -112
- package/src/loader/memory-monitor.js +0 -284
- package/src/loader/multi-model-loader.d.ts +0 -51
- package/src/loader/multi-model-loader.js +0 -133
- package/src/loader/quantization-constants.d.ts +0 -23
- package/src/loader/quantization-constants.js +0 -14
- package/src/loader/shard-cache.d.ts +0 -60
- package/src/loader/shard-cache.js +0 -638
- package/src/loader/shard-resolver.d.ts +0 -12
- package/src/loader/shard-resolver.js +0 -105
- package/src/loader/tensors/tensor-loader.d.ts +0 -157
- package/src/loader/tensors/tensor-loader.js +0 -618
- package/src/loader/tensors/tensor-reader.d.ts +0 -22
- package/src/loader/tensors/tensor-reader.js +0 -113
- package/src/loader/tensors/tensor-role.d.ts +0 -7
- package/src/loader/tensors/tensor-role.js +0 -12
- package/src/loader/weight-downcast.d.ts +0 -62
- package/src/loader/weight-downcast.js +0 -213
- package/src/loader/weights.d.ts +0 -22
- package/src/loader/weights.js +0 -4
- package/src/memory/address-table.d.ts +0 -104
- package/src/memory/address-table.js +0 -114
- package/src/memory/buffer-pool.d.ts +0 -204
- package/src/memory/buffer-pool.js +0 -821
- package/src/memory/capability.d.ts +0 -49
- package/src/memory/capability.js +0 -95
- package/src/memory/heap-manager.d.ts +0 -104
- package/src/memory/heap-manager.js +0 -264
- package/src/memory/unified-detect.d.ts +0 -59
- package/src/memory/unified-detect.js +0 -192
- package/src/rules/converter/execution.rules.json +0 -20
- package/src/rules/converter/tensor-roles.rules.json +0 -13
- package/src/rules/converter/tokenizer.rules.json +0 -7
- package/src/rules/execution-rules-contract-check.d.ts +0 -17
- package/src/rules/execution-rules-contract-check.js +0 -245
- package/src/rules/inference/attention.rules.json +0 -54
- package/src/rules/inference/config.rules.json +0 -58
- package/src/rules/inference/dtype.rules.json +0 -99
- package/src/rules/inference/execution.rules.json +0 -45
- package/src/rules/inference/ffn.rules.json +0 -35
- package/src/rules/inference/kernel-path.rules.json +0 -92
- package/src/rules/inference/layer-pattern.rules.json +0 -16
- package/src/rules/inference/layer.rules.json +0 -7
- package/src/rules/inference/moe.rules.json +0 -48
- package/src/rules/kernels/attention.rules.json +0 -61
- package/src/rules/kernels/conv2d.rules.json +0 -6
- package/src/rules/kernels/depthwise-conv2d.rules.json +0 -6
- package/src/rules/kernels/dequant.rules.json +0 -58
- package/src/rules/kernels/energy.rules.json +0 -22
- package/src/rules/kernels/fused-ffn.rules.json +0 -13
- package/src/rules/kernels/fused-matmul-residual.rules.json +0 -6
- package/src/rules/kernels/fused-matmul-rmsnorm.rules.json +0 -8
- package/src/rules/kernels/gather.rules.json +0 -12
- package/src/rules/kernels/gelu.rules.json +0 -11
- package/src/rules/kernels/grouped-pointwise-conv2d.rules.json +0 -6
- package/src/rules/kernels/groupnorm.rules.json +0 -10
- package/src/rules/kernels/kernel-validator.d.ts +0 -24
- package/src/rules/kernels/kernel-validator.js +0 -160
- package/src/rules/kernels/kv_quantize.rules.json +0 -7
- package/src/rules/kernels/layernorm.rules.json +0 -6
- package/src/rules/kernels/matmul.rules.json +0 -60
- package/src/rules/kernels/modulate.rules.json +0 -6
- package/src/rules/kernels/moe.rules.gptoss.json +0 -105
- package/src/rules/kernels/moe.rules.json +0 -11
- package/src/rules/kernels/moe.rules.mixtral.json +0 -75
- package/src/rules/kernels/pixel_shuffle.rules.json +0 -6
- package/src/rules/kernels/relu.rules.json +0 -6
- package/src/rules/kernels/repeat-channels.rules.json +0 -6
- package/src/rules/kernels/residual.rules.json +0 -12
- package/src/rules/kernels/rmsnorm.rules.json +0 -11
- package/src/rules/kernels/rope.rules.json +0 -6
- package/src/rules/kernels/sample.rules.json +0 -6
- package/src/rules/kernels/sana-linear-attention.rules.json +0 -6
- package/src/rules/kernels/scale.rules.json +0 -6
- package/src/rules/kernels/silu.rules.json +0 -21
- package/src/rules/kernels/softmax.rules.json +0 -25
- package/src/rules/kernels/split-qg.rules.json +0 -6
- package/src/rules/kernels/split-qkv.rules.json +0 -6
- package/src/rules/kernels/upsample2d.rules.json +0 -6
- package/src/rules/layer-pattern-contract-check.d.ts +0 -17
- package/src/rules/layer-pattern-contract-check.js +0 -231
- package/src/rules/loader/tensor-loader.rules.json +0 -15
- package/src/rules/loader/weights.rules.json +0 -41
- package/src/rules/rule-registry.d.ts +0 -77
- package/src/rules/rule-registry.js +0 -243
- package/src/rules/tooling/command-runtime.rules.json +0 -56
- package/src/storage/backends/idb-store.d.ts +0 -52
- package/src/storage/backends/idb-store.js +0 -590
- package/src/storage/backends/memory-store.d.ts +0 -36
- package/src/storage/backends/memory-store.js +0 -242
- package/src/storage/backends/opfs-store.d.ts +0 -41
- package/src/storage/backends/opfs-store.js +0 -473
- package/src/storage/blake3.d.ts +0 -17
- package/src/storage/blake3.js +0 -269
- package/src/storage/download-types.d.ts +0 -157
- package/src/storage/download-types.js +0 -48
- package/src/storage/downloader.d.ts +0 -103
- package/src/storage/downloader.js +0 -1121
- package/src/storage/emulated-vram.d.ts +0 -264
- package/src/storage/emulated-vram.js +0 -576
- package/src/storage/export.d.ts +0 -20
- package/src/storage/export.js +0 -159
- package/src/storage/index.d.ts +0 -256
- package/src/storage/index.js +0 -188
- package/src/storage/inventory.d.ts +0 -26
- package/src/storage/inventory.js +0 -218
- package/src/storage/preflight.d.ts +0 -144
- package/src/storage/preflight.js +0 -316
- package/src/storage/quickstart-downloader.d.ts +0 -157
- package/src/storage/quickstart-downloader.js +0 -268
- package/src/storage/quota.d.ts +0 -150
- package/src/storage/quota.js +0 -304
- package/src/storage/registry.d.ts +0 -28
- package/src/storage/registry.js +0 -131
- package/src/storage/reports.d.ts +0 -20
- package/src/storage/reports.js +0 -94
- package/src/storage/shard-manager.d.ts +0 -151
- package/src/storage/shard-manager.js +0 -850
- package/src/storage/source-artifact-store.d.ts +0 -52
- package/src/storage/source-artifact-store.js +0 -234
- package/src/sw.d.ts +0 -1
- package/src/sw.js +0 -187
- package/src/tooling/browser-command-runner.d.ts +0 -28
- package/src/tooling/browser-command-runner.js +0 -82
- package/src/tooling/command-api-constants.d.ts +0 -9
- package/src/tooling/command-api-constants.js +0 -9
- package/src/tooling/command-api-family-normalizers.d.ts +0 -9
- package/src/tooling/command-api-family-normalizers.js +0 -343
- package/src/tooling/command-api-helpers.d.ts +0 -25
- package/src/tooling/command-api-helpers.js +0 -262
- package/src/tooling/command-api.d.ts +0 -173
- package/src/tooling/command-api.js +0 -76
- package/src/tooling/command-envelope.d.ts +0 -81
- package/src/tooling/command-envelope.js +0 -198
- package/src/tooling/command-runner-shared.d.ts +0 -73
- package/src/tooling/command-runner-shared.js +0 -180
- package/src/tooling/command-runner.html +0 -45
- package/src/tooling/conversion-config-materializer.d.ts +0 -24
- package/src/tooling/conversion-config-materializer.js +0 -97
- package/src/tooling/lean-execution-contract-runner.d.ts +0 -43
- package/src/tooling/lean-execution-contract-runner.js +0 -158
- package/src/tooling/lean-execution-contract.d.ts +0 -16
- package/src/tooling/lean-execution-contract.js +0 -228
- package/src/tooling/node-browser-command-runner.d.ts +0 -34
- package/src/tooling/node-browser-command-runner.js +0 -813
- package/src/tooling/node-command-runner.d.ts +0 -36
- package/src/tooling/node-command-runner.js +0 -168
- package/src/tooling/node-convert-worker-pool.d.ts +0 -16
- package/src/tooling/node-convert-worker-pool.js +0 -186
- package/src/tooling/node-convert-worker.d.ts +0 -1
- package/src/tooling/node-convert-worker.js +0 -60
- package/src/tooling/node-converter.d.ts +0 -1
- package/src/tooling/node-converter.js +0 -1333
- package/src/tooling/node-file-fetch.d.ts +0 -1
- package/src/tooling/node-file-fetch.js +0 -38
- package/src/tooling/node-source-runtime.d.ts +0 -19
- package/src/tooling/node-source-runtime.js +0 -610
- package/src/tooling/node-webgpu.d.ts +0 -6
- package/src/tooling/node-webgpu.js +0 -284
- package/src/tooling/opfs-cache.d.ts +0 -11
- package/src/tooling/opfs-cache.js +0 -191
- package/src/tooling/runtime-input-composition.d.ts +0 -38
- package/src/tooling/runtime-input-composition.js +0 -86
- package/src/tooling/source-runtime-bundle.d.ts +0 -137
- package/src/tooling/source-runtime-bundle.js +0 -711
- package/src/tooling/source-runtime-materializer.d.ts +0 -6
- package/src/tooling/source-runtime-materializer.js +0 -93
- package/src/tooling-exports.browser.d.ts +0 -7
- package/src/tooling-exports.browser.js +0 -2
- package/src/tooling-exports.d.ts +0 -22
- package/src/tooling-exports.js +0 -7
- package/src/tooling-exports.shared.d.ts +0 -105
- package/src/tooling-exports.shared.js +0 -92
- package/src/training/README.md +0 -153
- package/src/training/artifacts.d.ts +0 -160
- package/src/training/artifacts.js +0 -896
- package/src/training/attention-backward.d.ts +0 -30
- package/src/training/attention-backward.js +0 -232
- package/src/training/attention-forward.d.ts +0 -22
- package/src/training/attention-forward.js +0 -82
- package/src/training/autograd.d.ts +0 -51
- package/src/training/autograd.js +0 -408
- package/src/training/checkpoint-watch.d.ts +0 -8
- package/src/training/checkpoint-watch.js +0 -139
- package/src/training/checkpoint.d.ts +0 -36
- package/src/training/checkpoint.js +0 -277
- package/src/training/clip.d.ts +0 -9
- package/src/training/clip.js +0 -55
- package/src/training/dataloader.d.ts +0 -8
- package/src/training/dataloader.js +0 -44
- package/src/training/datasets/index.d.ts +0 -12
- package/src/training/datasets/index.js +0 -6
- package/src/training/datasets/jsonl.d.ts +0 -11
- package/src/training/datasets/jsonl.js +0 -50
- package/src/training/datasets/reploid.d.ts +0 -3
- package/src/training/datasets/reploid.js +0 -36
- package/src/training/datasets/text-pairs.d.ts +0 -21
- package/src/training/datasets/text-pairs.js +0 -42
- package/src/training/datasets/token-batch.d.ts +0 -21
- package/src/training/datasets/token-batch.js +0 -52
- package/src/training/datasets/translation-pairs.d.ts +0 -34
- package/src/training/datasets/translation-pairs.js +0 -49
- package/src/training/distillation/artifacts.d.ts +0 -71
- package/src/training/distillation/artifacts.js +0 -132
- package/src/training/distillation/checkpoint-watch.d.ts +0 -10
- package/src/training/distillation/checkpoint-watch.js +0 -58
- package/src/training/distillation/dataset.d.ts +0 -59
- package/src/training/distillation/dataset.js +0 -337
- package/src/training/distillation/eval.d.ts +0 -34
- package/src/training/distillation/eval.js +0 -310
- package/src/training/distillation/index.d.ts +0 -29
- package/src/training/distillation/index.js +0 -29
- package/src/training/distillation/runtime.d.ts +0 -20
- package/src/training/distillation/runtime.js +0 -121
- package/src/training/distillation/scoreboard.d.ts +0 -6
- package/src/training/distillation/scoreboard.js +0 -8
- package/src/training/distillation/stage-a.d.ts +0 -45
- package/src/training/distillation/stage-a.js +0 -338
- package/src/training/distillation/stage-b.d.ts +0 -24
- package/src/training/distillation/stage-b.js +0 -20
- package/src/training/distillation/student-fixture.d.ts +0 -22
- package/src/training/distillation/student-fixture.js +0 -846
- package/src/training/distillation/suite-data.d.ts +0 -45
- package/src/training/distillation/suite-data.js +0 -189
- package/src/training/export.d.ts +0 -32
- package/src/training/export.js +0 -112
- package/src/training/index.d.ts +0 -62
- package/src/training/index.js +0 -51
- package/src/training/lora-pipeline.d.ts +0 -40
- package/src/training/lora-pipeline.js +0 -793
- package/src/training/lora.d.ts +0 -19
- package/src/training/lora.js +0 -71
- package/src/training/loss-scaling.d.ts +0 -21
- package/src/training/loss-scaling.js +0 -80
- package/src/training/loss.d.ts +0 -10
- package/src/training/loss.js +0 -40
- package/src/training/objectives/base.d.ts +0 -58
- package/src/training/objectives/base.js +0 -38
- package/src/training/objectives/cross_entropy.d.ts +0 -18
- package/src/training/objectives/cross_entropy.js +0 -34
- package/src/training/objectives/distill_kd.d.ts +0 -16
- package/src/training/objectives/distill_kd.js +0 -365
- package/src/training/objectives/distill_triplet.d.ts +0 -16
- package/src/training/objectives/distill_triplet.js +0 -408
- package/src/training/objectives/index.d.ts +0 -12
- package/src/training/objectives/index.js +0 -6
- package/src/training/objectives/ul_stage1_joint.d.ts +0 -16
- package/src/training/objectives/ul_stage1_joint.js +0 -188
- package/src/training/objectives/ul_stage2_base.d.ts +0 -16
- package/src/training/objectives/ul_stage2_base.js +0 -218
- package/src/training/operator-artifacts.d.ts +0 -62
- package/src/training/operator-artifacts.js +0 -140
- package/src/training/operator-command.d.ts +0 -5
- package/src/training/operator-command.js +0 -455
- package/src/training/operator-eval.d.ts +0 -48
- package/src/training/operator-eval.js +0 -230
- package/src/training/operator-scoreboard.d.ts +0 -5
- package/src/training/operator-scoreboard.js +0 -44
- package/src/training/optimizer.d.ts +0 -22
- package/src/training/optimizer.js +0 -127
- package/src/training/runner.d.ts +0 -248
- package/src/training/runner.js +0 -1220
- package/src/training/suite.d.ts +0 -299
- package/src/training/suite.js +0 -2196
- package/src/training/tensor-factory.d.ts +0 -9
- package/src/training/tensor-factory.js +0 -13
- package/src/training/trainer.d.ts +0 -89
- package/src/training/trainer.js +0 -299
- package/src/training/ul_dataset.d.ts +0 -47
- package/src/training/ul_dataset.js +0 -151
- package/src/training/ul_schedule.d.ts +0 -6
- package/src/training/ul_schedule.js +0 -29
- package/src/training/workloads.d.ts +0 -164
- package/src/training/workloads.js +0 -530
- package/src/types/chrome.d.ts +0 -36
- package/src/types/chrome.js +0 -1
- package/src/types/gpu.d.ts +0 -185
- package/src/types/gpu.js +0 -5
- package/src/types/index.d.ts +0 -3
- package/src/types/index.js +0 -3
- package/src/types/inference.d.ts +0 -197
- package/src/types/inference.js +0 -5
- package/src/types/model.d.ts +0 -130
- package/src/types/model.js +0 -5
- package/src/utils/hf-resolve-url.d.ts +0 -16
- package/src/utils/hf-resolve-url.js +0 -17
- package/src/utils/index.d.ts +0 -7
- package/src/utils/index.js +0 -7
- package/src/utils/load-json.d.ts +0 -5
- package/src/utils/load-json.js +0 -23
- package/src/utils/plain-object.d.ts +0 -1
- package/src/utils/plain-object.js +0 -3
- package/src/utils/sha256.d.ts +0 -4
- package/src/utils/sha256.js +0 -135
- package/src/version.d.ts +0 -2
- package/src/version.js +0 -2
- package/tools/convert-safetensors-node.js +0 -233
- package/tools/doppler-cli.js +0 -1452
|
@@ -1,1439 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
import { getDevice, getDeviceEpoch, getDeviceLimits, getKernelCapabilities } from '../device.js';
|
|
4
|
-
import { acquireBuffer } from '../../memory/buffer-pool.js';
|
|
5
|
-
import { createTensor } from '../tensor.js';
|
|
6
|
-
import { KernelBase } from './kernel-base.js';
|
|
7
|
-
import { TILE_SIZES } from './constants.js';
|
|
8
|
-
import { getKernelThresholds, padToQ4KBlock } from '../../config/schema/index.js';
|
|
9
|
-
import { createUniformBufferWithView, getKernelConfig, hasRequiredFeatures } from './utils.js';
|
|
10
|
-
import { dispatchIndirect, recordDispatchIndirect } from './dispatch.js';
|
|
11
|
-
import { releaseUniformBuffer } from '../uniform-cache.js';
|
|
12
|
-
import { log, trace } from '../../debug/index.js';
|
|
13
|
-
import { getKernelPathAttentionVariant, getKernelPathStrict } from '../../config/kernel-path-loader.js';
|
|
14
|
-
import { selectRuleValue as selectKernelRuleValue } from './rule-registry.js';
|
|
15
|
-
import { selectRuleValue as selectSharedRuleValue } from '../../rules/rule-registry.js';
|
|
16
|
-
import { logKernelSelectionOnce } from '../kernel-selection-log.js';
|
|
17
|
-
|
|
18
|
-
// Track if we've logged the attention tier selection (avoid spam).
// Set once in selectAttentionTier when the subgroup tier is chosen.
let loggedAttentionTier = false;
|
|
20
|
-
|
|
21
|
-
/**
 * Read the mandatory `maxKVLen` metadata from a kernel variant's config.
 *
 * @param {string} operation - Kernel operation name (e.g. 'attention').
 * @param {string} variant - Variant name within the operation.
 * @param {string} errorLabel - Human-readable label used in the error message.
 * @returns {number} The variant's maximum supported KV length.
 * @throws {Error} If the config has no finite `variantMetadata.maxKVLen`.
 */
function getRequiredVariantMaxKVLen(operation, variant, errorLabel) {
  const { variantMetadata } = getKernelConfig(operation, variant);
  const limit = variantMetadata?.maxKVLen;
  if (Number.isFinite(limit)) {
    return limit;
  }
  throw new Error(`Kernel config missing ${errorLabel} maxKVLen`);
}
|
|
29
|
-
|
|
30
|
-
/**
 * Max KV length supported by the chunked f16-KV decode kernel,
 * read from the attention kernel config.
 * @returns {number}
 */
function getChunkedMaxKVLen() {
  const operation = 'attention';
  const variant = 'decode_chunked_f16kv';
  return getRequiredVariantMaxKVLen(operation, variant, `${operation}.${variant}`);
}
|
|
33
|
-
|
|
34
|
-
/**
 * Max KV length supported by the tiered f16 decode kernel,
 * read from the attention_tiered kernel config.
 * @returns {number}
 */
function getTieredMaxKVLen() {
  const operation = 'attention_tiered';
  const variant = 'decode_tiered_f16';
  return getRequiredVariantMaxKVLen(operation, variant, `${operation}.${variant}`);
}
|
|
37
|
-
|
|
38
|
-
/**
 * Max KV length supported by the tiered int8/f16-KV decode kernel,
 * read from the attention_tiered_quant kernel config.
 * @returns {number}
 */
function getTieredQuantMaxKVLen() {
  const operation = 'attention_tiered_quant';
  const variant = 'decode_tiered_int8_f16kv';
  return getRequiredVariantMaxKVLen(operation, variant, `${operation}.${variant}`);
}
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
// Lazily-created 4-byte placeholder storage buffer bound when no real
// kv-length buffer exists. Re-created when the device epoch changes
// (i.e. after device loss/recreation) — see getKvLenFallbackBuffer.
let kvLenFallbackBuffer = null;
let kvLenFallbackBufferEpoch = -1;
// Size of one u32 in bytes (4); used for the single-element fallback buffers.
const U32_BYTES = Uint32Array.BYTES_PER_ELEMENT;
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
/**
 * Return a single-u32 zero-filled storage buffer used as a placeholder
 * binding when no real kv-length buffer is supplied. The buffer is cached
 * per device epoch and rebuilt whenever the epoch changes.
 *
 * @param {GPUDevice} device - Device used to (re)create the buffer.
 * @returns {GPUBuffer} The cached fallback buffer for the current epoch.
 */
function getKvLenFallbackBuffer(device) {
  const currentEpoch = getDeviceEpoch();
  const stale = !kvLenFallbackBuffer || kvLenFallbackBufferEpoch !== currentEpoch;
  if (stale) {
    kvLenFallbackBuffer = device.createBuffer({
      label: 'attention_kv_len_fallback',
      size: U32_BYTES,
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
    });
    // Initialize to zero so shaders reading the fallback see kvLen == 0.
    device.queue.writeBuffer(kvLenFallbackBuffer, 0, new Uint32Array([0]));
    kvLenFallbackBufferEpoch = currentEpoch;
  }
  return kvLenFallbackBuffer;
}
|
|
65
|
-
|
|
66
|
-
// Lazily-created 4-byte placeholder storage buffer bound when no real
// page-table buffer exists (non-paged attention). Re-created when the
// device epoch changes — see getPageTableFallbackBuffer.
let pageTableFallbackBuffer = null;
let pageTableFallbackBufferEpoch = -1;
|
|
68
|
-
|
|
69
|
-
/**
 * Return a single-u32 zero-filled storage buffer used as a placeholder
 * page-table binding for non-paged attention. Cached per device epoch and
 * rebuilt whenever the epoch changes.
 *
 * @param {GPUDevice} device - Device used to (re)create the buffer.
 * @returns {GPUBuffer} The cached fallback buffer for the current epoch.
 */
function getPageTableFallbackBuffer(device) {
  const currentEpoch = getDeviceEpoch();
  const stale = !pageTableFallbackBuffer || pageTableFallbackBufferEpoch !== currentEpoch;
  if (stale) {
    pageTableFallbackBuffer = device.createBuffer({
      label: 'attention_page_table_fallback',
      size: U32_BYTES,
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
    });
    // Zero-fill so any accidental read of the fallback table yields page 0.
    device.queue.writeBuffer(pageTableFallbackBuffer, 0, new Uint32Array([0]));
    pageTableFallbackBufferEpoch = currentEpoch;
  }
  return pageTableFallbackBuffer;
}
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
/**
 * Thin KernelBase wrapper that routes pipeline lookup, immediate dispatch,
 * and command recording to the 'attention' kernel operation.
 */
class AttentionKernel extends KernelBase {
  /** Resolve the compute pipeline for the given 'attention' variant. */
  async getPipeline(variant) {
    return this.getPipelineFor('attention', variant);
  }

  /** Dispatch the pipeline immediately with the given bind group and workgroup count. */
  dispatch(pipeline, bindGroup, workgroups) {
    this.dispatchKernel(pipeline, bindGroup, workgroups, 'attention');
  }

  /** Record the dispatch into the given command recorder instead of submitting now. */
  record(recorder, pipeline, bindGroup, workgroups) {
    this.recordKernel(recorder, pipeline, bindGroup, workgroups, 'attention');
  }
}
|
|
111
|
-
|
|
112
|
-
/**
 * Thin KernelBase wrapper that routes pipeline lookup, immediate dispatch,
 * and command recording to the 'attention_tiered' kernel operation.
 */
class AttentionTieredKernel extends KernelBase {
  /** Resolve the compute pipeline for the given 'attention_tiered' variant. */
  async getPipeline(variant) {
    return this.getPipelineFor('attention_tiered', variant);
  }

  /** Dispatch the pipeline immediately with the given bind group and workgroup count. */
  dispatch(pipeline, bindGroup, workgroups) {
    this.dispatchKernel(pipeline, bindGroup, workgroups, 'attention_tiered');
  }

  /** Record the dispatch into the given command recorder instead of submitting now. */
  record(recorder, pipeline, bindGroup, workgroups) {
    this.recordKernel(recorder, pipeline, bindGroup, workgroups, 'attention_tiered');
  }
}
|
|
137
|
-
|
|
138
|
-
/**
 * Thin KernelBase wrapper that routes pipeline lookup, immediate dispatch,
 * and command recording to the 'attention_tiered_quant' kernel operation.
 */
class AttentionTieredQuantKernel extends KernelBase {
  /** Resolve the compute pipeline for the given 'attention_tiered_quant' variant. */
  async getPipeline(variant) {
    return this.getPipelineFor('attention_tiered_quant', variant);
  }

  /** Dispatch the pipeline immediately with the given bind group and workgroup count. */
  dispatch(pipeline, bindGroup, workgroups) {
    this.dispatchKernel(pipeline, bindGroup, workgroups, 'attention_tiered_quant');
  }

  /** Record the dispatch into the given command recorder instead of submitting now. */
  record(recorder, pipeline, bindGroup, workgroups) {
    this.recordKernel(recorder, pipeline, bindGroup, workgroups, 'attention_tiered_quant');
  }
}
|
|
163
|
-
|
|
164
|
-
/**
 * Thin KernelBase wrapper that routes pipeline lookup, immediate dispatch,
 * and command recording to the 'attention_bdpa' kernel operation.
 */
class AttentionBDPAKernel extends KernelBase {
  /** Resolve the compute pipeline for the given 'attention_bdpa' variant. */
  async getPipeline(variant) {
    return this.getPipelineFor('attention_bdpa', variant);
  }

  /** Dispatch the pipeline immediately with the given bind group and workgroup count. */
  dispatch(pipeline, bindGroup, workgroups) {
    this.dispatchKernel(pipeline, bindGroup, workgroups, 'attention_bdpa');
  }

  /** Record the dispatch into the given command recorder instead of submitting now. */
  record(recorder, pipeline, bindGroup, workgroups) {
    this.recordKernel(recorder, pipeline, bindGroup, workgroups, 'attention_bdpa');
  }
}
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
/**
 * Choose an attention execution tier for the current device and shapes.
 *
 * Capability gates are computed from the configured thresholds and the
 * device's shared-memory limit; a caller-forced tier is validated against
 * them (throwing if unsupported), otherwise the tier is picked by the
 * kernel rule registry.
 *
 * @param {number} headDim - Per-head dimension.
 * @param {number} seqLen - Query sequence length (1 means decode).
 * @param {boolean} useF16KV - Whether the KV cache is f16.
 * @param {string|null} forcedTier - Caller-requested tier, or null to auto-select.
 * @param {number} sharedLimit - Device shared (workgroup) memory limit in bytes.
 * @param {object} caps - Kernel capabilities (reads caps.hasSubgroups).
 * @returns {{tier: string, reason: string}} Selected tier plus a short
 *   human-readable reason string for logging.
 * @throws {Error} If forcedTier is one of 'tiled_large' / 'tiled_small' /
 *   'subgroup' and the device cannot support it.
 */
function selectAttentionTier(
  headDim,
  seqLen,
  useF16KV,
  forcedTier,
  sharedLimit,
  caps
) {
  const isDecode = seqLen === 1;
  const thresholds = getKernelThresholds().attention;
  // Shared-memory requirements differ by KV dtype (f16 halves the footprint).
  const largeRequired = useF16KV
    ? thresholds.largeSharedF16
    : thresholds.largeSharedF32;
  const canLarge =
    headDim <= thresholds.largeMaxHeadDim &&
    sharedLimit >= largeRequired;
  const smallRequired = useF16KV
    ? thresholds.smallSharedF16
    : thresholds.smallSharedF32;
  const canSmall =
    headDim <= thresholds.smallMaxHeadDim &&
    sharedLimit >= smallRequired;
  // Subgroup tier is decode-only and additionally needs subgroup support.
  const canSubgroup =
    caps.hasSubgroups &&
    headDim <= thresholds.subgroupMaxHeadDim &&
    sharedLimit >= thresholds.subgroupShared &&
    isDecode;


  let tier = forcedTier;
  let reason = forcedTier ? `forced:${forcedTier}` : '';

  // A forced tier must still satisfy the device capability gates.
  if (tier === 'tiled_large' && !canLarge) {
    throw new Error(`Requested tiled_large but device doesn't support it (headDim=${headDim}, shared=${sharedLimit}).`);
  }
  if (tier === 'tiled_small' && !canSmall) {
    throw new Error(`Requested tiled_small but device doesn't support it (headDim=${headDim}, shared=${sharedLimit}).`);
  }
  if (tier === 'subgroup' && !canSubgroup) {
    throw new Error(`Requested subgroup attention but device doesn't support it (headDim=${headDim}, shared=${sharedLimit}, subgroups=${caps.hasSubgroups}).`);
  }

  if (!tier) {
    // Auto-select: delegate the actual choice to the kernel rule registry.
    tier = selectKernelRuleValue('attention', 'tier', { canSubgroup, canLarge, canSmall, isDecode });
    // Derive a reason label mirroring the capability precedence.
    if (!reason) {
      if (canSubgroup) {
        reason = 'subgroup_capable';
      } else if (canLarge) {
        reason = 'tiled_large_capable';
      } else if (canSmall) {
        reason = 'tiled_small_capable';
      } else if (isDecode) {
        reason = 'decode_streaming_fallback';
      } else {
        reason = 'streaming_fallback';
      }
    }
    // Log the subgroup selection only once per process (avoid spam).
    if (tier === 'subgroup' && !loggedAttentionTier) {
      trace.attn(0, `Using subgroup decode kernel (headDim=${headDim}, hasSubgroups=true)`);
      loggedAttentionTier = true;
    }
  }

  return { tier, reason };
}
|
|
253
|
-
|
|
254
|
-
// Track if we've logged chunked kernel selection (one-shot, avoids log spam).
// Set in resolveAttentionVariant when the chunked variant is chosen.
let loggedChunkedKernel = false;
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
/**
 * Resolve the concrete attention kernel variant name for a selected tier.
 *
 * Computes capability flags (chunked / decode-subgroup / decode-optimized)
 * from thresholds and kernel-config limits, then delegates the final choice
 * to the kernel rule registry's 'variant' rule.
 *
 * @param {string} tier - Tier chosen by selectAttentionTier.
 * @param {boolean} isDecode - True when seqLen === 1.
 * @param {boolean} useF16KV - KV cache is f16.
 * @param {boolean} useF16Q - Query tensor is f16.
 * @param {number} numHeads - Number of attention heads (used only for logging).
 * @param {number} headDim - Per-head dimension.
 * @param {number} kvLen - Current KV cache length.
 * @param {boolean} isPaged - Whether paged KV layout is in use.
 * @param {object} caps - Kernel capabilities (reads hasF16, hasSubgroups).
 * @param {number} sharedLimit - Device shared-memory limit in bytes.
 * @returns {string} The resolved variant name.
 */
function resolveAttentionVariant(
  tier,
  isDecode,
  useF16KV,
  useF16Q,
  numHeads,
  headDim,
  kvLen,
  isPaged,
  caps,
  sharedLimit
) {
  const base = selectKernelRuleValue('attention', 'phase', { isDecode });
  // Full f16 only when both Q and KV are f16; drives the variant suffix.
  const useF16 = useF16KV && useF16Q;
  const suffix = selectKernelRuleValue('attention', 'suffix', { useF16, useF16KV });

  // Check if chunked kernel is viable:
  // - Decode only (seqLen=1)
  // - F16 KV cache
  // - Large headDim (parallelizes across dimensions)
  // - KV length within shared memory limit (from kernel config)
  const chunkedMaxKVLen = getChunkedMaxKVLen();
  const minHeadDimForChunked = getKernelThresholds().attention.minHeadDimForChunked;
  const canUseChunked = isDecode && useF16KV && headDim >= minHeadDimForChunked && kvLen <= chunkedMaxKVLen;
  // Decode-subgroup path: f32 Q and f32 KV only, bounded headDim and kvLen.
  const decodeSubgroupMaxKVLen = chunkedMaxKVLen;
  const decodeSubgroupMaxHeadDim = getKernelThresholds().attention.subgroupMaxHeadDim;
  const canUseDecodeSubgroup = isDecode && !useF16KV && !useF16Q && headDim <= decodeSubgroupMaxHeadDim && kvLen <= decodeSubgroupMaxKVLen;
  // Decode-optimized path: needs f16 KV plus device f16 + subgroup support.
  const canUseDecodeOptimized = isDecode
    && useF16KV
    && caps.hasF16
    && caps.hasSubgroups
    && headDim <= decodeSubgroupMaxHeadDim
    && sharedLimit >= getKernelThresholds().attention.subgroupShared;
  // Candidate variant names, resolved via the rule registry by dtype.
  const chunkedVariant = selectKernelRuleValue('attention', 'chunkedVariant', { useF16 });
  const pagedVariant = selectKernelRuleValue('attention', 'pagedVariant', { useF16 });
  const optimizedVariant = selectKernelRuleValue('attention', 'optimizedVariant', { useF16 });
  // Final choice is made by the 'variant' rule from all computed flags.
  const variant = selectKernelRuleValue(
    'attention',
    'variant',
    {
      tier,
      useF16KV,
      canUseChunked,
      canUseDecodeSubgroup,
      canUseDecodeOptimized,
      base,
      suffix,
      chunkedVariant,
      pagedVariant,
      optimizedVariant,
      isPaged,
      isDecode,
    }
  );

  // One-shot trace when the chunked decode kernel is first selected.
  if (variant === chunkedVariant && !loggedChunkedKernel) {
    trace.attn(0, `Using chunked decode kernel (headDim=${headDim}, numHeads=${numHeads}, f16kv=${!useF16Q})`);
    loggedChunkedKernel = true;
  }

  return variant;
}
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
/**
 * Compute the 1-D workgroup count for an attention dispatch.
 *
 * subgroup: one workgroup per head; streaming: one per (row, head) pair;
 * tiled tiers: heads times the number of row blocks at the tier's block size.
 *
 * @param {string} tier - Attention tier ('subgroup' | 'streaming' | 'tiled_large' | other tiled).
 * @param {number} seqLen - Query sequence length.
 * @param {number} numHeads - Number of attention heads.
 * @returns {number} Total workgroups to dispatch.
 */
function calculateAttentionWorkgroups(tier, seqLen, numHeads) {
  switch (tier) {
    case 'subgroup':
      return numHeads;
    case 'streaming':
      return seqLen * numHeads;
    case 'tiled_large':
      return Math.ceil(seqLen / TILE_SIZES.ATTENTION_LARGE_BLOCK_SIZE) * numHeads;
    default:
      // Any other tier uses the small tile block size.
      return Math.ceil(seqLen / TILE_SIZES.ATTENTION_SMALL_BLOCK_SIZE) * numHeads;
  }
}
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
/**
 * Map a concrete attention variant name back to its execution tier.
 * Check order matters: paged decode variants are classified before the
 * streaming prefixes, and anything unmatched defaults to 'tiled_large'.
 *
 * @param {string} variant - Attention kernel variant name.
 * @returns {string} One of 'subgroup' | 'tiled_large' | 'streaming' | 'tiled_small'.
 */
function inferAttentionTierFromVariant(variant) {
  const startsWithAny = (prefixes) => prefixes.some((p) => variant.startsWith(p));
  if (variant === 'decode_subgroup' || startsWithAny(['decode_online'])) {
    return 'subgroup';
  }
  if (startsWithAny(['decode_paged'])) {
    return 'tiled_large';
  }
  if (variant === 'decode_chunked_f16kv' || startsWithAny(['prefill_streaming', 'decode_streaming'])) {
    return 'streaming';
  }
  if (startsWithAny(['prefill_small', 'decode_small'])) {
    return 'tiled_small';
  }
  return 'tiled_large';
}
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
/**
 * Validate an explicitly requested attention variant against the current
 * device capabilities, dtypes, phase, and shape limits.
 *
 * Performed checks, in order: the variant exists in the kernel config; its
 * required GPU features are present; its f16/f16kv name encoding matches the
 * actual Q/KV dtypes; its prefill/decode prefix matches the requested phase;
 * chunked, subgroup, online, and tiled variants respect their headDim,
 * kvLen, and shared-memory limits.
 *
 * @param {string} variant - Requested variant name (may have surrounding whitespace).
 * @param {boolean} isDecode - True when decode (seqLen === 1) is requested.
 * @param {boolean} useF16KV - KV cache is f16.
 * @param {boolean} useF16Q - Query tensor is f16.
 * @param {object} caps - Kernel capabilities (reads hasSubgroups).
 * @param {number} headDim - Per-head dimension.
 * @param {number} kvLen - Current KV cache length.
 * @param {number} sharedLimit - Device shared-memory limit in bytes.
 * @returns {string} The trimmed (normalized) variant name.
 * @throws {Error} Describing the first failed constraint.
 */
function validateAttentionVariant(
  variant,
  isDecode,
  useF16KV,
  useF16Q,
  caps,
  headDim,
  kvLen,
  sharedLimit
) {
  const normalized = variant.trim();

  // Existence check: unknown variants fail config lookup.
  let config;
  try {
    config = getKernelConfig('attention', normalized);
  } catch {
    throw new Error(`Unknown attention kernel variant "${variant}".`);
  }

  if (!hasRequiredFeatures(config.requires, caps)) {
    throw new Error(`Attention kernel "${variant}" requires unsupported GPU features.`);
  }

  // Dtype encoding in the name: '_f16kv' means f32 Q + f16 KV;
  // a bare '_f16' (without '_f16kv') means fully-f16 Q/K/V.
  const expectsF16KV = normalized.includes('_f16kv');
  const expectsF16 = normalized.includes('_f16') && !expectsF16KV;
  if (expectsF16) {
    if (!(useF16KV && useF16Q)) {
      const kvLabel = selectSharedRuleValue('shared', 'dtype', 'f16OrF32', { useF16: useF16KV });
      const qLabel = selectSharedRuleValue('shared', 'dtype', 'f16OrF32', { useF16: useF16Q });
      throw new Error(`Attention kernel "${variant}" requires f16 Q/K/V but got Q=${qLabel}, KV=${kvLabel}.`);
    }
  } else if (expectsF16KV) {
    if (!useF16KV || useF16Q) {
      const kvLabel = selectSharedRuleValue('shared', 'dtype', 'f16OrF32', { useF16: useF16KV });
      const qLabel = selectSharedRuleValue('shared', 'dtype', 'f16OrF32', { useF16: useF16Q });
      throw new Error(`Attention kernel "${variant}" requires f32 Q with f16 KV but got Q=${qLabel}, KV=${kvLabel}.`);
    }
  } else {
    // No f16 marker in the name: both Q and KV must be f32.
    if (useF16KV || useF16Q) {
      const kvLabel = selectSharedRuleValue('shared', 'dtype', 'f16OrF32', { useF16: useF16KV });
      const qLabel = selectSharedRuleValue('shared', 'dtype', 'f16OrF32', { useF16: useF16Q });
      throw new Error(`Attention kernel "${variant}" requires f32 Q/K/V but got Q=${qLabel}, KV=${kvLabel}.`);
    }
  }

  // Phase check: the name prefix must agree with the requested phase.
  const isDecodeVariant = normalized.startsWith('decode');
  const isPrefillVariant = normalized.startsWith('prefill');
  if (isDecode && isPrefillVariant) {
    throw new Error(`Attention kernel "${variant}" is prefill-only but decode requested.`);
  }
  if (!isDecode && isDecodeVariant) {
    throw new Error(`Attention kernel "${variant}" is decode-only but prefill requested.`);
  }

  const thresholds = getKernelThresholds().attention;
  const chunkedMaxKVLen = getChunkedMaxKVLen();
  const isChunked = normalized.startsWith('decode_chunked');
  // Chunked decode: minimum headDim and config-bounded kvLen.
  if (isChunked) {
    const minHeadDimForChunked = thresholds.minHeadDimForChunked;
    if (headDim < minHeadDimForChunked) {
      throw new Error(`Attention kernel "${variant}" requires headDim >= ${minHeadDimForChunked} but got ${headDim}.`);
    }
    if (kvLen > chunkedMaxKVLen) {
      throw new Error(`Attention kernel "${variant}" requires kvLen <= ${chunkedMaxKVLen} but got ${kvLen}.`);
    }
  }

  // Subgroup decode: subgroup feature, headDim, kvLen, and shared-memory limits.
  if (normalized === 'decode_subgroup') {
    if (!caps.hasSubgroups) {
      throw new Error(`Attention kernel "${variant}" requires subgroup support.`);
    }
    if (headDim > thresholds.subgroupMaxHeadDim) {
      throw new Error(`Attention kernel "${variant}" requires headDim <= ${thresholds.subgroupMaxHeadDim} but got ${headDim}.`);
    }
    if (kvLen > chunkedMaxKVLen) {
      throw new Error(`Attention kernel "${variant}" requires kvLen <= ${chunkedMaxKVLen} but got ${kvLen}.`);
    }
    if (sharedLimit < thresholds.subgroupShared) {
      throw new Error(`Attention kernel "${variant}" requires shared >= ${thresholds.subgroupShared} but got ${sharedLimit}.`);
    }
  }

  // Online decode: same subgroup/headDim/shared checks, but no kvLen cap.
  if (normalized.startsWith('decode_online')) {
    if (!caps.hasSubgroups) {
      throw new Error(`Attention kernel "${variant}" requires subgroup support.`);
    }
    if (headDim > thresholds.subgroupMaxHeadDim) {
      throw new Error(`Attention kernel "${variant}" requires headDim <= ${thresholds.subgroupMaxHeadDim} but got ${headDim}.`);
    }
    if (sharedLimit < thresholds.subgroupShared) {
      throw new Error(`Attention kernel "${variant}" requires shared >= ${thresholds.subgroupShared} but got ${sharedLimit}.`);
    }
  }

  // Tiled variants (anything not streaming/subgroup/online/chunked):
  // tier-specific headDim cap and dtype-dependent shared-memory requirement.
  if (normalized.startsWith('prefill') || normalized.startsWith('decode')) {
    const isSmall = normalized.includes('_small');
    const isStreaming = normalized.includes('_streaming');
    const isTiled = !isStreaming
      && !normalized.startsWith('decode_subgroup')
      && !normalized.startsWith('decode_online')
      && !isChunked;
    if (isTiled) {
      const requiredShared = isSmall
        ? (useF16KV ? thresholds.smallSharedF16 : thresholds.smallSharedF32)
        : (useF16KV ? thresholds.largeSharedF16 : thresholds.largeSharedF32);
      const maxHeadDim = isSmall ? thresholds.smallMaxHeadDim : thresholds.largeMaxHeadDim;
      if (headDim > maxHeadDim) {
        throw new Error(`Attention kernel "${variant}" requires headDim <= ${maxHeadDim} but got ${headDim}.`);
      }
      if (sharedLimit < requiredShared) {
        throw new Error(`Attention kernel "${variant}" requires shared >= ${requiredShared} but got ${sharedLimit}.`);
      }
    }
  }

  return normalized;
}
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
function resolveAttentionPlan(
|
|
468
|
-
seqLen,
|
|
469
|
-
kvLen,
|
|
470
|
-
headDim,
|
|
471
|
-
numHeads,
|
|
472
|
-
kvDtype,
|
|
473
|
-
qDtype,
|
|
474
|
-
sharedLimit,
|
|
475
|
-
caps,
|
|
476
|
-
layerIdx,
|
|
477
|
-
isPaged,
|
|
478
|
-
kernelPath
|
|
479
|
-
) {
|
|
480
|
-
const useF16KV = kvDtype === 'f16';
|
|
481
|
-
const useF16Q = qDtype === 'f16';
|
|
482
|
-
const isDecode = seqLen === 1;
|
|
483
|
-
const phase = selectKernelRuleValue('attention', 'phase', { isDecode });
|
|
484
|
-
const pathVariant = getKernelPathAttentionVariant(phase, layerIdx, kernelPath);
|
|
485
|
-
const strictPath = getKernelPathStrict();
|
|
486
|
-
|
|
487
|
-
if (pathVariant) {
|
|
488
|
-
let variantOverride;
|
|
489
|
-
try {
|
|
490
|
-
variantOverride = validateAttentionVariant(
|
|
491
|
-
pathVariant,
|
|
492
|
-
isDecode,
|
|
493
|
-
useF16KV,
|
|
494
|
-
useF16Q,
|
|
495
|
-
caps,
|
|
496
|
-
headDim,
|
|
497
|
-
kvLen,
|
|
498
|
-
sharedLimit
|
|
499
|
-
);
|
|
500
|
-
} catch (error) {
|
|
501
|
-
if (strictPath) {
|
|
502
|
-
throw error;
|
|
503
|
-
}
|
|
504
|
-
const reason = error instanceof Error ? error.message : String(error);
|
|
505
|
-
log.warn(
|
|
506
|
-
'Attention',
|
|
507
|
-
`Kernel path override "${pathVariant}" rejected; falling back to capability selection: ${reason}`
|
|
508
|
-
);
|
|
509
|
-
const adaptiveSelection = selectAttentionTier(headDim, seqLen, useF16KV, null, sharedLimit, caps);
|
|
510
|
-
const adaptiveVariant = resolveAttentionVariant(
|
|
511
|
-
adaptiveSelection.tier,
|
|
512
|
-
isDecode,
|
|
513
|
-
useF16KV,
|
|
514
|
-
useF16Q,
|
|
515
|
-
numHeads,
|
|
516
|
-
headDim,
|
|
517
|
-
kvLen,
|
|
518
|
-
isPaged,
|
|
519
|
-
caps,
|
|
520
|
-
sharedLimit
|
|
521
|
-
);
|
|
522
|
-
const workgroups = calculateAttentionWorkgroups(adaptiveSelection.tier, seqLen, numHeads);
|
|
523
|
-
logKernelSelectionOnce('attention', {
|
|
524
|
-
variant: adaptiveVariant,
|
|
525
|
-
reason: `path_override_fallback:${adaptiveSelection.tier}`,
|
|
526
|
-
});
|
|
527
|
-
return {
|
|
528
|
-
tier: adaptiveSelection.tier,
|
|
529
|
-
variant: adaptiveVariant,
|
|
530
|
-
workgroups,
|
|
531
|
-
useF16KV,
|
|
532
|
-
isDecode,
|
|
533
|
-
};
|
|
534
|
-
}
|
|
535
|
-
let selectionReason = 'path_override';
|
|
536
|
-
|
|
537
|
-
if (!isDecode && variantOverride.startsWith('prefill_streaming') && seqLen <= 64) {
|
|
538
|
-
const adaptivePrefillVariant = variantOverride.endsWith('_f16kv')
|
|
539
|
-
? 'prefill_f16kv'
|
|
540
|
-
: variantOverride.endsWith('_f16')
|
|
541
|
-
? 'prefill_f16'
|
|
542
|
-
: 'prefill';
|
|
543
|
-
try {
|
|
544
|
-
const validatedAdaptive = validateAttentionVariant(
|
|
545
|
-
adaptivePrefillVariant,
|
|
546
|
-
isDecode,
|
|
547
|
-
useF16KV,
|
|
548
|
-
useF16Q,
|
|
549
|
-
caps,
|
|
550
|
-
headDim,
|
|
551
|
-
kvLen,
|
|
552
|
-
sharedLimit
|
|
553
|
-
);
|
|
554
|
-
if (validatedAdaptive !== variantOverride) {
|
|
555
|
-
variantOverride = validatedAdaptive;
|
|
556
|
-
selectionReason = 'path_override_adaptive_prefill';
|
|
557
|
-
}
|
|
558
|
-
} catch {
|
|
559
|
-
// Keep original strict-path variant when adaptive fallback is not valid.
|
|
560
|
-
}
|
|
561
|
-
}
|
|
562
|
-
|
|
563
|
-
const tier = inferAttentionTierFromVariant(variantOverride);
|
|
564
|
-
const workgroups = calculateAttentionWorkgroups(tier, seqLen, numHeads);
|
|
565
|
-
logKernelSelectionOnce('attention', {
|
|
566
|
-
variant: variantOverride,
|
|
567
|
-
reason: `${selectionReason}:${tier}`,
|
|
568
|
-
});
|
|
569
|
-
return { tier, variant: variantOverride, workgroups, useF16KV, isDecode };
|
|
570
|
-
}
|
|
571
|
-
|
|
572
|
-
const selection = selectAttentionTier(headDim, seqLen, useF16KV, null, sharedLimit, caps);
|
|
573
|
-
const tier = selection.tier;
|
|
574
|
-
const variant = resolveAttentionVariant(
|
|
575
|
-
tier,
|
|
576
|
-
isDecode,
|
|
577
|
-
useF16KV,
|
|
578
|
-
useF16Q,
|
|
579
|
-
numHeads,
|
|
580
|
-
headDim,
|
|
581
|
-
kvLen,
|
|
582
|
-
isPaged,
|
|
583
|
-
caps,
|
|
584
|
-
sharedLimit
|
|
585
|
-
);
|
|
586
|
-
const validatedVariant = validateAttentionVariant(
|
|
587
|
-
variant,
|
|
588
|
-
isDecode,
|
|
589
|
-
useF16KV,
|
|
590
|
-
useF16Q,
|
|
591
|
-
caps,
|
|
592
|
-
headDim,
|
|
593
|
-
kvLen,
|
|
594
|
-
sharedLimit
|
|
595
|
-
);
|
|
596
|
-
const workgroups = calculateAttentionWorkgroups(tier, seqLen, numHeads);
|
|
597
|
-
|
|
598
|
-
logKernelSelectionOnce('attention', {
|
|
599
|
-
variant: validatedVariant,
|
|
600
|
-
reason: selection.reason,
|
|
601
|
-
});
|
|
602
|
-
|
|
603
|
-
return { tier, variant: validatedVariant, workgroups, useF16KV, isDecode };
|
|
604
|
-
}
|
|
605
|
-
|
|
606
|
-
/**
 * Test-only entry point that forwards all arguments unchanged to
 * resolveAttentionPlan, so unit tests can exercise kernel-plan selection
 * without going through the full attention execution path.
 */
export function resolveAttentionPlanForTest(
  seqLen,
  kvLen,
  headDim,
  numHeads,
  kvDtype,
  qDtype,
  sharedLimit,
  caps,
  layerIdx,
  isPaged = false,
  kernelPath = null
) {
  return resolveAttentionPlan(
    seqLen,
    kvLen,
    headDim,
    numHeads,
    kvDtype,
    qDtype,
    sharedLimit,
    caps,
    layerIdx,
    isPaged,
    kernelPath
  );
}
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
/**
 * Builds the uniform buffer consumed by the standard attention kernels.
 * Buffer is 64 bytes: 60 bytes of fields plus 4 bytes of padding to keep
 * the 16-byte alignment WebGPU uniform buffers require. All fields are
 * written little-endian.
 */
function createAttentionUniformBuffer(
  device,
  recorder,
  params
) {
  return createUniformBufferWithView(
    'attention_uniforms',
    64, // 60 bytes used + 4 padding for 16-byte alignment
    (dv) => {
      // All u32 fields as [byteOffset, value] pairs; floats written below.
      const u32Fields = [
        [0, params.numHeads],
        [4, params.numKVHeads],
        [8, params.headDim],
        [12, params.kvLen],
        [16, params.seqLen],
        [24, params.causal ? 1 : 0],
        [28, params.startPos],
        [36, params.slidingWindow], // Sliding window size, 0 = disabled
        [40, params.kvLenSource], // 0 = uniform kvLen, 1 = buffer
        [44, params.kvStart ?? 0],
        [48, params.pageSize ?? 0],
        [52, params.kvLayout ?? 0],
        [56, 0],
      ];
      for (const [offset, value] of u32Fields) {
        dv.setUint32(offset, value, true);
      }
      dv.setFloat32(20, params.scale, true);
      dv.setFloat32(32, params.attnSoftcap, true); // Gemma 2: 50.0, 0 = disabled
    },
    recorder,
    device
  );
}
|
|
664
|
-
|
|
665
|
-
/**
 * Builds the 80-byte uniform buffer for the tiered (hot/cold KV) attention
 * kernel. Fields are written little-endian; the final u32 at offset 64 is
 * zero padding.
 */
function createTieredAttentionUniformBuffer(
  device,
  recorder,
  params
) {
  const writeFields = (dv) => {
    dv.setUint32(0, params.numHeads, true);
    dv.setUint32(4, params.numKVHeads, true);
    dv.setUint32(8, params.headDim, true);
    dv.setUint32(12, params.coldLen, true);
    dv.setUint32(16, params.hotLen, true);
    dv.setUint32(20, params.seqLen, true);
    dv.setFloat32(24, params.scale, true);
    dv.setUint32(28, params.causal ? 1 : 0, true);
    dv.setUint32(32, params.startPos, true);
    dv.setFloat32(36, params.attnSoftcap, true);
    dv.setUint32(40, params.slidingWindow, true);
    dv.setUint32(44, params.hotWindow, true);
    dv.setUint32(48, params.hotStart, true);
    dv.setUint32(52, params.coldPageSize, true);
    dv.setUint32(56, params.coldLayout ?? 0, true);
    dv.setUint32(60, params.hotLayout ?? 1, true);
    dv.setUint32(64, 0, true); // padding
  };
  return createUniformBufferWithView(
    'attention_tiered_uniforms',
    80,
    writeFields,
    recorder,
    device
  );
}
|
|
696
|
-
|
|
697
|
-
/**
 * Builds the 64-byte uniform buffer for the tiered quantized-cold-KV
 * attention kernel. Layout mirrors the tiered variant but replaces the
 * layout fields with the packed-data stride; written little-endian.
 */
function createTieredQuantAttentionUniformBuffer(
  device,
  recorder,
  params
) {
  const writeFields = (dv) => {
    dv.setUint32(0, params.numHeads, true);
    dv.setUint32(4, params.numKVHeads, true);
    dv.setUint32(8, params.headDim, true);
    dv.setUint32(12, params.coldLen, true);
    dv.setUint32(16, params.hotLen, true);
    dv.setUint32(20, params.seqLen, true);
    dv.setFloat32(24, params.scale, true);
    dv.setUint32(28, params.causal ? 1 : 0, true);
    dv.setUint32(32, params.startPos, true);
    dv.setFloat32(36, params.attnSoftcap, true);
    dv.setUint32(40, params.slidingWindow, true);
    dv.setUint32(44, params.hotWindow, true);
    dv.setUint32(48, params.hotStart, true);
    dv.setUint32(52, params.packedStride, true);
    dv.setUint32(56, 0, true); // padding
  };
  return createUniformBufferWithView(
    'attention_tiered_quant_uniforms',
    64,
    writeFields,
    recorder,
    device
  );
}
|
|
726
|
-
|
|
727
|
-
/**
 * Builds the 64-byte uniform buffer for the BDPA attention kernel.
 * Only the first 40 bytes carry data; offsets 40-60 are zero padding.
 * Written little-endian.
 */
function createBDPAAttentionUniformBuffer(
  device,
  recorder,
  params
) {
  return createUniformBufferWithView(
    'attention_bdpa_uniforms',
    64,
    (dv) => {
      dv.setUint32(0, params.numHeads, true);
      dv.setUint32(4, params.numKVHeads, true);
      dv.setUint32(8, params.headDim, true);
      dv.setUint32(12, params.kvLen, true);
      dv.setUint32(16, params.seqLen, true);
      dv.setFloat32(20, params.scale, true);
      dv.setUint32(24, params.causal ? 1 : 0, true);
      dv.setUint32(28, params.startPos, true);
      dv.setFloat32(32, params.attnSoftcap, true);
      dv.setUint32(36, params.slidingWindow, true);
      // Remaining slots are padding to fill the 64-byte allocation.
      for (let offset = 40; offset <= 60; offset += 4) {
        dv.setUint32(offset, 0, true);
      }
    },
    recorder,
    device
  );
}
|
|
757
|
-
|
|
758
|
-
/**
 * Normalizes the optional recorder into an execution context:
 * `recorder` is the recorder or null, and `device` is taken from the
 * recorder when present, otherwise from getDevice().
 */
function resolveAttentionExecution(recorder) {
  const activeRecorder = recorder || null;
  const device = activeRecorder?.device || getDevice();
  return { recorder: activeRecorder, device };
}
|
|
764
|
-
|
|
765
|
-
/**
 * Validates that a bind-group entry is backed by a GPUBuffer and throws a
 * descriptive error otherwise. In environments where the GPUBuffer
 * constructor is not defined, any truthy value is accepted.
 *
 * @param {string} kernelName - Kernel family name for the error message.
 * @param {string} variant - Kernel variant for the error message.
 * @param {number} bindingIndex - Binding slot being checked.
 * @param {string} bindingLabel - Human-readable binding name.
 * @param {*} buffer - Candidate buffer value.
 * @param {string[]} [details] - Extra context strings; falsy entries dropped.
 * @throws {Error} When `buffer` is not a usable GPUBuffer.
 */
function assertAttentionBindGroupBuffer(kernelName, variant, bindingIndex, bindingLabel, buffer, details = []) {
  const hasCtor = typeof GPUBuffer !== 'undefined';
  const acceptable = Boolean(buffer) && (!hasCtor || buffer instanceof GPUBuffer);
  if (acceptable) {
    return;
  }
  const extra = details.filter(Boolean).join(', ');
  const suffix = extra ? ` (${extra})` : '';
  throw new Error(
    `[${kernelName}] variant="${variant}" binding ${bindingIndex} "${bindingLabel}" requires a GPUBuffer${suffix}.`
  );
}
|
|
781
|
-
|
|
782
|
-
/**
 * Returns a pooled uniform buffer in immediate-dispatch mode. Recorded
 * executions skip the release so the buffer stays alive until the recorder
 * is flushed.
 */
function releaseAttentionUniform(execution, uniformBuffer) {
  if (execution.recorder) {
    return;
  }
  releaseUniformBuffer(uniformBuffer);
}
|
|
787
|
-
|
|
788
|
-
/**
 * Routes a kernel launch through the recorder when one is active, or
 * dispatches it immediately otherwise.
 */
function dispatchAttentionKernel(execution, kernel, pipeline, bindGroup, workgroups) {
  const { recorder } = execution;
  if (recorder) {
    kernel.record(recorder, pipeline, bindGroup, workgroups);
  } else {
    kernel.dispatch(pipeline, bindGroup, workgroups);
  }
}
|
|
795
|
-
|
|
796
|
-
/**
 * Executes (or records, when `recorder` is provided) the BDPA decode
 * attention kernel.
 *
 * Requires seqLen === 1 (decode only), f16 Q/basisK/basisV tensors, and
 * GPU-resident ropeCos/ropeSin buffers. Dispatches one workgroup per head
 * and returns a tensor of shape [seqLen, numHeads, headDim].
 *
 * Fix vs. previous revision: the ropeCos/ropeSin GPUBuffer check is now
 * guarded with `typeof GPUBuffer === 'undefined'` (matching
 * assertAttentionBindGroupBuffer), so environments without a global
 * GPUBuffer constructor raise the intended descriptive Error instead of a
 * ReferenceError.
 *
 * @throws {Error} On invalid seqLen/dtypes/rope buffers, unsupported GPU
 *   features, kvLen beyond the variant's maxKVLen, or missing outputDtype.
 */
async function executeAttentionBDPA(
  recorder,
  Q,
  basisK,
  basisV,
  pagedK,
  pagedV,
  index,
  numHeads,
  headDim,
  options = {}
) {
  const execution = resolveAttentionExecution(recorder);
  const {
    seqLen = 1,
    kvLen = seqLen,
    numKVHeads = numHeads,
    scale = 1.0 / Math.sqrt(headDim),
    causal = true,
    startPos = 0,
    outputBuffer = null,
    attnSoftcap = 0,
    slidingWindow = 0,
    ropeCos = null,
    ropeSin = null,
  } = options;

  if (seqLen !== 1) {
    throw new Error(`BDPA attention currently supports decode only (seqLen=1), got seqLen=${seqLen}.`);
  }
  if (Q.dtype !== 'f16' || basisK.dtype !== 'f16' || basisV.dtype !== 'f16') {
    throw new Error(`BDPA attention requires f16 Q/basis tensors; got Q=${Q.dtype}, basisK=${basisK.dtype}, basisV=${basisV.dtype}.`);
  }
  // Guarded instanceof check: without the typeof guard, referencing a missing
  // global GPUBuffer would throw ReferenceError rather than this Error.
  const isGpuBuffer = (buf) =>
    Boolean(buf) && (typeof GPUBuffer === 'undefined' || buf instanceof GPUBuffer);
  if (!isGpuBuffer(ropeCos) || !isGpuBuffer(ropeSin)) {
    throw new Error('BDPA attention requires GPU ropeCos/ropeSin buffers.');
  }

  const variant = 'decode_bdpa_f16';
  const caps = getKernelCapabilities();
  const config = getKernelConfig('attention_bdpa', variant);
  if (!hasRequiredFeatures(config.requires, caps)) {
    throw new Error(`BDPA attention kernel "${variant}" requires unsupported GPU features.`);
  }
  const maxKVLen = config.variantMetadata?.maxKVLen;
  if (Number.isFinite(maxKVLen) && kvLen > maxKVLen) {
    throw new Error(`BDPA attention requires kvLen <= ${maxKVLen} but got ${kvLen}.`);
  }

  const kernel = new AttentionBDPAKernel(execution.device);
  const pipeline = await kernel.getPipeline(variant);

  const outputDtype = config.outputDtype;
  if (!outputDtype) {
    throw new Error(`Kernel config missing outputDtype for attention_bdpa variant "${variant}".`);
  }
  // Output allocation: hidden size padded to the Q4K block boundary.
  const bytesPerElement = outputDtype === 'f16' ? 2 : 4;
  const paddedHiddenSize = padToQ4KBlock(numHeads * headDim);
  const outputSize = seqLen * paddedHiddenSize * bytesPerElement;
  const outputBuf = outputBuffer || acquireBuffer(outputSize, undefined, 'attention_bdpa_output');

  const uniformBuffer = createBDPAAttentionUniformBuffer(execution.device, execution.recorder, {
    numHeads,
    numKVHeads,
    headDim,
    kvLen,
    seqLen,
    scale,
    causal,
    startPos,
    attnSoftcap,
    slidingWindow,
  });

  // Validate every binding up front so bind-group creation fails with a
  // descriptive error instead of an opaque WebGPU validation error.
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 0, 'uniforms', uniformBuffer);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 1, 'Q', Q?.buffer, [
    `QLabel=${Q?.label ?? 'unknown'}`,
    `QDtype=${Q?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 2, 'basisK', basisK?.buffer, [
    `basisKLabel=${basisK?.label ?? 'unknown'}`,
    `basisKDtype=${basisK?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 3, 'basisV', basisV?.buffer, [
    `basisVLabel=${basisV?.label ?? 'unknown'}`,
    `basisVDtype=${basisV?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 4, 'pagedK', pagedK);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 5, 'pagedV', pagedV);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 6, 'index', index);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 7, 'ropeCos', ropeCos);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 8, 'ropeSin', ropeSin);
  assertAttentionBindGroupBuffer('attention_bdpa', variant, 9, 'output', outputBuf);

  const bindGroup = execution.device.createBindGroup({
    label: 'attention_bdpa_bind_group',
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: uniformBuffer } },
      { binding: 1, resource: { buffer: Q.buffer } },
      { binding: 2, resource: { buffer: basisK.buffer } },
      { binding: 3, resource: { buffer: basisV.buffer } },
      { binding: 4, resource: { buffer: pagedK } },
      { binding: 5, resource: { buffer: pagedV } },
      { binding: 6, resource: { buffer: index } },
      { binding: 7, resource: { buffer: ropeCos } },
      { binding: 8, resource: { buffer: ropeSin } },
      { binding: 9, resource: { buffer: outputBuf } },
    ],
  });

  // One workgroup per attention head.
  dispatchAttentionKernel(execution, kernel, pipeline, bindGroup, numHeads);
  releaseAttentionUniform(execution, uniformBuffer);

  return createTensor(outputBuf, outputDtype, [seqLen, numHeads, headDim], 'attention_bdpa_output');
}
|
|
911
|
-
|
|
912
|
-
/**
 * Executes (or records, when `recorder` is provided) the standard attention
 * kernel for one layer.
 *
 * Selects a kernel variant via resolveAttentionPlan based on shapes, dtypes,
 * device limits, and the optional kernelPath override, builds the uniform
 * and bind group, then dispatches either directly, via the recorder, or via
 * an indirect-dispatch buffer. Returns a tensor of shape
 * [seqLen, numHeads, headDim].
 *
 * Note: the `mask` parameter is accepted for interface compatibility but is
 * not consumed in this body.
 */
async function executeAttention(
  recorder,
  Q,
  K,
  V,
  mask,
  numHeads,
  headDim,
  options = {}
) {
  const execution = resolveAttentionExecution(recorder);
  const {
    seqLen = 1,
    kvLen = seqLen,
    numKVHeads = numHeads,
    scale = 1.0 / Math.sqrt(headDim),
    causal = true,
    startPos = 0,
    layerIdx,
    outputBuffer = null,
    attnSoftcap = 0,
    slidingWindow = 0,
    kvLenBuffer = null,
    indirectBuffer = null,
    indirectOffset = 0,
    kvStart = 0,
    kvLayout = 'contiguous',
    kvPageTable = null,
    kvPageSize = 0,
    kernelPath = null,
  } = options;

  const limits = getDeviceLimits();
  const sharedLimit = limits?.maxComputeWorkgroupStorageSize ?? Infinity;
  const caps = getKernelCapabilities();

  const kvDtype = K.dtype;
  const qDtype = Q.dtype;
  const isPaged = kvLayout === 'paged';
  const plan = resolveAttentionPlan(
    seqLen,
    kvLen,
    headDim,
    numHeads,
    kvDtype,
    qDtype,
    sharedLimit,
    caps,
    layerIdx,
    isPaged,
    kernelPath
  );

  if (execution.recorder) {
    trace.attn(0, `recordAttention: isDecode=${plan.isDecode}, tier=${plan.tier}, variant=${plan.variant}, seqLen=${seqLen}, kvLen=${kvLen}, numHeads=${numHeads}, headDim=${headDim}, useF16KV=${plan.useF16KV}`);
  }

  const kernel = new AttentionKernel(execution.device);
  const pipeline = await kernel.getPipeline(plan.variant);

  const outputConfig = getKernelConfig('attention', plan.variant);
  const outputDtype = outputConfig.outputDtype;
  if (!outputDtype) {
    // Distinct messages per execution mode are preserved from the original.
    if (execution.recorder) {
      throw new Error(`Kernel config missing outputDtype for attention variant "${plan.variant}".`);
    }
    throw new Error(`[Attention] outputDtype is required for variant "${plan.variant}".`);
  }
  const bytesPerElement = outputDtype === 'f16' ? 2 : 4;
  const paddedHiddenSize = padToQ4KBlock(numHeads * headDim);
  const outputSize = seqLen * paddedHiddenSize * bytesPerElement;
  const outputBuf = outputBuffer || acquireBuffer(outputSize, undefined, 'attention_output');

  // kvLayout string -> numeric code: contiguous=0, ring=1, paged=2.
  let kvLayoutCode = 0;
  if (kvLayout === 'paged') {
    kvLayoutCode = 2;
  } else if (kvLayout === 'ring') {
    kvLayoutCode = 1;
  }
  const uniformBuffer = createAttentionUniformBuffer(execution.device, execution.recorder, {
    numHeads,
    numKVHeads,
    headDim,
    kvLen,
    seqLen,
    scale,
    causal,
    startPos,
    attnSoftcap,
    slidingWindow,
    kvLenSource: kvLenBuffer ? 1 : 0,
    kvStart,
    pageSize: kvPageSize,
    kvLayout: kvLayoutCode,
  });

  // Optional bindings fall back to shared dummy buffers so the layout is
  // always fully populated.
  const kvLenBinding = kvLenBuffer || getKvLenFallbackBuffer(execution.device);
  const pageTableBinding = kvPageTable || getPageTableFallbackBuffer(execution.device);
  assertAttentionBindGroupBuffer('attention', plan.variant, 0, 'uniforms', uniformBuffer);
  assertAttentionBindGroupBuffer('attention', plan.variant, 1, 'Q', Q?.buffer, [
    `QLabel=${Q?.label ?? 'unknown'}`,
    `QDtype=${Q?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention', plan.variant, 2, 'K', K?.buffer, [
    `KLabel=${K?.label ?? 'unknown'}`,
    `KDtype=${K?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention', plan.variant, 3, 'V', V?.buffer, [
    `VLabel=${V?.label ?? 'unknown'}`,
    `VDtype=${V?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention', plan.variant, 4, 'output', outputBuf);
  assertAttentionBindGroupBuffer('attention', plan.variant, 5, 'kvLen', kvLenBinding);
  assertAttentionBindGroupBuffer('attention', plan.variant, 6, 'pageTable', pageTableBinding, [
    `kvLayout=${kvLayout}`,
  ]);
  const bindGroup = execution.device.createBindGroup({
    label: 'attention_bind_group',
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: uniformBuffer } },
      { binding: 1, resource: { buffer: Q.buffer } },
      { binding: 2, resource: { buffer: K.buffer } },
      { binding: 3, resource: { buffer: V.buffer } },
      { binding: 4, resource: { buffer: outputBuf } },
      { binding: 5, resource: { buffer: kvLenBinding } },
      { binding: 6, resource: { buffer: pageTableBinding } },
    ],
  });

  // Direct dispatches must respect the per-dimension workgroup limit;
  // indirect dispatches read their counts from the GPU buffer instead.
  if (!indirectBuffer && limits && plan.workgroups > limits.maxComputeWorkgroupsPerDimension) {
    throw new Error(
      `Attention dispatch requires ${plan.workgroups} workgroups but device limit is ` +
      `${limits.maxComputeWorkgroupsPerDimension}. Reduce prompt length or use streaming attention.`
    );
  }

  if (indirectBuffer) {
    if (execution.recorder) {
      recordDispatchIndirect(execution.recorder, pipeline, bindGroup, indirectBuffer, indirectOffset, 'attention');
    } else {
      dispatchIndirect(execution.device, pipeline, bindGroup, indirectBuffer, indirectOffset, 'attention');
    }
  } else {
    dispatchAttentionKernel(execution, kernel, pipeline, bindGroup, plan.workgroups);
  }

  releaseAttentionUniform(execution, uniformBuffer);

  return createTensor(outputBuf, outputDtype, [seqLen, numHeads, headDim], 'attention_output');
}
|
|
1057
|
-
|
|
1058
|
-
/**
 * Executes (or records) the tiered attention kernel, which reads a "hot"
 * recent KV region and a "cold" (possibly paged) KV region in one pass.
 *
 * Requires f16 hot/cold KV buffers and coldLen + hotLen within the tiered
 * kernel's max KV length. Dispatches one workgroup per head and returns a
 * tensor of shape [seqLen, numHeads, headDim].
 */
async function executeAttentionTiered(
  recorder,
  Q,
  hotK,
  hotV,
  coldK,
  coldV,
  numHeads,
  headDim,
  options = {}
) {
  const execution = resolveAttentionExecution(recorder);
  const {
    seqLen = 1,
    coldLen = 0,
    hotLen = 0,
    numKVHeads = numHeads,
    scale = 1.0 / Math.sqrt(headDim),
    causal = true,
    startPos = 0,
    outputBuffer = null,
    attnSoftcap = 0,
    slidingWindow = 0,
    hotWindow = hotLen,
    hotStart = 0,
    coldPageTable = null,
    coldPageSize = 0,
    coldLayout = 2,
    hotLayout = 1,
  } = options;

  const totalLen = coldLen + hotLen;
  const maxKVLen = getTieredMaxKVLen();
  if (totalLen > maxKVLen) {
    throw new Error(`Tiered attention requires total KV len <= ${maxKVLen} but got ${totalLen}.`);
  }

  const useF16 = Q.dtype === 'f16' && hotK.dtype === 'f16' && coldK.dtype === 'f16';
  const useF16KV = hotK.dtype === 'f16' && coldK.dtype === 'f16';
  const variant = selectKernelRuleValue('attention', 'tieredVariant', { useF16 });
  const caps = getKernelCapabilities();
  const config = getKernelConfig('attention_tiered', variant);
  if (!hasRequiredFeatures(config.requires, caps)) {
    throw new Error(`Tiered attention kernel "${variant}" requires unsupported GPU features.`);
  }
  if (!useF16KV) {
    throw new Error('Tiered attention requires f16 KV buffers.');
  }

  const kernel = new AttentionTieredKernel(execution.device);
  const pipeline = await kernel.getPipeline(variant);

  const outputDtype = config.outputDtype;
  if (!outputDtype) {
    throw new Error(`Kernel config missing outputDtype for attention_tiered variant "${variant}".`);
  }
  const bytesPerElement = outputDtype === 'f16' ? 2 : 4;
  const paddedHiddenSize = padToQ4KBlock(numHeads * headDim);
  const outputSize = seqLen * paddedHiddenSize * bytesPerElement;
  const outputBuf = outputBuffer || acquireBuffer(outputSize, undefined, 'attention_tiered_output');

  const uniformBuffer = createTieredAttentionUniformBuffer(execution.device, execution.recorder, {
    numHeads,
    numKVHeads,
    headDim,
    coldLen,
    hotLen,
    seqLen,
    scale,
    causal,
    startPos,
    attnSoftcap,
    slidingWindow,
    hotWindow,
    hotStart,
    coldPageSize,
    coldLayout,
    hotLayout,
  });

  // Optional cold page table falls back to the shared dummy buffer.
  const pageTableBinding = coldPageTable || getPageTableFallbackBuffer(execution.device);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 0, 'uniforms', uniformBuffer);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 1, 'Q', Q?.buffer, [
    `QLabel=${Q?.label ?? 'unknown'}`,
    `QDtype=${Q?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 2, 'hotK', hotK?.buffer, [
    `hotKLabel=${hotK?.label ?? 'unknown'}`,
    `hotKDtype=${hotK?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 3, 'hotV', hotV?.buffer, [
    `hotVLabel=${hotV?.label ?? 'unknown'}`,
    `hotVDtype=${hotV?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 4, 'coldK', coldK?.buffer, [
    `coldKLabel=${coldK?.label ?? 'unknown'}`,
    `coldKDtype=${coldK?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 5, 'coldV', coldV?.buffer, [
    `coldVLabel=${coldV?.label ?? 'unknown'}`,
    `coldVDtype=${coldV?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 6, 'output', outputBuf);
  assertAttentionBindGroupBuffer('attention_tiered', variant, 7, 'pageTable', pageTableBinding, [
    `coldLayout=${coldLayout}`,
  ]);
  const bindGroup = execution.device.createBindGroup({
    label: 'attention_tiered_bind_group',
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: uniformBuffer } },
      { binding: 1, resource: { buffer: Q.buffer } },
      { binding: 2, resource: { buffer: hotK.buffer } },
      { binding: 3, resource: { buffer: hotV.buffer } },
      { binding: 4, resource: { buffer: coldK.buffer } },
      { binding: 5, resource: { buffer: coldV.buffer } },
      { binding: 6, resource: { buffer: outputBuf } },
      { binding: 7, resource: { buffer: pageTableBinding } },
    ],
  });

  // One workgroup per attention head.
  dispatchAttentionKernel(execution, kernel, pipeline, bindGroup, numHeads);
  releaseAttentionUniform(execution, uniformBuffer);

  return createTensor(outputBuf, outputDtype, [seqLen, numHeads, headDim], 'attention_tiered_output');
}
|
|
1184
|
-
|
|
1185
|
-
/**
 * Executes (or records) the tiered attention kernel whose cold KV region is
 * stored quantized (packed data + per-block scales) while the hot region
 * stays in full precision.
 *
 * Requires f32 Q, packedStride > 0, and coldLen + hotLen within the quant
 * kernel's max KV length. Dispatches one workgroup per head and returns a
 * tensor of shape [seqLen, numHeads, headDim].
 */
async function executeAttentionTieredQuant(
  recorder,
  Q,
  hotK,
  hotV,
  coldPackedK,
  coldPackedV,
  coldScalesK,
  coldScalesV,
  numHeads,
  headDim,
  options = {}
) {
  const execution = resolveAttentionExecution(recorder);
  const {
    seqLen = 1,
    coldLen = 0,
    hotLen = 0,
    numKVHeads = numHeads,
    scale = 1.0 / Math.sqrt(headDim),
    causal = true,
    startPos = 0,
    outputBuffer = null,
    attnSoftcap = 0,
    slidingWindow = 0,
    hotWindow = hotLen,
    hotStart = 0,
    packedStride = 0,
    mode = 'int8',
  } = options;

  const totalLen = coldLen + hotLen;
  const maxKVLen = getTieredQuantMaxKVLen();
  if (totalLen > maxKVLen) {
    throw new Error(`Tiered quant attention requires total KV len <= ${maxKVLen} but got ${totalLen}.`);
  }
  if (!Number.isFinite(packedStride) || packedStride <= 0) {
    throw new Error('Tiered quant attention requires packedStride > 0.');
  }

  if (Q.dtype !== 'f32') {
    throw new Error('Tiered quant attention requires f32 Q.');
  }

  const variant = selectKernelRuleValue('attention', 'tieredQuantVariant', { mode });
  const caps = getKernelCapabilities();
  const config = getKernelConfig('attention_tiered_quant', variant);
  if (!hasRequiredFeatures(config.requires, caps)) {
    throw new Error(`Tiered quant attention kernel "${variant}" requires unsupported GPU features.`);
  }

  const kernel = new AttentionTieredQuantKernel(execution.device);
  const pipeline = await kernel.getPipeline(variant);

  const outputDtype = config.outputDtype;
  if (!outputDtype) {
    throw new Error(`Kernel config missing outputDtype for attention_tiered_quant variant "${variant}".`);
  }
  const bytesPerElement = outputDtype === 'f16' ? 2 : 4;
  const paddedHiddenSize = padToQ4KBlock(numHeads * headDim);
  const outputSize = seqLen * paddedHiddenSize * bytesPerElement;
  const outputBuf = outputBuffer || acquireBuffer(outputSize, undefined, 'attention_tiered_quant_output');

  const uniformBuffer = createTieredQuantAttentionUniformBuffer(execution.device, execution.recorder, {
    numHeads,
    numKVHeads,
    headDim,
    coldLen,
    hotLen,
    seqLen,
    scale,
    causal,
    startPos,
    attnSoftcap,
    slidingWindow,
    hotWindow,
    hotStart,
    packedStride,
  });

  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 0, 'uniforms', uniformBuffer);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 1, 'Q', Q?.buffer, [
    `QLabel=${Q?.label ?? 'unknown'}`,
    `QDtype=${Q?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 2, 'hotK', hotK?.buffer, [
    `hotKLabel=${hotK?.label ?? 'unknown'}`,
    `hotKDtype=${hotK?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 3, 'hotV', hotV?.buffer, [
    `hotVLabel=${hotV?.label ?? 'unknown'}`,
    `hotVDtype=${hotV?.dtype ?? 'unknown'}`,
  ]);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 4, 'coldPackedK', coldPackedK);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 5, 'coldPackedV', coldPackedV);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 6, 'coldScalesK', coldScalesK);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 7, 'coldScalesV', coldScalesV);
  assertAttentionBindGroupBuffer('attention_tiered_quant', variant, 8, 'output', outputBuf);
  const bindGroup = execution.device.createBindGroup({
    label: 'attention_tiered_quant_bind_group',
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: uniformBuffer } },
      { binding: 1, resource: { buffer: Q.buffer } },
      { binding: 2, resource: { buffer: hotK.buffer } },
      { binding: 3, resource: { buffer: hotV.buffer } },
      { binding: 4, resource: { buffer: coldPackedK } },
      { binding: 5, resource: { buffer: coldPackedV } },
      { binding: 6, resource: { buffer: coldScalesK } },
      { binding: 7, resource: { buffer: coldScalesV } },
      { binding: 8, resource: { buffer: outputBuf } },
    ],
  });

  // One workgroup per attention head.
  dispatchAttentionKernel(execution, kernel, pipeline, bindGroup, numHeads);
  releaseAttentionUniform(execution, uniformBuffer);

  return createTensor(outputBuf, outputDtype, [seqLen, numHeads, headDim], 'attention_tiered_quant_output');
}
|
|
1304
|
-
|
|
1305
|
-
/**
 * Run basis-decomposed paged attention (BDPA) on the immediate path.
 *
 * Thin convenience wrapper: forwards every argument unchanged to
 * `executeAttentionBDPA`, passing `null` in the recorder slot (contrast
 * with `recordAttentionBDPA`, which supplies a recorder).
 *
 * @param {object} Q - Query tensor (see executeAttentionBDPA).
 * @param {object} basisK - Basis keys.
 * @param {object} basisV - Basis values.
 * @param {object} pagedK - Paged key storage.
 * @param {object} pagedV - Paged value storage.
 * @param {object} index - Page index structure.
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttentionBDPA resolves to.
 */
export async function runAttentionBDPA(Q, basisK, basisV, pagedK, pagedV, index, numHeads, headDim, options = {}) {
  return executeAttentionBDPA(null, Q, basisK, basisV, pagedK, pagedV, index, numHeads, headDim, options);
}
|
|
1318
|
-
|
|
1319
|
-
/**
 * Record basis-decomposed paged attention (BDPA) into a command recorder.
 *
 * Thin convenience wrapper: identical to `runAttentionBDPA` except that
 * the caller-supplied `recorder` is threaded through to
 * `executeAttentionBDPA` instead of `null`.
 *
 * @param {object} recorder - Command recorder the kernel is recorded into.
 * @param {object} Q - Query tensor (see executeAttentionBDPA).
 * @param {object} basisK - Basis keys.
 * @param {object} basisV - Basis values.
 * @param {object} pagedK - Paged key storage.
 * @param {object} pagedV - Paged value storage.
 * @param {object} index - Page index structure.
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttentionBDPA resolves to.
 */
export async function recordAttentionBDPA(recorder, Q, basisK, basisV, pagedK, pagedV, index, numHeads, headDim, options = {}) {
  return executeAttentionBDPA(recorder, Q, basisK, basisV, pagedK, pagedV, index, numHeads, headDim, options);
}
|
|
1333
|
-
|
|
1334
|
-
/**
 * Run standard scaled-dot-product attention on the immediate path.
 *
 * Thin convenience wrapper: forwards every argument unchanged to
 * `executeAttention` with `null` in the recorder slot (contrast with
 * `recordAttention`).
 *
 * @param {object} Q - Query tensor (see executeAttention).
 * @param {object} K - Key tensor.
 * @param {object} V - Value tensor.
 * @param {object} mask - Attention mask (shape/semantics per executeAttention).
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttention resolves to.
 */
export async function runAttention(Q, K, V, mask, numHeads, headDim, options = {}) {
  return executeAttention(null, Q, K, V, mask, numHeads, headDim, options);
}
|
|
1345
|
-
|
|
1346
|
-
/**
 * Record standard scaled-dot-product attention into a command recorder.
 *
 * Thin convenience wrapper: identical to `runAttention` except the
 * caller-supplied `recorder` is forwarded to `executeAttention` instead
 * of `null`.
 *
 * @param {object} recorder - Command recorder the kernel is recorded into.
 * @param {object} Q - Query tensor (see executeAttention).
 * @param {object} K - Key tensor.
 * @param {object} V - Value tensor.
 * @param {object} mask - Attention mask (shape/semantics per executeAttention).
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttention resolves to.
 */
export async function recordAttention(recorder, Q, K, V, mask, numHeads, headDim, options = {}) {
  return executeAttention(recorder, Q, K, V, mask, numHeads, headDim, options);
}
|
|
1358
|
-
|
|
1359
|
-
/**
 * Run tiered (hot/cold KV cache) attention on the immediate path.
 *
 * Thin convenience wrapper: forwards every argument unchanged to
 * `executeAttentionTiered` with `null` in the recorder slot (contrast
 * with `recordAttentionTiered`).
 *
 * @param {object} Q - Query tensor (see executeAttentionTiered).
 * @param {object} hotK - Hot-tier keys.
 * @param {object} hotV - Hot-tier values.
 * @param {object} coldK - Cold-tier keys.
 * @param {object} coldV - Cold-tier values.
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttentionTiered resolves to.
 */
export async function runAttentionTiered(Q, hotK, hotV, coldK, coldV, numHeads, headDim, options = {}) {
  return executeAttentionTiered(null, Q, hotK, hotV, coldK, coldV, numHeads, headDim, options);
}
|
|
1371
|
-
|
|
1372
|
-
/**
 * Record tiered (hot/cold KV cache) attention into a command recorder.
 *
 * Thin convenience wrapper: identical to `runAttentionTiered` except the
 * caller-supplied `recorder` is forwarded to `executeAttentionTiered`
 * instead of `null`.
 *
 * @param {object} recorder - Command recorder the kernel is recorded into.
 * @param {object} Q - Query tensor (see executeAttentionTiered).
 * @param {object} hotK - Hot-tier keys.
 * @param {object} hotV - Hot-tier values.
 * @param {object} coldK - Cold-tier keys.
 * @param {object} coldV - Cold-tier values.
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttentionTiered resolves to.
 */
export async function recordAttentionTiered(recorder, Q, hotK, hotV, coldK, coldV, numHeads, headDim, options = {}) {
  return executeAttentionTiered(recorder, Q, hotK, hotV, coldK, coldV, numHeads, headDim, options);
}
|
|
1385
|
-
|
|
1386
|
-
/**
 * Run tiered attention with a quantized cold tier on the immediate path.
 *
 * Thin convenience wrapper: forwards every argument unchanged to
 * `executeAttentionTieredQuant` with `null` in the recorder slot
 * (contrast with `recordAttentionTieredQuant`).
 *
 * @param {object} Q - Query tensor (see executeAttentionTieredQuant).
 * @param {object} hotK - Hot-tier keys (unquantized).
 * @param {object} hotV - Hot-tier values (unquantized).
 * @param {object} coldPackedK - Cold-tier packed (quantized) keys.
 * @param {object} coldPackedV - Cold-tier packed (quantized) values.
 * @param {object} coldScalesK - Dequantization scales for cold keys.
 * @param {object} coldScalesV - Dequantization scales for cold values.
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttentionTieredQuant resolves to.
 */
export async function runAttentionTieredQuant(Q, hotK, hotV, coldPackedK, coldPackedV, coldScalesK, coldScalesV, numHeads, headDim, options = {}) {
  return executeAttentionTieredQuant(null, Q, hotK, hotV, coldPackedK, coldPackedV, coldScalesK, coldScalesV, numHeads, headDim, options);
}
|
|
1412
|
-
|
|
1413
|
-
/**
 * Record tiered attention with a quantized cold tier into a command recorder.
 *
 * Thin convenience wrapper: identical to `runAttentionTieredQuant` except
 * the caller-supplied `recorder` is forwarded to
 * `executeAttentionTieredQuant` instead of `null`.
 *
 * @param {object} recorder - Command recorder the kernel is recorded into.
 * @param {object} Q - Query tensor (see executeAttentionTieredQuant).
 * @param {object} hotK - Hot-tier keys (unquantized).
 * @param {object} hotV - Hot-tier values (unquantized).
 * @param {object} coldPackedK - Cold-tier packed (quantized) keys.
 * @param {object} coldPackedV - Cold-tier packed (quantized) values.
 * @param {object} coldScalesK - Dequantization scales for cold keys.
 * @param {object} coldScalesV - Dequantization scales for cold values.
 * @param {number} numHeads - Number of attention heads.
 * @param {number} headDim - Per-head dimension.
 * @param {object} [options={}] - Extra kernel options, passed through as-is.
 * @returns {Promise<object>} Whatever executeAttentionTieredQuant resolves to.
 */
export async function recordAttentionTieredQuant(recorder, Q, hotK, hotV, coldPackedK, coldPackedV, coldScalesK, coldScalesV, numHeads, headDim, options = {}) {
  return executeAttentionTieredQuant(recorder, Q, hotK, hotV, coldPackedK, coldPackedV, coldScalesK, coldScalesV, numHeads, headDim, options);
}
|