sglang 0.5.1.post3.tar.gz → 0.5.2rc1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
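For reference, a comparison like the one below can be reproduced from the published sdists themselves. The following is a minimal sketch using only the Python standard library and the public PyPI JSON API; the endpoint URL, the `packagetype == "sdist"` filter, and the size-based "changed" heuristic are assumptions for illustration, not part of this diff.

```python
"""Sketch: list added/removed/size-changed files between two published sdists."""
import io
import json
import tarfile
import urllib.request


def sdist_members(name: str, version: str) -> dict[str, int]:
    """Return {path-inside-sdist: size in bytes} for the published source tarball."""
    meta = json.load(
        urllib.request.urlopen(f"https://pypi.org/pypi/{name}/{version}/json")
    )
    url = next(f["url"] for f in meta["urls"] if f["packagetype"] == "sdist")
    data = urllib.request.urlopen(url).read()
    with tarfile.open(fileobj=io.BytesIO(data), mode="r:gz") as tar:
        # Drop the leading "<name>-<version>/" component so paths line up
        # between the two archives.
        return {
            m.name.split("/", 1)[1]: m.size
            for m in tar.getmembers()
            if "/" in m.name
        }


old = sdist_members("sglang", "0.5.1.post3")
new = sdist_members("sglang", "0.5.2rc1")
print("added:  ", sorted(set(new) - set(old)))
print("removed:", sorted(set(old) - set(new)))
# Size equality is only a heuristic; a real diff compares file contents.
print("changed:", sorted(p for p in set(old) & set(new) if old[p] != new[p]))
```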
Files changed (889)
  1. {sglang-0.5.1.post3/sglang.egg-info → sglang-0.5.2rc1}/PKG-INFO +5 -5
  2. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/pyproject.toml +5 -5
  3. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/bench_one_batch.py +3 -0
  4. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/__init__.py +2 -0
  5. sglang-0.5.2rc1/sglang/srt/configs/longcat_flash.py +104 -0
  6. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/model_config.py +14 -1
  7. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/__init__.py +1 -1
  8. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/base_connector.py +1 -2
  9. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/redis.py +2 -2
  10. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/serde/__init__.py +1 -1
  11. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/serde/safe_serde.py +4 -3
  12. sglang-0.5.2rc1/sglang/srt/disaggregation/ascend/conn.py +117 -0
  13. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/launch_lb.py +0 -13
  14. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/mini_lb.py +33 -8
  15. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/prefill.py +1 -1
  16. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/parallel_state.py +27 -15
  17. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/engine.py +19 -12
  18. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/http_server.py +174 -34
  19. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/protocol.py +60 -0
  20. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/eplb_manager.py +26 -2
  21. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/expert_distribution.py +29 -2
  22. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/hf_transformers_utils.py +10 -0
  23. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/activation.py +12 -0
  24. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/ascend_backend.py +240 -109
  25. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/hybrid_attn_backend.py +53 -21
  26. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/trtllm_mla_backend.py +25 -10
  27. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/layernorm.py +28 -3
  28. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/linear.py +3 -2
  29. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/logits_processor.py +1 -1
  30. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/cutlass_w4a8_moe.py +1 -9
  31. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/ep_moe/kernels.py +74 -0
  32. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/ep_moe/layer.py +14 -13
  33. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/__init__.py +5 -3
  34. sglang-0.5.2rc1/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=129,N=352,device_name=NVIDIA_B200,dtype=fp8_w8a8.json +146 -0
  35. sglang-0.5.2rc1/sglang/srt/layers/moe/fused_moe_triton/fused_moe.py +684 -0
  36. sglang-0.5.2rc1/sglang/srt/layers/moe/fused_moe_triton/fused_moe_triton_config.py +212 -0
  37. sglang-0.5.2rc1/sglang/srt/layers/moe/fused_moe_triton/fused_moe_triton_kernels.py +796 -0
  38. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/layer.py +5 -2
  39. sglang-0.5.2rc1/sglang/srt/layers/moe/fused_moe_triton/moe_align_block_size.py +87 -0
  40. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/topk.py +35 -12
  41. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/deep_gemm_wrapper/compile_utils.py +9 -1
  42. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/deep_gemm_wrapper/configurer.py +0 -3
  43. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/modelopt_quant.py +7 -0
  44. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/mxfp4.py +9 -4
  45. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/utils.py +13 -0
  46. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/w4afp8.py +30 -25
  47. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/w8a8_int8.py +7 -3
  48. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/rotary_embedding.py +28 -1
  49. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/sampler.py +29 -5
  50. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/cache_controller.py +62 -96
  51. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/detokenizer_manager.py +9 -2
  52. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/io_struct.py +27 -0
  53. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/mm_utils.py +5 -1
  54. sglang-0.5.2rc1/sglang/srt/managers/multi_tokenizer_mixin.py +629 -0
  55. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler.py +39 -2
  56. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler_output_processor_mixin.py +20 -18
  57. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler_update_weights_mixin.py +8 -1
  58. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/tokenizer_manager.py +86 -39
  59. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/chunk_cache.py +1 -1
  60. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/hicache_storage.py +20 -3
  61. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/hiradix_cache.py +94 -71
  62. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/lora_radix_cache.py +1 -1
  63. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/memory_pool.py +4 -0
  64. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/memory_pool_host.py +4 -4
  65. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/radix_cache.py +5 -4
  66. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/radix_cache_cpp.py +1 -1
  67. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/hf3fs/mini_3fs_metadata_server.py +61 -34
  68. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/hf3fs/storage_hf3fs.py +56 -9
  69. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/mooncake_store/mooncake_store.py +2 -1
  70. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/swa_radix_cache.py +1 -1
  71. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_executor/model_runner.py +5 -4
  72. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_loader/loader.py +15 -24
  73. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_loader/utils.py +12 -0
  74. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/deepseek_v2.py +31 -10
  75. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gpt_oss.py +5 -18
  76. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama_eagle3.py +4 -0
  77. sglang-0.5.2rc1/sglang/srt/models/longcat_flash.py +1026 -0
  78. sglang-0.5.2rc1/sglang/srt/models/longcat_flash_nextn.py +699 -0
  79. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2.py +26 -3
  80. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_5_vl.py +65 -41
  81. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_moe.py +22 -2
  82. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/transformers.py +1 -1
  83. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/base_processor.py +4 -2
  84. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/penaltylib/orchestrator.py +14 -2
  85. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/server_args.py +112 -55
  86. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/speculative/eagle_worker.py +28 -8
  87. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/utils.py +4 -0
  88. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/attention/test_trtllm_mla_backend.py +12 -3
  89. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_cutlass_w4a8_moe.py +24 -9
  90. sglang-0.5.2rc1/sglang/version.py +1 -0
  91. {sglang-0.5.1.post3 → sglang-0.5.2rc1/sglang.egg-info}/PKG-INFO +5 -5
  92. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang.egg-info/SOURCES.txt +8 -0
  93. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang.egg-info/requires.txt +4 -4
  94. sglang-0.5.1.post3/sglang/srt/disaggregation/ascend/conn.py +0 -42
  95. sglang-0.5.1.post3/sglang/srt/layers/moe/fused_moe_triton/fused_moe.py +0 -1727
  96. sglang-0.5.1.post3/sglang/version.py +0 -1
  97. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/LICENSE +0 -0
  98. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/README.md +0 -0
  99. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/setup.cfg +0 -0
  100. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/__init__.py +0 -0
  101. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/bench_offline_throughput.py +0 -0
  102. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/bench_one_batch_server.py +0 -0
  103. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/bench_serving.py +0 -0
  104. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/check_env.py +0 -0
  105. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/compile_deep_gemm.py +0 -0
  106. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/eval/llama3_eval.py +0 -0
  107. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/eval/loogle_eval.py +0 -0
  108. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/global_config.py +0 -0
  109. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/api.py +0 -0
  110. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/backend/anthropic.py +0 -0
  111. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/backend/base_backend.py +0 -0
  112. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/backend/litellm.py +0 -0
  113. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/backend/openai.py +0 -0
  114. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/backend/runtime_endpoint.py +0 -0
  115. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/backend/vertexai.py +0 -0
  116. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/chat_template.py +0 -0
  117. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/choices.py +0 -0
  118. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/compiler.py +0 -0
  119. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/interpreter.py +0 -0
  120. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/ir.py +0 -0
  121. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/lang/tracer.py +0 -0
  122. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/launch_server.py +0 -0
  123. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/profiler.py +0 -0
  124. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/_custom_ops.py +0 -0
  125. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/aio_rwlock.py +0 -0
  126. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/bench_utils.py +0 -0
  127. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/code_completion_parser.py +0 -0
  128. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/chatglm.py +0 -0
  129. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/dbrx.py +0 -0
  130. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/deepseekvl2.py +0 -0
  131. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/device_config.py +0 -0
  132. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/exaone.py +0 -0
  133. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/internvl.py +0 -0
  134. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/janus_pro.py +0 -0
  135. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/kimi_vl.py +0 -0
  136. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/kimi_vl_moonvit.py +0 -0
  137. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/load_config.py +0 -0
  138. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/step3_vl.py +0 -0
  139. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/update_config.py +0 -0
  140. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/configs/utils.py +0 -0
  141. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/s3.py +0 -0
  142. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/serde/serde.py +0 -0
  143. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/connector/utils.py +0 -0
  144. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constants.py +0 -0
  145. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/base_grammar_backend.py +0 -0
  146. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/llguidance_backend.py +0 -0
  147. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/outlines_backend.py +0 -0
  148. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/outlines_jump_forward.py +0 -0
  149. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/reasoner_grammar_backend.py +0 -0
  150. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/triton_ops/bitmask_ops.py +0 -0
  151. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/constrained/xgrammar_backend.py +0 -0
  152. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/conversation.py +0 -0
  153. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/custom_op.py +0 -0
  154. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/debug_utils/__init__.py +0 -0
  155. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/debug_utils/dump_comparator.py +0 -0
  156. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/debug_utils/dumper.py +0 -0
  157. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/debug_utils/text_comparator.py +0 -0
  158. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/ascend/__init__.py +0 -0
  159. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/ascend/transfer_engine.py +0 -0
  160. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/base/__init__.py +0 -0
  161. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/base/conn.py +0 -0
  162. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/common/__init__.py +0 -0
  163. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/common/conn.py +0 -0
  164. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/common/utils.py +0 -0
  165. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/decode.py +0 -0
  166. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/decode_schedule_batch_mixin.py +0 -0
  167. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/fake/__init__.py +0 -0
  168. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/fake/conn.py +0 -0
  169. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/kv_events.py +0 -0
  170. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/mooncake/__init__.py +0 -0
  171. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/mooncake/conn.py +0 -0
  172. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/mooncake/transfer_engine.py +0 -0
  173. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/nixl/__init__.py +0 -0
  174. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/nixl/conn.py +0 -0
  175. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/disaggregation/utils.py +0 -0
  176. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/__init__.py +0 -0
  177. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/communication_op.py +0 -0
  178. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/cuda_wrapper.py +0 -0
  179. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/custom_all_reduce.py +0 -0
  180. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/custom_all_reduce_utils.py +0 -0
  181. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/hpu_communicator.py +0 -0
  182. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/npu_communicator.py +0 -0
  183. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/pymscclpp.py +0 -0
  184. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/pynccl.py +0 -0
  185. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/pynccl_allocator.py +0 -0
  186. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/pynccl_wrapper.py +0 -0
  187. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/quick_all_reduce.py +0 -0
  188. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/shm_broadcast.py +0 -0
  189. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/device_communicators/xpu_communicator.py +0 -0
  190. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/naive_distributed.py +0 -0
  191. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/distributed/utils.py +0 -0
  192. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/EngineBase.py +0 -0
  193. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/context.py +0 -0
  194. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/harmony_utils.py +0 -0
  195. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/http_server_engine.py +0 -0
  196. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/__init__.py +0 -0
  197. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_base.py +0 -0
  198. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_chat.py +0 -0
  199. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_completions.py +0 -0
  200. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_embedding.py +0 -0
  201. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_rerank.py +0 -0
  202. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_responses.py +0 -0
  203. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/serving_score.py +0 -0
  204. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/tool_server.py +0 -0
  205. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/usage_processor.py +0 -0
  206. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/openai/utils.py +0 -0
  207. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/entrypoints/tool.py +0 -0
  208. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/__init__.py +0 -0
  209. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/eplb_algorithms/__init__.py +0 -0
  210. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/eplb_algorithms/deepseek.py +0 -0
  211. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/eplb_algorithms/deepseek_vec.py +0 -0
  212. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/eplb_simulator/__init__.py +0 -0
  213. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/eplb_simulator/reader.py +0 -0
  214. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/expert_location.py +0 -0
  215. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/expert_location_dispatch.py +0 -0
  216. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/eplb/expert_location_updater.py +0 -0
  217. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/base_format_detector.py +0 -0
  218. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/core_types.py +0 -0
  219. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/deepseekv31_detector.py +0 -0
  220. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/deepseekv3_detector.py +0 -0
  221. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/ebnf_composer.py +0 -0
  222. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/function_call_parser.py +0 -0
  223. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/glm4_moe_detector.py +0 -0
  224. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/gpt_oss_detector.py +0 -0
  225. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/kimik2_detector.py +0 -0
  226. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/llama32_detector.py +0 -0
  227. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/mistral_detector.py +0 -0
  228. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/pythonic_detector.py +0 -0
  229. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/qwen25_detector.py +0 -0
  230. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/qwen3_coder_detector.py +0 -0
  231. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/step3_detector.py +0 -0
  232. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/function_call/utils.py +0 -0
  233. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/harmony_parser.py +0 -0
  234. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/host_shared_memory.py +0 -0
  235. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/jinja_template_utils.py +0 -0
  236. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/amx_utils.py +0 -0
  237. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/aiter_backend.py +0 -0
  238. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/base_attn_backend.py +0 -0
  239. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/cutlass_mla_backend.py +0 -0
  240. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/double_sparsity_backend.py +0 -0
  241. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/dual_chunk_flashattention_backend.py +0 -0
  242. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/flashattention_backend.py +0 -0
  243. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/flashinfer_backend.py +0 -0
  244. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/flashinfer_mla_backend.py +0 -0
  245. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/flashmla_backend.py +0 -0
  246. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/intel_amx_backend.py +0 -0
  247. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/merge_state.py +0 -0
  248. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/tbo_backend.py +0 -0
  249. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/torch_native_backend.py +0 -0
  250. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_backend.py +0 -0
  251. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_ops/decode_attention.py +0 -0
  252. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_ops/double_sparsity_attention.py +0 -0
  253. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_ops/extend_attention.py +0 -0
  254. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_ops/merge_state.py +0 -0
  255. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_ops/prefill_attention.py +0 -0
  256. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/triton_ops/rocm_mla_decode_rope.py +0 -0
  257. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/trtllm_mha_backend.py +0 -0
  258. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/utils.py +0 -0
  259. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/vision.py +0 -0
  260. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/vision_utils.py +0 -0
  261. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/wave_backend.py +0 -0
  262. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/wave_ops/decode_attention.py +0 -0
  263. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/wave_ops/extend_attention.py +0 -0
  264. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/attention/wave_ops/prefill_attention.py +0 -0
  265. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/communicator.py +0 -0
  266. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/dp_attention.py +0 -0
  267. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/elementwise.py +0 -0
  268. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/flashinfer_comm_fusion.py +0 -0
  269. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/__init__.py +0 -0
  270. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/cutlass_moe.py +0 -0
  271. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/cutlass_moe_params.py +0 -0
  272. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/ep_moe/__init__.py +0 -0
  273. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_native.py +0 -0
  274. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=14336,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  275. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=14336,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  276. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=1792,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  277. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=1792,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  278. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=3072,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  279. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=3072,device_name=NVIDIA_H100_80GB_HBM3,dtype=int8_w8a16.json +0 -0
  280. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=3072,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  281. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=3584,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  282. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=3584,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  283. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  284. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=1,N=7168,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  285. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=144,N=512,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  286. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1024,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  287. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1024,device_name=NVIDIA_H200.json +0 -0
  288. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1344,device_name=NVIDIA_A100-SXM4-40GB.json +0 -0
  289. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1344,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  290. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1344,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  291. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=14336,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  292. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=14336,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  293. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1792,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  294. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=1792,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  295. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=2048,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  296. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=2688,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  297. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=2688,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  298. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=3072,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  299. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=3072,device_name=NVIDIA_H100_80GB_HBM3,dtype=int8_w8a16.json +0 -0
  300. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=3200,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  301. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=3584,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  302. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=3584,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  303. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=6400,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  304. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a16.json +0 -0
  305. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=7168,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  306. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=int8_w8a16.json +0 -0
  307. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=16,N=800,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  308. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=160,N=192,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  309. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=20,N=2048,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  310. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=24,N=1024,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  311. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  312. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  313. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  314. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  315. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  316. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_H20,block_shape=[128, 128].json +0 -0
  317. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  318. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=128,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  319. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=256,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  320. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=256,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  321. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=256,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  322. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  323. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=256,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  324. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=256,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  325. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=64,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  326. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=64,device_name=NVIDIA_L20,dtype=int8_w8a8.json +0 -0
  327. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=256,N=64,device_name=NVIDIA_L40S,dtype=int8_w8a8.json +0 -0
  328. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1024,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  329. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1280,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  330. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1280,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  331. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1280,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  332. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1280,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  333. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1280,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  334. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=1280,device_name=NVIDIA_H200.json +0 -0
  335. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=2560,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  336. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=2560,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  337. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=2560,device_name=NVIDIA_H200.json +0 -0
  338. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=320,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  339. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=320,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  340. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=320,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  341. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=320,device_name=NVIDIA_H200.json +0 -0
  342. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=512,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  343. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  344. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  345. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_GeForce_RTX_4090,dtype=fp8_w8a8.json +0 -0
  346. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  347. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  348. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  349. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=64,N=640,device_name=NVIDIA_H200.json +0 -0
  350. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=14336,device_name=AMD_Instinct_MI300X.json +0 -0
  351. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=14336,device_name=AMD_Instinct_MI325X.json +0 -0
  352. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=14336,device_name=AMD_Radeon_Graphics.json +0 -0
  353. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=14336,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  354. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=14336,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  355. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=14336,device_name=NVIDIA_H200.json +0 -0
  356. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=AMD_Instinct_MI300X.json +0 -0
  357. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=AMD_Instinct_MI325X.json +0 -0
  358. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=AMD_Radeon_Graphics.json +0 -0
  359. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=NVIDIA_A100-SXM4-40GB.json +0 -0
  360. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  361. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  362. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  363. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=1792,device_name=NVIDIA_H200.json +0 -0
  364. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=2048,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  365. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=2048,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  366. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=2048,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  367. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=2048,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  368. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=2048,device_name=NVIDIA_H200.json +0 -0
  369. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=AMD_Instinct_MI300X.json +0 -0
  370. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=AMD_Instinct_MI325X.json +0 -0
  371. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=AMD_Radeon_Graphics.json +0 -0
  372. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_A100-SXM4-40GB.json +0 -0
  373. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  374. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_GeForce_RTX_4090,dtype=fp8_w8a8.json +0 -0
  375. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  376. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  377. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  378. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_H200.json +0 -0
  379. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=3584,device_name=NVIDIA_L40S.json +0 -0
  380. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8.json +0 -0
  381. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8.json +0 -0
  382. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8.json +0 -0
  383. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  384. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  385. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  386. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  387. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=4096,device_name=NVIDIA_H200.json +0 -0
  388. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=AMD_Instinct_MI300X.json +0 -0
  389. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=AMD_Instinct_MI325X.json +0 -0
  390. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=AMD_Radeon_Graphics.json +0 -0
  391. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  392. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  393. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  394. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  395. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=7168,device_name=NVIDIA_H200.json +0 -0
  396. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=8192,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8.json +0 -0
  397. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=8192,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8.json +0 -0
  398. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=8192,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8.json +0 -0
  399. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=8192,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8.json +0 -0
  400. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_1_0/E=8,N=8192,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  401. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=192,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  402. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=192,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  403. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=192,device_name=NVIDIA_H20.json +0 -0
  404. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=192,device_name=NVIDIA_H200.json +0 -0
  405. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=384,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  406. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=384,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  407. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=384,device_name=NVIDIA_H20.json +0 -0
  408. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=384,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  409. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=384,device_name=NVIDIA_H200.json +0 -0
  410. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=512,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  411. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=768,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  412. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=768,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  413. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=768,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  414. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=768,device_name=NVIDIA_H20.json +0 -0
  415. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=768,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  416. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=768,device_name=NVIDIA_H200.json +0 -0
  417. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=128,N=96,device_name=NVIDIA_H20.json +0 -0
  418. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=129,N=352,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  419. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=160,N=320,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  420. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=161,N=192,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  421. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=257,N=128,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  422. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=257,N=128,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  423. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=257,N=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  424. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=257,N=256,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  425. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=257,N=256,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  426. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=264,N=128,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  427. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=264,N=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  428. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=264,N=256,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  429. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=264,N=256,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  430. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=272,N=128,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8.json +0 -0
  431. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=272,N=128,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  432. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=272,N=128,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  433. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=272,N=64,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  434. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=288,N=64,device_name=NVIDIA_A800-SXM4-80GB.json +0 -0
  435. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_2_0/E=8,N=7168,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  436. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_0/E=16,N=1024,device_name=NVIDIA_B200.json +0 -0
  437. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=128,N=352,device_name=NVIDIA_RTX_6000_Ada_Generation,dtype=fp8_w8a8.json +0 -0
  438. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=128,N=384,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  439. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=128,N=384,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  440. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=128,N=768,device_name=NVIDIA_H20.json +0 -0
  441. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=160,N=192,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  442. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=160,N=320,device_name=NVIDIA_H20-3e.json +0 -0
  443. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=160,N=384,device_name=NVIDIA_H200,dtype=fp8_w8a8.json +0 -0
  444. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=160,N=640,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  445. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=257,N=128,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  446. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=257,N=128,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  447. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=257,N=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  448. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=257,N=256,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  449. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=257,N=256,device_name=NVIDIA_H20-3e,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  450. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=257,N=256,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  451. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=384,N=128,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  452. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=384,N=128,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  453. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=384,N=256,device_name=NVIDIA_H20-3e,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  454. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=385,N=128,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  455. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=385,N=128,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  456. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_3_1/E=8,N=7168,device_name=NVIDIA_H100_80GB_HBM3.json +0 -0
  457. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=128,N=384,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  458. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=128,N=768,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  459. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=129,N=352,device_name=NVIDIA_RTX_PRO_6000_Blackwell_Max-Q_Workstation_Edition,dtype=fp8_w8a8.json +0 -0
  460. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=129,N=704,device_name=NVIDIA_B200,dtype=fp8_w8a8.json +0 -0
  461. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=161,N=384,device_name=NVIDIA_RTX_PRO_6000_Blackwell_Max-Q_Workstation_Edition,dtype=fp8_w8a8.json +0 -0
  462. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=256,N=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  463. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=257,N=64,device_name=NVIDIA_A100-SXM4-80GB.json +0 -0
  464. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/configs/triton_3_4_0/E=384,N=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  465. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/fused_moe_triton/triton_kernels_moe.py +0 -0
  466. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/moe_runner/__init__.py +0 -0
  467. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/moe_runner/base.py +0 -0
  468. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/rocm_moe_utils.py +0 -0
  469. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/router.py +0 -0
  470. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/token_dispatcher/__init__.py +0 -0
  471. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/token_dispatcher/base_dispatcher.py +0 -0
  472. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/token_dispatcher/deepep.py +0 -0
  473. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/token_dispatcher/standard.py +0 -0
  474. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/moe/utils.py +0 -0
  475. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/multimodal.py +0 -0
  476. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/parameter.py +0 -0
  477. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/pooler.py +0 -0
  478. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/__init__.py +0 -0
  479. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/awq.py +0 -0
  480. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/awq_triton.py +0 -0
  481. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/base_config.py +0 -0
  482. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/blockwise_int8.py +0 -0
  483. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/__init__.py +0 -0
  484. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/compressed_tensors.py +0 -0
  485. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/compressed_tensors_moe.py +0 -0
  486. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/schemes/__init__.py +0 -0
  487. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/schemes/compressed_tensors_scheme.py +0 -0
  488. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/schemes/compressed_tensors_w8a16_fp8.py +0 -0
  489. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/schemes/compressed_tensors_w8a8_fp8.py +0 -0
  490. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/compressed_tensors/utils.py +0 -0
  491. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=1536,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  492. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=1536,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  493. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=1536,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  494. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=1536,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  495. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=1536,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  496. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  497. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  498. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  499. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  500. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  501. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  502. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  503. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  504. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=1536,K=7168,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  505. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2048,K=512,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  506. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2048,K=512,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  507. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2048,K=512,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  508. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2048,K=512,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  509. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2048,K=512,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  510. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2048,K=512,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  511. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2304,K=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  512. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2304,K=7168,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  513. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2304,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  514. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2304,K=7168,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  515. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2304,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  516. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=2304,K=7168,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  517. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=1536,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  518. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=1536,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  519. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=1536,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  520. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  521. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  522. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  523. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  524. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  525. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  526. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  527. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=24576,K=7168,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  528. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=256,K=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  529. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=256,K=7168,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  530. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=256,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  531. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=256,K=7168,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  532. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=256,K=7168,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  533. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=1536,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  534. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=1536,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  535. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=1536,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  536. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=1536,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  537. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=1536,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  538. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=1536,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  539. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=7168,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  540. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  541. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=7168,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  542. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=3072,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  543. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  544. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  545. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  546. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  547. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  548. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  549. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  550. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  551. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  552. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  553. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=32768,K=512,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  554. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=36864,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  555. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=36864,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  556. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  557. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  558. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  559. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  560. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  561. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  562. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4096,K=512,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  563. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  564. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  565. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  566. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  567. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  568. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  569. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=4608,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  570. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=512,K=7168,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  571. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=512,K=7168,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  572. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=512,K=7168,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  573. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=512,K=7168,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  574. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=512,K=7168,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  575. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=512,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  576. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  577. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  578. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  579. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  580. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  581. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  582. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  583. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  584. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  585. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  586. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=576,K=7168,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  587. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1024,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  588. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1024,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  589. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1024,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  590. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1024,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  591. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1024,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  592. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1024,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  593. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1152,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  594. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1152,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  595. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1152,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  596. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1152,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  597. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1152,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  598. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=1152,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  599. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=128,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  600. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=128,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  601. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=128,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  602. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=128,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  603. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=128,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  604. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  605. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  606. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  607. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  608. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  609. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  610. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  611. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  612. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  613. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  614. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=16384,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  615. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_A100-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  616. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_A800-SXM4-80GB,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  617. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  618. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  619. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_H20,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  620. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  621. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  622. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=18432,device_name=NVIDIA_L20Y,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  623. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  624. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  625. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  626. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  627. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  628. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  629. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2048,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  630. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  631. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  632. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  633. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  634. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=NVIDIA_H100_80GB_HBM3,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  635. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  636. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=2304,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  637. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=256,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  638. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=256,device_name=AMD_Instinct_MI325X,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  639. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=256,device_name=AMD_Radeon_Graphics,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  640. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=256,device_name=NVIDIA_B200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  641. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=256,device_name=NVIDIA_H20,dtype=int8_w8a8,block_shape=[128, 128].json +0 -0
  642. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/configs/N=7168,K=256,device_name=NVIDIA_H200,dtype=fp8_w8a8,block_shape=[128, 128].json +0 -0
  643. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/deep_gemm_wrapper/__init__.py +0 -0
  644. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/deep_gemm_wrapper/entrypoint.py +0 -0
  645. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/fp8.py +0 -0
  646. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/fp8_kernel.py +0 -0
  647. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/fp8_utils.py +0 -0
  648. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/fpgemm_fp8.py +0 -0
  649. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/gptq.py +0 -0
  650. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/int8_kernel.py +0 -0
  651. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/int8_utils.py +0 -0
  652. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/kv_cache.py +0 -0
  653. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/marlin_utils.py +0 -0
  654. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/marlin_utils_fp8.py +0 -0
  655. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/moe_wna16.py +0 -0
  656. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/mxfp4_tensor.py +0 -0
  657. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/petit.py +0 -0
  658. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/petit_utils.py +0 -0
  659. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/qoq.py +0 -0
  660. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/__init__.py +0 -0
  661. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/quark.py +0 -0
  662. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/quark_moe.py +0 -0
  663. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/schemes/__init__.py +0 -0
  664. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/schemes/quark_scheme.py +0 -0
  665. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/schemes/quark_w4a4_mxfp4.py +0 -0
  666. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/quark/utils.py +0 -0
  667. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/unquant.py +0 -0
  668. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/quantization/w8a8_fp8.py +0 -0
  669. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/radix_attention.py +0 -0
  670. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/torchao_utils.py +0 -0
  671. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/utils.py +0 -0
  672. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/layers/vocab_parallel_embedding.py +0 -0
  673. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/backend/base_backend.py +0 -0
  674. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/backend/triton_backend.py +0 -0
  675. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/layers.py +0 -0
  676. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/lora.py +0 -0
  677. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/lora_config.py +0 -0
  678. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/lora_manager.py +0 -0
  679. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/lora_registry.py +0 -0
  680. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/mem_pool.py +0 -0
  681. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/triton_ops/__init__.py +0 -0
  682. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/triton_ops/gate_up_lora_b.py +0 -0
  683. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/triton_ops/qkv_lora_b.py +0 -0
  684. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/triton_ops/sgemm_lora_a.py +0 -0
  685. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/triton_ops/sgemm_lora_b.py +0 -0
  686. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/lora/utils.py +0 -0
  687. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/configure_logging.py +0 -0
  688. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/data_parallel_controller.py +0 -0
  689. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/multimodal_processor.py +0 -0
  690. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/schedule_batch.py +0 -0
  691. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/schedule_policy.py +0 -0
  692. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler_input_blocker.py +0 -0
  693. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler_metrics_mixin.py +0 -0
  694. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler_profiler_mixin.py +0 -0
  695. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/scheduler_recv_skipper.py +0 -0
  696. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/session_controller.py +0 -0
  697. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/template_manager.py +0 -0
  698. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/tp_worker.py +0 -0
  699. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/tp_worker_overlap_thread.py +0 -0
  700. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/managers/utils.py +0 -0
  701. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/allocator.py +0 -0
  702. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/allocator_ascend.py +0 -0
  703. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/base_prefix_cache.py +0 -0
  704. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/cpp_radix_tree/radix_tree.py +0 -0
  705. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/flush_cache.py +0 -0
  706. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/multimodal_cache.py +0 -0
  707. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/hf3fs/client_hf3fs.py +0 -0
  708. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/hf3fs/hf3fs_utils.cpp +0 -0
  709. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/hf3fs/test_hf3fs_utils.py +0 -0
  710. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/mooncake_store/unit_test.py +0 -0
  711. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/nixl/hicache_nixl.py +0 -0
  712. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/nixl/nixl_utils.py +0 -0
  713. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/mem_cache/storage/nixl/test_hicache_nixl_storage.py +0 -0
  714. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/metrics/collector.py +0 -0
  715. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/metrics/func_timer.py +0 -0
  716. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_executor/cuda_graph_runner.py +0 -0
  717. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_executor/forward_batch_info.py +0 -0
  718. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_executor/npu_graph_runner.py +0 -0
  719. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_loader/__init__.py +0 -0
  720. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_loader/weight_utils.py +0 -0
  721. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/model_parallel.py +0 -0
  722. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/arcee.py +0 -0
  723. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/baichuan.py +0 -0
  724. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/bailing_moe.py +0 -0
  725. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/bert.py +0 -0
  726. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/chatglm.py +0 -0
  727. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/clip.py +0 -0
  728. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/commandr.py +0 -0
  729. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/dbrx.py +0 -0
  730. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/deepseek.py +0 -0
  731. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/deepseek_janus_pro.py +0 -0
  732. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/deepseek_nextn.py +0 -0
  733. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/deepseek_vl2.py +0 -0
  734. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/ernie4.py +0 -0
  735. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/ernie4_eagle.py +0 -0
  736. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/exaone.py +0 -0
  737. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma.py +0 -0
  738. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma2.py +0 -0
  739. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma2_reward.py +0 -0
  740. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma3_causal.py +0 -0
  741. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma3_mm.py +0 -0
  742. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma3n_audio.py +0 -0
  743. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma3n_causal.py +0 -0
  744. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gemma3n_mm.py +0 -0
  745. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/glm4.py +0 -0
  746. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/glm4_moe.py +0 -0
  747. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/glm4_moe_nextn.py +0 -0
  748. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/glm4v.py +0 -0
  749. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/glm4v_moe.py +0 -0
  750. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gpt2.py +0 -0
  751. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/gpt_bigcode.py +0 -0
  752. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/granite.py +0 -0
  753. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/granitemoe.py +0 -0
  754. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/grok.py +0 -0
  755. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/hunyuan.py +0 -0
  756. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/idefics2.py +0 -0
  757. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/internlm2.py +0 -0
  758. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/internlm2_reward.py +0 -0
  759. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/interns1.py +0 -0
  760. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/internvl.py +0 -0
  761. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/kimi_vl.py +0 -0
  762. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/kimi_vl_moonvit.py +0 -0
  763. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama.py +0 -0
  764. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama4.py +0 -0
  765. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama_classification.py +0 -0
  766. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama_eagle.py +0 -0
  767. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama_embedding.py +0 -0
  768. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llama_reward.py +0 -0
  769. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llava.py +0 -0
  770. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/llavavid.py +0 -0
  771. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mimo.py +0 -0
  772. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mimo_mtp.py +0 -0
  773. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/minicpm.py +0 -0
  774. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/minicpm3.py +0 -0
  775. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/minicpmo.py +0 -0
  776. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/minicpmv.py +0 -0
  777. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mistral.py +0 -0
  778. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mixtral.py +0 -0
  779. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mixtral_quant.py +0 -0
  780. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mllama.py +0 -0
  781. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/mllama4.py +0 -0
  782. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/nemotron_nas.py +0 -0
  783. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/olmo.py +0 -0
  784. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/olmo2.py +0 -0
  785. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/olmoe.py +0 -0
  786. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/persimmon.py +0 -0
  787. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/phi.py +0 -0
  788. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/phi3_small.py +0 -0
  789. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/phi4mm.py +0 -0
  790. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/phi4mm_audio.py +0 -0
  791. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/phi4mm_utils.py +0 -0
  792. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/phimoe.py +0 -0
  793. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/pixtral.py +0 -0
  794. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen.py +0 -0
  795. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_audio.py +0 -0
  796. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_classification.py +0 -0
  797. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_eagle.py +0 -0
  798. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_rm.py +0 -0
  799. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen2_vl.py +0 -0
  800. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen3.py +0 -0
  801. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen3_classification.py +0 -0
  802. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/qwen3_moe.py +0 -0
  803. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/registry.py +0 -0
  804. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/roberta.py +0 -0
  805. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/siglip.py +0 -0
  806. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/stablelm.py +0 -0
  807. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/step3_vl.py +0 -0
  808. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/torch_native_llama.py +0 -0
  809. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/vila.py +0 -0
  810. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/xverse.py +0 -0
  811. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/xverse_moe.py +0 -0
  812. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/models/yivl.py +0 -0
  813. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/mm_utils.py +0 -0
  814. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/clip.py +0 -0
  815. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/deepseek_vl_v2.py +0 -0
  816. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/gemma3.py +0 -0
  817. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/gemma3n.py +0 -0
  818. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/glm4v.py +0 -0
  819. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/internvl.py +0 -0
  820. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/janus_pro.py +0 -0
  821. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/kimi_vl.py +0 -0
  822. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/llava.py +0 -0
  823. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/minicpm.py +0 -0
  824. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/mlama.py +0 -0
  825. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/mllama4.py +0 -0
  826. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/phi4mm.py +0 -0
  827. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/pixtral.py +0 -0
  828. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/qwen_audio.py +0 -0
  829. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/qwen_vl.py +0 -0
  830. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/step3_vl.py +0 -0
  831. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/multimodal/processors/vila.py +0 -0
  832. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/offloader.py +0 -0
  833. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/operations.py +0 -0
  834. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/operations_strategy.py +0 -0
  835. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/patch_torch.py +0 -0
  836. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/poll_based_barrier.py +0 -0
  837. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/reasoning_parser.py +0 -0
  838. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/custom_logit_processor.py +0 -0
  839. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/penaltylib/__init__.py +0 -0
  840. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/penaltylib/frequency_penalty.py +0 -0
  841. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/penaltylib/min_new_tokens.py +0 -0
  842. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/penaltylib/presence_penalty.py +0 -0
  843. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/sampling_batch_info.py +0 -0
  844. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/sampling/sampling_params.py +0 -0
  845. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/speculative/build_eagle_tree.py +0 -0
  846. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/speculative/eagle_draft_cuda_graph_runner.py +0 -0
  847. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/speculative/eagle_draft_extend_cuda_graph_runner.py +0 -0
  848. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/speculative/eagle_utils.py +0 -0
  849. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/speculative/spec_info.py +0 -0
  850. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/tokenizer/tiktoken_tokenizer.py +0 -0
  851. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/torch_memory_saver_adapter.py +0 -0
  852. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/two_batch_overlap.py +0 -0
  853. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/warmup.py +0 -0
  854. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/weight_sync/tensor_bucket.py +0 -0
  855. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/srt/weight_sync/utils.py +0 -0
  856. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/__init__.py +0 -0
  857. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/attention/__init__.py +0 -0
  858. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/attention/test_flashattn_backend.py +0 -0
  859. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/attention/test_flashattn_mla_backend.py +0 -0
  860. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/attention/test_prefix_chunk_info.py +0 -0
  861. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/doc_patch.py +0 -0
  862. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/few_shot_gsm8k.py +0 -0
  863. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/few_shot_gsm8k_engine.py +0 -0
  864. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/run_eval.py +0 -0
  865. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/runners.py +0 -0
  866. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/send_one.py +0 -0
  867. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/simple_eval_common.py +0 -0
  868. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/simple_eval_gpqa.py +0 -0
  869. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/simple_eval_humaneval.py +0 -0
  870. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/simple_eval_math.py +0 -0
  871. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/simple_eval_mgsm.py +0 -0
  872. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/simple_eval_mmlu.py +0 -0
  873. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_activation.py +0 -0
  874. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_block_fp8.py +0 -0
  875. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_block_fp8_deep_gemm_blackwell.py +0 -0
  876. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_block_fp8_ep.py +0 -0
  877. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_custom_ops.py +0 -0
  878. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_cutlass_moe.py +0 -0
  879. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_deepep_utils.py +0 -0
  880. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_dynamic_grad_mode.py +0 -0
  881. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_fp4_moe.py +0 -0
  882. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_layernorm.py +0 -0
  883. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_marlin_moe.py +0 -0
  884. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_marlin_utils.py +0 -0
  885. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_programs.py +0 -0
  886. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/test/test_utils.py +0 -0
  887. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang/utils.py +0 -0
  888. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang.egg-info/dependency_links.txt +0 -0
  889. {sglang-0.5.1.post3 → sglang-0.5.2rc1}/sglang.egg-info/top_level.txt +0 -0
{sglang-0.5.1.post3/sglang.egg-info → sglang-0.5.2rc1}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sglang
- Version: 0.5.1.post3
+ Version: 0.5.2rc1
  Summary: SGLang is yet another fast serving framework for large language models and vision language models.
  License: Apache License
  Version 2.0, January 2004
@@ -251,18 +251,18 @@ Requires-Dist: scipy; extra == "runtime-common"
  Requires-Dist: timm==1.0.16; extra == "runtime-common"
  Requires-Dist: tiktoken; extra == "runtime-common"
  Requires-Dist: torchao==0.9.0; extra == "runtime-common"
- Requires-Dist: transformers==4.55.2; extra == "runtime-common"
+ Requires-Dist: transformers==4.56.0; extra == "runtime-common"
  Requires-Dist: uvicorn; extra == "runtime-common"
  Requires-Dist: uvloop; extra == "runtime-common"
  Requires-Dist: xgrammar==0.1.23; extra == "runtime-common"
  Provides-Extra: srt
  Requires-Dist: sglang[runtime_common]; extra == "srt"
- Requires-Dist: sgl-kernel==0.3.7; extra == "srt"
+ Requires-Dist: sgl-kernel==0.3.8; extra == "srt"
  Requires-Dist: torch==2.8.0; extra == "srt"
  Requires-Dist: torchaudio==2.8.0; extra == "srt"
  Requires-Dist: torchvision; extra == "srt"
  Requires-Dist: cuda-python; extra == "srt"
- Requires-Dist: flashinfer_python==0.2.14.post1; extra == "srt"
+ Requires-Dist: flashinfer_python==0.3.0; extra == "srt"
  Provides-Extra: blackwell
  Requires-Dist: sglang[runtime_common]; extra == "blackwell"
  Requires-Dist: sgl-kernel; extra == "blackwell"
@@ -270,7 +270,7 @@ Requires-Dist: torch==2.8.0; extra == "blackwell"
  Requires-Dist: torchaudio==2.8.0; extra == "blackwell"
  Requires-Dist: torchvision; extra == "blackwell"
  Requires-Dist: cuda-python; extra == "blackwell"
- Requires-Dist: flashinfer_python==0.2.14.post1; extra == "blackwell"
+ Requires-Dist: flashinfer_python==0.3.0; extra == "blackwell"
  Provides-Extra: srt-hip
  Requires-Dist: sglang[runtime_common]; extra == "srt-hip"
  Requires-Dist: torch; extra == "srt-hip"
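The metadata above only records the new pins (transformers 4.56.0, sgl-kernel 0.3.8, flashinfer_python 0.3.0). To confirm what an installed build actually resolved to, a quick check against the distribution metadata is enough. This is an illustrative sketch, not part of the package; the distribution names are taken from the Requires-Dist lines above, and which of them are present depends on the extras that were installed.

# Illustrative only: report the installed versions of the distributions pinned above.
from importlib.metadata import PackageNotFoundError, version

for dist in ("sglang", "transformers", "sgl-kernel", "flashinfer_python"):
    try:
        print(f"{dist}: {version(dist)}")
    except PackageNotFoundError:
        print(f"{dist}: not installed")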
{sglang-0.5.1.post3 → sglang-0.5.2rc1}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "sglang"
- version = "0.5.1.post3"
+ version = "0.5.2rc1"
  description = "SGLang is yet another fast serving framework for large language models and vision language models."
  readme = "README.md"
  requires-python = ">=3.10"
@@ -50,7 +50,7 @@ runtime_common = [
  "timm==1.0.16",
  "tiktoken",
  "torchao==0.9.0",
- "transformers==4.55.2",
+ "transformers==4.56.0",
  "uvicorn",
  "uvloop",
  "xgrammar==0.1.23",
@@ -58,12 +58,12 @@ runtime_common = [

  srt = [
  "sglang[runtime_common]",
- "sgl-kernel==0.3.7",
+ "sgl-kernel==0.3.8",
  "torch==2.8.0",
  "torchaudio==2.8.0",
  "torchvision",
  "cuda-python",
- "flashinfer_python==0.2.14.post1",
+ "flashinfer_python==0.3.0",
  ]

  blackwell = [
@@ -73,7 +73,7 @@ blackwell = [
  "torchaudio==2.8.0",
  "torchvision",
  "cuda-python",
- "flashinfer_python==0.2.14.post1",
+ "flashinfer_python==0.3.0",
  ]

  # HIP (Heterogeneous-computing Interface for Portability) for AMD
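For scripting against a source checkout, the same pins can be read directly out of pyproject.toml. A minimal sketch, assuming Python 3.11+ (for the standard-library tomllib) and that it runs from the root of the 0.5.2rc1 source tree; both are assumptions, not requirements stated by the package.

# Sketch: print the project version and the "srt" extra from a source checkout.
import tomllib

with open("pyproject.toml", "rb") as f:
    project = tomllib.load(f)["project"]

print(project["version"])  # expected: 0.5.2rc1
for req in project["optional-dependencies"]["srt"]:
    print(req)  # includes sgl-kernel==0.3.8 and flashinfer_python==0.3.0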
@@ -61,6 +61,7 @@ from sglang.srt.configs.model_config import ModelConfig
61
61
  from sglang.srt.distributed.parallel_state import destroy_distributed_environment
62
62
  from sglang.srt.entrypoints.engine import _set_envs_and_config
63
63
  from sglang.srt.hf_transformers_utils import get_tokenizer
64
+ from sglang.srt.layers.moe import initialize_moe_config
64
65
  from sglang.srt.managers.schedule_batch import Req, ScheduleBatch
65
66
  from sglang.srt.managers.scheduler import Scheduler
66
67
  from sglang.srt.model_executor.forward_batch_info import ForwardBatch
@@ -509,6 +510,8 @@ def latency_test(
509
510
  bench_args,
510
511
  tp_rank,
511
512
  ):
513
+ initialize_moe_config(server_args)
514
+
512
515
  # Set CPU affinity
513
516
  if get_bool_env_var("SGLANG_SET_CPU_AFFINITY"):
514
517
  set_gpu_proc_affinity(server_args.tp_size, server_args.nnodes, tp_rank)
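bench_one_batch now builds the MoE configuration from ServerArgs before the latency test touches any batch. A minimal sketch of that call order, assuming a default ServerArgs with a placeholder model path is enough for the model being benchmarked:

    # Initialize the MoE config before constructing schedulers / forward batches.
    from sglang.srt.layers.moe import initialize_moe_config
    from sglang.srt.server_args import ServerArgs

    server_args = ServerArgs(model_path="/path/to/model")  # placeholder path
    initialize_moe_config(server_args)  # must run before latency_test builds its batches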
@@ -5,6 +5,7 @@ from sglang.srt.configs.exaone import ExaoneConfig
5
5
  from sglang.srt.configs.janus_pro import MultiModalityConfig
6
6
  from sglang.srt.configs.kimi_vl import KimiVLConfig
7
7
  from sglang.srt.configs.kimi_vl_moonvit import MoonViTConfig
8
+ from sglang.srt.configs.longcat_flash import LongcatFlashConfig
8
9
  from sglang.srt.configs.step3_vl import (
9
10
  Step3TextConfig,
10
11
  Step3VisionEncoderConfig,
@@ -16,6 +17,7 @@ __all__ = [
16
17
  "ChatGLMConfig",
17
18
  "DbrxConfig",
18
19
  "DeepseekVL2Config",
20
+ "LongcatFlashConfig",
19
21
  "MultiModalityConfig",
20
22
  "KimiVLConfig",
21
23
  "MoonViTConfig",
@@ -0,0 +1,104 @@
1
+ from transformers.configuration_utils import PretrainedConfig
2
+ from transformers.utils import logging
3
+
4
+ logger = logging.get_logger(__name__)
5
+
6
+ FLASH_PRETRAINED_CONFIG_ARCHIVE_MAP = {}
7
+
8
+
9
+ class LongcatFlashConfig(PretrainedConfig):
10
+ model_type = "longcat_flash"
11
+ keys_to_ignore_at_inference = ["past_key_values"]
12
+
13
+ def __init__(
14
+ self,
15
+ vocab_size=131072,
16
+ hidden_size=6144,
17
+ intermediate_size=None,
18
+ ffn_hidden_size=12288,
19
+ expert_ffn_hidden_size=2048,
20
+ num_layers=28,
21
+ num_hidden_layers=None,
22
+ num_attention_heads=64,
23
+ ep_size=1,
24
+ kv_lora_rank=512,
25
+ q_lora_rank=1536,
26
+ qk_rope_head_dim=128,
27
+ qk_nope_head_dim=128,
28
+ v_head_dim=128,
29
+ n_routed_experts=512,
30
+ moe_topk=12,
31
+ norm_topk_prob=False,
32
+ max_position_embeddings=131072,
33
+ rms_norm_eps=1e-05,
34
+ use_cache=True,
35
+ pad_token_id=None,
36
+ bos_token_id=1,
37
+ eos_token_id=2,
38
+ pretraining_tp=1,
39
+ tie_word_embeddings=False,
40
+ rope_theta=10000000.0,
41
+ rope_scaling=None,
42
+ attention_bias=False,
43
+ attention_dropout=0.0,
44
+ mla_scale_q_lora=True,
45
+ mla_scale_kv_lora=True,
46
+ torch_dtype="bfloat16",
47
+ params_dtype="bfloat16",
48
+ rounter_params_dtype="float32",
49
+ router_bias=False,
50
+ topk_method=None,
51
+ routed_scaling_factor=6.0,
52
+ zero_expert_num=256,
53
+ zero_expert_type="identity",
54
+ nextn_use_scmoe=False,
55
+ num_nextn_predict_layers=1,
56
+ **kwargs,
57
+ ):
58
+ super().__init__(
59
+ pad_token_id=pad_token_id,
60
+ bos_token_id=bos_token_id,
61
+ eos_token_id=eos_token_id,
62
+ tie_word_embeddings=tie_word_embeddings,
63
+ torch_dtype=torch_dtype,
64
+ params_dtype=params_dtype,
65
+ rounter_params_dtype=rounter_params_dtype,
66
+ topk_method=topk_method,
67
+ router_bias=router_bias,
68
+ nextn_use_scmoe=nextn_use_scmoe,
69
+ num_nextn_predict_layers=num_nextn_predict_layers,
70
+ **kwargs,
71
+ )
72
+ self.vocab_size = vocab_size
73
+ self.max_position_embeddings = max_position_embeddings
74
+ self.hidden_size = hidden_size
75
+ self.num_hidden_layers = (
76
+ num_hidden_layers if num_hidden_layers is not None else num_layers
77
+ )
78
+ self.intermediate_size = (
79
+ intermediate_size if intermediate_size is not None else ffn_hidden_size
80
+ )
81
+ self.moe_intermediate_size = expert_ffn_hidden_size
82
+ self.num_attention_heads = num_attention_heads
83
+ self.ep_size = ep_size
84
+ self.kv_lora_rank = kv_lora_rank
85
+ self.q_lora_rank = q_lora_rank
86
+ self.qk_rope_head_dim = qk_rope_head_dim
87
+ self.v_head_dim = v_head_dim
88
+ self.qk_nope_head_dim = qk_nope_head_dim
89
+ self.n_routed_experts = n_routed_experts
90
+ self.moe_topk = moe_topk
91
+ self.norm_topk_prob = norm_topk_prob
92
+ self.rms_norm_eps = rms_norm_eps
93
+ self.pretraining_tp = pretraining_tp
94
+ self.use_cache = use_cache
95
+ self.rope_theta = rope_theta
96
+ self.rope_scaling = rope_scaling
97
+ self.attention_bias = attention_bias
98
+ self.attention_dropout = attention_dropout
99
+ self.mla_scale_q_lora = mla_scale_q_lora
100
+ self.mla_scale_kv_lora = mla_scale_kv_lora
101
+ self.zero_expert_num = zero_expert_num
102
+ self.zero_expert_type = zero_expert_type
103
+ self.routed_scaling_factor = routed_scaling_factor
104
+ self.hidden_act = "silu"
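LongcatFlashConfig reuses DeepSeek-style MLA/MoE fields and resolves the legacy num_layers / ffn_hidden_size names into num_hidden_layers / intermediate_size when the canonical names are absent. A small usage sketch based only on the constructor above:

    from sglang.srt.configs.longcat_flash import LongcatFlashConfig

    cfg = LongcatFlashConfig()
    assert cfg.num_hidden_layers == 28        # falls back to num_layers
    assert cfg.intermediate_size == 12288     # falls back to ffn_hidden_size
    assert cfg.moe_intermediate_size == 2048  # taken from expert_ffn_hidden_size

    # The canonical name wins when both are provided.
    cfg2 = LongcatFlashConfig(num_layers=28, num_hidden_layers=56)
    assert cfg2.num_hidden_layers == 56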
@@ -132,6 +132,13 @@ class ModelConfig:
132
132
  if is_draft_model and self.hf_config.architectures[0] == "Glm4MoeForCausalLM":
133
133
  self.hf_config.architectures[0] = "Glm4MoeForCausalLMNextN"
134
134
 
135
+ if (
136
+ is_draft_model
137
+ and self.hf_config.architectures[0] == "LongcatFlashForCausalLM"
138
+ ):
139
+ self.hf_config.architectures[0] = "LongcatFlashForCausalLMNextN"
140
+ self.hf_config.num_hidden_layers = self.hf_config.num_nextn_predict_layers
141
+
135
142
  if is_draft_model and self.hf_config.architectures[0] == "MiMoForCausalLM":
136
143
  self.hf_config.architectures[0] = "MiMoMTP"
137
144
  if (
@@ -199,6 +206,8 @@ class ModelConfig:
199
206
  "DeepseekV2ForCausalLM" in self.hf_config.architectures
200
207
  or "DeepseekV3ForCausalLM" in self.hf_config.architectures
201
208
  or "DeepseekV3ForCausalLMNextN" in self.hf_config.architectures
209
+ or "LongcatFlashForCausalLM" in self.hf_config.architectures
210
+ or "LongcatFlashForCausalLMNextN" in self.hf_config.architectures
202
211
  ):
203
212
  self.head_dim = 256
204
213
  self.attention_arch = AttentionArch.MLA
@@ -270,6 +279,9 @@ class ModelConfig:
270
279
  self.num_key_value_heads = self.num_attention_heads
271
280
  self.hidden_size = self.hf_text_config.hidden_size
272
281
  self.num_hidden_layers = self.hf_text_config.num_hidden_layers
282
+ self.num_attention_layers = self.num_hidden_layers
283
+ if "LongcatFlashForCausalLM" in self.hf_config.architectures:
284
+ self.num_attention_layers = self.num_hidden_layers * 2
273
285
  self.num_nextn_predict_layers = getattr(
274
286
  self.hf_text_config, "num_nextn_predict_layers", None
275
287
  )
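For LongcatFlash, ModelConfig now reports num_attention_layers = 2 * num_hidden_layers (each LongcatFlash layer appears to carry two attention sub-layers), while every other architecture keeps a 1:1 mapping. A sketch of just that derivation; attention_layer_count is a hypothetical helper, not part of sglang's API:

    def attention_layer_count(architectures: list[str], num_hidden_layers: int) -> int:
        # LongcatFlash counts two attention layers per hidden layer.
        if "LongcatFlashForCausalLM" in architectures:
            return num_hidden_layers * 2
        return num_hidden_layers

    assert attention_layer_count(["LlamaForCausalLM"], 32) == 32
    assert attention_layer_count(["LongcatFlashForCausalLM"], 28) == 56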
@@ -393,9 +405,10 @@ class ModelConfig:
393
405
  # compressed-tensors uses a "compression_config" key
394
406
  quant_cfg = getattr(self.hf_config, "compression_config", None)
395
407
  if quant_cfg is None:
396
- # check if is modelopt model -- modelopt doesn't have corresponding field
408
+ # check if this is a modelopt or mixed-precision model -- such a model has no corresponding field
397
409
  # in hf `config.json` but has a standalone `hf_quant_config.json` in the root directory
398
410
  # example: https://huggingface.co/nvidia/Llama-3.1-8B-Instruct-FP8/tree/main
411
+ # example: https://huggingface.co/Barrrrry/DeepSeek-R1-W4AFP8/tree/main
399
412
  is_local = os.path.exists(self.model_path)
400
413
  modelopt_quant_config = {"quant_method": "modelopt"}
401
414
  if not is_local:
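ModelOpt and mixed-precision (e.g. W4AFP8) checkpoints keep their quantization metadata in a standalone hf_quant_config.json next to the weights rather than inside config.json, so detection has to look for that file. A minimal sketch of the local-path case only; remote repositories would need a hub file lookup instead, and the checkpoint path is a placeholder:

    import json
    import os

    def read_standalone_quant_config(model_path: str) -> dict | None:
        # Returns the parsed hf_quant_config.json if present, else None.
        quant_file = os.path.join(model_path, "hf_quant_config.json")
        if not os.path.isfile(quant_file):
            return None
        with open(quant_file) as f:
            return json.load(f)

    cfg = read_standalone_quant_config("/path/to/local/checkpoint")
    print("standalone quant config found" if cfg is not None else "none found")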
@@ -20,7 +20,7 @@ class ConnectorType(str, enum.Enum):
20
20
  KV = "KV"
21
21
 
22
22
 
23
- def create_remote_connector(url, device="cpu") -> BaseConnector:
23
+ def create_remote_connector(url, **kwargs) -> BaseConnector:
24
24
  connector_type = parse_connector_type(url)
25
25
  if connector_type == "redis":
26
26
  return RedisConnector(url)
@@ -20,9 +20,8 @@ class BaseConnector(ABC):
20
20
  <connector_type://<host>:<port>/<model_name>/files/<filename>
21
21
  """
22
22
 
23
- def __init__(self, url: str, device: torch.device = "cpu"):
23
+ def __init__(self, url: str):
24
24
  self.url = url
25
- self.device = device
26
25
  self.closed = False
27
26
  self.local_dir = tempfile.mkdtemp()
28
27
  for sig in (signal.SIGINT, signal.SIGTERM):
@@ -15,10 +15,10 @@ logger = logging.getLogger(__name__)
15
15
 
16
16
  class RedisConnector(BaseKVConnector):
17
17
 
18
- def __init__(self, url: str, device: torch.device = "cpu"):
18
+ def __init__(self, url: str):
19
19
  import redis
20
20
 
21
- super().__init__(url, device)
21
+ super().__init__(url)
22
22
  parsed_url = urlparse(url)
23
23
  self.connection = redis.Redis(host=parsed_url.hostname, port=parsed_url.port)
24
24
  self.model_name = parsed_url.path.lstrip("/")
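RedisConnector derives its connection target and model name from a URL of the form redis://<host>:<port>/<model_name>. A small sketch of that parsing, independent of a live Redis server; the endpoint below is a placeholder:

    from urllib.parse import urlparse

    url = "redis://127.0.0.1:6379/my-model"
    parsed = urlparse(url)
    host, port = parsed.hostname, parsed.port
    model_name = parsed.path.lstrip("/")
    print(host, port, model_name)  # 127.0.0.1 6379 my-model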
@@ -15,7 +15,7 @@ def create_serde(serde_type: str) -> Tuple[Serializer, Deserializer]:
15
15
 
16
16
  if serde_type == "safe":
17
17
  s = SafeSerializer()
18
- d = SafeDeserializer(torch.uint8)
18
+ d = SafeDeserializer()
19
19
  else:
20
20
  raise ValueError(f"Unknown serde type: {serde_type}")
21
21
 
@@ -19,11 +19,12 @@ class SafeSerializer(Serializer):
19
19
 
20
20
  class SafeDeserializer(Deserializer):
21
21
 
22
- def __init__(self, dtype):
23
- super().__init__(dtype)
22
+ def __init__(self):
23
+ # TODO: dtype options
24
+ super().__init__(torch.float32)
24
25
 
25
26
  def from_bytes_normal(self, b: Union[bytearray, bytes]) -> torch.Tensor:
26
- return load(bytes(b))["tensor_bytes"].to(dtype=self.dtype)
27
+ return load(bytes(b))["tensor_bytes"]
27
28
 
28
29
  def from_bytes(self, b: Union[bytearray, bytes]) -> torch.Tensor:
29
30
  return self.from_bytes_normal(b)
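SafeDeserializer now returns the tensor with whatever dtype it was serialized with instead of casting it to a constructor-supplied dtype. A round-trip sketch with safetensors, assuming the serializer stores the payload under the "tensor_bytes" key that the deserializer above reads:

    import torch
    from safetensors.torch import load, save

    t = torch.arange(8, dtype=torch.bfloat16)
    payload = save({"tensor_bytes": t})        # bytes, as the serializer would produce
    restored = load(payload)["tensor_bytes"]   # no .to(dtype=...) cast anymore
    assert restored.dtype == torch.bfloat16
    assert torch.equal(restored.float(), t.float())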
@@ -0,0 +1,117 @@
1
+ import concurrent.futures
2
+ import logging
3
+ from typing import List, Tuple
4
+
5
+ import numpy as np
6
+ import numpy.typing as npt
7
+
8
+ from sglang.srt.disaggregation.ascend.transfer_engine import AscendTransferEngine
9
+ from sglang.srt.disaggregation.common.utils import group_concurrent_contiguous
10
+ from sglang.srt.disaggregation.mooncake.conn import (
11
+ MooncakeKVBootstrapServer,
12
+ MooncakeKVManager,
13
+ MooncakeKVReceiver,
14
+ MooncakeKVSender,
15
+ )
16
+ from sglang.srt.utils import get_local_ip_by_remote
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ class AscendKVManager(MooncakeKVManager):
22
+ def init_engine(self):
23
+ # Initialize the TransferEngine on the Ascend NPU.
24
+ local_ip = get_local_ip_by_remote()
25
+ self.engine = AscendTransferEngine(
26
+ hostname=local_ip,
27
+ npu_id=self.kv_args.gpu_id,
28
+ disaggregation_mode=self.disaggregation_mode,
29
+ )
30
+
31
+ def register_buffer_to_engine(self):
32
+ self.engine.batch_register(self.kv_args.kv_data_ptrs, self.kv_args.kv_data_lens)
33
+ # The Ascend backend optimizes batch registration for small memory blocks.
34
+ self.engine.batch_register(
35
+ self.kv_args.aux_data_ptrs, self.kv_args.aux_data_lens
36
+ )
37
+
38
+ def send_kvcache(
39
+ self,
40
+ mooncake_session_id: str,
41
+ prefill_kv_indices: npt.NDArray[np.int32],
42
+ dst_kv_ptrs: list[int],
43
+ dst_kv_indices: npt.NDArray[np.int32],
44
+ executor: concurrent.futures.ThreadPoolExecutor,
45
+ ):
46
+ # Group by indices
47
+ prefill_kv_blocks, dst_kv_blocks = group_concurrent_contiguous(
48
+ prefill_kv_indices, dst_kv_indices
49
+ )
50
+
51
+ num_layers = len(self.kv_args.kv_data_ptrs)
52
+ layers_params = [
53
+ (
54
+ self.kv_args.kv_data_ptrs[layer_id],
55
+ dst_kv_ptrs[layer_id],
56
+ self.kv_args.kv_item_lens[layer_id],
57
+ )
58
+ for layer_id in range(num_layers)
59
+ ]
60
+
61
+ def set_transfer_blocks(
62
+ src_ptr: int, dst_ptr: int, item_len: int
63
+ ) -> List[Tuple[int, int, int]]:
64
+ transfer_blocks = []
65
+ for prefill_index, decode_index in zip(prefill_kv_blocks, dst_kv_blocks):
66
+ src_addr = src_ptr + int(prefill_index[0]) * item_len
67
+ dst_addr = dst_ptr + int(decode_index[0]) * item_len
68
+ length = item_len * len(prefill_index)
69
+ transfer_blocks.append((src_addr, dst_addr, length))
70
+ return transfer_blocks
71
+
72
+ # Worker function for processing a single layer
73
+ def process_layer(src_ptr: int, dst_ptr: int, item_len: int) -> int:
74
+ transfer_blocks = set_transfer_blocks(src_ptr, dst_ptr, item_len)
75
+ return self._transfer_data(mooncake_session_id, transfer_blocks)
76
+
77
+ # Worker function for processing all layers in a batch
78
+ def process_layers(layers_params: List[Tuple[int, int, int]]) -> int:
79
+ transfer_blocks = []
80
+ for src_ptr, dst_ptr, item_len in layers_params:
81
+ transfer_blocks.extend(set_transfer_blocks(src_ptr, dst_ptr, item_len))
82
+ return self._transfer_data(mooncake_session_id, transfer_blocks)
83
+
84
+ if self.enable_custom_mem_pool:
85
+ futures = [
86
+ executor.submit(
87
+ process_layer,
88
+ src_ptr,
89
+ dst_ptr,
90
+ item_len,
91
+ )
92
+ for (src_ptr, dst_ptr, item_len) in layers_params
93
+ ]
94
+ for future in concurrent.futures.as_completed(futures):
95
+ status = future.result()
96
+ if status != 0:
97
+ for f in futures:
98
+ f.cancel()
99
+ return status
100
+ else:
101
+ # Combining all layers' params in one batch transfer is more efficient
102
+ # than using multiple threads
103
+ return process_layers(layers_params)
104
+
105
+ return 0
106
+
107
+
108
+ class AscendKVSender(MooncakeKVSender):
109
+ pass
110
+
111
+
112
+ class AscendKVReceiver(MooncakeKVReceiver):
113
+ pass
114
+
115
+
116
+ class AscendKVBootstrapServer(MooncakeKVBootstrapServer):
117
+ pass
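send_kvcache groups matching prefill/decode KV indices into contiguous runs and turns each run into a single (src_addr, dst_addr, length) transfer block, which is what the batched transfer ultimately consumes. A self-contained sketch of that address arithmetic; the contiguous grouping is reimplemented here only for illustration (the real code imports group_concurrent_contiguous from sglang):

    import numpy as np

    def group_contiguous(src: np.ndarray, dst: np.ndarray):
        # Split wherever either index sequence stops being consecutive.
        breaks = np.where((np.diff(src) != 1) | (np.diff(dst) != 1))[0] + 1
        return np.split(src, breaks), np.split(dst, breaks)

    def to_transfer_blocks(src_ptr, dst_ptr, item_len, src_idx, dst_idx):
        blocks = []
        for s_run, d_run in zip(*group_contiguous(src_idx, dst_idx)):
            src_addr = src_ptr + int(s_run[0]) * item_len
            dst_addr = dst_ptr + int(d_run[0]) * item_len
            blocks.append((src_addr, dst_addr, item_len * len(s_run)))
        return blocks

    src_idx = np.array([4, 5, 6, 10], dtype=np.int32)
    dst_idx = np.array([0, 1, 2, 7], dtype=np.int32)
    print(to_transfer_blocks(0x1000, 0x8000, 256, src_idx, dst_idx))
    # [(5120, 32768, 768), (6656, 34560, 256)]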
@@ -6,7 +6,6 @@ from sglang.srt.disaggregation.mini_lb import PrefillConfig, run
6
6
 
7
7
  @dataclasses.dataclass
8
8
  class LBArgs:
9
- rust_lb: bool = False
10
9
  host: str = "0.0.0.0"
11
10
  port: int = 8000
12
11
  policy: str = "random"
@@ -17,11 +16,6 @@ class LBArgs:
17
16
 
18
17
  @staticmethod
19
18
  def add_cli_args(parser: argparse.ArgumentParser):
20
- parser.add_argument(
21
- "--rust-lb",
22
- action="store_true",
23
- help="Deprecated, please use SGLang Router instead, this argument will have no effect.",
24
- )
25
19
  parser.add_argument(
26
20
  "--host",
27
21
  type=str,
@@ -92,7 +86,6 @@ class LBArgs:
92
86
  ]
93
87
 
94
88
  return cls(
95
- rust_lb=args.rust_lb,
96
89
  host=args.host,
97
90
  port=args.port,
98
91
  policy=args.policy,
@@ -102,12 +95,6 @@ class LBArgs:
102
95
  timeout=args.timeout,
103
96
  )
104
97
 
105
- def __post_init__(self):
106
- if not self.rust_lb:
107
- assert (
108
- self.policy == "random"
109
- ), "Only random policy is supported for Python load balancer"
110
-
111
98
 
112
99
  def main():
113
100
  parser = argparse.ArgumentParser(
@@ -7,6 +7,7 @@ import dataclasses
7
7
  import logging
8
8
  import random
9
9
  import urllib
10
+ from http import HTTPStatus
10
11
  from itertools import chain
11
12
  from typing import List, Optional
12
13
 
@@ -262,14 +263,38 @@ async def get_server_info():
262
263
 
263
264
  @app.get("/get_model_info")
264
265
  async def get_model_info():
265
- # Dummy model information
266
- model_info = {
267
- "model_path": "/path/to/dummy/model",
268
- "tokenizer_path": "/path/to/dummy/tokenizer",
269
- "is_generation": True,
270
- "preferred_sampling_params": {"temperature": 0.7, "max_new_tokens": 128},
271
- }
272
- return ORJSONResponse(content=model_info)
266
+ global load_balancer
267
+
268
+ if not load_balancer or not load_balancer.prefill_servers:
269
+ raise HTTPException(
270
+ status_code=HTTPStatus.SERVICE_UNAVAILABLE,
271
+ detail="There is no server registered",
272
+ )
273
+
274
+ target_server_url = load_balancer.prefill_servers[0]
275
+ endpoint_url = f"{target_server_url}/get_model_info"
276
+
277
+ async with aiohttp.ClientSession() as session:
278
+ try:
279
+ async with session.get(endpoint_url) as response:
280
+ if response.status != 200:
281
+ error_text = await response.text()
282
+ raise HTTPException(
283
+ status_code=HTTPStatus.BAD_GATEWAY,
284
+ detail=(
285
+ f"Failed to get model info from {target_server_url}"
286
+ f"Status: {response.status}, Response: {error_text}"
287
+ ),
288
+ )
289
+
290
+ model_info_json = await response.json()
291
+ return ORJSONResponse(content=model_info_json)
292
+
293
+ except aiohttp.ClientError as e:
294
+ raise HTTPException(
295
+ status_code=HTTPStatus.SERVICE_UNAVAILABLE,
296
+ detail=f"Failed to get model info from backend",
297
+ )
273
298
 
274
299
 
275
300
  @app.post("/generate")
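/get_model_info on the mini load balancer is no longer a dummy payload; it proxies the first registered prefill server and surfaces backend failures as 502/503 errors. A condensed sketch of the same forward-or-fail pattern as a standalone helper, using aiohttp and FastAPI's HTTPException as above; the helper name is illustrative only:

    from http import HTTPStatus

    import aiohttp
    from fastapi import HTTPException

    async def proxy_get_json(base_url: str, path: str) -> dict:
        url = f"{base_url}{path}"
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as response:
                    if response.status != 200:
                        raise HTTPException(
                            status_code=HTTPStatus.BAD_GATEWAY,
                            detail=f"Backend {url} returned {response.status}",
                        )
                    return await response.json()
        except aiohttp.ClientError as e:
            raise HTTPException(
                status_code=HTTPStatus.SERVICE_UNAVAILABLE,
                detail=f"Backend {url} unreachable: {e}",
            )

    # Inside an async endpoint: info = await proxy_get_json(prefill_servers[0], "/get_model_info")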
@@ -567,7 +567,7 @@ class SchedulerDisaggregationPrefillMixin:
567
567
  # Move the chunked request out of the batch so that we can merge
568
568
  # only finished requests to running_batch.
569
569
  self.last_batch.filter_batch(chunked_req_to_exclude=self.chunked_req)
570
- self.tree_cache.cache_unfinished_req(self.chunked_req)
570
+ self.tree_cache.cache_unfinished_req(self.chunked_req, chunked=True)
571
571
  if self.enable_overlap:
572
572
  # Delay KV transfer to process_batch_result_disagg_prefill when overlap is enabled to ensure results are resolved
573
573
  self.chunked_req.tmp_end_idx = min(
@@ -43,6 +43,7 @@ from sglang.srt.utils import (
43
43
  direct_register_custom_op,
44
44
  get_bool_env_var,
45
45
  get_int_env_var,
46
+ is_cpu,
46
47
  is_cuda_alike,
47
48
  is_hip,
48
49
  is_npu,
@@ -51,6 +52,9 @@ from sglang.srt.utils import (
51
52
  )
52
53
 
53
54
  _is_npu = is_npu()
55
+ _is_cpu = is_cpu()
56
+
57
+ IS_ONE_DEVICE_PER_PROCESS = get_bool_env_var("SGLANG_ONE_DEVICE_PER_PROCESS")
54
58
 
55
59
 
56
60
  @dataclass
@@ -223,10 +227,12 @@ class GroupCoordinator:
223
227
  use_message_queue_broadcaster: bool = False,
224
228
  group_name: Optional[str] = None,
225
229
  ):
230
+ # Set group info
226
231
  group_name = group_name or "anonymous"
227
232
  self.unique_name = _get_unique_name(group_name)
228
233
  _register_group(self)
229
234
 
235
+ # Set rank info
230
236
  self.rank = torch.distributed.get_rank()
231
237
  self.local_rank = local_rank
232
238
  self.device_group = None
@@ -250,14 +256,16 @@ class GroupCoordinator:
250
256
  assert self.cpu_group is not None
251
257
  assert self.device_group is not None
252
258
 
259
+ device_id = 0 if IS_ONE_DEVICE_PER_PROCESS else local_rank
253
260
  if is_cuda_alike():
254
- self.device = torch.device(f"cuda:{local_rank}")
261
+ self.device = torch.device(f"cuda:{device_id}")
255
262
  elif _is_npu:
256
- self.device = torch.device(f"npu:{local_rank}")
263
+ self.device = torch.device(f"npu:{device_id}")
257
264
  else:
258
265
  self.device = torch.device("cpu")
259
266
  self.device_module = torch.get_device_module(self.device)
260
267
 
268
+ # Import communicators
261
269
  self.use_pynccl = use_pynccl
262
270
  self.use_pymscclpp = use_pymscclpp
263
271
  self.use_custom_allreduce = use_custom_allreduce
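When SGLANG_ONE_DEVICE_PER_PROCESS is set (e.g. each process is restricted to a single visible accelerator), the coordinator binds device index 0 instead of local_rank. A small sketch of that selection in isolation, not the actual GroupCoordinator code; the boolean parsing only approximates sglang's get_bool_env_var:

    import os

    import torch

    def select_device(local_rank: int, is_cuda_alike: bool) -> torch.device:
        one_device_per_process = os.environ.get(
            "SGLANG_ONE_DEVICE_PER_PROCESS", "false"
        ).lower() in ("1", "true", "yes")
        device_id = 0 if one_device_per_process else local_rank
        return torch.device(f"cuda:{device_id}") if is_cuda_alike else torch.device("cpu")

    print(select_device(local_rank=3, is_cuda_alike=True))  # cuda:3 by default
    os.environ["SGLANG_ONE_DEVICE_PER_PROCESS"] = "true"
    print(select_device(local_rank=3, is_cuda_alike=True))  # cuda:0 in one-device-per-process mode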
@@ -270,6 +278,9 @@ class GroupCoordinator:
270
278
  from sglang.srt.distributed.device_communicators.custom_all_reduce import (
271
279
  CustomAllreduce,
272
280
  )
281
+ from sglang.srt.distributed.device_communicators.pymscclpp import (
282
+ PyMscclppCommunicator,
283
+ )
273
284
  from sglang.srt.distributed.device_communicators.pynccl import (
274
285
  PyNcclCommunicator,
275
286
  )
@@ -287,10 +298,6 @@ class GroupCoordinator:
287
298
  device=self.device,
288
299
  )
289
300
 
290
- from sglang.srt.distributed.device_communicators.pymscclpp import (
291
- PyMscclppCommunicator,
292
- )
293
-
294
301
  self.pymscclpp_comm: Optional[PyMscclppCommunicator] = None
295
302
  if use_pymscclpp and self.world_size > 1:
296
303
  self.pymscclpp_comm = PyMscclppCommunicator(
@@ -325,30 +332,30 @@ class GroupCoordinator:
325
332
  except Exception as e:
326
333
  logger.warning(f"Failed to initialize QuickAllReduce: {e}")
327
334
 
335
+ # Create communicator for other hardware backends
328
336
  from sglang.srt.distributed.device_communicators.hpu_communicator import (
329
337
  HpuCommunicator,
330
338
  )
339
+ from sglang.srt.distributed.device_communicators.npu_communicator import (
340
+ NpuCommunicator,
341
+ )
342
+ from sglang.srt.distributed.device_communicators.xpu_communicator import (
343
+ XpuCommunicator,
344
+ )
331
345
 
332
346
  self.hpu_communicator: Optional[HpuCommunicator] = None
333
347
  if use_hpu_communicator and self.world_size > 1:
334
348
  self.hpu_communicator = HpuCommunicator(group=self.device_group)
335
349
 
336
- from sglang.srt.distributed.device_communicators.xpu_communicator import (
337
- XpuCommunicator,
338
- )
339
-
340
350
  self.xpu_communicator: Optional[XpuCommunicator] = None
341
351
  if use_xpu_communicator and self.world_size > 1:
342
352
  self.xpu_communicator = XpuCommunicator(group=self.device_group)
343
353
 
344
- from sglang.srt.distributed.device_communicators.npu_communicator import (
345
- NpuCommunicator,
346
- )
347
-
348
354
  self.npu_communicator: Optional[NpuCommunicator] = None
349
355
  if use_npu_communicator and self.world_size > 1:
350
356
  self.npu_communicator = NpuCommunicator(group=self.device_group)
351
357
 
358
+ # Create message queue
352
359
  from sglang.srt.distributed.device_communicators.shm_broadcast import (
353
360
  MessageQueue,
354
361
  )
@@ -848,6 +855,11 @@ class GroupCoordinator:
848
855
  )
849
856
  return obj_list
850
857
 
858
+ def all_gather_object(self, obj: Any) -> List[Any]:
859
+ objs = [None] * self.world_size
860
+ torch.distributed.all_gather_object(objs, obj, group=self.cpu_group)
861
+ return objs
862
+
851
863
  def send_object(self, obj: Any, dst: int) -> None:
852
864
  """Send the input object list to the destination rank."""
853
865
  """NOTE: `dst` is the local rank of the destination rank."""
@@ -1633,7 +1645,7 @@ def cleanup_dist_env_and_memory(shutdown_ray: bool = False):
1633
1645
 
1634
1646
  ray.shutdown()
1635
1647
  gc.collect()
1636
- if not current_platform.is_cpu():
1648
+ if not _is_cpu:
1637
1649
  if hasattr(torch, "cuda") and torch.cuda.is_available():
1638
1650
  torch.cuda.empty_cache()
1639
1651
  if hasattr(torch._C, "_host_emptyCache"):