sglang 0.4.6.post4__py3-none-any.whl → 0.4.6.post5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sglang/bench_offline_throughput.py +6 -6
- sglang/bench_one_batch.py +5 -4
- sglang/bench_one_batch_server.py +23 -15
- sglang/bench_serving.py +133 -57
- sglang/compile_deep_gemm.py +4 -4
- sglang/srt/configs/model_config.py +39 -28
- sglang/srt/conversation.py +1 -1
- sglang/srt/disaggregation/decode.py +122 -133
- sglang/srt/disaggregation/decode_schedule_batch_mixin.py +142 -0
- sglang/srt/disaggregation/fake/conn.py +3 -13
- sglang/srt/disaggregation/kv_events.py +357 -0
- sglang/srt/disaggregation/mini_lb.py +57 -24
- sglang/srt/disaggregation/mooncake/conn.py +11 -2
- sglang/srt/disaggregation/mooncake/transfer_engine.py +2 -1
- sglang/srt/disaggregation/nixl/conn.py +9 -19
- sglang/srt/disaggregation/prefill.py +126 -44
- sglang/srt/disaggregation/utils.py +116 -5
- sglang/srt/distributed/utils.py +3 -3
- sglang/srt/entrypoints/EngineBase.py +5 -0
- sglang/srt/entrypoints/engine.py +28 -8
- sglang/srt/entrypoints/http_server.py +6 -4
- sglang/srt/entrypoints/http_server_engine.py +5 -2
- sglang/srt/function_call/base_format_detector.py +250 -0
- sglang/srt/function_call/core_types.py +34 -0
- sglang/srt/function_call/deepseekv3_detector.py +157 -0
- sglang/srt/function_call/ebnf_composer.py +234 -0
- sglang/srt/function_call/function_call_parser.py +175 -0
- sglang/srt/function_call/llama32_detector.py +74 -0
- sglang/srt/function_call/mistral_detector.py +84 -0
- sglang/srt/function_call/pythonic_detector.py +163 -0
- sglang/srt/function_call/qwen25_detector.py +67 -0
- sglang/srt/function_call/utils.py +35 -0
- sglang/srt/hf_transformers_utils.py +46 -7
- sglang/srt/layers/attention/aiter_backend.py +513 -0
- sglang/srt/layers/attention/flashattention_backend.py +63 -17
- sglang/srt/layers/attention/flashinfer_mla_backend.py +8 -4
- sglang/srt/layers/attention/flashmla_backend.py +340 -78
- sglang/srt/layers/attention/triton_backend.py +3 -0
- sglang/srt/layers/attention/utils.py +2 -2
- sglang/srt/layers/attention/vision.py +1 -1
- sglang/srt/layers/communicator.py +451 -0
- sglang/srt/layers/dp_attention.py +0 -10
- sglang/srt/layers/moe/cutlass_moe.py +207 -0
- sglang/srt/layers/moe/ep_moe/kernels.py +33 -11
- sglang/srt/layers/moe/ep_moe/layer.py +104 -50
- sglang/srt/layers/moe/ep_moe/token_dispatcher.py +82 -7
- sglang/srt/layers/moe/fused_moe_triton/layer.py +14 -0
- sglang/srt/layers/moe/topk.py +66 -9
- sglang/srt/layers/multimodal.py +70 -0
- sglang/srt/layers/quantization/__init__.py +7 -2
- sglang/srt/layers/quantization/deep_gemm.py +5 -3
- sglang/srt/layers/quantization/fp8.py +90 -0
- sglang/srt/layers/quantization/fp8_utils.py +6 -0
- sglang/srt/layers/quantization/gptq.py +298 -6
- sglang/srt/layers/quantization/int8_kernel.py +18 -5
- sglang/srt/layers/quantization/qoq.py +244 -0
- sglang/srt/lora/lora_manager.py +1 -3
- sglang/srt/managers/deepseek_eplb.py +278 -0
- sglang/srt/managers/eplb_manager.py +55 -0
- sglang/srt/managers/expert_distribution.py +704 -56
- sglang/srt/managers/expert_location.py +394 -0
- sglang/srt/managers/expert_location_dispatch.py +91 -0
- sglang/srt/managers/io_struct.py +16 -3
- sglang/srt/managers/mm_utils.py +293 -139
- sglang/srt/managers/multimodal_processors/base_processor.py +127 -42
- sglang/srt/managers/multimodal_processors/deepseek_vl_v2.py +6 -1
- sglang/srt/managers/multimodal_processors/gemma3.py +31 -6
- sglang/srt/managers/multimodal_processors/internvl.py +14 -5
- sglang/srt/managers/multimodal_processors/janus_pro.py +7 -1
- sglang/srt/managers/multimodal_processors/kimi_vl.py +7 -6
- sglang/srt/managers/multimodal_processors/llava.py +3 -3
- sglang/srt/managers/multimodal_processors/minicpm.py +25 -31
- sglang/srt/managers/multimodal_processors/mllama4.py +6 -0
- sglang/srt/managers/multimodal_processors/pixtral.py +9 -9
- sglang/srt/managers/multimodal_processors/qwen_vl.py +58 -16
- sglang/srt/managers/schedule_batch.py +49 -21
- sglang/srt/managers/schedule_policy.py +4 -5
- sglang/srt/managers/scheduler.py +92 -50
- sglang/srt/managers/session_controller.py +1 -1
- sglang/srt/managers/tokenizer_manager.py +99 -24
- sglang/srt/mem_cache/base_prefix_cache.py +3 -0
- sglang/srt/mem_cache/chunk_cache.py +3 -1
- sglang/srt/mem_cache/hiradix_cache.py +4 -4
- sglang/srt/mem_cache/memory_pool.py +74 -52
- sglang/srt/mem_cache/multimodal_cache.py +45 -0
- sglang/srt/mem_cache/radix_cache.py +58 -5
- sglang/srt/metrics/collector.py +2 -2
- sglang/srt/mm_utils.py +10 -0
- sglang/srt/model_executor/cuda_graph_runner.py +20 -9
- sglang/srt/model_executor/expert_location_updater.py +422 -0
- sglang/srt/model_executor/forward_batch_info.py +4 -0
- sglang/srt/model_executor/model_runner.py +144 -54
- sglang/srt/model_loader/loader.py +10 -6
- sglang/srt/models/clip.py +5 -1
- sglang/srt/models/deepseek_v2.py +297 -343
- sglang/srt/models/exaone.py +8 -3
- sglang/srt/models/gemma3_mm.py +70 -33
- sglang/srt/models/llama4.py +10 -2
- sglang/srt/models/llava.py +26 -18
- sglang/srt/models/mimo_mtp.py +220 -0
- sglang/srt/models/minicpmo.py +5 -12
- sglang/srt/models/mistral.py +71 -1
- sglang/srt/models/mllama.py +3 -3
- sglang/srt/models/qwen2.py +95 -26
- sglang/srt/models/qwen2_5_vl.py +8 -0
- sglang/srt/models/qwen2_moe.py +330 -60
- sglang/srt/models/qwen2_vl.py +6 -0
- sglang/srt/models/qwen3.py +52 -10
- sglang/srt/models/qwen3_moe.py +411 -48
- sglang/srt/models/siglip.py +294 -0
- sglang/srt/openai_api/adapter.py +28 -16
- sglang/srt/openai_api/protocol.py +6 -0
- sglang/srt/operations.py +154 -0
- sglang/srt/operations_strategy.py +31 -0
- sglang/srt/server_args.py +134 -24
- sglang/srt/speculative/eagle_utils.py +131 -0
- sglang/srt/speculative/eagle_worker.py +47 -2
- sglang/srt/utils.py +68 -12
- sglang/test/test_cutlass_moe.py +278 -0
- sglang/test/test_utils.py +2 -36
- sglang/utils.py +2 -2
- sglang/version.py +1 -1
- {sglang-0.4.6.post4.dist-info → sglang-0.4.6.post5.dist-info}/METADATA +20 -11
- {sglang-0.4.6.post4.dist-info → sglang-0.4.6.post5.dist-info}/RECORD +128 -102
- {sglang-0.4.6.post4.dist-info → sglang-0.4.6.post5.dist-info}/WHEEL +1 -1
- sglang/srt/function_call_parser.py +0 -858
- sglang/srt/platforms/interface.py +0 -371
- /sglang/srt/models/{xiaomi_mimo.py → mimo.py} +0 -0
- {sglang-0.4.6.post4.dist-info → sglang-0.4.6.post5.dist-info}/licenses/LICENSE +0 -0
- {sglang-0.4.6.post4.dist-info → sglang-0.4.6.post5.dist-info}/top_level.txt +0 -0
```diff
@@ -24,10 +24,10 @@ def launch_server_process(server_args: ServerArgs) -> multiprocessing.Process:
 
     base_url = server_args.url()
     timeout = 300.0  # Increased timeout to 5 minutes for downloading large models
-    start_time = time.time()
+    start_time = time.perf_counter()
 
     with requests.Session() as session:
-        while time.time() - start_time < timeout:
+        while time.perf_counter() - start_time < timeout:
             try:
                 headers = {
                     "Content-Type": "application/json; charset=utf-8",
```
```diff
@@ -140,3 +140,6 @@ class HttpServerEngineAdapter(EngineBase):
 
     def resume_memory_occupation(self):
         return self._make_request("resume_memory_occupation")
+
+    def flush_cache(self):
+        return self._make_request("flush_cache")
```
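The new `flush_cache()` follows the same thin-wrapper pattern as the adapter's other methods: it forwards the endpoint name to `_make_request`, which talks to the launched HTTP server. The diff does not show how `_make_request` builds the request, so the sketch below is only a guess at the wire-level equivalent; the `/flush_cache` path and the use of POST are assumptions, not confirmed by this diff:

```python
import requests


def flush_cache(base_url: str, timeout: float = 30.0) -> requests.Response:
    """Rough standalone equivalent of HttpServerEngineAdapter.flush_cache().

    Assumes the server exposes a /flush_cache endpoint reachable via POST;
    adjust the path/verb to whatever _make_request actually uses.
    """
    resp = requests.post(f"{base_url}/flush_cache", timeout=timeout)
    resp.raise_for_status()
    return resp
```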
@@ -0,0 +1,250 @@

```python
import json
import logging
from abc import ABC, abstractmethod
from typing import Any, Dict, List

from partial_json_parser.core.exceptions import MalformedJSON
from partial_json_parser.core.options import Allow

from sglang.srt.function_call.core_types import (
    StreamingParseResult,
    ToolCallItem,
    _GetInfoFunc,
)
from sglang.srt.function_call.utils import (
    _find_common_prefix,
    _is_complete_json,
    _partial_json_loads,
)
from sglang.srt.openai_api.protocol import Tool

logger = logging.getLogger(__name__)


class BaseFormatDetector(ABC):
    """Base class providing two sets of interfaces: one-time and streaming incremental."""

    def __init__(self):
        # initialize properties used for state when parsing tool calls in
        self._buffer = ""
        # streaming mode
        self.prev_tool_call_arr: List[Dict] = []
        self.current_tool_id: int = -1
        self.current_tool_name_sent: bool = False
        self.streamed_args_for_tool: List[str] = (
            []
        )  # map what has been streamed for each tool so far to a list
        self.bot_token = ""
        self.eot_token = ""

    def parse_base_json(self, action: Any, tools: List[Tool]) -> List[ToolCallItem]:
        tool_indices = {
            tool.function.name: i for i, tool in enumerate(tools) if tool.function.name
        }
        if not isinstance(action, list):
            action = [action]

        results = []
        for act in action:
            name = act.get("name")
            if name and name in tool_indices:
                results.append(
                    ToolCallItem(
                        tool_index=tool_indices[name],
                        name=name,
                        parameters=json.dumps(
                            act.get("parameters") or act.get("arguments", {}),
                            ensure_ascii=False,
                        ),
                    )
                )
            else:
                logger.warning(f"Model attempted to call undefined function: {name}")

        return results

    @abstractmethod
    def detect_and_parse(self, text: str, tools: List[Tool]) -> StreamingParseResult:
        """
        Parses the text in one go. Returns success=True if the format matches, otherwise False.
        Note that leftover_text here represents "content that this parser will not consume further".
        """
        action = json.loads(text)
        return StreamingParseResult(calls=self.parse_base_json(action, tools))

    def parse_streaming_increment(
        self, new_text: str, tools: List[Tool]
    ) -> StreamingParseResult:
        """
        Streaming incremental parsing with tool validation.
        """
        # Append new text to buffer
        self._buffer += new_text
        current_text = self._buffer
        if not (self.bot_token in current_text or current_text.startswith("{")):
            self._buffer = ""
            if self.eot_token in new_text:
                new_text = new_text.replace(self.eot_token, "")
            return StreamingParseResult(normal_text=new_text)

        # Build tool indices if not already built
        if not hasattr(self, "_tool_indices"):
            self._tool_indices = {
                tool.function.name: i
                for i, tool in enumerate(tools)
                if tool.function and tool.function.name
            }

        flags = Allow.ALL if self.current_tool_name_sent else Allow.ALL & ~Allow.STR
        try:
            tool_call_arr = []
            is_complete = []
            try:
                start_idx = (
                    len(self.bot_token)
                    if current_text.startswith(self.bot_token)
                    else 0
                )
                while start_idx < len(current_text):
                    (obj, end_idx) = _partial_json_loads(
                        current_text[start_idx:], flags
                    )
                    is_complete.append(
                        _is_complete_json(current_text[start_idx : start_idx + end_idx])
                    )
                    start_idx += end_idx + len("; ")

                    # Validate tool name if present
                    if "name" in obj and obj["name"] not in self._tool_indices:
                        # Invalid tool name - reset state
                        self._buffer = ""
                        self.current_tool_id = -1
                        self.current_tool_name_sent = False
                        if self.streamed_args_for_tool:
                            self.streamed_args_for_tool.pop()
                        return StreamingParseResult()

                    # Handle parameters/arguments consistency
                    if "parameters" in obj:
                        assert (
                            "arguments" not in obj
                        ), "model generated both parameters and arguments"
                        obj["arguments"] = obj["parameters"]
                    tool_call_arr.append(obj)

            except MalformedJSON:
                return StreamingParseResult()

            if len(tool_call_arr) == 0:
                return StreamingParseResult()

            current_tool_call: Dict = (
                tool_call_arr[self.current_tool_id] if len(tool_call_arr) > 0 else {}
            )

            # Handle new tool in array
            if len(tool_call_arr) > 0 and len(tool_call_arr) > self.current_tool_id + 1:
                if self.current_tool_id >= 0:
                    cur_arguments = current_tool_call.get("arguments")
                    if cur_arguments:
                        cur_args_json = json.dumps(cur_arguments)
                        sent = len(self.streamed_args_for_tool[self.current_tool_id])
                        argument_diff = cur_args_json[sent:]

                        res = StreamingParseResult(
                            calls=[
                                ToolCallItem(
                                    tool_index=self.current_tool_id,
                                    name="",
                                    parameters=argument_diff,
                                )
                            ],
                        )
                        self.streamed_args_for_tool[
                            self.current_tool_id
                        ] += argument_diff
                    else:
                        res = StreamingParseResult()
                else:
                    res = StreamingParseResult()

                self.current_tool_id = len(tool_call_arr) - 1
                self.current_tool_name_sent = False
                self.streamed_args_for_tool.append("")
                return res

            # Handle tool name
            elif not self.current_tool_name_sent:
                function_name = current_tool_call.get("name")
                if function_name and function_name in self._tool_indices:
                    res = StreamingParseResult(
                        calls=[
                            ToolCallItem(
                                tool_index=self._tool_indices[function_name],
                                name=function_name,
                                parameters="",
                            )
                        ],
                    )
                    self.current_tool_name_sent = True
                else:
                    res = StreamingParseResult()

            # Handle streaming arguments
            else:
                cur_arguments = current_tool_call.get("arguments")
                res = StreamingParseResult()

                if cur_arguments:
                    sent = len(self.streamed_args_for_tool[self.current_tool_id])
                    cur_args_json = json.dumps(cur_arguments)
                    prev_arguments = self.prev_tool_call_arr[self.current_tool_id].get(
                        "arguments"
                    )

                    argument_diff = None
                    if is_complete[self.current_tool_id]:
                        argument_diff = cur_args_json[sent:]
                        self._buffer = ""
                        self.prev_tool_call_arr[self.current_tool_id].clear()
                        self.current_tool_name_sent = False
                        self.streamed_args_for_tool[self.current_tool_id] = ""

                    elif prev_arguments:
                        prev_args_json = json.dumps(prev_arguments)
                        if cur_args_json != prev_args_json:
                            prefix = _find_common_prefix(prev_args_json, cur_args_json)
                            argument_diff = prefix[sent:]

                    if argument_diff is not None:
                        res = StreamingParseResult(
                            calls=[
                                ToolCallItem(
                                    tool_index=self.current_tool_id,
                                    parameters=argument_diff,
                                )
                            ],
                        )
                        if not is_complete[self.current_tool_id]:
                            self.streamed_args_for_tool[
                                self.current_tool_id
                            ] += argument_diff

            self.prev_tool_call_arr = tool_call_arr
            return res

        except Exception as e:
            logger.error(f"Error in parse_streaming_increment: {e}")
            return StreamingParseResult()

    @abstractmethod
    def has_tool_call(self, text: str) -> bool:
        raise NotImplementedError()

    @abstractmethod
    def structure_info(self) -> _GetInfoFunc:
        raise NotImplementedError()

    @abstractmethod
    def build_ebnf(self, tools: List[Tool]) -> str:
        raise NotImplementedError()
```
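Judging by the class it defines and the +250 entry in the file list, this new file is sglang/srt/function_call/base_format_detector.py. To make its contract concrete, here is a minimal, hypothetical subclass for a made-up `<tool_call>{...}</tool_call>` wrapper format. It is not one of the detectors shipped in this release; it only illustrates which pieces a format detector must supply (the bot/eot tokens plus the abstract methods), reusing `parse_base_json` from the base class:

```python
import json
from typing import List

from sglang.srt.function_call.base_format_detector import BaseFormatDetector
from sglang.srt.function_call.core_types import (
    StreamingParseResult,
    StructureInfo,
    _GetInfoFunc,
)
from sglang.srt.openai_api.protocol import Tool


class ToyTagDetector(BaseFormatDetector):
    """Hypothetical detector for a '<tool_call>{...}</tool_call>' format (illustration only)."""

    def __init__(self):
        super().__init__()
        self.bot_token = "<tool_call>"
        self.eot_token = "</tool_call>"

    def has_tool_call(self, text: str) -> bool:
        return self.bot_token in text

    def detect_and_parse(self, text: str, tools: List[Tool]) -> StreamingParseResult:
        idx = text.find(self.bot_token)
        if idx == -1:
            return StreamingParseResult(normal_text=text)
        normal_text = text[:idx]
        payload = text[idx + len(self.bot_token) :].split(self.eot_token)[0]
        # Delegate the common JSON-to-ToolCallItem conversion to the base class.
        calls = self.parse_base_json(json.loads(payload), tools)
        return StreamingParseResult(normal_text=normal_text, calls=calls)

    def structure_info(self) -> _GetInfoFunc:
        return lambda name: StructureInfo(
            begin='<tool_call>{"name": "' + name + '", "arguments": ',
            end="}</tool_call>",
            trigger="<tool_call>",
        )

    def build_ebnf(self, tools: List[Tool]) -> str:
        # A real detector would compose a grammar with EBNFComposer
        # (sglang/srt/function_call/ebnf_composer.py in this diff); omitted here.
        raise NotImplementedError
```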
@@ -0,0 +1,34 @@

```python
from dataclasses import dataclass
from typing import Callable, List, Optional

from pydantic import BaseModel


class ToolCallItem(BaseModel):
    """Simple encapsulation of the parsed ToolCall result for easier usage in streaming contexts."""

    tool_index: int
    name: Optional[str] = None
    parameters: str  # JSON string


class StreamingParseResult(BaseModel):
    """Result of streaming incremental parsing."""

    normal_text: str = ""
    calls: List[ToolCallItem] = []


@dataclass
class StructureInfo:
    begin: str
    end: str
    trigger: str


"""
Helper alias of function
Usually it is a function that takes a name string and returns a StructureInfo object,
which can be used to construct a structural_tag object
"""
_GetInfoFunc = Callable[[str], StructureInfo]
```
@@ -0,0 +1,157 @@

````python
import json
import logging
import re
from typing import List

from sglang.srt.function_call.base_format_detector import BaseFormatDetector
from sglang.srt.function_call.core_types import (
    StreamingParseResult,
    StructureInfo,
    ToolCallItem,
    _GetInfoFunc,
)
from sglang.srt.function_call.ebnf_composer import EBNFComposer
from sglang.srt.function_call.utils import _is_complete_json
from sglang.srt.openai_api.protocol import Tool

logger = logging.getLogger(__name__)


class DeepSeekV3Detector(BaseFormatDetector):
    """
    Detector for DeepSeek models.
    Assumes function call format:
        '<｜tool▁calls▁begin｜><｜tool▁call▁begin｜>function<｜tool▁sep｜>get_current_weather\n```json\n{"location": "Tokyo"}\n```<｜tool▁call▁end｜>\n<｜tool▁call▁begin｜>function<｜tool▁sep｜>get_current_weather\n```json\n{"location": "Paris"}\n```<｜tool▁call▁end｜><｜tool▁calls▁end｜><｜end▁of▁sentence｜>
    """

    def __init__(self):
        super().__init__()
        self.bot_token = "<｜tool▁calls▁begin｜>"
        self.eot_token = "<｜tool▁calls▁end｜>"
        self.func_call_regex = r"<｜tool▁call▁begin｜>.*?<｜tool▁call▁end｜>"
        self.func_detail_regex = r"<｜tool▁call▁begin｜>(.*)<｜tool▁sep｜>(.*)\n```json\n(.*)\n```<｜tool▁call▁end｜>"
        self._last_arguments = ""

    def has_tool_call(self, text: str) -> bool:
        """Check if the text contains a deepseek format tool call."""
        return self.bot_token in text

    def detect_and_parse(self, text: str, tools: List[Tool]) -> StreamingParseResult:
        """
        One-time parsing: Detects and parses tool calls in the provided text.

        :param text: The complete text to parse.
        :param tools: List of available tools.
        :return: ParseResult indicating success or failure, consumed text, leftover text, and parsed calls.
        """
        idx = text.find(self.bot_token)
        normal_text = text[:idx].strip() if idx != -1 else text
        if self.bot_token not in text:
            return StreamingParseResult(normal_text=normal_text, calls=[])
        match_result_list = re.findall(self.func_call_regex, text, re.DOTALL)
        calls = []
        try:
            for match_result in match_result_list:
                # Get function name
                func_detail = re.search(self.func_detail_regex, match_result, re.DOTALL)
                func_name = func_detail.group(2)
                func_args = func_detail.group(3)
                func_args = json.loads(func_args)
                # construct match_result for parse_base_json
                match_result = {"name": func_name, "parameters": func_args}
                calls.extend(self.parse_base_json(match_result, tools))
            return StreamingParseResult(normal_text=normal_text, calls=calls)
        except Exception as e:
            logger.error(f"Error in detect_and_parse: {e}")
            # return the normal text if parsing fails
            return StreamingParseResult(normal_text=text)

    def parse_streaming_increment(
        self, new_text: str, tools: List[Tool]
    ) -> StreamingParseResult:
        """
        Streaming incremental parsing tool calls for DeepSeekV3 format.
        """
        self._buffer += new_text
        current_text = self._buffer

        if self.bot_token not in current_text:
            self._buffer = ""
            for e_token in [self.eot_token, "```", "<｜tool▁call▁end｜>"]:
                if e_token in new_text:
                    new_text = new_text.replace(e_token, "")
            return StreamingParseResult(normal_text=new_text)

        if not hasattr(self, "_tool_indices"):
            self._tool_indices = {
                tool.function.name: i
                for i, tool in enumerate(tools)
                if tool.function and tool.function.name
            }

        calls: list[ToolCallItem] = []
        try:
            partial_match = re.search(
                pattern=r"<｜tool▁call▁begin｜>(.*)<｜tool▁sep｜>(.*)\n```json\n(.*)",
                string=current_text,
                flags=re.DOTALL,
            )
            if partial_match:
                func_name = partial_match.group(2).strip()
                func_args_raw = partial_match.group(3).strip()

                if not self.current_tool_name_sent:
                    calls.append(
                        ToolCallItem(
                            tool_index=self._tool_indices.get(func_name, 0),
                            name=func_name,
                            parameters="",
                        )
                    )
                    self.current_tool_name_sent = True
                else:
                    argument_diff = (
                        func_args_raw[len(self._last_arguments) :]
                        if func_args_raw.startswith(self._last_arguments)
                        else func_args_raw
                    )

                    if argument_diff:
                        calls.append(
                            ToolCallItem(
                                tool_index=self._tool_indices.get(func_name, 0),
                                name=None,
                                parameters=argument_diff,
                            )
                        )
                        self._last_arguments += argument_diff

                    if _is_complete_json(func_args_raw):
                        result = StreamingParseResult(normal_text="", calls=calls)
                        self._buffer = ""
                        self._last_arguments = ""
                        self.current_tool_name_sent = False
                        return result

            return StreamingParseResult(normal_text="", calls=calls)

        except Exception as e:
            logger.error(f"Error in parse_streaming_increment: {e}")
            return StreamingParseResult(normal_text=current_text)

    def structure_info(self) -> _GetInfoFunc:
        return lambda name: StructureInfo(
            begin=">" + name + "\n```json\n",
            end="\n```<",
            trigger=">" + name + "\n```json\n",
        )

    def build_ebnf(self, tools: List[Tool]):
        return EBNFComposer.build_ebnf(
            tools,
            bot_token=self.bot_token,
            eot_token=self.eot_token,
            tool_call_separator="",
            call_rule_fmt='"<｜tool▁call▁begin｜>function<｜tool▁sep｜>{name}\\n```json\\n" {arguments_rule} "\\n```<｜tool▁call▁end｜>"',
            function_format="json",
        )
````