coze-coding-utils 0.2.2__tar.gz → 0.2.3a1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/PKG-INFO +1 -1
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/pyproject.toml +1 -1
- coze_coding_utils-0.2.3a1/src/coze_coding_utils/helper/stream_runner.py +459 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/.gitignore +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/LICENSE +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/README.md +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/error/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/error/classifier.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/error/codes.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/error/exceptions.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/error/patterns.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/error/test_classifier.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/file/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/file/file.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/helper/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/helper/agent_helper.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/helper/graph_helper.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/common.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/config.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/err_trace.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/loop_trace.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/node_log.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/parser.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/log/write_log.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/messages/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/messages/client.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/messages/server.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/converter/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/converter/request_converter.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/converter/response_converter.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/handler.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/types/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/types/request.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/openai/types/response.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/runtime_ctx/__init__.py +0 -0
- {coze_coding_utils-0.2.2 → coze_coding_utils-0.2.3a1}/src/coze_coding_utils/runtime_ctx/context.py +0 -0
coze_coding_utils-0.2.3a1/src/coze_coding_utils/helper/stream_runner.py
@@ -0,0 +1,459 @@
import time
import asyncio
import threading
import contextvars
import logging
from abc import ABC, abstractmethod
from typing import Any, Dict, Iterator, AsyncIterable
from langchain_core.runnables import RunnableConfig
from langgraph.graph.state import CompiledStateGraph
from coze_coding_utils.runtime_ctx.context import Context
from coze_coding_utils.helper.agent_helper import (
    to_stream_input,
    to_client_message,
    agent_iter_server_messages,
)
from coze_coding_utils.messages.server import (
    MESSAGE_END_CODE_CANCELED,
    create_message_end_dict,
)
from coze_coding_utils.error import classify_error

logger = logging.getLogger(__name__)

TIMEOUT_SECONDS = 900
PING_INTERVAL_SECONDS = 30


class WorkflowEventType:
    WORKFLOW_START = "workflow_start"
    WORKFLOW_END = "workflow_end"
    NODE_START = "node_start"  # node start event, sent only in debug mode
    NODE_END = "node_end"  # node end event, sent only in debug mode
    ERROR = "error"  # error event
    PING = "ping"  # heartbeat event


class WorkflowErrorCode:
    CANCELED = "CANCELED"  # cancellation event
    TIMEOUT = "TIMEOUT"  # timeout event


class BaseStreamRunner(ABC):
    @abstractmethod
    def stream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> Iterator[Any]:
        pass

    @abstractmethod
    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
        pass

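# Streams an agent run as server-message dicts built from LangGraph's "messages"
# stream mode; cancellation and errors are surfaced via create_message_end_dict.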
class AgentStreamRunner(BaseStreamRunner):
    def stream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> Iterator[Any]:
        client_msg, session_id = to_client_message(payload)
        run_config["recursion_limit"] = 100
        run_config["configurable"] = {"thread_id": session_id}
        stream_input = to_stream_input(client_msg)
        t0 = time.time()
        try:
            items = graph.stream(stream_input, stream_mode="messages", config=run_config, context=ctx)
            server_msgs_iter = agent_iter_server_messages(
                items,
                session_id=client_msg.session_id,
                query_msg_id=client_msg.local_msg_id,
                local_msg_id=client_msg.local_msg_id,
                run_id=ctx.run_id,
                log_id=ctx.logid,
            )
            for sm in server_msgs_iter:
                yield sm.dict()
        except asyncio.CancelledError:
            logger.info(f"Stream cancelled for run_id: {ctx.run_id}")
            end_msg = create_message_end_dict(
                code=MESSAGE_END_CODE_CANCELED,
                message="Stream execution cancelled",
                session_id=client_msg.session_id,
                query_msg_id=client_msg.local_msg_id,
                log_id=ctx.logid,
                time_cost_ms=int((time.time() - t0) * 1000),
                reply_id="",
                sequence_id=1,
            )
            yield end_msg
            raise
        except Exception as ex:
            err = classify_error(ex, {"node_name": "stream"})
            end_msg = create_message_end_dict(
                code=str(err.code),
                message=err.message,
                session_id=client_msg.session_id,
                query_msg_id=client_msg.local_msg_id,
                log_id=ctx.logid,
                time_cost_ms=int((time.time() - t0) * 1000),
                reply_id="",
                sequence_id=1,
            )
            yield end_msg

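    # The async variant bridges the synchronous graph.stream() into asyncio: a daemon
    # thread (running in a copied contextvars context) pushes messages onto an
    # asyncio.Queue via loop.call_soon_threadsafe, with None as the end-of-stream
    # sentinel; consumer cancellation sets a threading.Event the producer checks.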
    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
        client_msg, session_id = to_client_message(payload)
        run_config["recursion_limit"] = 100
        run_config["configurable"] = {"thread_id": session_id}
        stream_input = to_stream_input(client_msg)

        loop = asyncio.get_running_loop()
        q: asyncio.Queue = asyncio.Queue()
        context = contextvars.copy_context()
        start_time = time.time()
        cancelled = threading.Event()

        def producer():
            last_seq = 0
            try:
                if cancelled.is_set():
                    logger.info(f"Producer cancelled before start for run_id: {ctx.run_id}")
                    return

                items = graph.stream(stream_input, stream_mode="messages", config=run_config, context=ctx)
                server_msgs_iter = agent_iter_server_messages(
                    items,
                    session_id=client_msg.session_id,
                    query_msg_id=client_msg.local_msg_id,
                    local_msg_id=client_msg.local_msg_id,
                    run_id=ctx.run_id,
                    log_id=ctx.logid,
                )
                for sm in server_msgs_iter:
                    if cancelled.is_set():
                        logger.info(f"Producer cancelled during iteration for run_id: {ctx.run_id}")
                        cancel_msg = create_message_end_dict(
                            code=MESSAGE_END_CODE_CANCELED,
                            message="Stream cancelled by upstream",
                            session_id=client_msg.session_id,
                            query_msg_id=client_msg.local_msg_id,
                            log_id=ctx.logid,
                            time_cost_ms=int((time.time() - start_time) * 1000),
                            reply_id=getattr(sm, 'reply_id', ''),
                            sequence_id=last_seq + 1,
                        )
                        loop.call_soon_threadsafe(q.put_nowait, cancel_msg)
                        return

                    if time.time() - start_time > TIMEOUT_SECONDS:
                        logger.error(f"Agent execution timeout after {TIMEOUT_SECONDS}s for run_id: {ctx.run_id}")
                        timeout_msg = create_message_end_dict(
                            code="TIMEOUT",
                            message=f"Execution timeout: exceeded {TIMEOUT_SECONDS} seconds",
                            session_id=client_msg.session_id,
                            query_msg_id=client_msg.local_msg_id,
                            log_id=ctx.logid,
                            time_cost_ms=int((time.time() - start_time) * 1000),
                            reply_id=getattr(sm, 'reply_id', ''),
                            sequence_id=last_seq + 1,
                        )
                        loop.call_soon_threadsafe(q.put_nowait, timeout_msg)
                        return
                    loop.call_soon_threadsafe(q.put_nowait, sm.dict())
                    last_seq = sm.sequence_id
            except Exception as ex:
                if cancelled.is_set():
                    logger.info(f"Producer exception after cancel for run_id: {ctx.run_id}, ignoring: {ex}")
                    return
                err = classify_error(ex, {"node_name": "astream"})
                end_msg = create_message_end_dict(
                    code=str(err.code),
                    message=err.message,
                    session_id=client_msg.session_id,
                    query_msg_id=client_msg.local_msg_id,
                    log_id=ctx.logid,
                    time_cost_ms=int((time.time() - start_time) * 1000),
                    reply_id="",
                    sequence_id=last_seq + 1,
                )
                loop.call_soon_threadsafe(q.put_nowait, end_msg)
            finally:
                loop.call_soon_threadsafe(q.put_nowait, None)

        threading.Thread(target=lambda: context.run(producer), daemon=True).start()

        try:
            while True:
                item = await q.get()
                if item is None:
                    break
                yield item
        except asyncio.CancelledError:
            logger.info(f"Stream cancelled for run_id: {ctx.run_id}, signaling producer to stop")
            cancelled.set()
            raise

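# Streams a workflow run as (sequence_id, event_dict) tuples. In debug mode it uses
# LangGraph's "debug" stream mode and emits node_start/node_end events; otherwise it
# uses "updates" mode and reports only workflow_start, periodic pings, and a final
# workflow_end carrying the last node output. Failures surface as "error" events.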
class WorkflowStreamRunner(BaseStreamRunner):
    def __init__(self):
        self._node_start_times: Dict[str, float] = {}

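    # Best-effort conversion of node outputs to JSON-serializable structures: dicts
    # and sequences recurse, pydantic-style objects use model_dump()/dict(), other
    # objects fall back to their public __dict__, and everything else passes through.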
    def _serialize_data(self, data: Any) -> Any:
        if isinstance(data, dict):
            return {k: self._serialize_data(v) for k, v in data.items()}
        elif isinstance(data, (list, tuple)):
            return [self._serialize_data(item) for item in data]
        elif hasattr(data, 'model_dump'):
            return data.model_dump()
        elif hasattr(data, 'dict'):
            return data.dict()
        elif hasattr(data, '__dict__'):
            return {k: self._serialize_data(v) for k, v in data.__dict__.items() if not k.startswith('_')}
        else:
            return data

    def _build_event(self, event_type: str, ctx: Context, **kwargs) -> Dict[str, Any]:
        result = {
            "type": event_type,
            "timestamp": int(time.time() * 1000),
            "log_id": ctx.logid,
            "run_id": ctx.run_id,
        }
        result.update(kwargs)
        return result

    def stream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> Iterator[Any]:
        run_config["recursion_limit"] = 100
        if "configurable" not in run_config:
            run_config["configurable"] = {}
        run_config["configurable"]["thread_id"] = ctx.run_id

        t0 = time.time()
        last_ping_time = t0
        node_start_times: Dict[str, float] = {}
        final_output = {}
        seq = 0
        is_debug = run_config.get("configurable", {}).get("workflow_debug", False)
        stream_mode = "debug" if is_debug else "updates"

        try:
            seq += 1
            yield (seq, self._build_event(WorkflowEventType.WORKFLOW_START, ctx))

            for event in graph.stream(payload, stream_mode=stream_mode, config=run_config, context=ctx):
                current_time = time.time()
                if current_time - last_ping_time >= PING_INTERVAL_SECONDS:
                    seq += 1
                    yield (seq, self._build_event(WorkflowEventType.PING, ctx))
                    last_ping_time = current_time

                if not is_debug:
                    if isinstance(event, dict):
                        logger.info(f"Debug event: {event}")
                        for node_name, node_output in event.items():
                            final_output = self._serialize_data(node_output) if node_output else {}
                    continue

                event_type = event.get("type", "")

                if event_type == "task":
                    node_name = event.get("payload", {}).get("name", "")
                    node_start_times[node_name] = current_time

                    input_data = event.get("payload", {}).get("input", {})
                    seq += 1
                    yield (seq, self._build_event(
                        WorkflowEventType.NODE_START,
                        ctx,
                        node_name=node_name,
                        input=self._serialize_data(input_data),
                    ))

                elif event_type == "task_result":
                    node_name = event.get("payload", {}).get("name", "")
                    result = event.get("payload", {}).get("result")

                    output_data = {}
                    if result is not None:
                        if isinstance(result, (list, tuple)) and len(result) > 0:
                            output_data = self._serialize_data(result[0]) if len(result) == 1 else {"results": [self._serialize_data(r) for r in result]}
                        else:
                            output_data = self._serialize_data(result)

                    final_output = output_data

                    node_start_time = node_start_times.pop(node_name, current_time)
                    time_cost_ms = int((current_time - node_start_time) * 1000)

                    seq += 1
                    yield (seq, self._build_event(
                        WorkflowEventType.NODE_END,
                        ctx,
                        node_name=node_name,
                        output=output_data,
                        time_cost_ms=time_cost_ms,
                    ))

            seq += 1
            yield (seq, self._build_event(
                WorkflowEventType.WORKFLOW_END,
                ctx,
                output=final_output,
                time_cost_ms=int((time.time() - t0) * 1000),
            ))

        except asyncio.CancelledError:
            logger.info(f"Workflow stream cancelled for run_id: {ctx.run_id}")
            seq += 1
            yield (seq, self._build_event(WorkflowEventType.ERROR, ctx, code=WorkflowErrorCode.CANCELED, message="Stream execution cancelled"))
            raise
        except Exception as ex:
            err = classify_error(ex, {"node_name": "workflow_stream"})
            seq += 1
            yield (seq, self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), message=err.message))

    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
        run_config["recursion_limit"] = 100
        if "configurable" not in run_config:
            run_config["configurable"] = {}
        run_config["configurable"]["thread_id"] = ctx.run_id

        loop = asyncio.get_running_loop()
        q: asyncio.Queue = asyncio.Queue()
        context = contextvars.copy_context()
        start_time = time.time()
        cancelled = threading.Event()
        last_ping_time = [start_time]
        is_debug = run_config.get("configurable", {}).get("workflow_debug", False)
        stream_mode = "debug" if is_debug else "updates"
        logger.info(f"Stream mode: {stream_mode}")
        seq = [0]

        def producer():
            node_start_times: Dict[str, float] = {}
            final_output = {}
            try:
                if cancelled.is_set():
                    logger.info(f"Workflow producer cancelled before start for run_id: {ctx.run_id}")
                    return

                seq[0] += 1
                loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.WORKFLOW_START, ctx)))

                for event in graph.stream(payload, stream_mode=stream_mode, config=run_config, context=ctx):
                    if cancelled.is_set():
                        logger.info(f"Workflow producer cancelled during iteration for run_id: {ctx.run_id}")
                        seq[0] += 1
                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=WorkflowErrorCode.CANCELED, message="Stream cancelled by upstream")))
                        return

                    if time.time() - start_time > TIMEOUT_SECONDS:
                        logger.error(f"Workflow execution timeout after {TIMEOUT_SECONDS}s for run_id: {ctx.run_id}")
                        seq[0] += 1
                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=WorkflowErrorCode.TIMEOUT, message=f"Execution timeout: exceeded {TIMEOUT_SECONDS} seconds")))
                        return

                    current_time = time.time()
                    if current_time - last_ping_time[0] >= PING_INTERVAL_SECONDS:
                        seq[0] += 1
                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.PING, ctx)))
                        last_ping_time[0] = current_time

                    if not is_debug:
                        if isinstance(event, dict):
                            for node_name, node_output in event.items():
                                logger.info(f"Node output: {node_name}")
                                final_output = self._serialize_data(node_output) if node_output else {}
                        continue

                    event_type = event.get("type", "")

                    if event_type == "task":
                        node_name = event.get("payload", {}).get("name", "")
                        node_start_times[node_name] = current_time

                        input_data = event.get("payload", {}).get("input", {})
                        seq[0] += 1
                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(
                            WorkflowEventType.NODE_START,
                            ctx,
                            node_name=node_name,
                            input=self._serialize_data(input_data),
                        )))

                    elif event_type == "task_result":
                        node_name = event.get("payload", {}).get("name", "")
                        result = event.get("payload", {}).get("result")

                        output_data = {}
                        if result is not None:
                            if isinstance(result, (list, tuple)) and len(result) > 0:
                                output_data = self._serialize_data(result[0]) if len(result) == 1 else {"results": [self._serialize_data(r) for r in result]}
                            else:
                                output_data = self._serialize_data(result)

                        final_output = output_data

                        node_start_time = node_start_times.pop(node_name, current_time)
                        time_cost_ms = int((current_time - node_start_time) * 1000)

                        seq[0] += 1
                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(
                            WorkflowEventType.NODE_END,
                            ctx,
                            node_name=node_name,
                            output=output_data,
                            time_cost_ms=time_cost_ms,
                        )))

                seq[0] += 1
                loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(
                    WorkflowEventType.WORKFLOW_END,
                    ctx,
                    output=final_output,
                    time_cost_ms=int((time.time() - start_time) * 1000),
                )))

            except Exception as ex:
                if cancelled.is_set():
                    logger.info(f"Workflow producer exception after cancel for run_id: {ctx.run_id}, ignoring: {ex}")
                    return
                err = classify_error(ex, {"node_name": "workflow_astream"})
                seq[0] += 1
                loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), message=err.message)))
            finally:
                loop.call_soon_threadsafe(q.put_nowait, None)

        async def ping_sender():
            while not cancelled.is_set():
                await asyncio.sleep(PING_INTERVAL_SECONDS)
                if cancelled.is_set():
                    break
                current_time = time.time()
                if current_time - last_ping_time[0] >= PING_INTERVAL_SECONDS:
                    seq[0] += 1
                    await q.put((seq[0], self._build_event(WorkflowEventType.PING, ctx)))
                    last_ping_time[0] = current_time

        threading.Thread(target=lambda: context.run(producer), daemon=True).start()
        ping_task = asyncio.create_task(ping_sender())

        try:
            while True:
                item = await q.get()
                if item is None:
                    break
                yield item
        except asyncio.CancelledError:
            logger.info(f"Workflow stream cancelled for run_id: {ctx.run_id}, signaling producer to stop")
            cancelled.set()
            raise
        finally:
            cancelled.set()
            ping_task.cancel()
            try:
                await ping_task
            except asyncio.CancelledError:
                pass

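# Factory: agent graphs stream server messages, workflow graphs stream structured
# workflow events, so callers select the runner by graph type.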
def get_stream_runner(is_agent: bool) -> BaseStreamRunner:
    if is_agent:
        return AgentStreamRunner()
    else:
        return WorkflowStreamRunner()
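
A minimal usage sketch for the new module (illustrative only, not part of this diff; the graph, run_config, Context, and payload values are assumed to be supplied by the caller):

    from coze_coding_utils.helper.stream_runner import get_stream_runner

    async def forward_events(graph, run_config, ctx, payload, is_agent: bool):
        runner = get_stream_runner(is_agent)
        # Agent runs yield server-message dicts; workflow runs yield
        # (sequence_id, event_dict) tuples.
        async for item in runner.astream(payload, graph, run_config, ctx):
            yield item  # e.g. serialize each item into an SSE frame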