coze-coding-utils 0.2.2a1__py3-none-any.whl → 0.2.3a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -84,6 +84,7 @@ class ErrorCode(IntEnum):
     API_LLM_CONTENT_FILTER = 301007  # content filtering
     API_LLM_IMAGE_FORMAT = 301008  # image format not supported
     API_LLM_VIDEO_FORMAT = 301009  # video format not supported
+    API_PROJECT_NOT_FOUND = 301010  # project not found
 
     # 302xxx - image generation API errors
     API_IMAGE_GEN_FAILED = 302001  # image generation failed
@@ -136,6 +137,7 @@ class ErrorCode(IntEnum):
     INTEGRATION_FEISHU_AUTH_FAILED = 501001  # Feishu authentication failed
     INTEGRATION_FEISHU_API_FAILED = 501002  # Feishu API call failed
     INTEGRATION_FEISHU_DOC_FAILED = 501003  # Feishu document operation failed
+    INTEGRATION_FEISHU_TABLE_FAILED = 501004  # Feishu table (Bitable) call failed
 
     # 502xxx - WeChat integration errors
     INTEGRATION_WECHAT_AUTH_FAILED = 502001  # WeChat authentication failed
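Both new members extend the existing IntEnum, so they behave as plain integers wherever codes are compared or serialized. A minimal sketch (the import path follows the codes.py entry in the RECORD below; the usage itself is illustrative, not taken from the package):

    from coze_coding_utils.error.codes import ErrorCode

    # IntEnum members compare equal to their integer values, so numeric
    # checks and JSON serialization keep working with the new codes.
    assert ErrorCode.API_PROJECT_NOT_FOUND == 301010
    assert int(ErrorCode.INTEGRATION_FEISHU_TABLE_FAILED) == 501004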
@@ -112,6 +112,14 @@ ERROR_PATTERNS: List[ErrorPattern] = [
     (['headobject operation', 'not found'],
      ErrorCode.RESOURCE_S3_DOWNLOAD_FAILED, "S3对象不存在"),
 
+    # ==================== entitlement / rate-limit errors ====================
+    (['因触发限流调用内置集成失败', "限流"],
+     ErrorCode.API_LLM_RATE_LIMIT, "限流"),
+
+    (['project not found'],
+     ErrorCode.API_PROJECT_NOT_FOUND, "项目不存在"),
+
+
     # ==================== OCR / document processing errors ====================
     (['ocr识别失败', '无法从响应中提取有效的json'],
      ErrorCode.RESOURCE_FILE_FORMAT_ERROR, "OCR识别失败"),
@@ -261,7 +269,7 @@ ERROR_PATTERNS: List[ErrorPattern] = [
      ErrorCode.CONFIG_API_KEY_MISSING, "AWS凭证缺失"),
     (['生成pdf报告失败', 'stylesheet'],
      ErrorCode.RESOURCE_FILE_FORMAT_ERROR, "PDF样式错误"),
-    (['从数据库查询', '失败'],
+    (['从数据库查询'],
      ErrorCode.INTEGRATION_DB_QUERY, "数据库查询失败"),
     (['excel文件解析', '表格结构检测失败'],
      ErrorCode.RESOURCE_FILE_FORMAT_ERROR, "Excel解析失败"),
@@ -293,6 +301,8 @@ ERROR_PATTERNS: List[ErrorPattern] = [
      ErrorCode.INTEGRATION_DB_CONNECTION, "数据库连接已关闭"),
     (['psycopg2', 'postgresql'],
      ErrorCode.INTEGRATION_DB_QUERY, "数据库错误"),
+    (['数据库读取失败'],
+     ErrorCode.INTEGRATION_DB_CONNECTION, "数据库连接失败"),
 
     # ==================== network-related errors ====================
     (['broken pipe', 'errno 32'],
@@ -365,6 +375,8 @@ ERROR_PATTERNS: List[ErrorPattern] = [
      ErrorCode.API_AUDIO_GEN_FAILED, "腾讯云TTS生成失败"),
 
     # ==================== Feishu-related errors ====================
+    (['FeishuBitable API error'],
+     ErrorCode.INTEGRATION_FEISHU_TABLE_FAILED, "飞书Bitable API错误"),
     (['获取草稿列表失败'],
      ErrorCode.INTEGRATION_FEISHU_API_FAILED, "飞书获取草稿列表失败"),
     (['飞书api错误'],
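Each ERROR_PATTERNS entry pairs a list of lowercase substrings with an ErrorCode and a short user-facing message, and classify_error (imported in stream_runner.py below as `from coze_coding_utils.error import classify_error`) returns an object exposing `.code` and `.message`. A hedged sketch of how the new Feishu Bitable entry might be exercised; the exact matching rule (for example, whether every substring must occur) is an assumption, not confirmed by this diff:

    from coze_coding_utils.error import classify_error

    try:
        raise RuntimeError("FeishuBitable API error: request failed")
    except Exception as ex:
        err = classify_error(ex, {"node_name": "demo"})  # same call shape as in stream_runner.py
        # Assumed result: err.code maps to INTEGRATION_FEISHU_TABLE_FAILED (501004)
        # with message "飞书Bitable API错误".
        print(err.code, err.message)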
@@ -92,14 +92,6 @@ def infer_file_category(path_or_url: str) -> tuple[str, str]:
 class FileOps:
     DOWNLOAD_DIR = "/tmp"
 
-    @staticmethod
-    def read_content(file_obj:File, max_length=10000) -> str:
-        return ""
-
-    @staticmethod
-    def get_local_path(file_obj:File) -> str:
-        return file_obj.url
-
     @staticmethod
     def _get_bytes_stream(file_obj:File) -> tuple[bytes, str]:
         """
@@ -0,0 +1,457 @@
+import time
+import asyncio
+import threading
+import contextvars
+import logging
+from abc import ABC, abstractmethod
+from typing import Any, Dict, Iterator, AsyncIterable
+from langchain_core.runnables import RunnableConfig
+from langgraph.graph.state import CompiledStateGraph
+from coze_coding_utils.runtime_ctx.context import Context
+from coze_coding_utils.helper.agent_helper import (
+    to_stream_input,
+    to_client_message,
+    agent_iter_server_messages,
+)
+from coze_coding_utils.messages.server import (
+    MESSAGE_END_CODE_CANCELED,
+    create_message_end_dict, create_message_error_dict,
+)
+from coze_coding_utils.error import classify_error
+
+logger = logging.getLogger(__name__)
+
+TIMEOUT_SECONDS = 900
+PING_INTERVAL_SECONDS = 30
+
+
+class WorkflowEventType:
+    WORKFLOW_START = "workflow_start"
+    WORKFLOW_END = "workflow_end"
+    NODE_START = "node_start"  # node start event, sent only in debug mode
+    NODE_END = "node_end"  # node end event, sent only in debug mode
+    ERROR = "error"  # error event
+    PING = "ping"  # heartbeat event
+
+
+class WorkflowErrorCode:
+    CANCELED = "CANCELED"  # cancellation
+    TIMEOUT = "TIMEOUT"  # timeout
+
+
+class BaseStreamRunner(ABC):
+    @abstractmethod
+    def stream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> Iterator[Any]:
+        pass
+
+    @abstractmethod
+    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
+        pass
+
+
+class AgentStreamRunner(BaseStreamRunner):
+    def stream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> Iterator[Any]:
+        client_msg, session_id = to_client_message(payload)
+        run_config["recursion_limit"] = 100
+        run_config["configurable"] = {"thread_id": session_id}
+        stream_input = to_stream_input(client_msg)
+        t0 = time.time()
+        try:
+            items = graph.stream(stream_input, stream_mode="messages", config=run_config, context=ctx)
+            server_msgs_iter = agent_iter_server_messages(
+                items,
+                session_id=client_msg.session_id,
+                query_msg_id=client_msg.local_msg_id,
+                local_msg_id=client_msg.local_msg_id,
+                run_id=ctx.run_id,
+                log_id=ctx.logid,
+            )
+            for sm in server_msgs_iter:
+                yield sm.dict()
+        except asyncio.CancelledError:
+            logger.info(f"Stream cancelled for run_id: {ctx.run_id}")
+            end_msg = create_message_end_dict(
+                code=MESSAGE_END_CODE_CANCELED,
+                message="Stream execution cancelled",
+                session_id=client_msg.session_id,
+                query_msg_id=client_msg.local_msg_id,
+                log_id=ctx.logid,
+                time_cost_ms=int((time.time() - t0) * 1000),
+                reply_id="",
+                sequence_id=1,
+            )
+            yield end_msg
+            raise
+        except Exception as ex:
+            err = classify_error(ex, {"node_name": "stream"})
+            end_msg = create_message_error_dict(
+                code=str(err.code),
+                message=err.message,
+                session_id=client_msg.session_id,
+                query_msg_id=client_msg.local_msg_id,
+                log_id=ctx.logid,
+                reply_id="",
+                sequence_id=1,
+            )
+            yield end_msg
+
+    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
+        client_msg, session_id = to_client_message(payload)
+        run_config["recursion_limit"] = 100
+        run_config["configurable"] = {"thread_id": session_id}
+        stream_input = to_stream_input(client_msg)
+
+        loop = asyncio.get_running_loop()
+        q: asyncio.Queue = asyncio.Queue()
+        context = contextvars.copy_context()
+        start_time = time.time()
+        cancelled = threading.Event()
+
+        def producer():
+            last_seq = 0
+            try:
+                if cancelled.is_set():
+                    logger.info(f"Producer cancelled before start for run_id: {ctx.run_id}")
+                    return
+
+                items = graph.stream(stream_input, stream_mode="messages", config=run_config, context=ctx)
+                server_msgs_iter = agent_iter_server_messages(
+                    items,
+                    session_id=client_msg.session_id,
+                    query_msg_id=client_msg.local_msg_id,
+                    local_msg_id=client_msg.local_msg_id,
+                    run_id=ctx.run_id,
+                    log_id=ctx.logid,
+                )
+                for sm in server_msgs_iter:
+                    if cancelled.is_set():
+                        logger.info(f"Producer cancelled during iteration for run_id: {ctx.run_id}")
+                        cancel_msg = create_message_end_dict(
+                            code=MESSAGE_END_CODE_CANCELED,
+                            message="Stream cancelled by upstream",
+                            session_id=client_msg.session_id,
+                            query_msg_id=client_msg.local_msg_id,
+                            log_id=ctx.logid,
+                            time_cost_ms=int((time.time() - start_time) * 1000),
+                            reply_id=getattr(sm, 'reply_id', ''),
+                            sequence_id=last_seq + 1,
+                        )
+                        loop.call_soon_threadsafe(q.put_nowait, cancel_msg)
+                        return
+
+                    if time.time() - start_time > TIMEOUT_SECONDS:
+                        logger.error(f"Agent execution timeout after {TIMEOUT_SECONDS}s for run_id: {ctx.run_id}")
+                        timeout_msg = create_message_end_dict(
+                            code="TIMEOUT",
+                            message=f"Execution timeout: exceeded {TIMEOUT_SECONDS} seconds",
+                            session_id=client_msg.session_id,
+                            query_msg_id=client_msg.local_msg_id,
+                            log_id=ctx.logid,
+                            time_cost_ms=int((time.time() - start_time) * 1000),
+                            reply_id=getattr(sm, 'reply_id', ''),
+                            sequence_id=last_seq + 1,
+                        )
+                        loop.call_soon_threadsafe(q.put_nowait, timeout_msg)
+                        return
+                    loop.call_soon_threadsafe(q.put_nowait, sm.dict())
+                    last_seq = sm.sequence_id
+            except Exception as ex:
+                if cancelled.is_set():
+                    logger.info(f"Producer exception after cancel for run_id: {ctx.run_id}, ignoring: {ex}")
+                    return
+                err = classify_error(ex, {"node_name": "astream"})
+                end_msg = create_message_error_dict(
+                    code=str(err.code),
+                    message=err.message,
+                    session_id=client_msg.session_id,
+                    query_msg_id=client_msg.local_msg_id,
+                    log_id=ctx.logid,
+                    reply_id="",
+                    sequence_id=last_seq + 1,
+                )
+                loop.call_soon_threadsafe(q.put_nowait, end_msg)
+            finally:
+                loop.call_soon_threadsafe(q.put_nowait, None)
+
+        threading.Thread(target=lambda: context.run(producer), daemon=True).start()
+
+        try:
+            while True:
+                item = await q.get()
+                if item is None:
+                    break
+                yield item
+        except asyncio.CancelledError:
+            logger.info(f"Stream cancelled for run_id: {ctx.run_id}, signaling producer to stop")
+            cancelled.set()
+            raise
+
+
+class WorkflowStreamRunner(BaseStreamRunner):
+    def __init__(self):
+        self._node_start_times: Dict[str, float] = {}
+
+    def _serialize_data(self, data: Any) -> Any:
+        if isinstance(data, dict):
+            return {k: self._serialize_data(v) for k, v in data.items()}
+        elif isinstance(data, (list, tuple)):
+            return [self._serialize_data(item) for item in data]
+        elif hasattr(data, 'model_dump'):
+            return data.model_dump()
+        elif hasattr(data, 'dict'):
+            return data.dict()
+        elif hasattr(data, '__dict__'):
+            return {k: self._serialize_data(v) for k, v in data.__dict__.items() if not k.startswith('_')}
+        else:
+            return data
+
+    def _build_event(self, event_type: str, ctx: Context, **kwargs) -> Dict[str, Any]:
+        result = {
+            "type": event_type,
+            "timestamp": int(time.time() * 1000),
+            "log_id": ctx.logid,
+            "run_id": ctx.run_id,
+        }
+        result.update(kwargs)
+        return result
+
+    def stream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> Iterator[Any]:
+        run_config["recursion_limit"] = 100
+        if "configurable" not in run_config:
+            run_config["configurable"] = {}
+        run_config["configurable"]["thread_id"] = ctx.run_id
+
+        t0 = time.time()
+        last_ping_time = t0
+        node_start_times: Dict[str, float] = {}
+        final_output = {}
+        seq = 0
+        is_debug = run_config.get("configurable", {}).get("workflow_debug", False)
+        stream_mode = "debug" if is_debug else "updates"
+
+        try:
+            seq += 1
+            yield (seq, self._build_event(WorkflowEventType.WORKFLOW_START, ctx))
+
+            for event in graph.stream(payload, stream_mode=stream_mode, config=run_config, context=ctx):
+                current_time = time.time()
+                if current_time - last_ping_time >= PING_INTERVAL_SECONDS:
+                    seq += 1
+                    yield (seq, self._build_event(WorkflowEventType.PING, ctx))
+                    last_ping_time = current_time
+
+                if not is_debug:
+                    if isinstance(event, dict):
+                        logger.info(f"Debug event: {event}")
+                        for node_name, node_output in event.items():
+                            final_output = self._serialize_data(node_output) if node_output else {}
+                    continue
+
+                event_type = event.get("type", "")
+
+                if event_type == "task":
+                    node_name = event.get("payload", {}).get("name", "")
+                    node_start_times[node_name] = current_time
+
+                    input_data = event.get("payload", {}).get("input", {})
+                    seq += 1
+                    yield (seq, self._build_event(
+                        WorkflowEventType.NODE_START,
+                        ctx,
+                        node_name=node_name,
+                        input=self._serialize_data(input_data),
+                    ))
+
+                elif event_type == "task_result":
+                    node_name = event.get("payload", {}).get("name", "")
+                    result = event.get("payload", {}).get("result")
+
+                    output_data = {}
+                    if result is not None:
+                        if isinstance(result, (list, tuple)) and len(result) > 0:
+                            output_data = self._serialize_data(result[0]) if len(result) == 1 else {"results": [self._serialize_data(r) for r in result]}
+                        else:
+                            output_data = self._serialize_data(result)
+
+                    final_output = output_data
+
+                    node_start_time = node_start_times.pop(node_name, current_time)
+                    time_cost_ms = int((current_time - node_start_time) * 1000)
+
+                    seq += 1
+                    yield (seq, self._build_event(
+                        WorkflowEventType.NODE_END,
+                        ctx,
+                        node_name=node_name,
+                        output=output_data,
+                        time_cost_ms=time_cost_ms,
+                    ))
+
+            seq += 1
+            yield (seq, self._build_event(
+                WorkflowEventType.WORKFLOW_END,
+                ctx,
+                output=final_output,
+                time_cost_ms=int((time.time() - t0) * 1000),
+            ))
+
+        except asyncio.CancelledError:
+            logger.info(f"Workflow stream cancelled for run_id: {ctx.run_id}")
+            seq += 1
+            yield (seq, self._build_event(WorkflowEventType.ERROR, ctx, code=WorkflowErrorCode.CANCELED, message="Stream execution cancelled"))
+            raise
+        except Exception as ex:
+            err = classify_error(ex, {"node_name": "workflow_stream"})
+            seq += 1
+            yield (seq, self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), error_msg=err.message))
+
+    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
+        run_config["recursion_limit"] = 100
+        if "configurable" not in run_config:
+            run_config["configurable"] = {}
+        run_config["configurable"]["thread_id"] = ctx.run_id
+
+        loop = asyncio.get_running_loop()
+        q: asyncio.Queue = asyncio.Queue()
+        context = contextvars.copy_context()
+        start_time = time.time()
+        cancelled = threading.Event()
+        last_ping_time = [start_time]
+        is_debug = run_config.get("configurable", {}).get("workflow_debug", False)
+        stream_mode = "debug" if is_debug else "updates"
+        logger.info(f"Stream mode: {stream_mode}")
+        seq = [0]
+
+        def producer():
+            node_start_times: Dict[str, float] = {}
+            final_output = {}
+            try:
+                if cancelled.is_set():
+                    logger.info(f"Workflow producer cancelled before start for run_id: {ctx.run_id}")
+                    return
+
+                seq[0] += 1
+                loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.WORKFLOW_START, ctx)))
+
+                for event in graph.stream(payload, stream_mode=stream_mode, config=run_config, context=ctx):
+                    if cancelled.is_set():
+                        logger.info(f"Workflow producer cancelled during iteration for run_id: {ctx.run_id}")
+                        seq[0] += 1
+                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=WorkflowErrorCode.CANCELED, message="Stream cancelled by upstream")))
+                        return
+
+                    if time.time() - start_time > TIMEOUT_SECONDS:
+                        logger.error(f"Workflow execution timeout after {TIMEOUT_SECONDS}s for run_id: {ctx.run_id}")
+                        seq[0] += 1
+                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=WorkflowErrorCode.TIMEOUT, message=f"Execution timeout: exceeded {TIMEOUT_SECONDS} seconds")))
+                        return
+
+                    current_time = time.time()
+                    if current_time - last_ping_time[0] >= PING_INTERVAL_SECONDS:
+                        seq[0] += 1
+                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.PING, ctx)))
+                        last_ping_time[0] = current_time
+
+                    if not is_debug:
+                        if isinstance(event, dict):
+                            for node_name, node_output in event.items():
+                                logger.info(f"Node output: {node_name}")
+                                final_output = self._serialize_data(node_output) if node_output else {}
+                        continue
+
+                    event_type = event.get("type", "")
+
+                    if event_type == "task":
+                        node_name = event.get("payload", {}).get("name", "")
+                        node_start_times[node_name] = current_time
+
+                        input_data = event.get("payload", {}).get("input", {})
+                        seq[0] += 1
+                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(
+                            WorkflowEventType.NODE_START,
+                            ctx,
+                            node_name=node_name,
+                            input=self._serialize_data(input_data),
+                        )))
+
+                    elif event_type == "task_result":
+                        node_name = event.get("payload", {}).get("name", "")
+                        result = event.get("payload", {}).get("result")
+
+                        output_data = {}
+                        if result is not None:
+                            if isinstance(result, (list, tuple)) and len(result) > 0:
+                                output_data = self._serialize_data(result[0]) if len(result) == 1 else {"results": [self._serialize_data(r) for r in result]}
+                            else:
+                                output_data = self._serialize_data(result)
+
+                        final_output = output_data
+
+                        node_start_time = node_start_times.pop(node_name, current_time)
+                        time_cost_ms = int((current_time - node_start_time) * 1000)
+
+                        seq[0] += 1
+                        loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(
+                            WorkflowEventType.NODE_END,
+                            ctx,
+                            node_name=node_name,
+                            output=output_data,
+                            time_cost_ms=time_cost_ms,
+                        )))
+
+                seq[0] += 1
+                loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(
+                    WorkflowEventType.WORKFLOW_END,
+                    ctx,
+                    output=final_output,
+                    time_cost_ms=int((time.time() - start_time) * 1000),
+                )))
+
+            except Exception as ex:
+                if cancelled.is_set():
+                    logger.info(f"Workflow producer exception after cancel for run_id: {ctx.run_id}, ignoring: {ex}")
+                    return
+                err = classify_error(ex, {"node_name": "workflow_astream"})
+                seq[0] += 1
+                loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), error_msg=err.message)))
+            finally:
+                loop.call_soon_threadsafe(q.put_nowait, None)
+
+        async def ping_sender():
+            while not cancelled.is_set():
+                await asyncio.sleep(PING_INTERVAL_SECONDS)
+                if cancelled.is_set():
+                    break
+                current_time = time.time()
+                if current_time - last_ping_time[0] >= PING_INTERVAL_SECONDS:
+                    seq[0] += 1
+                    await q.put((seq[0], self._build_event(WorkflowEventType.PING, ctx)))
+                    last_ping_time[0] = current_time
+
+        threading.Thread(target=lambda: context.run(producer), daemon=True).start()
+        ping_task = asyncio.create_task(ping_sender())
+
+        try:
+            while True:
+                item = await q.get()
+                if item is None:
+                    break
+                yield item
+        except asyncio.CancelledError:
+            logger.info(f"Workflow stream cancelled for run_id: {ctx.run_id}, signaling producer to stop")
+            cancelled.set()
+            raise
+        finally:
+            cancelled.set()
+            ping_task.cancel()
+            try:
+                await ping_task
+            except asyncio.CancelledError:
+                pass
+
+
+def get_stream_runner(is_agent: bool) -> BaseStreamRunner:
+    if is_agent:
+        return AgentStreamRunner()
+    else:
+        return WorkflowStreamRunner()
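The new module (coze_coding_utils/helper/stream_runner.py, per the RECORD below) exposes get_stream_runner(is_agent) as its entry point: agent runners stream server-message dicts, while workflow runners stream (sequence_id, event_dict) tuples carrying workflow_start, node_start/node_end (debug mode only), ping, error, and workflow_end events. A minimal consumption sketch; the payload, graph, run_config, and ctx objects are assumed to come from the caller's existing LangGraph setup and are not defined by this diff:

    from coze_coding_utils.helper.stream_runner import get_stream_runner

    async def run_workflow(payload, graph, run_config, ctx):
        runner = get_stream_runner(is_agent=False)  # returns a WorkflowStreamRunner
        async for seq, event in runner.astream(payload, graph, run_config, ctx):
            # every event includes type, timestamp, log_id and run_id
            print(seq, event["type"])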
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: coze-coding-utils
-Version: 0.2.2a1
+Version: 0.2.3a2
 Summary: Utilities for Coze coding client runtime context and helpers.
 Project-URL: Homepage, https://code.byted.org/stone/coze-coding-client
 Author: Bytedance Stone Team
@@ -1,15 +1,16 @@
 coze_coding_utils/__init__.py,sha256=OIMKOQLy07Uo5wQkLw3D7j6qRKt4o-smdW-dndYhpHo,37
 coze_coding_utils/error/__init__.py,sha256=SbhsopZ8ZQsbXKZ-GPsw3Fq8AQAOC8W6bZgUZhIOw_k,886
 coze_coding_utils/error/classifier.py,sha256=uXVmufL_sn4w7oNyvrEFXSI_8mCi4mXY353UK5d-d0Y,10028
-coze_coding_utils/error/codes.py,sha256=34sC528UndVa96q0B2_BpvD-PVPPyZwL3wuVErxzx2U,17028
+coze_coding_utils/error/codes.py,sha256=IdSRHoWlwaIzfzUswmjT_lGS04_RHaHjSJUbV2DIhEA,17162
 coze_coding_utils/error/exceptions.py,sha256=QjGk56ovGG-2V4gHcTeJq3-3ZIQQ8DF692zgIYcEJxI,17074
-coze_coding_utils/error/patterns.py,sha256=YhhBcCoWQuvSbtA271eS3AB81pChypD7nDrPidQDu0s,44412
+coze_coding_utils/error/patterns.py,sha256=_Z_CtsiVng6dQnqWQwYxKrZm_DrLNYL8tw5_oO5I8x8,44871
 coze_coding_utils/error/test_classifier.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coze_coding_utils/file/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-coze_coding_utils/file/file.py,sha256=CFhnxHpmh2aPeMOq3PwSsZFsIiQ-YigYxgYvzJAfx_0,11828
+coze_coding_utils/file/file.py,sha256=fBda18EGSQZ3Xl8OqEaGAb5Rd90_SmhJ1k0jgQk2v7Y,11636
 coze_coding_utils/helper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coze_coding_utils/helper/agent_helper.py,sha256=q1ZM30xLXoW-m0NJmJ_Y0M-kUAQCBstG_j7xkqsyRSU,22546
 coze_coding_utils/helper/graph_helper.py,sha256=UNtqqiQNAQ4319qcC1vHiLYIL2eGzvGQRgXu3mgLq8Y,8893
+coze_coding_utils/helper/stream_runner.py,sha256=q8lLo3i-PMVgbhWiv3X51NoiHkmyhT0-25hI_W1BDi0,20384
 coze_coding_utils/log/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coze_coding_utils/log/common.py,sha256=mUNkCm68oaPaI6-a5UwLf87AfhrMnVPkEuri16guqKc,168
 coze_coding_utils/log/config.py,sha256=Qkw3JRuGUKJ6CBY7WqHJOFeyCU47cArvUtMsSBifFMo,195
@@ -31,7 +32,7 @@ coze_coding_utils/openai/types/request.py,sha256=IuNMT2Ce1--_32R30Q2q7Lb2dAwKNy3
 coze_coding_utils/openai/types/response.py,sha256=pjHHVR8LSMVFCc3fGzKqXrdoKDIfSCJEfICd_X9Nohc,4808
 coze_coding_utils/runtime_ctx/__init__.py,sha256=4W8VliAYUP1KY2gLJ_YDy2TmcXYVm-PY7XikQD_bFwA,2
 coze_coding_utils/runtime_ctx/context.py,sha256=G8ld-WnQ1pTJe5OOXC_dTbagXj9IxmpRiPM4X_jWW6o,3992
-coze_coding_utils-0.2.2a1.dist-info/METADATA,sha256=v1GSxZ6VbVzMuIQfLHE5n37z1iYG4UJnKJ3BTCV-lag,979
-coze_coding_utils-0.2.2a1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-coze_coding_utils-0.2.2a1.dist-info/licenses/LICENSE,sha256=lzckZhAjHlpSJcWvppoST095IHFpBwKiB2pKcBv7vP4,1078
-coze_coding_utils-0.2.2a1.dist-info/RECORD,,
+coze_coding_utils-0.2.3a2.dist-info/METADATA,sha256=cGQN_B0A1fz_AH5FqH_yjgcCaQkjzURl6bJxQurKRVk,979
+coze_coding_utils-0.2.3a2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+coze_coding_utils-0.2.3a2.dist-info/licenses/LICENSE,sha256=lzckZhAjHlpSJcWvppoST095IHFpBwKiB2pKcBv7vP4,1078
+coze_coding_utils-0.2.3a2.dist-info/RECORD,,