coze-coding-utils 0.2.3a1-py3-none-any.whl → 0.2.3a3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
coze_coding_utils/error/codes.py

@@ -84,6 +84,7 @@ class ErrorCode(IntEnum):
     API_LLM_CONTENT_FILTER = 301007  # content filtered
     API_LLM_IMAGE_FORMAT = 301008  # unsupported image format
     API_LLM_VIDEO_FORMAT = 301009  # unsupported video format
+    API_PROJECT_NOT_FOUND = 301010  # project not found
 
     # 302xxx - image generation API errors
     API_IMAGE_GEN_FAILED = 302001  # image generation failed
@@ -136,6 +137,7 @@ class ErrorCode(IntEnum):
     INTEGRATION_FEISHU_AUTH_FAILED = 501001  # Feishu authentication failed
     INTEGRATION_FEISHU_API_FAILED = 501002  # Feishu API call failed
     INTEGRATION_FEISHU_DOC_FAILED = 501003  # Feishu document operation failed
+    INTEGRATION_FEISHU_TABLE_FAILED = 501004  # Feishu table (Bitable) call failed
 
     # 502xxx - WeChat integration errors
     INTEGRATION_WECHAT_AUTH_FAILED = 502001  # WeChat authentication failed
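
Both additions extend existing ranges: 3010xx for LLM/API errors and 5010xx for Feishu integration errors. A minimal sketch of branching on the new members, assuming ErrorCode is importable from coze_coding_utils.error.codes (the module whose hash changes in the RECORD section below):

    # Sketch only: assumes ErrorCode is exposed by coze_coding_utils.error.codes.
    from coze_coding_utils.error.codes import ErrorCode

    def describe(code: int) -> str:
        if code == ErrorCode.API_PROJECT_NOT_FOUND:            # 301010, new in 0.2.3a3
            return "project not found"
        if code == ErrorCode.INTEGRATION_FEISHU_TABLE_FAILED:  # 501004, new in 0.2.3a3
            return "Feishu table call failed"
        return "unclassified"

    print(describe(301010))  # -> "project not found"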
coze_coding_utils/error/patterns.py

@@ -112,6 +112,14 @@ ERROR_PATTERNS: List[ErrorPattern] = [
     (['headobject operation', 'not found'],
      ErrorCode.RESOURCE_S3_DOWNLOAD_FAILED, "S3对象不存在"),
 
+    # ==================== Entitlement / rate-limit errors ====================
+    (['因触发限流调用内置集成失败', "限流"],
+     ErrorCode.API_LLM_RATE_LIMIT, "限流"),
+
+    (['project not found'],
+     ErrorCode.API_PROJECT_NOT_FOUND, "项目不存在"),
+
+
     # ==================== OCR / document processing errors ====================
     (['ocr识别失败', '无法从响应中提取有效的json'],
      ErrorCode.RESOURCE_FILE_FORMAT_ERROR, "OCR识别失败"),
@@ -261,7 +269,7 @@ ERROR_PATTERNS: List[ErrorPattern] = [
      ErrorCode.CONFIG_API_KEY_MISSING, "AWS凭证缺失"),
     (['生成pdf报告失败', 'stylesheet'],
      ErrorCode.RESOURCE_FILE_FORMAT_ERROR, "PDF样式错误"),
-    (['从数据库查询', '失败'],
+    (['从数据库查询'],
      ErrorCode.INTEGRATION_DB_QUERY, "数据库查询失败"),
     (['excel文件解析', '表格结构检测失败'],
      ErrorCode.RESOURCE_FILE_FORMAT_ERROR, "Excel解析失败"),
@@ -293,6 +301,8 @@ ERROR_PATTERNS: List[ErrorPattern] = [
      ErrorCode.INTEGRATION_DB_CONNECTION, "数据库连接已关闭"),
     (['psycopg2', 'postgresql'],
      ErrorCode.INTEGRATION_DB_QUERY, "数据库错误"),
+    (['数据库读取失败'],
+     ErrorCode.INTEGRATION_DB_CONNECTION, "数据库连接失败"),
 
     # ==================== Network-related errors ====================
     (['broken pipe', 'errno 32'],
@@ -365,6 +375,8 @@ ERROR_PATTERNS: List[ErrorPattern] = [
      ErrorCode.API_AUDIO_GEN_FAILED, "腾讯云TTS生成失败"),
 
     # ==================== Feishu-related errors ====================
+    (['FeishuBitable API error'],
+     ErrorCode.INTEGRATION_FEISHU_TABLE_FAILED, "飞书Bitable API错误"),
     (['获取草稿列表失败'],
      ErrorCode.INTEGRATION_FEISHU_API_FAILED, "飞书获取草稿列表失败"),
     (['飞书api错误'],
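
Each ERROR_PATTERNS entry pairs a keyword list with a target ErrorCode and a short user-facing message. The matcher itself lives in coze_coding_utils/error/classifier.py, which this release does not touch, so the rule in the sketch below is an assumption: a pattern fires when every keyword appears, case-insensitively, in the stringified exception. Under that assumed rule, dropping '失败' from the database-query pattern above broadens it to any error text containing '从数据库查询'.

    # Illustrative sketch only; the real matching logic is in classifier.py,
    # which is unchanged in this diff.
    from typing import List, Optional, Tuple

    from coze_coding_utils.error.codes import ErrorCode  # assumed import path

    Pattern = Tuple[List[str], ErrorCode, str]

    def match(text: str, patterns: List[Pattern]) -> Optional[Tuple[ErrorCode, str]]:
        lowered = text.lower()
        for keywords, code, message in patterns:
            # Assumed rule: all keywords must appear as substrings.
            if all(keyword.lower() in lowered for keyword in keywords):
                return code, message
        return None

    # Two of the entries added in this release.
    patterns: List[Pattern] = [
        (['project not found'], ErrorCode.API_PROJECT_NOT_FOUND, "项目不存在"),
        (['数据库读取失败'], ErrorCode.INTEGRATION_DB_CONNECTION, "数据库连接失败"),
    ]
    print(match("RuntimeError: project not found", patterns))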
coze_coding_utils/file/file.py

@@ -92,14 +92,6 @@ def infer_file_category(path_or_url: str) -> tuple[str, str]:
 class FileOps:
     DOWNLOAD_DIR = "/tmp"
 
-    @staticmethod
-    def read_content(file_obj:File, max_length=10000) -> str:
-        return ""
-
-    @staticmethod
-    def get_local_path(file_obj:File) -> str:
-        return file_obj.url
-
     @staticmethod
     def _get_bytes_stream(file_obj:File) -> tuple[bytes, str]:
         """
coze_coding_utils/helper/stream_runner.py

@@ -7,24 +7,30 @@ from abc import ABC, abstractmethod
 from typing import Any, Dict, Iterator, AsyncIterable
 from langchain_core.runnables import RunnableConfig
 from langgraph.graph.state import CompiledStateGraph
-from coze_coding_utils.runtime_ctx.context import Context
 from coze_coding_utils.helper.agent_helper import (
     to_stream_input,
-    to_client_message,
     agent_iter_server_messages,
 )
+
+from coze_coding_utils.error import classify_error
+import asyncio
+import time
+import traceback
+from typing import Any, Dict, AsyncGenerator, Callable
+from coze_coding_utils.runtime_ctx.context import Context
 from coze_coding_utils.messages.server import (
-    MESSAGE_END_CODE_CANCELED,
     create_message_end_dict,
+    create_message_error_dict,
+    MESSAGE_END_CODE_CANCELED,
 )
-from coze_coding_utils.error import classify_error
+from coze_coding_utils.helper.agent_helper import to_client_message
+from coze_coding_utils.error.classifier import ErrorClassifier
 
 logger = logging.getLogger(__name__)
 
 TIMEOUT_SECONDS = 900
 PING_INTERVAL_SECONDS = 30
 
-
 class WorkflowEventType:
     WORKFLOW_START = "workflow_start"
     WORKFLOW_END = "workflow_end"
@@ -84,13 +90,12 @@ class AgentStreamRunner(BaseStreamRunner):
            raise
        except Exception as ex:
            err = classify_error(ex, {"node_name": "stream"})
-           end_msg = create_message_end_dict(
+           end_msg = create_message_error_dict(
                code=str(err.code),
                message=err.message,
                session_id=client_msg.session_id,
                query_msg_id=client_msg.local_msg_id,
                log_id=ctx.logid,
-               time_cost_ms=int((time.time() - t0) * 1000),
                reply_id="",
                sequence_id=1,
            )
@@ -161,13 +166,12 @@ class AgentStreamRunner(BaseStreamRunner):
                logger.info(f"Producer exception after cancel for run_id: {ctx.run_id}, ignoring: {ex}")
                return
            err = classify_error(ex, {"node_name": "astream"})
-           end_msg = create_message_end_dict(
+           end_msg = create_message_error_dict(
                code=str(err.code),
                message=err.message,
                session_id=client_msg.session_id,
                query_msg_id=client_msg.local_msg_id,
                log_id=ctx.logid,
-               time_cost_ms=int((time.time() - start_time) * 1000),
                reply_id="",
                sequence_id=last_seq + 1,
            )
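
In both the sync and async agent paths, failures now produce a create_message_error_dict frame instead of a create_message_end_dict frame, and time_cost_ms is no longer passed. A sketch of the resulting call shape, using only the keyword arguments visible in the hunks above (err, client_msg and ctx stand in for the runner's real objects):

    # Sketch of the new failure-path call; placeholder objects are illustrative.
    from coze_coding_utils.messages.server import create_message_error_dict

    def build_agent_error_frame(err, client_msg, ctx, sequence_id: int = 1) -> dict:
        return create_message_error_dict(
            code=str(err.code),
            message=err.message,
            session_id=client_msg.session_id,
            query_msg_id=client_msg.local_msg_id,
            log_id=ctx.logid,
            reply_id="",
            sequence_id=sequence_id,
        )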
@@ -305,7 +309,7 @@ class WorkflowStreamRunner(BaseStreamRunner):
        except Exception as ex:
            err = classify_error(ex, {"node_name": "workflow_stream"})
            seq += 1
-           yield (seq, self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), message=err.message))
+           yield (seq, self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), error_msg=err.message))
 
    async def astream(self, payload: Dict[str, Any], graph: CompiledStateGraph, run_config: RunnableConfig, ctx: Context) -> AsyncIterable[Any]:
        run_config["recursion_limit"] = 100
@@ -415,7 +419,7 @@ class WorkflowStreamRunner(BaseStreamRunner):
                return
            err = classify_error(ex, {"node_name": "workflow_astream"})
            seq[0] += 1
-           loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), message=err.message)))
+           loop.call_soon_threadsafe(q.put_nowait, (seq[0], self._build_event(WorkflowEventType.ERROR, ctx, code=str(err.code), error_msg=err.message)))
        finally:
            loop.call_soon_threadsafe(q.put_nowait, None)
 
@@ -452,6 +456,106 @@ class WorkflowStreamRunner(BaseStreamRunner):
        pass
 
 
+async def agent_stream_handler(
+    payload: Dict[str, Any],
+    ctx: Context,
+    run_id: str,
+    stream_sse_func: Callable,
+    sse_event_func: Callable,
+    error_classifier: ErrorClassifier,
+    register_task_func: Callable[[str, asyncio.Task], None],
+) -> AsyncGenerator[str, None]:
+    task = asyncio.current_task()
+    if task:
+        register_task_func(run_id, task)
+        logger.info(f"Registered agent streaming task for run_id: {run_id}")
+
+    client_msg, _ = to_client_message(payload)
+    t0 = time.time()
+
+    try:
+        async for chunk in stream_sse_func(payload, ctx, need_detail=False):
+            yield chunk
+    except asyncio.CancelledError:
+        logger.info(f"Agent stream cancelled for run_id: {run_id}")
+        end_msg = create_message_end_dict(
+            code=MESSAGE_END_CODE_CANCELED,
+            message="Stream cancelled by user",
+            session_id=client_msg.session_id,
+            query_msg_id=client_msg.local_msg_id,
+            log_id=ctx.logid,
+            time_cost_ms=int((time.time() - t0) * 1000),
+            reply_id="",
+            sequence_id=1,
+        )
+        yield sse_event_func(end_msg)
+        raise
+    except Exception as ex:
+        err = error_classifier.classify(ex, {"node_name": "agent_stream", "run_id": run_id})
+        logger.error(
+            f"Unexpected error in agent_stream: [{err.code}] {err.message}, "
+            f"traceback: {traceback.format_exc()}"
+        )
+        error_msg = create_message_error_dict(
+            code=str(err.code),
+            message=str(ex),
+            session_id=client_msg.session_id,
+            query_msg_id=client_msg.local_msg_id,
+            log_id=ctx.logid,
+            reply_id="",
+            sequence_id=1,
+            local_msg_id=client_msg.local_msg_id,
+        )
+        yield sse_event_func(error_msg)
+
+
+async def workflow_stream_handler(
+    payload: Dict[str, Any],
+    ctx: Context,
+    run_id: str,
+    stream_sse_func: Callable,
+    sse_event_func: Callable,
+    error_classifier: ErrorClassifier,
+    register_task_func: Callable[[str, asyncio.Task], None],
+    workflow_debug: bool = False,
+) -> AsyncGenerator[str, None]:
+    task = asyncio.current_task()
+    if task:
+        register_task_func(run_id, task)
+        logger.info(f"Registered workflow streaming task for run_id: {run_id}")
+
+    try:
+        async for chunk in stream_sse_func(payload, ctx, need_detail=workflow_debug):
+            yield chunk
+    except asyncio.CancelledError:
+        logger.info(f"Workflow stream cancelled for run_id: {run_id}")
+        cancel_event = {
+            "type": "error",
+            "timestamp": int(time.time() * 1000),
+            "log_id": ctx.logid,
+            "run_id": run_id,
+            "code": "CANCELED",
+            "error_msg": "Stream cancelled by user",
+        }
+        yield sse_event_func(cancel_event)
+        raise
+    except Exception as ex:
+        err = error_classifier.classify(ex, {"node_name": "workflow_stream", "run_id": run_id})
+        logger.error(
+            f"Unexpected error in workflow_stream: [{err.code}] {err.message}, "
+            f"traceback: {traceback.format_exc()}"
+        )
+        error_event = {
+            "type": "error",
+            "timestamp": int(time.time() * 1000),
+            "log_id": ctx.logid,
+            "run_id": run_id,
+            "code": str(err.code),
+            "error_msg": str(ex),
+        }
+        yield sse_event_func(error_event)
+
+
 def get_stream_runner(is_agent: bool) -> BaseStreamRunner:
    if is_agent:
        return AgentStreamRunner()
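
agent_stream_handler and workflow_stream_handler are new module-level async generators that wrap an SSE-producing callable with task registration, cancellation handling and classified error events. How they get wired into a server is outside this package; the sketch below assumes a FastAPI app and invents stream_sse, format_sse and register_task purely for illustration:

    # Hypothetical wiring, not part of the package: stream_sse, format_sse and
    # register_task stand in for whatever the hosting service actually provides.
    import asyncio

    from fastapi import FastAPI, Request
    from fastapi.responses import StreamingResponse

    from coze_coding_utils.error.classifier import ErrorClassifier
    from coze_coding_utils.helper.stream_runner import agent_stream_handler

    app = FastAPI()
    classifier = ErrorClassifier()  # constructor signature assumed
    running_tasks: dict[str, asyncio.Task] = {}

    def register_task(run_id: str, task: asyncio.Task) -> None:
        # Lets a separate cancel endpoint look the task up and cancel it.
        running_tasks[run_id] = task

    def format_sse(event) -> str:
        # Placeholder SSE framing; the real sse_event_func is supplied by the caller.
        return f"data: {event}\n\n"

    async def stream_sse(payload, ctx, need_detail=False):
        # Placeholder producer; the real stream_sse_func yields agent SSE chunks.
        yield format_sse({"type": "ping"})

    @app.post("/agent/stream")
    async def agent_stream(request: Request):
        payload = await request.json()
        ctx = request.state.ctx  # assumes middleware attached a Context here
        gen = agent_stream_handler(
            payload=payload,
            ctx=ctx,
            run_id=payload["run_id"],
            stream_sse_func=stream_sse,
            sse_event_func=format_sse,
            error_classifier=classifier,
            register_task_func=register_task,
        )
        return StreamingResponse(gen, media_type="text/event-stream")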
coze_coding_utils-0.2.3a3.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: coze-coding-utils
-Version: 0.2.3a1
+Version: 0.2.3a3
 Summary: Utilities for Coze coding client runtime context and helpers.
 Project-URL: Homepage, https://code.byted.org/stone/coze-coding-client
 Author: Bytedance Stone Team
coze_coding_utils-0.2.3a3.dist-info/RECORD

@@ -1,16 +1,16 @@
 coze_coding_utils/__init__.py,sha256=OIMKOQLy07Uo5wQkLw3D7j6qRKt4o-smdW-dndYhpHo,37
 coze_coding_utils/error/__init__.py,sha256=SbhsopZ8ZQsbXKZ-GPsw3Fq8AQAOC8W6bZgUZhIOw_k,886
 coze_coding_utils/error/classifier.py,sha256=uXVmufL_sn4w7oNyvrEFXSI_8mCi4mXY353UK5d-d0Y,10028
-coze_coding_utils/error/codes.py,sha256=34sC528UndVa96q0B2_BpvD-PVPPyZwL3wuVErxzx2U,17028
+coze_coding_utils/error/codes.py,sha256=IdSRHoWlwaIzfzUswmjT_lGS04_RHaHjSJUbV2DIhEA,17162
 coze_coding_utils/error/exceptions.py,sha256=QjGk56ovGG-2V4gHcTeJq3-3ZIQQ8DF692zgIYcEJxI,17074
-coze_coding_utils/error/patterns.py,sha256=YhhBcCoWQuvSbtA271eS3AB81pChypD7nDrPidQDu0s,44412
+coze_coding_utils/error/patterns.py,sha256=_Z_CtsiVng6dQnqWQwYxKrZm_DrLNYL8tw5_oO5I8x8,44871
 coze_coding_utils/error/test_classifier.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coze_coding_utils/file/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-coze_coding_utils/file/file.py,sha256=CFhnxHpmh2aPeMOq3PwSsZFsIiQ-YigYxgYvzJAfx_0,11828
+coze_coding_utils/file/file.py,sha256=fBda18EGSQZ3Xl8OqEaGAb5Rd90_SmhJ1k0jgQk2v7Y,11636
 coze_coding_utils/helper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coze_coding_utils/helper/agent_helper.py,sha256=q1ZM30xLXoW-m0NJmJ_Y0M-kUAQCBstG_j7xkqsyRSU,22546
 coze_coding_utils/helper/graph_helper.py,sha256=UNtqqiQNAQ4319qcC1vHiLYIL2eGzvGQRgXu3mgLq8Y,8893
-coze_coding_utils/helper/stream_runner.py,sha256=jxVsYSr3OM80hsf0RXLE9jnrtOZGEh43YWsggmo0D5Q,20483
+coze_coding_utils/helper/stream_runner.py,sha256=f66n6QJ3zCakhk7Fe4Vz9vTZ2KJuM9v9UJfqX5S3nDA,24050
 coze_coding_utils/log/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coze_coding_utils/log/common.py,sha256=mUNkCm68oaPaI6-a5UwLf87AfhrMnVPkEuri16guqKc,168
 coze_coding_utils/log/config.py,sha256=Qkw3JRuGUKJ6CBY7WqHJOFeyCU47cArvUtMsSBifFMo,195
@@ -32,7 +32,7 @@ coze_coding_utils/openai/types/request.py,sha256=IuNMT2Ce1--_32R30Q2q7Lb2dAwKNy3
 coze_coding_utils/openai/types/response.py,sha256=pjHHVR8LSMVFCc3fGzKqXrdoKDIfSCJEfICd_X9Nohc,4808
 coze_coding_utils/runtime_ctx/__init__.py,sha256=4W8VliAYUP1KY2gLJ_YDy2TmcXYVm-PY7XikQD_bFwA,2
 coze_coding_utils/runtime_ctx/context.py,sha256=G8ld-WnQ1pTJe5OOXC_dTbagXj9IxmpRiPM4X_jWW6o,3992
-coze_coding_utils-0.2.3a1.dist-info/METADATA,sha256=jQsLvJoMxI0T2yP88vJ2Ykw0xejmnum2mYtznqZvgiw,979
-coze_coding_utils-0.2.3a1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-coze_coding_utils-0.2.3a1.dist-info/licenses/LICENSE,sha256=lzckZhAjHlpSJcWvppoST095IHFpBwKiB2pKcBv7vP4,1078
-coze_coding_utils-0.2.3a1.dist-info/RECORD,,
+coze_coding_utils-0.2.3a3.dist-info/METADATA,sha256=BpT4ybyeGIjxSW4G62Z56WZ05ITL24mGEkKh-zDG47E,979
+coze_coding_utils-0.2.3a3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+coze_coding_utils-0.2.3a3.dist-info/licenses/LICENSE,sha256=lzckZhAjHlpSJcWvppoST095IHFpBwKiB2pKcBv7vP4,1078
+coze_coding_utils-0.2.3a3.dist-info/RECORD,,