coze-coding-utils 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (36)
  1. coze_coding_utils/__init__.py +1 -1
  2. {coze_coding_utils-0.2.0.dist-info → coze_coding_utils-0.2.1.dist-info}/METADATA +2 -17
  3. coze_coding_utils-0.2.1.dist-info/RECORD +7 -0
  4. coze_coding_utils/error/__init__.py +0 -31
  5. coze_coding_utils/error/classifier.py +0 -320
  6. coze_coding_utils/error/codes.py +0 -356
  7. coze_coding_utils/error/exceptions.py +0 -439
  8. coze_coding_utils/error/patterns.py +0 -939
  9. coze_coding_utils/error/test_classifier.py +0 -0
  10. coze_coding_utils/file/__init__.py +0 -0
  11. coze_coding_utils/file/file.py +0 -327
  12. coze_coding_utils/helper/__init__.py +0 -0
  13. coze_coding_utils/helper/agent_helper.py +0 -599
  14. coze_coding_utils/helper/graph_helper.py +0 -231
  15. coze_coding_utils/log/__init__.py +0 -0
  16. coze_coding_utils/log/common.py +0 -8
  17. coze_coding_utils/log/config.py +0 -10
  18. coze_coding_utils/log/err_trace.py +0 -88
  19. coze_coding_utils/log/loop_trace.py +0 -72
  20. coze_coding_utils/log/node_log.py +0 -487
  21. coze_coding_utils/log/parser.py +0 -255
  22. coze_coding_utils/log/write_log.py +0 -183
  23. coze_coding_utils/messages/__init__.py +0 -0
  24. coze_coding_utils/messages/client.py +0 -48
  25. coze_coding_utils/messages/server.py +0 -173
  26. coze_coding_utils/openai/__init__.py +0 -5
  27. coze_coding_utils/openai/converter/__init__.py +0 -6
  28. coze_coding_utils/openai/converter/request_converter.py +0 -165
  29. coze_coding_utils/openai/converter/response_converter.py +0 -467
  30. coze_coding_utils/openai/handler.py +0 -298
  31. coze_coding_utils/openai/types/__init__.py +0 -37
  32. coze_coding_utils/openai/types/request.py +0 -24
  33. coze_coding_utils/openai/types/response.py +0 -178
  34. coze_coding_utils-0.2.0.dist-info/RECORD +0 -37
  35. {coze_coding_utils-0.2.0.dist-info → coze_coding_utils-0.2.1.dist-info}/WHEEL +0 -0
  36. {coze_coding_utils-0.2.0.dist-info → coze_coding_utils-0.2.1.dist-info}/licenses/LICENSE +0 -0
coze_coding_utils/log/parser.py
@@ -1,255 +0,0 @@
- import inspect
- from dataclasses import dataclass
- from typing import Dict, Optional, Any, Callable, cast
- from langgraph.graph.state import CompiledStateGraph
- from langgraph.graph import START, END
-
-
- # return: title, description, integrations
- def extract_title_description(func_name, text: Optional[str]):
-     title = func_name
-     desc = ""
-     integrations = []
-
-     if text is None:
-         return title, desc, integrations
-
-     lines = text.strip().split('\n')
-     for line in lines:
-         if line.startswith('title'):
-             if line.startswith('title:'):
-                 title = line.split('title:', 1)[1].strip()
-             if line.startswith('title:'):
-                 title = line.split('title:', 1)[1].strip()
-         elif line.startswith('desc'):
-             if line.startswith('desc:'):
-                 desc = line.split('desc:', 1)[1].strip()
-             if line.startswith('desc:'):
-                 desc = line.split('desc:', 1)[1].strip()
-         elif line.startswith("integrations"):
-             items = ""
-             if line.startswith('integrations:'):
-                 items = line.split('integrations:', 1)[1].strip()
-             if line.startswith('integrations:'):
-                 items = line.split('integrations:', 1)[1].strip()
-             for item in items.split(","):
-                 integrations.append(IntegrationInfo(title=item))
-
-     if title == "":
-         title = func_name
-
-     return title, desc, integrations
-
-
- @dataclass
- class SourceLocation:
-     file: str  # source file path where the node is defined
-     line: int  # source line number where the node is defined
-
-
- @dataclass
- class ParamInfo:
-     name: str  # parameter name
-     ptype: str  # parameter type
-     optional: bool  # whether the parameter is optional
-     description: Optional[str] = None  # parameter description
-     items: Optional['ParamInfo'] = None  # child parameter, used for arrays
-     default: Optional[Any] = None  # default value, used for primitive types
-
-
- @dataclass
- class IntegrationInfo:
-     _id: str = ""  # integration ID
-     title: str = ""  # integration name
-     description: str = ""  # integration description
-
-
- @dataclass
- class NodeInfo:
-     node_id: str  # node_id in LangGraph; either set via add_node or defaulting to the node function name
-     name: str  # func name
-     title: str
-     description: str = ""
-     node_type: str = ""
-
-
- class LangGraphParser:
-     def __init__(self, app: CompiledStateGraph):
-         # Get the graph structure from LangGraph
-         self.graph_app = app
-         self.graph = app.get_graph()
-         # Build node information from the graph
-         self.nodes: Dict[str, NodeInfo] = {}  # NodeId -> NodeInfo
-         # Build the basic information, preferring what CompiledStateGraph provides
-         self._build_node_info()
-         self.condition_funcs = self._pre_process_conditional_fork_node_info()  # Track the predicate functions of condition nodes, since dummy and condition nodes get inserted in between
-
-     def _is_agent_node(self, node_id: str) -> bool:
-         """
-         Check whether this is an Agent node (currently a model node), marked via metadata injected through add_node
-         """
-         node = self.graph.nodes.get(node_id)
-         if node and node.metadata:
-             return node.metadata.get('type', '').lower() == "agent"
-         return False
-
-     def _is_loop_node(self, node_id: str) -> bool:
-         """Check whether this is a loop node"""
-         node = self.graph.nodes.get(node_id)
-         if node and node.metadata:
-             _type = node.metadata.get('type', '').lower()
-             return _type == "looparray" or _type == "loopcond"
-
-         return False
-
-     def _is_looparray_node(self, node_id: str) -> bool:
-         """Check whether this is a list (array) loop"""
-         node = self.graph.nodes.get(node_id)
-         if node and node.metadata:
-             _type = node.metadata.get('type', '').lower()
-             return _type == "looparray"
-
-         return False
-
-     def _is_loopcond_node(self, node_id: str) -> bool:
-         """Check whether this is a conditional loop"""
-         node = self.graph.nodes.get(node_id)
-         if node and node.metadata:
-             _type = node.metadata.get('type', '').lower()
-             return _type == "loopcond"
-
-         return False
-
-     def get_node_metadata(self, func_name: str) -> dict:
-         node_id = ""
-         for _, node in self.nodes.items():
-             if node.name == func_name:
-                 node_id = node.node_id
-
-         node = self.graph.nodes.get(node_id)
-         if node and node.metadata:
-             return node.metadata
-
-         return {}
-
-     def find_conditional_nodes(self):
-         conditional_nodes = set()
-         # Build the incoming-edge map: target -> [sources]
-         incoming = {}
-         for edge in self.graph.edges:
-             incoming.setdefault(edge.target, []).append(edge.source)
-
-         # Walk all conditional edges and map each condition source to a real business node
-         for edge in self.graph.edges:
-             if getattr(edge, "conditional", False):
-                 src = edge.source
-                 node_obj = self.graph.nodes.get(src)
-                 # The source is a dummy node (inserted at compile time, no data function); trace back to its upstream real node
-                 if not node_obj or not getattr(node_obj, "data", None):
-                     for parent in incoming.get(src, []):
-                         parent_obj = self.graph.nodes.get(parent)
-                         if parent_obj and getattr(parent_obj, "data", None):
-                             conditional_nodes.add(parent)
-                 else:
-                     conditional_nodes.add(src)
-         return conditional_nodes
-
-     def get_node_type(self, node_id):
-         # Simple inference logic
-         if node_id == START: return "start"
-         if node_id == END: return "end"
-         if self._is_loop_node(node_id): return "loop"
-         if self._is_agent_node(node_id): return "agent"
-         return "task"
-
-     def _generate_node_title(self, node_name: str) -> str:
-         """Generate the node title"""
-         if node_name == START:
-             return "开始"  # "Start"
-         elif node_name == END:
-             return "结束"  # "End"
-         else:
-             return node_name
-
-     def _enhance_loop_node(self, canvas_node: Dict, node_info):
-         """Flesh out the loop node definition"""
-         # Add the loop condition description
-         canvas_node["definition"]["info"]["condition_summary"] = {}
-         # TODO: the loop condition description temporarily reuses the functional description
-         if self._is_looparray_node(node_info.node_id):
-             canvas_node["definition"]["info"]["looptype"] = "looparray"
-             canvas_node["definition"]["info"]["condition_summary"]["looparray"] = node_info.description
-         else:
-             canvas_node["definition"]["info"]["looptype"] = "loopcond"
-             canvas_node["definition"]["info"]["condition_summary"]["loopcond"] = node_info.description
-
-     def _build_node_info(self):
-         """
-         Build node parameter information:
-         - function name
-         - function docstring
-         - input and output parameters
-         - source file path and line number
-
-         Fallback: if the AST is unavailable or parsing fails, extract basic information from CompiledStateGraph
-         """
-         self._build_node_info_by_langgraph()
-
-     def _build_node_info_by_langgraph(self):
-         for node_id, node in self.graph.nodes.items():
-             if node_id == START:
-                 # Start node: build the workflow's input parameters
-                 input_cls = self.graph_app.get_input_schema()
-                 self.nodes[node_id] = NodeInfo(
-                     node_id=node_id,
-                     name=START,
-                     title="开始",  # "Start"
-                     node_type="start",
-                 )
-                 continue
-
-             if node_id == END:
-                 # End node: build the workflow's output parameters
-                 output_cls = self.graph_app.get_output_schema()
-                 self.nodes[node_id] = NodeInfo(
-                     node_id=node_id,
-                     name=END,
-                     title="结束",  # "End"
-                     node_type="end",
-                 )
-                 continue
-
-             data = getattr(node, "data", None)
-             if data:
-                 _func = getattr(data, "func", None)
-                 if _func is None and callable(data):
-                     _func = cast(Callable[..., Any], data)
-                 if _func is None:
-                     continue
-                 node_name = _func.__name__
-                 docstring = inspect.getdoc(_func)
-                 title, desc, integrations = extract_title_description(node_name, docstring)
-                 if node_id not in self.nodes:
-                     self.nodes[node_id] = NodeInfo(
-                         node_id=node_id,
-                         name=node_name,
-                         title=title,
-                         description=desc,
-                         node_type=self.get_node_type(node_id),
-                     )
-
-     def _pre_process_conditional_fork_node_info(self):
-         '''
-         Build the main-node information for condition nodes (mainly the description; inputs/outputs can be added later).
-         Reason: a LangGraph condition function is not a real node, it is just an ordinary function.
-         '''
-
-         '''defaultdict(<class 'dict'>, {'join': {'should_continue_processing': BranchSpec(path=should_continue_processing(tags=None, recurse=True, explode_args=False, func_accepts={}), ends={'中文描述分支1': 'add_item_len', '默认分支': 'add_default_item_len'}, input_schema=<class 'graphs.state.BranchJoinInput'>)}})'''
-         branches = self.graph_app.builder.branches
-
-         conditional_funcs = {}  # parent_id: key : {"func":func,"branch_start_node":}
-         for parent_id, check in branches.items():
-             for check_func_name, spec in check.items():
-                 conditional_funcs[check_func_name] = {
-                     "cond_node_name": "cond_" + parent_id}  # assemble the condition node name used by the front end
-         return conditional_funcs
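
For orientation, here is a minimal usage sketch of the LangGraphParser removed above. It is not part of the diff: it assumes a CompiledStateGraph compiled elsewhere and the 0.2.0 import path, and `summarize_graph` is a hypothetical helper name.

# Hypothetical example, not shipped with the package.
from coze_coding_utils.log.parser import LangGraphParser  # 0.2.0 module path

def summarize_graph(app) -> None:
    """Print one line per node of a compiled LangGraph app."""
    parser = LangGraphParser(app)
    # parser.nodes maps node_id -> NodeInfo built from docstrings and add_node metadata
    for node_id, info in parser.nodes.items():
        print(f"{node_id}: type={info.node_type}, title={info.title}, desc={info.description}")
    # Sources of conditional edges, resolved back to real business nodes
    print("conditional sources:", parser.find_conditional_nodes())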
coze_coding_utils/log/write_log.py
@@ -1,183 +0,0 @@
- import logging
- import logging.handlers
- import json
- from contextvars import ContextVar
- from typing import Optional
- from pathlib import Path
- from coze_coding_utils.runtime_ctx.context import Context
- from coze_coding_utils.log.config import LOG_DIR
-
- request_context: ContextVar[Optional[Context]] = ContextVar('request_context', default=None)
-
-
- class ContextFilter(logging.Filter):
-
-     def filter(self, record: logging.LogRecord) -> bool:
-         ctx = request_context.get()
-
-         if ctx:
-             record.log_id = ctx.logid or ''
-             record.run_id = ctx.run_id or ''
-             record.space_id = ctx.space_id or ''
-             record.project_id = ctx.project_id or ''
-             record.method = ctx.method or ''
-             record.x_tt_env = ctx.x_tt_env or ''
-         else:
-             record.log_id = ''
-             record.run_id = ''
-             record.space_id = ''
-             record.project_id = ''
-             record.method = ''
-             record.x_tt_env = ''
-
-         return True
-
-
- class APSchedulerFilter(logging.Filter):
-
-     def filter(self, record: logging.LogRecord) -> bool:
-         if record.name.startswith('apscheduler'):
-             message = record.getMessage()
-             if 'Running job' in message or 'next run at:' in message:
-                 return False
-         return True
-
-
- class JsonFormatter(logging.Formatter):
-
-     def format(self, record: logging.LogRecord) -> str:
-         log_data = {
-             'message': record.getMessage(),
-             'timestamp': self.formatTime(record, self.datefmt),
-             'level': record.levelname,
-             'logger': record.name,
-             'log_id': getattr(record, 'log_id', ''),
-             'run_id': getattr(record, 'run_id', ''),
-             'space_id': getattr(record, 'space_id', ''),
-             'project_id': getattr(record, 'project_id', ''),
-             'method': getattr(record, 'method', ''),
-             'x_tt_env': getattr(record, 'x_tt_env', ''),
-             'lineno': record.lineno,
-             'funcName': record.funcName,
-         }
-
-         if record.exc_info:
-             log_data['exc_info'] = self.formatException(record.exc_info)
-
-         for key, value in record.__dict__.items():
-             if key not in ['name', 'msg', 'args', 'created', 'filename', 'funcName',
-                            'levelname', 'levelno', 'lineno', 'module', 'msecs',
-                            'message', 'pathname', 'process', 'processName', 'relativeCreated',
-                            'thread', 'threadName', 'exc_info', 'exc_text', 'stack_info',
-                            'log_id', 'run_id', 'space_id', 'project_id', 'method',
-                            'x_tt_env', 'rpc_persist_rec_rec_biz_scene',
-                            'rpc_persist_coze_record_root_id', 'rpc_persist_rec_root_entity_type',
-                            'rpc_persist_rec_root_entity_id']:
-                 log_data[key] = value
-
-         return json.dumps(log_data, ensure_ascii=False)
-
-
- class PlainTextFormatter(logging.Formatter):
-
-     def format(self, record: logging.LogRecord) -> str:
-         log_data = {
-             'message': record.getMessage(),
-             'timestamp': self.formatTime(record, self.datefmt),
-             'level': record.levelname,
-             'logger': record.name,
-             'log_id': getattr(record, 'log_id', ''),
-             'run_id': getattr(record, 'run_id', ''),
-             'space_id': getattr(record, 'space_id', ''),
-             'project_id': getattr(record, 'project_id', ''),
-             'method': getattr(record, 'method', ''),
-             'x_tt_env': getattr(record, 'x_tt_env', ''),
-             'lineno': record.lineno,
-             'funcName': record.funcName,
-         }
-
-         if record.exc_info:
-             log_data['exc_info'] = self.formatException(record.exc_info)
-
-         for key, value in record.__dict__.items():
-             if key not in ['name', 'msg', 'args', 'created', 'filename', 'funcName',
-                            'levelname', 'levelno', 'lineno', 'module', 'msecs',
-                            'message', 'pathname', 'process', 'processName', 'relativeCreated',
-                            'thread', 'threadName', 'exc_info', 'exc_text', 'stack_info',
-                            'log_id', 'run_id', 'space_id', 'project_id', 'method',
-                            'x_tt_env', 'rpc_persist_rec_rec_biz_scene',
-                            'rpc_persist_coze_record_root_id', 'rpc_persist_rec_root_entity_type',
-                            'rpc_persist_rec_root_entity_id']:
-                 log_data[key] = value
-
-         return json.dumps(log_data, ensure_ascii=False)
-
-
- def setup_logging(
-     log_file: Optional[str] = None,
-     max_bytes: int = 100 * 1024 * 1024,
-     backup_count: int = 5,
-     log_level: str = "INFO",
-     use_json_format: bool = True,
-     console_output: bool = True
- ):
-
-     if log_file is None:
-         try:
-             log_dir = Path(LOG_DIR)
-             log_dir.mkdir(parents=True, exist_ok=True)
-             log_file = str(log_dir / 'app.log')
-         except Exception as e:
-             fallback_log_dir = Path('/tmp/work/logs/bypass')
-             fallback_log_dir.mkdir(parents=True, exist_ok=True)
-             log_file = str(fallback_log_dir / 'app.log')
-             print(f"Warning: Using fallback log directory: {fallback_log_dir}, due to error: {e}", flush=True)
-
-     root_logger = logging.getLogger()
-     root_logger.setLevel(getattr(logging, log_level.upper(), logging.INFO))
-
-     root_logger.handlers.clear()
-
-     context_filter = ContextFilter()
-     apscheduler_filter = APSchedulerFilter()
-
-     file_handler = logging.handlers.RotatingFileHandler(
-         filename=log_file,
-         maxBytes=max_bytes,
-         backupCount=backup_count,
-         encoding='utf-8'
-     )
-     file_handler.setLevel(getattr(logging, log_level.upper(), logging.INFO))
-
-     if use_json_format:
-         file_formatter = JsonFormatter()
-     else:
-         file_formatter = PlainTextFormatter(
-             fmt='%(asctime)s %(levelname)s [log_id=%(log_id)s] [run_id=%(run_id)s] %(name)s:%(lineno)d %(message)s',
-             datefmt='%Y-%m-%d %H:%M:%S'
-         )
-
-     file_handler.setFormatter(file_formatter)
-     file_handler.addFilter(context_filter)
-     file_handler.addFilter(apscheduler_filter)
-     root_logger.addHandler(file_handler)
-
-     if console_output:
-         console_handler = logging.StreamHandler()
-         console_handler.setLevel(getattr(logging, log_level.upper(), logging.INFO))
-
-         console_formatter = PlainTextFormatter(
-             fmt='%(asctime)s %(levelname)s [log_id=%(log_id)s] [run_id=%(run_id)s] %(name)s:%(lineno)d %(message)s',
-             datefmt='%Y-%m-%d %H:%M:%S'
-         )
-         console_handler.setFormatter(console_formatter)
-         console_handler.addFilter(context_filter)
-         console_handler.addFilter(apscheduler_filter)
-         root_logger.addHandler(console_handler)
-
-     logging.info(f"Logging configured: file={log_file}, max_bytes={max_bytes}, backup_count={backup_count}")
-
-     return log_file
-
-
- __all__ = ['setup_logging', 'request_context', 'ContextFilter', 'APSchedulerFilter', 'JsonFormatter', 'PlainTextFormatter']
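
As a reference point, a hedged sketch of how the removed setup_logging helper would typically be invoked under 0.2.0; the path and rotation values below are placeholders, not defaults mandated by the package.

# Illustrative only: rotating JSON file logs plus a plain console handler.
import logging
from coze_coding_utils.log.write_log import setup_logging  # 0.2.0 module path

log_path = setup_logging(
    log_file="/tmp/app.log",     # explicit placeholder path; None falls back to LOG_DIR
    max_bytes=10 * 1024 * 1024,  # rotate at 10 MB
    backup_count=3,
    log_level="INFO",
    use_json_format=True,        # JsonFormatter on the file handler
    console_output=True,         # adds a console handler as well
)
logging.getLogger(__name__).info("logging initialized, writing to %s", log_path)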
coze_coding_utils/messages/client.py
@@ -1,48 +0,0 @@
- from dataclasses import dataclass, field
- from typing import List, Optional, Literal
-
- MESSAGE_TYPE_QUERY = "query"
- MessageType = Literal["query"]
-
- BLOCK_TYPE_TEXT = "text"
- BLOCK_TYPE_UPLOAD_FILE = "upload_file"
-
- BlockType = Literal["text", "upload_file"]
-
-
- @dataclass
- class UploadFileBlockDetail:
-     file_name: str = field(default_factory=str)
-     file_path: str = field(default_factory=str)
-     url: str = field(default_factory=str)
-
-
- @dataclass
- class PromptBlockContent:
-     text: Optional[str] = field(default=None)
-     upload_file: Optional[UploadFileBlockDetail] = field(default=None)
-
-
- @dataclass
- class PromptBlock:
-     type: BlockType = field(default_factory=str)
-     content: PromptBlockContent = field(default_factory=PromptBlockContent)
-
-
- @dataclass
- class QueryDetail:
-     prompt: List[PromptBlock] = field(default_factory=list)
-
-
- @dataclass
- class ClientMessageContent:
-     query: Optional[QueryDetail] = field(default=None)
-
-
- @dataclass
- class ClientMessage:
-     type: MessageType = field(default_factory=str)
-     project_id: str = field(default_factory=str)
-     session_id: str = field(default_factory=str)
-     local_msg_id: str = field(default_factory=str)
-     content: ClientMessageContent = field(default_factory=ClientMessageContent)
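
For reference, a small sketch (assumed usage, not taken from the diff) of building a query message with the removed client.py dataclasses; all IDs are placeholders.

# Hypothetical example of a "query" client message carrying one text block.
from coze_coding_utils.messages.client import (
    ClientMessage, ClientMessageContent, QueryDetail, PromptBlock,
    PromptBlockContent, MESSAGE_TYPE_QUERY, BLOCK_TYPE_TEXT,
)

msg = ClientMessage(
    type=MESSAGE_TYPE_QUERY,
    project_id="p-123",      # placeholder IDs
    session_id="s-456",
    local_msg_id="local-1",
    content=ClientMessageContent(
        query=QueryDetail(
            prompt=[PromptBlock(type=BLOCK_TYPE_TEXT,
                                content=PromptBlockContent(text="hello"))],
        ),
    ),
)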
coze_coding_utils/messages/server.py
@@ -1,173 +0,0 @@
- import uuid
- from dataclasses import dataclass, field, asdict
- from typing import List, Optional, Any, Dict, Literal
-
- # Message Types
- MESSAGE_TYPE_ANSWER = "answer"
- MESSAGE_TYPE_THINKING = "thinking"
- MESSAGE_TYPE_TOOL_REQUEST = "tool_request"
- MESSAGE_TYPE_TOOL_RESPONSE = "tool_response"
- MESSAGE_TYPE_MESSAGE_START = "message_start"
- MESSAGE_TYPE_MESSAGE_END = "message_end"
- MESSAGE_TYPE_ERROR = "error"
-
-
-
- MessageType = Literal[
-     MESSAGE_TYPE_ANSWER,
-     MESSAGE_TYPE_THINKING,
-     MESSAGE_TYPE_TOOL_REQUEST,
-     MESSAGE_TYPE_TOOL_RESPONSE,
-     MESSAGE_TYPE_MESSAGE_START,
-     MESSAGE_TYPE_MESSAGE_END,
-     MESSAGE_TYPE_ERROR,
- ]
-
-
- # Message End Codes
- MESSAGE_END_CODE_SUCCESS = "0"
- MESSAGE_END_CODE_CANCELED = "1"
-
- # Tool Response Codes
- TOOL_RESP_CODE_SUCCESS = "0"
-
-
- @dataclass
- class TokenCost:
-     input_tokens: int = field(default_factory=int)
-     output_tokens: int = field(default_factory=int)
-     total_tokens: int = field(default_factory=int)
-
-
- @dataclass
- class MessageEndDetail:
-     code: str = field(default_factory=str)  # error code
-     message: str = field(default_factory=str)  # error message
-
-     token_cost: Optional[TokenCost] = field(default=None)  # number of tokens consumed
-     time_cost_ms: Optional[int] = field(default=None)  # elapsed time, in milliseconds
-
-
- @dataclass
- class MessageStartDetail:
-     local_msg_id: str = field(default_factory=str)
-     msg_id: str = field(default_factory=str)
-     execute_id: str = field(default_factory=str)
-
- @dataclass
- class ErrorDetail:
-     local_msg_id: str = field(default_factory=str)
-     code: str = field(default_factory=str)  # error code
-     error_msg: str = field(default_factory=str)  # error message
-
- @dataclass
- class ToolRequestDetail:
-     tool_call_id: str = field(default_factory=str)
-     tool_name: str = field(default_factory=str)
-     parameters: Dict[str, Any] = field(default_factory=dict)  # tool_name to parameters
-
-
- @dataclass
- class ToolResponseDetail:
-     tool_call_id: str = field(default_factory=str)
-
-     code: str = field(default_factory=str)  # error code
-     message: str = field(default_factory=str)  # error message
-
-     result: str = field(default_factory=str)  # tool execution result
-     time_cost_ms: Optional[int] = field(default=None)  # elapsed time, in milliseconds
-
-
- @dataclass
- class ServerMessageContent:
-     answer: Optional[str] = field(default=None)  # answer content
-     thinking: Optional[str] = field(default=None)  # thinking content
-     tool_request: Optional[ToolRequestDetail] = field(default=None)  # tool request details
-     tool_response: Optional[ToolResponseDetail] = field(default=None)  # tool response details
-
-     error: Optional[ErrorDetail] = field(default=None)  # error details
-
-     message_start: Optional[MessageStartDetail] = field(default=None)  # message-start details, sent after the message is received
-     message_end: Optional[MessageEndDetail] = field(default=None)  # message-end details, sent after the message has been processed
-
-
- @dataclass
- class ServerMessage:
-     type: MessageType = field(default_factory=str)  # message type
-     session_id: str = field(default_factory=str)  # session id
-     query_msg_id: str = field(default_factory=str)  # the corresponding client_msg_id
-     reply_id: str = field(default_factory=str)  # reply id, unique within a single reply
-     msg_id: str = field(
-         default_factory=str
-     )  # message id; every individual message (tool_request/tool_response, etc.) has a unique msg_id
-     sequence_id: int = field(default_factory=int)  # ordinal of the message within the reply, increasing from 1
-     finish: bool = field(
-         default_factory=bool
-     )  # for streaming messages, e.g. streamed thinking, whether this is the final chunk
-     content: ServerMessageContent = field(
-         default_factory=ServerMessageContent
-     )  # message content
-     log_id: str = field(default_factory=str)  # log id, used to correlate logs
-
-     def dict(self):
-         return asdict(self)
-
-
-
- def create_message_end_dict(
-     code: str,
-     message: str,
-     session_id: str,
-     query_msg_id: str,
-     log_id: str,
-     time_cost_ms: int,
-     reply_id: str = '',
-     sequence_id: int = 1,
- ) -> Dict[str, Any]:
-     """Create a message_end message dict, reusing the existing ServerMessage structure"""
-     return ServerMessage(
-         type=MESSAGE_TYPE_MESSAGE_END,
-         session_id=session_id,
-         query_msg_id=query_msg_id,
-         reply_id=reply_id,
-         msg_id=str(uuid.uuid4()),
-         sequence_id=sequence_id,
-         finish=True,
-         content=ServerMessageContent(
-             message_end=MessageEndDetail(
-                 code=code,
-                 message=message,
-                 time_cost_ms=time_cost_ms,
-                 token_cost=TokenCost(input_tokens=0, output_tokens=0, total_tokens=0),
-             )
-         ),
-         log_id=log_id,
-     ).dict()
-
-
- def create_message_error_dict(
-     code: str,
-     message: str,
-     session_id: str,
-     query_msg_id: str,
-     log_id: str,
-     reply_id: str = '',
-     sequence_id: int = 1,
-     local_msg_id: str = '',
- ) -> Dict[str, Any]:
-     return ServerMessage(
-         type=MESSAGE_TYPE_ERROR,
-         session_id=session_id,
-         query_msg_id=query_msg_id,
-         reply_id=reply_id,
-         sequence_id=sequence_id,
-         finish=True,
-         content=ServerMessageContent(
-             error=ErrorDetail(
-                 local_msg_id=local_msg_id,
-                 code=code,
-                 error_msg=message,
-             )
-         ),
-         log_id=log_id,
-     ).dict()
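
Likewise, a hedged sketch of calling the removed create_message_end_dict helper; the IDs and timing value are placeholders.

# Illustrative only: emit a successful message_end event as a plain dict.
from coze_coding_utils.messages.server import (
    create_message_end_dict, MESSAGE_END_CODE_SUCCESS,
)

end_event = create_message_end_dict(
    code=MESSAGE_END_CODE_SUCCESS,
    message="ok",
    session_id="s-456",
    query_msg_id="m-789",
    log_id="log-001",
    time_cost_ms=120,
)
# end_event is produced via dataclasses.asdict(), so it is JSON-serializable as-is.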
coze_coding_utils/openai/__init__.py
@@ -1,5 +0,0 @@
- """OpenAI Chat Completions API compatibility layer"""
-
- from coze_coding_utils.openai.handler import OpenAIChatHandler
-
- __all__ = ["OpenAIChatHandler"]
coze_coding_utils/openai/converter/__init__.py
@@ -1,6 +0,0 @@
- """OpenAI converters"""
-
- from coze_coding_utils.openai.converter.request_converter import RequestConverter
- from coze_coding_utils.openai.converter.response_converter import ResponseConverter
-
- __all__ = ["RequestConverter", "ResponseConverter"]