sycommon-python-lib 0.1.56b18__py3-none-any.whl → 0.1.57__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sycommon/llm/get_llm.py +6 -290
- sycommon/llm/struct_token.py +192 -0
- sycommon/llm/sy_langfuse.py +103 -0
- sycommon/llm/usage_token.py +117 -0
- sycommon/rabbitmq/rabbitmq_client.py +6 -5
- sycommon/rabbitmq/rabbitmq_service_client_manager.py +6 -1
- sycommon/services.py +118 -96
- sycommon/synacos/nacos_client_base.py +3 -1
- {sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/METADATA +1 -1
- {sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/RECORD +13 -10
- {sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/WHEEL +0 -0
- {sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/entry_points.txt +0 -0
- {sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/top_level.txt +0 -0
sycommon/llm/get_llm.py
CHANGED
@@ -1,295 +1,8 @@
-import os
-from typing import Dict, Tuple, Type, List, Optional, Callable, Any
-from langfuse import Langfuse, get_client, propagate_attributes
-from sycommon.config.Config import Config
 from sycommon.llm.llm_logger import LLMLogger
-from langchain_core.language_models import BaseChatModel
-from langchain_core.runnables import Runnable, RunnableLambda, RunnableConfig
-from langchain_core.output_parsers import PydanticOutputParser
-from langchain_core.messages import BaseMessage, HumanMessage
 from langchain.chat_models import init_chat_model
-from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
-from pydantic import BaseModel, ValidationError, Field
 from sycommon.config.LLMConfig import LLMConfig
-from sycommon.llm.
-from sycommon.
-from sycommon.tools.env import get_env_var
-from sycommon.tools.merge_headers import get_header_value
-from langfuse.langchain import CallbackHandler
-
-
-class StructuredRunnableWithToken(Runnable):
-    """带Token统计的Runnable类"""
-
-    def __init__(self, retry_chain: Runnable, langfuse: Langfuse):
-        super().__init__()
-        self.retry_chain = retry_chain
-        self.langfuse = langfuse
-        self.metadata = {"langfuse_session_id": SYLogger.get_trace_id()}
-
-    def _adapt_input(self, input: Any) -> List[BaseMessage]:
-        """适配输入格式"""
-        if isinstance(input, list) and all(isinstance(x, BaseMessage) for x in input):
-            return input
-        elif isinstance(input, BaseMessage):
-            return [input]
-        elif isinstance(input, str):
-            return [HumanMessage(content=input)]
-        elif isinstance(input, dict) and "input" in input:
-            return [HumanMessage(content=str(input["input"]))]
-        else:
-            raise ValueError(f"不支持的输入格式:{type(input)}")
-
-    def _get_callback_config(self, config: Optional[RunnableConfig] = None) -> tuple[RunnableConfig, TokensCallbackHandler]:
-        """构建包含Token统计的回调配置"""
-        # 每次调用创建新的Token处理器实例
-        token_handler = TokensCallbackHandler()
-
-        # 初始化配置
-        if config is None:
-            processed_config = {"callbacks": []}
-        else:
-            processed_config = config.copy()
-            if "callbacks" not in processed_config:
-                processed_config["callbacks"] = []
-
-        # 添加回调(去重)
-        callbacks = processed_config["callbacks"]
-        # 添加LLMLogger(如果不存在)
-        if not any(isinstance(cb, LLMLogger) for cb in callbacks):
-            callbacks.append(LLMLogger())
-        # 添加Token处理器
-        callbacks.append(token_handler)
-
-        # 按类型去重
-        callback_types = {}
-        unique_callbacks = []
-        for cb in callbacks:
-            cb_type = type(cb)
-            if cb_type not in callback_types:
-                callback_types[cb_type] = cb
-                unique_callbacks.append(cb)
-
-        processed_config["callbacks"] = unique_callbacks
-
-        return processed_config, token_handler
-
-    # 同步调用
-    def invoke(self, input: Any, config: Optional[RunnableConfig] = None, ** kwargs) -> Dict[str, Any]:
-        try:
-            with self.langfuse.start_as_current_observation(as_type="span", name="invoke") as span:
-                trace_id = SYLogger.get_trace_id()
-                userid = get_header_value(
-                    SYLogger.get_headers(), "x-userid-header")
-                syVersion = get_header_value(
-                    SYLogger.get_headers(), "s-y-version")
-
-                user_id = userid or syVersion or get_env_var('VERSION')
-                with propagate_attributes(session_id=trace_id, user_id=user_id):
-                    processed_config, token_handler = self._get_callback_config(
-                        config)
-                    adapted_input = self._adapt_input(input)
-                    input = {"messages": adapted_input}
-
-                    span.update_trace(input=input)
-
-                    structured_result = self.retry_chain.invoke(
-                        input,
-                        config={**processed_config, **self.metadata},
-                        **kwargs
-                    )
-
-                    span.update_trace(output=structured_result)
-
-                    token_usage = token_handler.usage_metadata
-                    structured_result._token_usage_ = token_usage
-
-                    return structured_result
-        except Exception as e:
-            SYLogger.error(f"同步LLM调用失败: {str(e)}", exc_info=True)
-            return None
-
-    # 异步调用
-    async def ainvoke(self, input: Any, config: Optional[RunnableConfig] = None, ** kwargs) -> Dict[str, Any]:
-        try:
-            with self.langfuse.start_as_current_observation(as_type="span", name="ainvoke") as span:
-                trace_id = SYLogger.get_trace_id()
-                userid = get_header_value(
-                    SYLogger.get_headers(), "x-userid-header")
-                syVersion = get_header_value(
-                    SYLogger.get_headers(), "s-y-version")
-
-                user_id = userid or syVersion or get_env_var('VERSION')
-                with propagate_attributes(session_id=trace_id, user_id=user_id):
-                    processed_config, token_handler = self._get_callback_config(
-                        config)
-                    adapted_input = self._adapt_input(input)
-
-                    input = {"messages": adapted_input}
-
-                    span.update_trace(input=input)
-
-                    structured_result = await self.retry_chain.ainvoke(
-                        input,
-                        config={**processed_config, **self.metadata},
-                        **kwargs
-                    )
-
-                    span.update_trace(output=structured_result)
-
-                    token_usage = token_handler.usage_metadata
-                    structured_result._token_usage_ = token_usage
-
-                    return structured_result
-        except Exception as e:
-            SYLogger.error(f"异步LLM调用失败: {str(e)}", exc_info=True)
-            return None
-
-
-class LLMWithAutoTokenUsage(BaseChatModel):
-    """自动为结构化调用返回token_usage的LLM包装类"""
-    llm: BaseChatModel = Field(default=None)
-    langfuse: Optional[Langfuse] = Field(default=None, exclude=True)
-
-    def __init__(self, llm: BaseChatModel, langfuse: Langfuse, **kwargs):
-        super().__init__(llm=llm, langfuse=langfuse, **kwargs)
-
-    def with_structured_output(
-        self,
-        output_model: Type[BaseModel],
-        max_retries: int = 3,
-        is_extract: bool = False,
-        override_prompt: ChatPromptTemplate = None,
-        custom_processors: Optional[List[Callable[[str], str]]] = None,
-        custom_parser: Optional[Callable[[str], BaseModel]] = None
-    ) -> Runnable:
-        """返回支持自动统计Token的结构化Runnable"""
-        parser = PydanticOutputParser(pydantic_object=output_model)
-
-        # 提示词模板
-        accuracy_instructions = """
-        字段值的抽取准确率(0~1之间),评分规则:
-        1.0(完全准确):直接从原文提取,无需任何加工,且格式与原文完全一致
-        0.9(轻微处理):数据来源明确,但需进行格式标准化或冗余信息剔除(不改变原始数值)
-        0.8(有限推断):数据需通过上下文关联或简单计算得出,仍有明确依据
-        0.8以下(不可靠):数据需大量推测、存在歧义或来源不明,处理方式:直接忽略该数据,设置为None
-        """
-
-        if is_extract:
-            prompt = ChatPromptTemplate.from_messages([
-                MessagesPlaceholder(variable_name="messages"),
-                HumanMessage(content=f"""
-                请提取信息并遵循以下规则:
-                1. 准确率要求:{accuracy_instructions.strip()}
-                2. 输出格式:{parser.get_format_instructions()}
-                """)
-            ])
-        else:
-            prompt = override_prompt or ChatPromptTemplate.from_messages([
-                MessagesPlaceholder(variable_name="messages"),
-                HumanMessage(content=f"""
-                输出格式:{parser.get_format_instructions()}
-                """)
-            ])
-
-        # 文本处理函数
-        def extract_response_content(response: BaseMessage) -> str:
-            try:
-                return response.content
-            except Exception as e:
-                raise ValueError(f"提取响应内容失败:{str(e)}") from e
-
-        def strip_code_block_markers(content: str) -> str:
-            try:
-                return content.strip("```json").strip("```").strip()
-            except Exception as e:
-                raise ValueError(f"移除代码块标记失败:{str(e)}") from e
-
-        def normalize_in_json(content: str) -> str:
-            try:
-                return content.replace("None", "null").replace("none", "null").replace("NONE", "null").replace("''", '""')
-            except Exception as e:
-                raise ValueError(f"JSON格式化失败:{str(e)}") from e
-
-        def default_parse_to_pydantic(content: str) -> BaseModel:
-            try:
-                return parser.parse(content)
-            except (ValidationError, ValueError) as e:
-                raise ValueError(f"解析结构化结果失败:{str(e)}") from e
-
-        # ========== 构建处理链 ==========
-        base_chain = prompt | self.llm | RunnableLambda(
-            extract_response_content)
-
-        # 文本处理链
-        process_runnables = custom_processors or [
-            RunnableLambda(strip_code_block_markers),
-            RunnableLambda(normalize_in_json)
-        ]
-        process_chain = base_chain
-        for runnable in process_runnables:
-            process_chain = process_chain | runnable
-
-        # 解析链
-        parse_chain = process_chain | RunnableLambda(
-            custom_parser or default_parse_to_pydantic)
-
-        # 重试链
-        retry_chain = parse_chain.with_retry(
-            retry_if_exception_type=(ValidationError, ValueError),
-            stop_after_attempt=max_retries,
-            wait_exponential_jitter=True,
-            exponential_jitter_params={
-                "initial": 0.1, "max": 3.0, "exp_base": 2.0, "jitter": 1.0}
-        )
-
-        return StructuredRunnableWithToken(retry_chain, self.langfuse)
-
-    # ========== 实现BaseChatModel抽象方法 ==========
-    def _generate(self, messages, stop=None, run_manager=None, ** kwargs):
-        return self.llm._generate(messages, stop=stop, run_manager=run_manager, ** kwargs)
-
-    @property
-    def _llm_type(self) -> str:
-        return self.llm._llm_type
-
-
-def _init_langfuse() -> Tuple[List[CallbackHandler], Optional[Langfuse]]:
-    """
-    初始化 Langfuse 组件的辅助函数
-    """
-    callbacks = []
-    langfuse = None
-
-    # 基础日志回调
-    callbacks.append(LLMLogger())
-    config_dict = Config().config
-
-    server_name = config_dict.get('Name', '')
-    langfuse_configs = config_dict.get('LangfuseConfig', [])
-    environment = config_dict.get('Nacos', {}).get('namespaceId', '')
-
-    # 查找当前服务对应的 Langfuse 配置
-    target_config = next(
-        (item for item in langfuse_configs if item.get('name') == server_name), None
-    )
-
-    if target_config and target_config.get('enable', False):
-        # 设置环境变量
-        os.environ["LANGFUSE_SECRET_KEY"] = target_config.get('secretKey', '')
-        os.environ["LANGFUSE_PUBLIC_KEY"] = target_config.get('publicKey', '')
-        os.environ["LANGFUSE_BASE_URL"] = target_config.get('baseUrl', '')
-        os.environ["LANGFUSE_TRACING_ENVIRONMENT"] = environment
-        os.environ["OTEL_SERVICE_NAME"] = server_name
-
-        # 创建 Langfuse Handler 和 Client
-        langfuse_handler = CallbackHandler(
-            trace_context={"trace_id": SYLogger.get_trace_id()}
-        )
-        callbacks.append(langfuse_handler)
-        langfuse = get_client()
-
-    return callbacks, langfuse
+from sycommon.llm.sy_langfuse import LangfuseInitializer
+from sycommon.llm.usage_token import LLMWithAutoTokenUsage
 
 
 def get_llm(
@@ -303,7 +16,10 @@ def get_llm(
     if not llmConfig:
         raise Exception(f"无效的模型配置:{model}")
 
-
+    # 初始化Langfuse
+    langfuse_callbacks, langfuse = LangfuseInitializer.get()
+
+    callbacks = [LLMLogger()] + langfuse_callbacks
 
     llm = init_chat_model(
         model_provider=llmConfig.provider,
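
The net effect of this change is that get_llm.py shrinks to a thin factory: the structured-output wrapper and the Langfuse bootstrap now live in the new usage_token, struct_token, and sy_langfuse modules. A minimal sketch of the updated import surface, assuming downstream code previously imported these names from sycommon.llm.get_llm (that assumption is not shown in the diff itself):

# Sketch only: new import locations after the 0.1.57 module split.
from sycommon.llm.usage_token import LLMWithAutoTokenUsage        # structured-output wrapper
from sycommon.llm.struct_token import StructuredRunnableWithToken  # token-counting Runnable
from sycommon.llm.sy_langfuse import LangfuseInitializer           # singleton Langfuse bootstrap

# get_llm itself now pulls its Langfuse callbacks from the initializer:
langfuse_callbacks, langfuse = LangfuseInitializer.get()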
sycommon/llm/struct_token.py
ADDED
@@ -0,0 +1,192 @@
+from typing import Dict, List, Optional, Any
+from langfuse import Langfuse, LangfuseSpan, propagate_attributes
+from sycommon.llm.llm_logger import LLMLogger
+from langchain_core.runnables import Runnable, RunnableConfig
+from langchain_core.messages import BaseMessage, HumanMessage
+from sycommon.llm.llm_tokens import TokensCallbackHandler
+from sycommon.logging.kafka_log import SYLogger
+from sycommon.tools.env import get_env_var
+from sycommon.tools.merge_headers import get_header_value
+
+
+class StructuredRunnableWithToken(Runnable):
+    """带Token统计的Runnable类"""
+
+    def __init__(self, retry_chain: Runnable, langfuse: Optional[Langfuse]):
+        super().__init__()
+        self.retry_chain = retry_chain
+        self.langfuse = langfuse
+
+    def _adapt_input(self, input: Any) -> List[BaseMessage]:
+        """适配输入格式"""
+        if isinstance(input, list) and all(isinstance(x, BaseMessage) for x in input):
+            return input
+        elif isinstance(input, BaseMessage):
+            return [input]
+        elif isinstance(input, str):
+            return [HumanMessage(content=input)]
+        elif isinstance(input, dict) and "input" in input:
+            return [HumanMessage(content=str(input["input"]))]
+        else:
+            raise ValueError(f"不支持的输入格式:{type(input)}")
+
+    def _get_callback_config(
+        self,
+        config: Optional[RunnableConfig] = None,
+        trace_id: Optional[str] = None,
+        user_id: Optional[str] = None
+    ) -> tuple[RunnableConfig, TokensCallbackHandler]:
+        """构建包含Token统计和metadata的回调配置"""
+        token_handler = TokensCallbackHandler()
+
+        if config is None:
+            processed_config = {"callbacks": [], "metadata": {}}
+        else:
+            processed_config = config.copy()
+            if "callbacks" not in processed_config:
+                processed_config["callbacks"] = []
+            if "metadata" not in processed_config:
+                processed_config["metadata"] = {}
+
+        # 添加 Langfuse metadata
+        if trace_id:
+            processed_config["metadata"]["langfuse_session_id"] = trace_id
+        if user_id:
+            processed_config["metadata"]["langfuse_user_id"] = user_id
+
+        callbacks = processed_config["callbacks"]
+        if not any(isinstance(cb, LLMLogger) for cb in callbacks):
+            callbacks.append(LLMLogger())
+        callbacks.append(token_handler)
+
+        callback_types = {}
+        unique_callbacks = []
+        for cb in callbacks:
+            cb_type = type(cb)
+            if cb_type not in callback_types:
+                callback_types[cb_type] = cb
+                unique_callbacks.append(cb)
+
+        processed_config["callbacks"] = unique_callbacks
+
+        return processed_config, token_handler
+
+    def invoke(self, input: Any, config: Optional[RunnableConfig] = None, **kwargs) -> Dict[str, Any]:
+        # 获取 trace_id 和 user_id
+        trace_id = SYLogger.get_trace_id()
+        userid = get_header_value(SYLogger.get_headers(), "x-userid-header")
+        syVersion = get_header_value(SYLogger.get_headers(), "s-y-version")
+        user_id = userid or syVersion or get_env_var('VERSION')
+
+        # 判断是否启用 Langfuse
+        if self.langfuse:
+            try:
+                with self.langfuse.start_as_current_observation(as_type="span", name="invoke") as span:
+                    with propagate_attributes(session_id=trace_id, user_id=user_id):
+                        span.update_trace(user_id=user_id, session_id=trace_id)
+                        return self._execute_chain(input, config, trace_id, user_id, span)
+            except Exception as e:
+                # Langfuse 跟踪失败不应阻断业务,降级执行
+                SYLogger.error(f"Langfuse 同步跟踪失败: {str(e)}", exc_info=True)
+                return self._execute_chain(input, config, trace_id, user_id, None)
+        else:
+            # 未启用 Langfuse,直接执行业务逻辑
+            return self._execute_chain(input, config, trace_id, user_id, None)
+
+    async def ainvoke(self, input: Any, config: Optional[RunnableConfig] = None, **kwargs) -> Dict[str, Any]:
+        # 获取 trace_id 和 user_id
+        trace_id = SYLogger.get_trace_id()
+        userid = get_header_value(SYLogger.get_headers(), "x-userid-header")
+        syVersion = get_header_value(SYLogger.get_headers(), "s-y-version")
+        user_id = userid or syVersion or get_env_var('VERSION')
+
+        # 判断是否启用 Langfuse
+        if self.langfuse:
+            try:
+                with self.langfuse.start_as_current_observation(as_type="span", name="ainvoke") as span:
+                    with propagate_attributes(session_id=trace_id, user_id=user_id):
+                        span.update_trace(user_id=user_id, session_id=trace_id)
+                        return await self._aexecute_chain(input, config, trace_id, user_id, span)
+            except Exception as e:
+                # Langfuse 跟踪失败不应阻断业务,降级执行
+                SYLogger.error(f"Langfuse 异步跟踪失败: {str(e)}", exc_info=True)
+                return await self._aexecute_chain(input, config, trace_id, user_id, None)
+        else:
+            # 未启用 Langfuse,直接执行业务逻辑
+            return await self._aexecute_chain(input, config, trace_id, user_id, None)
+
+    def _execute_chain(
+        self,
+        input: Any,
+        config: Optional[RunnableConfig],
+        trace_id: str,
+        user_id: str,
+        span: LangfuseSpan
+    ) -> Dict[str, Any]:
+        """执行实际的调用逻辑 (同步)"""
+        try:
+            processed_config, token_handler = self._get_callback_config(
+                config,
+                trace_id=trace_id,
+                user_id=user_id
+            )
+
+            adapted_input = self._adapt_input(input)
+            input_data = {"messages": adapted_input}
+
+            if span:
+                span.update_trace(input=input_data)
+
+            structured_result = self.retry_chain.invoke(
+                input_data,
+                config=processed_config
+            )
+
+            if span:
+                span.update_trace(output=structured_result)
+
+            token_usage = token_handler.usage_metadata
+            structured_result._token_usage_ = token_usage
+
+            return structured_result
+        except Exception as e:
+            SYLogger.error(f"同步LLM调用失败: {str(e)}", exc_info=True)
+            return None
+
+    async def _aexecute_chain(
+        self,
+        input: Any,
+        config: Optional[RunnableConfig],
+        trace_id: str,
+        user_id: str,
+        span: LangfuseSpan
+    ) -> Dict[str, Any]:
+        """执行实际的调用逻辑 (异步)"""
+        try:
+            processed_config, token_handler = self._get_callback_config(
+                config,
+                trace_id=trace_id,
+                user_id=user_id
+            )
+
+            adapted_input = self._adapt_input(input)
+            input_data = {"messages": adapted_input}
+
+            if span:
+                span.update_trace(input=input_data)
+
+            structured_result = await self.retry_chain.ainvoke(
+                input_data,
+                config=processed_config
+            )
+
+            if span:
+                span.update_trace(output=structured_result)
+
+            token_usage = token_handler.usage_metadata
+            structured_result._token_usage_ = token_usage
+
+            return structured_result
+        except Exception as e:
+            SYLogger.error(f"异步LLM调用失败: {str(e)}", exc_info=True)
+            return None
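
StructuredRunnableWithToken accepts a list of messages, a single BaseMessage, a plain string, or a {"input": ...} dict, and falls back to running the chain without tracing whenever no Langfuse client is configured or the span fails to open. A rough caller-side sketch; the names my_chain and the prompt text are illustrative only, not part of the package:

# Hypothetical sketch; my_chain is any Runnable producing a parsed Pydantic object.
runnable = StructuredRunnableWithToken(retry_chain=my_chain, langfuse=None)  # tracing disabled

result = runnable.invoke("请从这段文本中提取字段 ...")   # a str is wrapped into a HumanMessage
if result is not None:                                  # invoke returns None on failure
    print(result._token_usage_)                         # usage collected by TokensCallbackHandler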
sycommon/llm/sy_langfuse.py
ADDED
@@ -0,0 +1,103 @@
+import os
+from typing import Tuple, List, Optional, Any, Dict
+from langfuse import Langfuse, get_client
+from sycommon.config.Config import Config, SingletonMeta
+from sycommon.logging.kafka_log import SYLogger
+from langfuse.langchain import CallbackHandler
+from sycommon.tools.env import get_env_var
+from sycommon.tools.merge_headers import get_header_value
+
+
+class LangfuseInitializer(metaclass=SingletonMeta):
+    """
+    Langfuse 初始化管理器
+    """
+
+    def __init__(self):
+        self._langfuse_client: Optional[Langfuse] = None
+        self._base_callbacks: List[Any] = []
+
+        # 执行初始化
+        self._initialize()
+
+    def _initialize(self):
+        """执行实际的配置读取和组件创建"""
+        try:
+            config_dict = Config().config
+
+            server_name = config_dict.get('Name', '')
+            langfuse_configs = config_dict.get('LangfuseConfig', [])
+            environment = config_dict.get('Nacos', {}).get('namespaceId', '')
+
+            # 3. 查找匹配的配置项
+            target_config = next(
+                (item for item in langfuse_configs if item.get(
+                    'name') == server_name), None
+            )
+
+            # 4. 如果启用且配置存在,初始化 Langfuse
+            if target_config and target_config.get('enable', False):
+                # 设置环境变量
+                os.environ["LANGFUSE_SECRET_KEY"] = target_config.get(
+                    'secretKey', '')
+                os.environ["LANGFUSE_PUBLIC_KEY"] = target_config.get(
+                    'publicKey', '')
+                os.environ["LANGFUSE_BASE_URL"] = target_config.get(
+                    'baseUrl', '')
+                os.environ["LANGFUSE_TRACING_ENVIRONMENT"] = environment
+                os.environ["OTEL_SERVICE_NAME"] = server_name
+
+                self._langfuse_client = get_client()
+
+                langfuse_handler = CallbackHandler()
+                self._base_callbacks.append(langfuse_handler)
+
+                SYLogger.info(f"Langfuse 初始化成功 [Service: {server_name}]")
+            else:
+                SYLogger.info(f"Langfuse 未启用或未找到匹配配置 [Service: {server_name}]")
+
+        except Exception as e:
+            SYLogger.error(f"Langfuse 初始化异常: {str(e)}", exc_info=True)
+
+    @property
+    def callbacks(self) -> List[Any]:
+        """获取回调列表"""
+        return self._base_callbacks
+
+    @property
+    def metadata(self) -> Dict[str, Any]:
+        """动态生成包含 langfuse_session_id 和 langfuse_user_id 的 metadata"""
+        trace_id = SYLogger.get_trace_id()
+        userid = get_header_value(
+            SYLogger.get_headers(), "x-userid-header")
+        syVersion = get_header_value(
+            SYLogger.get_headers(), "s-y-version")
+        user_id = userid or syVersion or get_env_var('VERSION')
+        metadata_config = {
+            "langfuse_session_id": trace_id,
+            "langfuse_user_id": user_id,
+        }
+
+        return metadata_config
+
+    @property
+    def client(self) -> Optional[Langfuse]:
+        """获取 Langfuse 原生客户端实例"""
+        return self._langfuse_client
+
+    @property
+    def config(self) -> Dict[str, Any]:
+        return {
+            "callbacks": self.callbacks,
+            "metadata": self.metadata,
+        }
+
+    def get_components(self) -> Tuple[List[Any], Optional[Langfuse]]:
+        """获取 Langfuse 组件"""
+        return list(self._base_callbacks), self._langfuse_client
+
+    @staticmethod
+    def get() -> Tuple[List[Any], Optional[Langfuse]]:
+        """一句话获取组件"""
+        initializer = LangfuseInitializer()
+        return initializer.get_components()
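
LangfuseInitializer reads the service name from Name, the tracing environment from Nacos.namespaceId, and picks the entry in LangfuseConfig whose name matches. A sketch of the configuration shape it expects; only the key names come from the code above, all values are placeholders:

# Placeholder values; key names taken from LangfuseInitializer._initialize.
config = {
    "Name": "my-service",
    "Nacos": {"namespaceId": "dev"},
    "LangfuseConfig": [
        {
            "name": "my-service",   # must match Name for the entry to be picked up
            "enable": True,         # False (or no match) leaves the client unset
            "secretKey": "sk-...",
            "publicKey": "pk-...",
            "baseUrl": "https://langfuse.example.com",
        }
    ],
}

callbacks, client = LangfuseInitializer.get()  # empty callbacks / None client when disabled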
sycommon/llm/usage_token.py
ADDED
@@ -0,0 +1,117 @@
+from typing import Type, List, Optional, Callable
+from langfuse import Langfuse
+from langchain_core.language_models import BaseChatModel
+from langchain_core.runnables import Runnable, RunnableLambda
+from langchain_core.output_parsers import PydanticOutputParser
+from langchain_core.messages import BaseMessage, HumanMessage
+from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
+from pydantic import BaseModel, ValidationError, Field
+from sycommon.llm.struct_token import StructuredRunnableWithToken
+
+
+class LLMWithAutoTokenUsage(BaseChatModel):
+    """自动为结构化调用返回token_usage的LLM包装类"""
+    llm: BaseChatModel = Field(default=None)
+    langfuse: Optional[Langfuse] = Field(default=None, exclude=True)
+
+    def __init__(self, llm: BaseChatModel, langfuse: Langfuse, **kwargs):
+        super().__init__(llm=llm, langfuse=langfuse, **kwargs)
+
+    def with_structured_output(
+        self,
+        output_model: Type[BaseModel],
+        max_retries: int = 3,
+        is_extract: bool = False,
+        override_prompt: ChatPromptTemplate = None,
+        custom_processors: Optional[List[Callable[[str], str]]] = None,
+        custom_parser: Optional[Callable[[str], BaseModel]] = None
+    ) -> Runnable:
+        """返回支持自动统计Token的结构化Runnable"""
+        parser = PydanticOutputParser(pydantic_object=output_model)
+
+        # 提示词模板
+        accuracy_instructions = """
+        字段值的抽取准确率(0~1之间),评分规则:
+        1.0(完全准确):直接从原文提取,无需任何加工,且格式与原文完全一致
+        0.9(轻微处理):数据来源明确,但需进行格式标准化或冗余信息剔除(不改变原始数值)
+        0.8(有限推断):数据需通过上下文关联或简单计算得出,仍有明确依据
+        0.8以下(不可靠):数据需大量推测、存在歧义或来源不明,处理方式:直接忽略该数据,设置为None
+        """
+
+        if is_extract:
+            prompt = ChatPromptTemplate.from_messages([
+                MessagesPlaceholder(variable_name="messages"),
+                HumanMessage(content=f"""
+                请提取信息并遵循以下规则:
+                1. 准确率要求:{accuracy_instructions.strip()}
+                2. 输出格式:{parser.get_format_instructions()}
+                """)
+            ])
+        else:
+            prompt = override_prompt or ChatPromptTemplate.from_messages([
+                MessagesPlaceholder(variable_name="messages"),
+                HumanMessage(content=f"""
+                输出格式:{parser.get_format_instructions()}
+                """)
+            ])
+
+        # 文本处理函数
+        def extract_response_content(response: BaseMessage) -> str:
+            try:
+                return response.content
+            except Exception as e:
+                raise ValueError(f"提取响应内容失败:{str(e)}") from e
+
+        def strip_code_block_markers(content: str) -> str:
+            try:
+                return content.strip("```json").strip("```").strip()
+            except Exception as e:
+                raise ValueError(f"移除代码块标记失败:{str(e)}") from e
+
+        def normalize_in_json(content: str) -> str:
+            try:
+                return content.replace("None", "null").replace("none", "null").replace("NONE", "null").replace("''", '""')
+            except Exception as e:
+                raise ValueError(f"JSON格式化失败:{str(e)}") from e
+
+        def default_parse_to_pydantic(content: str) -> BaseModel:
+            try:
+                return parser.parse(content)
+            except (ValidationError, ValueError) as e:
+                raise ValueError(f"解析结构化结果失败:{str(e)}") from e
+
+        # ========== 构建处理链 ==========
+        base_chain = prompt | self.llm | RunnableLambda(
+            extract_response_content)
+
+        # 文本处理链
+        process_runnables = custom_processors or [
+            RunnableLambda(strip_code_block_markers),
+            RunnableLambda(normalize_in_json)
+        ]
+        process_chain = base_chain
+        for runnable in process_runnables:
+            process_chain = process_chain | runnable
+
+        # 解析链
+        parse_chain = process_chain | RunnableLambda(
+            custom_parser or default_parse_to_pydantic)
+
+        # 重试链
+        retry_chain = parse_chain.with_retry(
+            retry_if_exception_type=(ValidationError, ValueError),
+            stop_after_attempt=max_retries,
+            wait_exponential_jitter=True,
+            exponential_jitter_params={
+                "initial": 0.1, "max": 3.0, "exp_base": 2.0, "jitter": 1.0}
+        )
+
+        return StructuredRunnableWithToken(retry_chain, self.langfuse)
+
+    # ========== 实现BaseChatModel抽象方法 ==========
+    def _generate(self, messages, stop=None, run_manager=None, ** kwargs):
+        return self.llm._generate(messages, stop=stop, run_manager=run_manager, ** kwargs)
+
+    @property
+    def _llm_type(self) -> str:
+        return self.llm._llm_type
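
LLMWithAutoTokenUsage delegates generation to the wrapped chat model and only overrides with_structured_output, which builds a prompt → LLM → text-cleanup → Pydantic-parse chain with retries and returns a StructuredRunnableWithToken. A rough usage sketch; the Invoice model and chat_model are hypothetical and not part of the package:

# Hypothetical example; Invoice and chat_model are illustrative only.
from pydantic import BaseModel

class Invoice(BaseModel):
    number: str
    amount: float

wrapped = LLMWithAutoTokenUsage(llm=chat_model, langfuse=None)
extractor = wrapped.with_structured_output(Invoice, is_extract=True, max_retries=3)

invoice = extractor.invoke("发票号码:A123,金额:88.5 元")
print(invoice, getattr(invoice, "_token_usage_", None))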
sycommon/rabbitmq/rabbitmq_client.py
CHANGED
@@ -27,6 +27,7 @@ class RabbitMQClient:
         exchange_name: str = "system.topic.exchange",
         exchange_type: str = "topic",
         queue_name: Optional[str] = None,
+        app_name: Optional[str] = None,
         routing_key: str = "#",
         durable: bool = True,
         auto_delete: bool = False,
@@ -45,6 +46,7 @@ class RabbitMQClient:
             logger.warning(f"无效的exchange_type: {exchange_type},默认使用'topic'")
             self.exchange_type = ExchangeType.TOPIC
 
+        self.app_name = app_name.strip() if app_name else None
         self.queue_name = queue_name.strip() if queue_name else None
         self.routing_key = routing_key.strip() if routing_key else "#"
         self.durable = durable
@@ -104,7 +106,7 @@ class RabbitMQClient:
         logger.info(f"交换机重建成功: {self.exchange_name}")
 
         # 声明队列
-        if self.queue_name:
+        if self.queue_name and self.queue_name.endswith(f".{self.app_name}"):
             self._queue = await self._channel.declare_queue(
                 name=self.queue_name,
                 durable=self.durable,
@@ -181,7 +183,7 @@ class RabbitMQClient:
             await self._rebuild_resources()
 
             # --- 阶段3: 恢复消费 ---
-            if was_consuming and self._message_handler and self.queue_name:
+            if was_consuming and self._message_handler and self.queue_name and self.queue_name.endswith(f".{self.app_name}"):
                 logger.info("🔄 检测到重连前处于消费状态,尝试自动恢复...")
                 try:
                     self._queue = await self._channel.declare_queue(
@@ -270,7 +272,6 @@ class RabbitMQClient:
                 routing_key=message.routing_key,
                 delivery_tag=message.delivery_tag,
                 traceId=message.headers.get("trace-id"),
-                headers=message.headers
             )
 
             SYLogger.set_trace_id(msg_obj.traceId)
@@ -317,7 +318,7 @@ class RabbitMQClient:
             await self.connect()
 
         if not self._queue:
-            if self.queue_name:
+            if self.queue_name and self.queue_name.endswith(f".{self.app_name}"):
                 self._queue = await self._channel.declare_queue(
                     name=self.queue_name,
                     durable=self.durable,
@@ -391,7 +392,7 @@ class RabbitMQClient:
 
             result = await self._exchange.publish(
                 message=message,
-                routing_key=self.routing_key
+                routing_key=self.routing_key,
                 mandatory=True,
                 timeout=5.0
             )
sycommon/rabbitmq/rabbitmq_service_client_manager.py
CHANGED
@@ -100,13 +100,18 @@ class RabbitMQClientManager(RabbitMQCoreService):
             f"是否创建队列: {create_if_not_exists}"
         )
 
+        final_queue_name = None
+        if create_if_not_exists and processed_queue_name.endswith(f".{app_name}"):
+            final_queue_name = processed_queue_name
+
         # 创建客户端实例
         client = RabbitMQClient(
             connection_pool=cls._connection_pool,
             exchange_name=cls._config.get(
                 'exchange_name', "system.topic.exchange"),
             exchange_type=kwargs.get('exchange_type', "topic"),
-            queue_name=
+            queue_name=final_queue_name,
+            app_name=app_name,
             routing_key=kwargs.get(
                 'routing_key',
                 f"{queue_name.split('.')[0]}.#"
sycommon/services.py
CHANGED
@@ -19,24 +19,25 @@ class Services(metaclass=SingletonMeta):
     _loop: Optional[asyncio.AbstractEventLoop] = None
     _config: Optional[dict] = None
     _initialized: bool = False
-    _registered_senders: List[str] = []
     _instance: Optional['Services'] = None
     _app: Optional[FastAPI] = None
     _user_lifespan: Optional[Callable] = None
     _shutdown_lock: asyncio.Lock = asyncio.Lock()
 
-    # 用于存储待执行的异步数据库初始化任务
-    _pending_async_db_setup: List[Tuple[Callable, str]] = []
-
     def __init__(self, config: dict, app: FastAPI):
+        super().__init__()
         if not Services._config:
             Services._config = config
         Services._instance = self
         Services._app = app
+
+        # 在实例初始化时定义变量,防止类变量污染
+        self._pending_async_db_setup: List[Tuple[Callable, str]] = []
+
         self._init_event_loop()
 
     def _init_event_loop(self):
-        """
+        """初始化事件循环"""
         if not Services._loop:
             try:
                 Services._loop = asyncio.get_running_loop()
@@ -63,14 +64,20 @@ class Services(metaclass=SingletonMeta):
         setup_logger_levels()
         cls._app = app
         cls._config = config
+        # 保存原始的用户 lifespan
         cls._user_lifespan = app.router.lifespan_context
 
         applications.get_swagger_ui_html = custom_swagger_ui_html
         applications.get_redoc_html = custom_redoc_html
 
         if not cls._config:
-
-
+            try:
+                with open('app.yaml', 'r', encoding='utf-8') as f:
+                    config = yaml.safe_load(f)
+                    cls._config = config
+            except FileNotFoundError:
+                logging.warning("未找到 app.yaml,将使用空配置启动")
+                cls._config = {}
 
         app.state.config = {
             "host": cls._config.get('Host', '0.0.0.0'),
@@ -80,97 +87,95 @@ class Services(metaclass=SingletonMeta):
         }
 
         if middleware:
-            middleware(app,
+            middleware(app, cls._config)
 
         if nacos_service:
-            nacos_service(
+            nacos_service(cls._config)
 
         if logging_service:
-            logging_service(
+            logging_service(cls._config)
 
-        # 设置sentry
         sy_sentry_init()
 
-        # ========== 处理数据库服务 ==========
-        # 清空之前的待执行列表(防止热重载时重复)
-        cls._pending_async_db_setup = []
-
-        if database_service:
-            # 解析配置并区分同步/异步
-            items = [database_service] if isinstance(
-                database_service, tuple) else database_service
-            for item in items:
-                db_setup_func, db_name = item
-                if asyncio.iscoroutinefunction(db_setup_func):
-                    # 如果是异步函数,加入待执行列表
-                    logging.info(f"检测到异步数据库服务: {db_name},将在应用启动时初始化")
-                    cls._pending_async_db_setup.append(item)
-                else:
-                    # 如果是同步函数,立即执行
-                    logging.info(f"执行同步数据库服务: {db_name}")
-                    try:
-                        db_setup_func(config, db_name)
-                    except Exception as e:
-                        logging.error(
-                            f"同步数据库服务 {db_name} 初始化失败: {e}", exc_info=True)
-                        raise
-
-        # 创建组合生命周期管理器
         @asynccontextmanager
-        async def combined_lifespan(
-            #
-            instance = cls(config,
-
-            # ========== 执行挂起的异步数据库初始化 ==========
-            if cls._pending_async_db_setup:
-                logging.info("开始执行异步数据库初始化...")
-                for db_setup_func, db_name in cls._pending_async_db_setup:
-                    try:
-                        await db_setup_func(config, db_name)
-                        logging.info(f"异步数据库服务 {db_name} 初始化成功")
-                    except Exception as e:
-                        logging.error(
-                            f"异步数据库服务 {db_name} 初始化失败: {e}", exc_info=True)
-                        raise
-
-            # ========== 初始化 MQ ==========
-            has_valid_listeners = bool(
-                rabbitmq_listeners and len(rabbitmq_listeners) > 0)
-            has_valid_senders = bool(
-                rabbitmq_senders and len(rabbitmq_senders) > 0)
+        async def combined_lifespan(app_instance: FastAPI) -> AsyncGenerator[None, None]:
+            # 获取 Services 实例
+            instance = cls(config, app_instance)
 
             try:
-
-
-
-
-
-
-
-
-
-
-
-
+                # 1. 处理数据库服务
+                if database_service:
+                    instance._pending_async_db_setup = []
+
+                    items = [database_service] if isinstance(
+                        database_service, tuple) else database_service
+                    for item in items:
+                        db_setup_func, db_name = item
+                        if asyncio.iscoroutinefunction(db_setup_func):
+                            logging.info(f"注册异步数据库服务: {db_name}")
+                            instance._pending_async_db_setup.append(item)
+                        else:
+                            logging.info(f"执行同步数据库服务: {db_name}")
+                            try:
+                                db_setup_func(config, db_name)
+                            except Exception as e:
+                                logging.error(
+                                    f"同步数据库服务 {db_name} 初始化失败: {e}", exc_info=True)
+                                raise
+
+                # 2. 执行挂起的异步数据库初始化
+                if instance._pending_async_db_setup:
+                    logging.info("开始执行异步数据库初始化...")
+                    for db_setup_func, db_name in instance._pending_async_db_setup:
+                        try:
+                            await db_setup_func(config, db_name)
+                            logging.info(f"异步数据库服务 {db_name} 初始化成功")
+                        except Exception as e:
+                            logging.error(
+                                f"异步数据库服务 {db_name} 初始化失败: {e}", exc_info=True)
+                            raise
+
+                # 3. 初始化 MQ
+                has_valid_listeners = bool(
+                    rabbitmq_listeners and len(rabbitmq_listeners) > 0)
+                has_valid_senders = bool(
+                    rabbitmq_senders and len(rabbitmq_senders) > 0)
+
+                try:
+                    if has_valid_listeners or has_valid_senders:
+                        await instance._setup_mq_async(
+                            rabbitmq_listeners=rabbitmq_listeners if has_valid_listeners else None,
+                            rabbitmq_senders=rabbitmq_senders if has_valid_senders else None,
+                            has_listeners=has_valid_listeners,
+                            has_senders=has_valid_senders
+                        )
+                        cls._initialized = True
+                        logging.info("Services初始化完成")
+                except Exception as e:
+                    logging.error(f"MQ初始化失败: {str(e)}", exc_info=True)
+                    raise
 
-
+                app_instance.state.services = instance
 
-
-
-
+                # 4. 执行用户定义的生命周期
+                if cls._user_lifespan:
+                    async with cls._user_lifespan(app_instance):
+                        yield
+                else:
                     yield
-                else:
-                    yield
 
-
-
-
+            except Exception:
+                # 如果启动过程中发生任何异常,确保进入 shutdown
+                logging.error("启动阶段发生异常,准备执行清理...")
+                raise
+            finally:
+                # 无论成功或失败,都会执行关闭逻辑
+                await cls.shutdown()
+                logging.info("Services已关闭")
 
         app.router.lifespan_context = combined_lifespan
         return app
 
-    # 移除了 _setup_database_static,因为逻辑已内联到 plugins 中
-
     async def _setup_mq_async(
         self,
         rabbitmq_listeners: Optional[List[RabbitMQListenerConfig]] = None,
@@ -186,12 +191,21 @@ class Services(metaclass=SingletonMeta):
         RabbitMQService.init(self._config, has_listeners, has_senders)
 
         start_time = asyncio.get_event_loop().time()
-
-
-
-
+        timeout = 30  # 超时时间秒
+
+        # 等待连接池初始化
+        while not (RabbitMQService._connection_pool and RabbitMQService._connection_pool._initialized) \
+                and not RabbitMQService._is_shutdown:
+            if asyncio.get_event_loop().time() - start_time > timeout:
+                logging.error("RabbitMQ连接池初始化超时")
+                raise TimeoutError(f"RabbitMQ连接池初始化超时({timeout}秒)")
+
+            logging.debug("等待RabbitMQ连接池初始化...")
            await asyncio.sleep(0.5)
 
+        if RabbitMQService._is_shutdown:
+            raise RuntimeError("RabbitMQService 在初始化期间被关闭")
+
         if has_senders and rabbitmq_senders:
             if has_listeners and rabbitmq_listeners:
                 for sender in rabbitmq_senders:
@@ -211,11 +225,8 @@ class Services(metaclass=SingletonMeta):
 
     async def _setup_senders_async(self, rabbitmq_senders, has_listeners: bool):
         """设置发送器"""
-        Services._registered_senders = [
-            sender.queue_name for sender in rabbitmq_senders]
         await RabbitMQService.setup_senders(rabbitmq_senders, has_listeners)
-
-        logging.info(f"已注册的RabbitMQ发送器: {Services._registered_senders}")
+        logging.info(f"RabbitMQ发送器注册完成")
 
     async def _setup_listeners_async(self, rabbitmq_listeners, has_senders: bool):
         """设置监听器"""
@@ -240,14 +251,12 @@ class Services(metaclass=SingletonMeta):
 
         for attempt in range(max_retries):
             try:
-
-                cls._registered_senders = RabbitMQService._sender_client_names
-                if queue_name not in cls._registered_senders:
-                    raise ValueError(f"发送器 {queue_name} 未注册")
-
+                # 依赖 RabbitMQService 的内部状态
                 sender = await RabbitMQService.get_sender(queue_name)
+
                 if not sender:
-                    raise ValueError(
+                    raise ValueError(
+                        f"发送器 '{queue_name}' 不存在或未在 RabbitMQService 中注册")
 
                 await RabbitMQService.send_message(data, queue_name, **kwargs)
                 logging.info(f"消息发送成功(尝试 {attempt+1}/{max_retries})")
@@ -269,7 +278,20 @@ class Services(metaclass=SingletonMeta):
         if RabbitMQService._is_shutdown:
             logging.info("RabbitMQService已关闭,无需重复操作")
             return
-
+
+        try:
+            await RabbitMQService.shutdown()
+        except Exception as e:
+            logging.error(f"关闭 RabbitMQService 时发生异常: {e}", exc_info=True)
+
         cls._initialized = False
-
+
+        # 清理实例数据
+        if cls._instance:
+            cls._instance._pending_async_db_setup.clear()
+
+        # 这对于热重载(reload)时防止旧实例内存泄漏至关重要
+        if cls._app:
+            cls._app.state.services = None
+
         logging.info("所有服务已关闭")
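
The rewritten combined_lifespan runs database setup and the RabbitMQ bootstrap (with a 30-second wait on the connection pool) before entering the user-supplied lifespan, so application startup code sees fully initialized services, and shutdown() now always runs from the finally block even when startup fails. A minimal sketch of the nesting pattern using plain FastAPI primitives; this is not the library's own registration API:

# Generic illustration of the lifespan nesting used above; names here are not from sycommon.
from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def user_lifespan(app: FastAPI):
    print("user startup")    # runs after Services' own startup steps
    yield
    print("user shutdown")   # runs before Services.shutdown() in the finally block

app = FastAPI(lifespan=user_lifespan)
# The sycommon setup classmethod (not shown in this diff) then wraps user_lifespan
# inside combined_lifespan by replacing app.router.lifespan_context.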
sycommon/synacos/nacos_client_base.py
CHANGED
@@ -2,6 +2,7 @@ import threading
 import time
 from typing import Optional
 import nacos
+from sycommon.config.Config import Config
 from sycommon.logging.kafka_log import SYLogger
 
 
@@ -94,8 +95,9 @@ class NacosClientBase:
 
         try:
             namespace_id = self.nacos_config['namespaceId']
+            service_name = Config().config.get('Name', '')
             self.nacos_client.list_naming_instance(
-                service_name=
+                service_name=service_name,
                 namespace_id=namespace_id,
                 group_name="DEFAULT_GROUP",
                 healthy_only=True
{sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/RECORD
RENAMED
@@ -1,6 +1,6 @@
 command/cli.py,sha256=bP2LCLkRvfETIwWkVD70q5xFxMI4D3BpH09Ws1f-ENc,5849
 sycommon/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sycommon/services.py,sha256=
+sycommon/services.py,sha256=F1fwBqVd7luywJjXeK7rF_7FzBePstyEfxXBR_5o04Q,12330
 sycommon/config/Config.py,sha256=L4vlGsVFL1ZHEULxvE8-VyLF-wDBuOMZGmWXIldqfn8,4014
 sycommon/config/DatabaseConfig.py,sha256=ILiUuYT9_xJZE2W-RYuC3JCt_YLKc1sbH13-MHIOPhg,804
 sycommon/config/EmbeddingConfig.py,sha256=gPKwiDYbeu1GpdIZXMmgqM7JqBIzCXi0yYuGRLZooMI,362
@@ -20,9 +20,12 @@ sycommon/health/metrics.py,sha256=fHqO73JuhoZkNPR-xIlxieXiTCvttq-kG-tvxag1s1s,26
 sycommon/health/ping.py,sha256=FTlnIKk5y1mPfS1ZGOeT5IM_2udF5aqVLubEtuBp18M,250
 sycommon/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sycommon/llm/embedding.py,sha256=HknwDqXmRQcAZ8-6d8wZ6n7Bv7HtxTajDt1vvzHGeFQ,8411
-sycommon/llm/get_llm.py,sha256=
+sycommon/llm/get_llm.py,sha256=C48gt9GCwEpR26M-cUjM74_t-el18ZvlwpGhcQfR3gs,1054
 sycommon/llm/llm_logger.py,sha256=n4UeNy_-g4oHQOsw-VUzF4uo3JVRLtxaMp1FcI8FiEo,5437
 sycommon/llm/llm_tokens.py,sha256=-udDyFcmyzx6UAwIi6_d_wwI5kMd5w0-WcS2soVPQxg,4309
+sycommon/llm/struct_token.py,sha256=jlpZnTOLDmRDdrCuxZe-1pQopd6OmCM9B_gWZ48CnEQ,7655
+sycommon/llm/sy_langfuse.py,sha256=NZv6ydfn3-cxqQvuB5WdnM9GYliO9qB_RWh_XqIS3VU,3692
+sycommon/llm/usage_token.py,sha256=n0hytuaHI4tJi6wuOS3bd-yWzQjZ-lx5w9egHs8uYgg,5140
 sycommon/logging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sycommon/logging/async_sql_logger.py,sha256=_OY36XkUm__U3NhMgiecy-qd-nptZ_0gpE3J8lGAr58,2619
 sycommon/logging/kafka_log.py,sha256=gfOqdZe0HJ3PkIFfnNWG4DZVadxsCKJ6AmelR7_Z1Xs,9960
@@ -48,10 +51,10 @@ sycommon/models/mqsend_config.py,sha256=NQX9dc8PpuquMG36GCVhJe8omAW1KVXXqr6lSRU6
 sycommon/models/sso_user.py,sha256=i1WAN6k5sPcPApQEdtjpWDy7VrzWLpOrOQewGLGoGIw,2702
 sycommon/notice/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sycommon/notice/uvicorn_monitor.py,sha256=VryQYcAtjijJuGDBimbVurgwxlsLaLtkNnABPDY5Tao,7332
-sycommon/rabbitmq/rabbitmq_client.py,sha256=
+sycommon/rabbitmq/rabbitmq_client.py,sha256=hAbLOioU_clucJ9xq88Oo-waZOuU0ii4yBVGIjz1nBE,17992
 sycommon/rabbitmq/rabbitmq_pool.py,sha256=BiFQgZPzSAFR-n5XhyIafoeWQXETF_31nFRDhMbe6aU,15577
 sycommon/rabbitmq/rabbitmq_service.py,sha256=XSHo9HuIJ_lq-vizRh4xJVdZr_2zLqeLhot09qb0euA,2025
-sycommon/rabbitmq/rabbitmq_service_client_manager.py,sha256=
+sycommon/rabbitmq/rabbitmq_service_client_manager.py,sha256=IP9TMFeG5LSrwFPEmOy1ce4baPxBUZnWJZR3nN_-XR4,8009
 sycommon/rabbitmq/rabbitmq_service_connection_monitor.py,sha256=uvoMuJDzJ9i63uVRq1NKFV10CvkbGnTMyEoq2rgjQx8,3013
 sycommon/rabbitmq/rabbitmq_service_consumer_manager.py,sha256=489r1RKd5WrTNMAcWCxUZpt9yWGrNunZlLCCp-M_rzM,11497
 sycommon/rabbitmq/rabbitmq_service_core.py,sha256=6RMvIf78DmEOZmN8dA0duA9oy4ieNswdGrOeyJdD6tU,4753
@@ -66,7 +69,7 @@ sycommon/synacos/example.py,sha256=61XL03tU8WTNOo3FUduf93F2fAwah1S0lbH1ufhRhRk,5
 sycommon/synacos/example2.py,sha256=adUaru3Hy482KrOA17DfaC4nwvLj8etIDS_KrWLWmCU,4811
 sycommon/synacos/feign.py,sha256=frB3D5LeFDtT3pJLFOwFzEOrNAJKeQNGk-BzUg9T3WM,8295
 sycommon/synacos/feign_client.py,sha256=ExO7Pd5B3eFKDjXqBRc260K1jkI49IYguLwJJaD2R-o,16166
-sycommon/synacos/nacos_client_base.py,sha256=
+sycommon/synacos/nacos_client_base.py,sha256=KZgQAg9Imfr_TfM-4LXdtrnTdJ-beu6bcNJa0c2HauE,4600
 sycommon/synacos/nacos_config_manager.py,sha256=Cff-4gpp0aD7sQVi-nEvDO4BWqK9abEDDDJ9qXKFQgs,4399
 sycommon/synacos/nacos_heartbeat_manager.py,sha256=G80_pOn37WdO_HpYUiAfpwMqAxW0ff0Bnw0NEuge9v0,5568
 sycommon/synacos/nacos_service.py,sha256=BezQ1eDIYwBPE567Po_Qh1Ki_z9WmhZy1J1NiTPbdHY,6118
@@ -79,8 +82,8 @@ sycommon/tools/env.py,sha256=Ah-tBwG2C0_hwLGFebVQgKdWWXCjTzBuF23gCkLHYy4,2437
 sycommon/tools/merge_headers.py,sha256=u9u8_1ZIuGIminWsw45YJ5qnsx9MB-Fot0VPge7itPw,4941
 sycommon/tools/snowflake.py,sha256=xQlYXwYnI85kSJ1rZ89gMVBhzemP03xrMPVX9vVa3MY,9228
 sycommon/tools/timing.py,sha256=OiiE7P07lRoMzX9kzb8sZU9cDb0zNnqIlY5pWqHcnkY,2064
-sycommon_python_lib-0.1.
-sycommon_python_lib-0.1.
-sycommon_python_lib-0.1.
-sycommon_python_lib-0.1.
-sycommon_python_lib-0.1.
+sycommon_python_lib-0.1.57.dist-info/METADATA,sha256=qQp7G8uJ3kBSlZNbOTs3CVq4yoUa5PCfPHq0IFxxsOA,7299
+sycommon_python_lib-0.1.57.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+sycommon_python_lib-0.1.57.dist-info/entry_points.txt,sha256=q_h2nbvhhmdnsOUZEIwpuoDjaNfBF9XqppDEmQn9d_A,46
+sycommon_python_lib-0.1.57.dist-info/top_level.txt,sha256=98CJ-cyM2WIKxLz-Pf0AitWLhJyrfXvyY8slwjTXNuc,17
+sycommon_python_lib-0.1.57.dist-info/RECORD,,
{sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/WHEEL
RENAMED
File without changes

{sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/entry_points.txt
RENAMED
File without changes

{sycommon_python_lib-0.1.56b18.dist-info → sycommon_python_lib-0.1.57.dist-info}/top_level.txt
RENAMED
File without changes