sycommon-python-lib 0.1.57b1__py3-none-any.whl → 0.1.57b4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sycommon/llm/embedding.py +236 -72
- sycommon/rabbitmq/rabbitmq_client.py +43 -23
- sycommon/synacos/nacos_client_base.py +3 -1
- {sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/METADATA +1 -1
- {sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/RECORD +8 -8
- {sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/WHEEL +0 -0
- {sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/entry_points.txt +0 -0
- {sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/top_level.txt +0 -0
sycommon/llm/embedding.py
CHANGED
@@ -1,8 +1,7 @@
 import asyncio
-import json
 import aiohttp
-
-
+import atexit
+from typing import Union, List, Optional, Dict
 from sycommon.config.Config import SingletonMeta
 from sycommon.config.EmbeddingConfig import EmbeddingConfig
 from sycommon.config.RerankerConfig import RerankerConfig
@@ -23,12 +22,113 @@ class Embedding(metaclass=SingletonMeta):
         self.reranker_base_url = RerankerConfig.from_config(
             self.default_reranker_model).baseUrl

+        # [Fix] Cache config URLs to avoid re-reading the config file under high concurrency
+        self._embedding_url_cache: Dict[str, str] = {
+            self.default_embedding_model: self.embeddings_base_url
+        }
+        self._reranker_url_cache: Dict[str, str] = {
+            self.default_reranker_model: self.reranker_base_url
+        }
+
+        # [Fix] Cache each model's vector dimension, used to build fallback zero vectors
+        self._model_dim_cache: Dict[str, int] = {}
+
         # Concurrency semaphore
         self.semaphore = asyncio.Semaphore(self.max_concurrency)
-        # Global default timeout: never time out (None)
         self.default_timeout = aiohttp.ClientTimeout(total=None)

-    …
+        # Core optimization: create a globally reusable ClientSession (connection pool reuse)
+        self.session = None
+        # Retry configuration (adjust as needed)
+        self.max_retry_attempts = 3  # Maximum number of retries
+        self.retry_wait_base = 0.5  # Base wait time (seconds)
+
+        # [Fix] Register an exit hook to make sure the connection pool is closed on shutdown
+        atexit.register(self._sync_close_session)
+
+    async def init_session(self):
+        """Initialize the global ClientSession (created only once)"""
+        if self.session is None or self.session.closed:
+            # Configure connection pool parameters for high concurrency
+            connector = aiohttp.TCPConnector(
+                limit=self.max_concurrency * 2,  # Max connections in the pool (about 2x the concurrency)
+                limit_per_host=self.max_concurrency,  # Max connections per host
+                ttl_dns_cache=300,  # DNS cache TTL
+                enable_cleanup_closed=True  # Automatically clean up closed connections
+            )
+            self.session = aiohttp.ClientSession(
+                connector=connector,
+                timeout=self.default_timeout
+            )
+
+    async def close_session(self):
+        """Close the global session (called on program exit)"""
+        if self.session and not self.session.closed:
+            await self.session.close()
+
+    def _sync_close_session(self):
+        """Synchronous wrapper around close_session, for atexit"""
+        # Note: atexit runs in the main thread; without a current event loop this may be limited,
+        # but it covers most normal exits. For asyncio programs an explicit cleanup call is recommended.
+        try:
+            loop = asyncio.get_event_loop()
+            if loop.is_running():
+                # If the loop is still running, schedule a task to close the session
+                loop.create_task(self.close_session())
+            else:
+                # If the loop has stopped, try to run it once
+                loop.run_until_complete(self.close_session())
+        except Exception:
+            # Silently ignore cleanup failures to avoid errors on exit
+            pass
+
+    async def _retry_request(self, func, *args, **kwargs):
+        """
+        Native async retry wrapper
+        Args:
+            func: async function to retry
+            *args: positional arguments
+            **kwargs: keyword arguments
+        Returns:
+            The function's result, or None if all retries fail
+        """
+        attempt = 0
+        while attempt < self.max_retry_attempts:
+            try:
+                return await func(*args, **kwargs)
+            except (aiohttp.ClientConnectionResetError, asyncio.TimeoutError, aiohttp.ClientError) as e:
+                attempt += 1
+                if attempt >= self.max_retry_attempts:
+                    SYLogger.error(
+                        f"Request failed after {attempt} retries: {str(e)}")
+                    return None
+                # Exponential backoff: 0.5s → 1s → 2s (capped at 5s)
+                wait_time = min(self.retry_wait_base * (2 ** (attempt - 1)), 5)
+                SYLogger.warning(
+                    f"Retry {func.__name__} (attempt {attempt}/{self.max_retry_attempts}): {str(e)}, wait {wait_time}s")
+                await asyncio.sleep(wait_time)
+            except Exception as e:
+                # Non-retryable exceptions return None directly
+                SYLogger.error(
+                    f"Non-retryable error in {func.__name__}: {str(e)}")
+                return None
+        return None
+
+    def _get_embedding_url(self, model: str) -> str:
+        """Get the embedding URL (cached)"""
+        if model not in self._embedding_url_cache:
+            self._embedding_url_cache[model] = EmbeddingConfig.from_config(
+                model).baseUrl
+        return self._embedding_url_cache[model]
+
+    def _get_reranker_url(self, model: str) -> str:
+        """Get the reranker URL (cached)"""
+        if model not in self._reranker_url_cache:
+            self._reranker_url_cache[model] = RerankerConfig.from_config(
+                model).baseUrl
+        return self._reranker_url_cache[model]
+
+    async def _get_embeddings_http_core(
         self,
         input: Union[str, List[str]],
         encoding_format: str = None,
@@ -36,13 +136,14 @@ class Embedding(metaclass=SingletonMeta):
         timeout: aiohttp.ClientTimeout = None,
         **kwargs
     ):
+        """Core embedding request logic (no retries; called via the retry wrapper)"""
+        await self.init_session()  # Make sure the session is initialized
         async with self.semaphore:
-            # Prefer the passed-in timeout, else the global default
             request_timeout = timeout or self.default_timeout
-
-            # Prefer the passed-in model name, else the default
             target_model = model or self.default_embedding_model
-
+
+            # [Fix] Get the URL from the cache
+            target_base_url = self._get_embedding_url(target_model)
             url = f"{target_base_url}/v1/embeddings"

             request_body = {
@@ -52,25 +153,33 @@ class Embedding(metaclass=SingletonMeta):
             }
             request_body.update(kwargs)

-            … (12 lines not preserved in this view)
-                    return None
-            except Exception as e:
-                SYLogger.error(
-                    f"Embedding request unexpected error (model: {target_model}): {str(e)}")
-                return None
+            # Reuse the global session
+            async with self.session.post(
+                url,
+                json=request_body,
+                timeout=request_timeout
+            ) as response:
+                if response.status != 200:
+                    error_detail = await response.text()
+                    SYLogger.error(
+                        f"Embedding request failed (model: {target_model}): {error_detail}")
+                    return None
+                return await response.json()

-    async def …
+    async def _get_embeddings_http_async(
+        self,
+        input: Union[str, List[str]],
+        encoding_format: str = None,
+        model: str = None,
+        timeout: aiohttp.ClientTimeout = None, ** kwargs
+    ):
+        """Public embedding request method (with retries)"""
+        return await self._retry_request(
+            self._get_embeddings_http_core,
+            input, encoding_format, model, timeout, ** kwargs
+        )
+
+    async def _get_reranker_http_core(
         self,
         documents: List[str],
         query: str,
@@ -79,16 +188,16 @@ class Embedding(metaclass=SingletonMeta):
         max_chunks_per_doc: Optional[int] = None,
         return_documents: Optional[bool] = True,
         return_len: Optional[bool] = True,
-        timeout: aiohttp.ClientTimeout = None,
-        **kwargs
+        timeout: aiohttp.ClientTimeout = None, ** kwargs
     ):
+        """Core reranker request logic (no retries; called via the retry wrapper)"""
+        await self.init_session()  # Make sure the session is initialized
         async with self.semaphore:
-            # Prefer the passed-in timeout, else the global default
             request_timeout = timeout or self.default_timeout
-
-            # Prefer the passed-in model name, else the default
             target_model = model or self.default_reranker_model
-
+
+            # [Fix] Get the URL from the cache
+            target_base_url = self._get_reranker_url(target_model)
             url = f"{target_base_url}/v1/rerank"

             request_body = {
@@ -99,27 +208,39 @@ class Embedding(metaclass=SingletonMeta):
                 "max_chunks_per_doc": max_chunks_per_doc,
                 "return_documents": return_documents,
                 "return_len": return_len,
-                "kwargs": json.dumps(kwargs),
             }
             request_body.update(kwargs)

-            … (17 lines not preserved in this view)
+            # Reuse the global session
+            async with self.session.post(
+                url,
+                json=request_body,
+                timeout=request_timeout
+            ) as response:
+                if response.status != 200:
+                    error_detail = await response.text()
+                    SYLogger.error(
+                        f"Rerank request failed (model: {target_model}): {error_detail}")
+                    return None
+                return await response.json()
+
+    async def _get_reranker_http_async(
+        self,
+        documents: List[str],
+        query: str,
+        top_n: Optional[int] = None,
+        model: str = None,
+        max_chunks_per_doc: Optional[int] = None,
+        return_documents: Optional[bool] = True,
+        return_len: Optional[bool] = True,
+        timeout: aiohttp.ClientTimeout = None, ** kwargs
+    ):
+        """Public reranker request method (with retries)"""
+        return await self._retry_request(
+            self._get_reranker_http_core,
+            documents, query, top_n, model, max_chunks_per_doc,
+            return_documents, return_len, timeout, **kwargs
+        )

     async def get_embeddings(
         self,
@@ -145,28 +266,70 @@ class Embedding(metaclass=SingletonMeta):
             SYLogger.warning(
                 f"Invalid timeout type: {type(timeout)}, must be int/float, use default timeout")

+        actual_model = model or self.default_embedding_model
+
         SYLogger.info(
-            f"Requesting embeddings for corpus: {corpus} (model: {…
-            … (7 lines not preserved in this view)
-        for …
-            … (8 lines not preserved in this view)
+            f"Requesting embeddings for corpus: {len(corpus)} items (model: {actual_model}, max_concurrency: {self.max_concurrency}, timeout: {timeout or 'None'})")
+
+        all_vectors = []
+
+        # [Fix] Process the corpus in chunks so a huge corpus does not blow up memory or spawn too many coroutines
+        # Handle at most max_concurrency * 2 requests per batch instead of creating hundreds of thousands of coroutines at once
+        batch_size = self.max_concurrency * 2
+
+        for i in range(0, len(corpus), batch_size):
+            batch_texts = corpus[i: i + batch_size]
+
+            # Pass the model name and timeout config to each async task
+            tasks = [self._get_embeddings_http_async(
+                text, model=model, timeout=request_timeout) for text in batch_texts]
+            results = await asyncio.gather(*tasks)
+
+            for result in results:
+                if result is None:
+                    # [Fix] Try the real dimension or the config as fallback instead of hard-coding 1024
+                    dim = self._model_dim_cache.get(actual_model)
+
+                    # If the dimension is not cached, try the config object (assuming the config class has a dimension attribute)
+                    if dim is None:
+                        try:
+                            config = EmbeddingConfig.from_config(actual_model)
+                            if hasattr(config, 'dimension'):
+                                dim = config.dimension
+                            else:
+                                # Last resort: if the config has no dimension either, a default is needed to avoid a crash
+                                # bge-large is usually 1024
+                                dim = 1024
+                                SYLogger.warning(
+                                    f"Cannot get dimension from config for {actual_model}, use default 1024")
+                        except Exception:
+                            dim = 1024
+
+                    zero_vector = [0.0] * dim
+                    all_vectors.append(zero_vector)
+                    SYLogger.warning(
+                        f"Embedding request failed, append zero vector ({dim}D) for model {actual_model}")
+                    continue
+
+                # Extract vectors from the response and update the dimension cache
+                # Normally result["data"] is a list
+                try:
+                    for item in result["data"]:
+                        embedding = item["embedding"]
+                        # [Fix] Learn and cache the dimension dynamically
+                        if actual_model not in self._model_dim_cache:
+                            self._model_dim_cache[actual_model] = len(
+                                embedding)
+                        all_vectors.append(embedding)
+                except (KeyError, TypeError) as e:
+                    SYLogger.error(f"Failed to parse embedding result: {e}")
+                    # Pad with zeros when parsing fails as well
+                    dim = self._model_dim_cache.get(actual_model, 1024)
+                    all_vectors.append([0.0] * dim)

         SYLogger.info(
-            f"Embeddings for corpus: {…
-        return …
+            f"Embeddings for corpus created: {len(all_vectors)} vectors (model: {actual_model})")
+        return all_vectors

     async def get_reranker(
         self,
@@ -194,11 +357,12 @@ class Embedding(metaclass=SingletonMeta):
             SYLogger.warning(
                 f"Invalid timeout type: {type(timeout)}, must be int/float, use default timeout")

+        actual_model = model or self.default_reranker_model
         SYLogger.info(
-            f"Requesting reranker for top_results: {top_results} (model: {…
+            f"Requesting reranker for top_results: {top_results} (model: {actual_model}, max_concurrency: {self.max_concurrency}, timeout: {timeout or 'None'})")

         data = await self._get_reranker_http_async(
             top_results, query, model=model, timeout=request_timeout)
         SYLogger.info(
-            f"Reranker for top_results…
+            f"Reranker for top_results completed (model: {actual_model})")
         return data
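For context on the new retry behaviour: `_retry_request` retries only connection and timeout errors, doubling a 0.5 s base wait per attempt (0.5 s → 1 s → 2 s, capped at 5 s) and giving up after three attempts, after which `get_embeddings` falls back to a zero vector. A minimal standalone sketch of the same backoff pattern — the `retry_async` and `flaky_call` names and the simulated error are illustrative, not part of this package:

```python
import asyncio
import random


async def retry_async(func, *args, max_attempts=3, wait_base=0.5, wait_cap=5.0, **kwargs):
    """Retry an async callable on transient errors with exponential backoff."""
    attempt = 0
    while attempt < max_attempts:
        try:
            return await func(*args, **kwargs)
        except (asyncio.TimeoutError, ConnectionError) as exc:
            attempt += 1
            if attempt >= max_attempts:
                print(f"giving up after {attempt} attempts: {exc}")
                return None
            # Same schedule as the diff: 0.5s -> 1s -> 2s, never more than 5s.
            wait = min(wait_base * (2 ** (attempt - 1)), wait_cap)
            print(f"attempt {attempt}/{max_attempts} failed ({exc}); retrying in {wait}s")
            await asyncio.sleep(wait)
    return None


async def flaky_call():
    # Illustrative stand-in for the HTTP request; fails most of the time.
    if random.random() < 0.7:
        raise ConnectionError("simulated network error")
    return {"data": [{"embedding": [0.0, 0.0, 0.0]}]}


if __name__ == "__main__":
    print(asyncio.run(retry_async(flaky_call)))
```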
sycommon/rabbitmq/rabbitmq_client.py
CHANGED
@@ -256,6 +256,9 @@ class RabbitMQClient:
         self._message_handler = handler

     async def _process_message_callback(self, message: AbstractIncomingMessage):
+        # Flag recording whether we need to retry (i.e. whether the business logic failed)
+        should_retry = False
+
         try:
             msg_obj: MQMsgModel
             if self.auto_parse_json:
@@ -264,8 +267,9 @@ class RabbitMQClient:
                     msg_obj = MQMsgModel(**body_dict)
                 except json.JSONDecodeError as e:
                     logger.error(f"JSON parsing failed: {e}")
-                    … (2 lines not preserved in this view)
+                    # Malformed message that cannot be processed: reject without retrying
+                    await message.reject(requeue=False)
+                    return  # Return here instead of falling through to the finally-based handling below
             else:
                 msg_obj = MQMsgModel(
                     body=message.body.decode("utf-8"),
@@ -279,32 +283,48 @@ class RabbitMQClient:
             if self._message_handler:
                 await self._message_handler(msg_obj, message)

-            …
+            # Reaching this point means the business logic succeeded
+            # await message.ack()
+            # The ack here was removed; acking is now handled uniformly in finally

         except Exception as e:
             logger.error(f"Message processing error: {e}", exc_info=True)
-            … (2 lines not preserved in this view)
+            # Business exception: mark for retry
+            should_retry = True

-            … (4 lines not preserved in this view)
-            headers…
-            … (3 lines not preserved in this view)
-                        headers=headers,
-                        content_type=message.content_type,
-                        delivery_mode=message.delivery_mode
-                    )
-                    # If this publish fails, the reconnect mechanism is triggered
-                    # But note we are in the callback thread here, so catch exceptions carefully
-                    await self._exchange.publish(new_msg, routing_key=message.routing_key)
-                    await message.ack()
-                except Exception as pub_err:
-                    logger.error(f"Retry publish failed: {pub_err}")
+        finally:
+            # [Core fix] No matter what happens, always give MQ a final answer
+            if should_retry:
+                headers = dict(message.headers) if message.headers else {}
+                current_retry = int(headers.get("x-retry-count", 0))
+
+                if current_retry >= 3:
+                    logger.warning(f"Retry limit exceeded, discarding message: {message.delivery_tag}")
                     await message.reject(requeue=False)
+                else:
+                    headers["x-retry-count"] = current_retry + 1
+                    try:
+                        new_msg = Message(
+                            body=message.body,
+                            headers=headers,
+                            content_type=message.content_type,
+                            delivery_mode=message.delivery_mode
+                        )
+                        # Publish a new message for the retry
+                        await self._exchange.publish(new_msg, routing_key=message.routing_key)
+
+                        # [Key] The new message was published successfully, so the old one can now be safely acked,
+                        # removing it from the queue and avoiding an infinite loop
+                        await message.ack()
+
+                    except Exception as pub_err:
+                        logger.error(f"Retry publish failed, the message will be lost: {pub_err}")
+                        # Publish failed and the retry cannot be delivered; drop the old message (or nack with requeue=True)
+                        # To avoid an infinite loop, reject(requeue=False) plus a dead-letter queue is usually recommended
+                        await message.reject(requeue=False)
+            else:
+                # Business logic succeeded: ack directly
+                await message.ack()

     async def start_consuming(self) -> Optional[ConsumerTag]:
         if self._closed:
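For context on the consumer change: a failed handler marks the message for retry, and the `finally` block reads `x-retry-count` from the headers (defaulting to 0), republishes a copy with the counter incremented while it is below 3, and rejects without requeue once the limit is reached; the original is acked only after the retry copy has been published. A small sketch of that decision rule with plain dictionaries — the `decide` helper is illustrative, not part of the package:

```python
from typing import Optional, Tuple

MAX_RETRIES = 3  # same limit the consumer applies before discarding a message


def decide(headers: Optional[dict]) -> Tuple[str, dict]:
    """Return 'republish' or 'reject' for a failed message, plus the headers for its retry copy."""
    headers = dict(headers) if headers else {}
    current_retry = int(headers.get("x-retry-count", 0))
    if current_retry >= MAX_RETRIES:
        # Retry budget exhausted: reject without requeue (ideally into a dead-letter queue).
        return "reject", headers
    # Publish a copy carrying the incremented counter, then ack the original message.
    headers["x-retry-count"] = current_retry + 1
    return "republish", headers


if __name__ == "__main__":
    hdrs: Optional[dict] = None
    for failure in range(5):
        action, hdrs = decide(hdrs)
        print(f"failure {failure}: {action} (x-retry-count={hdrs.get('x-retry-count')})")
```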
sycommon/synacos/nacos_client_base.py
CHANGED
@@ -2,6 +2,7 @@ import threading
 import time
 from typing import Optional
 import nacos
+from sycommon.config.Config import Config
 from sycommon.logging.kafka_log import SYLogger


@@ -94,8 +95,9 @@ class NacosClientBase:

         try:
             namespace_id = self.nacos_config['namespaceId']
+            service_name = Config().config.get('Name', '')
             self.nacos_client.list_naming_instance(
-                service_name=…
+                service_name=service_name,
                 namespace_id=namespace_id,
                 group_name="DEFAULT_GROUP",
                 healthy_only=True
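For context on the Nacos change: the instance probe now takes its service name from the shared config (`Config().config.get('Name', '')`), falling back to an empty string when `Name` is missing. A self-contained sketch of that lookup-and-probe flow — the `FakeNacosClient` stub, the `heartbeat_probe` helper, and the boolean outcome are assumptions for illustration; the real code uses the `nacos` SDK client and the library's `Config` singleton:

```python
class FakeNacosClient:
    """Stand-in for nacos.NacosClient, accepting the same keyword arguments as the diff's call."""

    def list_naming_instance(self, service_name, namespace_id, group_name, healthy_only):
        print(f"probe service={service_name!r} namespace={namespace_id} "
              f"group={group_name} healthy_only={healthy_only}")
        return {"hosts": []}


def heartbeat_probe(config: dict, nacos_config: dict, client) -> bool:
    """Guarded probe: empty-string fallback for the name, any exception treated as failure."""
    try:
        namespace_id = nacos_config['namespaceId']
        service_name = config.get('Name', '')  # mirrors Config().config.get('Name', '')
        client.list_naming_instance(
            service_name=service_name,
            namespace_id=namespace_id,
            group_name="DEFAULT_GROUP",
            healthy_only=True,
        )
        return True
    except Exception:
        return False


if __name__ == "__main__":
    print(heartbeat_probe({"Name": "demo-service"}, {"namespaceId": "public"}, FakeNacosClient()))
```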
{sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/RECORD
RENAMED
@@ -19,7 +19,7 @@ sycommon/health/health_check.py,sha256=EhfbhspRpQiKJaxdtE-PzpKQO_ucaFKtQxIm16F5M
 sycommon/health/metrics.py,sha256=fHqO73JuhoZkNPR-xIlxieXiTCvttq-kG-tvxag1s1s,268
 sycommon/health/ping.py,sha256=FTlnIKk5y1mPfS1ZGOeT5IM_2udF5aqVLubEtuBp18M,250
 sycommon/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sycommon/llm/embedding.py,sha256=…
+sycommon/llm/embedding.py,sha256=rasx8xBOq-mQdWZ5RSzpwjbkAKi7Da-FDWiTm-Ga2Bs,15606
 sycommon/llm/get_llm.py,sha256=C48gt9GCwEpR26M-cUjM74_t-el18ZvlwpGhcQfR3gs,1054
 sycommon/llm/llm_logger.py,sha256=n4UeNy_-g4oHQOsw-VUzF4uo3JVRLtxaMp1FcI8FiEo,5437
 sycommon/llm/llm_tokens.py,sha256=-udDyFcmyzx6UAwIi6_d_wwI5kMd5w0-WcS2soVPQxg,4309
@@ -51,7 +51,7 @@ sycommon/models/mqsend_config.py,sha256=NQX9dc8PpuquMG36GCVhJe8omAW1KVXXqr6lSRU6
 sycommon/models/sso_user.py,sha256=i1WAN6k5sPcPApQEdtjpWDy7VrzWLpOrOQewGLGoGIw,2702
 sycommon/notice/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sycommon/notice/uvicorn_monitor.py,sha256=VryQYcAtjijJuGDBimbVurgwxlsLaLtkNnABPDY5Tao,7332
-sycommon/rabbitmq/rabbitmq_client.py,sha256=…
+sycommon/rabbitmq/rabbitmq_client.py,sha256=PaO9shCi665MOuDXhSmRJtOBC-ayo9sD9U3rQq90Rqc,19071
 sycommon/rabbitmq/rabbitmq_pool.py,sha256=BiFQgZPzSAFR-n5XhyIafoeWQXETF_31nFRDhMbe6aU,15577
 sycommon/rabbitmq/rabbitmq_service.py,sha256=XSHo9HuIJ_lq-vizRh4xJVdZr_2zLqeLhot09qb0euA,2025
 sycommon/rabbitmq/rabbitmq_service_client_manager.py,sha256=IP9TMFeG5LSrwFPEmOy1ce4baPxBUZnWJZR3nN_-XR4,8009
@@ -69,7 +69,7 @@ sycommon/synacos/example.py,sha256=61XL03tU8WTNOo3FUduf93F2fAwah1S0lbH1ufhRhRk,5
 sycommon/synacos/example2.py,sha256=adUaru3Hy482KrOA17DfaC4nwvLj8etIDS_KrWLWmCU,4811
 sycommon/synacos/feign.py,sha256=frB3D5LeFDtT3pJLFOwFzEOrNAJKeQNGk-BzUg9T3WM,8295
 sycommon/synacos/feign_client.py,sha256=ExO7Pd5B3eFKDjXqBRc260K1jkI49IYguLwJJaD2R-o,16166
-sycommon/synacos/nacos_client_base.py,sha256=…
+sycommon/synacos/nacos_client_base.py,sha256=KZgQAg9Imfr_TfM-4LXdtrnTdJ-beu6bcNJa0c2HauE,4600
 sycommon/synacos/nacos_config_manager.py,sha256=Cff-4gpp0aD7sQVi-nEvDO4BWqK9abEDDDJ9qXKFQgs,4399
 sycommon/synacos/nacos_heartbeat_manager.py,sha256=G80_pOn37WdO_HpYUiAfpwMqAxW0ff0Bnw0NEuge9v0,5568
 sycommon/synacos/nacos_service.py,sha256=BezQ1eDIYwBPE567Po_Qh1Ki_z9WmhZy1J1NiTPbdHY,6118
@@ -82,8 +82,8 @@ sycommon/tools/env.py,sha256=Ah-tBwG2C0_hwLGFebVQgKdWWXCjTzBuF23gCkLHYy4,2437
 sycommon/tools/merge_headers.py,sha256=u9u8_1ZIuGIminWsw45YJ5qnsx9MB-Fot0VPge7itPw,4941
 sycommon/tools/snowflake.py,sha256=xQlYXwYnI85kSJ1rZ89gMVBhzemP03xrMPVX9vVa3MY,9228
 sycommon/tools/timing.py,sha256=OiiE7P07lRoMzX9kzb8sZU9cDb0zNnqIlY5pWqHcnkY,2064
-sycommon_python_lib-0.1.…
-sycommon_python_lib-0.1.…
-sycommon_python_lib-0.1.…
-sycommon_python_lib-0.1.…
-sycommon_python_lib-0.1.…
+sycommon_python_lib-0.1.57b4.dist-info/METADATA,sha256=DzUZnbSOLPma462MbLY8-WQAaj9wh9BjkB1eSGxzq1A,7301
+sycommon_python_lib-0.1.57b4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+sycommon_python_lib-0.1.57b4.dist-info/entry_points.txt,sha256=q_h2nbvhhmdnsOUZEIwpuoDjaNfBF9XqppDEmQn9d_A,46
+sycommon_python_lib-0.1.57b4.dist-info/top_level.txt,sha256=98CJ-cyM2WIKxLz-Pf0AitWLhJyrfXvyY8slwjTXNuc,17
+sycommon_python_lib-0.1.57b4.dist-info/RECORD,,
{sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/WHEEL
RENAMED
File without changes
{sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/entry_points.txt
RENAMED
File without changes
{sycommon_python_lib-0.1.57b1.dist-info → sycommon_python_lib-0.1.57b4.dist-info}/top_level.txt
RENAMED
File without changes