sycommon-python-lib 0.1.15__tar.gz → 0.1.17__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sycommon-python-lib might be problematic. Click here for more details.

Files changed (59) hide show
  1. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/PKG-INFO +1 -1
  2. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/pyproject.toml +1 -1
  3. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/logging/kafka_log.py +1 -1
  4. sycommon_python_lib-0.1.17/src/sycommon/rabbitmq/rabbitmq_client.py +668 -0
  5. sycommon_python_lib-0.1.17/src/sycommon/rabbitmq/rabbitmq_pool.py +104 -0
  6. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/rabbitmq/rabbitmq_service.py +88 -85
  7. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/services.py +18 -17
  8. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon_python_lib.egg-info/PKG-INFO +1 -1
  9. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon_python_lib.egg-info/SOURCES.txt +1 -0
  10. sycommon_python_lib-0.1.15/src/sycommon/rabbitmq/rabbitmq_client.py +0 -901
  11. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/README.md +0 -0
  12. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/setup.cfg +0 -0
  13. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/command/cli.py +0 -0
  14. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/__init__.py +0 -0
  15. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/Config.py +0 -0
  16. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/DatabaseConfig.py +0 -0
  17. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/EmbeddingConfig.py +0 -0
  18. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/LLMConfig.py +0 -0
  19. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/MQConfig.py +0 -0
  20. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/RerankerConfig.py +0 -0
  21. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/config/__init__.py +0 -0
  22. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/database/base_db_service.py +0 -0
  23. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/database/database_service.py +0 -0
  24. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/health/__init__.py +0 -0
  25. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/health/health_check.py +0 -0
  26. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/health/ping.py +0 -0
  27. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/logging/__init__.py +0 -0
  28. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/logging/logger_wrapper.py +0 -0
  29. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/__init__.py +0 -0
  30. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/context.py +0 -0
  31. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/cors.py +0 -0
  32. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/docs.py +0 -0
  33. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/exception.py +0 -0
  34. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/middleware.py +0 -0
  35. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/monitor_memory.py +0 -0
  36. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/mq.py +0 -0
  37. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/timeout.py +0 -0
  38. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/middleware/traceid.py +0 -0
  39. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/__init__.py +0 -0
  40. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/base_http.py +0 -0
  41. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/log.py +0 -0
  42. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/mqlistener_config.py +0 -0
  43. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/mqmsg_model.py +0 -0
  44. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/mqsend_config.py +0 -0
  45. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/models/sso_user.py +0 -0
  46. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/sse/__init__.py +0 -0
  47. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/sse/event.py +0 -0
  48. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/sse/sse.py +0 -0
  49. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/synacos/__init__.py +0 -0
  50. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/synacos/feign.py +0 -0
  51. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/synacos/nacos_service.py +0 -0
  52. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/tools/__init__.py +0 -0
  53. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/tools/docs.py +0 -0
  54. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/tools/snowflake.py +0 -0
  55. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon/tools/timing.py +0 -0
  56. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon_python_lib.egg-info/dependency_links.txt +0 -0
  57. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon_python_lib.egg-info/entry_points.txt +0 -0
  58. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon_python_lib.egg-info/requires.txt +0 -0
  59. {sycommon_python_lib-0.1.15 → sycommon_python_lib-0.1.17}/src/sycommon_python_lib.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sycommon-python-lib
3
- Version: 0.1.15
3
+ Version: 0.1.17
4
4
  Summary: Add your description here
5
5
  Requires-Python: >=3.10
6
6
  Description-Content-Type: text/markdown
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "sycommon-python-lib"
3
- version = "0.1.15"
3
+ version = "0.1.17"
4
4
  description = "Add your description here"
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.10"
@@ -72,7 +72,7 @@ class KafkaLogger(metaclass=SingletonMeta):
72
72
  connections_max_idle_ms=540000, # 连接最大空闲时间
73
73
  reconnect_backoff_max_ms=10000, # 增加重连退避最大时间
74
74
  max_in_flight_requests_per_connection=1, # 限制单个连接上未确认的请求数量
75
- enable_idempotence=True, # 开启幂等性
75
+ # enable_idempotence=True, # 开启幂等性
76
76
  )
77
77
 
78
78
  # 启动后台发送线程
@@ -0,0 +1,668 @@
1
+ import asyncio
2
+ import json
3
+ from typing import Callable, Coroutine, Optional, Dict, Any, Union, Set
4
+ from aio_pika import Message, DeliveryMode, ExchangeType
5
+ from aio_pika.abc import (
6
+ AbstractChannel,
7
+ AbstractExchange,
8
+ AbstractQueue,
9
+ AbstractIncomingMessage,
10
+ ConsumerTag,
11
+ )
12
+ from aiormq.exceptions import ChannelInvalidStateError, ConnectionClosed
13
+
14
+ from sycommon.logging.kafka_log import SYLogger
15
+ from sycommon.models.mqmsg_model import MQMsgModel
16
+ from sycommon.rabbitmq.rabbitmq_pool import RabbitMQConnectionPool
17
+
18
# Upper bound on retries, shared by message redelivery and queue binding.
MAX_RETRY_COUNT = 3

# Module-level alias for the project logging facade.
logger = SYLogger
22
+
23
+
24
class RabbitMQClient:
    """
    RabbitMQ client backed by a connection pool, supporting multi-node
    cluster configuration.

    Provides automatic failover, connection recovery and message
    reliability guarantees.
    """
29
+
30
def __init__(
    self,
    connection_pool: RabbitMQConnectionPool,
    exchange_name: str = "system.topic.exchange",
    exchange_type: str = "topic",
    queue_name: Optional[str] = None,
    routing_key: str = "#",
    durable: bool = True,
    auto_delete: bool = False,
    auto_parse_json: bool = True,
    create_if_not_exists: bool = True,
    connection_timeout: int = 10,
    rpc_timeout: int = 10,
    reconnection_delay: int = 1,
    max_reconnection_attempts: int = 5,
    prefetch_count: int = 2,
    consumption_stall_threshold: int = 10
):
    """
    Initialise the RabbitMQ client on top of a shared connection pool.

    :param connection_pool: pool instance supplying connections/channels
    :param exchange_name: target exchange name
    :param exchange_type: exchange type string, validated via ExchangeType
    :param queue_name: optional queue to consume from
    :param routing_key: binding/publish routing key (defaults to '#')
    :param durable: declare exchange/queue as durable
    :param auto_delete: auto-delete exchange/queue when unused
    :param auto_parse_json: parse incoming bodies as JSON when possible
    :param create_if_not_exists: declare missing exchange/queue instead of failing
    :param connection_timeout: connect timeout in seconds
    :param rpc_timeout: timeout for broker RPCs (declare/bind/cancel)
    :param reconnection_delay: delay between reconnect attempts
    :param max_reconnection_attempts: reconnect attempt cap
    :param prefetch_count: per-channel QoS prefetch
    :param consumption_stall_threshold: seconds without a processed message
        before the watchdog restarts the consumer
    """
    # Pool dependency.
    self.connection_pool = connection_pool

    # Exchange / queue topology.
    self.exchange_name = exchange_name
    # Raises ValueError for an unknown exchange type string.
    self.exchange_type = ExchangeType(exchange_type)
    self.queue_name = queue_name
    self.routing_key = routing_key
    self.durable = durable
    self.auto_delete = auto_delete

    # Behaviour switches.
    self.auto_parse_json = auto_parse_json
    self.create_if_not_exists = create_if_not_exists
    self.connection_timeout = connection_timeout
    self.rpc_timeout = rpc_timeout
    self.prefetch_count = prefetch_count

    # Reconnection policy.
    self.reconnection_delay = reconnection_delay
    self.max_reconnection_attempts = max_reconnection_attempts

    # Consumption watchdog tuning.
    self.consumption_stall_threshold = consumption_stall_threshold

    # Live resources obtained from the pool.
    self.channel: Optional[AbstractChannel] = None
    self.exchange: Optional[AbstractExchange] = None
    self.queue: Optional[AbstractQueue] = None

    # State tracking.
    self.actual_queue_name: Optional[str] = None
    self._exchange_exists = False
    self._queue_exists = False
    self._queue_bound = False
    self._is_consuming = False
    self._closed = False
    self._consumer_tag: Optional[ConsumerTag] = None
    # NOTE(review): asyncio.get_event_loop() outside a running loop is
    # deprecated; these use the loop's monotonic clock — confirm the client
    # is always constructed inside a running loop.
    self._last_activity_timestamp = asyncio.get_event_loop().time()
    self._last_message_processed = asyncio.get_event_loop().time()

    # Background tasks and the user-supplied message handler.
    self.message_handler: Optional[Callable[
        [Union[Dict[str, Any], str], AbstractIncomingMessage],
        Coroutine[Any, Any, None]
    ]] = None
    self._consuming_task: Optional[asyncio.Task] = None
    self._reconnect_task: Optional[asyncio.Task] = None
    self._keepalive_task: Optional[asyncio.Task] = None
    self._monitor_task: Optional[asyncio.Task] = None

    # IDs of messages currently being handled (duplicate suppression).
    self._processing_message_ids: Set[str] = set()
106
+
107
@property
def is_connected(self) -> bool:
    """True while the client is usable: not closed, channel open, exchange set."""
    if self._closed:
        return False
    if self.channel is None or self.channel.is_closed:
        return False
    return self.exchange is not None
114
+
115
def _update_activity_timestamp(self) -> None:
    """Record 'now' (loop monotonic clock) as the last broker activity."""
    self._last_activity_timestamp = asyncio.get_event_loop().time()
118
+
119
def _update_message_processed_timestamp(self) -> None:
    """Record 'now' (loop monotonic clock) as the last processed message time."""
    self._last_message_processed = asyncio.get_event_loop().time()
122
+
123
async def _get_channel(self) -> AbstractChannel:
    """
    Acquire a channel from the channel pool and keep it checked out.

    Bug fixed: the original did ``async with pool.acquire() as ch: return ch``,
    which exits the context manager — returning the channel to the pool —
    before the caller ever uses it.  The caller then held a channel that any
    other coroutine could concurrently acquire from the pool.  We now enter
    the pool's context manager manually and keep a reference to it, so the
    channel stays exclusively checked out until the next acquisition
    releases it.

    :return: a pooled channel held exclusively by this client
    :raises Exception: if the pool has not been initialised yet
    """
    if not self.connection_pool.channel_pool:
        raise Exception("连接池未初始化,请先调用init_pools")

    # Return any previously held channel to the pool (best effort — the
    # channel or pool may already be closed).
    prev_cm = getattr(self, "_held_channel_cm", None)
    if prev_cm is not None:
        try:
            await prev_cm.__aexit__(None, None, None)
        except Exception:
            pass
        self._held_channel_cm = None

    cm = self.connection_pool.channel_pool.acquire()
    channel = await cm.__aenter__()
    # Holding the context manager keeps the channel checked out of the pool.
    self._held_channel_cm = cm
    return channel
131
+
132
async def _check_exchange_exists(self, channel: AbstractChannel) -> bool:
    """
    Probe for the exchange with a passive declare.

    NOTE(review): a failed passive declare closes the channel on the broker
    side — confirm callers always reacquire a channel after a False result.
    """
    try:
        await asyncio.wait_for(
            channel.declare_exchange(
                name=self.exchange_name,
                type=self.exchange_type,
                passive=True
            ),
            timeout=self.rpc_timeout
        )
    except Exception:
        return False
    return True
147
+
148
async def _check_queue_exists(self, channel: AbstractChannel) -> bool:
    """
    Probe for the configured queue with a passive declare.

    Returns False immediately when no queue name is configured, and on any
    declare failure (timeout, missing queue, closed channel).
    """
    if not self.queue_name:
        return False
    try:
        await asyncio.wait_for(
            channel.declare_queue(
                name=self.queue_name,
                passive=True
            ),
            timeout=self.rpc_timeout
        )
    except Exception:
        return False
    return True
164
+
165
async def _bind_queue(self, channel: AbstractChannel, queue: AbstractQueue, exchange: AbstractExchange) -> bool:
    """
    Bind *queue* to *exchange*, retrying up to MAX_RETRY_COUNT extra times.

    An empty routing key falls back to '#'.  Returns True on success,
    False once every attempt has failed.
    """
    bind_key = self.routing_key or '#'

    for attempt in range(MAX_RETRY_COUNT + 1):
        try:
            await asyncio.wait_for(
                queue.bind(exchange, routing_key=bind_key),
                timeout=self.rpc_timeout
            )
        except Exception as e:
            logger.warning(
                f"队列绑定失败(第{attempt+1}次尝试): {str(e)}")
            if attempt < MAX_RETRY_COUNT:
                await asyncio.sleep(1)
        else:
            logger.info(
                f"队列 '{self.queue_name}' 已绑定到交换机 '{self.exchange_name}',路由键: {bind_key}")
            return True
    return False
187
+
188
async def connect(self, force_reconnect: bool = False, declare_queue: bool = True) -> None:
    """
    Acquire pooled resources and set up the exchange/queue topology.

    :param force_reconnect: rebuild resources even if already connected
    :param declare_queue: also declare/bind the configured queue
    :raises Exception: after max_reconnection_attempts failed attempts
    """
    logger.debug(
        f"连接参数 - force_reconnect={force_reconnect}, "
        f"declare_queue={declare_queue}, create_if_not_exists={self.create_if_not_exists}"
    )

    # Already usable and no forced rebuild requested: nothing to do.
    if self.is_connected and not force_reconnect:
        return

    # A pending reconnect task would race with this attempt — cancel it.
    if self._reconnect_task and not self._reconnect_task.done():
        self._reconnect_task.cancel()

    # Reset topology bookkeeping before rebuilding.
    self._exchange_exists = False
    self._queue_exists = False
    self._queue_bound = False

    attempt = 0
    last_error: Optional[Exception] = None

    while attempt < self.max_reconnection_attempts:
        try:
            # Fresh channel from the pool, with QoS applied.
            self.channel = await self._get_channel()
            await self.channel.set_qos(prefetch_count=self.prefetch_count)

            # --- exchange ---
            if await self._check_exchange_exists(self.channel):
                self.exchange = await self.channel.get_exchange(self.exchange_name)
                logger.info(f"使用已存在的交换机 '{self.exchange_name}'")
            elif self.create_if_not_exists:
                self.exchange = await asyncio.wait_for(
                    self.channel.declare_exchange(
                        name=self.exchange_name,
                        type=self.exchange_type,
                        durable=self.durable,
                        auto_delete=self.auto_delete
                    ),
                    timeout=self.rpc_timeout
                )
                logger.info(f"已创建交换机 '{self.exchange_name}'")
            else:
                raise Exception(
                    f"交换机 '{self.exchange_name}' 不存在且不允许自动创建")

            # --- queue ---
            if declare_queue and self.queue_name:
                if await self._check_queue_exists(self.channel):
                    self.queue = await self.channel.get_queue(self.queue_name)
                    self.actual_queue_name = self.queue_name
                    logger.info(f"使用已存在的队列 '{self.queue_name}'")
                else:
                    if not self.create_if_not_exists:
                        raise Exception(
                            f"队列 '{self.queue_name}' 不存在且不允许自动创建")
                    self.queue = await asyncio.wait_for(
                        self.channel.declare_queue(
                            name=self.queue_name,
                            durable=self.durable,
                            auto_delete=self.auto_delete,
                            exclusive=False
                        ),
                        timeout=self.rpc_timeout
                    )
                    self.actual_queue_name = self.queue_name
                    logger.info(f"已创建队列 '{self.queue_name}'")

                if self.queue and self.exchange:
                    if not await self._bind_queue(self.channel, self.queue, self.exchange):
                        raise Exception(f"队列 '{self.queue_name}' 绑定到交换机失败")
            else:
                # Publisher-only mode: no queue to declare or bind.
                self.queue = None
                self.actual_queue_name = None
                logger.debug(f"跳过队列 '{self.queue_name}' 的声明和绑定")

            if not self.is_connected:
                raise Exception("连接验证失败,状态异常")

            # Resume consumption if this is a reconnect mid-consume.
            if self._is_consuming and self.message_handler:
                await self.start_consuming()

            # Background watchdog and keepalive.
            self._start_monitoring()
            self._start_keepalive()

            self._update_activity_timestamp()
            logger.info(f"RabbitMQ客户端初始化成功 (队列: {self.actual_queue_name})")
            return

        except Exception as e:
            last_error = e
            logger.warning(f"资源初始化失败: {str(e)},重试中...")
            # Drop the channel reference; the pool context handles release.
            self.channel = None
            attempt += 1
            if attempt < self.max_reconnection_attempts:
                await asyncio.sleep(self.reconnection_delay)

    logger.error(f"最终初始化失败: {str(last_error)}")
    raise Exception(
        f"经过{self.max_reconnection_attempts}次重试后仍无法初始化客户端。最后错误: {str(last_error)}")
308
+
309
def _start_monitoring(self) -> None:
    """Spawn the single background watchdog for channel health and stalls."""
    if self._closed or (self._monitor_task and not self._monitor_task.done()):
        return

    async def _watchdog():
        # Runs until the client is closed or loses its channel reference.
        while not self._closed and self.channel:
            try:
                # Dead channel: rebuild it and re-check immediately.
                if self.channel.is_closed:
                    logger.warning("检测到通道已关闭,尝试重建")
                    await self._recreate_channel()
                    continue

                # Consumer stall: no message processed within the threshold
                # -> bounce the consumer.
                if self._is_consuming and self.message_handler:
                    now = asyncio.get_event_loop().time()
                    if now - self._last_message_processed > self.consumption_stall_threshold:
                        await self.stop_consuming()
                        await asyncio.sleep(1)
                        await self.start_consuming()
            except Exception as e:
                logger.error(f"监控任务出错: {str(e)}")
                await asyncio.sleep(1)

            await asyncio.sleep(5)  # poll every 5 seconds

    self._monitor_task = asyncio.create_task(_watchdog())
338
+
339
async def _recreate_channel(self) -> None:
    """
    Replace a dead channel with a fresh pooled one and restore exchange,
    queue binding and consumption; fall back to a full reconnect on failure.
    """
    try:
        self.channel = await self._get_channel()
        await self.channel.set_qos(prefetch_count=self.prefetch_count)

        # Re-resolve the exchange on the new channel.
        self.exchange = await self.channel.get_exchange(self.exchange_name)

        # Re-resolve and re-bind the queue, if one is configured.
        if self.queue_name:
            self.queue = await self.channel.get_queue(self.queue_name)
            if self.queue and self.exchange:
                await self._bind_queue(self.channel, self.queue, self.exchange)

        # Resume consumption if it was active before the channel died.
        if self._is_consuming and self.message_handler:
            await self.start_consuming()

        logger.info("通道已重建并恢复服务")
        self._update_activity_timestamp()
    except Exception as e:
        logger.error(f"通道重建失败: {str(e)},触发重连")
        await self.connect(force_reconnect=True)
363
+
364
def _start_keepalive(self) -> None:
    """Spawn the single keepalive task that pings the broker when idle."""
    if self._closed or (self._keepalive_task and not self._keepalive_task.done()):
        return

    async def _keepalive_loop():
        while not self._closed and self.is_connected:
            idle_limit = self.connection_pool.heartbeat * 1.5
            now = asyncio.get_event_loop().time()
            # Only probe when the connection has been idle too long.
            if now - self._last_activity_timestamp > idle_limit:
                logger.debug(
                    f"连接 {idle_limit}s 无活动,执行保活检查")
                try:
                    if self.channel.is_closed:
                        logger.warning("连接已关闭,触发重连")
                        await self.connect(force_reconnect=True)
                        return

                    # Lightweight passive declare keeps the link active.
                    await asyncio.wait_for(
                        self.channel.declare_exchange(
                            name=self.exchange_name,
                            type=self.exchange_type,
                            passive=True
                        ),
                        timeout=5
                    )
                    self._update_activity_timestamp()
                except Exception as e:
                    logger.warning(f"保活检查失败: {str(e)},触发重连")
                    await self.connect(force_reconnect=True)

            await asyncio.sleep(self.connection_pool.heartbeat / 2)

    self._keepalive_task = asyncio.create_task(_keepalive_loop())
399
+
400
async def _schedule_reconnect(self) -> None:
    """Queue a delayed reconnect attempt; single-flight per client."""
    if self._reconnect_task and not self._reconnect_task.done():
        return

    logger.info(f"将在 {self.reconnection_delay} 秒后尝试重新连接...")

    async def _delayed_reconnect():
        try:
            await asyncio.sleep(self.reconnection_delay)
            if not self._closed:
                await self.connect(force_reconnect=True)
        except Exception as e:
            logger.error(f"重连任务失败: {str(e)}")
            # Keep trying until the client is closed.
            if not self._closed:
                await self._schedule_reconnect()

    self._reconnect_task = asyncio.create_task(_delayed_reconnect())
418
+
419
async def close(self) -> None:
    """Shut the client down: cancel background tasks and drop pooled refs."""
    self._closed = True
    self._is_consuming = False

    # Cancel every background task and wait for each to unwind.
    for task in (self._keepalive_task, self._reconnect_task,
                 self._consuming_task, self._monitor_task):
        if task is None or task.done():
            continue
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    # Drop references; the pool owns the underlying resources.
    self.channel = None
    self.exchange = None
    self.queue = None
    self._consumer_tag = None
    self._processing_message_ids.clear()

    logger.info("RabbitMQ客户端已关闭")
442
+
443
async def publish(
    self,
    message_body: Union[str, Dict[str, Any]],
    routing_key: Optional[str] = None,
    content_type: str = "application/json",
    headers: Optional[Dict[str, Any]] = None,
    delivery_mode: DeliveryMode = DeliveryMode.PERSISTENT
) -> None:
    """
    Publish a message through a freshly pooled channel, with retries.

    :param message_body: dict (serialised as JSON) or any str-able payload
    :param routing_key: overrides the client routing key; '' falls back to '#'
    :param content_type: MIME type; forced to application/json for dict bodies
    :param headers: optional AMQP headers
    :param delivery_mode: persistence flag, persistent by default
    :raises Exception: when the broker never confirms after all retries
    """
    if not self.is_connected:
        logger.warning("连接已关闭,尝试重连后发布消息")
        await self.connect(force_reconnect=True)

    if not self.channel or not self.exchange:
        raise Exception("RabbitMQ连接未初始化")

    # Serialise dict payloads; correct a text/plain content type for JSON.
    if isinstance(message_body, dict):
        payload = json.dumps(message_body, ensure_ascii=False)
        if content_type == "text/plain":
            content_type = "application/json"
    else:
        payload = str(message_body)

    message = Message(
        body=payload.encode(),
        content_type=content_type,
        headers=headers or {},
        delivery_mode=delivery_mode
    )

    attempts = 0
    max_retries = 2
    while attempts < max_retries:
        try:
            # Publish on a dedicated pooled channel so the consumer
            # channel is never tied up by publishing.
            async with self.connection_pool.channel_pool.acquire() as publish_channel:
                exchange = await publish_channel.get_exchange(self.exchange_name)
                confirmed = await exchange.publish(
                    message,
                    routing_key=routing_key or self.routing_key or '#',
                    mandatory=True,
                    timeout=5.0
                )
                if not confirmed:
                    raise Exception("消息未被服务器确认接收")

                self._update_activity_timestamp()
                logger.debug(f"消息已发布到交换机 '{self.exchange_name}'")
                return
        except (ConnectionClosed, ChannelInvalidStateError):
            attempts += 1
            logger.warning(f"连接已关闭,尝试重连后重新发布 (重试次数: {attempts})")
            await self.connect(force_reconnect=True)
        except Exception as e:
            attempts += 1
            logger.error(f"消息发布失败 (重试次数: {attempts}): {str(e)}")
            if attempts < max_retries:
                await asyncio.sleep(1)

    raise Exception(f"消息发布失败,经过{attempts}次重试仍未成功")
506
+
507
# The consumption-related methods below keep the original logic and are
# merely adapted to the connection pool.
def set_message_handler(self, handler):
    """Register the coroutine invoked with (parsed_msg, raw_msg) per delivery."""
    self.message_handler = handler
510
+
511
async def start_consuming(self) -> ConsumerTag:
    """
    Begin consuming from the bound queue with manual acknowledgements.

    Idempotent: returns the existing consumer tag when already consuming.

    :return: the broker-assigned consumer tag
    :raises Exception: if the queue or handler is missing, or consume fails
    """
    if self._is_consuming:
        logger.debug("已经在消费中,返回现有consumer_tag")
        if self._consumer_tag:
            return self._consumer_tag
        raise Exception("消费已启动但未获取到consumer_tag")

    if not self.is_connected:
        await self.connect()

    if not self.queue:
        raise Exception("队列未初始化,无法开始消费")
    if not self.message_handler:
        raise Exception("未设置消息处理函数")

    self._is_consuming = True
    logger.info(f"开始消费队列: {self.actual_queue_name}")

    try:
        # no_ack=False: the wrapper acks/rejects each message explicitly.
        self._consumer_tag = await self.queue.consume(
            self._message_wrapper,
            no_ack=False
        )
        logger.info(
            f"消费者已启动,队列: {self.actual_queue_name}, tag: {self._consumer_tag}")
        return self._consumer_tag
    except Exception as e:
        self._is_consuming = False
        logger.error(f"启动消费失败: {str(e)}", exc_info=True)
        raise
543
+
544
async def _safe_cancel_consumer(self) -> bool:
    """Cancel the active consumer; log and return False instead of raising."""
    if not (self._consumer_tag and self.queue and self.channel):
        return True
    try:
        await asyncio.wait_for(
            self.queue.cancel(self._consumer_tag),
            timeout=self.rpc_timeout
        )
    except Exception as e:
        logger.error(f"取消消费者异常: {str(e)}")
        return False
    logger.info(f"消费者 {self._consumer_tag} 已取消")
    return True
558
+
559
async def stop_consuming(self) -> None:
    """Stop consuming, then wait for in-flight messages to drain."""
    if not self._is_consuming:
        return

    self._is_consuming = False

    if self._consumer_tag and self.queue:
        await self._safe_cancel_consumer()

    # Drain: poll until every in-flight message finishes (or client closes).
    if self._processing_message_ids:
        logger.info(
            f"等待 {len(self._processing_message_ids)} 个正在处理的消息完成...")
        while self._processing_message_ids and not self._closed:
            await asyncio.sleep(0.1)

    self._consumer_tag = None
    self._processing_message_ids.clear()

    logger.info(f"已停止消费队列: {self.actual_queue_name}")
580
+
581
async def _parse_message(self, message: AbstractIncomingMessage) -> Union[Dict[str, Any], str]:
    """
    Decode a message body and optionally parse it as JSON.

    Returns the parsed object when ``auto_parse_json`` is enabled and the
    body is valid JSON; otherwise the decoded string.  Never raises.

    Bug fixed: the original fallback handler called
    ``message.body.decode('utf-8')`` a second time, re-raising the very
    UnicodeDecodeError it was recovering from (and ``body_str`` could be
    unbound in the JSONDecodeError branch if decoding itself had failed).
    Undecodable bytes are now replaced instead of crashing the consumer.
    """
    body_str = message.body.decode('utf-8', errors='replace')
    self._update_activity_timestamp()

    if not self.auto_parse_json:
        return body_str
    try:
        return json.loads(body_str)
    except json.JSONDecodeError:
        logger.warning("消息解析JSON失败,返回原始字符串")
        return body_str
595
+
596
async def _message_wrapper(self, message: AbstractIncomingMessage) -> None:
    """
    Consumer callback: parse the delivery, dispatch it to the handler and
    ack, or re-publish with a bumped retry counter on failure.
    """
    if not self.message_handler or not self._is_consuming:
        logger.warning("未设置消息处理器或已停止消费")
        # NOTE(review): the message is neither acked nor rejected here, so
        # the broker will redeliver it — confirm this is intended.
        return

    message_id = message.message_id or str(id(message))
    if message_id in self._processing_message_ids:
        logger.warning(f"检测到重复处理的消息ID: {message_id},直接确认")
        await message.ack()
        return

    self._processing_message_ids.add(message_id)
    try:
        logger.debug(f"收到队列 {self.actual_queue_name} 的消息: {message_id}")

        parsed_data = await self._parse_message(message)
        # Assumes the payload parses into the MQMsgModel schema; a plain
        # string payload would make ** unpacking fail — TODO confirm.
        await self.message_handler(MQMsgModel(**parsed_data), message)

        await message.ack()
        self._update_activity_timestamp()
        self._update_message_processed_timestamp()
        logger.debug(f"消息 {message_id} 处理完成并确认")
    except Exception as e:
        current_headers = message.headers or {}
        retry_count = current_headers.get('x-retry-count', 0) + 1

        logger.error(
            f"消息 {message_id} 处理出错(第{retry_count}次重试): {str(e)}",
            exc_info=True
        )

        # Give up after the retry budget is exhausted: ack to drop it.
        if retry_count >= MAX_RETRY_COUNT:
            logger.error(
                f"消息 {message_id} 已达到最大重试次数({MAX_RETRY_COUNT}次),标记为失败")
            await message.ack()
            self._update_activity_timestamp()
            return

        retry_headers = dict(current_headers)
        retry_headers['x-retry-count'] = retry_count

        retry_message = Message(
            body=message.body,
            content_type=message.content_type,
            headers=retry_headers,
            delivery_mode=message.delivery_mode
        )

        # Drop the failed delivery, then re-publish a fresh copy carrying
        # the incremented retry counter.
        await message.reject(requeue=False)

        if self.exchange:
            await self.exchange.publish(
                retry_message,
                routing_key=self.routing_key or '#',
                mandatory=True,
                timeout=5.0
            )
            self._update_activity_timestamp()
            logger.info(f"消息 {message_id} 已重新发布,当前重试次数: {retry_count}")
    finally:
        self._processing_message_ids.discard(message_id)
662
+
663
async def __aenter__(self):
    """Async context entry: connect and return the client itself."""
    await self.connect()
    return self
666
+
667
async def __aexit__(self, exc_type, exc, tb):
    """Async context exit: always close the client, even on error."""
    await self.close()