sycommon-python-lib 0.1.56b11__tar.gz → 0.1.56b13__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/PKG-INFO +1 -1
  2. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/pyproject.toml +1 -1
  3. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/llm/embedding.py +78 -23
  4. sycommon_python_lib-0.1.56b13/src/sycommon/rabbitmq/rabbitmq_client.py +448 -0
  5. sycommon_python_lib-0.1.56b13/src/sycommon/rabbitmq/rabbitmq_pool.py +398 -0
  6. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/rabbitmq/rabbitmq_service_core.py +2 -2
  7. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon_python_lib.egg-info/PKG-INFO +1 -1
  8. sycommon_python_lib-0.1.56b11/src/sycommon/rabbitmq/rabbitmq_client.py +0 -514
  9. sycommon_python_lib-0.1.56b11/src/sycommon/rabbitmq/rabbitmq_pool.py +0 -370
  10. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/README.md +0 -0
  11. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/setup.cfg +0 -0
  12. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/command/cli.py +0 -0
  13. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/__init__.py +0 -0
  14. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/Config.py +0 -0
  15. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/DatabaseConfig.py +0 -0
  16. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/EmbeddingConfig.py +0 -0
  17. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/LLMConfig.py +0 -0
  18. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/MQConfig.py +0 -0
  19. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/RerankerConfig.py +0 -0
  20. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/SentryConfig.py +0 -0
  21. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/config/__init__.py +0 -0
  22. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/database/async_base_db_service.py +0 -0
  23. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/database/async_database_service.py +0 -0
  24. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/database/base_db_service.py +0 -0
  25. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/database/database_service.py +0 -0
  26. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/health/__init__.py +0 -0
  27. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/health/health_check.py +0 -0
  28. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/health/metrics.py +0 -0
  29. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/health/ping.py +0 -0
  30. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/llm/__init__.py +0 -0
  31. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/llm/get_llm.py +0 -0
  32. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/llm/llm_logger.py +0 -0
  33. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/llm/llm_tokens.py +0 -0
  34. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/logging/__init__.py +0 -0
  35. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/logging/async_sql_logger.py +0 -0
  36. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/logging/kafka_log.py +0 -0
  37. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/logging/logger_levels.py +0 -0
  38. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/logging/logger_wrapper.py +0 -0
  39. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/logging/sql_logger.py +0 -0
  40. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/__init__.py +0 -0
  41. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/context.py +0 -0
  42. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/cors.py +0 -0
  43. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/docs.py +0 -0
  44. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/exception.py +0 -0
  45. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/middleware.py +0 -0
  46. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/monitor_memory.py +0 -0
  47. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/mq.py +0 -0
  48. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/timeout.py +0 -0
  49. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/middleware/traceid.py +0 -0
  50. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/__init__.py +0 -0
  51. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/base_http.py +0 -0
  52. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/log.py +0 -0
  53. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/mqlistener_config.py +0 -0
  54. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/mqmsg_model.py +0 -0
  55. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/mqsend_config.py +0 -0
  56. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/models/sso_user.py +0 -0
  57. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/notice/__init__.py +0 -0
  58. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/notice/uvicorn_monitor.py +0 -0
  59. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/rabbitmq/rabbitmq_service.py +0 -0
  60. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/rabbitmq/rabbitmq_service_client_manager.py +0 -0
  61. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/rabbitmq/rabbitmq_service_connection_monitor.py +0 -0
  62. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/rabbitmq/rabbitmq_service_consumer_manager.py +0 -0
  63. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/rabbitmq/rabbitmq_service_producer_manager.py +0 -0
  64. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/sentry/__init__.py +0 -0
  65. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/sentry/sy_sentry.py +0 -0
  66. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/services.py +0 -0
  67. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/sse/__init__.py +0 -0
  68. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/sse/event.py +0 -0
  69. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/sse/sse.py +0 -0
  70. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/__init__.py +0 -0
  71. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/example.py +0 -0
  72. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/example2.py +0 -0
  73. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/feign.py +0 -0
  74. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/feign_client.py +0 -0
  75. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/nacos_client_base.py +0 -0
  76. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/nacos_config_manager.py +0 -0
  77. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/nacos_heartbeat_manager.py +0 -0
  78. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/nacos_service.py +0 -0
  79. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/nacos_service_discovery.py +0 -0
  80. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/nacos_service_registration.py +0 -0
  81. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/synacos/param.py +0 -0
  82. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/tools/__init__.py +0 -0
  83. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/tools/docs.py +0 -0
  84. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/tools/env.py +0 -0
  85. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/tools/merge_headers.py +0 -0
  86. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/tools/snowflake.py +0 -0
  87. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon/tools/timing.py +0 -0
  88. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon_python_lib.egg-info/SOURCES.txt +0 -0
  89. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon_python_lib.egg-info/dependency_links.txt +0 -0
  90. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon_python_lib.egg-info/entry_points.txt +0 -0
  91. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon_python_lib.egg-info/requires.txt +0 -0
  92. {sycommon_python_lib-0.1.56b11 → sycommon_python_lib-0.1.56b13}/src/sycommon_python_lib.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sycommon-python-lib
-Version: 0.1.56b11
+Version: 0.1.56b13
 Summary: Add your description here
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown
pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "sycommon-python-lib"
-version = "0.1.56-beta11"
+version = "0.1.56-beta13"
 description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.11"
src/sycommon/llm/embedding.py
@@ -25,15 +25,21 @@ class Embedding(metaclass=SingletonMeta):
 
         # Concurrency semaphore
         self.semaphore = asyncio.Semaphore(self.max_concurrency)
+        # Global default timeout: never time out (None)
+        self.default_timeout = aiohttp.ClientTimeout(total=None)
 
     async def _get_embeddings_http_async(
         self,
         input: Union[str, List[str]],
         encoding_format: str = None,
         model: str = None,
+        timeout: aiohttp.ClientTimeout = None,
        **kwargs
     ):
         async with self.semaphore:
+            # Prefer the caller-supplied timeout; otherwise use the global default
+            request_timeout = timeout or self.default_timeout
+
             # Prefer the caller-supplied model name; otherwise use the default
             target_model = model or self.default_embedding_model
             target_base_url = EmbeddingConfig.from_config(target_model).baseUrl
@@ -46,14 +52,23 @@ class Embedding(metaclass=SingletonMeta):
             }
             request_body.update(kwargs)
 
-            async with aiohttp.ClientSession() as session:
-                async with session.post(url, json=request_body) as response:
-                    if response.status != 200:
-                        error_detail = await response.text()
-                        SYLogger.error(
-                            f"Embedding request failed (model: {target_model}): {error_detail}")
-                        return None
-                    return await response.json()
+            try:
+                async with aiohttp.ClientSession(timeout=request_timeout) as session:
+                    async with session.post(url, json=request_body) as response:
+                        if response.status != 200:
+                            error_detail = await response.text()
+                            SYLogger.error(
+                                f"Embedding request failed (model: {target_model}): {error_detail}")
+                            return None
+                        return await response.json()
+            except asyncio.TimeoutError:
+                SYLogger.error(
+                    f"Embedding request timeout (model: {target_model})")
+                return None
+            except Exception as e:
+                SYLogger.error(
+                    f"Embedding request unexpected error (model: {target_model}): {str(e)}")
+                return None
 
     async def _get_reranker_http_async(
         self,
@@ -64,9 +79,13 @@ class Embedding(metaclass=SingletonMeta):
         max_chunks_per_doc: Optional[int] = None,
         return_documents: Optional[bool] = True,
         return_len: Optional[bool] = True,
+        timeout: aiohttp.ClientTimeout = None,
         **kwargs
     ):
         async with self.semaphore:
+            # Prefer the caller-supplied timeout; otherwise use the global default
+            request_timeout = timeout or self.default_timeout
+
             # Prefer the caller-supplied model name; otherwise use the default
             target_model = model or self.default_reranker_model
             target_base_url = RerankerConfig.from_config(target_model).baseUrl
@@ -84,19 +103,29 @@ class Embedding(metaclass=SingletonMeta):
             }
             request_body.update(kwargs)
 
-            async with aiohttp.ClientSession() as session:
-                async with session.post(url, json=request_body) as response:
-                    if response.status != 200:
-                        error_detail = await response.text()
-                        SYLogger.error(
-                            f"Rerank request failed (model: {target_model}): {error_detail}")
-                        return None
-                    return await response.json()
+            try:
+                async with aiohttp.ClientSession(timeout=request_timeout) as session:
+                    async with session.post(url, json=request_body) as response:
+                        if response.status != 200:
+                            error_detail = await response.text()
+                            SYLogger.error(
+                                f"Rerank request failed (model: {target_model}): {error_detail}")
+                            return None
+                        return await response.json()
+            except asyncio.TimeoutError:
+                SYLogger.error(
+                    f"Rerank request timeout (model: {target_model})")
+                return None
+            except Exception as e:
+                SYLogger.error(
+                    f"Rerank request unexpected error (model: {target_model}): {str(e)}")
+                return None
 
     async def get_embeddings(
         self,
         corpus: List[str],
-        model: str = None
+        model: str = None,
+        timeout: Optional[Union[int, float]] = None
     ):
         """
         Get embedding vectors for a corpus; results preserve the order of the input corpus
@@ -104,12 +133,24 @@ class Embedding(metaclass=SingletonMeta):
         Args:
             corpus: list of texts to generate embeddings for
             model: optional; name of the embedding model to use, defaults to bge-large-zh-v1.5
+            timeout: optional; timeout in seconds:
+                - int/float: total timeout in seconds
+                - omitted/None: use the default never-time-out configuration
         """
+        request_timeout = None
+        if timeout is not None:
+            if isinstance(timeout, (int, float)):
+                request_timeout = aiohttp.ClientTimeout(total=timeout)
+            else:
+                SYLogger.warning(
+                    f"Invalid timeout type: {type(timeout)}, must be int/float, use default timeout")
+
         SYLogger.info(
-            f"Requesting embeddings for corpus: {corpus} (model: {model or self.default_embedding_model}, max_concurrency: {self.max_concurrency})")
-        # Pass the model name to each async task
+            f"Requesting embeddings for corpus: {corpus} (model: {model or self.default_embedding_model}, max_concurrency: {self.max_concurrency}, timeout: {timeout or 'None'})")
+
+        # Pass the model name and the timeout configuration to each async task
         tasks = [self._get_embeddings_http_async(
-            text, model=model) for text in corpus]
+            text, model=model, timeout=request_timeout) for text in corpus]
         results = await asyncio.gather(*tasks)
 
         vectors = []
@@ -131,7 +172,8 @@ class Embedding(metaclass=SingletonMeta):
         self,
         top_results: List[str],
         query: str,
-        model: str = None
+        model: str = None,
+        timeout: Optional[Union[int, float]] = None
     ):
         """
         Rerank search results
@@ -140,10 +182,23 @@ class Embedding(metaclass=SingletonMeta):
             top_results: list of texts to rerank
             query: query used as the ranking reference
             model: optional; name of the reranker model to use, defaults to bge-reranker-large
+            timeout: optional; timeout in seconds:
+                - int/float: total timeout in seconds
+                - omitted/None: use the default never-time-out configuration
         """
+        request_timeout = None
+        if timeout is not None:
+            if isinstance(timeout, (int, float)):
+                request_timeout = aiohttp.ClientTimeout(total=timeout)
+            else:
+                SYLogger.warning(
+                    f"Invalid timeout type: {type(timeout)}, must be int/float, use default timeout")
+
         SYLogger.info(
-            f"Requesting reranker for top_results: {top_results} (model: {model or self.default_reranker_model}, max_concurrency: {self.max_concurrency})")
-        data = await self._get_reranker_http_async(top_results, query, model=model)
+            f"Requesting reranker for top_results: {top_results} (model: {model or self.default_reranker_model}, max_concurrency: {self.max_concurrency}, timeout: {timeout or 'None'})")
+
+        data = await self._get_reranker_http_async(
+            top_results, query, model=model, timeout=request_timeout)
         SYLogger.info(
             f"Reranker for top_results: {top_results} completed (model: {model or self.default_reranker_model})")
         return data
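
Taken together, the embedding.py changes add an optional per-call timeout (in seconds) to both public methods, while the previous behaviour of never timing out remains the default. A minimal usage sketch, assuming the Embedding singleton has already been configured elsewhere (its constructor arguments are not part of this diff):

import asyncio

from sycommon.llm.embedding import Embedding


async def main():
    # SingletonMeta: returns the shared instance, assumed to be configured at startup
    embedding = Embedding()

    # New in this diff (0.1.56b11 -> 0.1.56b13): pass an int/float timeout per call;
    # omit it (or pass None) to keep the default never-time-out behaviour
    vectors = await embedding.get_embeddings(["first text", "second text"], timeout=30)
    reranked = await embedding.get_reranker(
        ["candidate a", "candidate b"], query="example query", timeout=10.5)
    print(vectors, reranked)


asyncio.run(main())

On timeout the internal request coroutine logs the asyncio.TimeoutError and returns None instead of raising, mirroring the existing handling of non-200 responses.
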
src/sycommon/rabbitmq/rabbitmq_client.py (new file)
@@ -0,0 +1,448 @@
+import asyncio
+import json
+from typing import Optional, Callable, Coroutine, Dict, Any, Union
+from aio_pika import Channel, Message, DeliveryMode, ExchangeType
+from aio_pika.abc import (
+    AbstractExchange,
+    AbstractQueue,
+    AbstractIncomingMessage,
+    ConsumerTag,
+    AbstractRobustConnection,
+)
+from sycommon.rabbitmq.rabbitmq_pool import RabbitMQConnectionPool
+from sycommon.logging.kafka_log import SYLogger
+from sycommon.models.mqmsg_model import MQMsgModel
+
+logger = SYLogger
+
+
+class RabbitMQClient:
+    """
+    RabbitMQ client
+    """
+
+    def __init__(
+        self,
+        connection_pool: RabbitMQConnectionPool,
+        exchange_name: str = "system.topic.exchange",
+        exchange_type: str = "topic",
+        queue_name: Optional[str] = None,
+        routing_key: str = "#",
+        durable: bool = True,
+        auto_delete: bool = False,
+        auto_parse_json: bool = True,
+        create_if_not_exists: bool = True,
+        **kwargs,
+    ):
+        self.connection_pool = connection_pool
+        if not self.connection_pool._initialized:
+            raise RuntimeError("Connection pool not initialized; call connection_pool.init_pools() first")
+
+        self.exchange_name = exchange_name.strip()
+        try:
+            self.exchange_type = ExchangeType(exchange_type.lower())
+        except ValueError:
+            logger.warning(f"Invalid exchange_type: {exchange_type}, defaulting to 'topic'")
+            self.exchange_type = ExchangeType.TOPIC
+
+        self.queue_name = queue_name.strip() if queue_name else None
+        self.routing_key = routing_key.strip() if routing_key else "#"
+        self.durable = durable
+        self.auto_delete = auto_delete
+        self.auto_parse_json = auto_parse_json
+        self.create_if_not_exists = create_if_not_exists
+
+        # Resource state
+        self._channel: Optional[Channel] = None
+        self._channel_conn: Optional[AbstractRobustConnection] = None
+        self._exchange: Optional[AbstractExchange] = None
+        self._queue: Optional[AbstractQueue] = None
+        self._consumer_tag: Optional[ConsumerTag] = None
+        self._message_handler: Optional[Callable[[
+            MQMsgModel, AbstractIncomingMessage], Coroutine[Any, Any, None]]] = None
+        self._closed = False
+
+        # Concurrency control
+        self._consume_lock = asyncio.Lock()
+        self._connect_lock = asyncio.Lock()
+
+        # Prevent concurrent reconnects from clobbering each other
+        self._connecting = False
+        self._connect_condition = asyncio.Condition()
+
+        self._conn_close_callback: Optional[Callable] = None
+        self._reconnect_semaphore = asyncio.Semaphore(1)
+        self._current_reconnect_task: Optional[asyncio.Task] = None
+        self._RECONNECT_INTERVAL = 15
+
+    @property
+    async def is_connected(self) -> bool:
+        if self._closed:
+            return False
+        try:
+            return (
+                self._channel and not self._channel.is_closed
+                and self._channel_conn and not self._channel_conn.is_closed
+                and self._exchange is not None
+                and (not self.queue_name or self._queue is not None)
+            )
+        except Exception:
+            return False
+
+    async def _rebuild_resources(self) -> None:
+        if not self._channel or self._channel.is_closed:
+            raise RuntimeError("No valid channel; cannot rebuild resources")
+
+        # Declare the exchange
+        self._exchange = await self._channel.declare_exchange(
+            name=self.exchange_name,
+            type=self.exchange_type,
+            durable=self.durable,
+            auto_delete=self.auto_delete,
+            passive=not self.create_if_not_exists,
+        )
+        logger.info(f"Exchange rebuilt: {self.exchange_name}")
+
+        # Declare the queue
+        if self.queue_name:
+            self._queue = await self._channel.declare_queue(
+                name=self.queue_name,
+                durable=self.durable,
+                auto_delete=self.auto_delete,
+                passive=not self.create_if_not_exists,
+            )
+            await self._queue.bind(exchange=self._exchange, routing_key=self.routing_key)
+            logger.info(f"Queue rebuilt: {self.queue_name}")
+
+    async def connect(self) -> None:
+        if self._closed:
+            raise RuntimeError("Client is closed; cannot reconnect")
+
+        # 1. Concurrency control: _connect_lock ensures only one coroutine runs the connect flow
+        async with self._connect_lock:
+            # If a connect is already in progress, wait for it to finish
+            if self._connecting:
+                logger.debug("Connect already in progress; waiting for it to finish...")
+                try:
+                    # Wait on the condition variable, with a 60s timeout to avoid waiting forever
+                    await asyncio.wait_for(
+                        self._connect_condition.wait_for(
+                            lambda: not self._connecting),
+                        timeout=60.0
+                    )
+                except asyncio.TimeoutError:
+                    raise RuntimeError("Timed out waiting for the connection")
+
+                # After waiting, re-check the state
+                if not await self.is_connected:
+                    raise RuntimeError("Connection still invalid after waiting for the reconnect")
+                return
+
+            # Mark the connect as in progress
+            self._connecting = True
+
+        # _connect_lock is released here so other coroutines can read state, but new connect requests are blocked until this one finishes
+        # Note: _connect_lock has been released, while self._connecting = True blocks new connect flows
+
+        try:
+            # --- Phase 1: clean up old resources ---
+            # Re-acquire the lock for the resource cleanup
+            async with self._connect_lock:
+                was_consuming = self._consumer_tag is not None
+
+                if self._channel_conn and self._conn_close_callback:
+                    try:
+                        self._channel_conn.close_callbacks.discard(
+                            self._conn_close_callback)
+                    except Exception:
+                        pass
+
+                self._channel = None
+                self._channel_conn = None
+                self._exchange = None
+                self._queue = None
+                self._conn_close_callback = None
+
+            # --- Phase 2: acquire a new connection (slow I/O) ---
+            self._channel, self._channel_conn = await self.connection_pool.acquire_channel()
+
+            # Register the close callback
+            def on_conn_closed(conn, exc):
+                logger.warning(f"Connection close detected: {exc}")
+                if not self._closed and not self._connecting:
+                    asyncio.create_task(self._safe_reconnect())
+
+            self._conn_close_callback = on_conn_closed
+            if self._channel_conn:
+                self._channel_conn.close_callbacks.add(
+                    self._conn_close_callback)
+
+            # Rebuild resources
+            await self._rebuild_resources()
+
+            # --- Phase 3: resume consuming ---
+            if was_consuming and self._message_handler and self.queue_name:
+                logger.info("🔄 Was consuming before the reconnect; attempting automatic recovery...")
+                try:
+                    self._queue = await self._channel.declare_queue(
+                        name=self.queue_name,
+                        durable=self.durable,
+                        auto_delete=self.auto_delete,
+                        passive=False,
+                    )
+                    await self._queue.bind(exchange=self._exchange, routing_key=self.routing_key)
+                    self._consumer_tag = await self._queue.consume(self._process_message_callback)
+                    logger.info(f"✅ Consuming resumed automatically: {self._consumer_tag}")
+                except Exception as e:
+                    logger.error(f"❌ Failed to resume consuming automatically: {e}")
+                    self._consumer_tag = None
+            else:
+                self._consumer_tag = None
+
+            logger.info("Client connection initialized")
+
+        except Exception as e:
+            logger.error(f"Client connection failed: {str(e)}", exc_info=True)
+
+            # Clean up resources on failure
+            async with self._connect_lock:
+                if self._channel_conn and self._conn_close_callback:
+                    self._channel_conn.close_callbacks.discard(
+                        self._conn_close_callback)
+                self._channel = None
+                self._channel_conn = None
+                self._consumer_tag = None
+
+            raise
+
+        finally:
+            # [Key fix] notify_all must be called while holding the Condition's internal lock
+            # async with self._connect_condition: performs acquire() ... notify_all() ... release() automatically
+            async with self._connect_condition:
+                self._connecting = False
+                self._connect_condition.notify_all()
+
+    async def _safe_reconnect(self):
+        """Safe reconnect task (only used when a connection close is observed passively)"""
+        async with self._reconnect_semaphore:
+            if self._closed:
+                return
+
+            # If a reconnect is already in progress, simply ignore this one
+            if self._connecting:
+                return
+
+            logger.info(f"Will attempt to reconnect in {self._RECONNECT_INTERVAL} seconds...")
+            await asyncio.sleep(self._RECONNECT_INTERVAL)
+
+            if self._closed or await self.is_connected:
+                return
+
+            try:
+                self._current_reconnect_task = asyncio.create_task(
+                    self.connect())
+                await self._current_reconnect_task
+            except Exception as e:
+                logger.warning(f"Reconnect failed: {str(e)}")
+            finally:
+                self._current_reconnect_task = None
+
+    async def set_message_handler(self, handler: Callable[..., Coroutine]) -> None:
+        if not asyncio.iscoroutinefunction(handler):
+            raise TypeError("The message handler must be a coroutine function")
+        async with self._consume_lock:
+            self._message_handler = handler
+
+    async def _process_message_callback(self, message: AbstractIncomingMessage):
+        try:
+            msg_obj: MQMsgModel
+            if self.auto_parse_json:
+                try:
+                    body_dict = json.loads(message.body.decode("utf-8"))
+                    msg_obj = MQMsgModel(**body_dict)
+                except json.JSONDecodeError as e:
+                    logger.error(f"JSON parsing failed: {e}")
+                    await message.nack(requeue=False)
+                    return
+            else:
+                msg_obj = MQMsgModel(
+                    body=message.body.decode("utf-8"),
+                    routing_key=message.routing_key,
+                    delivery_tag=message.delivery_tag,
+                    traceId=message.headers.get("trace-id"),
+                    headers=message.headers
+                )
+
+            SYLogger.set_trace_id(msg_obj.traceId)
+
+            if self._message_handler:
+                await self._message_handler(msg_obj, message)
+
+            await message.ack()
+
+        except Exception as e:
+            logger.error(f"Message handling error: {e}", exc_info=True)
+            headers = dict(message.headers) if message.headers else {}
+            current_retry = int(headers.get("x-retry-count", 0))
+
+            if current_retry >= 3:
+                logger.warning(f"Retry limit exceeded, dropping message: {message.delivery_tag}")
+                await message.reject(requeue=False)
+            else:
+                headers["x-retry-count"] = current_retry + 1
+                try:
+                    new_msg = Message(
+                        body=message.body,
+                        headers=headers,
+                        content_type=message.content_type,
+                        delivery_mode=message.delivery_mode
+                    )
+                    # If this publish fails, it will trigger the reconnect mechanism
+                    # Note, however, that we are inside the consumer callback, so exceptions must be caught here
+                    await self._exchange.publish(new_msg, routing_key=message.routing_key)
+                    await message.ack()
+                except Exception as pub_err:
+                    logger.error(f"Retry publish failed: {pub_err}")
+                    await message.reject(requeue=False)
+
+    async def start_consuming(self) -> Optional[ConsumerTag]:
+        if self._closed:
+            raise RuntimeError("Client is closed; cannot start consuming")
+
+        async with self._consume_lock:
+            if not self._message_handler:
+                raise RuntimeError("No message handler set")
+
+            if not await self.is_connected:
+                await self.connect()
+
+            if not self._queue:
+                if self.queue_name:
+                    self._queue = await self._channel.declare_queue(
+                        name=self.queue_name,
+                        durable=self.durable,
+                        auto_delete=self.auto_delete,
+                        passive=not self.create_if_not_exists,
+                    )
+                    await self._queue.bind(exchange=self._exchange, routing_key=self.routing_key)
+                else:
+                    raise RuntimeError("No queue name configured")
+
+            self._consumer_tag = await self._queue.consume(self._process_message_callback)
+            logger.info(
+                f"Started consuming queue: {self._queue.name}, tag: {self._consumer_tag}")
+            return self._consumer_tag
+
+    async def stop_consuming(self) -> None:
+        async with self._consume_lock:
+            if self._consumer_tag and self._queue and self._channel:
+                try:
+                    await self._queue.cancel(self._consumer_tag)
+                    logger.info(f"Stopped consuming: {self._consumer_tag}")
+                except Exception as e:
+                    logger.warning(f"Error while stopping consuming: {e}")
+            self._consumer_tag = None
+
+    async def _handle_publish_failure(self):
+        try:
+            logger.info("Publish failure detected; forcing the connection pool to switch nodes...")
+            await self.connection_pool.force_reconnect()
+            # After the pool switches nodes, the client's resources must be refreshed
+            await self.connect()
+            logger.info("Failover complete; resources refreshed")
+        except Exception as e:
+            logger.error(f"Failover failed: {e}")
+            raise
+
+    async def publish(
+        self,
+        message_body: Union[str, Dict[str, Any], MQMsgModel],
+        headers: Optional[Dict[str, Any]] = None,
+        content_type: str = "application/json",
+        delivery_mode: DeliveryMode = DeliveryMode.PERSISTENT,
+        retry_count: int = 3,
+    ) -> None:
+        if self._closed:
+            raise RuntimeError("Client is closed; cannot publish messages")
+
+        try:
+            if isinstance(message_body, MQMsgModel):
+                body = json.dumps(message_body.to_dict(),
+                                  ensure_ascii=False).encode("utf-8")
+            elif isinstance(message_body, dict):
+                body = json.dumps(
+                    message_body, ensure_ascii=False).encode("utf-8")
+            elif isinstance(message_body, str):
+                body = message_body.encode("utf-8")
+            else:
+                raise TypeError(f"Unsupported message body type: {type(message_body)}")
+        except Exception as e:
+            logger.error(f"Failed to serialize message body: {e}")
+            raise
+
+        message = Message(body=body, headers=headers or {},
+                          content_type=content_type, delivery_mode=delivery_mode)
+        last_exception = None
+
+        for retry in range(retry_count):
+            try:
+                if not await self.is_connected:
+                    await self.connect()
+
+                result = await self._exchange.publish(
+                    message=message,
+                    routing_key=self.routing_key or self.queue_name or "#",
+                    mandatory=True,
+                    timeout=5.0
+                )
+
+                if result is None:
+                    raise RuntimeError(f"Message did not match any queue: {self.routing_key}")
+
+                logger.info(f"Published successfully: {self.routing_key}")
+                return
+
+            except RuntimeError as e:
+                if "did not match any queue" in str(e):
+                    raise
+                last_exception = str(e)
+                await self._handle_publish_failure()
+
+            except Exception as e:
+                last_exception = str(e)
+                logger.error(f"Publish error: {e}")
+                await self._handle_publish_failure()
+
+            await asyncio.sleep(5)
+
+        raise RuntimeError(f"Message publish ultimately failed: {last_exception}")
+
+    async def close(self) -> None:
+        self._closed = True
+        logger.info("Closing RabbitMQ client...")
+
+        if self._current_reconnect_task and not self._current_reconnect_task.done():
+            self._current_reconnect_task.cancel()
+            try:
+                await self._current_reconnect_task
+            except asyncio.CancelledError:
+                pass
+
+        await self.stop_consuming()
+
+        async with self._connect_lock:
+            if self._conn_close_callback and self._channel_conn:
+                self._channel_conn.close_callbacks.discard(
+                    self._conn_close_callback)
+
+            self._channel = None
+            self._channel_conn = None
+            self._exchange = None
+            self._queue = None
+            self._message_handler = None
+
+            # Make sure any task still waiting in connect gets woken up
+            self._connecting = False
+            self._connect_condition.notify_all()
+
+        logger.info("Client closed")
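
For orientation, a hedged usage sketch of the new client (not part of the package diff). It assumes a RabbitMQConnectionPool that has already been constructed and initialized via await pool.init_pools(); the pool's constructor arguments live in rabbitmq_pool.py and are not reproduced here, and the queue/routing names below are placeholders:

import asyncio

from aio_pika.abc import AbstractIncomingMessage

from sycommon.models.mqmsg_model import MQMsgModel
from sycommon.rabbitmq.rabbitmq_client import RabbitMQClient
from sycommon.rabbitmq.rabbitmq_pool import RabbitMQConnectionPool


async def handle(msg: MQMsgModel, raw: AbstractIncomingMessage) -> None:
    # Business logic; an exception raised here takes the x-retry-count path (up to 3 retries)
    print("received:", msg)


async def run(pool: RabbitMQConnectionPool) -> None:
    # The pool must already be initialized (init_pools), otherwise __init__ raises RuntimeError
    client = RabbitMQClient(
        connection_pool=pool,
        exchange_name="system.topic.exchange",
        queue_name="example.queue",         # placeholder
        routing_key="example.routing.key",  # placeholder
    )
    await client.connect()
    await client.set_message_handler(handle)
    await client.start_consuming()

    # publish() accepts str, dict, or MQMsgModel; dicts are JSON-encoded
    await client.publish({"hello": "world"})

    await asyncio.sleep(60)
    await client.close()

In the new client, consuming is resumed automatically after a reconnect, and publish() retries up to retry_count times, forcing the pool to switch nodes via _handle_publish_failure() between attempts.
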