sycommon-python-lib 0.1.54__py3-none-any.whl → 0.1.55__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sycommon/database/async_base_db_service.py +36 -0
- sycommon/database/async_database_service.py +96 -0
- sycommon/logging/async_sql_logger.py +65 -0
- sycommon/logging/kafka_log.py +21 -9
- sycommon/logging/logger_levels.py +23 -0
- sycommon/middleware/context.py +2 -0
- sycommon/middleware/traceid.py +155 -32
- sycommon/services.py +73 -60
- sycommon/synacos/feign.py +15 -8
- sycommon/synacos/feign_client.py +24 -10
- sycommon/synacos/nacos_service.py +1 -0
- sycommon/tools/merge_headers.py +97 -0
- sycommon/tools/snowflake.py +290 -23
- {sycommon_python_lib-0.1.54.dist-info → sycommon_python_lib-0.1.55.dist-info}/METADATA +9 -8
- {sycommon_python_lib-0.1.54.dist-info → sycommon_python_lib-0.1.55.dist-info}/RECORD +18 -13
- {sycommon_python_lib-0.1.54.dist-info → sycommon_python_lib-0.1.55.dist-info}/WHEEL +0 -0
- {sycommon_python_lib-0.1.54.dist-info → sycommon_python_lib-0.1.55.dist-info}/entry_points.txt +0 -0
- {sycommon_python_lib-0.1.54.dist-info → sycommon_python_lib-0.1.55.dist-info}/top_level.txt +0 -0
sycommon/services.py
CHANGED
@@ -7,6 +7,7 @@ from fastapi import FastAPI, applications
 from pydantic import BaseModel
 from typing import Any, Callable, Dict, List, Tuple, Union, Optional, AsyncGenerator
 from sycommon.config.Config import SingletonMeta
+from sycommon.logging.logger_levels import setup_logger_levels
 from sycommon.models.mqlistener_config import RabbitMQListenerConfig
 from sycommon.models.mqsend_config import RabbitMQSendConfig
 from sycommon.rabbitmq.rabbitmq_service import RabbitMQService
@@ -23,6 +24,9 @@ class Services(metaclass=SingletonMeta):
     _user_lifespan: Optional[Callable] = None
     _shutdown_lock: asyncio.Lock = asyncio.Lock()
 
+    # Stores the async database initialization tasks that are still pending
+    _pending_async_db_setup: List[Tuple[Callable, str]] = []
+
     def __init__(self, config: dict, app: FastAPI):
         if not Services._config:
             Services._config = config
@@ -48,25 +52,25 @@ class Services(metaclass=SingletonMeta):
         nacos_service: Optional[Callable[[dict], None]] = None,
         logging_service: Optional[Callable[[dict], None]] = None,
         database_service: Optional[Union[
-            Tuple[Callable
-            List[Tuple[Callable
+            Tuple[Callable, str],
+            List[Tuple[Callable, str]]
         ]] = None,
         rabbitmq_listeners: Optional[List[RabbitMQListenerConfig]] = None,
         rabbitmq_senders: Optional[List[RabbitMQSendConfig]] = None
     ) -> FastAPI:
         load_dotenv()
-
+        setup_logger_levels()
         cls._app = app
         cls._config = config
         cls._user_lifespan = app.router.lifespan_context
-
+
         applications.get_swagger_ui_html = custom_swagger_ui_html
         applications.get_redoc_html = custom_redoc_html
-
+
         if not cls._config:
             config = yaml.safe_load(open('app.yaml', 'r', encoding='utf-8'))
             cls._config = config
-
+
         app.state.config = {
             "host": cls._config.get('Host', '0.0.0.0'),
             "port": cls._config.get('Port', 8080),
@@ -74,7 +78,6 @@ class Services(metaclass=SingletonMeta):
             "h11_max_incomplete_event_size": cls._config.get('H11MaxIncompleteEventSize', 1024 * 1024 * 10)
         }
 
-        # Configure the non-async services right away (before the app starts)
         if middleware:
             middleware(app, config)
 
@@ -84,25 +87,62 @@ class Services(metaclass=SingletonMeta):
         if logging_service:
             logging_service(config)
 
+        # ========== Handle the database services ==========
+        # Clear the previous pending list (avoids duplicates on hot reload)
+        cls._pending_async_db_setup = []
+
         if database_service:
-
+            # Parse the config and separate sync from async setup functions
+            items = [database_service] if isinstance(
+                database_service, tuple) else database_service
+            for item in items:
+                db_setup_func, db_name = item
+                if asyncio.iscoroutinefunction(db_setup_func):
+                    # Async function: defer it to the pending list, run at app startup
+                    logging.info(f"检测到异步数据库服务: {db_name},将在应用启动时初始化")
+                    cls._pending_async_db_setup.append(item)
+                else:
+                    # Sync function: run it immediately
+                    logging.info(f"执行同步数据库服务: {db_name}")
+                    try:
+                        db_setup_func(config, db_name)
+                    except Exception as e:
+                        logging.error(
+                            f"同步数据库服务 {db_name} 初始化失败: {e}", exc_info=True)
+                        raise
 
         # Create the combined lifespan manager
         @asynccontextmanager
         async def combined_lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
             # 1. Run Services' own initialization
             instance = cls(config, app)
-
+
+            # ========== Run the pending async database initializations ==========
+            if cls._pending_async_db_setup:
+                logging.info("开始执行异步数据库初始化...")
+                for db_setup_func, db_name in cls._pending_async_db_setup:
+                    try:
+                        await db_setup_func(config, db_name)
+                        logging.info(f"异步数据库服务 {db_name} 初始化成功")
+                    except Exception as e:
+                        logging.error(
+                            f"异步数据库服务 {db_name} 初始化失败: {e}", exc_info=True)
+                        raise
+
+            # ========== Initialize MQ ==========
+            has_valid_listeners = bool(
                 rabbitmq_listeners and len(rabbitmq_listeners) > 0)
-
+            has_valid_senders = bool(
+                rabbitmq_senders and len(rabbitmq_senders) > 0)
 
             try:
-
-
-
-
-
-
+                if has_valid_listeners or has_valid_senders:
+                    await instance._setup_mq_async(
+                        rabbitmq_listeners=rabbitmq_listeners if has_valid_listeners else None,
+                        rabbitmq_senders=rabbitmq_senders if has_valid_senders else None,
+                        has_listeners=has_valid_listeners,
+                        has_senders=has_valid_senders
+                    )
                 cls._initialized = True
                 logging.info("Services初始化完成")
             except Exception as e:
@@ -114,28 +154,18 @@ class Services(metaclass=SingletonMeta):
             # 2. Run the user-defined lifespan
             if cls._user_lifespan:
                 async with cls._user_lifespan(app):
-                    yield
+                    yield
             else:
-                yield
+                yield
 
             # 3. Run Services' shutdown logic
             await cls.shutdown()
             logging.info("Services已关闭")
 
-        # Set the combined lifespan
         app.router.lifespan_context = combined_lifespan
-
         return app
 
-
-    def _setup_database_static(database_service, config):
-        """Static method: set up the database services"""
-        if isinstance(database_service, tuple):
-            db_setup, db_name = database_service
-            db_setup(config, db_name)
-        elif isinstance(database_service, list):
-            for db_setup, db_name in database_service:
-                db_setup(config, db_name)
+    # _setup_database_static was removed; its logic is now inlined into the plugins
 
     async def _setup_mq_async(
         self,
@@ -144,11 +174,13 @@ class Services(metaclass=SingletonMeta):
         has_listeners: bool = False,
         has_senders: bool = False,
     ):
-        """Asynchronously set up MQ
-
+        """Asynchronously set up the MQ-related services"""
+        if not (has_listeners or has_senders):
+            logging.info("无RabbitMQ监听器/发送器配置,跳过RabbitMQService初始化")
+            return
+
         RabbitMQService.init(self._config, has_listeners, has_senders)
 
-        # Optimization: wait until the connection pool exists and has finished initializing (avoids running later logic too early)
         start_time = asyncio.get_event_loop().time()
         while not (RabbitMQService._connection_pool and RabbitMQService._connection_pool._initialized) and not RabbitMQService._is_shutdown:
             if asyncio.get_event_loop().time() - start_time > 30:
@@ -156,42 +188,33 @@ class Services(metaclass=SingletonMeta):
             logging.info("等待RabbitMQ连接池初始化...")
             await asyncio.sleep(0.5)
 
-
-
-        # If there are listeners, walk the listener list and, where the queue name matches, copy prefetch_count onto the sender object
-        if rabbitmq_listeners:
+        if has_senders and rabbitmq_senders:
+            if has_listeners and rabbitmq_listeners:
                 for sender in rabbitmq_senders:
                     for listener in rabbitmq_listeners:
                         if sender.queue_name == listener.queue_name:
                             sender.prefetch_count = listener.prefetch_count
             await self._setup_senders_async(rabbitmq_senders, has_listeners)
 
-
-        if rabbitmq_listeners:
+        if has_listeners and rabbitmq_listeners:
             await self._setup_listeners_async(rabbitmq_listeners, has_senders)
 
-        # Verify the initialization result
         if has_listeners:
-
-            listener_count = len(
-                RabbitMQService._consumer_tasks)
+            listener_count = len(RabbitMQService._consumer_tasks)
             logging.info(f"监听器初始化完成,共启动 {listener_count} 个消费者")
             if listener_count == 0:
                 logging.warning("未成功初始化任何监听器,请检查配置或MQ服务状态")
 
     async def _setup_senders_async(self, rabbitmq_senders, has_listeners: bool):
-        """
+        """Set up the senders"""
         Services._registered_senders = [
             sender.queue_name for sender in rabbitmq_senders]
-
-        # Pass whether listeners exist on to RabbitMQService (async call)
         await RabbitMQService.setup_senders(rabbitmq_senders, has_listeners)
-        # Refresh the registered senders (use the names actually registered by RabbitMQService)
         Services._registered_senders = RabbitMQService._sender_client_names
         logging.info(f"已注册的RabbitMQ发送器: {Services._registered_senders}")
 
     async def _setup_listeners_async(self, rabbitmq_listeners, has_senders: bool):
-        """
+        """Set up the listeners"""
         await RabbitMQService.setup_listeners(rabbitmq_listeners, has_senders)
 
     @classmethod
@@ -202,7 +225,7 @@ class Services(metaclass=SingletonMeta):
         max_retries: int = 3,
         retry_delay: float = 1.0, **kwargs
     ) -> None:
-        """
+        """Send a message"""
         if not cls._initialized or not cls._loop:
             logging.error("Services not properly initialized!")
             raise ValueError("服务未正确初始化")
@@ -213,18 +236,15 @@ class Services(metaclass=SingletonMeta):
 
         for attempt in range(max_retries):
             try:
-                # Check that the sender is registered
                 if queue_name not in cls._registered_senders:
                     cls._registered_senders = RabbitMQService._sender_client_names
                     if queue_name not in cls._registered_senders:
                         raise ValueError(f"发送器 {queue_name} 未注册")
 
-                # Fetch the sender (adapted to the new async get_sender method)
                 sender = await RabbitMQService.get_sender(queue_name)
                 if not sender:
                     raise ValueError(f"发送器 '{queue_name}' 不存在或连接无效")
 
-                # Send the message (calls RabbitMQService's async send_message)
                 await RabbitMQService.send_message(data, queue_name, **kwargs)
                 logging.info(f"消息发送成功(尝试 {attempt+1}/{max_retries})")
                 return
@@ -234,25 +254,18 @@ class Services(metaclass=SingletonMeta):
                     logging.error(
                         f"消息发送失败(已尝试 {max_retries} 次): {str(e)}", exc_info=True)
                     raise
-
                 logging.warning(
-                    f"消息发送失败(尝试 {attempt+1}/{max_retries}): {str(e)},"
-                    f"{retry_delay}秒后重试..."
-                )
+                    f"消息发送失败(尝试 {attempt+1}/{max_retries}): {str(e)},{retry_delay}秒后重试...")
                 await asyncio.sleep(retry_delay)
 
     @classmethod
     async def shutdown(cls):
-        """
+        """Shut down all services"""
         async with cls._shutdown_lock:
            if RabbitMQService._is_shutdown:
                logging.info("RabbitMQService已关闭,无需重复操作")
                return
-
-            # Shut down the RabbitMQ service (async; closes every client and consumer task internally)
            await RabbitMQService.shutdown()
-
-            # Clean up the global state
            cls._initialized = False
            cls._registered_senders.clear()
            logging.info("所有服务已关闭")
sycommon/synacos/feign.py
CHANGED
@@ -2,6 +2,7 @@ import io
 import os
 import time
 
+from sycommon.tools.merge_headers import merge_headers
 from sycommon.tools.snowflake import Snowflake
 
 import aiohttp
@@ -24,14 +25,18 @@ async def feign(service_name, api_path, method='GET', params=None, headers=None,
     try:
         # Initialize headers, making sure we have a mutable dict
         headers = headers.copy() if headers else {}
+        headers = merge_headers(SYLogger.get_headers(), headers)
         if "x-traceId-header" not in headers:
-            headers["x-traceId-header"] = SYLogger.get_trace_id() or Snowflake.
+            headers["x-traceId-header"] = SYLogger.get_trace_id() or Snowflake.id
 
         # Handle the Content-Type for JSON requests
         is_json_request = method.upper() in ["POST", "PUT", "PATCH"] and not (
             files or form_data or file_path)
-        if is_json_request
-            headers
+        if is_json_request:
+            # Lowercase all header keys so the check is case-insensitive
+            headers_lower = {k.lower(): v for k, v in headers.items()}
+            if "content-type" not in headers_lower:
+                headers["Content-Type"] = "application/json"
 
         nacos_service = NacosService(None)
         version = headers.get('s-y-version')
@@ -47,7 +52,7 @@ async def feign(service_name, api_path, method='GET', params=None, headers=None,
         instance = instances[int(time.time()) % len(instances)]
 
         SYLogger.info(f"nacos:开始调用服务: {service_name}")
-        SYLogger.info(f"nacos:请求头: {headers}")
+        # SYLogger.info(f"nacos:请求头: {headers}")
 
         ip = instance.get('ip')
         port = instance.get('port')
@@ -135,13 +140,15 @@ async def _handle_feign_response(response, service_name: str, api_path: str):
     """
     try:
         status_code = response.status
-        content_type = response.headers.get('Content-Type', '')
+        content_type = response.headers.get('Content-Type', '')
+        content_type = content_type.lower() if content_type else ''
+
         response_body = None
 
         if status_code == 200:
-            if 'application/json' in content_type:
+            if content_type and 'application/json' in content_type:
                 response_body = await response.json()
-            elif 'text/' in content_type:
+            elif content_type and 'text/' in content_type:
                 # Text types (text/plain, text/html, etc.): read as text
                 try:
                     response_body = await response.text(encoding='utf-8')
@@ -158,7 +165,7 @@ async def _handle_feign_response(response, service_name: str, api_path: str):
         else:
             # Non-200 status: read the response body uniformly (handles text/binary error payloads)
             try:
-                if 'application/json' in content_type:
+                if content_type and 'application/json' in content_type:
                     response_body = await response.json()
                 else:
                     response_body = await response.text(encoding='utf-8', errors='ignore')
sycommon/synacos/feign_client.py
CHANGED
@@ -5,6 +5,7 @@ import inspect
 from typing import Any, Dict, Optional, Literal, Type, TypeVar
 from urllib.parse import urljoin
 
+from sycommon.tools.merge_headers import merge_headers
 from sycommon.tools.snowflake import Snowflake
 
 import aiohttp
@@ -28,7 +29,9 @@ def feign_client(
     default_headers: Optional[Dict[str, str]] = None
 ):
     default_headers = default_headers or {}
-    default_headers
+    default_headers = {k.lower(): v for k, v in default_headers.items()}
+    default_headers = merge_headers(SYLogger.get_headers(), default_headers)
+    default_headers["x-traceId-header"] = SYLogger.get_trace_id() or Snowflake.id
 
     def decorator(cls):
         class FeignClient:
@@ -36,7 +39,8 @@ def feign_client(
                 self.service_name = service_name
                 self.path_prefix = path_prefix
                 self.default_timeout = default_timeout
-                self.default_headers =
+                self.default_headers = {
+                    k.lower(): v for k, v in default_headers.copy().items()}
                 self.nacos_manager: Optional[NacosService] = None
                 self.session: Optional[aiohttp.ClientSession] = None
 
@@ -65,7 +69,8 @@ def feign_client(
                 method = request_meta.get("method", "GET").upper()
                 path = request_meta.get("path", "")
                 is_upload = request_meta.get("is_upload", False)
-                method_headers =
+                method_headers = {
+                    k.lower(): v for k, v in request_meta.get("headers", {}).items()}
                 timeout = request_meta.get(
                     "timeout", self.default_timeout)
 
@@ -155,12 +160,16 @@ def feign_client(
             def _build_headers(self, param_meta: Dict[str, Param], bound_args: Dict[str, Any], method_headers: Dict[str, str]) -> Dict[str, str]:
                 headers = self.default_headers.copy()
                 headers.update(method_headers)
-                headers
+                headers = merge_headers(SYLogger.get_headers(), headers)
+                headers["x-traceId-header"] = SYLogger.get_trace_id() or Snowflake.id
+
+                # Handle Header-typed parameters
                 for name, meta in param_meta.items():
                     if isinstance(meta, Header) and name in bound_args:
                         value = bound_args[name]
                         if value is not None:
-
+                            header_key = meta.get_key(name).lower()
+                            headers[header_key] = str(value)
                 return headers
 
             def _replace_path_params(self, path: str, param_meta: Dict[str, Param], bound_args: Dict[str, Any]) -> str:
@@ -225,10 +234,14 @@ def feign_client(
                         value) if not isinstance(value, dict) else value)
                     return form_data
 
-                #
+                # Read the Content-Type from the headers (keys are already lowercase)
                 content_type = self.default_headers.get(
-                    "
-
+                    "content-type") or method_headers.get("content-type", "")
+                # Lowercase it before comparing
+                content_type_lower = content_type.lower()
+
+                # Handle form submissions (x-www-form-urlencoded)
+                if "application/x-www-form-urlencoded" in content_type_lower:
                     form_data = {}
                     for name, value in bound_args.items():
                         meta = param_meta.get(name)
@@ -274,7 +287,8 @@ def feign_client(
                 """Handle the response (supports Pydantic model parsing)"""
                 status = response.status
                 if 200 <= status < 300:
-                    content_type = response.headers.get(
+                    content_type = response.headers.get(
+                        "content-type", "").lower()
                     if "application/json" in content_type:
                         json_data = await response.json()
                         # If a Pydantic response model is specified, parse it automatically
@@ -303,7 +317,7 @@ def feign_request(
     func._feign_meta = {
         "method": method.upper(),
         "path": path,
-        "headers": headers.
+        "headers": {k.lower(): v for k, v in headers.items()} if headers else {},
         "is_upload": False,
         "timeout": timeout
     }
sycommon/tools/merge_headers.py
ADDED
@@ -0,0 +1,97 @@
+def merge_headers(
+    source_headers,   # source headers (accepts dicts, MutableHeaders, or lists/tuples of key-value pairs)
+    target_headers,   # target headers (existing values are kept; same-named keys override the source)
+    keep_keys=None,   # set of keys to keep (None means keep everything)
+    delete_keys={'content-length', 'accept',
+                 'content-type', 'sec-fetch-mode',
+                 'sec-fetch-dest', 'sec-fetch-site',
+                 'pragma', 'cache-control',
+                 'accept-encoding', 'priority'},  # set of source keys to drop
+    encoding='utf-8'  # character encoding (used when converting bytes)
+) -> dict:
+    """
+    Merge headers. Final rules:
+    1. All keys are lowercased for comparison (fully case-insensitive)
+    2. Same-named keys in target_headers completely override source_headers (the source key never takes effect)
+    3. delete_keys applies to source_headers: any source key in that set is never added
+    4. Keys already in target_headers are always kept, even if they appear in delete_keys
+    5. bytes and other non-string keys/values are converted to strings automatically
+    6. All keys in the result are lowercase
+    """
+    # Normalize the key sets to lowercase
+    keep_keys = {k.lower() for k in keep_keys} if keep_keys else set()
+    delete_keys = {k.lower() for k in delete_keys} if delete_keys else set()
+
+    # Fix 1: accept MutableHeaders, plain dicts, None, etc. for target_headers
+    if target_headers is None:
+        target_dict = {}
+    elif hasattr(target_headers, 'items'):
+        # Supports MutableHeaders/Headers/plain dicts (anything with items())
+        target_dict = dict(target_headers.items())
+    else:
+        # Fallback: convert an iterable of pairs into a dict
+        target_dict = dict(target_headers) if isinstance(
+            target_headers, (list, tuple)) else {}
+
+    # Normalize target_headers: lowercase the keys, keep the original values
+    processed_headers = {k.lower(): v for k, v in target_dict.items()}
+    target_original_keys = set(processed_headers.keys())
+
+    # Fix 2: normalize source_headers so it is always an iterable of key-value pairs
+    # Step 1: turn source_headers into a standard list of key-value pairs
+    if source_headers is None:
+        source_kv_list = []
+    elif hasattr(source_headers, 'items'):
+        # dict/MutableHeaders → list of key-value pairs
+        source_kv_list = list(source_headers.items())
+    elif isinstance(source_headers, (list, tuple)):
+        # list/tuple → validate and keep only well-formed pairs (length-2 tuples/lists)
+        source_kv_list = []
+        for item in source_headers:
+            if isinstance(item, (list, tuple)) and len(item) == 2:
+                source_kv_list.append(item)
+            else:
+                # Skip malformed entries (length != 2) to avoid unpacking errors
+                continue
+    else:
+        # Any other type → empty list (avoids iteration errors)
+        source_kv_list = []
+
+    # Convert and merge the source headers (iterate the normalized pairs)
+    for key, value in source_kv_list:
+        # Convert the key to a string and lowercase it for comparison
+        if not isinstance(key, str):
+            try:
+                key = key.decode(encoding, errors='replace') if isinstance(
+                    key, bytes) else str(key)
+            except Exception:
+                # Extreme case: skip keys that cannot be converted
+                continue
+
+        key_lower = key.lower()
+
+        # Convert the value to a string
+        if not isinstance(value, str):
+            try:
+                value = value.decode(encoding, errors='replace') if isinstance(
+                    value, bytes) else str(value)
+            except Exception:
+                # Values that cannot be converted become empty strings
+                value = ""
+
+        # Filter 1: the source key is in the delete list → skip it
+        if key_lower in delete_keys:
+            continue
+
+        # Filter 2: only keep the listed keys (when keep_keys is set)
+        if keep_keys and key_lower not in keep_keys:
+            continue
+
+        # Filter 3: the target already has this key → skip (target overrides source)
+        if key_lower in target_original_keys:
+            continue
+
+        # Only add key-value pairs that pass the filters (final keys are lowercase)
+        processed_headers[key_lower] = value
+
+    return processed_headers