tamar-file-hub-client 0.0.1__py3-none-any.whl → 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. file_hub_client/__init__.py +39 -0
  2. file_hub_client/client.py +43 -6
  3. file_hub_client/rpc/async_client.py +91 -11
  4. file_hub_client/rpc/gen/taple_service_pb2.py +225 -0
  5. file_hub_client/rpc/gen/taple_service_pb2_grpc.py +1626 -0
  6. file_hub_client/rpc/generate_grpc.py +2 -2
  7. file_hub_client/rpc/interceptors.py +550 -0
  8. file_hub_client/rpc/protos/taple_service.proto +874 -0
  9. file_hub_client/rpc/sync_client.py +91 -9
  10. file_hub_client/schemas/__init__.py +60 -0
  11. file_hub_client/schemas/taple.py +413 -0
  12. file_hub_client/services/__init__.py +5 -0
  13. file_hub_client/services/file/async_blob_service.py +558 -482
  14. file_hub_client/services/file/async_file_service.py +18 -9
  15. file_hub_client/services/file/base_file_service.py +19 -6
  16. file_hub_client/services/file/sync_blob_service.py +554 -478
  17. file_hub_client/services/file/sync_file_service.py +18 -9
  18. file_hub_client/services/folder/async_folder_service.py +20 -11
  19. file_hub_client/services/folder/sync_folder_service.py +20 -11
  20. file_hub_client/services/taple/__init__.py +10 -0
  21. file_hub_client/services/taple/async_taple_service.py +2281 -0
  22. file_hub_client/services/taple/base_taple_service.py +353 -0
  23. file_hub_client/services/taple/idempotent_taple_mixin.py +142 -0
  24. file_hub_client/services/taple/sync_taple_service.py +2256 -0
  25. file_hub_client/utils/__init__.py +43 -1
  26. file_hub_client/utils/file_utils.py +59 -11
  27. file_hub_client/utils/idempotency.py +196 -0
  28. file_hub_client/utils/logging.py +315 -0
  29. file_hub_client/utils/retry.py +241 -2
  30. file_hub_client/utils/smart_retry.py +403 -0
  31. tamar_file_hub_client-0.0.2.dist-info/METADATA +2050 -0
  32. tamar_file_hub_client-0.0.2.dist-info/RECORD +57 -0
  33. tamar_file_hub_client-0.0.1.dist-info/METADATA +0 -874
  34. tamar_file_hub_client-0.0.1.dist-info/RECORD +0 -44
  35. {tamar_file_hub_client-0.0.1.dist-info → tamar_file_hub_client-0.0.2.dist-info}/WHEEL +0 -0
  36. {tamar_file_hub_client-0.0.1.dist-info → tamar_file_hub_client-0.0.2.dist-info}/top_level.txt +0 -0
@@ -11,7 +11,18 @@ from .file_utils import (
11
11
  calculate_file_hash,
12
12
  split_file_chunks,
13
13
  )
14
- from .retry import retry_with_backoff
14
+ from .retry import (
15
+ retry_with_backoff,
16
+ retry_on_lock_conflict
17
+ )
18
+ from .smart_retry import (
19
+ smart_retry,
20
+ retry_on_network_errors,
21
+ retry_on_conflict,
22
+ no_retry,
23
+ ErrorClassifier,
24
+ RetryStrategy
25
+ )
15
26
  from .upload_helper import (
16
27
  HttpUploader,
17
28
  AsyncHttpUploader,
@@ -23,6 +34,18 @@ from .download_helper import (
23
34
  AsyncHttpDownloader,
24
35
  DownloadProgress,
25
36
  )
37
+ from .idempotency import (
38
+ IdempotencyKeyGenerator,
39
+ IdempotencyManager,
40
+ generate_idempotency_key
41
+ )
42
+ from .logging import (
43
+ setup_logging,
44
+ get_logger,
45
+ GrpcRequestLogger,
46
+ grpc_request_context,
47
+ log_grpc_call,
48
+ )
26
49
 
27
50
  __all__ = [
28
51
  # 文件工具
@@ -34,6 +57,13 @@ __all__ = [
34
57
 
35
58
  # 重试工具
36
59
  "retry_with_backoff",
60
+ "retry_on_lock_conflict",
61
+ "smart_retry",
62
+ "retry_on_network_errors",
63
+ "retry_on_conflict",
64
+ "no_retry",
65
+ "ErrorClassifier",
66
+ "RetryStrategy",
37
67
 
38
68
  # 上传助手
39
69
  "HttpUploader",
@@ -45,4 +75,16 @@ __all__ = [
45
75
  "HttpDownloader",
46
76
  "AsyncHttpDownloader",
47
77
  "DownloadProgress",
78
+
79
+ # 幂等性工具
80
+ "IdempotencyKeyGenerator",
81
+ "IdempotencyManager",
82
+ "generate_idempotency_key",
83
+
84
+ # 日志工具
85
+ "setup_logging",
86
+ "get_logger",
87
+ "GrpcRequestLogger",
88
+ "grpc_request_context",
89
+ "log_grpc_call",
48
90
  ]
@@ -18,8 +18,56 @@ def get_file_mime_type(file_path: Union[str, Path]) -> str:
18
18
  MIME类型
19
19
  """
20
20
  file_path = Path(file_path)
21
- mime_type, _ = mimetypes.guess_type(str(file_path))
22
- return mime_type or "application/octet-stream"
21
+
22
+ # 定义常见文件扩展名到MIME类型的映射,确保跨平台一致性
23
+ extension_mime_map = {
24
+ '.csv': 'text/csv',
25
+ '.txt': 'text/plain',
26
+ '.json': 'application/json',
27
+ '.xml': 'application/xml',
28
+ '.html': 'text/html',
29
+ '.htm': 'text/html',
30
+ '.pdf': 'application/pdf',
31
+ '.doc': 'application/msword',
32
+ '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
33
+ '.xls': 'application/vnd.ms-excel',
34
+ '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
35
+ '.ppt': 'application/vnd.ms-powerpoint',
36
+ '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
37
+ '.jpg': 'image/jpeg',
38
+ '.jpeg': 'image/jpeg',
39
+ '.png': 'image/png',
40
+ '.gif': 'image/gif',
41
+ '.bmp': 'image/bmp',
42
+ '.webp': 'image/webp',
43
+ '.mp3': 'audio/mpeg',
44
+ '.wav': 'audio/wav',
45
+ '.mp4': 'video/mp4',
46
+ '.avi': 'video/x-msvideo',
47
+ '.mov': 'video/quicktime',
48
+ '.zip': 'application/zip',
49
+ '.rar': 'application/vnd.rar',
50
+ '.7z': 'application/x-7z-compressed',
51
+ '.tar': 'application/x-tar',
52
+ '.gz': 'application/gzip',
53
+ }
54
+
55
+ # 获取文件扩展名(转为小写)
56
+ extension = file_path.suffix.lower()
57
+
58
+ # 优先使用自定义映射,确保常见文件类型的一致性
59
+ if extension in extension_mime_map:
60
+ return extension_mime_map[extension]
61
+
62
+ # 如果自定义映射中没有,尝试使用magic进行内容检测
63
+ try:
64
+ import magic
65
+ mime = magic.Magic(mime=True)
66
+ return mime.from_file(str(file_path))
67
+ except ImportError:
68
+ # 如果magic不可用,使用mimetypes作为fallback
69
+ mime_type, _ = mimetypes.guess_type(str(file_path))
70
+ return mime_type or "application/octet-stream"
23
71
 
24
72
 
25
73
  def get_file_extension(file_name: str) -> str:
@@ -65,18 +113,18 @@ def calculate_file_hash(file_path: Union[str, Path], algorithm: str = "sha256")
65
113
  """
66
114
  file_path = Path(file_path)
67
115
  hash_obj = hashlib.new(algorithm)
68
-
116
+
69
117
  with open(file_path, "rb") as f:
70
118
  while chunk := f.read(8192):
71
119
  hash_obj.update(chunk)
72
-
120
+
73
121
  return hash_obj.hexdigest()
74
122
 
75
123
 
76
124
  def split_file_chunks(
77
- file_obj: BinaryIO,
78
- chunk_size: int = 1024 * 1024, # 默认1MB
79
- start_offset: int = 0
125
+ file_obj: BinaryIO,
126
+ chunk_size: int = 1024 * 1024, # 默认1MB
127
+ start_offset: int = 0
80
128
  ) -> Generator[tuple[bytes, int, bool], None, None]:
81
129
  """
82
130
  将文件分割成块
@@ -91,15 +139,15 @@ def split_file_chunks(
91
139
  """
92
140
  file_obj.seek(start_offset)
93
141
  offset = start_offset
94
-
142
+
95
143
  while True:
96
144
  chunk = file_obj.read(chunk_size)
97
145
  if not chunk:
98
146
  break
99
-
147
+
100
148
  is_last = len(chunk) < chunk_size
101
149
  yield chunk, offset, is_last
102
-
150
+
103
151
  offset += len(chunk)
104
152
  if is_last:
105
- break
153
+ break
@@ -0,0 +1,196 @@
1
+ """
2
+ 幂等性支持工具
3
+ """
4
+ import uuid
5
+ import hashlib
6
+ from typing import Optional, Dict, Any, Union
7
+ from datetime import datetime, timedelta
8
+
9
+
10
class IdempotencyKeyGenerator:
    """Generators for idempotency keys: random UUID, content hash, or timestamp."""

    @staticmethod
    def generate_uuid_key() -> str:
        """Return a random UUID4 string suitable as an idempotency key."""
        return str(uuid.uuid4())

    @staticmethod
    def generate_content_based_key(content: Dict[str, Any]) -> str:
        """Derive a deterministic key from *content*.

        The dict is recursively rebuilt with sorted keys first so that
        logically equal payloads produce the same key, then the SHA-256 hex
        digest of its string form is returned.

        Args:
            content: the content dict to fingerprint.

        Returns:
            SHA-256 hex digest derived from the normalized content.
        """
        normalized = _sort_dict_recursively(content)
        return hashlib.sha256(str(normalized).encode('utf-8')).hexdigest()

    @staticmethod
    def generate_timestamp_key(prefix: str = "taple") -> str:
        """Build a timestamp-based key: ``<prefix>_<YmD_HMS_us>_<8 hex chars>``.

        Args:
            prefix: key prefix (defaults to "taple").

        Returns:
            A mostly-unique, time-ordered idempotency key.
        """
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
        entropy = uuid.uuid4().hex[:8]
        return f"{prefix}_{stamp}_{entropy}"
50
+
51
+
52
class IdempotencyManager:
    """In-memory idempotency manager with TTL-based expiry.

    Tracks operations by idempotency key so repeated submissions of the same
    operation can be detected and their cached result reused. Entries expire
    lazily (checked on access) or via cleanup_expired(). Not thread-safe.
    """

    def __init__(self, default_ttl_minutes: int = 60):
        """
        Args:
            default_ttl_minutes: default TTL (in minutes) for registered keys.
        """
        # key -> {operation_type, params, created_at, expires_at, result}
        self._cache: Dict[str, Dict[str, Any]] = {}
        self._default_ttl = timedelta(minutes=default_ttl_minutes)

    def register_operation(
            self,
            idempotency_key: str,
            operation_type: str,
            params: Dict[str, Any],
            ttl_minutes: Optional[int] = None
    ) -> bool:
        """Register an operation under an idempotency key.

        Args:
            idempotency_key: the idempotency key.
            operation_type: logical operation name.
            params: operation parameters; compared for equality on replays.
            ttl_minutes: optional TTL override in minutes.

        Returns:
            True if newly registered; False if the *same* operation is already
            registered and unexpired (idempotent replay).

        Raises:
            ValueError: if the key is already bound to a different operation.
        """
        if idempotency_key in self._cache:
            cache_entry = self._cache[idempotency_key]
            if datetime.now() > cache_entry['expires_at']:
                # Expired: drop the stale entry and re-register below.
                del self._cache[idempotency_key]
            else:
                # Still valid: same operation -> idempotent, different -> conflict.
                if (cache_entry['operation_type'] == operation_type and
                        cache_entry['params'] == params):
                    return False
                else:
                    raise ValueError(f"幂等性键冲突: {idempotency_key}")

        # BUGFIX: compare against None rather than truthiness so an explicit
        # ttl_minutes=0 means "expire immediately" instead of silently falling
        # back to the default TTL.
        ttl = timedelta(minutes=ttl_minutes) if ttl_minutes is not None else self._default_ttl
        self._cache[idempotency_key] = {
            'operation_type': operation_type,
            'params': params,
            'created_at': datetime.now(),
            'expires_at': datetime.now() + ttl,
            'result': None
        }
        return True

    def get_cached_result(self, idempotency_key: str) -> Optional[Any]:
        """Return the cached result for a key, or None.

        Expired entries are removed on access. Note that None is also returned
        when the key exists but no result has been cached yet.
        """
        if idempotency_key not in self._cache:
            return None

        cache_entry = self._cache[idempotency_key]
        if datetime.now() > cache_entry['expires_at']:
            del self._cache[idempotency_key]
            return None

        return cache_entry.get('result')

    def cache_result(self, idempotency_key: str, result: Any) -> None:
        """Attach an operation result to a registered key (no-op if unknown)."""
        if idempotency_key in self._cache:
            self._cache[idempotency_key]['result'] = result

    def cleanup_expired(self) -> int:
        """Remove all expired entries.

        Returns:
            Number of entries removed.
        """
        now = datetime.now()
        expired_keys = [
            key for key, entry in self._cache.items()
            if now > entry['expires_at']
        ]

        for key in expired_keys:
            del self._cache[key]

        return len(expired_keys)
157
+
158
+
159
+ def _sort_dict_recursively(obj: Any) -> Any:
160
+ """递归排序字典,确保生成一致的哈希"""
161
+ if isinstance(obj, dict):
162
+ return {k: _sort_dict_recursively(v) for k, v in sorted(obj.items())}
163
+ elif isinstance(obj, list):
164
+ return [_sort_dict_recursively(item) for item in obj]
165
+ else:
166
+ return obj
167
+
168
+
169
def generate_idempotency_key(
        operation_type: str,
        params: Optional[Dict[str, Any]] = None,
        method: str = "uuid"
) -> str:
    """Convenience wrapper around IdempotencyKeyGenerator.

    Args:
        operation_type: logical operation name (hash input or key prefix).
        params: optional operation parameters.
        method: one of "uuid", "content", "timestamp".

    Returns:
        The generated idempotency key. "content" without params, or any
        unrecognized method, falls back to a random UUID key.
    """
    generator = IdempotencyKeyGenerator()

    if method == "content" and params:
        payload = {"operation_type": operation_type, "params": params}
        return generator.generate_content_based_key(payload)
    if method == "timestamp":
        return generator.generate_timestamp_key(operation_type)
    # "uuid" and everything else defaults to a random UUID key.
    return generator.generate_uuid_key()
@@ -0,0 +1,315 @@
1
+ """
2
+ 日志配置和工具
3
+ """
4
+ import logging
5
+ import sys
6
+ import time
7
+ import json
8
+ import traceback
9
+ from typing import Optional, Any, Dict
10
+ from functools import wraps
11
+ from contextlib import contextmanager
12
+ from datetime import datetime
13
+
14
# Dedicated SDK logger in its own namespace so SDK logging configuration
# never clashes with the host application's loggers.
SDK_LOGGER_NAME = "file_hub_client.grpc"
logger = logging.getLogger(SDK_LOGGER_NAME)
17
+
18
+
19
class GrpcJSONFormatter(logging.Formatter):
    """Formatter that renders gRPC log records as single-line JSON.

    Custom attributes (log_type, uri, request_id, data, duration, trace) are
    read off the record when present; fields whose value is None are omitted
    from the output.
    """

    def format(self, record):
        payload = {
            "timestamp": datetime.fromtimestamp(record.created).isoformat(),
            "level": record.levelname,
            "type": getattr(record, "log_type", "info"),
            "uri": getattr(record, "uri", None),
            "request_id": getattr(record, "request_id", None),
            "data": getattr(record, "data", None),
            "message": record.getMessage(),
            "duration": getattr(record, "duration", None),
            # logger name distinguishes SDK records from application records
            "logger": record.name,
        }

        # Optional trace support.
        if hasattr(record, "trace"):
            payload["trace"] = record.trace

        # Attach exception details when present on the record.
        exc = record.exc_info if hasattr(record, "exc_info") else None
        if exc:
            payload["exception"] = {
                "type": exc[0].__name__ if exc[0] else None,
                "message": str(exc[1]) if exc[1] else None,
                "traceback": traceback.format_exception(*exc),
            }

        compact = {key: value for key, value in payload.items() if value is not None}
        return json.dumps(compact, ensure_ascii=False)
52
+
53
+
54
def get_default_formatter() -> logging.Formatter:
    """Return the default JSON formatter used for SDK gRPC logs."""
    return GrpcJSONFormatter()
57
+
58
+
59
def setup_logging(
        level: str = "INFO",
        format_string: Optional[str] = None,
        enable_grpc_logging: bool = True,
        log_request_payload: bool = False,
        log_response_payload: bool = False,
        handler: Optional[logging.Handler] = None,
        use_json_format: bool = True
):
    """Configure the SDK's dedicated logger.

    Args:
        level: log level name (DEBUG, INFO, WARNING, ERROR, CRITICAL).
        format_string: custom format string (only used when use_json_format=False).
        enable_grpc_logging: whether gRPC request logging is enabled.
        log_request_payload: whether request payloads should be recorded.
        log_response_payload: whether response payloads should be recorded.
        handler: custom log handler (defaults to a stdout StreamHandler).
        use_json_format: emit JSON-formatted records (default True).
    """
    # Resolve and apply the log level (unknown names fall back to INFO).
    log_level = getattr(logging, level.upper(), logging.INFO)
    logger.setLevel(log_level)

    # Clear existing handlers (only on the SDK's own logger, not the root).
    logger.handlers.clear()

    # Create the default handler if none supplied.
    if handler is None:
        handler = logging.StreamHandler(sys.stdout)

    # Pick the formatter: JSON by default, plain text otherwise.
    if use_json_format:
        formatter = get_default_formatter()
    else:
        if format_string is None:
            format_string = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
        formatter = logging.Formatter(format_string, datefmt="%Y-%m-%d %H:%M:%S")

    handler.setFormatter(formatter)

    # Attach the handler.
    logger.addHandler(handler)

    # Store the gRPC logging switches as attributes on the logger object so
    # GrpcRequestLogger instances can read them back later.
    logger.grpc_logging_enabled = enable_grpc_logging
    logger.log_request_payload = log_request_payload
    logger.log_response_payload = log_response_payload

    # Keep SDK logs independent: do not propagate to the root logger.
    logger.propagate = False

    # Emit an initialization record (JSON-formatted) when gRPC logging is on.
    if enable_grpc_logging:
        log_record = logging.LogRecord(
            name=logger.name,
            level=logging.INFO,
            pathname="",
            lineno=0,
            msg="📡 文件中心客户端 gRPC 日志已初始化",
            args=(),
            exc_info=None
        )
        log_record.log_type = "info"
        log_record.data = {
            "level": level,
            "grpc_logging": enable_grpc_logging,
            "json_format": use_json_format
        }
        logger.handle(log_record)
130
+
131
+
132
def get_logger() -> logging.Logger:
    """Return the module-wide SDK gRPC logger instance."""
    return logger
135
+
136
+
137
class GrpcRequestLogger:
    """Emits structured start/end log records for gRPC calls.

    Reads its configuration (whether gRPC logging is enabled and whether
    request/response payloads should be recorded) from attributes that
    setup_logging() stores on the supplied logger.
    """

    def __init__(self, logger: logging.Logger):
        self.logger = logger
        self.enable_grpc_logging = getattr(logger, 'grpc_logging_enabled', True)
        self.log_request_payload = getattr(logger, 'log_request_payload', False)
        self.log_response_payload = getattr(logger, 'log_response_payload', False)

    def log_request_start(self, method_name: str, request_id: str, metadata: Dict[str, Any],
                          request_payload: Any = None):
        """Log the start of a gRPC request (method, caller metadata, payload)."""
        if not self.enable_grpc_logging:
            return

        # Extract the interesting caller metadata, dropping absent keys.
        user_info = {}
        if metadata:
            metadata_dict = dict(metadata) if isinstance(metadata, list) else metadata
            user_info = {
                'org_id': metadata_dict.get('x-org-id'),
                'user_id': metadata_dict.get('x-user-id'),
                'client_ip': metadata_dict.get('x-client-ip'),
                'client_version': metadata_dict.get('x-client-version')
            }
            user_info = {k: v for k, v in user_info.items() if v is not None}

        # Build the log record.
        log_record = logging.LogRecord(
            name=self.logger.name,
            level=logging.INFO,
            pathname="",
            lineno=0,
            msg=f"📤 gRPC 请求: {method_name}",
            args=(),
            exc_info=None
        )
        log_record.log_type = "request"
        log_record.uri = method_name
        log_record.request_id = request_id
        log_record.data = user_info

        # BUGFIX: honor the log_request_payload switch. It was read in
        # __init__ but never consulted, so request payloads were always
        # logged — now they are only recorded when explicitly enabled
        # (mirroring how response payloads are gated).
        if self.log_request_payload and request_payload is not None:
            if isinstance(request_payload, dict):
                # Already a dict: merge directly into the data field.
                log_record.data.update(request_payload)
            else:
                # Anything else goes under an explicit payload key.
                log_record.data["payload"] = request_payload

        self.logger.handle(log_record)

    def log_request_end(self, method_name: str, request_id: str, duration_ms: float,
                        response_payload: Any = None, error: Exception = None):
        """Log the end of a gRPC request: error record or success record."""
        if not self.enable_grpc_logging:
            return

        if error:
            # Error record, including the exception's traceback.
            log_record = logging.LogRecord(
                name=self.logger.name,
                level=logging.ERROR,
                pathname="",
                lineno=0,
                msg=f"❌ gRPC 错误: {method_name} - {str(error)}",
                args=(),
                exc_info=(type(error), error, error.__traceback__) if error else None
            )
            log_record.log_type = "error"
            log_record.uri = method_name
            log_record.request_id = request_id
            log_record.duration = duration_ms
            log_record.data = {"error": str(error)}

            self.logger.handle(log_record)
        else:
            # Success record.
            log_record = logging.LogRecord(
                name=self.logger.name,
                level=logging.INFO,
                pathname="",
                lineno=0,
                msg=f"✅ gRPC 响应: {method_name}",
                args=(),
                exc_info=None
            )
            log_record.log_type = "response"
            log_record.uri = method_name
            log_record.request_id = request_id
            log_record.duration = duration_ms

            # Record the response payload only when enabled.
            if self.log_response_payload and response_payload is not None:
                log_record.data = {"response_payload": self._safe_serialize(response_payload)}

            self.logger.handle(log_record)

    def _safe_serialize(self, obj: Any) -> str:
        """Serialize an object defensively, avoiding sensitive-data leaks."""
        try:
            if hasattr(obj, 'SerializeToString'):
                # protobuf message: log only its type name
                return f"<Proto object: {type(obj).__name__}>"
            elif hasattr(obj, '__dict__'):
                # generic object: log only its type name
                return f"<Object: {type(obj).__name__}>"
            else:
                # basic types: stringify with a length cap
                return str(obj)[:200]
        except Exception:
            return f"<Unserializable: {type(obj).__name__}>"
252
+
253
+
254
@contextmanager
def grpc_request_context(method_name: str, request_id: str, metadata: Dict[str, Any],
                         request_payload: Any = None):
    """Context manager that logs the start and outcome of a gRPC request.

    Yields the GrpcRequestLogger so callers can emit extra records. On normal
    exit a success record with the elapsed duration is written; on exception
    an error record is written and the exception re-raised.
    """
    request_logger = GrpcRequestLogger(get_logger())
    started = time.time()

    try:
        request_logger.log_request_start(method_name, request_id, metadata, request_payload)
        yield request_logger

    except Exception as exc:
        elapsed_ms = (time.time() - started) * 1000
        request_logger.log_request_end(method_name, request_id, elapsed_ms, error=exc)
        raise

    else:
        elapsed_ms = (time.time() - started) * 1000
        request_logger.log_request_end(method_name, request_id, elapsed_ms)
276
+
277
+
278
def log_grpc_call(method_name: str):
    """Decorator that logs start and end of a gRPC call with its real duration.

    Works for both sync and async callables (the wrapper is chosen by
    inspecting the decorated function). request_id and metadata are read from
    the call's keyword arguments.
    """
    def decorator(func):
        # BUGFIX: the previous implementation wrapped the call in
        # grpc_request_context AND called log_request_end itself with a
        # hard-coded 0 ms duration, so every successful call produced two
        # "response" records (one with a bogus duration). Each wrapper now
        # drives GrpcRequestLogger directly and emits exactly one end record
        # with the measured duration.
        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            request_id = kwargs.get('request_id', 'unknown')
            metadata = kwargs.get('metadata', {})
            request_logger = GrpcRequestLogger(get_logger())
            start_time = time.time()
            request_logger.log_request_start(method_name, request_id, metadata)
            try:
                result = func(*args, **kwargs)
            except Exception as e:
                duration_ms = (time.time() - start_time) * 1000
                request_logger.log_request_end(method_name, request_id, duration_ms, error=e)
                raise
            duration_ms = (time.time() - start_time) * 1000
            request_logger.log_request_end(method_name, request_id, duration_ms, response_payload=result)
            return result

        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            request_id = kwargs.get('request_id', 'unknown')
            metadata = kwargs.get('metadata', {})
            request_logger = GrpcRequestLogger(get_logger())
            start_time = time.time()
            request_logger.log_request_start(method_name, request_id, metadata)
            try:
                result = await func(*args, **kwargs)
            except Exception as e:
                duration_ms = (time.time() - start_time) * 1000
                request_logger.log_request_end(method_name, request_id, duration_ms, error=e)
                raise
            duration_ms = (time.time() - start_time) * 1000
            request_logger.log_request_end(method_name, request_id, duration_ms, response_payload=result)
            return result

        # Pick the wrapper matching the decorated function's flavor.
        import asyncio
        if asyncio.iscoroutinefunction(func):
            return async_wrapper
        else:
            return sync_wrapper

    return decorator
311
+
312
+
313
# Default initialization on import; callers can reconfigure later by calling
# setup_logging() themselves (it clears and replaces these handlers).
if not logger.handlers:
    setup_logging()