tamar-file-hub-client 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- file_hub_client/rpc/interceptors.py +35 -4
- file_hub_client/services/file/async_blob_service.py +19 -9
- file_hub_client/services/file/sync_blob_service.py +21 -12
- file_hub_client/utils/logging.py +4 -0
- file_hub_client/utils/retry.py +46 -4
- {tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/METADATA +1 -1
- {tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/RECORD +9 -9
- {tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/WHEEL +0 -0
- {tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/top_level.txt +0 -0
file_hub_client/rpc/interceptors.py
CHANGED

@@ -86,8 +86,29 @@ def _sanitize_request_data(data: Any, max_string_length: int = 200, max_binary_p
         # Recursively process dicts
         result = {}
         for key, value in data.items():
-            #
-            if key.lower() in ['
+            # Check whether this field needs special handling
+            if key.lower() in ['operations'] and isinstance(value, list):
+                # For the operations field, summarise the operation types and counts
+                if len(value) > 5:
+                    ops_summary = []
+                    # Count the operation types
+                    op_types = {}
+                    for op in value:
+                        if isinstance(op, dict):
+                            for op_type in ['edit', 'create', 'update', 'delete', 'clear']:
+                                if op_type in op:
+                                    op_types[op_type] = op_types.get(op_type, 0) + 1
+
+                    # Show the first 3 operations
+                    for i in range(min(3, len(value))):
+                        ops_summary.append(_sanitize_request_data(value[i], max_string_length, max_binary_preview))
+
+                    # Append the summary statistics
+                    ops_summary.append(f"... 总计 {len(value)} 个操作: {', '.join(f'{k}={v}' for k, v in op_types.items())}")
+                    result[key] = ops_summary
+                else:
+                    result[key] = _sanitize_request_data(value, max_string_length, max_binary_preview)
+            elif key.lower() in ['content', 'data', 'file', 'file_content', 'binary', 'blob', 'bytes', 'image', 'attachment']:
                 if isinstance(value, (bytes, bytearray)):
                     # Binary content: show its length and a short preview
                     preview = base64.b64encode(value[:max_binary_preview]).decode('utf-8')
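The first interceptors.py hunk teaches the request sanitizer to summarise long `operations` lists instead of logging every entry: it tallies how many operations of each type appear, keeps the first three operations verbatim, and appends a one-line summary. Below is a minimal sketch of that behaviour; the recursive sanitisation of nested values is omitted, the sample payload is made up, and the summary string is rendered in English here while the SDK logs it in Chinese.

```python
from typing import Any


def summarize_operations(ops: list, preview: int = 3) -> list:
    """Summarise a long operations list the way the new sanitizer does."""
    if len(ops) <= 5:
        return ops                       # short lists are logged as-is
    op_types: dict = {}
    for op in ops:
        if isinstance(op, dict):
            for op_type in ['edit', 'create', 'update', 'delete', 'clear']:
                if op_type in op:
                    op_types[op_type] = op_types.get(op_type, 0) + 1
    summary: list = list(ops[:preview])  # first few operations verbatim
    summary.append(
        f"... {len(ops)} operations in total: "
        + ", ".join(f"{k}={v}" for k, v in op_types.items())
    )
    return summary


ops = [{"edit": {"cell": i}} for i in range(8)] + [{"delete": {"row": 1}}]
print(summarize_operations(ops))
# [{'edit': {'cell': 0}}, {'edit': {'cell': 1}}, {'edit': {'cell': 2}},
#  '... 9 operations in total: edit=8, delete=1']
```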
@@ -106,8 +127,18 @@ def _sanitize_request_data(data: Any, max_string_length: int = 200, max_binary_p
             result[key] = _sanitize_request_data(value, max_string_length, max_binary_preview)
         return result
     elif isinstance(data, list):
-        #
-
+        # Recursively process lists, capping their length so logs stay short
+        max_list_items = 10  # show at most 10 items
+        if len(data) > max_list_items:
+            # Show the first 5 and the last 5 items
+            preview_items = (
+                    [_sanitize_request_data(item, max_string_length, max_binary_preview) for item in data[:5]] +
+                    [f"... {len(data) - max_list_items} more items ..."] +
+                    [_sanitize_request_data(item, max_string_length, max_binary_preview) for item in data[-5:]]
+            )
+            return preview_items
+        else:
+            return [_sanitize_request_data(item, max_string_length, max_binary_preview) for item in data]
     elif isinstance(data, tuple):
         # Recursively process tuples
         return tuple(_sanitize_request_data(item, max_string_length, max_binary_preview) for item in data)
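The second hunk caps plain lists the same way: anything longer than ten items is logged as the first five entries, a "... N more items ..." marker, and the last five entries. A quick sketch of the resulting shape:

```python
def preview_list(data: list, max_items: int = 10) -> list:
    """Mirror the new list handling: head, '... N more items ...', tail."""
    if len(data) <= max_items:
        return list(data)
    return data[:5] + [f"... {len(data) - max_items} more items ..."] + data[-5:]


print(preview_list(list(range(23))))
# [0, 1, 2, 3, 4, '... 13 more items ...', 18, 19, 20, 21, 22]
```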
file_hub_client/services/file/async_blob_service.py
CHANGED

@@ -401,12 +401,12 @@ class AsyncBlobService(BaseFileService):
         # Parameter validation: either file or url must be provided
         if file is None and not url:
             raise ValidationError("必须提供 file 或 url 参数之一")
-
+
         # If a URL was provided, download the file first
         if url:
             # Download the file into memory
             downloaded_content = await self.http_downloader.download(url)
-
+
             # If no file name was given, extract one from the URL
             if not file_name:
                 from urllib.parse import urlparse
@@ -414,22 +414,32 @@ class AsyncBlobService(BaseFileService):
                 parsed_url = urlparse(url)
                 url_path = PathLib(parsed_url.path)
                 file_name = url_path.name if url_path.name else f"download_{hashlib.md5(url.encode()).hexdigest()[:8]}"
-
+
             # Use the downloaded content as the file parameter
             file = downloaded_content
-
+
             # Extract the file info (bytes yield a default MIME type; it is recomputed from the file name below)
-
-
+            _, content, file_size, _, _, file_hash = self._extract_file_info(file)
+
             # file_name was already set above (either user-supplied or extracted from the URL)
             extracted_file_name = file_name
-
+
             # Derive the file type and MIME type from the file name
-            file_type = Path(extracted_file_name).suffix.lstrip('.').lower() if Path(
+            file_type = Path(extracted_file_name).suffix.lstrip('.').lower() if Path(
+                extracted_file_name).suffix else 'dat'
             mime_type = get_file_mime_type(Path(extracted_file_name))
         else:
             # Parse the file parameter and extract the file info
-            extracted_file_name, content, file_size,
+            extracted_file_name, content, file_size, extract_mime_type, extract_file_type, file_hash = self._extract_file_info(
+                file)
+            if file_name:
+                extracted_file_name = file_name
+                mime_type = get_file_mime_type(file_name)
+                file_type = Path(extracted_file_name).suffix.lstrip('.').lower() if Path(
+                    extracted_file_name).suffix else 'dat'
+            else:
+                mime_type = extract_mime_type
+                file_type = extract_file_type
 
         # Automatically choose the upload mode based on file size
         if mode == UploadMode.NORMAL:
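The upload path now resolves the file name, file type, and MIME type in a fixed order (the sync service below receives the same change): an explicit `file_name` wins, a name derived from the URL path (or an md5-based fallback) is used for URL uploads, the extension yields the file type with `'dat'` as the default, and the MIME type is recomputed from the resulting name rather than taken from the raw bytes. A simplified, self-contained sketch of that resolution, using the standard library's `mimetypes.guess_type` as a stand-in for the SDK's `get_file_mime_type`:

```python
import hashlib
import mimetypes
from pathlib import Path
from typing import Optional, Tuple
from urllib.parse import urlparse


def resolve_name_and_type(url: Optional[str], file_name: Optional[str]) -> Tuple[str, str, str]:
    """Resolve (file_name, file_type, mime_type) the way the new upload path does."""
    if url and not file_name:
        # Derive the name from the URL path, falling back to an md5-based name
        name = Path(urlparse(url).path).name
        file_name = name or f"download_{hashlib.md5(url.encode()).hexdigest()[:8]}"
    assert file_name is not None  # simplified: the real code extracts it from the file object
    file_type = Path(file_name).suffix.lstrip('.').lower() if Path(file_name).suffix else 'dat'
    mime_type = mimetypes.guess_type(file_name)[0] or 'application/octet-stream'
    return file_name, file_type, mime_type


print(resolve_name_and_type("https://example.com/a/report.PDF?x=1", None))
# ('report.PDF', 'pdf', 'application/pdf')
print(resolve_name_and_type(None, "dump"))
# ('dump', 'dat', 'application/octet-stream')
```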
file_hub_client/services/file/sync_blob_service.py
CHANGED

@@ -392,19 +392,19 @@ class SyncBlobService(BaseFileService):
 
         Returns:
             File information
-
+
         Note:
             Either the file or url parameter must be provided
         """
         # Parameter validation: either file or url must be provided
         if file is None and not url:
             raise ValidationError("必须提供 file 或 url 参数之一")
-
+
         # If a URL was provided, download the file first
         if url:
             # Download the file into memory
             downloaded_content = self.http_downloader.download(url)
-
+
             # If no file name was given, extract one from the URL
             if not file_name:
                 from urllib.parse import urlparse
@@ -412,24 +412,32 @@ class SyncBlobService(BaseFileService):
                 parsed_url = urlparse(url)
                 url_path = PathLib(parsed_url.path)
                 file_name = url_path.name if url_path.name else f"download_{hashlib.md5(url.encode()).hexdigest()[:8]}"
-
+
             # Use the downloaded content as the file parameter
             file = downloaded_content
-
+
             # Extract the file info (bytes yield a default MIME type; it is recomputed from the file name below)
-
-
+            _, content, file_size, _, _, file_hash = self._extract_file_info(file)
+
             # file_name was already set above (either user-supplied or extracted from the URL)
             extracted_file_name = file_name
-
+
             # Derive the file type and MIME type from the file name
-            file_type = Path(extracted_file_name).suffix.lstrip('.').lower() if Path(
+            file_type = Path(extracted_file_name).suffix.lstrip('.').lower() if Path(
+                extracted_file_name).suffix else 'dat'
             mime_type = get_file_mime_type(Path(extracted_file_name))
         else:
             # Parse the file parameter and extract the file info
-            extracted_file_name, content, file_size,
-
-
+            extracted_file_name, content, file_size, extract_mime_type, extract_file_type, file_hash = self._extract_file_info(
+                file)
+            if file_name:
+                extracted_file_name = file_name
+                mime_type = get_file_mime_type(file_name)
+                file_type = Path(extracted_file_name).suffix.lstrip('.').lower() if Path(
+                    extracted_file_name).suffix else 'dat'
+            else:
+                mime_type = extract_mime_type
+                file_type = extract_file_type
 
         # Automatically choose the upload mode based on file size
         if mode == UploadMode.NORMAL:
@@ -503,6 +511,7 @@ class SyncBlobService(BaseFileService):
 
         Args:
             file_id: the file ID
+            expire_seconds: expiry time in seconds
             request_id: request ID (optional; auto-generated if not provided)
             **metadata: extra metadata (e.g. x-org-id, x-user-id, etc.)
 
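This docstring change documents an `expire_seconds` argument alongside `file_id`, so the URL-issuing call apparently lets callers bound how long the returned link stays valid. The method itself is not shown in this diff, so the following stub is purely illustrative: the name `gen_file_url`, the 3600-second default, and the return shape are assumptions; only the parameter names come from the docstring.

```python
import uuid
from typing import Optional


def gen_file_url(file_id: str,
                 expire_seconds: int = 3600,       # assumed default, not from the diff
                 request_id: Optional[str] = None,
                 **metadata) -> dict:
    """Hypothetical stand-in mirroring only the documented parameters."""
    request_id = request_id or uuid.uuid4().hex    # auto-generated when omitted
    return {
        "file_id": file_id,
        "expires_in": expire_seconds,
        "request_id": request_id,
        "metadata": metadata,                      # e.g. x-org-id, x-user-id
    }


print(gen_file_url("f_123", expire_seconds=600, **{"x-org-id": "org_1"}))
```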
file_hub_client/utils/logging.py
CHANGED

@@ -109,6 +109,10 @@ def setup_logging(
     # Prevent propagation to the root logger - keep SDK logging independent
     logger.propagate = False
 
+    # Isolate the whole file_hub_client package so that no submodule's logs propagate
+    parent_logger = logging.getLogger('file_hub_client')
+    parent_logger.propagate = False
+
     # Initialise logging (JSON format)
     if enable_grpc_logging:
         log_record = logging.LogRecord(
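The logging.py change extends the existing isolation: besides the logger configured by `setup_logging`, the package-level `file_hub_client` logger now also has `propagate = False`, so records from any `file_hub_client.*` child logger stop at the SDK's handlers instead of reaching the application's root handlers. A self-contained illustration using only the standard `logging` module (the handlers and formats below are illustrative, not the SDK's actual JSON setup):

```python
import logging
import sys

# Application-side root logger with its own handler.
logging.basicConfig(stream=sys.stdout, format="ROOT: %(name)s %(message)s")

# SDK-side: isolate the whole package, as the new code does.
sdk_logger = logging.getLogger("file_hub_client")
sdk_logger.propagate = False                      # records stop here
sdk_handler = logging.StreamHandler(sys.stdout)
sdk_handler.setFormatter(logging.Formatter("SDK: %(name)s %(message)s"))
sdk_logger.addHandler(sdk_handler)
sdk_logger.setLevel(logging.INFO)

# A child logger, e.g. the gRPC interceptor module.
child = logging.getLogger("file_hub_client.rpc.interceptors")
child.info("request sent")                        # emitted once, via the SDK handler only
logging.getLogger("app").warning("app message")   # still handled by the root logger
```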
file_hub_client/utils/retry.py
CHANGED

@@ -67,10 +67,20 @@ def retry_with_backoff(
                         raise
 
                     if attempt < max_retries:
+                        # Extract more detailed error information
+                        error_details = str(e)
+                        if hasattr(e, 'code') and hasattr(e, 'details'):
+                            # gRPC error
+                            error_details = f"gRPC {e.code().name}: {e.details()}"
+                        elif hasattr(e, 'response') and hasattr(e.response, 'status_code'):
+                            # HTTP error
+                            error_details = f"HTTP {e.response.status_code}: {str(e)}"
+
                         logger.warning(
                             f"🔄 触发重试 | 操作: {func.__name__} | "
                             f"尝试: {attempt + 1}/{max_retries + 1} | "
-                            f"
+                            f"错误类型: {type(e).__name__} | "
+                            f"错误详情: {error_details} | "
                             f"延迟: {delay:.1f}秒"
                         )
                         await asyncio.sleep(delay)
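In both the async and sync retry wrappers, the warning now includes the exception type and a duck-typed detail string: anything exposing `code()`/`details()` is formatted as a gRPC error, anything carrying `response.status_code` as an HTTP error, and everything else falls back to `str(e)`. A standalone sketch of that extraction with stub exception classes; the stubs only imitate the attribute shapes of real gRPC and HTTP client errors.

```python
class FakeGrpcError(Exception):
    """Stub mimicking grpc.RpcError's code()/details() accessors."""
    class _Code:
        name = "UNAVAILABLE"
    def code(self):
        return self._Code()
    def details(self):
        return "connection reset"


class FakeHttpError(Exception):
    """Stub mimicking an HTTP client error carrying a response object."""
    class _Resp:
        status_code = 503
    response = _Resp()


def describe(e: Exception) -> str:
    error_details = str(e)
    if hasattr(e, 'code') and hasattr(e, 'details'):
        error_details = f"gRPC {e.code().name}: {e.details()}"       # gRPC-style error
    elif hasattr(e, 'response') and hasattr(e.response, 'status_code'):
        error_details = f"HTTP {e.response.status_code}: {str(e)}"   # HTTP-style error
    return error_details


print(describe(FakeGrpcError("rpc failed")))   # gRPC UNAVAILABLE: connection reset
print(describe(FakeHttpError("server busy")))  # HTTP 503: server busy
print(describe(ValueError("bad input")))       # bad input
```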
@@ -109,10 +119,20 @@ def retry_with_backoff(
                         raise
 
                     if attempt < max_retries:
+                        # Extract more detailed error information
+                        error_details = str(e)
+                        if hasattr(e, 'code') and hasattr(e, 'details'):
+                            # gRPC error
+                            error_details = f"gRPC {e.code().name}: {e.details()}"
+                        elif hasattr(e, 'response') and hasattr(e.response, 'status_code'):
+                            # HTTP error
+                            error_details = f"HTTP {e.response.status_code}: {str(e)}"
+
                         logger.warning(
                             f"🔄 触发重试 | 操作: {func.__name__} | "
                             f"尝试: {attempt + 1}/{max_retries + 1} | "
-                            f"
+                            f"错误类型: {type(e).__name__} | "
+                            f"错误详情: {error_details} | "
                             f"延迟: {delay:.1f}秒"
                         )
                         time.sleep(delay)
@@ -162,10 +182,21 @@ def retry_on_lock_conflict(
                     if _is_lock_conflict(result):
                         last_result = result
                         if attempt < max_retries:
+                            # Extract the conflict details
+                            conflict_details = "lock_conflict"
+                            if isinstance(result, dict):
+                                conflict_info = result.get('conflict_info', {})
+                                if conflict_info:
+                                    conflict_details = f"{conflict_info.get('conflict_type', 'lock_conflict')}"
+                                    if 'resolution_suggestion' in conflict_info:
+                                        conflict_details += f" - {conflict_info['resolution_suggestion']}"
+                                if 'error_message' in result:
+                                    conflict_details += f" - {result['error_message']}"
+
                             logger.warning(
                                 f"🔒 锁冲突重试 | 操作: {func.__name__} | "
                                 f"尝试: {attempt + 1}/{max_retries + 1} | "
-                                f"
+                                f"冲突详情: {conflict_details} | "
                                 f"延迟: {delay:.1f}秒"
                             )
                             await asyncio.sleep(delay)
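`retry_on_lock_conflict` (again in both variants) now logs a description of the conflicting result: the `conflict_type` and optional `resolution_suggestion` from `conflict_info`, plus any top-level `error_message`. A small sketch against a made-up conflict payload; only the key names come from the hunk.

```python
def describe_conflict(result) -> str:
    """Build the conflict summary string the way the new retry code does."""
    conflict_details = "lock_conflict"
    if isinstance(result, dict):
        conflict_info = result.get('conflict_info', {})
        if conflict_info:
            conflict_details = f"{conflict_info.get('conflict_type', 'lock_conflict')}"
            if 'resolution_suggestion' in conflict_info:
                conflict_details += f" - {conflict_info['resolution_suggestion']}"
        if 'error_message' in result:
            conflict_details += f" - {result['error_message']}"
    return conflict_details


result = {
    "conflict_info": {
        "conflict_type": "row_locked",
        "resolution_suggestion": "retry after the holder releases the lock",
    },
    "error_message": "row 42 is locked by another session",
}
print(describe_conflict(result))
# row_locked - retry after the holder releases the lock - row 42 is locked by another session
```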
@@ -196,10 +227,21 @@ def retry_on_lock_conflict(
                     if _is_lock_conflict(result):
                         last_result = result
                         if attempt < max_retries:
+                            # Extract the conflict details
+                            conflict_details = "lock_conflict"
+                            if isinstance(result, dict):
+                                conflict_info = result.get('conflict_info', {})
+                                if conflict_info:
+                                    conflict_details = f"{conflict_info.get('conflict_type', 'lock_conflict')}"
+                                    if 'resolution_suggestion' in conflict_info:
+                                        conflict_details += f" - {conflict_info['resolution_suggestion']}"
+                                if 'error_message' in result:
+                                    conflict_details += f" - {result['error_message']}"
+
                             logger.warning(
                                 f"🔒 锁冲突重试 | 操作: {func.__name__} | "
                                 f"尝试: {attempt + 1}/{max_retries + 1} | "
-                                f"
+                                f"冲突详情: {conflict_details} | "
                                 f"延迟: {delay:.1f}秒"
                             )
                             time.sleep(delay)
{tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/RECORD
RENAMED

@@ -10,7 +10,7 @@ file_hub_client/errors/exceptions.py,sha256=0RDkAv4t2GeVefm1H5eaXGxd6Od-drXzPIVJ
 file_hub_client/rpc/__init__.py,sha256=dhRt0D_US3j2Dam47nhd23RowVZ-nvVowhPsNFf4GZo,204
 file_hub_client/rpc/async_client.py,sha256=vMi4Aqc_Ajg2sdOgXHiLjP_4FkPtg5tDpSDPK6hg-cM,14038
 file_hub_client/rpc/generate_grpc.py,sha256=opzstxWdW7vqR9OxrgUCSUkZe8IqgcOdruqWGIzCneI,2158
-file_hub_client/rpc/interceptors.py,sha256=
+file_hub_client/rpc/interceptors.py,sha256=UeQ8u-olMiYwScXJrlzqPS-odGomH-h3xJqOwPznRUo,22071
 file_hub_client/rpc/sync_client.py,sha256=ZWBmP-TU8eeAfl2lV8sLu9nsxo_aOPAVmt9ww_T8Hwg,14054
 file_hub_client/rpc/gen/__init__.py,sha256=NJLqr9ezUXeOyy1J0sMPn3Kl_8IyGw7GAzzzewO3MIw,45
 file_hub_client/rpc/gen/file_service_pb2.py,sha256=7zEL8RFMp9gcigEqWnZFREX8ylljfQzBHwg43SbKt-A,8354

@@ -29,10 +29,10 @@ file_hub_client/schemas/folder.py,sha256=D7UFsLCou-7CCXCQvuRObaBQEGmETsm1cgGOG1c
 file_hub_client/schemas/taple.py,sha256=qoH458xMAdYg8_jKdYqHJy5-gZ1ZFzOAkSPFROQuhQI,18172
 file_hub_client/services/__init__.py,sha256=yh5mir0dKB_LtJMk2hTpQI9WSlguaxtVD2KomMnzxdM,514
 file_hub_client/services/file/__init__.py,sha256=aJygo_AzYk5NN-ezp-a9YlugJ82wVIP9e5e54fl0UsI,342
-file_hub_client/services/file/async_blob_service.py,sha256=
+file_hub_client/services/file/async_blob_service.py,sha256=x4Xmxc5FqeiYZ7yKb5_fDjJhgxpq79g7dJuGbHIWMow,20484
 file_hub_client/services/file/async_file_service.py,sha256=QxVfwPoJe_oj8t7EOLHMQF3PQf4E1-HctQR7yvY9D3g,8585
 file_hub_client/services/file/base_file_service.py,sha256=wTxplrTk9klfwIHOPfTL0TQd6gX4nEmkYtIhpiZ3GVo,4791
-file_hub_client/services/file/sync_blob_service.py,sha256=
+file_hub_client/services/file/sync_blob_service.py,sha256=pnUciPJ0oOLOLK1WDvigJ5gdL5lfCDw-qJX1WMGZvew,20265
 file_hub_client/services/file/sync_file_service.py,sha256=i1pLCcGNWMlWQfAW4dlhLsEiV3oc1jXKmKax35k0CGw,8439
 file_hub_client/services/folder/__init__.py,sha256=vGbMOlNiEBdnWZB1xE74RJtoroI28hKHCWfQV1GqKQc,210
 file_hub_client/services/folder/async_folder_service.py,sha256=uFEmtW8EXYvaKYT2JCitWbdTGR1EtHlx_eBN5P3JUZg,7293

@@ -47,11 +47,11 @@ file_hub_client/utils/converter.py,sha256=TX69Bqk-PwNdv2hYQ07_tW6HQnQycHcJkGeRns
 file_hub_client/utils/download_helper.py,sha256=Mc8TQSWjHxIglJMkKlGy9r3LZe8e_Mwe6D3sfn6IOnY,13338
 file_hub_client/utils/file_utils.py,sha256=Ly8R5KJS_3lbgJxNZkc4sSBKuGgn-fYeh17GEY4pyy8,4359
 file_hub_client/utils/idempotency.py,sha256=zuXDlpAc9VTkTsarlnkO0VuJ77yON6j1TX0GvL9Xd9k,6029
-file_hub_client/utils/logging.py,sha256=
-file_hub_client/utils/retry.py,sha256=
+file_hub_client/utils/logging.py,sha256=IxcvWkA0G9s9BMiXIeFAdJX5G-Lc5-JFlS2yxOX1Swo,11741
+file_hub_client/utils/retry.py,sha256=MyEAYHEGljoHvVs4Kh1hX_Y3iDGcppVCSUvVkNGbC28,15609
 file_hub_client/utils/smart_retry.py,sha256=RjBhyG6SNDfMXxNxKU_qayWDD6Ihp7ow6_BPjhgflM0,16465
 file_hub_client/utils/upload_helper.py,sha256=gEtn9OXVJiGUpVev_fqrDnRQ6AFiiP9goLzFrVpqXmU,22569
-tamar_file_hub_client-0.0.
-tamar_file_hub_client-0.0.
-tamar_file_hub_client-0.0.
-tamar_file_hub_client-0.0.
+tamar_file_hub_client-0.0.5.dist-info/METADATA,sha256=Ri0jUnjTvBxHIBNVmp1pc-7tSygTk3j_ff11O-Hxgxg,64873
+tamar_file_hub_client-0.0.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+tamar_file_hub_client-0.0.5.dist-info/top_level.txt,sha256=9wcR7hyAJQdJg_kuH6WR3nmpJ8O-j8aJNK8f_kcFy6U,16
+tamar_file_hub_client-0.0.5.dist-info/RECORD,,
{tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/WHEEL
RENAMED
File without changes

{tamar_file_hub_client-0.0.3.dist-info → tamar_file_hub_client-0.0.5.dist-info}/top_level.txt
RENAMED
File without changes