tamar_file_hub_client-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. file_hub_client/__init__.py +88 -0
  2. file_hub_client/client.py +414 -0
  3. file_hub_client/enums/__init__.py +12 -0
  4. file_hub_client/enums/export_format.py +16 -0
  5. file_hub_client/enums/role.py +7 -0
  6. file_hub_client/enums/upload_mode.py +11 -0
  7. file_hub_client/errors/__init__.py +30 -0
  8. file_hub_client/errors/exceptions.py +93 -0
  9. file_hub_client/py.typed +1 -0
  10. file_hub_client/rpc/__init__.py +10 -0
  11. file_hub_client/rpc/async_client.py +312 -0
  12. file_hub_client/rpc/gen/__init__.py +1 -0
  13. file_hub_client/rpc/gen/file_service_pb2.py +74 -0
  14. file_hub_client/rpc/gen/file_service_pb2_grpc.py +533 -0
  15. file_hub_client/rpc/gen/folder_service_pb2.py +53 -0
  16. file_hub_client/rpc/gen/folder_service_pb2_grpc.py +269 -0
  17. file_hub_client/rpc/generate_grpc.py +76 -0
  18. file_hub_client/rpc/protos/file_service.proto +147 -0
  19. file_hub_client/rpc/protos/folder_service.proto +65 -0
  20. file_hub_client/rpc/sync_client.py +313 -0
  21. file_hub_client/schemas/__init__.py +43 -0
  22. file_hub_client/schemas/context.py +160 -0
  23. file_hub_client/schemas/file.py +89 -0
  24. file_hub_client/schemas/folder.py +29 -0
  25. file_hub_client/services/__init__.py +17 -0
  26. file_hub_client/services/file/__init__.py +14 -0
  27. file_hub_client/services/file/async_blob_service.py +482 -0
  28. file_hub_client/services/file/async_file_service.py +257 -0
  29. file_hub_client/services/file/base_file_service.py +103 -0
  30. file_hub_client/services/file/sync_blob_service.py +478 -0
  31. file_hub_client/services/file/sync_file_service.py +255 -0
  32. file_hub_client/services/folder/__init__.py +10 -0
  33. file_hub_client/services/folder/async_folder_service.py +206 -0
  34. file_hub_client/services/folder/sync_folder_service.py +205 -0
  35. file_hub_client/utils/__init__.py +48 -0
  36. file_hub_client/utils/converter.py +108 -0
  37. file_hub_client/utils/download_helper.py +355 -0
  38. file_hub_client/utils/file_utils.py +105 -0
  39. file_hub_client/utils/retry.py +69 -0
  40. file_hub_client/utils/upload_helper.py +527 -0
  41. tamar_file_hub_client-0.0.1.dist-info/METADATA +874 -0
  42. tamar_file_hub_client-0.0.1.dist-info/RECORD +44 -0
  43. tamar_file_hub_client-0.0.1.dist-info/WHEEL +5 -0
  44. tamar_file_hub_client-0.0.1.dist-info/top_level.txt +1 -0
file_hub_client/utils/upload_helper.py
@@ -0,0 +1,527 @@
+ """
+ Upload helper module.
+
+ Provides HTTP upload, resumable upload, and progress monitoring.
+ """
+ import os
+ import time
+ import asyncio
+ import aiohttp
+ import requests
+ from pathlib import Path
+ from typing import Union, BinaryIO, Optional, Callable, Dict, Any, AsyncGenerator
+ from dataclasses import dataclass
+ import hashlib
+
+ # Optional dependency: aiofiles enables non-blocking file I/O in the async uploader.
+ try:
+     import aiofiles
+     import aiofiles.os
+ except ImportError:
+     aiofiles = None
+
+
+ @dataclass
+ class UploadProgress:
+     """Upload progress information."""
+     total_size: int
+     uploaded_size: int
+     percentage: float
+     speed: float  # bytes per second
+     remaining_time: float  # seconds
+
+     @property
+     def is_completed(self) -> bool:
+         return self.uploaded_size >= self.total_size
+
+
+ class HttpUploader:
+     """HTTP uploader for synchronous uploads."""
+
+     def __init__(self, chunk_size: int = 1024 * 1024 * 5, total_retries: int = 3, retry_delay_seconds: int = 5):
+         self.chunk_size = chunk_size  # default 5 MB per chunk
+         self.total_retries = total_retries
+         self.retry_delay_seconds = retry_delay_seconds
+
+     def start_resumable_session(self, url: str, total_file_size: Optional[int] = None,
+                                 mine_type: Optional[str] = None) -> str:
+         """
+         Start a GCS resumable upload session and return the session URI.
+
+         Args:
+             url (str): Pre-signed GCS URL for initiating the upload.
+             total_file_size (Optional[int]): Total file size, if known.
+             mine_type (Optional[str]): Content-Type (MIME type) of the file.
+         Returns:
+             str: The session URI returned by GCS.
+         """
+         content_range_header = f"bytes */{total_file_size}" if total_file_size is not None else "bytes */*"
+         headers = {"Content-Range": content_range_header, "x-goog-resumable": "start"}
+         if mine_type is not None:
+             headers["Content-Type"] = mine_type
+
+         response = self._request("POST", url, headers=headers)
+         if response.status_code in [200, 201]:
+             session_uri = response.headers.get("Location")
+             if session_uri:
+                 print(f"Resumable upload session URI obtained: {session_uri}")
+                 return session_uri
+
+         raise Exception(f"Failed to start resumable session: {response.status_code} - {response.text}")
+
+     def check_uploaded_size(self, url: str, total_file_size: Optional[int] = None,
+                             mine_type: Optional[str] = None) -> int:
+         """
+         Query the current progress (bytes already uploaded) of a GCS resumable upload.
+
+         Args:
+             url (str): Session URI of the GCS resumable upload.
+             total_file_size (Optional[int]): Total file size, if known.
+                 Providing it lets GCS give a more precise answer; on a
+                 200 OK response this value is returned directly.
+             mine_type (Optional[str]): Content-Type (MIME type) of the file.
+         Returns:
+             int: Number of bytes already uploaded.
+                 - If the upload is complete (200 OK), returns total_file_size;
+                   if total_file_size is unknown, returns 0 (server-side
+                   verification is needed later).
+                 - If the upload is still in progress (308 Resume Incomplete),
+                   returns the number of bytes persisted so far.
+                 - If the query fails or the session is invalid, returns 0
+                   (start over; the session has expired).
+         """
+         # Build the Content-Range header; including the total size, when known, is more precise.
+         content_range_header = f"bytes */{total_file_size}" if total_file_size is not None else "bytes */*"
+         headers = {"Content-Range": content_range_header}
+         if mine_type is not None:
+             headers["Content-Type"] = mine_type
+
+         # Issue the query.
+         response = self._request("PUT", url, headers=headers)
+         if response.status_code == 200:
+             # 200 OK means the whole file has already been uploaded.
+             # Return the total size if known; otherwise read it from the
+             # 'x-goog-stored-content-length' response header.
+             if total_file_size:
+                 print(f"Current uploaded size: {total_file_size} bytes")
+                 return total_file_size
+             content_length = response.headers.get("x-goog-stored-content-length")
+             print(f"Current uploaded size: {content_length} bytes")
+             if content_length:
+                 return int(content_length)
+             return 0  # size unknown; defer to server-side verification
+         elif response.status_code == 308:
+             # 308 Resume Incomplete: GCS reports the persisted bytes in the
+             # Range header, e.g. "bytes=0-12345" means 12346 bytes are stored.
+             range_header = response.headers.get("Range")
+             if range_header and "-" in range_header:
+                 uploaded = int(range_header.rsplit("-", 1)[-1]) + 1
+                 print(f"Current uploaded size: {uploaded} bytes")
+                 return uploaded
+             # A 308 without a Range header means no bytes have been persisted.
+             print("Current uploaded size: 0 bytes")
+             return 0
+
+         raise Exception(f"Failed to check uploaded size: {response.status_code} - {response.text}")
+
+     def upload(
+             self,
+             url: str,
+             content: Optional[Union[bytes, BinaryIO, Path]] = None,
+             headers: Optional[Dict[str, str]] = None,
+             progress_callback: Optional[Callable[[UploadProgress], None]] = None,
+             total_size: Optional[int] = None,
+             is_resume: bool = False,
+     ) -> requests.Response:
+         """
+         Upload content to the given URL.
+
+         Args:
+             url: Upload URL.
+             content: File content (bytes, a binary stream, or a Path).
+             headers: Request headers.
+             progress_callback: Progress callback.
+             total_size: Total size in bytes; computed from content if omitted.
+             is_resume: Whether to resume an interrupted upload.
+         """
+         headers = headers or {}
+
+         # Determine the total size without generating chunks (avoids reading early).
+         final_total_size = self._calculate_total_size(content) if total_size is None else total_size
+
+         # When resuming, ask the server where to resume from.
+         if is_resume:
+             resume_from = self.check_uploaded_size(url, final_total_size, mine_type=headers.get("Content-Type"))
+         else:
+             resume_from = 0
+
+         # Generate chunks starting at resume_from.
+         chunks = self._generate_chunks(content, resume_from)
+
+         # For a resumed upload, set the Content-Range header.
+         if is_resume:
+             headers['Content-Range'] = f'bytes {resume_from}-{final_total_size - 1}/{final_total_size}'
+
+         return self._request(
+             method="PUT",
+             url=url,
+             headers=headers,
+             data=self._wrap_chunks_with_progress(chunks, final_total_size, resume_from, time.time(), progress_callback)
+         )
+
+
+     def _request(
+             self,
+             method: str,
+             url: str,
+             headers: Optional[Dict[str, str]] = None,
+             data: Optional[Union[bytes, BinaryIO, Any]] = None
+     ) -> requests.Response:
+         """Generic request helper with retries."""
+         for attempt in range(self.total_retries):
+             try:
+                 response = requests.request(method=method, url=url, headers=headers, data=data, timeout=None)
+                 response.raise_for_status()
+                 return response
+             except requests.exceptions.RequestException as e:
+                 if attempt < self.total_retries - 1:
+                     print(f"[{attempt + 1}/{self.total_retries}] Request failed, retrying in {self.retry_delay_seconds}s: {e}")
+                     time.sleep(self.retry_delay_seconds)
+                 else:
+                     print(f"[{self.total_retries}] Final retry failed: {e}")
+                     raise e
+
+     def _calculate_total_size(self, content: Union[bytes, Path, BinaryIO]) -> int:
+         if isinstance(content, bytes):
+             return len(content)
+         elif isinstance(content, Path):
+             if not content.exists():
+                 raise FileNotFoundError(f"File not found: {content}")
+             return content.stat().st_size
+         elif hasattr(content, 'seek') and hasattr(content, 'tell'):
+             current = content.tell()
+             content.seek(0, 2)  # seek to end to measure size
+             size = content.tell()
+             content.seek(current)  # restore original position
+             return size
+         else:
+             raise ValueError("Unsupported content type")
+
+     def _bytes_to_chunks(self, data: bytes, start: int = 0):
+         """Yield byte data in chunks."""
+         for i in range(start, len(data), self.chunk_size):
+             yield data[i:i + self.chunk_size]
+
+     def _file_to_chunks(self, file_path: Path, start: int = 0):
+         """Yield a file's contents in chunks."""
+         with open(file_path, 'rb') as f:
+             f.seek(start)
+             while True:
+                 chunk = f.read(self.chunk_size)
+                 if not chunk:
+                     break
+                 yield chunk
+
+     def _stream_to_chunks(self, stream: BinaryIO):
+         """Yield a stream's contents in chunks."""
+         while True:
+             chunk = stream.read(self.chunk_size)
+             if not chunk:
+                 break
+             yield chunk
+
+     def _generate_chunks(self, content: Union[bytes, Path, BinaryIO], start: int):
+         if isinstance(content, bytes):
+             return self._bytes_to_chunks(content, start)
+         elif isinstance(content, Path):
+             return self._file_to_chunks(content, start)
+         elif hasattr(content, 'seek') and hasattr(content, 'read'):
+             content.seek(start)
+             return self._stream_to_chunks(content)
+         else:
+             raise ValueError("Unsupported content type")
+
+     def _wrap_chunks_with_progress(self, chunks, total_size, uploaded_size, start_time, callback):
+         """Wrap a chunk iterator with progress reporting."""
+         for chunk in chunks:
+             yield chunk
+             uploaded_size += len(chunk)
+
+             if callback:
+                 elapsed = time.time() - start_time
+                 speed = uploaded_size / elapsed if elapsed > 0 else 0
+                 remaining = (total_size - uploaded_size) / speed if speed > 0 else 0
+
+                 progress = UploadProgress(
+                     total_size=total_size,
+                     uploaded_size=uploaded_size,
+                     percentage=uploaded_size / total_size * 100,
+                     speed=speed,
+                     remaining_time=remaining
+                 )
+                 callback(progress)
+
+
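To make the flow above concrete, here is a minimal synchronous usage sketch (an editorial illustration, not part of the package): start a resumable session against a pre-signed GCS URL, then upload with resume support and a progress callback. The URL and file name are placeholders.

    from pathlib import Path
    from file_hub_client.utils.upload_helper import HttpUploader, UploadProgress

    # Placeholder: a pre-signed GCS initiation URL, issued elsewhere (e.g. by the file-hub server).
    signed_url = "https://storage.googleapis.com/example-bucket/video.mp4?X-Goog-Signature=..."

    def print_progress(p: UploadProgress) -> None:
        print(f"{p.percentage:.1f}% at {p.speed / 1024:.0f} KiB/s, ~{p.remaining_time:.0f}s left")

    uploader = HttpUploader()
    file_path = Path("video.mp4")
    session_uri = uploader.start_resumable_session(signed_url, total_file_size=file_path.stat().st_size)
    uploader.upload(session_uri, file_path, progress_callback=print_progress, is_resume=True)

Note that upload() is pointed at the session URI, not the initiation URL, so a retried call with is_resume=True can query the persisted byte count and continue from there.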
+ class AsyncHttpUploader:
+     """Asynchronous HTTP uploader."""
+
+     def __init__(self, chunk_size: int = 1024 * 1024 * 5, total_retries: int = 3, retry_delay_seconds: int = 5):
+         self.chunk_size = chunk_size  # default 5 MB per chunk
+         self.total_retries = total_retries
+         self.retry_delay_seconds = retry_delay_seconds
+
+     async def start_resumable_session(self, url: str, total_file_size: Optional[int] = None,
+                                       mine_type: Optional[str] = None) -> str:
+         """
+         Start a GCS resumable upload session and return the session URI.
+
+         Args:
+             url (str): Pre-signed GCS URL for initiating the upload.
+             total_file_size (Optional[int]): Total file size, if known.
+             mine_type (Optional[str]): Content-Type (MIME type) of the file.
+         Returns:
+             str: The session URI returned by GCS.
+         """
+         content_range_header = f"bytes */{total_file_size}" if total_file_size is not None else "bytes */*"
+         headers = {"Content-Range": content_range_header, "x-goog-resumable": "start"}
+         if mine_type is not None:
+             headers["Content-Type"] = mine_type
+
+         response = await self._request("POST", url, headers=headers)
+         if response.status in [200, 201]:
+             session_uri = response.headers.get("Location")
+             if session_uri:
+                 print(f"Resumable upload session URI obtained: {session_uri}")
+                 return session_uri
+
+         text = await response.text()
+         raise Exception(f"Failed to start resumable session: {response.status} - {text}")
+
+     async def check_uploaded_size(self, url: str, total_file_size: Optional[int] = None,
+                                   mine_type: Optional[str] = None) -> int:
+         """
+         Query the current progress (bytes already uploaded) of a GCS resumable upload.
+
+         Args:
+             url (str): Session URI of the GCS resumable upload.
+             total_file_size (Optional[int]): Total file size, if known.
+                 Providing it lets GCS give a more precise answer; on a
+                 200 OK response this value is returned directly.
+             mine_type (Optional[str]): Content-Type (MIME type) of the file.
+         Returns:
+             int: Number of bytes already uploaded.
+                 - If the upload is complete (200 OK), returns total_file_size;
+                   if total_file_size is unknown, returns 0 (server-side
+                   verification is needed later).
+                 - If the upload is still in progress (308 Resume Incomplete),
+                   returns the number of bytes persisted so far.
+                 - If the query fails or the session is invalid, returns 0
+                   (start over; the session has expired).
+         """
+         # Build the Content-Range header; including the total size, when known, is more precise.
+         content_range_header = f"bytes */{total_file_size}" if total_file_size is not None else "bytes */*"
+         headers = {"Content-Range": content_range_header}
+         if mine_type is not None:
+             headers["Content-Type"] = mine_type
+
+         # Send an empty PUT to query the number of bytes persisted so far.
+         # The timeout should be tuned to network conditions and expected response time.
+         response = await self._request("PUT", url, headers=headers)
+
+         if response.status == 200:
+             # 200 OK means the whole file has already been uploaded.
+             # Return the total size if known; otherwise read it from the
+             # 'x-goog-stored-content-length' response header.
+             if total_file_size:
+                 print(f"Current uploaded size: {total_file_size} bytes")
+                 return total_file_size
+             content_length = response.headers.get("x-goog-stored-content-length")
+             print(f"Current uploaded size: {content_length} bytes")
+             if content_length:
+                 return int(content_length)
+             return 0  # size unknown; defer to server-side verification
+
+         elif response.status == 308:
+             # 308 Resume Incomplete: GCS reports the persisted bytes in the
+             # Range header, e.g. "bytes=0-12345" means 12346 bytes are stored.
+             range_header = response.headers.get("Range")
+             if range_header and "-" in range_header:
+                 uploaded = int(range_header.rsplit("-", 1)[-1]) + 1
+                 print(f"Current uploaded size: {uploaded} bytes")
+                 return uploaded
+             # A 308 without a Range header means no bytes have been persisted.
+             print("Current uploaded size: 0 bytes")
+             return 0
+
+         text = await response.text()
+         raise Exception(f"Failed to check uploaded size: {response.status} - {text}")
+
+     async def upload(
+             self,
+             url: str,
+             content: Union[bytes, BinaryIO, Path],
+             headers: Optional[Dict[str, str]] = None,
+             progress_callback: Optional[Callable[[UploadProgress], None]] = None,
+             total_size: Optional[int] = None,
+             is_resume: bool = False,
+     ) -> aiohttp.ClientResponse:
+         """
+         Asynchronously upload content to the given URL.
+
+         Args:
+             url: Upload URL.
+             content: File content (bytes, a binary stream, or a Path).
+             headers: Request headers.
+             progress_callback: Progress callback (plain callable or coroutine).
+             total_size: Total size in bytes; computed from content if omitted.
+             is_resume: Whether to resume an interrupted upload.
+         """
+         headers = headers or {}
+
+         # Determine the total size without reading the content.
+         final_total_size = total_size if total_size is not None else await self._calculate_total_size(content)
+
+         # When resuming, ask the server where to resume from.
+         if is_resume:
+             resume_from = await self.check_uploaded_size(url, final_total_size, mine_type=headers.get("Content-Type"))
+         else:
+             resume_from = 0
+
+         # Generate the chunk stream starting at resume_from.
+         chunks = await self._generate_chunks(content, resume_from)
+
+         # For a resumed upload, set the Content-Range header.
+         if is_resume:
+             headers["Content-Range"] = f"bytes {resume_from}-{final_total_size - 1}/{final_total_size}"
+
+         # Wrap the stream with progress reporting.
+         wrapped_chunks = self._wrap_chunks_with_progress(
+             chunks, final_total_size, resume_from, time.time(), progress_callback
+         )
+
+         # Issue the async upload request.
+         return await self._request(
+             method="PUT",
+             url=url,
+             headers=headers,
+             data=wrapped_chunks,
+         )
+
+     async def _request(
+             self,
+             method: str,
+             url: str,
+             headers: Optional[Dict[str, str]] = None,
+             data: Optional[Union[bytes, str, asyncio.StreamReader, Any]] = None,
+     ) -> aiohttp.ClientResponse:
+         """
+         Generic async request helper with automatic retries.
+
+         Args:
+             method (str): HTTP method, e.g. "POST" or "PUT".
+             url (str): Request URL.
+             headers (dict): Request headers.
+             data: Request body.
+
+         Returns:
+             aiohttp.ClientResponse: The successful response. The body is read
+             before the session closes, so the caller can still use
+             resp.text() / resp.json() afterwards.
+         """
+         headers = headers or {}
+
+         for attempt in range(self.total_retries):
+             try:
+                 async with aiohttp.ClientSession() as session:
+                     async with session.request(method, url, headers=headers, data=data) as resp:
+                         if resp.status < 400:
+                             # Cache the body so it remains readable after the
+                             # session and connection are closed.
+                             await resp.read()
+                             return resp
+                         text = await resp.text()
+                         raise RuntimeError(f"HTTP {resp.status}: {text}")
+             except Exception as e:
+                 if attempt < self.total_retries - 1:
+                     await asyncio.sleep(self.retry_delay_seconds)
+                 else:
+                     raise RuntimeError(f"Request failed after {self.total_retries} attempts: {e}")
+
+     async def _calculate_total_size(self, content: Union[bytes, Path, BinaryIO]) -> int:
+         if isinstance(content, bytes):
+             return len(content)
+         elif isinstance(content, Path):
+             if aiofiles is None:
+                 # Fall back to a blocking stat when aiofiles is unavailable.
+                 return content.stat().st_size
+             return (await aiofiles.os.stat(str(content))).st_size
+         elif hasattr(content, "seek") and hasattr(content, "tell"):
+             current = content.tell()
+             content.seek(0, 2)  # seek to end to measure size
+             size = content.tell()
+             content.seek(current)  # restore original position
+             return size
+         else:
+             raise ValueError("Unsupported content type")
+
+     async def _generate_chunks(self, content: Union[bytes, Path, BinaryIO], start: int) -> AsyncGenerator[bytes, None]:
+         if isinstance(content, bytes):
+             return self._bytes_to_chunks(content, start)
+         elif isinstance(content, Path):
+             if aiofiles is None:
+                 raise RuntimeError("aiofiles is required to upload Path objects asynchronously")
+             return self._file_to_chunks_async(content, start)
+         elif hasattr(content, "seek") and hasattr(content, "read"):
+             await asyncio.get_event_loop().run_in_executor(None, content.seek, start)
+             return self._stream_to_chunks(content)
+         else:
+             raise ValueError("Unsupported content type")
+
+     async def _bytes_to_chunks(self, data: bytes, start: int = 0):
+         for i in range(start, len(data), self.chunk_size):
+             yield data[i:i + self.chunk_size]
+
+     async def _file_to_chunks_async(self, file_path: Path, start: int = 0):
+         async with aiofiles.open(file_path, 'rb') as f:
+             await f.seek(start)
+             while True:
+                 chunk = await f.read(self.chunk_size)
+                 if not chunk:
+                     break
+                 yield chunk
+
+     async def _stream_to_chunks(self, stream: BinaryIO):
+         loop = asyncio.get_event_loop()
+         while True:
+             # Read the synchronous stream in a worker thread so the event loop is not blocked.
+             chunk = await loop.run_in_executor(None, stream.read, self.chunk_size)
+             if not chunk:
+                 break
+             yield chunk
+
+     async def _wrap_chunks_with_progress(
+             self,
+             chunks,
+             total_size: int,
+             uploaded_size: int,
+             start_time: float,
+             callback: Optional[Callable[[UploadProgress], None]] = None,
+     ):
+         async for chunk in chunks:
+             yield chunk
+             uploaded_size += len(chunk)
+
+             if callback:
+                 # start_time comes from time.time(), so measure elapsed on the same clock.
+                 elapsed = time.time() - start_time
+                 speed = uploaded_size / elapsed if elapsed > 0 else 0
+                 remaining = (total_size - uploaded_size) / speed if speed > 0 else 0
+                 progress = UploadProgress(
+                     total_size=total_size,
+                     uploaded_size=uploaded_size,
+                     percentage=uploaded_size / total_size * 100,
+                     speed=speed,
+                     remaining_time=remaining
+                 )
+                 if asyncio.iscoroutinefunction(callback):
+                     await callback(progress)
+                 else:
+                     callback(progress)
+
+
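The async uploader follows the same session-then-upload protocol; here is a sketch under the same placeholder assumptions (signed_url and the file name are illustrative). The progress callback may also be a coroutine, since the wrapper above awaits coroutine callbacks:

    import asyncio
    from pathlib import Path
    from file_hub_client.utils.upload_helper import AsyncHttpUploader

    # Placeholder pre-signed GCS initiation URL.
    signed_url = "https://storage.googleapis.com/example-bucket/video.mp4?X-Goog-Signature=..."

    async def main() -> None:
        uploader = AsyncHttpUploader()
        file_path = Path("video.mp4")
        session_uri = await uploader.start_resumable_session(signed_url, total_file_size=file_path.stat().st_size)
        await uploader.upload(session_uri, file_path, is_resume=True)

    asyncio.run(main())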
+ def calculate_file_md5(content: Union[bytes, BinaryIO, Path], chunk_size: int = 8192) -> str:
+     """Compute the MD5 digest of the given content."""
+     md5 = hashlib.md5()
+
+     if isinstance(content, bytes):
+         md5.update(content)
+     elif isinstance(content, Path):
+         with open(content, 'rb') as f:
+             while chunk := f.read(chunk_size):
+                 md5.update(chunk)
+     else:
+         # File-like object: hash from the start, then restore the position.
+         pos = content.tell()
+         content.seek(0)
+         while chunk := content.read(chunk_size):
+             md5.update(chunk)
+         content.seek(pos)
+
+     return md5.hexdigest()
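For integrity checks alongside uploads, calculate_file_md5 accepts the same content types as the uploaders (bytes, a Path, or a binary stream). A quick illustrative sketch; note that GCS's Content-MD5 header expects the base64-encoded raw digest rather than this hex form:

    from pathlib import Path
    from file_hub_client.utils.upload_helper import calculate_file_md5

    digest = calculate_file_md5(Path("video.mp4"))
    print(digest)  # 32-character hex MD5 string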