p115client 0.0.5.8.3__tar.gz → 0.0.5.8.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/PKG-INFO +2 -2
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/_upload.py +75 -51
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/client.py +83 -45
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/download.py +180 -41
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/pyproject.toml +2 -2
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/LICENSE +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/__init__.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/const.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/exception.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/py.typed +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/__init__.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/edit.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/export_dir.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/fs_files.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/iterdir.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/life.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/pool.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/request.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/upload.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/xys.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/type.py +0 -0
- {p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/readme.md +0 -0
{p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: p115client
-Version: 0.0.5.8.3
+Version: 0.0.5.8.4
 Summary: Python 115 webdisk client.
 Home-page: https://github.com/ChenyangGao/p115client
 License: MIT
@@ -40,7 +40,7 @@ Requires-Dist: python-filewrap (>=0.2.8)
 Requires-Dist: python-hashtools (>=0.0.3.3)
 Requires-Dist: python-http_request (>=0.0.6)
 Requires-Dist: python-httpfile (>=0.0.5.2)
-Requires-Dist: python-iterutils (>=0.1.
+Requires-Dist: python-iterutils (>=0.1.10)
 Requires-Dist: python-property (>=0.0.3)
 Requires-Dist: python-startfile (>=0.0.2)
 Requires-Dist: python-undefined (>=0.0.3)
{p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/_upload.py

@@ -10,8 +10,8 @@ __all__ = [
 
 from base64 import b64encode
 from collections.abc import (
-    AsyncGenerator, AsyncIterable, AsyncIterator, Awaitable,
-    ItemsView, Iterable, Iterator, Mapping, Sequence, Sized,
+    AsyncGenerator, AsyncIterable, AsyncIterator, Awaitable, Buffer, Callable,
+    Coroutine, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence, Sized,
 )
 from datetime import datetime
 from email.utils import formatdate
@@ -25,7 +25,7 @@ from xml.etree.ElementTree import fromstring
 
 from asynctools import ensure_aiter, ensure_async
 from filewrap import (
-    SupportsRead,
+    SupportsRead, buffer_length,
     bio_chunk_iter, bio_chunk_async_iter,
     bio_skip_iter, bio_skip_async_iter,
     bytes_iter_to_async_reader, bytes_iter_to_reader,
@@ -41,20 +41,13 @@ from .exception import MultipartUploadAbort
 from .type import MultipartResumeData
 
 
-def buffer_length(b, /) -> int:
-    if isinstance(b, Sized):
-        return len(b)
-    else:
-        return len(memoryview(b))
-
-
-def to_base64(s: bytes | str, /) -> str:
+def to_base64(s: Buffer | str, /) -> str:
     if isinstance(s, str):
         s = bytes(s, "utf-8")
     return str(b64encode(s), "ascii")
 
 
-def
+def maybe_integer(n: int | str, /) -> int | str:
     if isinstance(n, str) and n.isdecimal():
         n = int(n)
     return n
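Both rewritten helpers are easy to sanity-check in isolation (requires Python 3.12+, where `collections.abc.Buffer` covers `bytes`, `bytearray`, and `memoryview`; the asserts are illustrative):

```python
from base64 import b64encode
from collections.abc import Buffer

def to_base64(s: Buffer | str, /) -> str:
    # str is encoded as UTF-8 first; any Buffer is passed to b64encode as-is
    if isinstance(s, str):
        s = bytes(s, "utf-8")
    return str(b64encode(s), "ascii")

def maybe_integer(n: int | str, /) -> int | str:
    # decimal strings become int, everything else passes through unchanged
    if isinstance(n, str) and n.isdecimal():
        n = int(n)
    return n

assert to_base64(memoryview(b"abc")) == "YWJj"   # Buffer input now works
assert maybe_integer("42") == 42
assert maybe_integer("x-oss-etag") == "x-oss-etag"
```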
@@ -155,19 +148,19 @@ def oss_upload_sign(
     # "replicationProgress", "requestPayment", "requesterQosInfo", "resourceGroup", "resourcePool",
     # "resourcePoolBuckets", "resourcePoolInfo", "response-cache-control", "response-content-disposition",
     # "response-content-encoding", "response-content-language", "response-content-type", "response-expires",
-    # "restore", "security-token", "sequential", "startTime", "stat", "status", "style", "styleName",
-    # "tagging", "transferAcceleration", "uploadId", "uploads", "versionId", "versioning",
-    # "website", "worm", "wormExtend", "wormId", "x-oss-ac-forward-allow",
-    # "x-oss-ac-
-    # "x-oss-
-    # "x-oss-traffic-limit", "x-oss-write-get-object-response",
+    # "restore", "security-token", "sequential", "startTime", "stat", "status", "style", "styleName",
+    # "symlink", "tagging", "transferAcceleration", "uploadId", "uploads", "versionId", "versioning",
+    # "versions", "vod", "website", "worm", "wormExtend", "wormId", "x-oss-ac-forward-allow",
+    # "x-oss-ac-source-ip", "x-oss-ac-subnet-mask", "x-oss-ac-vpc-id", "x-oss-access-point-name",
+    # "x-oss-async-process", "x-oss-process", "x-oss-redundancy-transition-taskid", "x-oss-request-payer",
+    # "x-oss-target-redundancy-type", "x-oss-traffic-limit", "x-oss-write-get-object-response",
     # )
     date = formatdate(usegmt=True)
     if params is None:
         params = ""
     elif not isinstance(params, str):
         params = urlencode(params)
-    if params:
+    if params and not params.startswith("?"):
         params = "?" + params
     if headers:
         if isinstance(headers, Mapping):
@@ -183,7 +176,7 @@ def oss_upload_sign(
     headers_str = ""
     content_md5 = headers.setdefault("content-md5", "")
     content_type = headers.setdefault("content-type", "")
-    date = headers.get("x-oss-date") or headers.get("date"
+    date = headers.get("x-oss-date") or headers.get("date") or ""
    if not date:
         date = headers["date"] = formatdate(usegmt=True)
     signature_data = f"""\
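This hunk ends just as `signature_data` is being assembled. For reference, OSS V1 header signing concatenates the verb, content-md5, content-type, date, canonicalized `x-oss-*` headers, and the canonicalized resource, then signs with HMAC-SHA1; a minimal standalone sketch (names are illustrative, not this module's actual helpers):

```python
import hmac
from base64 import b64encode
from hashlib import sha1

def oss_sign_v1(access_key_secret: str, verb: str, content_md5: str,
                content_type: str, date: str, oss_headers: dict[str, str],
                resource: str) -> str:
    # canonicalized x-oss-* headers: lowercase keys, sorted, "key:value" one per line
    canonical = "".join(
        f"{k}:{v}\n" for k, v in sorted(oss_headers.items()) if k.startswith("x-oss-")
    )
    string_to_sign = f"{verb}\n{content_md5}\n{content_type}\n{date}\n{canonical}{resource}"
    digest = hmac.new(bytes(access_key_secret, "utf-8"),
                      bytes(string_to_sign, "utf-8"), sha1).digest()
    return str(b64encode(digest), "ascii")
```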
@@ -269,13 +262,16 @@ def oss_multipart_part_iter(
 ) -> Iterator[dict] | AsyncIterator[dict]:
     """List the parts already uploaded in a given multipart upload task
     """
+    request_kwargs.update(
+        method="GET",
+        params={"uploadId": upload_id},
+        headers={"x-oss-security-token": token["SecurityToken"]},
+    )
+    request_kwargs.setdefault("parse", lambda _, content: fromstring(content))
     def gen_step():
-        request_kwargs["
-        request_kwargs["headers"] = {"x-oss-security-token": token["SecurityToken"]}
-        request_kwargs["params"] = params = {"uploadId": upload_id}
-        request_kwargs.setdefault("parse", False)
+        params = request_kwargs["params"]
         while True:
-
+            etree = yield oss_upload_request(
                 request,
                 url=url,
                 bucket=bucket,
@@ -284,12 +280,11 @@ def oss_multipart_part_iter(
                 async_=async_,
                 **request_kwargs,
             )
-            etree = fromstring(content)
             for el in etree.iterfind("Part"):
-                yield Yield({sel.tag:
-            if etree.find("IsTruncated")
+                yield Yield({sel.tag: maybe_integer(sel.text) for sel in el}, identity=True)
+            if getattr(etree.find("IsTruncated"), "text") == "false":
                 break
-            params["part-number-marker"] = etree.find("NextPartNumberMarker")
+            params["part-number-marker"] = getattr(etree.find("NextPartNumberMarker"), "text")
     return run_gen_step_iter(gen_step, async_=async_)
 
 
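The rewritten loop drives OSS ListParts pagination directly off the response XML: `IsTruncated` decides whether to continue, and `NextPartNumberMarker` becomes the next request's `part-number-marker`. The same pattern in a self-contained sketch, with a hypothetical `fetch_page` standing in for `oss_upload_request`:

```python
from xml.etree.ElementTree import fromstring

def iter_parts(fetch_page):
    # fetch_page(marker) -> raw ListParts XML (bytes); hypothetical stand-in
    marker = ""
    while True:
        etree = fromstring(fetch_page(marker))
        for el in etree.iterfind("Part"):
            yield {sel.tag: sel.text for sel in el}
        if getattr(etree.find("IsTruncated"), "text", "false") == "false":
            break
        marker = getattr(etree.find("NextPartNumberMarker"), "text", "")
```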
@@ -327,12 +322,14 @@ def oss_multipart_upload_init(
     async_: Literal[False, True] = False,
     **request_kwargs,
 ) -> str | Coroutine[Any, Any, str]:
-    """
+    """Initialize a multipart upload and get the upload_id
     """
+    request_kwargs.update(
+        method="POST",
+        params={"sequential": "1", "uploads": "1"},
+        headers={"x-oss-security-token": token["SecurityToken"]},
+    )
     request_kwargs.setdefault("parse", parse_upload_id)
-    request_kwargs["method"] = "POST"
-    request_kwargs["params"] = {"sequential": "1", "uploads": "1"}
-    request_kwargs["headers"] = {"x-oss-security-token": token["SecurityToken"]}
     return oss_upload_request(
         request,
         url=url,
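`parse_upload_id`, installed here as the default parser, has to pull `UploadId` out of the InitiateMultipartUploadResult XML. A hedged sketch of such a parser, assuming the same `(response, content)` parse signature used by the lambda in the previous hunk (the real helper may differ):

```python
from xml.etree.ElementTree import fromstring

def parse_upload_id(_, content: bytes) -> str:
    # InitiateMultipartUploadResult carries Bucket, Key and UploadId elements
    etree = fromstring(content)
    node = etree.find("UploadId")
    if node is None or not node.text:
        raise ValueError("no UploadId in response")
    return node.text
```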
@@ -387,20 +384,26 @@ def oss_multipart_upload_complete(
     async_: Literal[False, True] = False,
     **request_kwargs,
 ) -> dict | Coroutine[Any, Any, dict]:
-    """
+    """Complete a multipart upload task; the callback data goes in the request headers and the part list in the request body
     """
-    request_kwargs
-
-
-    "
-
-
-
-
-
-
-
-
+    request_kwargs.update(
+        method="POST",
+        params={"uploadId": upload_id},
+        data=b"".join((
+            b"<CompleteMultipartUpload>",
+            *map(
+                b"<Part><PartNumber>%d</PartNumber><ETag>%s</ETag></Part>".__mod__,
+                ((part["PartNumber"], bytes(part["ETag"], "ascii")) for part in parts),
+            ),
+            b"</CompleteMultipartUpload>",
+        )),
+        headers={
+            "x-oss-security-token": token["SecurityToken"],
+            "x-oss-callback": to_base64(callback["callback"]),
+            "x-oss-callback-var": to_base64(callback["callback_var"]),
+            "content-type": "text/xml",
+        },
+    )
     return oss_upload_request(
         request,
         url=url,
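The `data` argument assembled above is the standard OSS CompleteMultipartUpload XML document. Evaluated for two parts (the ETag values here are made up), it produces:

```python
parts = [
    {"PartNumber": 1, "ETag": '"0F343A0E"'},
    {"PartNumber": 2, "ETag": '"CAFEBABE"'},
]
data = b"".join((
    b"<CompleteMultipartUpload>",
    *map(
        b"<Part><PartNumber>%d</PartNumber><ETag>%s</ETag></Part>".__mod__,
        ((part["PartNumber"], bytes(part["ETag"], "ascii")) for part in parts),
    ),
    b"</CompleteMultipartUpload>",
))
# b'<CompleteMultipartUpload><Part><PartNumber>1</PartNumber><ETag>"0F343A0E"</ETag></Part>
#   <Part><PartNumber>2</PartNumber><ETag>"CAFEBABE"</ETag></Part></CompleteMultipartUpload>'
```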
@@ -449,12 +452,14 @@ def oss_multipart_upload_cancel(
     async_: Literal[False, True] = False,
     **request_kwargs,
 ) -> bool | Coroutine[Any, Any, bool]:
-    """
+    """Cancel a multipart upload task, returning whether it succeeded
     """
+    request_kwargs.update(
+        method="DELETE",
+        params={"uploadId": upload_id},
+        headers={"x-oss-security-token": token["SecurityToken"]},
+    )
     request_kwargs.setdefault("parse", lambda resp: 200 <= resp.status_code < 300 or resp.status_code == 404)
-    request_kwargs["method"] = "DELETE"
-    request_kwargs["params"] = {"uploadId": upload_id}
-    request_kwargs["headers"] = {"x-oss-security-token": token["SecurityToken"]}
     return oss_upload_request(
         request,
         url=url,
@@ -520,7 +525,7 @@ def oss_multipart_upload_part(
 ) -> dict | Coroutine[Any, Any, dict]:
     """Upload a single part, returning a dict with the following fields:
 
-    .. python
+    .. code:: python
 
         {
             "PartNumber": int, # part number, counting from 1
@@ -924,3 +929,22 @@ def oss_multipart_upload(
         yield close_reporthook
     return run_gen_step(gen_step, async_=async_)
 
+
+# class MultipartUploader:
+#     def __init__
+#     def __del__
+#     async def __aiter__
+#     def __iter__
+#     async def __aenter__
+#     async def __aexit__
+#     def __enter__
+#     def __exit__
+# # 0. Design one class supporting both sync and async; instantiation does no initialization (to accommodate async)
+# # 1. Usable as a context manager or as an iterator
+# # 2. The context manager also returns an iterator (on iteration, it initializes to open the file or fetch the upload state if missing)
+# # 3. Can be paused or cancelled midway
+# # 4. seekable: path, url (supports range requests), file reader (seekable)
+# # 5. Progress bar support
+# # 6. Provide a helper function in the p115client.tool.upload module
+#     ...
+
{p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/client.py

@@ -3985,6 +3985,13 @@ class P115OpenClient(ClientRequestMixin):
 class P115Client(P115OpenClient):
     """Client object for 115
 
+    .. note::
+        One user may currently be logged in to several open-platform apps at once (distinguished by AppID), but if the same app is logged in more than once, only the most recent login stays valid
+
+        Refreshing the `access_token` with a `refresh_token` again within a short time is currently not allowed, but you can authorize a fresh login to obtain an `access_token`, which is not subject to that rate limit
+
+        A `refresh_token` can be used only once, yielding a new `refresh_token` and `access_token`; if a refresh request is sent successfully but reading the response fails, the `refresh_token` may be invalidated, and you then have to authorize a new login
+
     :param cookies: cookies for 115, which must include `UID`, `CID`, `KID`, `SEID`, etc.
 
         - if None, you will be asked to log in by scanning a QR code
@@ -4910,14 +4917,14 @@ class P115Client(P115OpenClient):
             data = resp["data"]
             if replace is False:
                 inst: P115OpenClient | Self = P115OpenClient.from_token(data["access_token"], data["refresh_token"])
-                inst.app_id = app_id
             else:
                 if replace is True:
                     inst = self
                 else:
                     inst = replace
                 inst.refresh_token = data["refresh_token"]
-
+                inst.access_token = data["access_token"]
+            inst.app_id = app_id
             return inst
         return run_gen_step(gen_step, async_=async_)
 
@@ -5195,10 +5202,14 @@ class P115Client(P115OpenClient):
         elif data is not None:
             request_kwargs["data"] = data
         request_kwargs.setdefault("parse", default_parse)
-
+        use_cookies = not url.startswith("https://proapi.115.com/open/")
+        if not use_cookies:
             headers["cookie"] = ""
-            return request(url=url, method=method, **request_kwargs)
         def gen_step():
+            if async_:
+                lock: Lock | AsyncLock = self.request_alock
+            else:
+                lock = self.request_lock
             check_for_relogin = self.check_for_relogin
             cant_relogin = not callable(check_for_relogin)
             if get_cookies is not None:
@@ -5208,59 +5219,86 @@ class P115Client(P115OpenClient):
             for i in count(0):
                 exc = None
                 try:
-                    if
-                    if
-
-
-                    if get_cookies_need_arg:
-                        cookies_ = yield get_cookies(async_)
+                    if use_cookies:
+                        if get_cookies is None:
+                            if need_set_cookies:
+                                cookies_old = headers["cookie"] = self.cookies_str
                         else:
-
-
-
-
-
+                            if get_cookies_need_arg:
+                                cookies_ = yield get_cookies(async_)
+                            else:
+                                cookies_ = yield get_cookies()
+                            if not cookies_:
+                                raise ValueError("can't get new cookies")
+                            headers["cookie"] = cookies_
+                    resp = yield partial(request, url=url, method=method, **request_kwargs)
+                    return resp
                 except BaseException as e:
                     exc = e
-                    if cant_relogin or not need_set_cookies:
+                    if cant_relogin or use_cookies and not need_set_cookies:
                         raise
                     if isinstance(e, (AuthenticationError, LoginError)):
-                        if
+                        if use_cookies and (
+                            get_cookies is not None or
+                            cookies_old != self.cookies_str or
+                            cookies_old != self._read_cookies()
+                        ):
                             continue
                         raise
                     res = yield partial(cast(Callable, check_for_relogin), e)
                     if not res if isinstance(res, bool) else res != 405:
                         raise
-                    if
-
-
-
-
-
-
-                    lock
-
+                    if use_cookies:
+                        if get_cookies is not None:
+                            continue
+                        cookies = self.cookies_str
+                        if not cookies_equal(cookies, cookies_old):
+                            continue
+                        cookies_mtime = getattr(self, "cookies_mtime", 0)
+                        yield lock.acquire
+                        try:
+                            cookies_new = self.cookies_str
+                            cookies_mtime_new = getattr(self, "cookies_mtime", 0)
+                            if cookies_equal(cookies, cookies_new):
+                                m = CRE_COOKIES_UID_search(cookies)
+                                uid = "" if m is None else m[0]
+                                need_read_cookies = cookies_mtime_new > cookies_mtime
+                                if need_read_cookies:
+                                    cookies_new = self._read_cookies()
+                                if i and cookies_equal(cookies_old, cookies_new):
+                                    raise
+                                if not (need_read_cookies and cookies_new):
+                                    warn(f"relogin to refresh cookies: UID={uid!r} app={self.login_app()!r}", category=P115Warning)
+                                    yield self.login_another_app(
+                                        replace=True,
+                                        async_=async_, # type: ignore
+                                    )
+                        finally:
+                            lock.release()
                     else:
-
-                        lock.acquire
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                        access_token = self.access_token
+                        yield lock.acquire
+                        try:
+                            if access_token != self.access_token:
+                                continue
+                            if hasattr(self, "app_id"):
+                                app_id = self.app_id
+                                yield self.login_another_open(
+                                    app_id,
+                                    replace=True,
+                                    async_=async_, # type: ignore
+                                )
+                                warn(f"relogin to refresh token: {app_id=}", category=P115Warning)
+                            else:
+                                resp = yield self.refresh_access_token(
+                                    async_=async_, # type: ignore
+                                )
+                                check_response(resp)
+                                warn("relogin to refresh token (using refresh_token)", category=P115Warning)
+                        finally:
+                            lock.release()
                 finally:
-                    if (cookies_ and
+                    if (use_cookies and cookies_ and
                         get_cookies is not None and
                         revert_cookies is not None and (
                             not exc or not (
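The rewritten retry loop is the heart of this hunk: every failed request funnels into one lock-guarded refresh, and each waiter re-checks whether another thread or task already refreshed before doing it itself. A minimal sketch of that double-checked pattern, with generic stand-ins for the client's actual exception types and helpers:

```python
# Hypothetical stand-ins: do_request performs the call, get_state returns the
# shared credential (cookies or access_token), refresh re-authenticates.
def call_with_relogin(do_request, get_state, refresh, lock):
    while True:
        state = get_state()              # snapshot before the attempt
        try:
            return do_request()
        except PermissionError:          # stand-in for AuthenticationError
            with lock:
                if state == get_state(): # nobody refreshed yet, so we do it
                    refresh()
            # otherwise someone else refreshed meanwhile; just retry
```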
{p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/p115client/tool/download.py

@@ -5,7 +5,7 @@ __author__ = "ChenyangGao <https://chenyanggao.github.io>"
 __all__ = [
     "reduce_image_url_layers", "batch_get_url", "iter_url_batches", "iter_files_with_url",
     "iter_images_with_url", "iter_subtitles_with_url", "iter_subtitle_batches", "make_strm",
-    "iter_download_nodes", "iter_download_files",
+    "iter_download_nodes", "iter_download_files", "get_remaining_open_count",
 ]
 __doc__ = "This module provides download-related functions"
 
@@ -21,10 +21,12 @@ from mimetypes import guess_type
 from os import fsdecode, makedirs, remove, PathLike
 from os.path import abspath, dirname, join as joinpath, normpath, splitext
 from queue import SimpleQueue
+from shutil import rmtree
 from threading import Lock
 from time import time
 from typing import cast, overload, Any, Final, Literal, TypedDict
 from urllib.parse import quote, urlsplit
+from urllib.request import urlopen, Request
 from uuid import uuid4
 from warnings import warn
 
@@ -91,7 +93,7 @@ def batch_get_url(
 
     :param client: a 115 client or cookies
     :param id_or_pickcode: treated as an id if an int, as a pickcode if a str
-    :param user_agent: "
+    :param user_agent: value of the "user-agent" request header
     :param async_: whether to run asynchronously
     :param request_kwargs: other request parameters
 
@@ -100,9 +102,9 @@ def batch_get_url(
     if isinstance(client, str):
         client = P115Client(client, check_for_relogin=True)
     if headers := request_kwargs.get("headers"):
-        request_kwargs["headers"] = dict(headers, **{"
+        request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
     else:
-        request_kwargs["headers"] = {"
+        request_kwargs["headers"] = {"user-agent": user_agent}
     def gen_step():
         if isinstance(id_or_pickcode, int):
             resp = yield client.fs_file_skim(
@@ -200,7 +202,7 @@ def iter_url_batches(
 
     :param client: a 115 client or cookies
     :param pickcodes: an iterator that yields pickcodes
-    :param user_agent: "
+    :param user_agent: value of the "user-agent" request header
     :param batch_size: number of items handled per batch
     :param async_: whether to run asynchronously
     :param request_kwargs: other request parameters
@@ -210,9 +212,9 @@ def iter_url_batches(
     if isinstance(client, str):
         client = P115Client(client, check_for_relogin=True)
     if headers := request_kwargs.get("headers"):
-        request_kwargs["headers"] = dict(headers, **{"
+        request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
     else:
-        request_kwargs["headers"] = {"
+        request_kwargs["headers"] = {"user-agent": user_agent}
     if batch_size <= 0:
         batch_size = 1
     def gen_step():
@@ -243,7 +245,6 @@ def iter_url_batches(
     return run_gen_step_iter(gen_step, async_=async_)
 
 
-# TODO: support fetching urls in batches, to reduce the total time taken
 @overload
 def iter_files_with_url(
     client: str | P115Client,
@@ -336,7 +337,7 @@ def iter_files_with_url(
     :param id_to_dirnode: dict that maps an id to the file's `DirNode(name, parent_id)` named tuple
     :param app: use the API of a particular app (device)
     :param raise_for_changed_count: whether to raise if the total count changes while fetching in batches
-    :param user_agent: "
+    :param user_agent: value of the "user-agent" request header
     :param async_: whether to run asynchronously
     :param request_kwargs: other request parameters
 
@@ -846,8 +847,9 @@ def make_strm(
     origin: str = "http://localhost:8000",
     update: bool = False,
     discard: bool = True,
-    use_abspath:
+    use_abspath: bool = True,
     with_root: bool = False,
+    with_tree: bool = True,
     without_suffix: bool = True,
     complete_url: bool = True,
     suffix: str = "",
@@ -871,8 +873,9 @@ def make_strm(
     origin: str = "http://localhost:8000",
     update: bool = False,
     discard: bool = True,
-    use_abspath:
+    use_abspath: bool = True,
     with_root: bool = False,
+    with_tree: bool = True,
     without_suffix: bool = True,
     complete_url: bool = True,
     suffix: str = "",
@@ -895,8 +898,9 @@ def make_strm(
     origin: str = "http://localhost:8000",
     update: bool = False,
     discard: bool = True,
-    use_abspath:
+    use_abspath: bool = True,
     with_root: bool = False,
+    with_tree: bool = True,
     without_suffix: bool = True,
     complete_url: bool = True,
     suffix: str = "",
@@ -923,9 +927,9 @@ def make_strm(
 
         - if True, use the full 115 path
        - if False, use a path relative to the directory of `cid`
-        - if None, save all files into a single directory
 
-    :param with_root:
+    :param with_root: only effective when use_abspath is False. If True, create a directory under `save_dir` with the same name as the `cid` directory and use it as the actual `save_dir`
+    :param with_tree: if False, save all files directly under `save_dir`, without building a nested directory tree
     :param without_suffix: whether to drop the original extension. If False, append ".strm" directly after the original path; if True, strip the original extension first
     :param complete_url: whether the complete url is needed
 
@@ -965,20 +969,42 @@ def make_strm(
     ignored: list[str] = []
     removed: list[str] = []
     append = list.append
+    add = set.add
     if discard:
         seen: set[str] = set()
         seen_add = seen.add
         existing: set[str] = set()
         def do_discard():
+            if not seen:
+                rmtree(savedir)
+                makedirs(savedir, exist_ok=True)
+                return
+            dirs: set[str] = {""}
+            for path in seen:
+                while path := dirname(path):
+                    add(dirs, path)
+            removed_dirs: set[str] = set()
             for path in existing - seen:
-
-
+                d = dirname(path)
+                if d in dirs:
+                    path = joinpath(savedir, path)
+                    remove(path)
+                elif d not in removed_dirs:
+                    while True:
+                        add(removed_dirs, d)
+                        pdir = dirname(d)
+                        if not pdir or pdir in dirs:
+                            rmtree(joinpath(savedir, d))
+                            break
+                        elif pdir in removed_dirs:
+                            break
+                        d = pdir
                 append(removed, path)
         def normalize_path(attr: dict, /) -> str:
-            if
-                path = attr["name"]
-            else:
+            if with_tree:
                 path = attr["path"][abspath_prefix_length:]
+            else:
+                path = attr["name"]
             if without_suffix:
                 path = splitext(path)[0]
             relpath = normpath(path) + ".strm"
@@ -1016,14 +1042,8 @@ def make_strm(
     def gen_step():
         nonlocal abspath_prefix_length, savedir
         start_t = time()
-        if discard:
-            strm_files = iglob("**/*.strm", root_dir=savedir, recursive=True)
-            if async_:
-                task: Any = create_task(to_thread(existing.update, strm_files))
-            else:
-                task = run_as_thread(existing.update, strm_files)
         if cid:
-            if use_abspath
+            if use_abspath or with_tree:
                 root = yield get_path_to_cid(
                     client,
                     cid,
@@ -1033,7 +1053,12 @@ def make_strm(
                     **request_kwargs,
                 )
                 abspath_prefix_length = len(root) + 1
-
+                if use_abspath:
+                    savedir += normpath(root)
+                elif with_root:
+                    name = root.rpartition("/")[-1]
+                    savedir = joinpath(savedir, name)
+        elif with_root:
             resp = yield client.fs_file_skim(
                 cid,
                 async_=async_,  # type: ignore
@@ -1042,6 +1067,12 @@ def make_strm(
             check_response(resp)
             name = posix_escape_name(unescape_115_charref(resp["data"][0]["file_name"]))
             savedir = joinpath(savedir, name)
+        if discard:
+            strm_files = iglob("**/*.strm", root_dir=savedir, recursive=True)
+            if async_:
+                task: Any = create_task(to_thread(existing.update, strm_files))
+            else:
+                task = run_as_thread(existing.update, strm_files)
         params: dict[str, Any] = {}
         if use_abspath is not None:
             params["path_already"] = path_already
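With `with_tree` added and the stale-`.strm` scan moved after `save_dir` is finalized, the cleanup now looks at the same directory the new files will actually be written to. A hedged usage sketch (parameter names inferred from the signatures and docstring above; the cookies path is illustrative):

```python
from p115client import P115Client
from p115client.tool.download import make_strm

client = P115Client(open("115-cookies.txt").read(), check_for_relogin=True)

# flat layout: every .strm file lands directly in ./strm, no directory tree
make_strm(client, cid=0, save_dir="./strm", use_abspath=False, with_tree=False)

# mirrored layout: rebuild the 115 directory tree under ./strm
make_strm(client, cid=0, save_dir="./strm", use_abspath=True)
```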
@@ -1089,9 +1120,10 @@ def make_strm(
 @overload
 def iter_download_nodes(
     client: str | P115Client,
-    pickcode: int | str,
+    pickcode: int | str = "",
     files: bool = True,
     max_workers: None | int = 1,
+    app: str = "android",
     *,
     async_: Literal[False] = False,
     **request_kwargs,
@@ -1100,9 +1132,10 @@ def iter_download_nodes(
 @overload
 def iter_download_nodes(
     client: str | P115Client,
-    pickcode: int | str,
+    pickcode: int | str = "",
     files: bool = True,
     max_workers: None | int = 1,
+    app: str = "android",
     *,
     async_: Literal[True],
     **request_kwargs,
@@ -1110,9 +1143,10 @@ def iter_download_nodes(
     ...
 def iter_download_nodes(
     client: str | P115Client,
-    pickcode: int | str,
+    pickcode: int | str = "",
     files: bool = True,
     max_workers: None | int = 1,
+    app: str = "android",
     *,
     async_: Literal[False, True] = False,
     **request_kwargs,
@@ -1123,6 +1157,7 @@ def iter_download_nodes(
     :param pickcode: the directory's pickcode or id
     :param files: if True, fetch only files; otherwise, only directories
     :param max_workers: maximum concurrency; if None or <= 0, defaults to 20
+    :param app: use the API of a particular app (device)
     :param async_: whether to run asynchronously
     :param request_kwargs: other request parameters
 
@@ -1130,18 +1165,18 @@ def iter_download_nodes(
     """
     if isinstance(client, str):
         client = P115Client(client, check_for_relogin=True)
+    get_base_url = cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__
     if files:
         method = client.download_files
     else:
         method = client.download_folders
     if max_workers == 1:
-        def gen_step():
-            nonlocal pickcode
+        def gen_step(pickcode):
             if isinstance(pickcode, int):
                 resp = yield client.fs_file_skim(pickcode, async_=async_, **request_kwargs)
                 check_response(resp)
                 pickcode = resp["data"][0]["pick_code"]
-            request_kwargs.setdefault("base_url",
+            request_kwargs.setdefault("base_url", get_base_url)
             for i in count(1):
                 payload = {"pickcode": pickcode, "page": i}
                 resp = yield method(payload, async_=async_, **request_kwargs)
@@ -1158,7 +1193,7 @@ def iter_download_nodes(
         q = SimpleQueue()
         get, put = q.get, q.put_nowait
         max_page = 0
-        def request():
+        def request(pickcode):
             nonlocal max_page
             while True:
                 page = get_next_page()
@@ -1178,8 +1213,8 @@ def iter_download_nodes(
                 put(data["list"])
                 if not data["has_next_page"]:
                     max_page = page
-        def gen_step():
-            nonlocal max_workers
+        def gen_step(pickcode):
+            nonlocal max_workers
             if async_:
                 if max_workers is None or max_workers <= 0:
                     max_workers = 20
@@ -1222,7 +1257,7 @@ def iter_download_nodes(
             if not n:
                 put(sentinel)
             for i in range(n):
-                submit(run_gen_step, request, async_=async_).add_done_callback(countdown)
+                submit(run_gen_step, request(pickcode), async_=async_).add_done_callback(countdown)
             while True:
                 ls = yield get
                 if ls is sentinel:
if ls is sentinel:
|
@@ -1232,7 +1267,26 @@ def iter_download_nodes(
|
|
1232
1267
|
yield YieldFrom(ls, identity=True)
|
1233
1268
|
finally:
|
1234
1269
|
yield shutdown
|
1235
|
-
|
1270
|
+
if pickcode:
|
1271
|
+
return run_gen_step_iter(gen_step(pickcode), async_=async_)
|
1272
|
+
else:
|
1273
|
+
def chain():
|
1274
|
+
with with_iter_next(iterdir(
|
1275
|
+
client,
|
1276
|
+
ensure_file=False,
|
1277
|
+
app=app,
|
1278
|
+
normalize_attr=normalize_attr_simple,
|
1279
|
+
raise_for_changed_count=True,
|
1280
|
+
async_=async_,
|
1281
|
+
**request_kwargs,
|
1282
|
+
)) as get_next:
|
1283
|
+
while True:
|
1284
|
+
attr = yield get_next
|
1285
|
+
yield YieldFrom(
|
1286
|
+
run_gen_step_iter(gen_step(attr["pickcode"]), async_=async_),
|
1287
|
+
identity=True,
|
1288
|
+
)
|
1289
|
+
return run_gen_step_iter(chain, async_=async_)
|
1236
1290
|
|
1237
1291
|
|
1238
1292
|
@overload
|
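Because `pickcode` now defaults to `""`, `iter_download_nodes` can sweep an entire account: with no pickcode it walks the top-level directories via `iterdir` and chains one traversal per directory. A hedged usage sketch (the pickcode value and cookies path are illustrative):

```python
from p115client import P115Client
from p115client.tool.download import iter_download_nodes

client = P115Client(open("115-cookies.txt").read(), check_for_relogin=True)

# one directory, identified by its pickcode
for node in iter_download_nodes(client, "ecjq9ichcb943lu1", max_workers=4):
    print(node)

# pickcode omitted: iterate every top-level directory in turn
for node in iter_download_nodes(client, files=True, app="android"):
    print(node)
```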
@@ -1243,6 +1297,7 @@ def iter_download_files(
     escape: None | bool | Callable[[str], str] = True,
     with_ancestors: bool = True,
     max_workers: None | int = None,
+    app: str = "android",
     *,
     async_: Literal[False] = False,
     **request_kwargs,
@@ -1256,6 +1311,7 @@ def iter_download_files(
     escape: None | bool | Callable[[str], str] = True,
     with_ancestors: bool = True,
     max_workers: None | int = None,
+    app: str = "android",
     *,
     async_: Literal[True],
     **request_kwargs,
@@ -1268,6 +1324,7 @@ def iter_download_files(
     escape: None | bool | Callable[[str], str] = True,
     with_ancestors: bool = True,
     max_workers: None | int = None,
+    app: str = "android",
     *,
     async_: Literal[False, True] = False,
     **request_kwargs,
@@ -1291,6 +1348,7 @@ def iter_download_files(
     :param with_ancestors: whether the file info should include "ancestors"
     :param id_to_dirnode: dict that maps an id to the file's `DirNode(name, parent_id)` named tuple
     :param max_workers: maximum concurrency; if None or <= 0, defaults to 20
+    :param app: use the API of a particular app (device)
     :param async_: whether to run asynchronously
     :param request_kwargs: other request parameters
 
@@ -1358,9 +1416,9 @@ def iter_download_files(
         with with_iter_next(iterdir(
             client,
             id_to_dirnode=id_to_dirnode,
-            app=
+            app=app,
             raise_for_changed_count=True,
-            async_=async_,
+            async_=async_,
             **request_kwargs,
         )) as get_next:
             while True:
@@ -1375,7 +1433,10 @@ def iter_download_files(
                     **defaults,
                 }, identity=True)
             for pickcode in pickcodes:
-                yield YieldFrom(
+                yield YieldFrom(
+                    run_gen_step_iter(gen_step(pickcode), async_=async_),
+                    identity=True,
+                )
             return
         if not pickcode:
             resp = yield client.fs_file_skim(cid, async_=async_, **request_kwargs)
@@ -1402,6 +1463,7 @@ def iter_download_files(
             pickcode,
             files=False,
             max_workers=max_workers,
+            app=app,
             async_=async_,
             **request_kwargs,
         )) as get_next:
@@ -1421,6 +1483,7 @@ def iter_download_files(
             pickcode,
             files=True,
             max_workers=max_workers,
+            app=app,
             async_=async_, # type: ignore
             **request_kwargs,
         )) as get_next:
@@ -1446,3 +1509,79 @@ def iter_download_files(
             yield YieldFrom(map(norm_attr, cache), identity=True)
     return run_gen_step_iter(gen_step, async_=async_)
 
+
+@overload
+def get_remaining_open_count(
+    client: str | P115Client,
+    app: str = "android",
+    *,
+    async_: Literal[False] = False,
+    **request_kwargs,
+) -> int:
+    ...
+@overload
+def get_remaining_open_count(
+    client: str | P115Client,
+    app: str = "android",
+    *,
+    async_: Literal[True],
+    **request_kwargs,
+) -> Coroutine[Any, Any, int]:
+    ...
+def get_remaining_open_count(
+    client: str | P115Client,
+    app: str = "android",
+    *,
+    async_: Literal[False, True] = False,
+    **request_kwargs,
+) -> int | Coroutine[Any, Any, int]:
+    """Get the number of download links that can still be opened
+
+    .. note::
+        Suppose the total is n (usually 10, though occasionally adjusted); if m links are already open, the returned number is n-m
+
+    :param client: a 115 client or cookies
+    :param app: use the API of a particular app (device)
+    :param async_: whether to run asynchronously
+    :param request_kwargs: other request parameters
+
+    :return: the count
+    """
+    if isinstance(client, str):
+        client = P115Client(client, check_for_relogin=True)
+    if not isinstance(client, P115Client) or app == "open":
+        get_url: Callable[..., P115URL] = client.download_url_open
+    elif app in ("", "web", "desktop", "harmony"):
+        get_url = client.download_url
+    else:
+        get_url = partial(client.download_url, app=app)
+    def gen_step():
+        cache: list = []
+        add_to_cache = cache.append
+        try:
+            with with_iter_next(iter_download_nodes(
+                client,
+                app=app,
+                async_=async_,
+                **request_kwargs,
+            )) as get_next:
+                while True:
+                    info = yield get_next
+                    if int(info["fs"]) <= 1024 * 1024 * 200:
+                        continue
+                    try:
+                        url = yield get_url(info["pc"], async_=async_)
+                    except FileNotFoundError:
+                        continue
+                    request = Request(url, headers={"user-agent": ""})
+                    if async_:
+                        file = yield to_thread(urlopen, request)
+                    else:
+                        file = urlopen(request)
+                    add_to_cache(file)
+        finally:
+            for f in cache:
+                f.close()
+            return len(cache)
+    return run_gen_step(gen_step, async_=async_)
+
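Note how `get_remaining_open_count` measures the quota: it opens real download connections on files over 200 MB until opening fails, closes them all, and returns how many it managed to open. That makes it relatively expensive to call. A hedged usage sketch (cookies path illustrative):

```python
from p115client import P115Client
from p115client.tool.download import get_remaining_open_count

client = P115Client(open("115-cookies.txt").read(), check_for_relogin=True)

# counts by opening real connections, so expect it to take a few seconds
n = get_remaining_open_count(client, app="android")
print(f"{n} more download links can be opened right now")
```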
{p115client-0.0.5.8.3 → p115client-0.0.5.8.4}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "p115client"
-version = "0.0.5.8.3"
+version = "0.0.5.8.4"
 description = "Python 115 webdisk client."
 authors = ["ChenyangGao <wosiwujm@gmail.com>"]
 license = "MIT"
@@ -48,7 +48,7 @@ python-filewrap = ">=0.2.8"
 python-hashtools = ">=0.0.3.3"
 python-httpfile = ">=0.0.5.2"
 python-http_request = ">=0.0.6"
-python-iterutils = ">=0.1.
+python-iterutils = ">=0.1.10"
 python-property = ">=0.0.3"
 python-startfile = ">=0.0.2"
 python-undefined = ">=0.0.3"

All other files are unchanged.
|