p115client 0.0.5.12.3__py3-none-any.whl → 0.0.5.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- p115client/client.py +260 -188
- p115client/tool/attr.py +11 -5
- p115client/tool/download.py +350 -262
- p115client/tool/edit.py +27 -35
- p115client/tool/export_dir.py +37 -26
- p115client/tool/history.py +135 -11
- p115client/tool/iterdir.py +2407 -3292
- p115client/tool/life.py +7 -1
- p115client/tool/upload.py +28 -20
- p115client/tool/util.py +23 -20
- p115client/tool/xys.py +0 -1
- p115client/type.py +9 -2
- {p115client-0.0.5.12.3.dist-info → p115client-0.0.5.13.dist-info}/METADATA +5 -4
- p115client-0.0.5.13.dist-info/RECORD +28 -0
- p115client-0.0.5.12.3.dist-info/RECORD +0 -28
- {p115client-0.0.5.12.3.dist-info → p115client-0.0.5.13.dist-info}/LICENSE +0 -0
- {p115client-0.0.5.12.3.dist-info → p115client-0.0.5.13.dist-info}/WHEEL +0 -0
p115client/tool/download.py
CHANGED
@@ -10,12 +10,13 @@ __all__ = [
 __doc__ = "这个模块提供了一些和下载有关的函数"
 
 from asyncio import create_task, to_thread, Queue as AsyncQueue, TaskGroup
-from collections.abc import
+from collections.abc import (
+    AsyncIterator, Callable, Coroutine, Iterable, Iterator, MutableMapping,
+)
 from concurrent.futures import ThreadPoolExecutor
-from errno import ENOTDIR
 from functools import partial
 from glob import iglob
-from itertools import chain, count, cycle
+from itertools import batched, chain, count, cycle
 from os import fsdecode, makedirs, remove, PathLike
 from os.path import abspath, dirname, join as joinpath, normpath, splitext
 from queue import SimpleQueue
@@ -28,15 +29,22 @@ from urllib.request import urlopen, Request
 from uuid import uuid4
 from warnings import warn
 
-from asynctools import
+from asynctools import async_chain
 from concurrenttools import run_as_thread, thread_batch, async_batch
 from encode_uri import encode_uri_component_loose
-from iterutils import
-
+from iterutils import (
+    as_gen_step, chunked, run_gen_step, run_gen_step_iter, through,
+    with_iter_next, Yield, YieldFrom,
+)
+from p115client import (
+    check_response, normalize_attr, normalize_attr_simple, P115Client,
+    P115OpenClient, P115URL,
+)
 from p115client.exception import P115Warning
+from p115pickcode import to_id, to_pickcode
 
 from .iterdir import (
-    get_path_to_cid, iterdir, iter_files,
+    get_path_to_cid, iterdir, iter_files, iter_files_with_path,
     unescape_115_charref, posix_escape_name, DirNode, ID_TO_DIRNODE_CACHE,
 )
 from .util import reduce_image_url_layers
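The new `itertools.batched` import raises the module's effective Python floor to 3.12, where `batched` was introduced. A minimal backport sketch for older interpreters (this helper is not part of the package; the name just mirrors the stdlib function):

```python
from itertools import islice

def batched(iterable, n):
    """Yield successive tuples of up to n items, like itertools.batched (3.12+)."""
    it = iter(iterable)
    while batch := tuple(islice(it, n)):
        yield batch
```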
@@ -44,9 +52,10 @@ from .util import reduce_image_url_layers
 
 @overload
 def batch_get_url(
-    client: str | P115Client,
-
+    client: str | P115Client | P115OpenClient,
+    pickcode: int | str | Iterable[int | str],
     user_agent: str = "",
+    app: str = "android",
     *,
     async_: Literal[False] = False,
     **request_kwargs,
@@ -54,32 +63,30 @@ def batch_get_url(
     ...
 @overload
 def batch_get_url(
-    client: str | P115Client,
-
+    client: str | P115Client | P115OpenClient,
+    pickcode: int | str | Iterable[int | str],
     user_agent: str = "",
+    app: str = "android",
     *,
     async_: Literal[True],
     **request_kwargs,
 ) -> Coroutine[Any, Any, dict[int, P115URL]]:
     ...
 def batch_get_url(
-    client: str | P115Client,
-
+    client: str | P115Client | P115OpenClient,
+    pickcode: int | str | Iterable[int | str],
     user_agent: str = "",
+    app: str = "android",
     *,
     async_: Literal[False, True] = False,
     **request_kwargs,
 ) -> dict[int, P115URL] | Coroutine[Any, Any, dict[int, P115URL]]:
     """批量获取下载链接
 
-    .. attention::
-        请确保所有的 pickcode 都是有效的,要么是现在存在的,要么是以前存在过被删除的。
-
-        如果有目录的 pickcode 混在其中,则会自动排除。
-
     :param client: 115 客户端或 cookies
-    :param
+    :param pickcode: pickcode 或 id
     :param user_agent: "user-agent" 请求头的值
+    :param app: 使用指定 app(设备)的接口
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
 
@@ -91,41 +98,16 @@ def batch_get_url(
         request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
     else:
         request_kwargs["headers"] = {"user-agent": user_agent}
+    if isinstance(pickcode, (int, str)):
+        pickcode = to_pickcode(pickcode)
+    elif not isinstance(pickcode, str):
+        pickcode = ",".join(map(to_pickcode, pickcode))
+    if not isinstance(client, P115Client) or app == "open":
+        get_download_url: Callable = client.download_url_info_open
+    else:
+        get_download_url = partial(client.download_url_app, app=app)
     def gen_step():
-
-            resp = yield client.fs_file_skim(
-                id_or_pickcode,
-                async_=async_,
-                **request_kwargs,
-            )
-            if not resp or not resp["state"]:
-                return {}
-            pickcode = resp["data"][0]["pick_code"]
-        elif isinstance(id_or_pickcode, str):
-            pickcode = id_or_pickcode
-            if not (len(pickcode) == 17 and pickcode.isalnum()):
-                return {}
-        else:
-            ids: list[int] = []
-            pickcodes: list[str] = []
-            for val in id_or_pickcode:
-                if isinstance(val, int):
-                    ids.append(val)
-                elif len(val) == 17 and val.isalnum():
-                    pickcodes.append(val)
-            if ids:
-                resp = yield client.fs_file_skim(
-                    ids,
-                    method="POST",
-                    async_=async_,
-                    **request_kwargs,
-                )
-                if resp and resp["state"]:
-                    pickcodes.extend(info["pick_code"] for info in resp["data"])
-            if not pickcodes:
-                return {}
-            pickcode = ",".join(pickcodes)
-        resp = yield client.download_url_app(pickcode, async_=async_, **request_kwargs)
+        resp = yield get_download_url(pickcode, async_=async_, **request_kwargs)
         if not resp["state"]:
             if resp.get("errno") != 50003:
                 check_response(resp)
@@ -139,7 +121,7 @@ def batch_get_url(
                 name=info["file_name"],
                 size=int(info["file_size"]),
                 sha1=info["sha1"],
-
+                is_dir=False,
                 headers=headers,
             )
             for id, info in resp["data"].items()
@@ -150,10 +132,11 @@ def batch_get_url(
 
 @overload
 def iter_url_batches(
-    client: str | P115Client,
-    pickcodes: Iterator[str],
+    client: str | P115Client | P115OpenClient,
+    pickcodes: Iterator[int | str],
     user_agent: str = "",
     batch_size: int = 10,
+    app: str = "android",
     *,
     async_: Literal[False] = False,
     **request_kwargs,
@@ -161,20 +144,22 @@ def iter_url_batches(
     ...
 @overload
 def iter_url_batches(
-    client: str | P115Client,
-    pickcodes: Iterator[str],
+    client: str | P115Client | P115OpenClient,
+    pickcodes: Iterator[int | str],
     user_agent: str = "",
     batch_size: int = 10,
+    app: str = "android",
     *,
     async_: Literal[True],
     **request_kwargs,
 ) -> AsyncIterator[P115URL]:
     ...
 def iter_url_batches(
-    client: str | P115Client,
-    pickcodes: Iterator[str],
+    client: str | P115Client | P115OpenClient,
+    pickcodes: Iterator[int | str],
     user_agent: str = "",
     batch_size: int = 10,
+    app: str = "android",
     *,
     async_: Literal[False, True] = False,
     **request_kwargs,
@@ -187,9 +172,10 @@ def iter_url_batches(
     如果有目录的 pickcode 混在其中,则会自动排除。
 
     :param client: 115 客户端或 cookies
-    :param pickcodes:
+    :param pickcodes: 一个迭代器,产生 pickcode 或 id
     :param user_agent: "user-agent" 请求头的值
    :param batch_size: 每一个批次处理的个量
+    :param app: 使用指定 app(设备)的接口
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
 
@@ -201,13 +187,16 @@ def iter_url_batches(
         request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
     else:
         request_kwargs["headers"] = {"user-agent": user_agent}
+    if not isinstance(client, P115Client) or app == "open":
+        get_download_url: Callable = client.download_url_info_open
+    else:
+        get_download_url = partial(client.download_url_app, app=app)
     if batch_size <= 0:
         batch_size = 1
     def gen_step():
-
-
-
-                pcs,
+        for pcs in batched(map(to_pickcode, pickcodes), batch_size):
+            resp = yield get_download_url(
+                ",".join(pcs),
                 async_=async_,
                 **request_kwargs,
             )
@@ -225,7 +214,7 @@ def iter_url_batches(
                 name=info["file_name"],
                 size=int(info["file_size"]),
                 sha1=info["sha1"],
-
+                is_dir=False,
                 headers=headers,
             ))
     return run_gen_step_iter(gen_step, async_)
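`iter_url_batches` now normalizes its input through `p115pickcode` and batches with `itertools.batched` instead of hand-rolled chunking. A sketch of the conversions the diff relies on (values are illustrative placeholders):

```python
from p115pickcode import to_id, to_pickcode

pc = to_pickcode(123456789)       # file id -> pickcode string
fid = to_id("abcdefghijklmnopq")  # pickcode string -> file id
# Pickcode strings pass through to_pickcode unchanged, which is why
# map(to_pickcode, pickcodes) above accepts a mixed iterator of ids and pickcodes.
```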
@@ -233,8 +222,8 @@ def iter_url_batches(
 
 @overload
 def iter_files_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: None | str | Iterable[str] = None,
     type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
     cur: Literal[0, 1] = 0,
@@ -254,8 +243,8 @@ def iter_files_with_url(
     ...
 @overload
 def iter_files_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: None | str | Iterable[str] = None,
     type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
     cur: Literal[0, 1] = 0,
@@ -274,8 +263,8 @@ def iter_files_with_url(
 ) -> AsyncIterator[dict]:
     ...
 def iter_files_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: None | str | Iterable[str] = None,
     type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
     cur: Literal[0, 1] = 0,
@@ -295,7 +284,7 @@ def iter_files_with_url(
     """获取文件信息和下载链接
 
     :param client: 115 客户端或 cookies
-    :param cid: 目录 id
+    :param cid: 目录 id 或 pickcode
     :param suffixes: 扩展名,可以有多个,最前面的 "." 可以省略
     :param type: 文件类型
 
@@ -321,7 +310,7 @@ def iter_files_with_url(
 
     :param normalize_attr: 把数据进行转换处理,使之便于阅读
     :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
-    :param app:
+    :param app: 使用指定 app(设备)的接口
     :param raise_for_changed_count: 分批拉取时,发现总数发生变化后,是否报错
     :param user_agent: "user-agent" 请求头的值
     :param async_: 是否异步
@@ -349,6 +338,7 @@ def iter_files_with_url(
         get_url = client.download_url
     else:
         get_url = partial(client.download_url, app=app)
+    cid = to_id(cid)
     def gen_step():
         if suffixes is None:
             it = iter_files(
@@ -386,7 +376,7 @@ def iter_files_with_url(
         with with_iter_next(it) as get_next:
             while True:
                 attr = yield get_next()
-                if attr.get("
+                if attr.get("is_collect", False):
                     if attr["size"] < 1024 * 1024 * 115:
                         attr["url"] = yield get_url(
                             attr["pickcode"],
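The fallback branch above only requests a fresh per-file link for entries flagged `is_collect` (risk-flagged files) that are smaller than 115 MiB: 1024 * 1024 * 115 = 120,586,240 bytes. The same guard in isolation, as a sketch:

```python
SIZE_LIMIT = 1024 * 1024 * 115  # 120_586_240 bytes, i.e. 115 MiB

def needs_fresh_url(attr: dict) -> bool:
    # Only risk-flagged files below the limit get an individual URL request.
    return attr.get("is_collect", False) and attr["size"] < SIZE_LIMIT
```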
@@ -408,8 +398,8 @@ def iter_files_with_url(
 
 @overload
 def iter_images_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: None | str | Iterable[str] = None,
     cur: Literal[0, 1] = 0,
     with_ancestors: bool = False,
@@ -427,8 +417,8 @@ def iter_images_with_url(
     ...
 @overload
 def iter_images_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: None | str | Iterable[str] = None,
     cur: Literal[0, 1] = 0,
     with_ancestors: bool = False,
@@ -445,8 +435,8 @@ def iter_images_with_url(
 ) -> AsyncIterator[dict]:
     ...
 def iter_images_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: None | str | Iterable[str] = None,
     cur: Literal[0, 1] = 0,
     with_ancestors: bool = False,
@@ -467,7 +457,7 @@ def iter_images_with_url(
     请不要把不能被 115 识别为图片的文件扩展名放在 `suffixes` 参数中传入,这只是浪费时间,最后也只能获得普通的下载链接
 
     :param client: 115 客户端或 cookies
-    :param cid: 目录 id
+    :param cid: 目录 id 或 pickcode
     :param suffixes: 扩展名,可以有多个,最前面的 "." 可以省略(请确保扩展名确实能被 115 认为是图片,否则会因为不能批量获取到链接而浪费一些时间再去单独生成下载链接);如果不传(默认),则会获取所有图片
     :param cur: 仅当前目录。0: 否(将遍历子目录树上所有叶子节点),1: 是
     :param with_ancestors: 文件信息中是否要包含 "ancestors"
@@ -482,7 +472,7 @@ def iter_images_with_url(
 
     :param normalize_attr: 把数据进行转换处理,使之便于阅读
     :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
-    :param app:
+    :param app: 使用指定 app(设备)的接口
     :param raise_for_changed_count: 分批拉取时,发现总数发生变化后,是否报错
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
@@ -509,6 +499,7 @@ def iter_images_with_url(
         get_url = client.download_url
     else:
         get_url = partial(client.download_url, app=app)
+    cid = to_id(cid)
     def gen_step():
         if suffixes is None:
             it = iter_files(
@@ -544,7 +535,7 @@ def iter_images_with_url(
             try:
                 attr["url"] = reduce_image_url_layers(attr["thumb"])
             except KeyError:
-                if attr.get("
+                if attr.get("is_collect", False):
                     if attr["size"] < 1024 * 1024 * 115:
                         attr["url"] = yield get_url(
                             attr["pickcode"],
@@ -566,8 +557,8 @@ def iter_images_with_url(
 
 @overload
 def iter_subtitles_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: str | Iterable[str] = (".srt", ".ass", ".ssa"),
     cur: Literal[0, 1] = 0,
     with_ancestors: bool = False,
@@ -585,8 +576,8 @@ def iter_subtitles_with_url(
     ...
 @overload
 def iter_subtitles_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: str | Iterable[str] = (".srt", ".ass", ".ssa"),
     cur: Literal[0, 1] = 0,
     with_ancestors: bool = False,
@@ -603,8 +594,8 @@ def iter_subtitles_with_url(
 ) -> AsyncIterator[dict]:
     ...
 def iter_subtitles_with_url(
-    client: str | P115Client,
-    cid: int = 0,
+    client: str | P115Client | P115OpenClient,
+    cid: int | str = 0,
     suffixes: str | Iterable[str] = (".srt", ".ass", ".ssa"),
     cur: Literal[0, 1] = 0,
     with_ancestors: bool = False,
@@ -630,7 +621,7 @@ def iter_subtitles_with_url(
     请不要把不能被 115 识别为字幕的文件扩展名放在 `suffixes` 参数中传入,这只是浪费时间,最后也只能获得普通的下载链接
 
     :param client: 115 客户端或 cookies
-    :param cid: 目录 id
+    :param cid: 目录 id 或 pickcode
     :param suffixes: 扩展名,可以有多个,最前面的 "." 可以省略(请确保扩展名确实能被 115 认为是字幕,否则会因为不能批量获取到链接而浪费一些时间再去单独生成下载链接)
     :param cur: 仅当前目录。0: 否(将遍历子目录树上所有叶子节点),1: 是
     :param with_ancestors: 文件信息中是否要包含 "ancestors"
@@ -645,7 +636,7 @@ def iter_subtitles_with_url(
 
     :param normalize_attr: 把数据进行转换处理,使之便于阅读
     :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
-    :param app:
+    :param app: 使用指定 app(设备)的接口
     :param raise_for_changed_count: 分批拉取时,发现总数发生变化后,是否报错
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
@@ -656,16 +647,30 @@ def iter_subtitles_with_url(
         client = P115Client(client, check_for_relogin=True)
     if not isinstance(client, P115Client) or app == "open":
         get_url: Callable[..., P115URL] = client.download_url_open
+        fs_mkdir: Callable = client.fs_mkdir_open
+        fs_copy: Callable = client.fs_copy_open
+        fs_delete: Callable = client.fs_delete_open
+        fs_video_subtitle: Callable = client.fs_video_subtitle_open
     elif app in ("", "web", "desktop", "harmony"):
         get_url = client.download_url
+        fs_mkdir = client.fs_mkdir
+        fs_copy = client.fs_copy
+        fs_delete = client.fs_delete
+        fs_video_subtitle = client.fs_video_subtitle
     else:
         get_url = partial(client.download_url, app=app)
+        fs_mkdir = partial(client.fs_mkdir_app, app=app)
+        fs_copy = partial(client.fs_copy_app, app=app)
+        fs_delete = partial(client.fs_delete_app, app=app)
+        fs_video_subtitle = partial(client.fs_video_subtitle_app, app=app)
+    cid = to_id(cid)
     def gen_step():
         nonlocal suffixes
         if isinstance(suffixes, str):
             suffixes = suffixes,
-        do_chain: Callable =
-
+        do_chain: Callable = async_chain.from_iterable if async_ else chain.from_iterable
+        do_next = anext if async_ else next
+        with with_iter_next(chunked(do_chain(
             iter_files(
                 client,
                 cid,
@@ -683,35 +688,42 @@ def iter_subtitles_with_url(
                 **request_kwargs,
             )
             for suffix in suffixes
-        ), 1000)
-        do_next = anext if async_ else next
-        with with_iter_next(it) as get_next:
+        ), 1000)) as get_next:
             while True:
                 items: tuple[dict] = yield get_next()
-                resp = yield
+                resp = yield fs_mkdir(
                     f"subtitle-{uuid4()}",
                     async_=async_,
                     **request_kwargs,
                 )
                 check_response(resp)
                 try:
-
-
+                    if "cid" in resp:
+                        scid = resp["cid"]
+                    else:
+                        data = resp["data"]
+                        if "category_id" in data:
+                            scid = data["category_id"]
+                        else:
+                            scid = data["file_id"]
+                    resp = yield fs_copy(
                         (attr["id"] for attr in items),
                         pid=scid,
                         async_=async_,
                         **request_kwargs,
                     )
                     check_response(resp)
-                    attr = yield do_next(
+                    attr = yield do_next(iter_files(
                         client,
                         scid,
                         first_page_size=1,
+                        normalize_attr=None,
                         base_url=True,
+                        app=app,
                         async_=async_, # type: ignore
                         **request_kwargs,
                     ))
-                    resp = yield
+                    resp = yield fs_video_subtitle(
                         attr["pc"],
                         async_=async_,
                         **request_kwargs,
@@ -722,14 +734,14 @@ def iter_subtitles_with_url(
                         if info.get("file_id")
                     }
                 finally:
-                    yield
+                    yield fs_delete(scid, async_=async_, **request_kwargs)
                 if subtitles:
                     for attr in items:
                         attr["url"] = subtitles[attr["sha1"]]
                         yield Yield(attr)
                 else:
                     for attr in items:
-                        if attr.get("
+                        if attr.get("is_collect", False):
                            if attr["size"] < 1024 * 1024 * 115:
                                attr["url"] = yield get_url(
                                    attr["pickcode"],
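The block above introduces the same three-way dispatch that recurs throughout this release: open clients (or `app="open"`) use the `*_open` methods, the web family uses the plain methods, and any other app uses the `*_app` variants bound with `partial`. Condensed into a sketch (the helper name is hypothetical):

```python
from functools import partial
from p115client import P115Client

NAMES = ("fs_mkdir", "fs_copy", "fs_delete", "fs_video_subtitle")

def pick_fs_methods(client, app: str):
    # Hypothetical helper mirroring the dispatch used in the diff.
    if not isinstance(client, P115Client) or app == "open":
        return tuple(getattr(client, f"{n}_open") for n in NAMES)
    if app in ("", "web", "desktop", "harmony"):
        return tuple(getattr(client, n) for n in NAMES)
    return tuple(partial(getattr(client, f"{n}_app"), app=app) for n in NAMES)
```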
@@ -751,9 +763,10 @@ def iter_subtitles_with_url(
 
 @overload
 def iter_subtitle_batches(
-    client: str | P115Client,
-    file_ids: Iterable[int],
+    client: str | P115Client | P115OpenClient,
+    file_ids: Iterable[int | str],
     batch_size: int = 1_000,
+    app: str = "web",
     *,
     async_: Literal[False] = False,
     **request_kwargs,
@@ -761,18 +774,20 @@ def iter_subtitle_batches(
     ...
 @overload
 def iter_subtitle_batches(
-    client: str | P115Client,
-    file_ids: Iterable[int],
+    client: str | P115Client | P115OpenClient,
+    file_ids: Iterable[int | str],
     batch_size: int = 1_000,
+    app: str = "web",
     *,
     async_: Literal[True],
     **request_kwargs,
 ) -> AsyncIterator[dict]:
     ...
 def iter_subtitle_batches(
-    client: str | P115Client,
-    file_ids: Iterable[int],
+    client: str | P115Client | P115OpenClient,
+    file_ids: Iterable[int | str],
     batch_size: int = 1_000,
+    app: str = "web",
     *,
     async_: Literal[False, True] = False,
     **request_kwargs,
@@ -786,7 +801,7 @@ def iter_subtitle_batches(
     目前看来 115 只支持:".srt"、".ass"、".ssa",如果不能被 115 识别为字幕,将会被自动略过
 
     :param client: 115 客户端或 cookies
-    :param file_ids: 一组文件的 id
+    :param file_ids: 一组文件的 id 或 pickcode
     :param batch_size: 每一个批次处理的个量
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
@@ -797,33 +812,50 @@ def iter_subtitle_batches(
         client = P115Client(client, check_for_relogin=True)
     if batch_size <= 0:
         batch_size = 1_000
+    if not isinstance(client, P115Client) or app == "open":
+        fs_mkdir: Callable = client.fs_mkdir_open
+        fs_copy: Callable = client.fs_copy_open
+        fs_delete: Callable = client.fs_delete_open
+        fs_video_subtitle: Callable = client.fs_video_subtitle_open
+    elif app in ("", "web", "desktop", "harmony"):
+        fs_mkdir = client.fs_mkdir
+        fs_copy = client.fs_copy
+        fs_delete = client.fs_delete
+        fs_video_subtitle = client.fs_video_subtitle
+    else:
+        fs_mkdir = partial(client.fs_mkdir_app, app=app)
+        fs_copy = partial(client.fs_copy_app, app=app)
+        fs_delete = partial(client.fs_delete_app, app=app)
+        fs_video_subtitle = partial(client.fs_video_subtitle_app, app=app)
     def gen_step():
         do_next: Callable = anext if async_ else next
-        for ids in
+        for ids in batched(map(to_id, file_ids), batch_size):
            try:
-                resp = yield
+                resp = yield fs_mkdir(
                    f"subtitle-{uuid4()}",
                    async_=async_,
                    **request_kwargs,
                )
                check_response(resp)
                scid = resp["cid"]
-                resp = yield
+                resp = yield fs_copy(
                    ids,
                    pid=scid,
                    async_=async_,
                    **request_kwargs,
                )
                check_response(resp)
-                attr = yield do_next(
+                attr = yield do_next(iter_files(
                    client,
                    scid,
                    first_page_size=1,
+                    normalize_attr=None,
                    base_url=True,
-
+                    app=app,
+                    async_=async_, # type: ignore
                    **request_kwargs,
                ))
-                resp = yield
+                resp = yield fs_video_subtitle(
                    attr["pc"],
                    async_=async_,
                    **request_kwargs,
@@ -835,14 +867,15 @@ def iter_subtitle_batches(
         except (StopIteration, StopAsyncIteration):
             pass
         finally:
-            yield
+            yield fs_delete(scid, async_=async_, **request_kwargs)
     return run_gen_step_iter(gen_step, async_)
 
 
+# TODO: 要支持 open 接口
 @overload
 def make_strm(
     client: str | P115Client,
-    cid: int = 0,
+    cid: int | str = 0,
     save_dir: bytes | str | PathLike = ".",
     origin: str = "http://localhost:8000",
     update: bool = False,
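Both subtitle helpers use the same temporary-folder trick that the hunks above fill in: create a scratch directory, copy the batch into it, read one entry to learn the folder's pickcode (`"pc"`), query the video-subtitle endpoint once for the whole folder, then delete the scratch directory. A condensed sketch that assumes the dispatched `fs_*` callables, `client`, `iter_files`, and `uuid4` from the surrounding code, with error handling elided:

```python
def fetch_subtitle_urls_for_batch(ids):
    resp = fs_mkdir(f"subtitle-{uuid4()}")    # 1. scratch folder
    scid = resp["cid"]
    try:
        fs_copy(ids, pid=scid)                # 2. copy candidates in
        attr = next(iter_files(client, scid, first_page_size=1, normalize_attr=None))
        return fs_video_subtitle(attr["pc"])  # 3.+4. one query for the whole folder
    finally:
        fs_delete(scid)                       # 5. always clean up
```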
@@ -868,7 +901,7 @@ def make_strm(
 @overload
 def make_strm(
     client: str | P115Client,
-    cid: int = 0,
+    cid: int | str = 0,
     save_dir: bytes | str | PathLike = ".",
     origin: str = "http://localhost:8000",
     update: bool = False,
@@ -893,7 +926,7 @@ def make_strm(
     ...
 def make_strm(
     client: str | P115Client,
-    cid: int = 0,
+    cid: int | str = 0,
     save_dir: bytes | str | PathLike = ".",
     origin: str = "http://localhost:8000",
     update: bool = False,
@@ -918,7 +951,7 @@ def make_strm(
     """生成 strm 保存到本地
 
     :param client: 115 客户端或 cookies
-    :param cid: 目录 id
+    :param cid: 目录 id 或 pickcode
     :param save_dir: 本地的保存目录,默认是当前工作目录
     :param origin: strm 文件的 `HTTP 源 <https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Origin>`_
     :param update: 是否更新 strm 文件,如果为 False,则跳过已存在的路径
@@ -951,7 +984,7 @@ def make_strm(
     :param max_workers: 最大并发数,主要用于限制同时打开的文件数
     :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
     :param path_already: 如果为 True,则说明 id_to_dirnode 中已经具备构建路径所需要的目录节点,所以不会再去拉取目录节点的信息
-    :param app:
+    :param app: 使用指定 app(设备)的接口
     :param fs_files_cooldown: `fs_files` 接口调用的冷却时间,大于 0,则使用此时间间隔执行并发
     :param fs_files_max_workers: `fs_files` 接口调用的最大并发数
     :param async_: 是否异步
@@ -1039,6 +1072,7 @@ def make_strm(
             append(ignored, path)
             return
         append(upserted, path)
+    cid = to_id(cid)
    def gen_step():
        nonlocal abspath_prefix_length, savedir
        start_t = time()
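With `cid` widened to `int | str` and routed through `to_id`, `make_strm` can now be pointed at a directory by pickcode as well as by id. A hedged usage sketch (all values are placeholders):

```python
make_strm(
    client,
    "abcdefghijklmnopq",            # directory pickcode, or a numeric id
    save_dir="./strm",
    origin="http://localhost:8000",
    update=True,                    # rewrite .strm files that already exist
)
```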
@@ -1120,8 +1154,10 @@ def make_strm(
 @overload
 def iter_download_nodes(
     client: str | P115Client,
-    pickcode:
+    pickcode: str | int = "",
     files: bool = True,
+    ensure_name: bool = False,
+    id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
     max_workers: None | int = 1,
     app: str = "android",
     *,
@@ -1132,8 +1168,10 @@ def iter_download_nodes(
 @overload
 def iter_download_nodes(
     client: str | P115Client,
-    pickcode:
+    pickcode: str | int = "",
     files: bool = True,
+    ensure_name: bool = False,
+    id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
     max_workers: None | int = 1,
     app: str = "android",
     *,
@@ -1143,8 +1181,10 @@ def iter_download_nodes(
     ...
 def iter_download_nodes(
     client: str | P115Client,
-    pickcode:
+    pickcode: str | int = "",
     files: bool = True,
+    ensure_name: bool = False,
+    id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
     max_workers: None | int = 1,
     app: str = "android",
     *,
@@ -1154,10 +1194,12 @@ def iter_download_nodes(
     """获取一个目录内所有的文件或者目录的信息(简略)
 
     :param client: 115 客户端或 cookies
-    :param pickcode: 目录的
+    :param pickcode: 目录的 pickcode 或 id
     :param files: 如果为 True,则只获取文件,否则只获取目录
-    :param
-    :param
+    :param ensure_name: 确保返回数据中有 "name" 字段
+    :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
+    :param max_workers: 最大并发数,如果为 None 或 <= 0,则自动确定
+    :param app: 使用指定 app(设备)的接口
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
 
@@ -1166,23 +1208,69 @@ def iter_download_nodes(
     if isinstance(client, str):
         client = P115Client(client, check_for_relogin=True)
     get_base_url = cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__
+    if async_:
+        if max_workers is None or max_workers <= 0:
+            max_workers = 20
+    elif max_workers is not None and max_workers <= 0:
+        max_workers = None
     if files:
         method = client.download_files
     else:
         method = client.download_folders
-
+    if id_to_dirnode is None:
+        id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
+    file_skim = client.fs_file_skim
+    @as_gen_step
+    def normalize_attrs(attrs: list[dict], /):
+        if files:
+            for i, info in enumerate(attrs):
+                attrs[i] = {
+                    "is_dir": False,
+                    "id": to_id(info["pc"]),
+                    "pickcode": info["pc"],
+                    "parent_id": int(info["pid"]),
+                    "size": info["fs"],
+                }
+            if ensure_name:
+                resp = yield file_skim(
+                    (a["id"] for a in attrs),
+                    method="POST",
+                    async_=async_,
+                    **request_kwargs,
+                )
+                if resp.get("error") != "文件不存在":
+                    check_response(resp)
+                    nodes = {int(a["file_id"]): a for a in resp["data"]}
+                    for attr in attrs:
+                        if node := nodes.get(attr["id"]):
+                            attr["sha1"] = node["sha1"]
+                            attr["name"] = unescape_115_charref(node["file_name"])
+        else:
+            for i, info in enumerate(attrs):
+                attrs[i] = {
+                    "is_dir": True,
+                    "id": int(info["fid"]),
+                    "name": info["fn"],
+                    "parent_id": int(info["pid"]),
+                }
+            if id_to_dirnode is not ... and id_to_dirnode is not None:
+                for attr in attrs:
+                    id_to_dirnode[attr["id"]] = DirNode(attr["name"], attr["parent_id"])
+        return attrs
+    get_nodes = partial(
+        method,
+        async_=async_,
+        **{"base_url": get_base_url, **request_kwargs},
+    )
     if max_workers == 1:
-        def gen_step(pickcode):
-
-            resp = yield client.fs_file_skim(pickcode, async_=async_, **request_kwargs)
-            check_response(resp)
-            pickcode = resp["data"][0]["pick_code"]
+        def gen_step(pickcode: int | str, /):
+            pickcode = to_pickcode(pickcode)
            for i in count(1):
                payload = {"pickcode": pickcode, "page": i}
                resp = yield get_nodes(payload)
                check_response(resp)
                data = resp["data"]
-                yield YieldFrom(data["list"])
+                yield YieldFrom(normalize_attrs(data["list"]))
                if not data["has_next_page"]:
                    break
    else:
@@ -1193,7 +1281,8 @@ def iter_download_nodes(
    else:
        q = SimpleQueue()
        get, put = q.get, q.put_nowait
-
+        @as_gen_step
+        def request(pickcode: str, /):
            nonlocal max_page
            while True:
                page = get_next_page()
@@ -1206,37 +1295,23 @@ def iter_download_nodes(
                    put(e)
                    return
                data = resp["data"]
-                put(data["list"])
+                put((yield normalize_attrs(data["list"])))
                if not data["has_next_page"]:
                    max_page = page
-        def gen_step(pickcode):
-            nonlocal max_workers, max_page, get_next_page
-            max_page = 0
-            get_next_page = count(1).__next__
+        def gen_step(pickcode: int | str, /):
            if async_:
-
-                max_workers = 20
-                n = max_workers
+                n = cast(int, max_workers)
                task_group = TaskGroup()
                yield task_group.__aenter__()
                create_task = task_group.create_task
                submit: Callable = lambda f, /, *a, **k: create_task(f(*a, **k))
                shutdown: Callable = lambda: task_group.__aexit__(None, None, None)
            else:
-                if max_workers is not None and max_workers <= 0:
-                    max_workers = None
                executor = ThreadPoolExecutor(max_workers)
                n = executor._max_workers
                submit = executor.submit
                shutdown = lambda: executor.shutdown(False, cancel_futures=True)
-
-            resp = yield client.fs_file_skim(
-                pickcode,
-                async_=async_, # type: ignore
-                **request_kwargs,
-            )
-            check_response(resp)
-            pickcode = resp["data"][0]["pick_code"]
+            pickcode = to_pickcode(pickcode)
            try:
                sentinel = object()
                countdown: Callable
@@ -1253,8 +1328,8 @@ def iter_download_nodes(
                    n -= 1
                    if not n:
                        put(sentinel)
-            for
-                submit(
+            for _ in range(n):
+                submit(request, pickcode, async_=async_).add_done_callback(countdown)
            while True:
                ls = yield get()
                if ls is sentinel:
@@ -1268,29 +1343,38 @@ def iter_download_nodes(
        return run_gen_step_iter(gen_step(pickcode), async_)
    else:
        def chain():
+            nonlocal max_page, get_next_page
+            pickcodes: list[str] = []
+            add_pickcode = pickcodes.append
            with with_iter_next(iterdir(
                client,
-                ensure_file=False,
+                ensure_file=None if files else False,
                app=app,
                normalize_attr=normalize_attr_simple,
+                id_to_dirnode=id_to_dirnode,
                raise_for_changed_count=True,
                async_=async_,
                **request_kwargs,
            )) as get_next:
                while True:
                    attr = yield get_next()
-                    if
-
-
-                    )
-
+                    if attr["is_dir"]:
+                        if not files:
+                            yield Yield(attr)
+                        add_pickcode(attr["pickcode"])
+                    elif files:
+                        yield Yield(attr)
+            for pickcode in pickcodes:
+                yield YieldFrom(run_gen_step_iter(gen_step(pickcode), async_))
+                max_page = 0
+                get_next_page = count(1).__next__
        return run_gen_step_iter(chain, async_)
 
 
 @overload
 def iter_download_files(
     client: str | P115Client,
-    cid: int = 0,
+    cid: int | str = 0,
     id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
     escape: None | bool | Callable[[str], str] = True,
     with_ancestors: bool = True,
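The multi-worker branch of `iter_download_nodes` above follows a classic sentinel pattern: n workers pull page numbers, push result lists onto a shared queue, and the last worker to finish enqueues a sentinel so the consumer knows to stop. A minimal thread-only sketch of the same idea (not the package's code):

```python
from concurrent.futures import ThreadPoolExecutor
from queue import SimpleQueue

def fan_out(produce_pages, n: int = 4):
    # produce_pages(put) is expected to call put(items) for each page it fetches.
    q: SimpleQueue = SimpleQueue()
    sentinel = object()
    remaining = n

    def countdown(_future):
        nonlocal remaining
        remaining -= 1
        if not remaining:        # the last worker to finish wakes the consumer
            q.put(sentinel)

    with ThreadPoolExecutor(n) as pool:
        for _ in range(n):
            pool.submit(produce_pages, q.put).add_done_callback(countdown)
        while (batch := q.get()) is not sentinel:
            yield from batch
```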
@@ -1304,7 +1388,7 @@ def iter_download_files(
 @overload
 def iter_download_files(
     client: str | P115Client,
-    cid: int = 0,
+    cid: int | str = 0,
     id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
     escape: None | bool | Callable[[str], str] = True,
     with_ancestors: bool = True,
@@ -1317,7 +1401,7 @@ def iter_download_files(
     ...
 def iter_download_files(
     client: str | P115Client,
-    cid: int = 0,
+    cid: int | str = 0,
     id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
     escape: None | bool | Callable[[str], str] = True,
     with_ancestors: bool = True,
@@ -1330,12 +1414,10 @@ def iter_download_files(
     """获取一个目录内所有的文件信息(简略),且包括 "dir_ancestors"、"dirname"
 
     .. note::
-        并不提供文件的
-
-        如果要通过 pickcode 获取基本信息,请用 `P115Client.fs_supervision`
+        并不提供文件的 name,如果需要获得 name,你可以在之后获取下载链接,然后从下载链接中获取实际的名字
 
     :param client: 115 客户端或 cookies
-    :param cid: 目录 id
+    :param cid: 目录 id 或 pickcode
     :param escape: 对文件名进行转义
 
     - 如果为 None,则不处理;否则,这个函数用来对文件名中某些符号进行转义,例如 "/" 等
@@ -1345,8 +1427,8 @@ def iter_download_files(
 
     :param with_ancestors: 文件信息中是否要包含 "ancestors"
     :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
-    :param max_workers: 最大并发数,如果为 None 或 <= 0
-    :param app:
+    :param max_workers: 最大并发数,如果为 None 或 <= 0,则自动确定
+    :param app: 使用指定 app(设备)的接口
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
 
@@ -1356,7 +1438,7 @@ def iter_download_files(
         client = P115Client(client, check_for_relogin=True)
     if id_to_dirnode is None:
         id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
-
+    elif id_to_dirnode is ...:
         id_to_dirnode = {}
     if isinstance(escape, bool):
         if escape:
@@ -1396,21 +1478,86 @@ def iter_download_files(
        else:
            dirname = id_to_path[pid] = get_path(id_to_dirnode[pid]) + "/"
        return dirname + name
-    def norm_attr(
-        pid =
-        attr = {"parent_id": pid, "pickcode": info["pc"], "size": info["fs"]}
+    def norm_attr(attr: dict, /) -> dict:
+        pid = attr["parent_id"]
        pnode = id_to_dirnode[pid]
        if with_ancestors:
            attr["dir_ancestors"] = get_ancestors(pid, pnode)
        attr["dirname"] = get_path(pnode)
        return attr
-
-
+    ancestors_loaded: None | bool = False
+    @as_gen_step
+    def load_ancestors(pickcode: str, /):
+        nonlocal ancestors_loaded
+        try:
+            yield through(iter_download_nodes(
+                client,
+                pickcode,
+                files=False,
+                id_to_dirnode=id_to_dirnode,
+                max_workers=max_workers,
+                app=app,
+                async_=async_,
+                **request_kwargs,
+            ))
+        finally:
+            ancestors_loaded = True
+    def gen_step(pickcode: str = to_pickcode(cid), /):
+        nonlocal ancestors_loaded
+        if pickcode:
+            if cid:
+                from .iterdir import _iter_fs_files
+                do_next: Callable = anext if async_ else next
+                yield do_next(_iter_fs_files(
+                    client,
+                    to_id(cid),
+                    page_size=1,
+                    id_to_dirnode=id_to_dirnode,
+                    async_=async_,
+                    **request_kwargs,
+                ))
+            if async_:
+                task: Any = create_task(load_ancestors(pickcode))
+            else:
+                task = run_as_thread(load_ancestors, pickcode)
+            cache: list[dict] = []
+            add_to_cache = cache.append
+            with with_iter_next(iter_download_nodes(
+                client,
+                pickcode,
+                files=True,
+                max_workers=max_workers,
+                app=app,
+                async_=async_,
+                **request_kwargs,
+            )) as get_next:
+                while True:
+                    attr = yield get_next()
+                    if ancestors_loaded is None:
+                        yield Yield(norm_attr(attr))
+                    elif ancestors_loaded:
+                        yield YieldFrom(map(norm_attr, cache))
+                        cache.clear()
+                        if async_:
+                            yield task
+                        else:
+                            task.result()
+                        ancestors_loaded = None
+                    else:
+                        add_to_cache(attr)
+            if cache:
+                if async_:
+                    yield task
+                else:
+                    task.result()
+                yield YieldFrom(map(norm_attr, cache))
+        else:
            defaults = {
                "dir_ancestors": [{"id": 0, "parent_id": 0, "name": ""}],
                "dirname": "/",
            }
            pickcodes: list[str] = []
+            add_pickcode = pickcodes.append
            with with_iter_next(iterdir(
                client,
                id_to_dirnode=id_to_dirnode,
@@ -1422,7 +1569,7 @@ def iter_download_files(
                while True:
                    attr = yield get_next()
                    if attr["is_dir"]:
-
+                        add_pickcode(attr["pickcode"])
                    else:
                        yield Yield({
                            "parent_id": attr["parent_id"],
@@ -1432,82 +1579,13 @@ def iter_download_files(
                        })
            for pickcode in pickcodes:
                yield YieldFrom(run_gen_step_iter(gen_step(pickcode), async_))
-
-        if not pickcode:
-            resp = yield client.fs_file_skim(cid, async_=async_, **request_kwargs)
-            check_response(resp)
-            info = resp["data"][0]
-            if info["sha1"]:
-                raise NotADirectoryError(ENOTDIR, info)
-            pickcode = info["pick_code"]
-        ancestors_loaded: None | bool = False
-        def load_ancestors():
-            nonlocal ancestors_loaded
-            if cid:
-                resp = yield client.fs_files(
-                    {"cid": cid, "limit": 1},
-                    async_=async_,
-                    **request_kwargs,
-                )
-                check_response(resp)
-                for info in resp["path"][1:]:
-                    id_to_dirnode[int(info["cid"])] = DirNode(info["name"], int(info["pid"]))
-            try:
-                with with_iter_next(iter_download_nodes(
-                    client,
-                    pickcode,
-                    files=False,
-                    max_workers=max_workers,
-                    app=app,
-                    async_=async_,
-                    **request_kwargs,
-                )) as get_next:
-                    while True:
-                        info = yield get_next()
-                        id_to_dirnode[int(info["fid"])] = DirNode(info["fn"], int(info["pid"]))
-            finally:
-                ancestors_loaded = True
-        if async_:
-            task: Any = create_task(run_gen_step(load_ancestors, True))
-        else:
-            task = run_as_thread(run_gen_step, load_ancestors)
-        cache: list[dict] = []
-        add_to_cache = cache.append
-        with with_iter_next(iter_download_nodes(
-            client,
-            pickcode,
-            files=True,
-            max_workers=max_workers,
-            app=app,
-            async_=async_, # type: ignore
-            **request_kwargs,
-        )) as get_next:
-            while True:
-                info = yield get_next()
-                if ancestors_loaded is None:
-                    yield Yield(norm_attr(info))
-                elif ancestors_loaded:
-                    yield YieldFrom(map(norm_attr, cache))
-                    cache.clear()
-                    if async_:
-                        yield task
-                    else:
-                        task.result()
-                    ancestors_loaded = None
-                else:
-                    add_to_cache(info)
-            if cache:
-                if async_:
-                    yield task
-                else:
-                    task.result()
-                yield YieldFrom(map(norm_attr, cache))
+                ancestors_loaded = False
    return run_gen_step_iter(gen_step, async_)
 
 
 @overload
 def get_remaining_open_count(
-    client: str | P115Client,
+    client: str | P115Client | P115OpenClient,
     app: str = "android",
     *,
     async_: Literal[False] = False,
@@ -1516,7 +1594,7 @@ def get_remaining_open_count(
     ...
 @overload
 def get_remaining_open_count(
-    client: str | P115Client,
+    client: str | P115Client | P115OpenClient,
     app: str = "android",
     *,
     async_: Literal[True],
@@ -1524,7 +1602,7 @@ def get_remaining_open_count(
 ) -> Coroutine[Any, Any, int]:
     ...
 def get_remaining_open_count(
-    client: str | P115Client,
+    client: str | P115Client | P115OpenClient,
     app: str = "android",
     *,
     async_: Literal[False, True] = False,
@@ -1536,7 +1614,7 @@ def get_remaining_open_count(
     假设总数是 n,通常总数是 10,偶尔会调整,如果已经有 m 个被打开的链接,则返回的数字是 n-m
 
     :param client: 115 客户端或 cookies
-    :param app:
+    :param app: 使用指定 app(设备)的接口
     :param async_: 是否异步
     :param request_kwargs: 其它请求参数
 
@@ -1554,18 +1632,28 @@ def get_remaining_open_count(
    cache: list = []
    add_to_cache = cache.append
    try:
-
-
-
-
-
-
+        if isinstance(client, P115OpenClient):
+            it: Iterator[dict] | AsyncIterator[dict] = iter_files(
+                client,
+                type=4,
+                app=app,
+                async_=async_, # type: ignore
+                **request_kwargs,
+            )
+        else:
+            it = iter_download_nodes(
+                client,
+                app=app,
+                async_=async_,
+                **request_kwargs,
+            )
+        with with_iter_next(it) as get_next:
            while True:
-
-                if
+                attr = yield get_next()
+                if attr["size"] <= 1024 * 1024 * 200:
                    continue
                try:
-                    url = yield get_url(
+                    url = yield get_url(attr["pickcode"], async_=async_)
                except FileNotFoundError:
                    continue
                request = Request(url, headers={"user-agent": ""})
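`get_remaining_open_count` probes only files above 1024 * 1024 * 200 bytes (200 MiB) and keeps opening download streams until one fails, so the returned number is the quota minus the streams already open; 115 normally allows about 10. A hedged usage sketch:

```python
remaining = get_remaining_open_count(client, app="android")
print(f"{remaining} more long-lived large-file streams can still be opened")
```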