p115client 0.0.5.12.3__py3-none-any.whl → 0.0.5.13.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,12 +10,13 @@ __all__ = [
  __doc__ = "This module provides some functions related to downloading"

  from asyncio import create_task, to_thread, Queue as AsyncQueue, TaskGroup
- from collections.abc import AsyncIterator, Callable, Coroutine, Iterable, Iterator, MutableMapping
+ from collections.abc import (
+     AsyncIterator, Callable, Coroutine, Iterable, Iterator, MutableMapping,
+ )
  from concurrent.futures import ThreadPoolExecutor
- from errno import ENOTDIR
  from functools import partial
  from glob import iglob
- from itertools import chain, count, cycle, islice
+ from itertools import batched, chain, count, cycle
  from os import fsdecode, makedirs, remove, PathLike
  from os.path import abspath, dirname, join as joinpath, normpath, splitext
  from queue import SimpleQueue
@@ -28,15 +29,22 @@ from urllib.request import urlopen, Request
  from uuid import uuid4
  from warnings import warn

- from asynctools import async_chain_from_iterable
+ from asynctools import async_chain
  from concurrenttools import run_as_thread, thread_batch, async_batch
  from encode_uri import encode_uri_component_loose
- from iterutils import chunked, run_gen_step, run_gen_step_iter, with_iter_next, Yield, YieldFrom
- from p115client import check_response, normalize_attr, normalize_attr_simple, P115Client, P115URL
+ from iterutils import (
+     as_gen_step, chunked, run_gen_step, run_gen_step_iter, through,
+     with_iter_next, Yield, YieldFrom,
+ )
+ from p115client import (
+     check_response, normalize_attr, normalize_attr_simple, P115Client,
+     P115OpenClient, P115URL,
+ )
  from p115client.exception import P115Warning
+ from p115pickcode import to_id, to_pickcode

  from .iterdir import (
-     get_path_to_cid, iterdir, iter_files, iter_files_raw, iter_files_with_path,
+     get_path_to_cid, iterdir, iter_files, iter_files_with_path,
      unescape_115_charref, posix_escape_name, DirNode, ID_TO_DIRNODE_CACHE,
  )
  from .util import reduce_image_url_layers
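
The newly added p115pickcode dependency is what lets the functions below accept ids and pickcodes interchangeably. A minimal sketch of the two helpers as this module relies on them; the behavior is inferred from how this diff uses them, the only assumption being that a pickcode is a reversible encoding of the numeric file id:

    # Hedged sketch: behavior inferred from this diff's own usage.
    from p115pickcode import to_id, to_pickcode

    pc = to_pickcode(1234567)     # int id -> pickcode string
    assert to_id(pc) == 1234567   # pickcode string -> int id (round trip)
    assert to_pickcode(pc) == pc  # an existing pickcode passes through unchanged
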
@@ -44,9 +52,10 @@ from .util import reduce_image_url_layers

  @overload
  def batch_get_url(
-     client: str | P115Client,
-     id_or_pickcode: int | str | Iterable[int | str],
+     client: str | P115Client | P115OpenClient,
+     pickcode: int | str | Iterable[int | str],
      user_agent: str = "",
+     app: str = "android",
      *,
      async_: Literal[False] = False,
      **request_kwargs,
@@ -54,32 +63,30 @@ def batch_get_url(
      ...
  @overload
  def batch_get_url(
-     client: str | P115Client,
-     id_or_pickcode: int | str | Iterable[int | str],
+     client: str | P115Client | P115OpenClient,
+     pickcode: int | str | Iterable[int | str],
      user_agent: str = "",
+     app: str = "android",
      *,
      async_: Literal[True],
      **request_kwargs,
  ) -> Coroutine[Any, Any, dict[int, P115URL]]:
      ...
  def batch_get_url(
-     client: str | P115Client,
-     id_or_pickcode: int | str | Iterable[int | str],
+     client: str | P115Client | P115OpenClient,
+     pickcode: int | str | Iterable[int | str],
      user_agent: str = "",
+     app: str = "android",
      *,
      async_: Literal[False, True] = False,
      **request_kwargs,
  ) -> dict[int, P115URL] | Coroutine[Any, Any, dict[int, P115URL]]:
      """Batch-fetch download links

-     .. attention::
-         Make sure every pickcode is valid: each must either exist now or have existed before being deleted.
-
-         Any directory pickcodes mixed in are excluded automatically.
-
      :param client: 115 client or cookies
-     :param id_or_pickcode: treated as an id if an int, as a pickcode if a str
+     :param pickcode: pickcode or id
      :param user_agent: value of the "user-agent" request header
+     :param app: use the API of the given app (device)
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters

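A hypothetical synchronous call against the new signature (the p115client.tool.download module path and the cookies string are placeholders; ids and pickcodes may now be mixed in one call):

    from p115client import P115Client
    from p115client.tool.download import batch_get_url  # module path assumed

    client = P115Client("UID=...; CID=...; SEID=...")   # placeholder cookies
    urls = batch_get_url(client, [1234567, "ecjq9ichcb9843jyu"], app="android")
    for file_id, url in urls.items():
        print(file_id, url)
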
@@ -91,41 +98,16 @@ def batch_get_url(
          request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
      else:
          request_kwargs["headers"] = {"user-agent": user_agent}
+     if isinstance(pickcode, (int, str)):
+         pickcode = to_pickcode(pickcode)
+     elif not isinstance(pickcode, str):
+         pickcode = ",".join(map(to_pickcode, pickcode))
+     if not isinstance(client, P115Client) or app == "open":
+         get_download_url: Callable = client.download_url_info_open
+     else:
+         get_download_url = partial(client.download_url_app, app=app)
      def gen_step():
-         if isinstance(id_or_pickcode, int):
-             resp = yield client.fs_file_skim(
-                 id_or_pickcode,
-                 async_=async_,
-                 **request_kwargs,
-             )
-             if not resp or not resp["state"]:
-                 return {}
-             pickcode = resp["data"][0]["pick_code"]
-         elif isinstance(id_or_pickcode, str):
-             pickcode = id_or_pickcode
-             if not (len(pickcode) == 17 and pickcode.isalnum()):
-                 return {}
-         else:
-             ids: list[int] = []
-             pickcodes: list[str] = []
-             for val in id_or_pickcode:
-                 if isinstance(val, int):
-                     ids.append(val)
-                 elif len(val) == 17 and val.isalnum():
-                     pickcodes.append(val)
-             if ids:
-                 resp = yield client.fs_file_skim(
-                     ids,
-                     method="POST",
-                     async_=async_,
-                     **request_kwargs,
-                 )
-                 if resp and resp["state"]:
-                     pickcodes.extend(info["pick_code"] for info in resp["data"])
-             if not pickcodes:
-                 return {}
-             pickcode = ",".join(pickcodes)
-         resp = yield client.download_url_app(pickcode, async_=async_, **request_kwargs)
+         resp = yield get_download_url(pickcode, async_=async_, **request_kwargs)
          if not resp["state"]:
              if resp.get("errno") != 50003:
                  check_response(resp)
@@ -139,7 +121,7 @@ def batch_get_url(
                  name=info["file_name"],
                  size=int(info["file_size"]),
                  sha1=info["sha1"],
-                 is_directory=False,
+                 is_dir=False,
                  headers=headers,
              )
              for id, info in resp["data"].items()
@@ -150,10 +132,11 @@ def batch_get_url(

  @overload
  def iter_url_batches(
-     client: str | P115Client,
-     pickcodes: Iterator[str],
+     client: str | P115Client | P115OpenClient,
+     pickcodes: Iterator[int | str],
      user_agent: str = "",
      batch_size: int = 10,
+     app: str = "android",
      *,
      async_: Literal[False] = False,
      **request_kwargs,
@@ -161,20 +144,22 @@ def iter_url_batches(
      ...
  @overload
  def iter_url_batches(
-     client: str | P115Client,
-     pickcodes: Iterator[str],
+     client: str | P115Client | P115OpenClient,
+     pickcodes: Iterator[int | str],
      user_agent: str = "",
      batch_size: int = 10,
+     app: str = "android",
      *,
      async_: Literal[True],
      **request_kwargs,
  ) -> AsyncIterator[P115URL]:
      ...
  def iter_url_batches(
-     client: str | P115Client,
-     pickcodes: Iterator[str],
+     client: str | P115Client | P115OpenClient,
+     pickcodes: Iterator[int | str],
      user_agent: str = "",
      batch_size: int = 10,
+     app: str = "android",
      *,
      async_: Literal[False, True] = False,
      **request_kwargs,
@@ -187,9 +172,10 @@ def iter_url_batches(
          Any directory pickcodes mixed in are excluded automatically.

      :param client: 115 client or cookies
-     :param pickcodes: an iterator producing pickcodes
+     :param pickcodes: an iterator producing pickcodes or ids
      :param user_agent: value of the "user-agent" request header
      :param batch_size: number of items handled per batch
+     :param app: use the API of the given app (device)
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters

@@ -201,13 +187,16 @@ def iter_url_batches(
          request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
      else:
          request_kwargs["headers"] = {"user-agent": user_agent}
+     if not isinstance(client, P115Client) or app == "open":
+         get_download_url: Callable = client.download_url_info_open
+     else:
+         get_download_url = partial(client.download_url_app, app=app)
      if batch_size <= 0:
          batch_size = 1
      def gen_step():
-         it = iter(pickcodes)
-         while pcs := ",".join(islice(it, batch_size)):
-             resp = yield client.download_url_app(
-                 pcs,
+         for pcs in batched(map(to_pickcode, pickcodes), batch_size):
+             resp = yield get_download_url(
+                 ",".join(pcs),
                  async_=async_,
                  **request_kwargs,
              )
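
The manual islice loop is replaced by itertools.batched, which implies a Python 3.12+ floor. The two styles produce the same groups; a self-contained comparison:

    from itertools import batched, islice

    pickcodes = ["pc1", "pc2", "pc3", "pc4", "pc5"]

    it = iter(pickcodes)            # old style: drain with islice
    old = []
    while chunk := list(islice(it, 2)):
        old.append(chunk)

    new = [list(b) for b in batched(pickcodes, 2)]   # new style
    assert old == new == [["pc1", "pc2"], ["pc3", "pc4"], ["pc5"]]
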
@@ -225,7 +214,7 @@ def iter_url_batches(
                      name=info["file_name"],
                      size=int(info["file_size"]),
                      sha1=info["sha1"],
-                     is_directory=False,
+                     is_dir=False,
                      headers=headers,
                  ))
      return run_gen_step_iter(gen_step, async_)
@@ -233,8 +222,8 @@ def iter_url_batches(

  @overload
  def iter_files_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: None | str | Iterable[str] = None,
      type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
      cur: Literal[0, 1] = 0,
@@ -254,8 +243,8 @@ def iter_files_with_url(
      ...
  @overload
  def iter_files_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: None | str | Iterable[str] = None,
      type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
      cur: Literal[0, 1] = 0,
@@ -274,8 +263,8 @@ def iter_files_with_url(
  ) -> AsyncIterator[dict]:
      ...
  def iter_files_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: None | str | Iterable[str] = None,
      type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
      cur: Literal[0, 1] = 0,
@@ -295,7 +284,7 @@ def iter_files_with_url(
      """Fetch file info together with download links

      :param client: 115 client or cookies
-     :param cid: directory id
+     :param cid: directory id or pickcode
      :param suffixes: file extensions; several may be given, and the leading "." may be omitted
      :param type: file type

@@ -321,7 +310,7 @@ def iter_files_with_url(

      :param normalize_attr: transform the data to make it easier to read
      :param id_to_dirnode: dict mapping an id to the `DirNode(name, parent_id)` named tuple of the corresponding file
-     :param app: use the API of some app (device)
+     :param app: use the API of the given app (device)
      :param raise_for_changed_count: whether to raise an error if the total count changes while fetching in batches
      :param user_agent: value of the "user-agent" request header
      :param async_: whether to run asynchronously
@@ -349,6 +338,7 @@ def iter_files_with_url(
          get_url = client.download_url
      else:
          get_url = partial(client.download_url, app=app)
+     cid = to_id(cid)
      def gen_step():
          if suffixes is None:
              it = iter_files(
@@ -386,7 +376,7 @@ def iter_files_with_url(
          with with_iter_next(it) as get_next:
              while True:
                  attr = yield get_next()
-                 if attr.get("violated", False):
+                 if attr.get("is_collect", False):
                      if attr["size"] < 1024 * 1024 * 115:
                          attr["url"] = yield get_url(
                              attr["pickcode"],
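
The attribute key for confiscated files changes from "violated" to "is_collect", and only such files under 115 MB (1024 * 1024 * 115 bytes) still get a link fetched one by one. A hypothetical consumer, with the module path and cookies as placeholders:

    from p115client import P115Client
    from p115client.tool.download import iter_files_with_url  # module path assumed

    client = P115Client("UID=...; CID=...; SEID=...")   # placeholder cookies
    for attr in iter_files_with_url(client, 0):
        # each attr carries "is_collect" (formerly "violated") plus a ready "url"
        print(attr["url"])
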
@@ -408,8 +398,8 @@ def iter_files_with_url(

  @overload
  def iter_images_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: None | str | Iterable[str] = None,
      cur: Literal[0, 1] = 0,
      with_ancestors: bool = False,
@@ -427,8 +417,8 @@ def iter_images_with_url(
      ...
  @overload
  def iter_images_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: None | str | Iterable[str] = None,
      cur: Literal[0, 1] = 0,
      with_ancestors: bool = False,
@@ -445,8 +435,8 @@ def iter_images_with_url(
  ) -> AsyncIterator[dict]:
      ...
  def iter_images_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: None | str | Iterable[str] = None,
      cur: Literal[0, 1] = 0,
      with_ancestors: bool = False,
@@ -467,7 +457,7 @@ def iter_images_with_url(
          Do not pass extensions that 115 cannot recognize as images in the `suffixes` parameter; it only wastes time, and you end up with ordinary download links anyway

      :param client: 115 client or cookies
-     :param cid: directory id
+     :param cid: directory id or pickcode
      :param suffixes: file extensions; several may be given, and the leading "." may be omitted (make sure 115 really treats the extension as an image, otherwise time is wasted generating download links one by one because they cannot be fetched in batches); if omitted (the default), all images are fetched
      :param cur: current directory only. 0: no (traverse all leaf nodes of the subdirectory tree), 1: yes
      :param with_ancestors: whether file info should include "ancestors"
@@ -482,7 +472,7 @@ def iter_images_with_url(

      :param normalize_attr: transform the data to make it easier to read
      :param id_to_dirnode: dict mapping an id to the `DirNode(name, parent_id)` named tuple of the corresponding file
-     :param app: use the API of some app (device)
+     :param app: use the API of the given app (device)
      :param raise_for_changed_count: whether to raise an error if the total count changes while fetching in batches
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters
@@ -509,6 +499,7 @@ def iter_images_with_url(
          get_url = client.download_url
      else:
          get_url = partial(client.download_url, app=app)
+     cid = to_id(cid)
      def gen_step():
          if suffixes is None:
              it = iter_files(
@@ -544,7 +535,7 @@ def iter_images_with_url(
                  try:
                      attr["url"] = reduce_image_url_layers(attr["thumb"])
                  except KeyError:
-                     if attr.get("violated", False):
+                     if attr.get("is_collect", False):
                          if attr["size"] < 1024 * 1024 * 115:
                              attr["url"] = yield get_url(
                                  attr["pickcode"],
@@ -566,8 +557,8 @@ def iter_images_with_url(

  @overload
  def iter_subtitles_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: str | Iterable[str] = (".srt", ".ass", ".ssa"),
      cur: Literal[0, 1] = 0,
      with_ancestors: bool = False,
@@ -585,8 +576,8 @@ def iter_subtitles_with_url(
      ...
  @overload
  def iter_subtitles_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: str | Iterable[str] = (".srt", ".ass", ".ssa"),
      cur: Literal[0, 1] = 0,
      with_ancestors: bool = False,
@@ -603,8 +594,8 @@ def iter_subtitles_with_url(
  ) -> AsyncIterator[dict]:
      ...
  def iter_subtitles_with_url(
-     client: str | P115Client,
-     cid: int = 0,
+     client: str | P115Client | P115OpenClient,
+     cid: int | str = 0,
      suffixes: str | Iterable[str] = (".srt", ".ass", ".ssa"),
      cur: Literal[0, 1] = 0,
      with_ancestors: bool = False,
@@ -630,7 +621,7 @@ def iter_subtitles_with_url(
          Do not pass extensions that 115 cannot recognize as subtitles in the `suffixes` parameter; it only wastes time, and you end up with ordinary download links anyway

      :param client: 115 client or cookies
-     :param cid: directory id
+     :param cid: directory id or pickcode
      :param suffixes: file extensions; several may be given, and the leading "." may be omitted (make sure 115 really treats the extension as a subtitle, otherwise time is wasted generating download links one by one because they cannot be fetched in batches)
      :param cur: current directory only. 0: no (traverse all leaf nodes of the subdirectory tree), 1: yes
      :param with_ancestors: whether file info should include "ancestors"
@@ -645,7 +636,7 @@ def iter_subtitles_with_url(

      :param normalize_attr: transform the data to make it easier to read
      :param id_to_dirnode: dict mapping an id to the `DirNode(name, parent_id)` named tuple of the corresponding file
-     :param app: use the API of some app (device)
+     :param app: use the API of the given app (device)
      :param raise_for_changed_count: whether to raise an error if the total count changes while fetching in batches
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters
@@ -656,16 +647,30 @@ def iter_subtitles_with_url(
          client = P115Client(client, check_for_relogin=True)
      if not isinstance(client, P115Client) or app == "open":
          get_url: Callable[..., P115URL] = client.download_url_open
+         fs_mkdir: Callable = client.fs_mkdir_open
+         fs_copy: Callable = client.fs_copy_open
+         fs_delete: Callable = client.fs_delete_open
+         fs_video_subtitle: Callable = client.fs_video_subtitle_open
      elif app in ("", "web", "desktop", "harmony"):
          get_url = client.download_url
+         fs_mkdir = client.fs_mkdir
+         fs_copy = client.fs_copy
+         fs_delete = client.fs_delete
+         fs_video_subtitle = client.fs_video_subtitle
      else:
          get_url = partial(client.download_url, app=app)
+         fs_mkdir = partial(client.fs_mkdir_app, app=app)
+         fs_copy = partial(client.fs_copy_app, app=app)
+         fs_delete = partial(client.fs_delete_app, app=app)
+         fs_video_subtitle = partial(client.fs_video_subtitle_app, app=app)
+     cid = to_id(cid)
      def gen_step():
          nonlocal suffixes
          if isinstance(suffixes, str):
              suffixes = suffixes,
-         do_chain: Callable = async_chain_from_iterable if async_ else chain.from_iterable
-         it = chunked(do_chain(
+         do_chain: Callable = async_chain.from_iterable if async_ else chain.from_iterable
+         do_next = anext if async_ else next
+         with with_iter_next(chunked(do_chain(
              iter_files(
                  client,
                  cid,
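
The same three-way dispatch used for get_url is now applied to every fs_* helper the subtitle workflow needs: open-platform variant, plain cookie variant, or an app-specific variant bound with partial. The shape of the pattern, reduced to a runnable stub (StubClient and its methods are invented for illustration):

    from functools import partial

    class StubClient:                      # hypothetical stand-in for P115Client
        def fs_mkdir(self, name):
            return ("web", name)
        def fs_mkdir_app(self, name, app):
            return (app, name)

    client, app = StubClient(), "android"
    if app in ("", "web", "desktop", "harmony"):
        fs_mkdir = client.fs_mkdir
    else:
        fs_mkdir = partial(client.fs_mkdir_app, app=app)
    print(fs_mkdir("subtitle-demo"))       # -> ('android', 'subtitle-demo')
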
@@ -683,35 +688,42 @@ def iter_subtitles_with_url(
                  **request_kwargs,
              )
              for suffix in suffixes
-         ), 1000)
-         do_next = anext if async_ else next
-         with with_iter_next(it) as get_next:
+         ), 1000)) as get_next:
              while True:
                  items: tuple[dict] = yield get_next()
-                 resp = yield client.fs_mkdir(
+                 resp = yield fs_mkdir(
                      f"subtitle-{uuid4()}",
                      async_=async_,
                      **request_kwargs,
                  )
                  check_response(resp)
                  try:
-                     scid = resp["cid"]
-                     resp = yield client.fs_copy(
+                     if "cid" in resp:
+                         scid = resp["cid"]
+                     else:
+                         data = resp["data"]
+                         if "category_id" in data:
+                             scid = data["category_id"]
+                         else:
+                             scid = data["file_id"]
+                     resp = yield fs_copy(
                          (attr["id"] for attr in items),
                          pid=scid,
                          async_=async_,
                          **request_kwargs,
                      )
                      check_response(resp)
-                     attr = yield do_next(iter_files_raw(
+                     attr = yield do_next(iter_files(
                          client,
                          scid,
                          first_page_size=1,
+                         normalize_attr=None,
                          base_url=True,
+                         app=app,
                          async_=async_, # type: ignore
                          **request_kwargs,
                      ))
-                     resp = yield client.fs_video_subtitle(
+                     resp = yield fs_video_subtitle(
                          attr["pc"],
                          async_=async_,
                          **request_kwargs,
@@ -722,14 +734,14 @@ def iter_subtitles_with_url(
                          if info.get("file_id")
                      }
                  finally:
-                     yield client.fs_delete(scid, async_=async_, **request_kwargs)
+                     yield fs_delete(scid, async_=async_, **request_kwargs)
                  if subtitles:
                      for attr in items:
                          attr["url"] = subtitles[attr["sha1"]]
                          yield Yield(attr)
                  else:
                      for attr in items:
-                         if attr.get("violated", False):
+                         if attr.get("is_collect", False):
                              if attr["size"] < 1024 * 1024 * 115:
                                  attr["url"] = yield get_url(
                                      attr["pickcode"],
@@ -751,9 +763,10 @@ def iter_subtitles_with_url(

  @overload
  def iter_subtitle_batches(
-     client: str | P115Client,
-     file_ids: Iterable[int],
+     client: str | P115Client | P115OpenClient,
+     file_ids: Iterable[int | str],
      batch_size: int = 1_000,
+     app: str = "web",
      *,
      async_: Literal[False] = False,
      **request_kwargs,
@@ -761,18 +774,20 @@ def iter_subtitle_batches(
      ...
  @overload
  def iter_subtitle_batches(
-     client: str | P115Client,
-     file_ids: Iterable[int],
+     client: str | P115Client | P115OpenClient,
+     file_ids: Iterable[int | str],
      batch_size: int = 1_000,
+     app: str = "web",
      *,
      async_: Literal[True],
      **request_kwargs,
  ) -> AsyncIterator[dict]:
      ...
  def iter_subtitle_batches(
-     client: str | P115Client,
-     file_ids: Iterable[int],
+     client: str | P115Client | P115OpenClient,
+     file_ids: Iterable[int | str],
      batch_size: int = 1_000,
+     app: str = "web",
      *,
      async_: Literal[False, True] = False,
      **request_kwargs,
@@ -786,7 +801,7 @@ def iter_subtitle_batches(
          For now 115 appears to support only ".srt", ".ass" and ".ssa"; anything it cannot recognize as a subtitle is skipped automatically

      :param client: 115 client or cookies
-     :param file_ids: a batch of file ids (every one of them must be what 115 considers a subtitle)
+     :param file_ids: a batch of file ids or pickcodes
      :param batch_size: number of items handled per batch
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters
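
A hypothetical call with the widened input type (module path, cookies and the ids are placeholders):

    from p115client import P115Client
    from p115client.tool.download import iter_subtitle_batches  # module path assumed

    client = P115Client("UID=...; CID=...; SEID=...")   # placeholder cookies
    # ids and pickcodes may now be mixed; to_id() normalizes each one
    for info in iter_subtitle_batches(client, [1234567, "ecjq9ichcb9843jyu"]):
        print(info)
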
@@ -797,33 +812,50 @@ def iter_subtitle_batches(
          client = P115Client(client, check_for_relogin=True)
      if batch_size <= 0:
          batch_size = 1_000
+     if not isinstance(client, P115Client) or app == "open":
+         fs_mkdir: Callable = client.fs_mkdir_open
+         fs_copy: Callable = client.fs_copy_open
+         fs_delete: Callable = client.fs_delete_open
+         fs_video_subtitle: Callable = client.fs_video_subtitle_open
+     elif app in ("", "web", "desktop", "harmony"):
+         fs_mkdir = client.fs_mkdir
+         fs_copy = client.fs_copy
+         fs_delete = client.fs_delete
+         fs_video_subtitle = client.fs_video_subtitle
+     else:
+         fs_mkdir = partial(client.fs_mkdir_app, app=app)
+         fs_copy = partial(client.fs_copy_app, app=app)
+         fs_delete = partial(client.fs_delete_app, app=app)
+         fs_video_subtitle = partial(client.fs_video_subtitle_app, app=app)
      def gen_step():
          do_next: Callable = anext if async_ else next
-         for ids in chunked(file_ids, batch_size):
+         for ids in batched(map(to_id, file_ids), batch_size):
              try:
-                 resp = yield client.fs_mkdir(
+                 resp = yield fs_mkdir(
                      f"subtitle-{uuid4()}",
                      async_=async_,
                      **request_kwargs,
                  )
                  check_response(resp)
                  scid = resp["cid"]
-                 resp = yield client.fs_copy(
+                 resp = yield fs_copy(
                      ids,
                      pid=scid,
                      async_=async_,
                      **request_kwargs,
                  )
                  check_response(resp)
-                 attr = yield do_next(iter_files_raw(
+                 attr = yield do_next(iter_files(
                      client,
                      scid,
                      first_page_size=1,
+                     normalize_attr=None,
                      base_url=True,
-                     async_=async_,
+                     app=app,
+                     async_=async_, # type: ignore
                      **request_kwargs,
                  ))
-                 resp = yield client.fs_video_subtitle(
+                 resp = yield fs_video_subtitle(
                      attr["pc"],
                      async_=async_,
                      **request_kwargs,
@@ -835,14 +867,16 @@ def iter_subtitle_batches(
              except (StopIteration, StopAsyncIteration):
                  pass
              finally:
-                 yield client.fs_delete(scid, async_=async_, **request_kwargs)
+                 yield fs_delete(scid, async_=async_, **request_kwargs)
      return run_gen_step_iter(gen_step, async_)


+ # TODO: add support for the open API
+ # TODO: later, the iter_download_nodes API could also be used to pull the data faster
  @overload
  def make_strm(
      client: str | P115Client,
-     cid: int = 0,
+     cid: int | str = 0,
      save_dir: bytes | str | PathLike = ".",
      origin: str = "http://localhost:8000",
      update: bool = False,
@@ -868,7 +902,7 @@ def make_strm(
  @overload
  def make_strm(
      client: str | P115Client,
-     cid: int = 0,
+     cid: int | str = 0,
      save_dir: bytes | str | PathLike = ".",
      origin: str = "http://localhost:8000",
      update: bool = False,
@@ -893,7 +927,7 @@ def make_strm(
      ...
  def make_strm(
      client: str | P115Client,
-     cid: int = 0,
+     cid: int | str = 0,
      save_dir: bytes | str | PathLike = ".",
      origin: str = "http://localhost:8000",
      update: bool = False,
@@ -918,7 +952,7 @@ def make_strm(
      """Generate strm files and save them locally

      :param client: 115 client or cookies
-     :param cid: directory id
+     :param cid: directory id or pickcode
      :param save_dir: local save directory, defaulting to the current working directory
      :param origin: the strm files' `HTTP origin <https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Origin>`_
      :param update: whether to update strm files; if False, paths that already exist are skipped
@@ -951,7 +985,7 @@ def make_strm(
      :param max_workers: maximum concurrency, mainly used to cap the number of files open at the same time
      :param id_to_dirnode: dict mapping an id to the `DirNode(name, parent_id)` named tuple of the corresponding file
      :param path_already: if True, id_to_dirnode already holds every directory node needed to build the paths, so no further directory info is fetched
-     :param app: use the API of some app (device)
+     :param app: use the API of the given app (device)
      :param fs_files_cooldown: cooldown for `fs_files` API calls; if greater than 0, concurrent calls are spaced by this interval
      :param fs_files_max_workers: maximum concurrency for `fs_files` API calls
      :param async_: whether to run asynchronously
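
A hypothetical invocation showing the widened cid parameter (cookies, pickcode and paths are placeholders; origin is typically a locally running 115 redirect service):

    from p115client import P115Client
    from p115client.tool.download import make_strm  # module path assumed

    client = P115Client("UID=...; CID=...; SEID=...")   # placeholder cookies
    # cid may now be a directory id or its pickcode; to_id() normalizes it
    make_strm(client, "ecjq9ichcb9843jyu", save_dir="./library",
              origin="http://localhost:8000")
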
@@ -1039,6 +1073,7 @@ def make_strm(
                  append(ignored, path)
                  return
              append(upserted, path)
+     cid = to_id(cid)
      def gen_step():
          nonlocal abspath_prefix_length, savedir
          start_t = time()
@@ -1120,8 +1155,10 @@ def make_strm(
  @overload
  def iter_download_nodes(
      client: str | P115Client,
-     pickcode: int | str = "",
+     pickcode: str | int = "",
      files: bool = True,
+     ensure_name: bool = False,
+     id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
      max_workers: None | int = 1,
      app: str = "android",
      *,
@@ -1132,8 +1169,10 @@ def iter_download_nodes(
  @overload
  def iter_download_nodes(
      client: str | P115Client,
-     pickcode: int | str = "",
+     pickcode: str | int = "",
      files: bool = True,
+     ensure_name: bool = False,
+     id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
      max_workers: None | int = 1,
      app: str = "android",
      *,
@@ -1143,8 +1182,10 @@ def iter_download_nodes(
      ...
  def iter_download_nodes(
      client: str | P115Client,
-     pickcode: int | str = "",
+     pickcode: str | int = "",
      files: bool = True,
+     ensure_name: bool = False,
+     id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
      max_workers: None | int = 1,
      app: str = "android",
      *,
@@ -1154,10 +1195,12 @@ def iter_download_nodes(
      """Fetch (brief) info of all files or directories inside a directory

      :param client: 115 client or cookies
-     :param pickcode: the directory's extraction code or id
+     :param pickcode: the directory's pickcode or id
      :param files: if True, fetch only files, otherwise only directories
-     :param max_workers: maximum concurrency; if None or <= 0, defaults to 20
-     :param app: use the API of some app (device)
+     :param ensure_name: make sure the returned data has a "name" field
+     :param id_to_dirnode: dict mapping an id to the `DirNode(name, parent_id)` named tuple of the corresponding file
+     :param max_workers: maximum concurrency; if None or <= 0, determined automatically
+     :param app: use the API of the given app (device)
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters

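A sketch of a call exercising the two new parameters (cookies and pickcode are placeholders). With files=True and ensure_name=True, the generator back-fills "name" and "sha1" through fs_file_skim, as the implementation below shows:

    from p115client import P115Client
    from p115client.tool.download import iter_download_nodes  # module path assumed

    client = P115Client("UID=...; CID=...; SEID=...")   # placeholder cookies
    for node in iter_download_nodes(client, "ecjq9ichcb9843jyu", ensure_name=True):
        print(node["id"], node.get("name"), node["size"])
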
@@ -1166,23 +1209,72 @@ def iter_download_nodes(
      if isinstance(client, str):
          client = P115Client(client, check_for_relogin=True)
      get_base_url = cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__
+     if async_:
+         if max_workers is None or max_workers <= 0:
+             max_workers = 20
+     elif max_workers is not None and max_workers <= 0:
+         max_workers = None
      if files:
          method = client.download_files
      else:
          method = client.download_folders
-     get_nodes = partial(method, async_=async_, **{"base_url": get_base_url, **request_kwargs})
-     if max_workers == 1:
-         def gen_step(pickcode):
-             if isinstance(pickcode, int):
-                 resp = yield client.fs_file_skim(pickcode, async_=async_, **request_kwargs)
+     if id_to_dirnode is None:
+         id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
+     file_skim = client.fs_file_skim
+     def normalize_attrs(attrs: list[dict], /):
+         if files:
+             for i, info in enumerate(attrs):
+                 attrs[i] = {
+                     "is_dir": False,
+                     "id": to_id(info["pc"]),
+                     "pickcode": info["pc"],
+                     "parent_id": int(info["pid"]),
+                     "size": info["fs"],
+                 }
+         else:
+             for i, info in enumerate(attrs):
+                 attrs[i] = {
+                     "is_dir": True,
+                     "id": int(info["fid"]),
+                     "name": info["fn"],
+                     "parent_id": int(info["pid"]),
+                 }
+             if id_to_dirnode is not ... and id_to_dirnode is not None:
+                 for attr in attrs:
+                     id_to_dirnode[attr["id"]] = DirNode(attr["name"], attr["parent_id"])
+         return attrs
+     if files and ensure_name:
+         prepare = normalize_attrs
+         @as_gen_step
+         def normalize_attrs(attrs: list[dict], /):
+             prepare(attrs)
+             resp = yield file_skim(
+                 (a["id"] for a in attrs),
+                 method="POST",
+                 async_=async_,
+                 **request_kwargs,
+             )
+             if resp.get("error") != "文件不存在":
                  check_response(resp)
-                 pickcode = resp["data"][0]["pick_code"]
+                 nodes = {int(a["file_id"]): a for a in resp["data"]}
+                 for attr in attrs:
+                     if node := nodes.get(attr["id"]):
+                         attr["sha1"] = node["sha1"]
+                         attr["name"] = unescape_115_charref(node["file_name"])
+     get_nodes = partial(
+         method,
+         async_=async_,
+         **{"base_url": get_base_url, **request_kwargs},
+     )
+     if max_workers == 1:
+         def gen_step(pickcode: int | str, /):
+             pickcode = to_pickcode(pickcode)
              for i in count(1):
                  payload = {"pickcode": pickcode, "page": i}
                  resp = yield get_nodes(payload)
                  check_response(resp)
                  data = resp["data"]
-                 yield YieldFrom(data["list"])
+                 yield YieldFrom(normalize_attrs(data["list"]))
                  if not data["has_next_page"]:
                      break

@@ -1193,7 +1285,8 @@ def iter_download_nodes(
      else:
          q = SimpleQueue()
          get, put = q.get, q.put_nowait
-         def request(pickcode):
+         @as_gen_step
+         def request(pickcode: str, /):
              nonlocal max_page
              while True:
                  page = get_next_page()
@@ -1206,37 +1299,23 @@ def iter_download_nodes(
                      put(e)
                      return
                  data = resp["data"]
-                 put(data["list"])
+                 put((yield normalize_attrs(data["list"])))
                  if not data["has_next_page"]:
                      max_page = page
-         def gen_step(pickcode):
-             nonlocal max_workers, max_page, get_next_page
-             max_page = 0
-             get_next_page = count(1).__next__
+         def gen_step(pickcode: int | str, /):
              if async_:
-                 if max_workers is None or max_workers <= 0:
-                     max_workers = 20
-                 n = max_workers
+                 n = cast(int, max_workers)
                  task_group = TaskGroup()
                  yield task_group.__aenter__()
                  create_task = task_group.create_task
                  submit: Callable = lambda f, /, *a, **k: create_task(f(*a, **k))
                  shutdown: Callable = lambda: task_group.__aexit__(None, None, None)
              else:
-                 if max_workers is not None and max_workers <= 0:
-                     max_workers = None
                  executor = ThreadPoolExecutor(max_workers)
                  n = executor._max_workers
                  submit = executor.submit
                  shutdown = lambda: executor.shutdown(False, cancel_futures=True)
-             if isinstance(pickcode, int):
-                 resp = yield client.fs_file_skim(
-                     pickcode,
-                     async_=async_, # type: ignore
-                     **request_kwargs,
-                 )
-                 check_response(resp)
-                 pickcode = resp["data"][0]["pick_code"]
+             pickcode = to_pickcode(pickcode)
              try:
                  sentinel = object()
                  countdown: Callable
@@ -1253,8 +1332,8 @@ def iter_download_nodes(
                          n -= 1
                          if not n:
                              put(sentinel)
-                 for i in range(n):
-                     submit(run_gen_step, request(pickcode), async_=async_).add_done_callback(countdown)
+                 for _ in range(n):
+                     submit(request, pickcode, async_=async_).add_done_callback(countdown)
                  while True:
                      ls = yield get()
                      if ls is sentinel:
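
The shutdown handshake around these lines, isolated into a runnable thread-pool sketch (all names invented): every worker's done-callback decrements a shared counter, and the last one enqueues a sentinel so the consuming loop knows all producers have finished.

    from concurrent.futures import ThreadPoolExecutor
    from queue import SimpleQueue
    from threading import Lock

    q: SimpleQueue = SimpleQueue()
    sentinel = object()
    n, lock = 4, Lock()

    def countdown(_future):
        global n
        with lock:                 # the last finished worker pushes the sentinel
            n -= 1
            if not n:
                q.put(sentinel)

    with ThreadPoolExecutor(4) as pool:
        for i in range(4):
            pool.submit(q.put, i).add_done_callback(countdown)
        while (item := q.get()) is not sentinel:
            print("got", item)
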
@@ -1268,29 +1347,38 @@ def iter_download_nodes(
          return run_gen_step_iter(gen_step(pickcode), async_)
      else:
          def chain():
+             nonlocal max_page, get_next_page
+             pickcodes: list[str] = []
+             add_pickcode = pickcodes.append
              with with_iter_next(iterdir(
                  client,
-                 ensure_file=False,
+                 ensure_file=None if files else False,
                  app=app,
                  normalize_attr=normalize_attr_simple,
+                 id_to_dirnode=id_to_dirnode,
                  raise_for_changed_count=True,
                  async_=async_,
                  **request_kwargs,
              )) as get_next:
                  while True:
                      attr = yield get_next()
-                     if not files:
-                         yield Yield(
-                             {"fid": str(attr["id"]), "pid": "0", "fn": attr["name"]}
-                         )
-                     yield YieldFrom(run_gen_step_iter(gen_step(attr["pickcode"]), async_))
+                     if attr["is_dir"]:
+                         if not files:
+                             yield Yield(attr)
+                         add_pickcode(attr["pickcode"])
+                     elif files:
+                         yield Yield(attr)
+             for pickcode in pickcodes:
+                 yield YieldFrom(run_gen_step_iter(gen_step(pickcode), async_))
+                 max_page = 0
+                 get_next_page = count(1).__next__
          return run_gen_step_iter(chain, async_)


  @overload
  def iter_download_files(
      client: str | P115Client,
-     cid: int = 0,
+     cid: int | str = 0,
      id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
      escape: None | bool | Callable[[str], str] = True,
      with_ancestors: bool = True,
@@ -1304,7 +1392,7 @@ def iter_download_files(
  @overload
  def iter_download_files(
      client: str | P115Client,
-     cid: int = 0,
+     cid: int | str = 0,
      id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
      escape: None | bool | Callable[[str], str] = True,
      with_ancestors: bool = True,
@@ -1317,7 +1405,7 @@ def iter_download_files(
      ...
  def iter_download_files(
      client: str | P115Client,
-     cid: int = 0,
+     cid: int | str = 0,
      id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
      escape: None | bool | Callable[[str], str] = True,
      with_ancestors: bool = True,
@@ -1330,12 +1418,10 @@ def iter_download_files(
      """Fetch (brief) info of all files inside a directory, including "dir_ancestors" and "dirname"

      .. note::
-         The file's id and name are not provided, but the pickcode is; if you need the name, fetch the download link afterwards and take the actual name from it
-
-         To get basic info from a pickcode, use `P115Client.fs_supervision`
+         The file's name is not provided; if you need it, fetch the download link afterwards and take the actual name from it

      :param client: 115 client or cookies
-     :param cid: directory id
+     :param cid: directory id or pickcode
      :param escape: escape file names

          - if None, nothing is done; otherwise, this function is used to escape certain characters in file names, such as "/"
@@ -1345,8 +1431,8 @@ def iter_download_files(

      :param with_ancestors: whether file info should include "ancestors"
      :param id_to_dirnode: dict mapping an id to the `DirNode(name, parent_id)` named tuple of the corresponding file
-     :param max_workers: maximum concurrency; if None or <= 0, defaults to 20
-     :param app: use the API of some app (device)
+     :param max_workers: maximum concurrency; if None or <= 0, determined automatically
+     :param app: use the API of the given app (device)
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters

@@ -1356,7 +1442,7 @@ def iter_download_files(
          client = P115Client(client, check_for_relogin=True)
      if id_to_dirnode is None:
          id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
-     else:
+     elif id_to_dirnode is ...:
          id_to_dirnode = {}
      if isinstance(escape, bool):
          if escape:
@@ -1396,21 +1482,86 @@ def iter_download_files(
          else:
              dirname = id_to_path[pid] = get_path(id_to_dirnode[pid]) + "/"
          return dirname + name
-     def norm_attr(info: dict, /) -> dict:
-         pid = int(info["pid"])
-         attr = {"parent_id": pid, "pickcode": info["pc"], "size": info["fs"]}
+     def norm_attr(attr: dict, /) -> dict:
+         pid = attr["parent_id"]
          pnode = id_to_dirnode[pid]
          if with_ancestors:
              attr["dir_ancestors"] = get_ancestors(pid, pnode)
          attr["dirname"] = get_path(pnode)
          return attr
-     def gen_step(pickcode: str = ""):
-         if not cid:
+     ancestors_loaded: None | bool = False
+     @as_gen_step
+     def load_ancestors(pickcode: str, /):
+         nonlocal ancestors_loaded
+         try:
+             yield through(iter_download_nodes(
+                 client,
+                 pickcode,
+                 files=False,
+                 id_to_dirnode=id_to_dirnode,
+                 max_workers=max_workers,
+                 app=app,
+                 async_=async_,
+                 **request_kwargs,
+             ))
+         finally:
+             ancestors_loaded = True
+     def gen_step(pickcode: str = to_pickcode(cid), /):
+         nonlocal ancestors_loaded
+         if pickcode:
+             if cid:
+                 from .iterdir import _iter_fs_files
+                 do_next: Callable = anext if async_ else next
+                 yield do_next(_iter_fs_files(
+                     client,
+                     to_id(cid),
+                     page_size=1,
+                     id_to_dirnode=id_to_dirnode,
+                     async_=async_,
+                     **request_kwargs,
+                 ))
+             if async_:
+                 task: Any = create_task(load_ancestors(pickcode))
+             else:
+                 task = run_as_thread(load_ancestors, pickcode)
+             cache: list[dict] = []
+             add_to_cache = cache.append
+             with with_iter_next(iter_download_nodes(
+                 client,
+                 pickcode,
+                 files=True,
+                 max_workers=max_workers,
+                 app=app,
+                 async_=async_,
+                 **request_kwargs,
+             )) as get_next:
+                 while True:
+                     attr = yield get_next()
+                     if ancestors_loaded is None:
+                         yield Yield(norm_attr(attr))
+                     elif ancestors_loaded:
+                         yield YieldFrom(map(norm_attr, cache))
+                         cache.clear()
+                         if async_:
+                             yield task
+                         else:
+                             task.result()
+                         ancestors_loaded = None
+                     else:
+                         add_to_cache(attr)
+             if cache:
+                 if async_:
+                     yield task
+                 else:
+                     task.result()
+                 yield YieldFrom(map(norm_attr, cache))
+         else:
              defaults = {
                  "dir_ancestors": [{"id": 0, "parent_id": 0, "name": ""}],
                  "dirname": "/",
              }
              pickcodes: list[str] = []
+             add_pickcode = pickcodes.append
              with with_iter_next(iterdir(
                  client,
                  id_to_dirnode=id_to_dirnode,
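
gen_step now overlaps two traversals: file nodes stream in while a background task fills in ancestor directories, and finished items are buffered until that task completes. The ancestors_loaded bookkeeping, reduced to a generic runnable sketch (all names invented, not the library code):

    from concurrent.futures import ThreadPoolExecutor
    import time

    def stream_with_side_load(items):
        # Buffer until the side task finishes, then flush and stream directly.
        with ThreadPoolExecutor(1) as pool:
            task = pool.submit(time.sleep, 0.05)   # stands in for loading ancestors
            cache, loaded = [], False
            for item in items:
                if loaded:
                    yield item
                elif task.done():
                    cache.append(item)
                    yield from cache
                    cache.clear()
                    loaded = True
                else:
                    cache.append(item)
            if cache:
                task.result()                      # ensure the side load finished
                yield from cache

    print(list(stream_with_side_load(range(10))))
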
@@ -1422,7 +1573,7 @@ def iter_download_files(
          while True:
              attr = yield get_next()
              if attr["is_dir"]:
-                 pickcodes.append(attr["pickcode"])
+                 add_pickcode(attr["pickcode"])
              else:
                  yield Yield({
                      "parent_id": attr["parent_id"],
@@ -1432,82 +1583,13 @@ def iter_download_files(
                  })
          for pickcode in pickcodes:
              yield YieldFrom(run_gen_step_iter(gen_step(pickcode), async_))
-         return
-     if not pickcode:
-         resp = yield client.fs_file_skim(cid, async_=async_, **request_kwargs)
-         check_response(resp)
-         info = resp["data"][0]
-         if info["sha1"]:
-             raise NotADirectoryError(ENOTDIR, info)
-         pickcode = info["pick_code"]
-     ancestors_loaded: None | bool = False
-     def load_ancestors():
-         nonlocal ancestors_loaded
-         if cid:
-             resp = yield client.fs_files(
-                 {"cid": cid, "limit": 1},
-                 async_=async_,
-                 **request_kwargs,
-             )
-             check_response(resp)
-             for info in resp["path"][1:]:
-                 id_to_dirnode[int(info["cid"])] = DirNode(info["name"], int(info["pid"]))
-         try:
-             with with_iter_next(iter_download_nodes(
-                 client,
-                 pickcode,
-                 files=False,
-                 max_workers=max_workers,
-                 app=app,
-                 async_=async_,
-                 **request_kwargs,
-             )) as get_next:
-                 while True:
-                     info = yield get_next()
-                     id_to_dirnode[int(info["fid"])] = DirNode(info["fn"], int(info["pid"]))
-         finally:
-             ancestors_loaded = True
-     if async_:
-         task: Any = create_task(run_gen_step(load_ancestors, True))
-     else:
-         task = run_as_thread(run_gen_step, load_ancestors)
-     cache: list[dict] = []
-     add_to_cache = cache.append
-     with with_iter_next(iter_download_nodes(
-         client,
-         pickcode,
-         files=True,
-         max_workers=max_workers,
-         app=app,
-         async_=async_, # type: ignore
-         **request_kwargs,
-     )) as get_next:
-         while True:
-             info = yield get_next()
-             if ancestors_loaded is None:
-                 yield Yield(norm_attr(info))
-             elif ancestors_loaded:
-                 yield YieldFrom(map(norm_attr, cache))
-                 cache.clear()
-                 if async_:
-                     yield task
-                 else:
-                     task.result()
-                 ancestors_loaded = None
-             else:
-                 add_to_cache(info)
-     if cache:
-         if async_:
-             yield task
-         else:
-             task.result()
-         yield YieldFrom(map(norm_attr, cache))
+             ancestors_loaded = False
      return run_gen_step_iter(gen_step, async_)


  @overload
  def get_remaining_open_count(
-     client: str | P115Client,
+     client: str | P115Client | P115OpenClient,
      app: str = "android",
      *,
      async_: Literal[False] = False,
@@ -1516,7 +1598,7 @@ def get_remaining_open_count(
      ...
  @overload
  def get_remaining_open_count(
-     client: str | P115Client,
+     client: str | P115Client | P115OpenClient,
      app: str = "android",
      *,
      async_: Literal[True],
@@ -1524,7 +1606,7 @@ def get_remaining_open_count(
  ) -> Coroutine[Any, Any, int]:
      ...
  def get_remaining_open_count(
-     client: str | P115Client,
+     client: str | P115Client | P115OpenClient,
      app: str = "android",
      *,
      async_: Literal[False, True] = False,
@@ -1536,7 +1618,7 @@ def get_remaining_open_count(
          Suppose the total is n (usually 10, though occasionally adjusted); if m links are already open, the number returned is n-m

      :param client: 115 client or cookies
-     :param app: use the API of some app (device)
+     :param app: use the API of the given app (device)
      :param async_: whether to run asynchronously
      :param request_kwargs: other request parameters

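A hypothetical probe of the remaining quota (cookies and module path are placeholders). Note from the implementation below that the count is measured by actually opening connections to sufficiently large files, so the call costs a few requests:

    from p115client import P115Client
    from p115client.tool.download import get_remaining_open_count  # module path assumed

    client = P115Client("UID=...; CID=...; SEID=...")   # placeholder cookies
    print(get_remaining_open_count(client))             # e.g. 10 if nothing is open
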
@@ -1554,18 +1636,28 @@ def get_remaining_open_count(
1554
1636
  cache: list = []
1555
1637
  add_to_cache = cache.append
1556
1638
  try:
1557
- with with_iter_next(iter_download_nodes(
1558
- client,
1559
- app=app,
1560
- async_=async_,
1561
- **request_kwargs,
1562
- )) as get_next:
1639
+ if isinstance(client, P115OpenClient):
1640
+ it: Iterator[dict] | AsyncIterator[dict] = iter_files(
1641
+ client,
1642
+ type=4,
1643
+ app=app,
1644
+ async_=async_, # type: ignore
1645
+ **request_kwargs,
1646
+ )
1647
+ else:
1648
+ it = iter_download_nodes(
1649
+ client,
1650
+ app=app,
1651
+ async_=async_,
1652
+ **request_kwargs,
1653
+ )
1654
+ with with_iter_next(it) as get_next:
1563
1655
  while True:
1564
- info = yield get_next()
1565
- if int(info["fs"]) <= 1024 * 1024 * 200:
1656
+ attr = yield get_next()
1657
+ if attr["size"] <= 1024 * 1024 * 200:
1566
1658
  continue
1567
1659
  try:
1568
- url = yield get_url(info["pc"], async_=async_)
1660
+ url = yield get_url(attr["pickcode"], async_=async_)
1569
1661
  except FileNotFoundError:
1570
1662
  continue
1571
1663
  request = Request(url, headers={"user-agent": ""})