p115client 0.0.5.14.1.tar.gz → 0.0.5.14.3.tar.gz

This diff compares publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
Files changed (27)
  1. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/PKG-INFO +2 -2
  2. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/client.py +163 -69
  3. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/download.py +8 -11
  4. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/fs_files.py +1 -1
  5. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/iterdir.py +366 -59
  6. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/pyproject.toml +2 -2
  7. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/LICENSE +0 -0
  8. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/__init__.py +0 -0
  9. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/_upload.py +0 -0
  10. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/const.py +0 -0
  11. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/exception.py +0 -0
  12. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/py.typed +0 -0
  13. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/__init__.py +0 -0
  14. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/attr.py +0 -0
  15. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/auth.py +0 -0
  16. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/edit.py +0 -0
  17. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/export_dir.py +0 -0
  18. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/history.py +0 -0
  19. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/life.py +0 -0
  20. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/offline.py +0 -0
  21. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/pool.py +0 -0
  22. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/request.py +0 -0
  23. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/upload.py +0 -0
  24. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/util.py +0 -0
  25. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/xys.py +0 -0
  26. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/type.py +0 -0
  27. {p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/readme.md +0 -0
{p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/PKG-INFO
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: p115client
3
- Version: 0.0.5.14.1
3
+ Version: 0.0.5.14.3
4
4
  Summary: Python 115 webdisk client.
5
5
  Home-page: https://github.com/ChenyangGao/p115client
6
6
  License: MIT
@@ -41,7 +41,7 @@ Requires-Dist: python-filewrap (>=0.2.8)
41
41
  Requires-Dist: python-hashtools (>=0.0.3.3)
42
42
  Requires-Dist: python-http_request (>=0.0.6)
43
43
  Requires-Dist: python-httpfile (>=0.0.5.2)
44
- Requires-Dist: python-iterutils (>=0.2.5.3)
44
+ Requires-Dist: python-iterutils (>=0.2.5.4)
45
45
  Requires-Dist: python-property (>=0.0.3)
46
46
  Requires-Dist: python-startfile (>=0.0.2)
47
47
  Requires-Dist: python-undefined (>=0.0.3)
{p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/client.py
@@ -59,7 +59,7 @@ from orjson import dumps, loads
59
59
  from p115cipher.fast import (
60
60
  rsa_encode, rsa_decode, ecdh_encode_token, ecdh_aes_encode, ecdh_aes_decode, make_upload_payload,
61
61
  )
62
- from p115pickcode import get_stable_point
62
+ from p115pickcode import get_stable_point, to_id, to_pickcode
63
63
  from property import locked_cacheproperty
64
64
  from re import compile as re_compile
65
65
  from startfile import startfile, startfile_async # type: ignore
@@ -629,29 +629,32 @@ def check_response(resp: dict | Awaitable[dict], /) -> dict | Coroutine[Any, Any
629
629
 
630
630
  @overload
631
631
  def normalize_attr_web(
632
- info: Mapping,
632
+ info: Mapping[str, Any],
633
633
  /,
634
634
  simple: bool = False,
635
635
  keep_raw: bool = False,
636
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
636
637
  *,
637
638
  dict_cls: None = None,
638
639
  ) -> dict[str, Any]:
639
640
  ...
640
641
  @overload
641
642
  def normalize_attr_web[D: dict[str, Any]](
642
- info: Mapping,
643
+ info: Mapping[str, Any],
643
644
  /,
644
645
  simple: bool = False,
645
646
  keep_raw: bool = False,
647
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
646
648
  *,
647
649
  dict_cls: type[D],
648
650
  ) -> D:
649
651
  ...
650
652
  def normalize_attr_web[D: dict[str, Any]](
651
- info: Mapping,
653
+ info: Mapping[str, Any],
652
654
  /,
653
655
  simple: bool = False,
654
656
  keep_raw: bool = False,
657
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
655
658
  *,
656
659
  dict_cls: None | type[D] = None,
657
660
  ) -> dict[str, Any] | D:
@@ -660,6 +663,7 @@ def normalize_attr_web[D: dict[str, Any]](
660
663
  :param info: 原始数据
661
664
  :param simple: 只提取少量必要字段 "is_dir", "id", "parent_id", "name", "sha1", "size", "pickcode", "is_collect", "ctime", "mtime", "type"
662
665
  :param keep_raw: 是否保留原始数据,如果为 True,则保存到 "raw" 字段
666
+ :param default: 一些预设值,可被覆盖
663
667
  :param dict_cls: 字典类型
664
668
 
665
669
  :return: 翻译后的 dict 类型数据
@@ -667,6 +671,8 @@ def normalize_attr_web[D: dict[str, Any]](
667
671
  if dict_cls is None:
668
672
  dict_cls = cast(type[D], dict)
669
673
  attr: dict[str, Any] = dict_cls()
674
+ if default:
675
+ attr.update(default)
670
676
  is_dir = attr["is_dir"] = "fid" not in info
671
677
  if is_dir:
672
678
  attr["id"] = int(info["cid"]) # category_id
@@ -771,29 +777,33 @@ def normalize_attr_web[D: dict[str, Any]](
771
777
 
772
778
  @overload
773
779
  def normalize_attr_app(
774
- info: Mapping,
780
+ info: Mapping[str, Any],
775
781
  /,
776
782
  simple: bool = False,
777
783
  keep_raw: bool = False,
784
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
778
785
  *,
779
786
  dict_cls: None = None,
780
787
  ) -> dict[str, Any]:
781
788
  ...
782
789
  @overload
783
790
  def normalize_attr_app[D: dict[str, Any]](
784
- info: Mapping,
791
+ info: Mapping[str, Any],
785
792
  /,
786
793
  simple: bool = False,
787
794
  keep_raw: bool = False,
795
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
788
796
  *,
789
797
  dict_cls: type[D],
790
798
  ) -> D:
791
799
  ...
792
800
  def normalize_attr_app[D: dict[str, Any]](
793
- info: Mapping,
801
+ info: Mapping[str, Any],
794
802
  /,
795
803
  simple: bool = False,
796
804
  keep_raw: bool = False,
805
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
806
+ *,
797
807
  dict_cls: None | type[D] = None,
798
808
  ) -> dict[str, Any] | D:
799
809
  """翻译 `P115Client.fs_files_app` 接口响应的文件信息数据,使之便于阅读
@@ -801,6 +811,7 @@ def normalize_attr_app[D: dict[str, Any]](
801
811
  :param info: 原始数据
802
812
  :param simple: 只提取少量必要字段 "is_dir", "id", "parent_id", "name", "sha1", "size", "pickcode", "is_collect", "ctime", "mtime", "type"
803
813
  :param keep_raw: 是否保留原始数据,如果为 True,则保存到 "raw" 字段
814
+ :param default: 一些预设值,可被覆盖
804
815
  :param dict_cls: 字典类型
805
816
 
806
817
  :return: 翻译后的 dict 类型数据
@@ -808,8 +819,10 @@ def normalize_attr_app[D: dict[str, Any]](
808
819
  if dict_cls is None:
809
820
  dict_cls = cast(type[D], dict)
810
821
  attr: dict[str, Any] = dict_cls()
822
+ if default:
823
+ attr.update(default)
811
824
  is_dir = attr["is_dir"] = info["fc"] == "0" # file_category
812
- attr["id"] = int(info["fid"]) # file_id
825
+ attr["id"] = int(info["fid"]) # file_id
813
826
  attr["parent_id"] = int(info["pid"]) # parent_id
814
827
  attr["name"] = info["fn"]
815
828
  sha1 = attr["sha1"] = info.get("sha1") or ""
@@ -892,29 +905,33 @@ def normalize_attr_app[D: dict[str, Any]](
892
905
 
893
906
  @overload
894
907
  def normalize_attr_app2(
895
- info: Mapping,
908
+ info: Mapping[str, Any],
896
909
  /,
897
910
  simple: bool = False,
898
911
  keep_raw: bool = False,
912
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
899
913
  *,
900
914
  dict_cls: None = None,
901
915
  ) -> dict[str, Any]:
902
916
  ...
903
917
  @overload
904
918
  def normalize_attr_app2[D: dict[str, Any]](
905
- info: Mapping,
919
+ info: Mapping[str, Any],
906
920
  /,
907
921
  simple: bool = False,
908
922
  keep_raw: bool = False,
923
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
909
924
  *,
910
925
  dict_cls: type[D],
911
926
  ) -> D:
912
927
  ...
913
928
  def normalize_attr_app2[D: dict[str, Any]](
914
- info: Mapping,
929
+ info: Mapping[str, Any],
915
930
  /,
916
931
  simple: bool = False,
917
932
  keep_raw: bool = False,
933
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
934
+ *,
918
935
  dict_cls: None | type[D] = None,
919
936
  ) -> dict[str, Any] | D:
920
937
  """翻译 `P115Client.fs_files_app2` 接口响应的文件信息数据,使之便于阅读
@@ -922,6 +939,7 @@ def normalize_attr_app2[D: dict[str, Any]](
922
939
  :param info: 原始数据
923
940
  :param simple: 只提取少量必要字段 "is_dir", "id", "parent_id", "name", "sha1", "size", "pickcode", "is_collect", "ctime", "mtime", "type"
924
941
  :param keep_raw: 是否保留原始数据,如果为 True,则保存到 "raw" 字段
942
+ :param default: 一些预设值,可被覆盖
925
943
  :param dict_cls: 字典类型
926
944
 
927
945
  :return: 翻译后的 dict 类型数据
@@ -929,6 +947,8 @@ def normalize_attr_app2[D: dict[str, Any]](
929
947
  if dict_cls is None:
930
948
  dict_cls = cast(type[D], dict)
931
949
  attr: dict[str, Any] = dict_cls()
950
+ if default:
951
+ attr.update(default)
932
952
  if "file_id" in info and "parent_id" in info:
933
953
  if "file_category" in info:
934
954
  is_dir = not int(info["file_category"])
@@ -1052,29 +1072,32 @@ def normalize_attr_app2[D: dict[str, Any]](
1052
1072
 
1053
1073
  @overload
1054
1074
  def normalize_attr(
1055
- info: Mapping,
1075
+ info: Mapping[str, Any],
1056
1076
  /,
1057
1077
  simple: bool = False,
1058
1078
  keep_raw: bool = False,
1079
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
1059
1080
  *,
1060
1081
  dict_cls: None = None,
1061
1082
  ) -> AttrDict[str, Any]:
1062
1083
  ...
1063
1084
  @overload
1064
1085
  def normalize_attr[D: dict[str, Any]](
1065
- info: Mapping,
1086
+ info: Mapping[str, Any],
1066
1087
  /,
1067
1088
  simple: bool = False,
1068
1089
  keep_raw: bool = False,
1090
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
1069
1091
  *,
1070
1092
  dict_cls: type[D],
1071
1093
  ) -> D:
1072
1094
  ...
1073
1095
  def normalize_attr[D: dict[str, Any]](
1074
- info: Mapping,
1096
+ info: Mapping[str, Any],
1075
1097
  /,
1076
1098
  simple: bool = False,
1077
1099
  keep_raw: bool = False,
1100
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
1078
1101
  *,
1079
1102
  dict_cls: None | type[D] = None,
1080
1103
  ) -> AttrDict[str, Any] | D:
@@ -1083,55 +1106,68 @@ def normalize_attr[D: dict[str, Any]](
1083
1106
  :param info: 原始数据
1084
1107
  :param simple: 只提取少量必要字段 "is_dir", "id", "parent_id", "name", "sha1", "size", "pickcode", "is_collect", "ctime", "mtime"
1085
1108
  :param keep_raw: 是否保留原始数据,如果为 True,则保存到 "raw" 字段
1109
+ :param default: 一些预设值,可被覆盖
1086
1110
  :param dict_cls: 字典类型
1087
1111
 
1088
1112
  :return: 翻译后的 dict 类型数据
1089
1113
  """
1114
+ if "fn" in info:
1115
+ call = normalize_attr_app
1116
+ elif "file_id" in info or "category_id" in info:
1117
+ call = normalize_attr_app2
1118
+ else:
1119
+ call = normalize_attr_web
1090
1120
  if dict_cls is None:
1091
- if "fn" in info:
1092
- return normalize_attr_app(info, simple=simple, keep_raw=keep_raw, dict_cls=AttrDict)
1093
- elif "file_id" in info or "category_id" in info:
1094
- return normalize_attr_app2(info, simple=simple, keep_raw=keep_raw, dict_cls=AttrDict)
1095
- else:
1096
- return normalize_attr_web(info, simple=simple, keep_raw=keep_raw, dict_cls=AttrDict)
1121
+ return call(info, simple=simple, keep_raw=keep_raw, default=default, dict_cls=AttrDict)
1097
1122
  else:
1098
- if "fn" in info:
1099
- return normalize_attr_app(info, simple=simple, keep_raw=keep_raw, dict_cls=dict_cls)
1100
- elif "file_id" in info or "category_id" in info:
1101
- return normalize_attr_app2(info, simple=simple, keep_raw=keep_raw, dict_cls=dict_cls)
1102
- else:
1103
- return normalize_attr_web(info, simple=simple, keep_raw=keep_raw, dict_cls=dict_cls)
1123
+ return call(info, simple=simple, keep_raw=keep_raw, default=default, dict_cls=dict_cls)
1104
1124
 
1105
1125
 
1106
1126
  @overload
1107
1127
  def normalize_attr_simple(
1108
- info: Mapping,
1128
+ info: Mapping[str, Any],
1109
1129
  /,
1110
1130
  keep_raw: bool = False,
1131
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
1111
1132
  *,
1112
- dict_cls: None,
1113
- ) -> dict[str, Any]:
1133
+ dict_cls: None = None,
1134
+ ) -> AttrDict[str, Any]:
1114
1135
  ...
1115
1136
  @overload
1116
1137
  def normalize_attr_simple[D: dict[str, Any]](
1117
- info: Mapping,
1138
+ info: Mapping[str, Any],
1118
1139
  /,
1119
1140
  keep_raw: bool = False,
1141
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
1120
1142
  *,
1121
- dict_cls: type[D] = AttrDict, # type: ignore
1143
+ dict_cls: type[D],
1122
1144
  ) -> D:
1123
1145
  ...
1124
1146
  def normalize_attr_simple[D: dict[str, Any]](
1125
- info: Mapping,
1147
+ info: Mapping[str, Any],
1126
1148
  /,
1127
1149
  keep_raw: bool = False,
1150
+ default: None | Mapping[str, Any] | Iterable[tuple[str, Any]] = None,
1128
1151
  *,
1129
- dict_cls: None | type[D] = AttrDict, # type: ignore
1130
- ) -> dict[str, Any] | D:
1152
+ dict_cls: None | type[D] = None,
1153
+ ) -> AttrDict[str, Any] | D:
1154
+ """翻译获取自罗列目录、搜索、获取文件信息等接口的数据,使之便于阅读
1155
+
1156
+ .. note::
1157
+ 只提取少量必要字段 "is_dir", "id", "parent_id", "name", "sha1", "size", "pickcode", "is_collect", "ctime", "mtime"
1158
+
1159
+ :param info: 原始数据
1160
+ :param keep_raw: 是否保留原始数据,如果为 True,则保存到 "raw" 字段
1161
+ :param default: 一些预设值,可被覆盖
1162
+ :param dict_cls: 字典类型
1163
+
1164
+ :return: 翻译后的 dict 类型数据
1165
+ """
1131
1166
  return normalize_attr(
1132
1167
  info,
1133
1168
  simple=True,
1134
1169
  keep_raw=keep_raw,
1170
+ default=default,
1135
1171
  dict_cls=dict_cls,
1136
1172
  )
1137
1173
 
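`normalize_attr` now resolves the concrete normalizer once ("fn" → `normalize_attr_app`, "file_id"/"category_id" → `normalize_attr_app2`, otherwise `normalize_attr_web`) and forwards `default` and `dict_cls` in a single call instead of repeating the three-way branch. A small sketch of that selection rule; the sample payloads are hypothetical:

```python
def pick_normalizer(info: dict) -> str:
    # same key test as the refactored dispatch in normalize_attr
    if "fn" in info:                                   # fs_files_app-style responses
        return "normalize_attr_app"
    elif "file_id" in info or "category_id" in info:   # fs_files_app2-style responses
        return "normalize_attr_app2"
    else:                                              # classic web responses
        return "normalize_attr_web"

assert pick_normalizer({"fn": "a.txt", "fid": "1"}) == "normalize_attr_app"
assert pick_normalizer({"file_id": "1"}) == "normalize_attr_app2"
assert pick_normalizer({"cid": "1"}) == "normalize_attr_web"
```

In the same hunk, `normalize_attr_simple` gains a docstring, accepts `default`, and now defaults `dict_cls` to None, returning an `AttrDict` in that case just like `normalize_attr`.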
@@ -5407,6 +5443,26 @@ class P115OpenClient(ClientRequestMixin):
5407
5443
  upload_file_open = upload_file
5408
5444
  vip_qr_url_open = vip_qr_url
5409
5445
 
5446
+ to_id = staticmethod(to_id)
5447
+
5448
+ def to_pickcode(
5449
+ self,
5450
+ id: int | str,
5451
+ /,
5452
+ prefix: Literal["a", "b", "c", "d", "e", "fa", "fb", "fc", "fd", "fe"] = "a",
5453
+ ) -> str:
5454
+ """把可能是 id 或 pickcode 的一律转换成 pickcode
5455
+
5456
+ .. note::
5457
+ 规定:空提取码 "" 对应的 id 是 0
5458
+
5459
+ :param id: 可能是 id 或 pickcode
5460
+ :param prefix: 前缀
5461
+
5462
+ :return: pickcode
5463
+ """
5464
+ return to_pickcode(id, self.pickcode_stable_point, prefix=prefix)
5465
+
5410
5466
 
5411
5467
  class P115Client(P115OpenClient):
5412
5468
  """115 的客户端对象
@@ -18473,41 +18529,6 @@ class P115Client(P115OpenClient):
18473
18529
  payload = {"file_id": payload}
18474
18530
  return self.request(url=api, method="POST", data=payload, async_=async_, **request_kwargs)
18475
18531
 
18476
- @overload
18477
- def offline_info(
18478
- self,
18479
- /,
18480
- base_url: bool | str | Callable[[], str] = False,
18481
- *,
18482
- async_: Literal[False] = False,
18483
- **request_kwargs,
18484
- ) -> dict:
18485
- ...
18486
- @overload
18487
- def offline_info(
18488
- self,
18489
- /,
18490
- base_url: bool | str | Callable[[], str] = False,
18491
- *,
18492
- async_: Literal[True],
18493
- **request_kwargs,
18494
- ) -> Coroutine[Any, Any, dict]:
18495
- ...
18496
- def offline_info(
18497
- self,
18498
- /,
18499
- base_url: bool | str | Callable[[], str] = False,
18500
- *,
18501
- async_: Literal[False, True] = False,
18502
- **request_kwargs,
18503
- ) -> dict | Coroutine[Any, Any, dict]:
18504
- """获取关于离线的限制的信息,以及 sign 和 time 字段(各个添加任务的接口需要)
18505
-
18506
- GET https://115.com/?ct=offline&ac=space
18507
- """
18508
- api = complete_api("/?ct=offline&ac=space", base_url=base_url)
18509
- return self.request(url=api, async_=async_, **request_kwargs)
18510
-
18511
18532
  @overload
18512
18533
  def offline_list(
18513
18534
  self,
@@ -18824,6 +18845,79 @@ class P115Client(P115OpenClient):
18824
18845
  **request_kwargs,
18825
18846
  )
18826
18847
 
18848
+ @overload
18849
+ def offline_sign(
18850
+ self,
18851
+ /,
18852
+ base_url: bool | str | Callable[[], str] = False,
18853
+ *,
18854
+ async_: Literal[False] = False,
18855
+ **request_kwargs,
18856
+ ) -> dict:
18857
+ ...
18858
+ @overload
18859
+ def offline_sign(
18860
+ self,
18861
+ /,
18862
+ base_url: bool | str | Callable[[], str] = False,
18863
+ *,
18864
+ async_: Literal[True],
18865
+ **request_kwargs,
18866
+ ) -> Coroutine[Any, Any, dict]:
18867
+ ...
18868
+ def offline_sign(
18869
+ self,
18870
+ /,
18871
+ base_url: bool | str | Callable[[], str] = False,
18872
+ *,
18873
+ async_: Literal[False, True] = False,
18874
+ **request_kwargs,
18875
+ ) -> dict | Coroutine[Any, Any, dict]:
18876
+ """获取 sign 和 time 字段(各个添加任务的接口需要),以及其它信息
18877
+
18878
+ GET https://115.com/?ct=offline&ac=space
18879
+ """
18880
+ api = complete_api("/?ct=offline&ac=space", base_url=base_url)
18881
+ return self.request(url=api, async_=async_, **request_kwargs)
18882
+
18883
+ @overload
18884
+ def offline_sign_app(
18885
+ self,
18886
+ /,
18887
+ base_url: bool | str | Callable[[], str] = False,
18888
+ app: str = "android",
18889
+ *,
18890
+ async_: Literal[False] = False,
18891
+ **request_kwargs,
18892
+ ) -> dict:
18893
+ ...
18894
+ @overload
18895
+ def offline_sign_app(
18896
+ self,
18897
+ /,
18898
+ base_url: bool | str | Callable[[], str] = False,
18899
+ app: str = "android",
18900
+ *,
18901
+ async_: Literal[True],
18902
+ **request_kwargs,
18903
+ ) -> Coroutine[Any, Any, dict]:
18904
+ ...
18905
+ def offline_sign_app(
18906
+ self,
18907
+ /,
18908
+ base_url: bool | str | Callable[[], str] = False,
18909
+ app: str = "android",
18910
+ *,
18911
+ async_: Literal[False, True] = False,
18912
+ **request_kwargs,
18913
+ ) -> dict | Coroutine[Any, Any, dict]:
18914
+ """获取 sign 和 time 字段(各个添加任务的接口需要)
18915
+
18916
+ GET https://proapi.115.com/android/files/offlinesign
18917
+ """
18918
+ api = complete_proapi("/files/offlinesign", base_url, app)
18919
+ return self.request(url=api, async_=async_, **request_kwargs)
18920
+
18827
18921
  @overload
18828
18922
  def offline_task_count(
18829
18923
  self,
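The `offline_info` method removed in the previous hunk is superseded by `offline_sign`, which wraps the same `?ct=offline&ac=space` endpoint; `offline_sign_app` adds a proapi variant. A hedged sketch of fetching the sign/time pair that the task-adding APIs need, assuming those fields sit at the top level of the JSON response as the docstring suggests:

```python
from p115client import P115Client, check_response

def get_offline_sign(client: P115Client) -> tuple[str, int]:
    """Illustrative only: fetch the sign/time pair required by the offline task-adding APIs."""
    resp = check_response(client.offline_sign())   # GET https://115.com/?ct=offline&ac=space
    return resp["sign"], resp["time"]
```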
{p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/download.py
@@ -41,7 +41,7 @@ from p115client import (
41
41
  P115OpenClient, P115URL,
42
42
  )
43
43
  from p115client.exception import P115Warning
44
- from p115pickcode import to_id, to_pickcode
44
+ from p115pickcode import to_id
45
45
 
46
46
  from .iterdir import (
47
47
  get_path_to_cid, iterdir, iter_files, iter_files_with_path,
@@ -94,15 +94,14 @@ def batch_get_url(
94
94
  """
95
95
  if isinstance(client, str):
96
96
  client = P115Client(client, check_for_relogin=True)
97
- stable_point = client.pickcode_stable_point
98
97
  if headers := request_kwargs.get("headers"):
99
98
  request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
100
99
  else:
101
100
  request_kwargs["headers"] = {"user-agent": user_agent}
102
101
  if isinstance(pickcode, (int, str)):
103
- pickcode = to_pickcode(pickcode, stable_point)
102
+ pickcode = client.to_pickcode(pickcode)
104
103
  elif not isinstance(pickcode, str):
105
- pickcode = ",".join(to_pickcode(pc, stable_point) for pc in pickcode)
104
+ pickcode = ",".join(map(client.to_pickcode, pickcode))
106
105
  if not isinstance(client, P115Client) or app == "open":
107
106
  get_download_url: Callable = client.download_url_info_open
108
107
  else:
@@ -184,7 +183,6 @@ def iter_url_batches(
184
183
  """
185
184
  if isinstance(client, str):
186
185
  client = P115Client(client, check_for_relogin=True)
187
- stable_point = client.pickcode_stable_point
188
186
  if headers := request_kwargs.get("headers"):
189
187
  request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
190
188
  else:
@@ -196,7 +194,7 @@ def iter_url_batches(
196
194
  if batch_size <= 0:
197
195
  batch_size = 1
198
196
  def gen_step():
199
- for pcs in batched((to_pickcode(pc, stable_point) for pc in pickcodes), batch_size):
197
+ for pcs in batched(map(client.to_pickcode, pickcodes), batch_size):
200
198
  resp = yield get_download_url(
201
199
  ",".join(pcs),
202
200
  async_=async_,
@@ -1210,7 +1208,6 @@ def iter_download_nodes(
1210
1208
  """
1211
1209
  if isinstance(client, str):
1212
1210
  client = P115Client(client, check_for_relogin=True)
1213
- stable_point = client.pickcode_stable_point
1214
1211
  get_base_url = cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__
1215
1212
  if async_:
1216
1213
  if max_workers is None or max_workers <= 0:
@@ -1272,9 +1269,10 @@ def iter_download_nodes(
1272
1269
  async_=async_,
1273
1270
  **{"base_url": get_base_url, **request_kwargs},
1274
1271
  )
1272
+ to_pickcode = client.to_pickcode
1275
1273
  if max_workers == 1:
1276
1274
  def gen_step(pickcode: int | str, /):
1277
- pickcode = to_pickcode(pickcode, stable_point)
1275
+ pickcode = to_pickcode(pickcode)
1278
1276
  for i in count(1):
1279
1277
  payload = {"pickcode": pickcode, "page": i}
1280
1278
  resp = yield get_nodes(payload)
@@ -1324,7 +1322,7 @@ def iter_download_nodes(
1324
1322
  n = executor._max_workers
1325
1323
  submit = executor.submit
1326
1324
  shutdown = lambda: executor.shutdown(False, cancel_futures=True)
1327
- pickcode = to_pickcode(pickcode, stable_point)
1325
+ pickcode = to_pickcode(pickcode)
1328
1326
  try:
1329
1327
  sentinel = object()
1330
1328
  countdown: Callable
@@ -1449,7 +1447,6 @@ def iter_download_files(
1449
1447
  """
1450
1448
  if isinstance(client, str):
1451
1449
  client = P115Client(client, check_for_relogin=True)
1452
- stable_point = client.pickcode_stable_point
1453
1450
  if id_to_dirnode is None:
1454
1451
  id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
1455
1452
  elif id_to_dirnode is ...:
@@ -1594,7 +1591,7 @@ def iter_download_files(
1594
1591
  for pickcode in pickcodes:
1595
1592
  yield YieldFrom(run_gen_step_iter(gen_step(pickcode), async_))
1596
1593
  ancestors_loaded = False
1597
- return run_gen_step_iter(gen_step(to_pickcode(cid, stable_point)), async_)
1594
+ return run_gen_step_iter(gen_step(client.to_pickcode(cid)), async_)
1598
1595
 
1599
1596
 
1600
1597
  @overload
{p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/fs_files.py
@@ -19,7 +19,7 @@ from time import time
19
19
  from typing import cast, overload, Any, Final, Literal
20
20
  from warnings import warn
21
21
 
22
- from iterutils import as_gen_step, run_gen_step_iter, Yield
22
+ from iterutils import run_gen_step_iter, Yield
23
23
  from p115client import check_response, P115Client, P115OpenClient
24
24
  from p115client.client import get_status_code
25
25
  from p115client.exception import BusyOSError, DataError, P115Warning
{p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/p115client/tool/iterdir.py
@@ -11,29 +11,29 @@ __all__ = [
11
11
  "ensure_attr_path", "ensure_attr_path_using_star_event",
12
12
  "iterdir", "iter_stared_dirs", "iter_dirs", "iter_dirs_with_path",
13
13
  "iter_files", "iter_files_with_path", "iter_files_with_path_skim",
14
- "iter_nodes", "iter_nodes_skim", "iter_nodes_by_pickcode",
15
- "iter_nodes_using_update", "iter_nodes_using_info",
16
- "iter_nodes_using_star_event", "iter_dir_nodes_using_star",
17
- "iter_parents", "iter_files_shortcut", "iter_dupfiles", "iter_image_files",
18
- "search_iter", "share_iterdir", "share_iter_files", "share_search_iter",
14
+ "traverse_tree", "traverse_tree_with_path", "iter_nodes",
15
+ "iter_nodes_skim", "iter_nodes_by_pickcode", "iter_nodes_using_update",
16
+ "iter_nodes_using_info", "iter_nodes_using_star_event",
17
+ "iter_dir_nodes_using_star", "iter_parents", "iter_files_shortcut",
18
+ "iter_dupfiles", "iter_image_files", "search_iter", "share_iterdir",
19
+ "share_iter_files", "share_search_iter",
19
20
  ]
20
21
  __doc__ = "这个模块提供了一些和目录信息罗列有关的函数"
21
22
 
22
- # TODO: 再实现 2 个方法,利用 iter_download_nodes,一个有 path,一个没有,可以把某个目录下的所有节点都搞出来,导出时,先导出目录节点,再导出文件节点,但它们是并发执行的,然后必有字段:id, parent_id, pickcode, name, is_dir, sha1 等
23
-
24
23
  # TODO: 路径表示法,应该支持 / 和 > 开头,而不仅仅是 / 开头
25
- # TODO: 对于路径,增加 top_id 和 relpath 字段,表示搜素目录的 id 和相对于搜索路径的相对路径
26
24
  # TODO: get_id* 这类方法,应该放在 attr.py,用来获取某个 id 对应的值(根本还是 get_attr)
27
25
  # TODO: 创造函数 get_id, get_parent_id, get_ancestors, get_sha1, get_pickcode, get_path 等,支持多种类型的参数,目前已有的名字太长,需要改造,甚至转为私有,另外这些函数或许可以放到另一个包中,attr.py
28
26
  # TODO: 去除掉一些并不便利的办法,然后加上 traverse 和 walk 方法,通过递归拉取(支持深度和广度优先遍历)
29
27
  # TODO: 要获取某个 id 对应的路径,可以先用 fs_file_skim 或 fs_info 看一下是不是存在,以及是不是文件,然后再选择响应最快的办法获取
30
28
 
31
- from asyncio import create_task, sleep as async_sleep
29
+ from asyncio import create_task, sleep as async_sleep, Task
32
30
  from collections import defaultdict
33
31
  from collections.abc import (
34
- AsyncIterable, AsyncIterator, Callable, Coroutine, Iterable,
35
- Iterator, Mapping, MutableMapping, Sequence,
32
+ AsyncIterable, AsyncIterator, Callable, Coroutine, Generator,
33
+ Iterable, Iterator, Mapping, MutableMapping, Sequence,
36
34
  )
35
+ from contextlib import contextmanager
36
+ from concurrent.futures import Future
37
37
  from dataclasses import dataclass
38
38
  from errno import EIO, ENOENT, ENOTDIR
39
39
  from functools import partial
@@ -60,7 +60,7 @@ from p115client import (
60
60
  P115Client, P115OpenClient, P115OSError, P115Warning,
61
61
  )
62
62
  from p115client.type import P115ID
63
- from p115pickcode import pickcode_to_id, to_id, to_pickcode
63
+ from p115pickcode import pickcode_to_id, to_id
64
64
  from posixpatht import path_is_dir_form, splitext, splits
65
65
 
66
66
  from .attr import type_of_attr
@@ -138,45 +138,123 @@ def _overview_attr(info: Mapping, /) -> OverviewAttr:
138
138
  return OverviewAttr(is_dir, id, pid, name, ctime, mtime)
139
139
 
140
140
 
141
- def _update_resp_2_id_to_dirnode(
141
+ def _update_resp_ancestors(
142
142
  resp: dict,
143
- id_to_dirnode: EllipsisType | MutableMapping[int, tuple[str, int] | DirNode],
143
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
144
144
  /,
145
145
  error: None | OSError = FileNotFoundError(ENOENT, "not found"),
146
146
  ) -> dict:
147
+ list_append = list.append
148
+ need_update_id_to_dirnode = id_to_dirnode not in (..., None)
147
149
  if "path" in resp:
148
- if id_to_dirnode is not ...:
149
- for info in resp["path"][1:]:
150
- id_to_dirnode[int(info["cid"])] = DirNode(info["name"], int(info["pid"]))
150
+ ancestors = resp["ancestors"] = []
151
+ start_idx = not resp["path"][0]["cid"]
152
+ if start_idx:
153
+ list_append(ancestors, {"id": 0, "parent_id": 0, "name": ""})
154
+ for info in resp["path"][start_idx:]:
155
+ id, name, pid = int(info["cid"]), info["name"], int(info["pid"])
156
+ list_append(ancestors, {"id": id, "parent_id": pid, "name": name})
157
+ if need_update_id_to_dirnode:
158
+ id_to_dirnode[id] = DirNode(name, pid) # type: ignore
151
159
  else:
160
+ if resp and "paths" not in resp:
161
+ check_response(resp)
162
+ resp = resp["data"]
152
163
  if not resp:
153
164
  if error is None:
154
165
  return resp
155
166
  raise error
156
- if "paths" not in resp:
157
- check_response(resp)
158
- resp = resp["data"]
159
- if not resp:
160
- if error is None:
161
- return resp
162
- raise error
163
- if id_to_dirnode is not ...:
164
- paths = resp["paths"]
165
- info = paths[0]
166
- pid = int(info["file_id"])
167
- for info in paths[1:]:
168
- fid = int(info["file_id"])
169
- id_to_dirnode[fid] = DirNode(info["file_name"], pid)
170
- pid = fid
171
- if not resp["sha1"]:
172
- if "file_id" in resp:
173
- fid = int(resp["file_id"])
174
- else:
175
- fid = to_id(resp["pick_code"])
176
- id_to_dirnode[fid] = DirNode(resp["file_name"], pid)
167
+ ancestors = resp["ancestors"] = []
168
+ pid = int(resp["paths"][0]["file_id"])
169
+ for info in resp["paths"][1:]:
170
+ id = int(info["file_id"])
171
+ name = info["file_name"]
172
+ list_append(ancestors, {"id": id, "parent_id": pid, "name": name})
173
+ if need_update_id_to_dirnode:
174
+ id_to_dirnode[id] = DirNode(name, pid) # type: ignore
175
+ pid = id
176
+ if not resp["sha1"]:
177
+ if "file_id" in resp:
178
+ id = int(resp["file_id"])
179
+ else:
180
+ id = to_id(resp["pick_code"])
181
+ name = resp["file_name"]
182
+ list_append(ancestors, {"id": id, "parent_id": pid, "name": name})
183
+ if need_update_id_to_dirnode:
184
+ id_to_dirnode[id] = DirNode(name, pid) # type: ignore
177
185
  return resp
178
186
 
179
187
 
188
+ def _make_top_adder(
189
+ top_id: int,
190
+ id_to_dirnode: MutableMapping[int, tuple[str, int] | DirNode],
191
+ ) -> Callable:
192
+ top_ancestors: list[dict]
193
+ if not top_id:
194
+ top_ancestors = [{"id": 0, "parent_id": 0, "name": ""}]
195
+ def add_top[T: MutableMapping](attr: T, /) -> T:
196
+ nonlocal top_ancestors
197
+ try:
198
+ top_ancestors
199
+ except NameError:
200
+ top_ancestors = []
201
+ add_ancestor = top_ancestors.append
202
+ tid = top_id
203
+ while tid and tid in id_to_dirnode:
204
+ name, pid = id_to_dirnode[tid]
205
+ add_ancestor({"id": tid, "parent_id": pid, "name": name})
206
+ tid = pid
207
+ if not tid:
208
+ add_ancestor({"id": 0, "parent_id": 0, "name": ""})
209
+ top_ancestors.reverse()
210
+ attr["top_id"] = top_id
211
+ attr["top_ancestors"] = top_ancestors
212
+ return attr
213
+ return add_top
214
+
215
+
216
+ @overload
217
+ @contextmanager
218
+ def cache_loading[T](
219
+ it: Iterator[T],
220
+ /,
221
+ ) -> Generator[tuple[list[T], Future]]:
222
+ ...
223
+ @overload
224
+ @contextmanager
225
+ def cache_loading[T](
226
+ it: AsyncIterator[T],
227
+ /,
228
+ ) -> Generator[tuple[list[T], Task]]:
229
+ ...
230
+ @contextmanager
231
+ def cache_loading[T](
232
+ it: Iterator[T] | AsyncIterator[T],
233
+ /,
234
+ ) -> Generator[tuple[list[T], Future | Task]]:
235
+ cache: list[T] = []
236
+ add_to_cache = cache.append
237
+ running = True
238
+ if isinstance(it, AsyncIterator):
239
+ async def arunner():
240
+ async for e in it:
241
+ add_to_cache(e)
242
+ if not running:
243
+ break
244
+ task: Future | Task = create_task(arunner())
245
+ else:
246
+ def runner():
247
+ for e in it:
248
+ add_to_cache(e)
249
+ if not running:
250
+ break
251
+ task = run_as_thread(runner)
252
+ try:
253
+ yield (cache, task)
254
+ finally:
255
+ running = False
256
+
257
+
180
258
  # TODO: 支持 open
181
259
  @overload
182
260
  def get_path_to_cid(
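`cache_loading`, added in this hunk and used by the new `traverse_tree*` functions further down, drains an iterator into a list on a background thread (or asyncio task) while the caller produces something else, then hands back both the cache and the still-unfinished iterator. It is not exported via `__all__`, so treat it as internal. A minimal synchronous sketch with a stand-in generator:

```python
import time
from p115client.tool.iterdir import cache_loading

def slow_numbers():
    for i in range(5):
        time.sleep(0.1)        # stand-in for a network round trip
        yield i

it = slow_numbers()
with cache_loading(it) as (cache, task):
    time.sleep(0.25)           # do other useful work while items accumulate in `cache`
task.result()                  # sync case: a concurrent.futures.Future from the worker thread
print(cache)                   # whatever was drained so far (timing dependent)
print(list(it))                # the iterator resumes where the background thread stopped
```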
@@ -405,13 +483,11 @@ def get_file_count(
405
483
  if cid != int(resp["path"][-1]["cid"]):
406
484
  resp["cid"] = cid
407
485
  raise NotADirectoryError(ENOTDIR, resp)
408
- if id_to_dirnode is not ...:
409
- for info in resp["path"][1:]:
410
- id_to_dirnode[int(info["cid"])] = DirNode(info["name"], int(info["pid"]))
486
+ _update_resp_ancestors(resp, id_to_dirnode)
411
487
  return int(resp["count"])
412
488
  else:
413
489
  resp = yield get_resp_of_category_get(cid)
414
- resp = _update_resp_2_id_to_dirnode(resp, id_to_dirnode, FileNotFoundError(ENOENT, cid))
490
+ resp = _update_resp_ancestors(resp, id_to_dirnode, FileNotFoundError(ENOENT, cid))
415
491
  if resp["sha1"]:
416
492
  resp["cid"] = cid
417
493
  raise NotADirectoryError(ENOTDIR, resp)
@@ -577,7 +653,7 @@ def get_ancestors(
577
653
  return ancestors
578
654
  else:
579
655
  resp = yield get_resp_of_category_get(fid)
580
- resp = _update_resp_2_id_to_dirnode(resp, id_to_dirnode)
656
+ resp = _update_resp_ancestors(resp, id_to_dirnode)
581
657
  for info in resp["paths"]:
582
658
  add_ancestor({
583
659
  "parent_id": pid,
@@ -607,7 +683,7 @@ def get_ancestors(
607
683
  id_to_dirnode[ans["id"]] = DirNode(ans["name"], ans["parent_id"])
608
684
  else:
609
685
  resp = yield get_resp_of_category_get(fid)
610
- resp = _update_resp_2_id_to_dirnode(resp, id_to_dirnode)
686
+ resp = _update_resp_ancestors(resp, id_to_dirnode)
611
687
  for info in resp["paths"]:
612
688
  add_ancestor({
613
689
  "parent_id": pid,
@@ -835,7 +911,7 @@ def get_id_to_path(
835
911
  if not isinstance(client, P115Client) or app == "open":
836
912
  path = ">" + ">".join(patht)
837
913
  resp = yield client.fs_info_open(path, async_=async_, **request_kwargs)
838
- data = _update_resp_2_id_to_dirnode(resp, id_to_dirnode)
914
+ data = _update_resp_ancestors(resp, id_to_dirnode)
839
915
  return P115ID(data["file_id"], data, about="path", path=path)
840
916
  i = 0
841
917
  start_parent_id = parent_id
@@ -1312,7 +1388,7 @@ def ensure_attr_path[D: dict](
1312
1388
  pid = id_to_dirnode[pid][1]
1313
1389
  if pid and pid not in dangling_id_to_name:
1314
1390
  resp = yield get_info(pid, async_=async_, **request_kwargs)
1315
- resp = _update_resp_2_id_to_dirnode(resp, id_to_dirnode, None)
1391
+ resp = _update_resp_ancestors(resp, id_to_dirnode, None)
1316
1392
  if not resp:
1317
1393
  dangling_id_to_name[pid] = ""
1318
1394
  yield Yield(attr)
@@ -1506,6 +1582,7 @@ def _iter_fs_files(
1506
1582
  id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
1507
1583
  raise_for_changed_count: bool = False,
1508
1584
  ensure_file: None | bool = None,
1585
+ hold_top: bool = True,
1509
1586
  app: str = "android",
1510
1587
  cooldown: int | float = 0,
1511
1588
  max_workers: None | int = None,
@@ -1525,6 +1602,7 @@ def _iter_fs_files(
1525
1602
  id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
1526
1603
  raise_for_changed_count: bool = False,
1527
1604
  ensure_file: None | bool = None,
1605
+ hold_top: bool = True,
1528
1606
  app: str = "android",
1529
1607
  cooldown: int | float = 0,
1530
1608
  max_workers: None | int = None,
@@ -1543,6 +1621,7 @@ def _iter_fs_files(
1543
1621
  id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = ...,
1544
1622
  raise_for_changed_count: bool = False,
1545
1623
  ensure_file: None | bool = None,
1624
+ hold_top: bool = True,
1546
1625
  app: str = "android",
1547
1626
  cooldown: int | float = 0,
1548
1627
  max_workers: None | int = None,
@@ -1565,6 +1644,7 @@ def _iter_fs_files(
1565
1644
  - False: 必须是目录
1566
1645
  - None: 可以是目录或文件
1567
1646
 
1647
+ :param hold_top: 保留顶层目录信息,返回字段增加 "top_id", "top_ancestors"
1568
1648
  :param app: 使用指定 app(设备)的接口
1569
1649
  :param cooldown: 冷却时间,大于 0,则使用此时间间隔执行并发
1570
1650
  :param max_workers: 最大并发数,如果为 None 或 <= 0,则自动确定
@@ -1647,12 +1727,12 @@ def _iter_fs_files(
1647
1727
  max_workers=max_workers,
1648
1728
  **request_kwargs,
1649
1729
  )
1730
+ top_id = int(payload.get("cid") or 0)
1650
1731
  with with_iter_next(it) as get_next:
1651
1732
  while True:
1652
1733
  resp = yield get_next()
1653
- if id_to_dirnode is not ...:
1654
- for info in resp["path"][1:]:
1655
- id_to_dirnode[int(info["cid"])] = DirNode(info["name"], int(info["pid"]))
1734
+ _update_resp_ancestors(resp, id_to_dirnode)
1735
+ ancestors = resp["ancestors"]
1656
1736
  for info in resp["data"]:
1657
1737
  if normalize_attr is None:
1658
1738
  attr: dict | OverviewAttr = _overview_attr(info)
@@ -1667,6 +1747,9 @@ def _iter_fs_files(
1667
1747
  continue
1668
1748
  if with_dirname:
1669
1749
  info["dirname"] = pid_to_name[attr["parent_id"]]
1750
+ if hold_top:
1751
+ info["top_id"] = top_id
1752
+ info["top_ancestors"] = ancestors
1670
1753
  yield Yield(info)
1671
1754
  return run_gen_step_iter(gen_step, async_)
1672
1755
 
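Because `hold_top` defaults to True, entries produced through `_iter_fs_files` now also carry the id and ancestor chain of the directory the listing started from. A hedged sketch of reading those fields through `iter_files`, assuming it keeps the default and passes the keys through unchanged:

```python
from p115client import P115Client
from p115client.tool.iterdir import iter_files

def show_top_fields(client: P115Client, cid: int = 0) -> None:
    """Illustrative only: the new per-entry "top_id" / "top_ancestors" fields."""
    for attr in iter_files(client, cid):
        names = [a["name"] for a in attr["top_ancestors"]]
        print(attr["id"], attr["top_id"], "/".join(names))
        break   # one entry is enough for the illustration
```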
@@ -2026,7 +2109,8 @@ def iter_dirs_with_path(
2026
2109
  async_=async_, # type: ignore
2027
2110
  **request_kwargs,
2028
2111
  ))
2029
- return YieldFrom(ensure_attr_path(
2112
+ add_top = _make_top_adder(to_id(cid), id_to_dirnode)
2113
+ return YieldFrom(do_map(add_top, ensure_attr_path(
2030
2114
  client,
2031
2115
  attrs,
2032
2116
  with_ancestors=with_ancestors,
@@ -2035,7 +2119,7 @@ def iter_dirs_with_path(
2035
2119
  app=app,
2036
2120
  async_=async_,
2037
2121
  **request_kwargs,
2038
- ))
2122
+ )))
2039
2123
  return run_gen_step_iter(gen_step, async_)
2040
2124
 
2041
2125
 
@@ -2294,7 +2378,6 @@ def iter_files_with_path(
2294
2378
  raise ValueError("please set the non-zero value of suffix or type")
2295
2379
  if isinstance(client, str):
2296
2380
  client = P115Client(client, check_for_relogin=True)
2297
- stable_point = client.pickcode_stable_point
2298
2381
  if isinstance(escape, bool):
2299
2382
  if escape:
2300
2383
  from posixpatht import escape
@@ -2377,7 +2460,7 @@ def iter_files_with_path(
2377
2460
  if id:
2378
2461
  yield through(iter_download_nodes(
2379
2462
  client,
2380
- to_pickcode(id, stable_point),
2463
+ client.to_pickcode(id),
2381
2464
  files=False,
2382
2465
  id_to_dirnode=id_to_dirnode,
2383
2466
  max_workers=None,
@@ -2516,7 +2599,6 @@ def iter_files_with_path_skim(
2516
2599
  from .download import iter_download_nodes
2517
2600
  if isinstance(client, str):
2518
2601
  client = P115Client(client, check_for_relogin=True)
2519
- stable_point = client.pickcode_stable_point
2520
2602
  if isinstance(escape, bool):
2521
2603
  if escape:
2522
2604
  from posixpatht import escape
@@ -2561,6 +2643,8 @@ def iter_files_with_path_skim(
2561
2643
  dirname = id_to_path[pid] = get_path(id_to_dirnode[pid]) + "/"
2562
2644
  return dirname + name
2563
2645
  def update_path(attr: dict, /) -> dict:
2646
+ attr["top_id"] = top_id
2647
+ attr["top_ancestors"] = top_ancestors
2564
2648
  try:
2565
2649
  if with_ancestors:
2566
2650
  attr["ancestors"] = get_ancestors(attr["id"], attr)
@@ -2585,12 +2669,28 @@ def iter_files_with_path_skim(
2585
2669
  def set_path_already(*_):
2586
2670
  nonlocal _path_already
2587
2671
  _path_already = True
2672
+ top_id = cid
2673
+ if not cid:
2674
+ top_ancestors = [{"id": 0, "parent_id": 0, "name": ""}]
2675
+ else:
2676
+ top_ancestors = []
2677
+ if id_to_dirnode:
2678
+ add_ancestor = top_ancestors.append
2679
+ tid = top_id
2680
+ while tid and tid in id_to_dirnode:
2681
+ name, pid = id_to_dirnode[tid]
2682
+ add_ancestor({"id": tid, "parent_id": pid, "name": name})
2683
+ tid = pid
2684
+ if not tid:
2685
+ add_ancestor({"id": 0, "parent_id": 0, "name": ""})
2686
+ top_ancestors.reverse()
2588
2687
  @as_gen_step
2589
2688
  def fetch_dirs(id: int | str, /):
2689
+ nonlocal top_ancestors
2590
2690
  if id:
2591
2691
  if cid:
2592
2692
  do_next: Callable = anext if async_ else next
2593
- yield do_next(_iter_fs_files(
2693
+ attr = yield do_next(_iter_fs_files(
2594
2694
  client,
2595
2695
  to_id(id),
2596
2696
  page_size=1,
@@ -2598,9 +2698,10 @@ def iter_files_with_path_skim(
2598
2698
  async_=async_,
2599
2699
  **request_kwargs,
2600
2700
  ))
2701
+ top_ancestors = attr["top_ancestors"]
2601
2702
  yield through(iter_download_nodes(
2602
2703
  client,
2603
- to_pickcode(id, stable_point),
2704
+ client.to_pickcode(id),
2604
2705
  files=False,
2605
2706
  id_to_dirnode=id_to_dirnode,
2606
2707
  max_workers=max_workers,
@@ -2664,6 +2765,212 @@ def iter_files_with_path_skim(
2664
2765
  return run_gen_step_iter(gen_step, async_)
2665
2766
 
2666
2767
 
2768
+ @overload
2769
+ def traverse_tree(
2770
+ client: str | P115Client,
2771
+ cid: int | str = 0,
2772
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
2773
+ app: str = "android",
2774
+ max_workers: None | int = None,
2775
+ *,
2776
+ async_: Literal[False] = False,
2777
+ **request_kwargs,
2778
+ ) -> Iterator[dict]:
2779
+ ...
2780
+ @overload
2781
+ def traverse_tree(
2782
+ client: str | P115Client,
2783
+ cid: int | str = 0,
2784
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
2785
+ app: str = "android",
2786
+ max_workers: None | int = None,
2787
+ *,
2788
+ async_: Literal[True],
2789
+ **request_kwargs,
2790
+ ) -> AsyncIterator[dict]:
2791
+ ...
2792
+ def traverse_tree(
2793
+ client: str | P115Client,
2794
+ cid: int | str = 0,
2795
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
2796
+ app: str = "android",
2797
+ max_workers: None | int = None,
2798
+ *,
2799
+ async_: Literal[False, True] = False,
2800
+ **request_kwargs,
2801
+ ) -> Iterator[dict] | AsyncIterator[dict]:
2802
+ """遍历目录树,获取文件信息
2803
+
2804
+ :param client: 115 客户端或 cookies
2805
+ :param cid: 目录 id 或 pickcode
2806
+ :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
2807
+ :param app: 使用指定 app(设备)的接口
2808
+ :param max_workers: 最大并发数,如果为 None 或 <= 0,则自动确定
2809
+ :param async_: 是否异步
2810
+ :param request_kwargs: 其它请求参数
2811
+
2812
+ :return: 迭代器,返回此目录内的(仅文件)文件信息
2813
+ """
2814
+ if isinstance(client, str):
2815
+ client = P115Client(client, check_for_relogin=True)
2816
+ if id_to_dirnode is None:
2817
+ id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
2818
+ elif id_to_dirnode is ...:
2819
+ id_to_dirnode = {}
2820
+ from .download import iter_download_nodes
2821
+ to_pickcode = client.to_pickcode
2822
+ def fulfill_dir_node(attr: dict, /) -> dict:
2823
+ attr["pickcode"] = to_pickcode(attr["id"], "fa")
2824
+ attr["size"] = 0
2825
+ attr["sha1"] = ""
2826
+ return attr
2827
+ def gen_step():
2828
+ files = iter_download_nodes(
2829
+ client,
2830
+ cid,
2831
+ files=True,
2832
+ ensure_name=True,
2833
+ id_to_dirnode=id_to_dirnode,
2834
+ app=app,
2835
+ max_workers=max_workers,
2836
+ async_=async_,
2837
+ **request_kwargs,
2838
+ )
2839
+ with cache_loading(files) as (cache, task):
2840
+ yield YieldFrom(do_map(fulfill_dir_node, iter_download_nodes(
2841
+ client,
2842
+ cid,
2843
+ files=False,
2844
+ id_to_dirnode=id_to_dirnode,
2845
+ app=app,
2846
+ max_workers=max_workers,
2847
+ async_=async_,
2848
+ **request_kwargs,
2849
+ )))
2850
+ if isinstance(task, Task):
2851
+ yield task
2852
+ else:
2853
+ task.result()
2854
+ yield YieldFrom(cache)
2855
+ yield YieldFrom(files)
2856
+ return run_gen_step_iter(gen_step, async_)
2857
+
2858
+
2859
+ @overload
2860
+ def traverse_tree_with_path(
2861
+ client: str | P115Client,
2862
+ cid: int | str = 0,
2863
+ with_ancestors: bool = False,
2864
+ escape: None | bool | Callable[[str], str] = True,
2865
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
2866
+ app: str = "android",
2867
+ max_workers: None | int = None,
2868
+ *,
2869
+ async_: Literal[False] = False,
2870
+ **request_kwargs,
2871
+ ) -> Iterator[dict]:
2872
+ ...
2873
+ @overload
2874
+ def traverse_tree_with_path(
2875
+ client: str | P115Client,
2876
+ cid: int | str = 0,
2877
+ with_ancestors: bool = False,
2878
+ escape: None | bool | Callable[[str], str] = True,
2879
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
2880
+ app: str = "android",
2881
+ max_workers: None | int = None,
2882
+ *,
2883
+ async_: Literal[True],
2884
+ **request_kwargs,
2885
+ ) -> AsyncIterator[dict]:
2886
+ ...
2887
+ def traverse_tree_with_path(
2888
+ client: str | P115Client,
2889
+ cid: int | str = 0,
2890
+ with_ancestors: bool = False,
2891
+ escape: None | bool | Callable[[str], str] = True,
2892
+ id_to_dirnode: None | EllipsisType | MutableMapping[int, tuple[str, int] | DirNode] = None,
2893
+ app: str = "android",
2894
+ max_workers: None | int = None,
2895
+ *,
2896
+ async_: Literal[False, True] = False,
2897
+ **request_kwargs,
2898
+ ) -> Iterator[dict] | AsyncIterator[dict]:
2899
+ """遍历目录树,获取文件信息(包含 "path",可选 "ancestors")
2900
+
2901
+ :param client: 115 客户端或 cookies
2902
+ :param cid: 目录 id 或 pickcode
2903
+ :param with_ancestors: 文件信息中是否要包含 "ancestors"
2904
+ :param escape: 对文件名进行转义
2905
+
2906
+ - 如果为 None,则不处理;否则,这个函数用来对文件名中某些符号进行转义,例如 "/" 等
2907
+ - 如果为 True,则使用 `posixpatht.escape`,会对文件名中 "/",或单独出现的 "." 和 ".." 用 "\\" 进行转义
2908
+ - 如果为 False,则使用 `posix_escape_name` 函数对名字进行转义,会把文件名中的 "/" 转换为 "|"
2909
+ - 如果为 Callable,则用你所提供的调用,以或者转义后的名字
2910
+
2911
+ :param id_to_dirnode: 字典,保存 id 到对应文件的 `DirNode(name, parent_id)` 命名元组的字典
2912
+ :param app: 使用指定 app(设备)的接口
2913
+ :param max_workers: 最大并发数,如果为 None 或 <= 0,则自动确定
2914
+ :param async_: 是否异步
2915
+ :param request_kwargs: 其它请求参数
2916
+
2917
+ :return: 迭代器,返回此目录内的(仅文件)文件信息
2918
+ """
2919
+ if isinstance(client, str):
2920
+ client = P115Client(client, check_for_relogin=True)
2921
+ if id_to_dirnode is None:
2922
+ id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
2923
+ elif id_to_dirnode is ...:
2924
+ id_to_dirnode = {}
2925
+ from .download import iter_download_nodes
2926
+ to_pickcode = client.to_pickcode
2927
+ def fulfill_dir_node(attr: dict, /) -> dict:
2928
+ attr["pickcode"] = to_pickcode(attr["id"], "fa")
2929
+ attr["size"] = 0
2930
+ attr["sha1"] = ""
2931
+ return attr
2932
+ def gen_step():
2933
+ files = iter_download_nodes(
2934
+ client,
2935
+ cid,
2936
+ files=True,
2937
+ ensure_name=True,
2938
+ id_to_dirnode=id_to_dirnode,
2939
+ app=app,
2940
+ max_workers=max_workers,
2941
+ async_=async_,
2942
+ **request_kwargs,
2943
+ )
2944
+ with cache_loading(files) as (cache, task):
2945
+ yield YieldFrom(do_map(fulfill_dir_node, iter_dirs_with_path(
2946
+ client,
2947
+ cid,
2948
+ with_ancestors=with_ancestors,
2949
+ escape=escape,
2950
+ id_to_dirnode=id_to_dirnode,
2951
+ app=app,
2952
+ max_workers=max_workers,
2953
+ async_=async_,
2954
+ **request_kwargs,
2955
+ )))
2956
+ if isinstance(task, Task):
2957
+ yield task
2958
+ else:
2959
+ task.result()
2960
+ add_top = _make_top_adder(to_id(cid), id_to_dirnode)
2961
+ yield YieldFrom(do_map(add_top, ensure_attr_path(
2962
+ client,
2963
+ chain(cache, files), # type: ignore
2964
+ with_ancestors=with_ancestors,
2965
+ escape=escape,
2966
+ id_to_dirnode=id_to_dirnode,
2967
+ app=app,
2968
+ async_=async_,
2969
+ **request_kwargs,
2970
+ )))
2971
+ return run_gen_step_iter(gen_step, async_)
2972
+
2973
+
2667
2974
  @overload
2668
2975
  def iter_nodes(
2669
2976
  client: str | P115Client,
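`traverse_tree` prefetches the file listing in the background (via `cache_loading`) while it yields the directory nodes, which `fulfill_dir_node` pads with `size=0`, `sha1=""` and an "fa"-prefixed pickcode, and then emits the cached plus remaining files; `traverse_tree_with_path` does the same but also resolves "path"/"ancestors" and the top fields. A hedged usage sketch that leans on the blanked-out sha1 to tell the two kinds of node apart:

```python
from p115client import P115Client
from p115client.tool.iterdir import traverse_tree

def count_nodes(client: P115Client, cid: int = 0) -> tuple[int, int]:
    """Illustrative only: count directories vs. files streamed by the new traverse_tree."""
    dirs = files = 0
    for node in traverse_tree(client, cid):
        if node.get("sha1"):   # directory nodes are emitted with sha1 == ""
            files += 1
        else:
            dirs += 1
    return dirs, files
```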
@@ -2872,7 +3179,6 @@ def iter_nodes_by_pickcode(
2872
3179
  """
2873
3180
  if isinstance(client, str):
2874
3181
  client = P115Client(client, check_for_relogin=True)
2875
- stable_point = client.pickcode_stable_point
2876
3182
  if id_to_dirnode is None:
2877
3183
  id_to_dirnode = ID_TO_DIRNODE_CACHE[client.user_id]
2878
3184
  methods: list[Callable] = []
@@ -2910,7 +3216,7 @@ def iter_nodes_by_pickcode(
2910
3216
  project,
2911
3217
  conmap(
2912
3218
  get_response,
2913
- (to_pickcode(pc, stable_point) for pc in pickcodes),
3219
+ map(client.to_pickcode, pickcodes),
2914
3220
  max_workers=max_workers,
2915
3221
  kwargs=request_kwargs,
2916
3222
  async_=async_,
@@ -3096,7 +3402,7 @@ def iter_nodes_using_info(
3096
3402
  return None
3097
3403
  check_response(resp)
3098
3404
  if id_to_dirnode is not ...:
3099
- _update_resp_2_id_to_dirnode(resp, id_to_dirnode)
3405
+ _update_resp_ancestors(resp, id_to_dirnode)
3100
3406
  return resp
3101
3407
  return do_filter(None, do_map(
3102
3408
  project,
@@ -4214,6 +4520,7 @@ def share_search_iter(
4214
4520
 
4215
4521
  '''
4216
4522
  # TODO: 需要优化,大优化,优化不好,就删了
4523
+ # TODO: 可以在拉取的同时,检测其它待拉取目录大小,但需要设定冷却时间(例如一秒最多 10 次查询)
4217
4524
  @overload
4218
4525
  def traverse_files(
4219
4526
  client: str | P115Client,
{p115client-0.0.5.14.1 → p115client-0.0.5.14.3}/pyproject.toml
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "p115client"
3
- version = "0.0.5.14.1"
3
+ version = "0.0.5.14.3"
4
4
  description = "Python 115 webdisk client."
5
5
  authors = ["ChenyangGao <wosiwujm@gmail.com>"]
6
6
  license = "MIT"
@@ -49,7 +49,7 @@ python-filewrap = ">=0.2.8"
49
49
  python-hashtools = ">=0.0.3.3"
50
50
  python-httpfile = ">=0.0.5.2"
51
51
  python-http_request = ">=0.0.6"
52
- python-iterutils = ">=0.2.5.3"
52
+ python-iterutils = ">=0.2.5.4"
53
53
  python-property = ">=0.0.3"
54
54
  python-startfile = ">=0.0.2"
55
55
  python-undefined = ">=0.0.3"