p115client-0.0.5.5.4-py3-none-any.whl → p115client-0.0.5.5.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
p115client/client.py CHANGED
@@ -6922,13 +6922,13 @@ class P115Client:
 
    - 全部: 0
    - ??: 1
-   - ??: 2
+   - 离线下载: 2
    - 播放视频: 3
    - 上传: 4
    - ??: 5
    - ??: 6
    - 接收: 7
-   - 移入: 8
+   - 移动: 8
 
    - with_file: 0 | 1 = 0
    """
@@ -7077,13 +7077,13 @@ class P115Client:
 
    - 全部: 0
    - ??: 1
-   - ??: 2
+   - 离线下载: 2
    - 播放视频: 3
    - 上传: 4
    - ??: 5
    - ??: 6
    - 接收: 7
-   - 移入: 8
+   - 移动: 8
 
    - with_file: 0 | 1 = 0
    """
@@ -7141,9 +7141,9 @@ class P115Client:
    - 播放视频: 3
    - 上传: 4
    - ??: 5
-   - ??: 6(似乎和离线下载有关)
+   - ??: 6
    - 接收: 7
-   - 移入: 8
+   - 移动: 8
    """
    api = complete_webapi("/history/list", base_url=base_url)
    if isinstance(payload, (int, str)):
@@ -7202,9 +7202,9 @@ class P115Client:
    - 播放视频: 3
    - 上传: 4
    - ??: 5
-   - ??: 6(似乎和离线下载有关)
+   - ??: 6
    - 接收: 7
-   - 移入: 8
+   - 移动: 8
    """
    api = complete_proapi("/history/list", base_url, app)
    if isinstance(payload, (int, str)):
@@ -8382,6 +8382,13 @@ class P115Client:
 
    POST https://webapi.115.com/files/move
 
+   .. caution::
+       你可以把文件或目录移动到其它目录 id 下,即使是不存在的 id
+
+       因此,我定义了一个概念,悬空节点,此节点的 aid=1,但它有一个祖先节点,要么不存在,要么 aid != 1
+
+       你可以用 `P115Client.tool_space` 方法,使用【校验空间】功能,把所有悬空节点找出来,放到根目录下的【修复文件】目录,此接口一天只能用一次
+
    :payload:
        - fid: int | str 💡 文件或目录 id,只接受单个 id
        - fid[]: int | str
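
The new caution says that /files/move accepts any target directory id, even one that does not exist, and that doing so produces what the author calls a dangling node: a node with aid=1 whose ancestry contains a node that is missing or has aid != 1. It also points to `P115Client.tool_space`, whose 校验空间 (space check) gathers all dangling nodes into a 修复文件 (repair files) directory under the root, at most once per day. A minimal sketch of that recovery path, assuming `tool_space` can be called without arguments (check its docstring before relying on this):

    from p115client import P115Client

    client = P115Client(open("115-cookies.txt").read())

    # Trigger the space check described in the caution; the endpoint is
    # rate-limited to once per day, so this may fail if already used today.
    resp = client.tool_space()
    print(resp)
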
@@ -8407,8 +8414,9 @@ class P115Client:
    @overload
    def fs_move_app(
        self,
-       payload: dict,
+       payload: int | str | dict | Iterable[int | str],
        /,
+       pid: int = 0,
        app: str = "android",
        base_url: bool | str | Callable[[], str] = False,
        *,
@@ -8419,8 +8427,9 @@ class P115Client:
    @overload
    def fs_move_app(
        self,
-       payload: dict,
+       payload: int | str | dict | Iterable[int | str],
        /,
+       pid: int = 0,
        app: str = "android",
        base_url: bool | str | Callable[[], str] = False,
        *,
@@ -8430,8 +8439,9 @@ class P115Client:
        ...
    def fs_move_app(
        self,
-       payload: dict,
+       payload: int | str | dict | Iterable[int | str],
        /,
+       pid: int = 0,
        app: str = "android",
        base_url: bool | str | Callable[[], str] = False,
        *,
@@ -8448,7 +8458,16 @@ class P115Client:
        - user_id: int | str = <default> 💡 不用管
        """
        api = complete_proapi("/files/move", base_url, app)
-       payload = dict(payload, user_id=self.user_id)
+       if isinstance(payload, (int, str)):
+           payload = {"ids": payload, "user_id": self.user_id}
+       elif isinstance(payload, dict):
+           payload = dict(payload, user_id=self.user_id)
+       else:
+           payload = {f"fid[{i}]": fid for i, fid in enumerate(payload)}
+           if not payload:
+               return {"state": False, "message": "no op"}
+           payload["user_id"] = self.user_id
+       payload.setdefault("pid", pid)
        return self.request(url=api, method="POST", data=payload, async_=async_, **request_kwargs)
 
    @overload
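
With this change `fs_move_app` no longer requires a pre-built dict: a single id, an iterable of ids, or a payload dict are all accepted, and the target directory can be passed as the new `pid` argument (an empty iterable short-circuits to a "no op" response). A usage sketch with placeholder ids:

    from p115client import P115Client

    client = P115Client(open("115-cookies.txt").read())

    # Move a single file or directory (id 123456) into directory 789.
    client.fs_move_app(123456, pid=789)

    # Move several ids at once; they are expanded to fid[0], fid[1], ... internally.
    client.fs_move_app([123456, 234567, 345678], pid=789)

    # A prepared payload dict is still passed through unchanged.
    client.fs_move_app({"ids": 123456, "pid": 789})
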
p115client/tool/download.py CHANGED
@@ -1089,7 +1089,7 @@ def make_strm(
  @overload
  def iter_download_nodes(
      client: str | P115Client,
-     pickcode: str,
+     pickcode: int | str,
      files: bool = True,
      max_workers: None | int = 1,
      *,
@@ -1100,7 +1100,7 @@ def iter_download_nodes(
  @overload
  def iter_download_nodes(
      client: str | P115Client,
-     pickcode: str,
+     pickcode: int | str,
      files: bool = True,
      max_workers: None | int = 1,
      *,
@@ -1110,7 +1110,7 @@ def iter_download_nodes(
      ...
  def iter_download_nodes(
      client: str | P115Client,
-     pickcode: str,
+     pickcode: int | str,
      files: bool = True,
      max_workers: None | int = 1,
      *,
@@ -1120,7 +1120,7 @@ def iter_download_nodes(
  """获取一个目录内所有的文件或者目录的信息(简略)
 
  :param client: 115 客户端或 cookies
- :param pickcode: 目录的提取码
+ :param pickcode: 目录的 提取码 或者 id
  :param files: 如果为 True,则只获取文件,否则只获取目录
  :param max_workers: 最大并发数,如果为 None 或 <= 0,则默认为 20
  :param async_: 是否异步
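
`pickcode` now also accepts a directory id: when an int is passed, the new code first resolves it to a pick code via `fs_file_skim` and only then starts paging. A usage sketch (the pick code and id below are placeholders):

    from p115client import P115Client
    from p115client.tool.download import iter_download_nodes

    client = P115Client(open("115-cookies.txt").read())

    # As before, a pick code string works ...
    for node in iter_download_nodes(client, "ecjq9ichcb40lzlvx"):
        print(node)

    # ... and now a directory id does too; it is resolved internally.
    for node in iter_download_nodes(client, 2593093001609739968, files=False):
        print(node)
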
@@ -1134,9 +1134,14 @@ def iter_download_nodes(
          method = client.download_files
      else:
          method = client.download_folders
-     request_kwargs.setdefault("base_url", cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__)
      if max_workers == 1:
          def gen_step():
+             nonlocal pickcode
+             if isinstance(pickcode, int):
+                 resp = yield client.fs_file_skim(pickcode, async_=async_, **request_kwargs)
+                 check_response(resp)
+                 pickcode = resp["data"][0]["pick_code"]
+             request_kwargs.setdefault("base_url", cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__)
              for i in count(1):
                  payload = {"pickcode": pickcode, "page": i}
                  resp = yield method(payload, async_=async_, **request_kwargs)
@@ -1174,7 +1179,7 @@ def iter_download_nodes(
          if not data["has_next_page"]:
              max_page = page
      def gen_step():
-         nonlocal max_workers
+         nonlocal max_workers, pickcode
          if async_:
              if max_workers is None or max_workers <= 0:
                  max_workers = 20
@@ -1191,6 +1196,15 @@ def iter_download_nodes(
              n = executor._max_workers
              submit = executor.submit
              shutdown = lambda: executor.shutdown(False, cancel_futures=True)
+         if isinstance(pickcode, int):
+             resp = yield client.fs_file_skim(
+                 pickcode,
+                 async_=async_, # type: ignore
+                 **request_kwargs,
+             )
+             check_response(resp)
+             pickcode = resp["data"][0]["pick_code"]
+         request_kwargs.setdefault("base_url", cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__)
          try:
              sentinel = object()
              countdown: Callable
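
Both code paths above perform the same id-to-pick-code resolution with `fs_file_skim` before entering the paging loop, and only afterwards install the alternating proapi base_url. The resolution step can be mirrored standalone, as in this sketch (error handling via `check_response`, as in the diff):

    from p115client import P115Client, check_response

    def to_pickcode(client: P115Client, id_or_pickcode: int | str) -> str:
        """Return the pick code for an id; pass pick codes through unchanged."""
        if isinstance(id_or_pickcode, int):
            resp = client.fs_file_skim(id_or_pickcode)
            check_response(resp)  # raises if the API reports failure
            return resp["data"][0]["pick_code"]
        return id_or_pickcode
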
p115client/tool/fs_files.py CHANGED
@@ -47,6 +47,7 @@ def iter_fs_files(
      /,
      first_page_size: int = 0,
      page_size: int = 10_000,
+     count: int = -1,
      callback: None | Callable[[dict], Any] = None,
      app: str = "web",
      raise_for_changed_count: bool = False,
@@ -62,6 +63,7 @@ def iter_fs_files(
      /,
      first_page_size: int = 0,
      page_size: int = 10_000,
+     count: int = -1,
      callback: None | Callable[[dict], Any] = None,
      app: str = "web",
      raise_for_changed_count: bool = False,
@@ -76,6 +78,7 @@ def iter_fs_files(
      /,
      first_page_size: int = 0,
      page_size: int = 10_000,
+     count: int = -1,
      callback: None | Callable[[dict], Any] = None,
      app: str = "web",
      raise_for_changed_count: bool = False,
@@ -89,6 +92,7 @@ def iter_fs_files(
  :param payload: 目录的 id 或者详细的查询参数
  :param first_page_size: 首次拉取的分页大小,如果 <= 0,则自动确定
  :param page_size: 分页大小,如果 <= 0,则自动确定
+ :param count: 文件总数
  :param callback: 回调函数,调用后,会获得一个值,会添加到返回值中,key 为 "callback"
  :param app: 使用此设备的接口
  :param raise_for_changed_count: 分批拉取时,发现总数发生变化后,是否报错
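
`count` used to be a local variable initialized to -1 inside the function body (see the removal in the next hunk); promoting it to a parameter lets callers seed the expected total up front, while the default -1 keeps the old behaviour of learning it from the first response. A sketch with placeholder values:

    from p115client import P115Client
    from p115client.tool.fs_files import iter_fs_files

    client = P115Client(open("115-cookies.txt").read())

    # Default: the first page response determines the total.
    for resp in iter_fs_files(client, 0):
        print(resp["count"], len(resp["data"]))

    # Seed the total when it is already known from an earlier listing.
    for resp in iter_fs_files(client, 0, count=123_456):
        ...
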
@@ -116,7 +120,6 @@ def iter_fs_files(
      else:
          request_kwargs.setdefault("base_url", get_proapi_origin)
      fs_files = partial(client.fs_files_app, app=app, async_=async_, **request_kwargs)
-     count = -1
      def get_files(payload: dict, /):
          nonlocal count
          while True:
@@ -167,6 +170,8 @@ def iter_fs_files_threaded(
      payload: int | str | dict = 0,
      /,
      page_size: int = 7_000,
+     count: int = -1,
+     wait_for_count: bool = False,
      callback: None | Callable[[dict], Any] = None,
      app: str = "web",
      raise_for_changed_count: bool = False,
@@ -179,6 +184,8 @@ def iter_fs_files_threaded(
  :param client: 115 网盘客户端对象
  :param payload: 目录的 id 或者详细的查询参数
  :param page_size: 分页大小,如果 <= 0,则自动确定
+ :param count: 文件总数
+ :param wait_for_count: 如果为 True,则在确定 count 前,不进行并发
  :param callback: 回调函数,调用后,会获得一个值,会添加到返回值中,key 为 "callback"
  :param app: 使用此设备的接口
  :param raise_for_changed_count: 分批拉取时,发现总数发生变化后,是否报错
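
The threaded variant (and the asynchronized one further down) gains the same `count` parameter plus `wait_for_count`: when True, the result of the first request is awaited without the cooldown timeout, so no additional pages are fetched concurrently until the total is known. A sketch with placeholder values:

    from p115client import P115Client
    from p115client.tool.fs_files import iter_fs_files_threaded

    client = P115Client(open("115-cookies.txt").read())

    # Hold off on concurrent page requests until the first response reports
    # how many entries the directory holds.
    for resp in iter_fs_files_threaded(client, 0, wait_for_count=True):
        ...

    # If the total is already known, pass it and concurrency starts at once.
    for resp in iter_fs_files_threaded(client, 0, count=50_000):
        ...
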
@@ -206,7 +213,6 @@ def iter_fs_files_threaded(
      else:
          request_kwargs.setdefault("base_url", get_proapi_origin)
      fs_files = partial(client.fs_files_app, app=app, **request_kwargs)
-     count = -1
      def get_files(payload: dict, /):
          nonlocal count
          resp = fs_files(payload)
@@ -245,7 +251,10 @@ def iter_fs_files_threaded(
      offset = payload["offset"]
      while True:
          try:
-             resp = future.result(max(0, ts + cooldown - time()))
+             if wait_for_count and count < 0:
+                 resp = future.result()
+             else:
+                 resp = future.result(max(0, ts + cooldown - time()))
          except TimeoutError:
              payload["offset"] += page_size
              if count < 0 or payload["offset"] < count:
@@ -275,6 +284,8 @@ async def iter_fs_files_asynchronized(
      payload: int | str | dict = 0,
      /,
      page_size: int = 7_000,
+     count: int = -1,
+     wait_for_count: bool = False,
      callback: None | Callable[[dict], Any] = None,
      app: str = "web",
      raise_for_changed_count: bool = False,
@@ -286,6 +297,8 @@ async def iter_fs_files_asynchronized(
  :param client: 115 网盘客户端对象
  :param payload: 目录的 id 或者详细的查询参数
  :param page_size: 分页大小,如果 <= 0,则自动确定
+ :param count: 文件总数
+ :param wait_for_count: 如果为 True,则在确定 count 前,不进行并发
  :param callback: 回调函数,调用后,会获得一个值,会添加到返回值中,key 为 "callback"
  :param app: 使用此设备的接口
  :param raise_for_changed_count: 分批拉取时,发现总数发生变化后,是否报错
@@ -312,7 +325,6 @@ async def iter_fs_files_asynchronized(
      else:
          request_kwargs.setdefault("base_url", get_proapi_origin)
      fs_files = partial(client.fs_files_app, app=app, **request_kwargs)
-     count = -1
      async def get_files(payload: dict, /):
          nonlocal count
          resp = await fs_files(payload, async_=True)  # type: ignore
@@ -355,7 +367,10 @@ async def iter_fs_files_asynchronized(
      offset = payload["offset"]
      while True:
          try:
-             resp = await wait_for(shield(task), max(0, ts + cooldown - time()))
+             if wait_for_count and count < 0:
+                 resp = await task
+             else:
+                 resp = await wait_for(shield(task), max(0, ts + cooldown - time()))
          except TimeoutError:
              payload["offset"] += page_size
              if count < 0 or payload["offset"] < count:
p115client/tool/iterdir.py CHANGED
@@ -9,8 +9,8 @@ __all__ = [
      "iter_nodes_skim", "iter_stared_dirs_raw", "iter_stared_dirs", "ensure_attr_path",
      "ensure_attr_path_by_category_get", "iterdir_raw", "iterdir", "iterdir_limited",
      "iter_files_raw", "iter_files", "traverse_files", "iter_dupfiles", "iter_image_files",
-     "iter_dangling_files", "share_extract_payload", "share_iterdir", "share_iter_files",
-     "iter_selected_nodes", "iter_selected_nodes_by_pickcode", "iter_selected_nodes_using_category_get",
+     "share_extract_payload", "share_iterdir", "share_iter_files", "iter_selected_nodes",
+     "iter_selected_nodes_by_pickcode", "iter_selected_nodes_using_category_get",
      "iter_selected_nodes_using_edit", "iter_selected_nodes_using_star_event",
      "iter_selected_dirs_using_star", "iter_files_with_dirname", "iter_files_with_path",
      "iter_files_with_path_by_export_dir", "iter_parents_3_level", "iter_dir_nodes",
@@ -3040,123 +3040,6 @@ def iter_image_files(
      return run_gen_step_iter(gen_step, async_=async_)
 
 
- @overload
- def iter_dangling_files(
-     client: str | P115Client,
-     cid: int = 0,
-     page_size: int = 0,
-     suffix: str = "",
-     type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
-     normalize_attr: Callable[[dict], dict] = normalize_attr,
-     app: str = "web",
-     *,
-     async_: Literal[False] = False,
-     **request_kwargs,
- ) -> Iterator[dict]:
-     ...
- @overload
- def iter_dangling_files(
-     client: str | P115Client,
-     cid: int = 0,
-     page_size: int = 0,
-     suffix: str = "",
-     type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
-     normalize_attr: Callable[[dict], dict] = normalize_attr,
-     app: str = "web",
-     *,
-     async_: Literal[True],
-     **request_kwargs,
- ) -> AsyncIterator[dict]:
-     ...
- def iter_dangling_files(
-     client: str | P115Client,
-     cid: int = 0,
-     page_size: int = 0,
-     suffix: str = "",
-     type: Literal[1, 2, 3, 4, 5, 6, 7, 99] = 99,
-     normalize_attr: Callable[[dict], dict] = normalize_attr,
-     app: str = "web",
-     *,
-     async_: Literal[False, True] = False,
-     **request_kwargs,
- ) -> Iterator[dict] | AsyncIterator[dict]:
-     """找出所有悬空的文件,即所在的目录 id 不为 0 且不存在
-
-     .. todo::
-         实际上,广义的悬空,包括所有这样的文件或目录,它们的祖先节点中存在一个节点,这个节点的 id 目前不存在于网盘(可能被删除或移入回收站)
-
-     .. danger::
-         你可以用 `P115Client.fs_move` 方法,把文件或目录随意移动到任何目录 id 下,即使这个 id 不存在
-
-     .. note::
-         你可以用 `P115Client.tool_space` 方法,把所有悬空文件找出来,放到专门的目录中,但这个接口一天只能用一次
-
-     :param client: 115 客户端或 cookies
-     :param cid: 目录 id
-     :param page_size: 分页大小
-     :param suffix: 后缀名(优先级高于 type)
-     :param type: 文件类型
-
-         - 1: 文档
-         - 2: 图片
-         - 3: 音频
-         - 4: 视频
-         - 5: 压缩包
-         - 6: 应用
-         - 7: 书籍
-         - 99: 仅文件
-
-     :param normalize_attr: 把数据进行转换处理,使之便于阅读
-     :param app: 使用某个 app (设备)的接口
-     :param async_: 是否异步
-     :param request_kwargs: 其它请求参数
-
-     :return: 迭代器,返回此目录内的(仅文件)文件信息
-     """
-     if not isinstance(client, P115Client):
-         client = P115Client(client, check_for_relogin=True)
-     if page_size <= 0:
-         page_size = 10_000
-     elif page_size < 16:
-         page_size = 16
-     if app in ("", "web", "desktop", "harmony"):
-         fs_files: Callable = client.fs_files
-     else:
-         fs_files = partial(client.fs_files_app, app=app)
-     def gen_step():
-         na_cids: set[int] = set()
-         ok_cids: set[int] = set()
-         payload = {"cid": cid, "limit": page_size, "offset": 0, "suffix": suffix, "type": type}
-         while True:
-             resp = yield fs_files(payload, async_=async_, **request_kwargs)
-             if cid and int(resp["path"][-1]["cid"]) != cid:
-                 break
-             if resp["offset"] != payload["offset"]:
-                 break
-             t = tuple(map(_overview_attr, resp["data"]))
-             pids = {
-                 pid for a in t
-                 if (pid := a.parent_id) not in na_cids
-                 and pid not in ok_cids
-             }
-             if pids:
-                 if async_:
-                     na_cids.update(iter_nodes_skim(client, pids, **request_kwargs))
-                 else:
-                     yield async_foreach(
-                         na_cids.add,
-                         iter_nodes_skim(client, pids, async_=True, **request_kwargs),
-                     )
-                 ok_cids |= pids - na_cids
-             for a, info in zip(t, resp["data"]):
-                 if a.parent_id in na_cids:
-                     yield Yield(normalize_attr(info), identity=True)
-             payload["offset"] += len(resp["data"])  # type: ignore
-             if payload["offset"] >= resp["count"]:
-                 break
-     return run_gen_step_iter(gen_step, async_=async_)
-
-
  def share_extract_payload(link: str, /) -> SharePayload:
      """从链接中提取 share_code 和 receive_code
 
p115client/tool/life.py CHANGED
@@ -11,7 +11,7 @@ __doc__ = "这个模块提供了一些和 115 生活操作事件有关的函数"
  from asyncio import sleep as async_sleep
  from collections.abc import AsyncIterator, Container, Coroutine, Iterator
  from functools import partial
- from itertools import count
+ from itertools import count, cycle
  from time import time, sleep
  from typing import overload, Any, Final, Literal
 
@@ -243,6 +243,7 @@ def iter_life_behavior_once(
      if app in ("", "web", "desktop", "harmony"):
          life_behavior_detail = partial(client.life_behavior_detail, **request_kwargs)
      else:
+         request_kwargs.setdefault("base_url", cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__)
          life_behavior_detail = partial(client.life_behavior_detail_app, app=app, **request_kwargs)
      if first_batch_size <= 0:
          first_batch_size = 64 if from_time or from_id else 1000
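
The `cycle(...).__next__` expression used here (and in the download.py hunks above) installs a zero-argument callable as the default `base_url`, so successive requests alternate between the HTTP and HTTPS proapi origins. The mechanism is plain itertools:

    from itertools import cycle

    # A zero-argument callable returning a different origin on every call.
    get_base_url = cycle(("http://proapi.115.com", "https://proapi.115.com")).__next__

    print(get_base_url())  # http://proapi.115.com
    print(get_base_url())  # https://proapi.115.com
    print(get_base_url())  # http://proapi.115.com again
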
p115client-0.0.5.5.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: p115client
- Version: 0.0.5.5.4
+ Version: 0.0.5.5.5
  Summary: Python 115 webdisk client.
  Home-page: https://github.com/ChenyangGao/p115client
  License: MIT
p115client-0.0.5.5.5.dist-info/RECORD CHANGED
@@ -1,23 +1,23 @@
  LICENSE,sha256=o5242_N2TgDsWwFhPn7yr8YJNF7XsJM5NxUMtcT97bc,1100
  p115client/__init__.py,sha256=1mx7njuAlqcuEWONTjSiiGnXyyNyqOcJyNX1FMHqQ-4,214
  p115client/_upload.py,sha256=4Qt7bJOAgob3lcbX1yUZ0hilkAXABD2J9BSLYPIGybQ,29900
- p115client/client.py,sha256=XtCw_KpNgqvNZYaQzD-c5at7N9dg2OrPI9tp-2s24uU,615516
+ p115client/client.py,sha256=FpB-gG4f3Vv5ZwSET4jBGVJg9FcNPaYEj_aGN8t4e8g,616516
  p115client/const.py,sha256=maIZfJAiUuEnXIKc8TMAyW_UboDUJPwYpPS8LjPFp_U,4321
  p115client/exception.py,sha256=Ugjr__aSlYRDYwoOz7273ngV-gFX2z-ohsJmCba8nnQ,2657
  p115client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  p115client/tool/__init__.py,sha256=2YrKoAcFYOuqu2nUBoPVhxMOseAvcLE_LcnbZV11UKw,324
- p115client/tool/download.py,sha256=uKNKUJML2LWEI4JVoakMEafmbPvo-OV37Oi0d0qu3v4,55486
+ p115client/tool/download.py,sha256=KY-8QHhCm_Mk-ygtuQXuPtoIIJHRWg5XNQQx0WDncGA,56232
  p115client/tool/edit.py,sha256=NQiyVoOKKed6VbUj_AamGmTWXfcioOFqq5buFUWOh_8,16428
  p115client/tool/export_dir.py,sha256=QFHK5HKllTV6ZgCnnf_BFd4CQq-gqcuLx49clcbvmDo,23871
- p115client/tool/fs_files.py,sha256=tpW054WUYqa1J1A9TiSAvHh4RCJ53bc6rwTGlbUswhA,14973
- p115client/tool/iterdir.py,sha256=rvy1Rm5T7TX4-gazeuSzigDe7-D91ZgDCMaSx-Vltig,188125
- p115client/tool/life.py,sha256=kWUfHfWJxEMsX939Vu6DEaLI_Qcbfw2yGDHinZqjxas,17461
+ p115client/tool/fs_files.py,sha256=aOU2Ea7_CLUfzq3_UJx2SBF8ubJYAkZ4J-e2H3OfqpA,15602
+ p115client/tool/iterdir.py,sha256=hpzOCaYi0-8XA6yS3RVnMp9a1FT25qjpZwnPJYNOXYw,183974
+ p115client/tool/life.py,sha256=8STXfjL8DPp0YTLFsE7Dt_6c2wj_0AvScq6WrjX9i9M,17583
  p115client/tool/pool.py,sha256=vFV3t4g8KuQ4Rlttd1bs7d0Lc8jFUufdmDQweMcLZZw,9779
  p115client/tool/request.py,sha256=SWsezW9EYZGS3R-TbZxMG-8bN3YWJ0-GzgvKlvRBSCM,7042
  p115client/tool/upload.py,sha256=qK1OQYxP-Faq2eMDhc5sBXJiSr8m8EZ_gb0O_iA2TrI,15915
  p115client/tool/xys.py,sha256=6NZUkWNsWW6dHDBUpMitmE-1YlkobWIUQ86CBjKJ4mQ,3633
  p115client/type.py,sha256=e4g9URQBE23XN2dGomldj8wC6NlDWBBSVC5Bmd8giBc,5993
- p115client-0.0.5.5.4.dist-info/LICENSE,sha256=o5242_N2TgDsWwFhPn7yr8YJNF7XsJM5NxUMtcT97bc,1100
- p115client-0.0.5.5.4.dist-info/METADATA,sha256=T_JVuV9ZA4ZQmawUE7xfeR2Gr7Z6cAWKjgJmZihcMIg,7143
- p115client-0.0.5.5.4.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- p115client-0.0.5.5.4.dist-info/RECORD,,
+ p115client-0.0.5.5.5.dist-info/LICENSE,sha256=o5242_N2TgDsWwFhPn7yr8YJNF7XsJM5NxUMtcT97bc,1100
+ p115client-0.0.5.5.5.dist-info/METADATA,sha256=9Q9J8L5qSZGbLuKxSErtiIjKhwkcjhN_2O1aVdvyrFY,7143
+ p115client-0.0.5.5.5.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ p115client-0.0.5.5.5.dist-info/RECORD,,