p115client 0.0.5.12__py3-none-any.whl → 0.0.5.12.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- p115client/_upload.py +3 -3
- p115client/client.py +216 -87
- p115client/const.py +7 -4
- p115client/tool/attr.py +1 -1
- p115client/tool/auth.py +1 -1
- p115client/tool/download.py +15 -23
- p115client/tool/edit.py +2 -2
- p115client/tool/export_dir.py +6 -6
- p115client/tool/fs_files.py +52 -36
- p115client/tool/history.py +2 -2
- p115client/tool/iterdir.py +37 -42
- p115client/tool/life.py +4 -4
- p115client/tool/offline.py +41 -9
- p115client/tool/pool.py +5 -5
- p115client/tool/upload.py +9 -9
- p115client/tool/xys.py +5 -5
- {p115client-0.0.5.12.dist-info → p115client-0.0.5.12.2.dist-info}/METADATA +2 -2
- p115client-0.0.5.12.2.dist-info/RECORD +28 -0
- p115client-0.0.5.12.dist-info/RECORD +0 -28
- {p115client-0.0.5.12.dist-info → p115client-0.0.5.12.2.dist-info}/LICENSE +0 -0
- {p115client-0.0.5.12.dist-info → p115client-0.0.5.12.2.dist-info}/WHEEL +0 -0
p115client/tool/iterdir.py
CHANGED
@@ -40,9 +40,9 @@ from weakref import WeakValueDictionary
 from asynctools import async_chain, async_filter, async_map, to_list
 from concurrenttools import run_as_thread, taskgroup_map, threadpool_map
 from iterutils import (
-
-
-
+bfs_gen, chunked, ensure_aiter, foreach, flatten, iter_unique,
+run_gen_step, run_gen_step_iter, through, async_through, with_iter_next,
+Yield, YieldFrom,
 )
 from iter_collect import iter_keyed_dups, SupportsLT
 from orjson import loads
@@ -238,7 +238,7 @@ def get_path_to_cid(
 return "/" + path
 else:
 return path
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -370,7 +370,7 @@ def get_file_count(
 node = DirNode(info["file_name"], pid)
 id_to_dirnode[(pid := int(info["file_id"]))] = node
 return int(resp["count"]) - int(resp.get("folder_count") or 0)
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -590,7 +590,7 @@ def get_ancestors(
 if not resp.get("sha1") and id_to_dirnode is not ...:
 id_to_dirnode[ans["id"]] = DirNode(ans["name"], ans["parent_id"])
 return ancestors
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -692,7 +692,7 @@ def get_ancestors_to_cid(
 parts.append({"id": 0, "name": "", "parent_id": 0})
 parts.reverse()
 return parts
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 # TODO: use the search API to look up a name under a specific directory, in order to reduce risk-control triggering
@@ -887,7 +887,7 @@ def get_id_to_path(
 if ensure_file is None or ensure_file ^ attr["is_dir"]:
 return P115ID(attr["id"], attr, about="path")
 raise error
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -941,7 +941,7 @@ def get_id_to_pickcode(
 check_response(resp)
 data = resp["data"]
 return P115ID(data["file_id"], data, about="pickcode")
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -1001,7 +1001,7 @@ def get_id_to_sha1(
 else:
 raise FileNotFoundError(ENOENT, file_sha1)
 return P115ID(data["file_id"], data, about="sha1", file_sha1=file_sha1)
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -1054,7 +1054,7 @@ def iter_nodes_skim(
 for a in resp["data"]:
 a["file_name"] = unescape_115_charref(a["file_name"])
 yield YieldFrom(resp["data"])
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -1203,7 +1203,7 @@ def _iter_fs_files(
 yield Yield(info)
 except (StopAsyncIteration, StopIteration):
 pass
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -1593,7 +1593,7 @@ def ensure_attr_path[D: dict](
 attr.setdefault("ancestors", None)
 attr.setdefault("path", "")
 yield Yield(attr)
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -2078,7 +2078,7 @@ def iterdir(
 yield YieldFrom(do_map(process, it)) # type: ignore
 else:
 yield YieldFrom(do_map(normalize_attr, it)) # type: ignore
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 def iterdir_limited(
@@ -2187,7 +2187,7 @@ def iterdir_limited(
 attr["path"] = dirname + name
 yield attr
 def gen_step():
-resp: dict = yield run_gen_step(request,
+resp: dict = yield run_gen_step(request, async_)
 yield YieldFrom(iter_attrs(resp))
 count = int(resp["count"])
 count_fetched = len(resp["data"])
@@ -2239,7 +2239,7 @@ def iterdir_limited(
 yield YieldFrom(iter_attrs(resp))
 if diff := count_files - len(seen_files):
 warn(f"lost {diff} files: cid={cid}", category=P115Warning)
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -2609,7 +2609,7 @@ def iter_files(
 async_=async_, # type: ignore
 **request_kwargs,
 ))
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -2814,7 +2814,7 @@ def traverse_files(
 type_of_attr(attr) == type
 ):
 yield Yield(attr)
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -2885,7 +2885,6 @@ def iter_dirs(
 it = do_map(project, it)
 if with_pickcode:
 file_skim = client.fs_file_skim
-@as_gen_step(async_=async_)
 def batch_load_pickcode(batch: Sequence[dict], /):
 resp = yield file_skim(
 (a["id"] for a in batch),
@@ -2901,14 +2900,14 @@ def iter_dirs(
 def gen_step(iterable):
 batch_map = taskgroup_map if async_ else threadpool_map
 with with_iter_next(batch_map(
-batch_load_pickcode,
+lambda batch: run_gen_step(batch_load_pickcode(batch), async_),
 chunked(iterable, 3000),
 max_workers=max_workers,
 )) as get_next:
 while True:
 batch = yield get_next()
 yield YieldFrom(batch)
-it = run_gen_step_iter(gen_step(it),
+it = run_gen_step_iter(gen_step(it), async_)
 return it
@@ -3160,7 +3159,7 @@ def iter_image_files(
 if offset >= count:
 break
 payload["offset"] = offset
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -3273,7 +3272,7 @@ def share_iterdir(
 payload["offset"] += page_size # type: ignore
 if payload["offset"] >= count: # type: ignore
 break
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -3376,7 +3375,7 @@ def share_iter_files(
 yield Yield({k: attr[k] for k in ("id", "sha1", "name", "size", "path")})
 except (StopIteration, StopAsyncIteration):
 pass
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -3546,7 +3545,7 @@ def share_get_id_to_path(
 if ensure_file is None or ensure_file ^ attr["is_dir"]:
 return P115ID(attr["id"], attr, about="path")
 raise error
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -4088,7 +4087,7 @@ def iter_selected_nodes_using_star_event(
 break
 except (StopIteration, StopAsyncIteration):
 pass
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -4188,7 +4187,7 @@ def iter_selected_dirs_using_star(
 break
 except (StopIteration, StopAsyncIteration):
 pass
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -4362,7 +4361,7 @@ def iter_files_with_dirname(
 it = iter_parents_3_level(
 client,
 iter_unique((async_map if async_ else map)(
-get_pid, run_gen_step_iter(gen_step,
+get_pid, run_gen_step_iter(gen_step, async_))), # type: ignore
 async_=async_, # type: ignore
 **request_kwargs,
 )
@@ -4376,8 +4375,8 @@ def iter_files_with_dirname(
 for attr in files:
 attr["parents"] = (attr["dir_name"], *id_to_parents[attr["parent_id"]])
 yield Yield(attr)
-return run_gen_step_iter(gen_step2,
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step2, async_)
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -4550,7 +4549,7 @@ def iter_files_with_path(
 )) as get_next:
 while True:
 attr = yield get_next()
-yield run_gen_step(fetch_dirs(attr["pickcode"]),
+yield run_gen_step(fetch_dirs(attr["pickcode"]), async_)
 if with_ancestors:
 id_to_ancestors: dict[int, list[dict]] = {}
 def get_ancestors(id: int, attr: dict | tuple[str, int] | DirNode, /) -> list[dict]:
@@ -4597,7 +4596,7 @@ def iter_files_with_path(
 add_to_cache = cache.append
 if not path_already:
 if async_:
-task: Any = create_task(run_gen_step(fetch_dirs(cid),
+task: Any = create_task(run_gen_step(fetch_dirs(cid), True))
 else:
 task = run_as_thread(run_gen_step, fetch_dirs(cid))
 task.add_done_callback(set_path_already)
@@ -4641,7 +4640,7 @@ def iter_files_with_path(
 else:
 task.result()
 yield YieldFrom(map(update_path, cache))
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -4898,7 +4897,7 @@ def iter_files_with_path_by_export_dir(
 name = escape(name)
 attr["path"] = dir_path + name
 yield Yield(attr)
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -4989,7 +4988,7 @@ def iter_parents_3_level(
 ids = (async_filter if async_ else filter)(None, ids) # type: ignore
 return flatten(
 batch_map(
-lambda ids, /: run_gen_step(get_parents(ids),
+lambda ids, /: run_gen_step(get_parents(ids), async_),
 chunked(ids, 1150),
 max_workers=max_workers,
 ),
@@ -5088,12 +5087,8 @@ def iter_dir_nodes(
 "name": attr["name"],
 }
 )
-yield YieldFrom(run_gen_step_iter(
-
-may_call=False,
-async_=async_,
-))
-return run_gen_step_iter(gen_step(cid or 0), may_call=False, async_=async_)
+yield YieldFrom(run_gen_step_iter(gen_step(attr["pickcode"]), async_))
+return run_gen_step_iter(gen_step(cid or 0), async_)
 @overload
@@ -5172,5 +5167,5 @@ def search_for_any_file(
 )
 check_response(resp)
 return bool(resp["data"])
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
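Nearly every hunk in iterdir.py (and in the tool modules that follow) makes the same change: calls such as run_gen_step(gen_step, ...) and run_gen_step_iter(gen_step, ...) now pass the async_ flag as a plain positional argument, and the old may_call=False keyword is gone. For readers unfamiliar with the pattern, here is a minimal sketch of how a generator-driving helper of this kind can dispatch between sync and async execution; it is an illustrative assumption, not the actual python-iterutils implementation, and the name run_gen_step_sketch is hypothetical.

# Minimal sketch (assumption for illustration, not the real python-iterutils code).
# A function writes its logic once as a generator that yields "steps"
# (awaitables or plain callables); the helper drives it either synchronously
# or as a coroutine, depending on the async_ flag.
from inspect import isawaitable


def run_gen_step_sketch(gen, async_: bool = False):
    # Accept either a ready generator object or a zero-argument factory for one.
    if callable(gen):
        gen = gen()
    if async_:
        async def runner():
            result = None
            try:
                while True:
                    step = gen.send(result)
                    if isawaitable(step):
                        result = await step      # asynchronous step
                    elif callable(step):
                        result = step()          # synchronous step
                    else:
                        result = step            # plain value, passed back in
            except StopIteration as exc:
                return exc.value                 # the generator's return value
        return runner()
    result = None
    try:
        while True:
            step = gen.send(result)
            result = step() if callable(step) else step
    except StopIteration as exc:
        return exc.value

Under that reading, run_gen_step(gen_step, async_) in the new code behaves like run_gen_step_sketch(gen_step, async_) above: the same generator body serves both the blocking and the asyncio code paths.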
p115client/tool/life.py
CHANGED
@@ -172,7 +172,7 @@ def iter_life_list(
 else:
 sleep(1 - diff)
 end_time = int(time())
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -276,7 +276,7 @@ def iter_life_behavior_once(
 ts_last_call = time()
 resp = yield life_behavior_detail(payload, async_=async_)
 events = check_response(resp)["data"]["list"]
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -377,7 +377,7 @@ def iter_life_behavior(
 if not type and ignore_types and event["type"] in ignore_types:
 continue
 yield Yield(event)
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -469,5 +469,5 @@ def iter_life_behavior_list(
 continue
 push(event)
 yield Yield(ls)
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
p115client/tool/offline.py
CHANGED
@@ -7,12 +7,14 @@ __doc__ = "This module provides some functions related to offline downloading"
 from asyncio import sleep as async_sleep
 from collections.abc import AsyncIterator, Callable, Iterable, Iterator
+from errno import EBUSY
 from itertools import count
 from time import sleep, time
 from typing import overload, Literal
 from iterutils import run_gen_step_iter, with_iter_next, Yield, YieldFrom
 from p115client import check_response, P115Client, P115OpenClient
+from p115client.exception import BusyOSError
 @overload
@@ -22,7 +24,8 @@ def offline_iter(
 page_start: int = 1,
 page_stop: int = -1,
 cooldown: float = 0,
-
+raise_for_update: bool = False,
+use_open_api: bool = False,
 *,
 async_: Literal[False] = False,
 **request_kwargs,
@@ -35,6 +38,7 @@ def offline_iter(
 page_start: int = 1,
 page_stop: int = -1,
 cooldown: float = 0,
+raise_for_update: bool = False,
 use_open_api: bool = False,
 *,
 async_: Literal[True],
@@ -47,6 +51,7 @@ def offline_iter(
 page_start: int = 1,
 page_stop: int = -1,
 cooldown: float = 0,
+raise_for_update: bool = False,
 use_open_api: bool = False,
 *,
 async_: Literal[False, True] = False,
@@ -54,10 +59,18 @@ def offline_iter(
 ) -> Iterator[dict] | AsyncIterator[dict]:
 """Iterate over the task list and fetch task info
+.. tip::
+The task list may change between page fetches, which can lead to duplicates and omissions:
+1. tasks are added, especially ones whose status is in progress
+2. tasks are deleted
+3. a previously fetched in-progress task becomes finished
 :param client: 115 client or cookies
 :param page_start: starting page number
 :param page_stop: stopping page number (exclusive); if <= 0, no limit
 :param cooldown: cooldown between API calls, in seconds
+:param raise_for_update: whether to raise an error and stop when the list has been updated
 :param use_open_api: whether to use the open api
 :param async_: whether to run asynchronously
 :param request_kwargs: other request parameters
@@ -79,24 +92,43 @@ def offline_iter(
 offline_list = client.offline_list_open
 else:
 offline_list = client.offline_list
-
+may_sleep = cooldown > 0
+if may_sleep:
 do_sleep = async_sleep if async_ else sleep
-
+last_t: float = 0
+if raise_for_update:
+count = -1
+seen: set[str] = set()
+add_info_hash = seen.add
 for page in pages:
-if
-
-
+if may_sleep:
+if last_t and (diff := last_t + cooldown - time()) > 0:
+yield do_sleep(diff)
+last_t = time()
 resp = yield offline_list(page, async_=async_, **request_kwargs)
 check_response(resp)
 if use_open_api:
 resp = resp["data"]
+if raise_for_update:
+if count < 0:
+count = resp["count"]
+elif count != resp["count"]:
+raise BusyOSError(EBUSY, f"detected count changes: {count} != {resp['count']}")
 tasks = resp["tasks"]
 if not tasks:
 break
-
+if raise_for_update:
+for task in tasks:
+info_hash = task["info_hash"]
+if info_hash in seen:
+raise BusyOSError(EBUSY, f"detected duplicate task: info_hash={info_hash!r}")
+add_info_hash(info_hash)
+yield Yield(task)
+else:
+yield YieldFrom(resp["tasks"])
 if len(tasks) < 30 or page >= resp["page_count"]:
 break
-return run_gen_step_iter(gen_step, async_
+return run_gen_step_iter(gen_step, async_)
@@ -160,5 +192,5 @@ def offline_restart_iter(
 )
 resp["task"] = task
 yield Yield(resp)
-return run_gen_step_iter(gen_step, async_
+return run_gen_step_iter(gen_step, async_)
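Based only on the signature and imports visible in the hunks above, the new raise_for_update flag makes offline_iter fail fast with a BusyOSError when the task list changes between page requests (the total count changes, or an info_hash repeats). A usage sketch under those assumptions follows; the cookies source and the printed task fields other than info_hash are hypothetical.

# Usage sketch based on the signature shown in this diff.
# The cookies file path and the "name" field are assumptions for illustration.
from p115client import P115Client
from p115client.exception import BusyOSError
from p115client.tool.offline import offline_iter

client = P115Client(open("115-cookies.txt", encoding="utf-8").read())

try:
    # Page through the offline-download task list, at most one request every
    # 2 seconds, and abort as soon as the list is seen to change underneath us.
    for task in offline_iter(client, cooldown=2, raise_for_update=True):
        print(task["info_hash"], task.get("name"))
except BusyOSError:
    # The total count changed or an info_hash repeated between pages, so the
    # listing may contain duplicates or omissions; retry from the start later.
    print("offline task list changed while iterating; try again")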
p115client/tool/pool.py
CHANGED
@@ -87,7 +87,7 @@ def generate_auth_factory(
 "authorization": "Bearer " + resp["data"]["access_token"],
 "app_id": str(app_id),
 }
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 return make_cookies
@@ -141,7 +141,7 @@ def generate_cookies_factory(
 "cookie": "; ".join(f"{k}={v}" for k, v in resp["data"]["cookie"].items()),
 "app": app,
 }
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 return make_cookies
@@ -164,7 +164,7 @@ def generate_client_factory(
 def gen_step():
 headers = yield call(async_=async_)
 return cls(headers["cookie"])
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 return make_client
@@ -214,7 +214,7 @@ def make_pool[T](
 value = generate()
 val = ComparedWithID(value)
 return value, partial(heappush, heap_, (time(), val))
-return run_gen_step(call,
+return run_gen_step(call, async_)
 if not lock:
 setattr(get_value, "heap", heap_)
 return get_value
@@ -376,7 +376,7 @@ def call_wrap_with_pool(get_cert_headers: Callable, /, func: Callable) -> Callab
 if not isinstance(e, (AuthenticationError, LoginError)) and get_status_code(e) != 405:
 revert()
 raise
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 return update_wrapper(wrapper, func)
p115client/tool/upload.py
CHANGED
@@ -98,9 +98,9 @@ def iter_115_to_115(
 :return: iterator yielding transfer results, of 3 kinds: "good", "fail" and "skip"
 """
-@as_gen_step
+@as_gen_step
 def upload(attr: dict, pid: int, /):
-@as_gen_step
+@as_gen_step
 def read_range_bytes_or_hash(sign_check: str, /):
 if attr["is_collect"]:
 url = yield from_client.download_url(
@@ -163,7 +163,7 @@ def iter_115_to_115(
 else:
 return {"type": "fail", "attr": attr, "resp": None, "exc": e}
 key_of_id = "id" if with_root else "parent_id"
-@as_gen_step
+@as_gen_step
 def get_pid(attr: dict, /):
 if use_iter_files:
 if attr["is_collect"] and attr["size"] >= 1024 * 1024 * 115:
@@ -268,9 +268,9 @@ def iter_115_to_115_resume(
 :return: iterator yielding transfer results, of 3 kinds: "good", "fail" and "skip"
 """
-@as_gen_step
+@as_gen_step
 def upload(attr: dict, pid: int, /):
-@as_gen_step
+@as_gen_step
 def read_range_bytes_or_hash(sign_check: str, /):
 if attr["is_collect"]:
 url = yield from_client.download_url(
@@ -316,7 +316,7 @@ def iter_115_to_115_resume(
 return {"type": "fail", "attr": attr, "resp": None, "exc": e}
 dirt_to_cid: dict[tuple[str, ...], int] = {}
 key_of_id = "id" if with_root else "parent_id"
-@as_gen_step
+@as_gen_step
 def get_pid(attr: dict, /):
 if attr["is_collect"] and attr["size"] >= 1024 * 1024 * 115:
 return Return({"type": "skip", "attr": attr, "resp": None})
@@ -456,7 +456,7 @@ def iter_115_to_115_resume(
 arg_func=get_pid,
 max_workers=max_workers,
 ))
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -716,7 +716,7 @@ def multipart_upload_init(
 upload_data["parts"] = []
 upload_data["_upload_"] = None
 return upload_data
-return run_gen_step(gen_step, async_
+return run_gen_step(gen_step, async_)
 def multipart_upload_url(
@@ -865,5 +865,5 @@ def multipart_upload_complete(
 async_=async_,
 **request_kwargs,
 )
-return run_gen_step(gen_step, async_
+return run_gen_step(gen_step, async_)
p115client/tool/xys.py
CHANGED
@@ -59,7 +59,7 @@ def wish_info(
 )
 check_response(resp)
 return resp["data"]
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -110,7 +110,7 @@ def wish_make(
 )
 check_response(resp)
 return P115StrID(resp["data"]["xys_id"], resp["data"])
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -170,7 +170,7 @@ def wish_answer(
 )
 check_response(resp)
 return P115StrID(resp["data"]["aid_id"], resp["data"])
-return run_gen_step(gen_step,
+return run_gen_step(gen_step, async_)
 @overload
@@ -330,7 +330,7 @@ def wish_iter(
 if not ls:
 break
 payload["page"] += 1
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 @overload
@@ -389,7 +389,7 @@ def wish_aid_iter(
 if not ls:
 break
 payload["page"] += 1
-return run_gen_step_iter(gen_step,
+return run_gen_step_iter(gen_step, async_)
 # TODO: also implement a drift bottle feature
{p115client-0.0.5.12.dist-info → p115client-0.0.5.12.2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: p115client
-Version: 0.0.5.12
+Version: 0.0.5.12.2
 Summary: Python 115 webdisk client.
 Home-page: https://github.com/ChenyangGao/p115client
 License: MIT
@@ -40,7 +40,7 @@ Requires-Dist: python-filewrap (>=0.2.8)
 Requires-Dist: python-hashtools (>=0.0.3.3)
 Requires-Dist: python-http_request (>=0.0.6)
 Requires-Dist: python-httpfile (>=0.0.5.2)
-Requires-Dist: python-iterutils (>=0.2.1)
+Requires-Dist: python-iterutils (>=0.2.4.1)
 Requires-Dist: python-property (>=0.0.3)
 Requires-Dist: python-startfile (>=0.0.2)
 Requires-Dist: python-undefined (>=0.0.3)
p115client-0.0.5.12.2.dist-info/RECORD
ADDED
@@ -0,0 +1,28 @@
+LICENSE,sha256=o5242_N2TgDsWwFhPn7yr8YJNF7XsJM5NxUMtcT97bc,1100
+p115client/__init__.py,sha256=1mx7njuAlqcuEWONTjSiiGnXyyNyqOcJyNX1FMHqQ-4,214
+p115client/_upload.py,sha256=199Hc9B9vUMphDdwh8vK3yCIPusIfeAzzOLpC-WZ8C8,31184
+p115client/client.py,sha256=IRlyc45i5gtZD19MjS3MGqm8ZKh-XZe3mQbHF31KVLs,795356
+p115client/const.py,sha256=ZYtBtTFB0eljIR0tze21_52Gl0pQxvyqMmDwgB1bqNU,7803
+p115client/exception.py,sha256=4SZ8ubOLMRxtcqc0u1kNzXqH1a6wwXJFwGnRDURoEgQ,3708
+p115client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+p115client/tool/__init__.py,sha256=IwwzbCQ7jpRoFGTQUCfGBx8zvjqjgSFs98yKpYyI46s,429
+p115client/tool/attr.py,sha256=8szVu_V3_s0DU4oL3wU_XpYBsBzJ3AAHZqX1oCTZlk0,2956
+p115client/tool/auth.py,sha256=qlRcfEjzebseiHR2dyBpOoPpxB4k8P6Tj729S2TLB1s,1672
+p115client/tool/download.py,sha256=_Kp5_AKNAFmBXsUvL0zm8XBkPVlJxC4n4vj6-E0t6BE,61009
+p115client/tool/edit.py,sha256=UCECvHYTy_GGpsaYbaNGtEBT0s5pMD4k1M-s8GG6iFI,17725
+p115client/tool/export_dir.py,sha256=WZAiRFqe984-oXTEgkVhwHi4MTrExVL_tGZIzWCkdL0,24364
+p115client/tool/fs_files.py,sha256=VqHO2eiTwxaHmYR4CwVXifo3dT_wdcvvPVg4FSrIn4E,15896
+p115client/tool/history.py,sha256=Hk7W1Qko6P68AoukNc-cYFoGV5XemtZBqWBEm4uKt5c,7416
+p115client/tool/iterdir.py,sha256=t17T5XTCof7ZAbPKAzTAyhpVzfknVsJFb4SK5giXdsI,199635
+p115client/tool/life.py,sha256=Ik8jdFM9_DiT5ivd5h2Hag4ViBVOY5WRCCrmbCqKnMY,17209
+p115client/tool/offline.py,sha256=6HaGkbsAAx3FPFvSr_7LZvrUw_fp3QRB2y2kVRHNgZ0,6291
+p115client/tool/pool.py,sha256=PImYG4fU7retZVFDPYib9e87J3RvJvugOW1mxX9jSU0,13831
+p115client/tool/request.py,sha256=rjXuQwRganE5Z-4rfgnyPFjE4jzdQSLdIs9s0cIDshU,7043
+p115client/tool/upload.py,sha256=z8eD5BAlw6SgWdaJBJ4VtyDMPr8xG3wz6oCxE6xxPTg,32568
+p115client/tool/util.py,sha256=pAa8gc4BcnVTpNcXbNZU4tBUMjSB04DGOpzDdzfbto8,3934
+p115client/tool/xys.py,sha256=Xx8sRh4h4jlowFhRMTDTtOhftgmhevehAU27jMsKh8s,10191
+p115client/type.py,sha256=7kOp98uLaYqcTTCgCrb3DRcl8ukMpn7ibsnVvtw2nG8,6250
+p115client-0.0.5.12.2.dist-info/LICENSE,sha256=o5242_N2TgDsWwFhPn7yr8YJNF7XsJM5NxUMtcT97bc,1100
+p115client-0.0.5.12.2.dist-info/METADATA,sha256=_MF9z3xGkNCLuaWn9yk6ZQBvSnSLzCltzEcDBHI_5s4,8194
+p115client-0.0.5.12.2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+p115client-0.0.5.12.2.dist-info/RECORD,,