p115client-0.0.5.10.9-py3-none-any.whl → p115client-0.0.5.11.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- p115client/_upload.py +8 -8
- p115client/client.py +963 -256
- p115client/tool/attr.py +1 -1
- p115client/tool/download.py +86 -69
- p115client/tool/edit.py +3 -3
- p115client/tool/export_dir.py +22 -23
- p115client/tool/fs_files.py +4 -7
- p115client/tool/history.py +5 -5
- p115client/tool/iterdir.py +87 -88
- p115client/tool/life.py +12 -12
- p115client/tool/pool.py +5 -5
- p115client/tool/upload.py +4 -4
- p115client/tool/xys.py +10 -7
- {p115client-0.0.5.10.9.dist-info → p115client-0.0.5.11.1.dist-info}/METADATA +2 -2
- p115client-0.0.5.11.1.dist-info/RECORD +26 -0
- p115client-0.0.5.10.9.dist-info/RECORD +0 -26
- {p115client-0.0.5.10.9.dist-info → p115client-0.0.5.11.1.dist-info}/LICENSE +0 -0
- {p115client-0.0.5.10.9.dist-info → p115client-0.0.5.11.1.dist-info}/WHEEL +0 -0
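Every hunk below applies the same refactor to the generator-based helpers: calls to `run_gen_step(...)` and `run_gen_step_iter(...)` gain `simple=True`, values wrapped in `Yield(...)`/`YieldFrom(...)` now pass `may_await=False`, and handles obtained from `with_iter_next(...)` are called before being yielded (`yield get_next()` instead of `yield get_next`). The sketch below is a minimal, self-contained illustration of that calling convention only; `run_gen_step` here is a stand-in written for this note, not p115client's real implementation, and its `simple`/`async_` parameters are accepted purely to mirror the signature seen in the diff.

```python
from typing import Any, Callable, Generator

def run_gen_step(gen_step: Callable[[], Generator] | Generator,
                 simple: bool = False,
                 async_: bool = False) -> Any:
    # Stand-in runner: drives a synchronous "step" generator to completion,
    # feeding each yielded value straight back in, and returns its result.
    # (simple/async_ are unused here; they only mirror the diff's signature.)
    gen = gen_step() if callable(gen_step) else gen_step
    value: Any = None
    try:
        while True:
            value = gen.send(value)
    except StopIteration as exc:
        return exc.value

names = iter(["a", "b", "c"])
get_next = names.__next__  # the handle is now *called* inside the step function

def gen_step():
    collected = []
    while True:
        try:
            item = yield get_next()  # new style: `yield get_next()`, not `yield get_next`
        except StopIteration:
            break
        collected.append(item)
    return "/".join(collected)

print(run_gen_step(gen_step, simple=True, async_=False))  # -> a/b/c
```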
p115client/tool/iterdir.py
CHANGED
@@ -238,7 +238,7 @@ def get_path_to_cid(
 return "/" + path
 else:
 return path
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -370,7 +370,7 @@ def get_file_count(
 node = DirNode(info["file_name"], pid)
 id_to_dirnode[(pid := int(info["file_id"]))] = node
 return int(resp["count"]) - int(resp.get("folder_count") or 0)
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -590,7 +590,7 @@ def get_ancestors(
 if not resp.get("sha1") and id_to_dirnode is not ...:
 id_to_dirnode[ans["id"]] = DirNode(ans["name"], ans["parent_id"])
 return ancestors
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -692,7 +692,7 @@ def get_ancestors_to_cid(
 parts.append({"id": 0, "name": "", "parent_id": 0})
 parts.reverse()
 return parts
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 # TODO: 使用 search 接口以在特定目录之下搜索某个名字,以便减少风控
@@ -865,7 +865,7 @@ def get_id_to_path(
 )) as get_next:
 found = False
 while not found:
-attr = yield get_next
+attr = yield get_next()
 found = (attr["name"].replace("/", "|") if is_posixpath else attr["name"]) == name
 parent_id = attr["id"]
 if not found:
@@ -882,12 +882,12 @@ def get_id_to_path(
 **request_kwargs,
 )) as get_next:
 while True:
-attr = yield get_next
+attr = yield get_next()
 if (attr["name"].replace("/", "|") if is_posixpath else attr["name"]) == name:
 if ensure_file is None or ensure_file ^ attr["is_dir"]:
 return P115ID(attr["id"], attr, about="path")
 raise error
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -941,7 +941,7 @@ def get_id_to_pickcode(
 check_response(resp)
 data = resp["data"]
 return P115ID(data["file_id"], data, about="pickcode")
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -1001,7 +1001,7 @@ def get_id_to_sha1(
 else:
 raise FileNotFoundError(ENOENT, file_sha1)
 return P115ID(data["file_id"], data, about="sha1", file_sha1=file_sha1)
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -1053,8 +1053,8 @@ def iter_nodes_skim(
 check_response(resp)
 for a in resp["data"]:
 a["file_name"] = unescape_115_charref(a["file_name"])
-yield YieldFrom(resp["data"],
-return run_gen_step_iter(gen_step, async_=async_)
+yield YieldFrom(resp["data"], may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -1191,7 +1191,7 @@ def _iter_fs_files(
 attr = _overview_attr(info)
 if attr.is_dir:
 id_to_dirnode[attr.id] = DirNode(attr.name, attr.parent_id)
-yield YieldFrom(resp["data"],
+yield YieldFrom(resp["data"], may_await=False)
 else:
 for info in resp["data"]:
 attr = _overview_attr(info)
@@ -1200,10 +1200,10 @@ def _iter_fs_files(
 id_to_dirnode[attr.id] = DirNode(attr.name, attr.parent_id)
 elif ensure_file is False:
 return
-yield Yield(info,
+yield Yield(info, may_await=False)
 except (StopAsyncIteration, StopIteration):
 pass
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -1592,8 +1592,8 @@ def ensure_attr_path[D: dict](
 warn(f"{type(e).__module__}.{type(e).__qualname__}: {e} of {attr}", category=P115Warning)
 attr.setdefault("ancestors", None)
 attr.setdefault("path", "")
-yield Yield(attr,
-return run_gen_step_iter(gen_step, async_=async_)
+yield Yield(attr, may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2075,10 +2075,10 @@ def iterdir(
 name = escape(name)
 attr["path"] = dirname + name
 return attr
-yield YieldFrom(do_map(process, it),
+yield YieldFrom(do_map(process, it), may_await=False) # type: ignore
 else:
-yield YieldFrom(do_map(normalize_attr, it),
-return run_gen_step_iter(gen_step, async_=async_)
+yield YieldFrom(do_map(normalize_attr, it), may_await=False) # type: ignore
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 def iterdir_limited(
@@ -2159,7 +2159,7 @@ def iterdir_limited(
 for info in resp["path"][1:]
 ]
 return resp
-def iter_attrs(resp):
+def iter_attrs(resp, /):
 if with_path:
 names: Iterator[str] = (info["name"] for info in ancestors)
 if escape is not None:
@@ -2187,8 +2187,8 @@ def iterdir_limited(
 attr["path"] = dirname + name
 yield attr
 def gen_step():
-resp: dict = yield run_gen_step(request, async_=async_)
-yield YieldFrom(iter_attrs(resp),
+resp: dict = yield run_gen_step(request, simple=True, async_=async_)
+yield YieldFrom(iter_attrs(resp), may_await=False)
 count = int(resp["count"])
 count_fetched = len(resp["data"])
 if count > count_fetched:
@@ -2216,30 +2216,30 @@ def iterdir_limited(
 count_top = count_top_dirs + count_top_files
 if count <= count_fetched * 2 - count_top:
 resp = request({"asc": 0, "offset": count_top, "limit": count - count_fetched})
-yield YieldFrom(iter_attrs(resp),
+yield YieldFrom(iter_attrs(resp), may_await=False)
 return
 if diff := count_dirs - len(seen_dirs):
 if diff > count_fetched - count_top_dirs:
 resp = request({"nf": 1, "offset": len(seen_dirs)})
-yield YieldFrom(iter_attrs(resp),
+yield YieldFrom(iter_attrs(resp), may_await=False)
 diff = count_dirs - len(seen_dirs)
 if diff > 0:
 resp = request({"asc": 0, "nf": 1, "offset": count_top_dirs, "limit": diff})
-yield YieldFrom(iter_attrs(resp),
+yield YieldFrom(iter_attrs(resp), may_await=False)

 if diff := count_dirs - len(seen_dirs):
 warn(f"lost {diff} directories: cid={cid}", category=P115Warning)
 if diff := count_files - len(seen_files):
 if diff > count_fetched - count_top_files:
 resp = request({"show_dir": 0, "offset": len(seen_files)})
-yield YieldFrom(iter_attrs(resp),
+yield YieldFrom(iter_attrs(resp), may_await=False)
 diff = count_files - len(seen_files)
 if diff > 0:
 resp = request({"asc": 0, "show_dir": 0, "offset": count_top_files, "limit": diff})
-yield YieldFrom(iter_attrs(resp),
+yield YieldFrom(iter_attrs(resp), may_await=False)
 if diff := count_files - len(seen_files):
 warn(f"lost {diff} files: cid={cid}", category=P115Warning)
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2592,9 +2592,9 @@ def iter_files(
 add_to_cache(attr)
 else:
 return attr
-yield YieldFrom(do_filter(bool, do_map(process, it)),
+yield YieldFrom(do_filter(bool, do_map(process, it)), may_await=False) # type: ignore
 else:
-yield YieldFrom(do_map(normalize_attr, it),
+yield YieldFrom(do_map(normalize_attr, it), may_await=False) # type: ignore
 if (with_ancestors or with_path) and cache:
 yield YieldFrom(ensure_attr_path(
 client,
@@ -2608,8 +2608,8 @@ def iter_files(
 app=app,
 async_=async_, # type: ignore
 **request_kwargs,
-),
-return run_gen_step_iter(gen_step, async_=async_)
+), may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2736,8 +2736,7 @@ def traverse_files(
 def gen_step():
 nonlocal cid
 if auto_splitting_tasks:
-get_count =
-get_file_count,
+get_count = get_file_count(
 client,
 id_to_dirnode=id_to_dirnode,
 **{**request_kwargs, "timeout": auto_splitting_statistics_timeout}
@@ -2773,7 +2772,7 @@ def traverse_files(
 max_workers=max_workers,
 async_=async_, # type: ignore
 **request_kwargs,
-),
+), may_await=False)
 else:
 yield YieldFrom(iter_files(
 client,
@@ -2789,7 +2788,7 @@ def traverse_files(
 max_workers=max_workers,
 async_=async_, # type: ignore
 **request_kwargs,
-),
+), may_await=False)
 else:
 with with_iter_next(iterdir(
 client,
@@ -2805,7 +2804,7 @@ def traverse_files(
 async_=async_,
 **request_kwargs,
 )) as get_next:
-attr = yield get_next
+attr = yield get_next()
 if attr.get("is_dir") or attr.get("is_directory"):
 send(attr["id"])
 elif (
@@ -2814,8 +2813,8 @@ def traverse_files(
 type > 7 or
 type_of_attr(attr) == type
 ):
-yield Yield(attr,
-return run_gen_step_iter(gen_step, async_=async_)
+yield Yield(attr, may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2907,9 +2906,9 @@ def iter_dirs(
 max_workers=max_workers,
 )) as get_next:
 while True:
-batch = yield get_next
-yield YieldFrom(batch,
-it = run_gen_step_iter(gen_step(it), async_=async_)
+batch = yield get_next()
+yield YieldFrom(batch, may_await=False)
+it = run_gen_step_iter(gen_step(it), simple=True, async_=async_)
 return it


@@ -3156,12 +3155,12 @@ def iter_image_files(
 count = int(resp.get("count") or 0)
 if offset != resp["offset"]:
 break
-yield YieldFrom(map(normalize, resp["data"]),
+yield YieldFrom(map(normalize, resp["data"]), may_await=False)
 offset += len(resp["data"])
 if offset >= count:
 break
 payload["offset"] = offset
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -3270,11 +3269,11 @@ def share_iterdir(
 id_to_dirnode[oattr.id] = DirNode(oattr.name, oattr.parent_id)
 if normalize_attr is not None:
 attr = normalize_attr(attr)
-yield Yield(attr,
+yield Yield(attr, may_await=False)
 payload["offset"] += page_size # type: ignore
 if payload["offset"] >= count: # type: ignore
 break
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -3372,12 +3371,12 @@ def share_iter_files(
 "name": info["fn"],
 "size": int(info["si"]),
 "path": f"/{info['pt']}/{info['fn']}",
-},
+}, may_await=False)
 else:
-yield Yield({k: attr[k] for k in ("id", "sha1", "name", "size", "path")},
+yield Yield({k: attr[k] for k in ("id", "sha1", "name", "size", "path")}, may_await=False)
 except (StopIteration, StopAsyncIteration):
 pass
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -3524,7 +3523,7 @@ def share_get_id_to_path(
 )) as get_next:
 found = False
 while not found:
-attr = yield get_next
+attr = yield get_next()
 found = attr["is_dir"] and (attr["name"].replace("/", "|") if is_posixpath else attr["name"]) == name
 parent_id = attr["id"]
 if not found:
@@ -3542,12 +3541,12 @@ def share_get_id_to_path(
 **request_kwargs,
 )) as get_next:
 while True:
-attr = yield get_next
+attr = yield get_next()
 if (attr["name"].replace("/", "|") if is_posixpath else attr["name"]) == name:
 if ensure_file is None or ensure_file ^ attr["is_dir"]:
 return P115ID(attr["id"], attr, about="path")
 raise error
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -4059,7 +4058,7 @@ def iter_selected_nodes_using_star_event(
 id_to_dirnode[fid] = DirNode(name, pid)
 if fid in ids:
 if not normalize_attr:
-yield Yield(event,
+yield Yield(event, may_await=False)
 elif normalize_attr is True:
 attr = {
 "id": fid,
@@ -4081,15 +4080,15 @@ def iter_selected_nodes_using_star_event(
 attr["type"] = 3
 else:
 attr["type"] = type_of_attr(attr)
-yield Yield(attr,
+yield Yield(attr, may_await=False)
 else:
-yield Yield(normalize_attr(event),
+yield Yield(normalize_attr(event), may_await=False)
 discard(fid)
 if not ids:
 break
 except (StopIteration, StopAsyncIteration):
 pass
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4183,13 +4182,13 @@ def iter_selected_dirs_using_star(
 break
 cid = attr["id"]
 if cid in ids:
-yield Yield(info,
+yield Yield(info, may_await=False)
 discard(cid)
 if not ids:
 break
 except (StopIteration, StopAsyncIteration):
 pass
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4347,10 +4346,10 @@ def iter_files_with_dirname(
 get_next = it.__anext__ if async_ else it.__next__
 try:
 while True:
-resp = yield get_next
+resp = yield get_next()
 for attr in resp["data"]:
 attr.update(pid_to_info[attr["parent_id"]])
-yield Yield(attr,
+yield Yield(attr, may_await=False)
 except (StopIteration, StopAsyncIteration):
 pass
 if with_parents_4_level:
@@ -4363,22 +4362,22 @@ def iter_files_with_dirname(
 it = iter_parents_3_level(
 client,
 iter_unique((async_map if async_ else map)(
-get_pid, run_gen_step_iter(gen_step, async_=async_))), # type: ignore
+get_pid, run_gen_step_iter(gen_step, simple=True, async_=async_))), # type: ignore
 async_=async_, # type: ignore
 **request_kwargs,
 )
 if async_:
 async def collect():
 return {k: v async for k, v in cast(AsyncIterator, it)}
-id_to_parents: dict[int, tuple[str, str, str]] = yield collect
+id_to_parents: dict[int, tuple[str, str, str]] = yield collect()
 else:
 id_to_parents = dict(it) # type: ignore
 id_to_parents[0] = ("", "", "")
 for attr in files:
 attr["parents"] = (attr["dir_name"], *id_to_parents[attr["parent_id"]])
-yield Yield(attr,
-return run_gen_step_iter(gen_step2, async_=async_)
-return run_gen_step_iter(gen_step, async_=async_)
+yield Yield(attr, may_await=False)
+return run_gen_step_iter(gen_step2, simple=True, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4518,7 +4517,7 @@ def iter_files_with_path(
 _path_already: None | bool = None if path_already else False
 if not path_already:
 from .download import iter_download_nodes
-def set_path_already(*
+def set_path_already(*_):
 nonlocal _path_already
 _path_already = True
 def fetch_dirs(id: int | str, /):
@@ -4536,9 +4535,9 @@ def iter_files_with_path(
 max_workers=None,
 async_=async_,
 **request_kwargs,
-)) as
+)) as get_next:
 while True:
-info = yield
+info = yield get_next()
 id_to_dirnode[int(info["fid"])] = DirNode(info["fn"], int(info["pid"]))
 else:
 with with_iter_next(iterdir(
@@ -4550,8 +4549,8 @@ def iter_files_with_path(
 **request_kwargs,
 )) as get_next:
 while True:
-attr = yield get_next
-yield run_gen_step(fetch_dirs(attr["pickcode"]), async_=async_)
+attr = yield get_next()
+yield run_gen_step(fetch_dirs(attr["pickcode"]), simple=True, async_=async_)
 if with_ancestors:
 id_to_ancestors: dict[int, list[dict]] = {}
 def get_ancestors(id: int, attr: dict | tuple[str, int] | DirNode, /) -> list[dict]:
@@ -4598,7 +4597,7 @@ def iter_files_with_path(
 add_to_cache = cache.append
 if not path_already:
 if async_:
-task: Any = create_task(run_gen_step(fetch_dirs(cid), async_=True))
+task: Any = create_task(run_gen_step(fetch_dirs(cid), simple=True, async_=True))
 else:
 task = run_as_thread(run_gen_step, fetch_dirs(cid))
 task.add_done_callback(set_path_already)
@@ -4621,18 +4620,18 @@ def iter_files_with_path(
 **request_kwargs,
 )) as get_next:
 while True:
-attr = yield get_next
+attr = yield get_next()
 if _path_already is None:
-yield Yield(update_path(attr),
+yield Yield(update_path(attr), may_await=False)
 elif _path_already:
 if async_:
 yield task
 else:
 task.result()
 if cache:
-yield YieldFrom(map(update_path, cache),
+yield YieldFrom(map(update_path, cache), may_await=False)
 cache.clear()
-yield Yield(update_path(attr),
+yield Yield(update_path(attr), may_await=False)
 _path_already = None
 else:
 add_to_cache(attr)
@@ -4641,8 +4640,8 @@ def iter_files_with_path(
 yield task
 else:
 task.result()
-yield YieldFrom(map(update_path, cache),
-return run_gen_step_iter(gen_step, async_=async_)
+yield YieldFrom(map(update_path, cache), may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4898,8 +4897,8 @@ def iter_files_with_path_by_export_dir(
 if escape is not None:
 name = escape(name)
 attr["path"] = dir_path + name
-yield Yield(attr,
-return run_gen_step_iter(gen_step, async_=async_)
+yield Yield(attr, may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4990,7 +4989,7 @@ def iter_parents_3_level(
 ids = (async_filter if async_ else filter)(None, ids) # type: ignore
 return flatten(
 batch_map(
-lambda ids, /: run_gen_step(get_parents(ids), async_=async_),
+lambda ids, /: run_gen_step(get_parents(ids), simple=True, async_=async_),
 chunked(ids, 1150),
 max_workers=max_workers,
 ),
@@ -5062,7 +5061,7 @@ def iter_dir_nodes(
 **request_kwargs,
 )) as get_next_info:
 while True:
-info = yield get_next_info
+info = yield get_next_info()
 id = int(info["fid"])
 parent_id = int(info["pid"])
 name = info["fn"]
@@ -5070,7 +5069,7 @@ def iter_dir_nodes(
 id_to_dirnode[id] = DirNode(name, parent_id)
 yield Yield(
 {"id": id, "parent_id": parent_id, "name": name},
-
+may_await=False,
 )
 else:
 with with_iter_next(iterdir(
@@ -5082,20 +5081,20 @@ def iter_dir_nodes(
 **request_kwargs,
 )) as get_next:
 while True:
-attr = yield get_next
+attr = yield get_next()
 yield Yield(
 {
 "id": attr["id"],
 "parent_id": attr["parent_id"],
 "name": attr["name"],
 },
-
+may_await=False,
 )
 yield YieldFrom(
-run_gen_step_iter(gen_step(attr["pickcode"]), async_=async_),
-
+run_gen_step_iter(gen_step(attr["pickcode"]), simple=True, async_=async_),
+may_await=False,
 )
-return run_gen_step_iter(gen_step(cid or 0), async_=async_)
+return run_gen_step_iter(gen_step(cid or 0), simple=True, async_=async_)


 @overload
@@ -5174,5 +5173,5 @@ def search_for_any_file(
 )
 check_response(resp)
 return bool(resp["data"])
-return run_gen_step(gen_step, async_=async_)
+return run_gen_step(gen_step, simple=True, async_=async_)

p115client/tool/life.py
CHANGED
@@ -137,14 +137,14 @@ def iter_life_list(
 for items in data["list"]:
 if "items" not in items:
 if start_time < items["update_time"] < end_time:
-yield Yield(items,
+yield Yield(items, may_await=False)
 continue
 behavior_type = items["behavior_type"]
 date = items["date"]
 for item in items["items"]:
 item["behavior_type"] = behavior_type
 item["date"] = date
-yield Yield(item,
+yield Yield(item, may_await=False)
 if behavior_type.startswith("upload_") or items["total"] > len(items["items"]):
 seen_items: set[str] = {item["id"] for item in items["items"]}
 payload = {"offset": 0, "limit": 32, "type": behavior_type, "date": date}
@@ -158,7 +158,7 @@ def iter_life_list(
 seen_items.add(item["id"])
 item["behavior_type"] = behavior_type
 item["date"] = date
-yield Yield(item,
+yield Yield(item, may_await=False)
 else:
 if not resp["data"]["next_page"]:
 break
@@ -172,7 +172,7 @@ def iter_life_list(
 else:
 sleep(1 - diff)
 end_time = int(time())
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -262,7 +262,7 @@ def iter_life_behavior_once(
 return
 fid = event["file_id"]
 if fid not in seen:
-yield Yield(event,
+yield Yield(event, may_await=False)
 seen_add(fid)
 offset += len(events)
 if offset >= int(resp["data"]["count"]):
@@ -276,7 +276,7 @@ def iter_life_behavior_once(
 ts_last_call = time()
 resp = yield life_behavior_detail(payload, async_=async_)
 events = check_response(resp)["data"]["list"]
-return run_gen_step_iter(gen_step, async_=async_)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -369,15 +369,15 @@ def iter_life_behavior(
 )) as get_next:
 sub_first_loop = True
 while True:
-event = yield get_next
+event = yield get_next()
 if sub_first_loop:
 from_id = int(event["id"])
 from_time = int(event["update_time"])
 sub_first_loop = False
 if not type and ignore_types and event["type"] in ignore_types:
 continue
-yield Yield(event,
-return run_gen_step_iter(gen_step, async_=async_)
+yield Yield(event, may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -460,7 +460,7 @@ def iter_life_behavior_list(
 )) as get_next:
 first_loop = True
 while True:
-event = yield get_next
+event = yield get_next()
 if first_loop:
 from_id = int(event["id"])
 from_time = int(event["update_time"])
@@ -468,6 +468,6 @@ def iter_life_behavior_list(
 if not type and ignore_types and event["type"] in ignore_types:
 continue
 push(event)
-yield Yield(ls,
-return run_gen_step_iter(gen_step, async_=async_)
+yield Yield(ls, may_await=False)
+return run_gen_step_iter(gen_step, simple=True, async_=async_)
