p115client 0.0.5.10.8-py3-none-any.whl → 0.0.5.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- p115client/_upload.py +6 -6
- p115client/client.py +1130 -318
- p115client/tool/attr.py +1 -1
- p115client/tool/download.py +76 -59
- p115client/tool/edit.py +3 -3
- p115client/tool/export_dir.py +18 -19
- p115client/tool/fs_files.py +4 -7
- p115client/tool/history.py +4 -4
- p115client/tool/iterdir.py +151 -72
- p115client/tool/life.py +10 -10
- p115client/tool/pool.py +5 -5
- p115client/tool/upload.py +3 -3
- p115client/tool/xys.py +10 -7
- {p115client-0.0.5.10.8.dist-info → p115client-0.0.5.11.dist-info}/METADATA +2 -2
- p115client-0.0.5.11.dist-info/RECORD +26 -0
- p115client-0.0.5.10.8.dist-info/RECORD +0 -26
- {p115client-0.0.5.10.8.dist-info → p115client-0.0.5.11.dist-info}/LICENSE +0 -0
- {p115client-0.0.5.10.8.dist-info → p115client-0.0.5.11.dist-info}/WHEEL +0 -0
p115client/tool/iterdir.py
CHANGED
@@ -13,6 +13,7 @@ __all__ = [
     "iter_selected_nodes_using_edit", "iter_selected_nodes_using_star_event",
     "iter_selected_dirs_using_star", "iter_files_with_dirname", "iter_files_with_path",
     "iter_files_with_path_by_export_dir", "iter_parents_3_level", "iter_dir_nodes",
+    "search_for_any_file",
 ]
 __doc__ = "这个模块提供了一些和目录信息罗列有关的函数"

@@ -237,7 +238,7 @@ def get_path_to_cid(
             return "/" + path
         else:
             return path
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -369,7 +370,7 @@ def get_file_count(
             node = DirNode(info["file_name"], pid)
             id_to_dirnode[(pid := int(info["file_id"]))] = node
         return int(resp["count"]) - int(resp.get("folder_count") or 0)
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -589,7 +590,7 @@ def get_ancestors(
             if not resp.get("sha1") and id_to_dirnode is not ...:
                 id_to_dirnode[ans["id"]] = DirNode(ans["name"], ans["parent_id"])
         return ancestors
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -691,7 +692,7 @@ def get_ancestors_to_cid(
             parts.append({"id": 0, "name": "", "parent_id": 0})
         parts.reverse()
         return parts
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 # TODO: 使用 search 接口以在特定目录之下搜索某个名字,以便减少风控
@@ -886,7 +887,7 @@ def get_id_to_path(
         if ensure_file is None or ensure_file ^ attr["is_dir"]:
             return P115ID(attr["id"], attr, about="path")
         raise error
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -940,7 +941,7 @@ def get_id_to_pickcode(
         check_response(resp)
         data = resp["data"]
         return P115ID(data["file_id"], data, about="pickcode")
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -1000,7 +1001,7 @@ def get_id_to_sha1(
         else:
             raise FileNotFoundError(ENOENT, file_sha1)
         return P115ID(data["file_id"], data, about="sha1", file_sha1=file_sha1)
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -1052,8 +1053,8 @@ def iter_nodes_skim(
             check_response(resp)
             for a in resp["data"]:
                 a["file_name"] = unescape_115_charref(a["file_name"])
-            yield YieldFrom(resp["data"],
-    return run_gen_step_iter(gen_step, async_=async_)
+            yield YieldFrom(resp["data"], may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -1190,7 +1191,7 @@ def _iter_fs_files(
                     attr = _overview_attr(info)
                     if attr.is_dir:
                         id_to_dirnode[attr.id] = DirNode(attr.name, attr.parent_id)
-                yield YieldFrom(resp["data"],
+                yield YieldFrom(resp["data"], may_await=False)
             else:
                 for info in resp["data"]:
                     attr = _overview_attr(info)
@@ -1199,10 +1200,10 @@ def _iter_fs_files(
                         id_to_dirnode[attr.id] = DirNode(attr.name, attr.parent_id)
                     elif ensure_file is False:
                         return
-                    yield Yield(info,
+                    yield Yield(info, may_await=False)
         except (StopAsyncIteration, StopIteration):
             pass
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -1591,8 +1592,8 @@ def ensure_attr_path[D: dict](
                 warn(f"{type(e).__module__}.{type(e).__qualname__}: {e} of {attr}", category=P115Warning)
                 attr.setdefault("ancestors", None)
                 attr.setdefault("path", "")
-            yield Yield(attr,
-    return run_gen_step_iter(gen_step, async_=async_)
+            yield Yield(attr, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2074,10 +2075,10 @@ def iterdir(
                     name = escape(name)
                 attr["path"] = dirname + name
                 return attr
-            yield YieldFrom(do_map(process, it),
+            yield YieldFrom(do_map(process, it), may_await=False) # type: ignore
         else:
-            yield YieldFrom(do_map(normalize_attr, it),
-    return run_gen_step_iter(gen_step, async_=async_)
+            yield YieldFrom(do_map(normalize_attr, it), may_await=False) # type: ignore
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 def iterdir_limited(
@@ -2186,8 +2187,8 @@ def iterdir_limited(
             attr["path"] = dirname + name
             yield attr
     def gen_step():
-        resp: dict = yield run_gen_step(request, async_=async_)
-        yield YieldFrom(iter_attrs(resp),
+        resp: dict = yield run_gen_step(request, simple=True, async_=async_)
+        yield YieldFrom(iter_attrs(resp), may_await=False)
         count = int(resp["count"])
         count_fetched = len(resp["data"])
         if count > count_fetched:
@@ -2215,30 +2216,30 @@ def iterdir_limited(
             count_top = count_top_dirs + count_top_files
             if count <= count_fetched * 2 - count_top:
                 resp = request({"asc": 0, "offset": count_top, "limit": count - count_fetched})
-                yield YieldFrom(iter_attrs(resp),
+                yield YieldFrom(iter_attrs(resp), may_await=False)
                 return
             if diff := count_dirs - len(seen_dirs):
                 if diff > count_fetched - count_top_dirs:
                     resp = request({"nf": 1, "offset": len(seen_dirs)})
-                    yield YieldFrom(iter_attrs(resp),
+                    yield YieldFrom(iter_attrs(resp), may_await=False)
                     diff = count_dirs - len(seen_dirs)
                 if diff > 0:
                     resp = request({"asc": 0, "nf": 1, "offset": count_top_dirs, "limit": diff})
-                    yield YieldFrom(iter_attrs(resp),
+                    yield YieldFrom(iter_attrs(resp), may_await=False)

             if diff := count_dirs - len(seen_dirs):
                 warn(f"lost {diff} directories: cid={cid}", category=P115Warning)
             if diff := count_files - len(seen_files):
                 if diff > count_fetched - count_top_files:
                     resp = request({"show_dir": 0, "offset": len(seen_files)})
-                    yield YieldFrom(iter_attrs(resp),
+                    yield YieldFrom(iter_attrs(resp), may_await=False)
                     diff = count_files - len(seen_files)
                 if diff > 0:
                     resp = request({"asc": 0, "show_dir": 0, "offset": count_top_files, "limit": diff})
-                    yield YieldFrom(iter_attrs(resp),
+                    yield YieldFrom(iter_attrs(resp), may_await=False)
             if diff := count_files - len(seen_files):
                 warn(f"lost {diff} files: cid={cid}", category=P115Warning)
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2591,9 +2592,9 @@ def iter_files(
                     add_to_cache(attr)
                 else:
                     return attr
-            yield YieldFrom(do_filter(bool, do_map(process, it)),
+            yield YieldFrom(do_filter(bool, do_map(process, it)), may_await=False) # type: ignore
         else:
-            yield YieldFrom(do_map(normalize_attr, it),
+            yield YieldFrom(do_map(normalize_attr, it), may_await=False) # type: ignore
         if (with_ancestors or with_path) and cache:
             yield YieldFrom(ensure_attr_path(
                 client,
@@ -2607,8 +2608,8 @@ def iter_files(
                 app=app,
                 async_=async_, # type: ignore
                 **request_kwargs,
-            ),
-    return run_gen_step_iter(gen_step, async_=async_)
+            ), may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2735,8 +2736,7 @@ def traverse_files(
     def gen_step():
         nonlocal cid
         if auto_splitting_tasks:
-            get_count =
-                get_file_count,
+            get_count = get_file_count(
                 client,
                 id_to_dirnode=id_to_dirnode,
                 **{**request_kwargs, "timeout": auto_splitting_statistics_timeout}
@@ -2772,7 +2772,7 @@ def traverse_files(
                     max_workers=max_workers,
                     async_=async_, # type: ignore
                     **request_kwargs,
-                ),
+                ), may_await=False)
             else:
                 yield YieldFrom(iter_files(
                     client,
@@ -2788,7 +2788,7 @@ def traverse_files(
                     max_workers=max_workers,
                     async_=async_, # type: ignore
                     **request_kwargs,
-                ),
+                ), may_await=False)
         else:
             with with_iter_next(iterdir(
                 client,
@@ -2813,8 +2813,8 @@ def traverse_files(
                         type > 7 or
                         type_of_attr(attr) == type
                     ):
-                        yield Yield(attr,
-    return run_gen_step_iter(gen_step, async_=async_)
+                        yield Yield(attr, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -2907,8 +2907,8 @@ def iter_dirs(
         )) as get_next:
             while True:
                 batch = yield get_next
-                yield YieldFrom(batch,
-    it = run_gen_step_iter(gen_step(it), async_=async_)
+                yield YieldFrom(batch, may_await=False)
+    it = run_gen_step_iter(gen_step(it), simple=True, async_=async_)
     return it


@@ -3155,12 +3155,12 @@ def iter_image_files(
             count = int(resp.get("count") or 0)
             if offset != resp["offset"]:
                 break
-            yield YieldFrom(map(normalize, resp["data"]),
+            yield YieldFrom(map(normalize, resp["data"]), may_await=False)
             offset += len(resp["data"])
             if offset >= count:
                 break
             payload["offset"] = offset
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -3269,11 +3269,11 @@ def share_iterdir(
                 id_to_dirnode[oattr.id] = DirNode(oattr.name, oattr.parent_id)
             if normalize_attr is not None:
                 attr = normalize_attr(attr)
-            yield Yield(attr,
+            yield Yield(attr, may_await=False)
             payload["offset"] += page_size # type: ignore
             if payload["offset"] >= count: # type: ignore
                 break
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -3371,12 +3371,12 @@ def share_iter_files(
                     "name": info["fn"],
                     "size": int(info["si"]),
                     "path": f"/{info['pt']}/{info['fn']}",
-                },
+                }, may_await=False)
             else:
-                yield Yield({k: attr[k] for k in ("id", "sha1", "name", "size", "path")},
+                yield Yield({k: attr[k] for k in ("id", "sha1", "name", "size", "path")}, may_await=False)
         except (StopIteration, StopAsyncIteration):
             pass
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -3546,7 +3546,7 @@ def share_get_id_to_path(
         if ensure_file is None or ensure_file ^ attr["is_dir"]:
             return P115ID(attr["id"], attr, about="path")
         raise error
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)


 @overload
@@ -4058,7 +4058,7 @@ def iter_selected_nodes_using_star_event(
                 id_to_dirnode[fid] = DirNode(name, pid)
                 if fid in ids:
                     if not normalize_attr:
-                        yield Yield(event,
+                        yield Yield(event, may_await=False)
                     elif normalize_attr is True:
                         attr = {
                             "id": fid,
@@ -4080,15 +4080,15 @@ def iter_selected_nodes_using_star_event(
                             attr["type"] = 3
                         else:
                             attr["type"] = type_of_attr(attr)
-                        yield Yield(attr,
+                        yield Yield(attr, may_await=False)
                     else:
-                        yield Yield(normalize_attr(event),
+                        yield Yield(normalize_attr(event), may_await=False)
                     discard(fid)
                     if not ids:
                         break
         except (StopIteration, StopAsyncIteration):
             pass
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4182,13 +4182,13 @@ def iter_selected_dirs_using_star(
                     break
                 cid = attr["id"]
                 if cid in ids:
-                    yield Yield(info,
+                    yield Yield(info, may_await=False)
                     discard(cid)
                     if not ids:
                         break
         except (StopIteration, StopAsyncIteration):
             pass
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4349,7 +4349,7 @@ def iter_files_with_dirname(
                 resp = yield get_next
                 for attr in resp["data"]:
                     attr.update(pid_to_info[attr["parent_id"]])
-                    yield Yield(attr,
+                    yield Yield(attr, may_await=False)
         except (StopIteration, StopAsyncIteration):
             pass
     if with_parents_4_level:
@@ -4362,7 +4362,7 @@ def iter_files_with_dirname(
         it = iter_parents_3_level(
             client,
             iter_unique((async_map if async_ else map)(
-                get_pid, run_gen_step_iter(gen_step, async_=async_))), # type: ignore
+                get_pid, run_gen_step_iter(gen_step, simple=True, async_=async_))), # type: ignore
            async_=async_, # type: ignore
            **request_kwargs,
        )
@@ -4375,9 +4375,9 @@ def iter_files_with_dirname(
            id_to_parents[0] = ("", "", "")
            for attr in files:
                attr["parents"] = (attr["dir_name"], *id_to_parents[attr["parent_id"]])
-                yield Yield(attr,
-        return run_gen_step_iter(gen_step2, async_=async_)
-    return run_gen_step_iter(gen_step, async_=async_)
+                yield Yield(attr, may_await=False)
+        return run_gen_step_iter(gen_step2, simple=True, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4517,7 +4517,7 @@ def iter_files_with_path(
     _path_already: None | bool = None if path_already else False
     if not path_already:
         from .download import iter_download_nodes
-        def set_path_already(*
+        def set_path_already(*_):
             nonlocal _path_already
             _path_already = True
         def fetch_dirs(id: int | str, /):
@@ -4550,7 +4550,7 @@ def iter_files_with_path(
            )) as get_next:
                while True:
                    attr = yield get_next
-                    yield run_gen_step(fetch_dirs(attr["pickcode"]), async_=async_)
+                    yield run_gen_step(fetch_dirs(attr["pickcode"]), simple=True, async_=async_)
     if with_ancestors:
         id_to_ancestors: dict[int, list[dict]] = {}
         def get_ancestors(id: int, attr: dict | tuple[str, int] | DirNode, /) -> list[dict]:
@@ -4597,7 +4597,7 @@ def iter_files_with_path(
         add_to_cache = cache.append
         if not path_already:
            if async_:
-                task: Any = create_task(run_gen_step(fetch_dirs(cid), async_=True))
+                task: Any = create_task(run_gen_step(fetch_dirs(cid), simple=True, async_=True))
            else:
                task = run_as_thread(run_gen_step, fetch_dirs(cid))
            task.add_done_callback(set_path_already)
@@ -4622,16 +4622,16 @@ def iter_files_with_path(
            while True:
                attr = yield get_next
                if _path_already is None:
-                    yield Yield(update_path(attr),
+                    yield Yield(update_path(attr), may_await=False)
                elif _path_already:
                    if async_:
                        yield task
                    else:
                        task.result()
                    if cache:
-                        yield YieldFrom(map(update_path, cache),
+                        yield YieldFrom(map(update_path, cache), may_await=False)
                        cache.clear()
-                    yield Yield(update_path(attr),
+                    yield Yield(update_path(attr), may_await=False)
                    _path_already = None
                else:
                    add_to_cache(attr)
@@ -4640,8 +4640,8 @@ def iter_files_with_path(
            yield task
        else:
            task.result()
-        yield YieldFrom(map(update_path, cache),
-    return run_gen_step_iter(gen_step, async_=async_)
+        yield YieldFrom(map(update_path, cache), may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4897,8 +4897,8 @@ def iter_files_with_path_by_export_dir(
            if escape is not None:
                name = escape(name)
            attr["path"] = dir_path + name
-            yield Yield(attr,
-    return run_gen_step_iter(gen_step, async_=async_)
+            yield Yield(attr, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -4989,7 +4989,7 @@ def iter_parents_3_level(
     ids = (async_filter if async_ else filter)(None, ids) # type: ignore
     return flatten(
         batch_map(
-            lambda ids, /: run_gen_step(get_parents(ids), async_=async_),
+            lambda ids, /: run_gen_step(get_parents(ids), simple=True, async_=async_),
             chunked(ids, 1150),
             max_workers=max_workers,
         ),
@@ -5069,7 +5069,7 @@ def iter_dir_nodes(
                id_to_dirnode[id] = DirNode(name, parent_id)
                yield Yield(
                    {"id": id, "parent_id": parent_id, "name": name},
-
+                    may_await=False,
                )
        else:
            with with_iter_next(iterdir(
@@ -5088,11 +5088,90 @@ def iter_dir_nodes(
                        "parent_id": attr["parent_id"],
                        "name": attr["name"],
                    },
-
+                    may_await=False,
                )
                yield YieldFrom(
-                    run_gen_step_iter(gen_step(attr["pickcode"]), async_=async_),
-
+                    run_gen_step_iter(gen_step(attr["pickcode"]), simple=True, async_=async_),
+                    may_await=False,
                )
-    return run_gen_step_iter(gen_step(cid or 0), async_=async_)
+    return run_gen_step_iter(gen_step(cid or 0), simple=True, async_=async_)
+
+
+@overload
+def search_for_any_file(
+    client: str | P115Client,
+    cid: int = 0,
+    search_value: str = ".",
+    suffix: str = "",
+    type: int = 99,
+    app: str = "web",
+    *,
+    async_: Literal[False] = False,
+    **request_kwargs,
+) -> bool:
+    ...
+@overload
+def search_for_any_file(
+    client: str | P115Client,
+    cid: int = 0,
+    search_value: str = ".",
+    suffix: str = "",
+    type: int = 99,
+    app: str = "web",
+    *,
+    async_: Literal[True],
+    **request_kwargs,
+) -> Coroutine[Any, Any, bool]:
+    ...
+def search_for_any_file(
+    client: str | P115Client,
+    cid: int = 0,
+    search_value: str = ".",
+    suffix: str = "",
+    type: int = 99,
+    app: str = "web",
+    *,
+    async_: Literal[False, True] = False,
+    **request_kwargs,
+) -> bool | Coroutine[Any, Any, bool]:
+    """搜索以判断是否存在某种文件
+
+    :param client: 115 客户端或 cookies
+    :param cid: 目录 id
+    :param search_value: 搜索关键词,搜索到的文件名必须包含这个字符串
+    :param suffix: 后缀名(优先级高于 type)
+    :param type: 文件类型
+
+        - 1: 文档
+        - 2: 图片
+        - 3: 音频
+        - 4: 视频
+        - 5: 压缩包
+        - 6: 应用
+        - 7: 书籍
+        - 99: 仅文件
+
+    :param app: 使用某个 app (设备)的接口
+    :param async_: 是否异步
+    :param request_kwargs: 其它请求参数
+
+    :return: 是否存在某种文件
+    """
+    if isinstance(client, str):
+        client = P115Client(client, check_for_relogin=True)
+    if not isinstance(client, P115Client) or app == "open":
+        fs_search: Callable = client.fs_search_open
+    elif app in ("", "web", "desktop", "harmony"):
+        fs_search = partial(client.fs_search, app=app)
+    else:
+        fs_search = client.fs_search_app
+    def gen_step():
+        resp = yield fs_search(
+            {"cid": cid, "limit": 1, "search_value": search_value, "suffix": suffix, "type": type},
+            async_=async_,
+            **request_kwargs,
+        )
+        check_response(resp)
+        return bool(resp["data"])
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
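The only new public name in this release is search_for_any_file, added above and exported via __all__. Per its (Chinese) docstring it issues a one-item search to report whether any file of a given kind exists under a directory; type=4 means video, and suffix takes precedence over type. A minimal usage sketch, assuming p115client 0.0.5.11 and a valid 115 cookies string (the cookies value below is a placeholder):

from p115client.tool.iterdir import search_for_any_file

cookies = "UID=...; CID=...; SEID=..."  # placeholder; a P115Client instance also works

# Is there any video (type=4) anywhere under the root directory (cid=0)?
has_video = search_for_any_file(cookies, cid=0, type=4)

# Restrict by suffix instead; per the docstring, suffix takes precedence over type.
has_mkv = search_for_any_file(cookies, cid=0, suffix=".mkv")

# With async_=True the same call returns a coroutine to be awaited
# (see the Literal[True] overload above):
# has_video = await search_for_any_file(cookies, cid=0, type=4, async_=True)

Passing a bare cookies string works because the function itself wraps it in P115Client(client, check_for_relogin=True), as the hunk shows.
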
p115client/tool/life.py
CHANGED
@@ -137,14 +137,14 @@ def iter_life_list(
            for items in data["list"]:
                if "items" not in items:
                    if start_time < items["update_time"] < end_time:
-                        yield Yield(items,
+                        yield Yield(items, may_await=False)
                    continue
                behavior_type = items["behavior_type"]
                date = items["date"]
                for item in items["items"]:
                    item["behavior_type"] = behavior_type
                    item["date"] = date
-                    yield Yield(item,
+                    yield Yield(item, may_await=False)
                if behavior_type.startswith("upload_") or items["total"] > len(items["items"]):
                    seen_items: set[str] = {item["id"] for item in items["items"]}
                    payload = {"offset": 0, "limit": 32, "type": behavior_type, "date": date}
@@ -158,7 +158,7 @@ def iter_life_list(
                                seen_items.add(item["id"])
                                item["behavior_type"] = behavior_type
                                item["date"] = date
-                                yield Yield(item,
+                                yield Yield(item, may_await=False)
                        else:
                            if not resp["data"]["next_page"]:
                                break
@@ -172,7 +172,7 @@ def iter_life_list(
            else:
                sleep(1 - diff)
            end_time = int(time())
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -262,7 +262,7 @@ def iter_life_behavior_once(
                    return
                fid = event["file_id"]
                if fid not in seen:
-                    yield Yield(event,
+                    yield Yield(event, may_await=False)
                    seen_add(fid)
            offset += len(events)
            if offset >= int(resp["data"]["count"]):
@@ -276,7 +276,7 @@ def iter_life_behavior_once(
            ts_last_call = time()
            resp = yield life_behavior_detail(payload, async_=async_)
            events = check_response(resp)["data"]["list"]
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -376,8 +376,8 @@ def iter_life_behavior(
                sub_first_loop = False
                if not type and ignore_types and event["type"] in ignore_types:
                    continue
-                yield Yield(event,
-    return run_gen_step_iter(gen_step, async_=async_)
+                yield Yield(event, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)


 @overload
@@ -468,6 +468,6 @@ def iter_life_behavior_list(
                if not type and ignore_types and event["type"] in ignore_types:
                    continue
                push(event)
-            yield Yield(ls,
-    return run_gen_step_iter(gen_step, async_=async_)
+            yield Yield(ls, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
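Every hunk in this file repeats the two mechanical changes already applied in iterdir.py: Yield(...) and YieldFrom(...) gain an explicit may_await=False argument, and the generator is driven with run_gen_step_iter(gen_step, simple=True, async_=async_). The precise semantics of these flags belong to the generator-driving helpers the package imports and are not documented in this diff; the sketch below only shows where the new arguments sit in the 0.0.5.11 calling convention. fetch_page is a hypothetical stand-in for the paginated request each real function performs, and the arguments that may_await=False replaces on the removed lines are cut off by the diff viewer, so they are not reproduced here:

# assumed imports, mirroring how p115client's own modules obtain these helpers
from iterutils import Yield, run_gen_step_iter
from p115client import check_response

def iter_events(client, *, async_=False, **request_kwargs):
    def gen_step():
        offset = 0
        while True:
            # fetch_page is hypothetical; the real functions call their own paginated API here
            resp = yield fetch_page(client, offset, async_=async_, **request_kwargs)
            events = check_response(resp)["data"]["list"]
            for event in events:
                yield Yield(event, may_await=False)  # hand one item to the consumer
            offset += len(events)
            if offset >= int(resp["data"]["count"]):
                break
    # async_ decides whether the caller receives a sync or an async iterator
    return run_gen_step_iter(gen_step, simple=True, async_=async_)
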
p115client/tool/pool.py
CHANGED
@@ -88,7 +88,7 @@ def generate_auth_factory(
                "authorization": "Bearer " + resp["data"]["access_token"],
                "app_id": str(app_id),
            }
-        return run_gen_step(gen_step, async_=async_)
+        return run_gen_step(gen_step, simple=True, async_=async_)
     return make_cookies


@@ -142,7 +142,7 @@ def generate_cookies_factory(
                "cookie": "; ".join(f"{k}={v}" for k, v in resp["data"]["cookie"].items()),
                "app": app,
            }
-        return run_gen_step(gen_step, async_=async_)
+        return run_gen_step(gen_step, simple=True, async_=async_)
     return make_cookies


@@ -165,7 +165,7 @@ def generate_client_factory(
        def gen_step():
            headers = yield call(async_=async_)
            return cls(headers["cookie"])
-        return run_gen_step(gen_step, async_=async_)
+        return run_gen_step(gen_step, simple=True, async_=async_)
     return make_client


@@ -215,7 +215,7 @@ def make_pool[T](
            value = generate()
            val = ComparedWithID(value)
            return value, partial(heappush, heap_, (time(), val))
-        return run_gen_step(call, async_=async_)
+        return run_gen_step(call, simple=True, async_=async_)
     if not lock:
         setattr(get_value, "heap", heap_)
     return get_value
@@ -377,7 +377,7 @@ def call_wrap_with_pool(get_cert_headers: Callable, /, func: Callable) -> Callab
            if not isinstance(e, (AuthenticationError, LoginError)) and get_status_code(e) != 405:
                revert()
            raise
-        return run_gen_step(gen_step, async_=async_)
+        return run_gen_step(gen_step, simple=True, async_=async_)
     return update_wrapper(wrapper, func)
 
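The pool.py hunks apply the same run_gen_step change to plain value-returning helpers rather than iterators: each factory builds a small gen_step generator and returns run_gen_step(gen_step, simple=True, async_=async_), so a single code path serves synchronous callers (which get the value directly) and asynchronous callers (which get a coroutine), exactly the shape the bool | Coroutine[Any, Any, bool] overloads of search_for_any_file above spell out. A minimal sketch of that bridge under the same convention; make_header_factory and fetch_token are hypothetical names that only mark where the real factories perform their token or cookie request:

# assumed import, mirroring how p115client's own modules obtain this helper
from iterutils import run_gen_step

def make_header_factory(fetch_token, *, async_=False):
    def make_headers(async_=async_):
        def gen_step():
            # the yielded call is resolved by run_gen_step, synchronously or via await
            resp = yield fetch_token(async_=async_)
            return {"authorization": "Bearer " + resp["data"]["access_token"]}
        # returns the dict when async_=False, a coroutine producing it when async_=True
        return run_gen_step(gen_step, simple=True, async_=async_)
    return make_headers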