p115client 0.0.5.10.8__py3-none-any.whl → 0.0.5.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
p115client/tool/attr.py CHANGED
@@ -72,7 +72,7 @@ def get_attr(
         resp = yield client.fs_file(id, async_=async_, **request_kwargs)
         check_response(resp)
         return normalize_attr_web(resp["data"][0])
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
 
 def type_of_attr(attr: Mapping, /) -> int:
@@ -145,7 +145,7 @@ def batch_get_url(
             for id, info in resp["data"].items()
             if info["url"]
         }
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -227,8 +227,8 @@ def iter_url_batches(
                     sha1=info["sha1"],
                     is_directory=False,
                     headers=headers,
-                ), identity=True)
-    return run_gen_step_iter(gen_step, async_=async_)
+                ), may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -343,6 +343,12 @@ def iter_files_with_url(
         async_=async_,
         **request_kwargs,
     )
+    if not isinstance(client, P115Client) or app == "open":
+        get_url: Callable[..., P115URL] = client.download_url_open
+    elif app in ("", "web", "desktop", "harmony"):
+        get_url = client.download_url
+    else:
+        get_url = partial(client.download_url, app=app)
     def gen_step():
         if suffixes is None:
             it = iter_files(
@@ -368,18 +374,22 @@ def iter_files_with_url(
                         cid,
                         suffixes=suffix,
                         app=app,
+                        user_agent=user_agent,
                         **params, # type: ignore
                     ),
-                    identity=True,
+                    may_await=False,
                 )
             return
+        if headers := request_kwargs.get("headers"):
+            request_kwargs["headers"] = dict(headers, **{"user-agent": user_agent})
+        else:
+            request_kwargs["headers"] = {"user-agent": user_agent}
         with with_iter_next(it) as get_next:
             while True:
                 attr = yield get_next
                 if attr.get("violated", False):
                     if attr["size"] < 1024 * 1024 * 115:
-                        attr["url"] = yield partial(
-                            client.download_url,
+                        attr["url"] = yield get_url(
                             attr["pickcode"],
                             use_web_api=True,
                             async_=async_,
@@ -388,14 +398,13 @@ def iter_files_with_url(
                     else:
                         warn(f"unable to get url for {attr!r}", category=P115Warning)
                 else:
-                    attr["url"] = yield partial(
-                        client.download_url,
+                    attr["url"] = yield get_url(
                         attr["pickcode"],
                         async_=async_,
                         **request_kwargs,
                     )
-                yield Yield(attr, identity=True)
-    return run_gen_step_iter(gen_step, async_=async_)
+                yield Yield(attr, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -495,6 +504,12 @@ def iter_images_with_url(
         async_=async_,
         **request_kwargs
     )
+    if not isinstance(client, P115Client) or app == "open":
+        get_url: Callable[..., P115URL] = client.download_url_open
+    elif app in ("", "web", "desktop", "harmony"):
+        get_url = client.download_url
+    else:
+        get_url = partial(client.download_url, app=app)
     def gen_step():
         if suffixes is None:
             it = iter_files(
@@ -522,7 +537,7 @@ def iter_images_with_url(
                         app=app,
                         **params, # type: ignore
                     ),
-                    identity=True,
+                    may_await=False,
                 )
             return
         with with_iter_next(it) as get_next:
@@ -533,8 +548,7 @@ def iter_images_with_url(
                 except KeyError:
                     if attr.get("violated", False):
                         if attr["size"] < 1024 * 1024 * 115:
-                            attr["url"] = yield partial(
-                                client.download_url,
+                            attr["url"] = yield get_url(
                                 attr["pickcode"],
                                 use_web_api=True,
                                 async_=async_,
@@ -543,14 +557,13 @@ def iter_images_with_url(
                         else:
                             warn(f"unable to get url for {attr!r}", category=P115Warning)
                     else:
-                        attr["url"] = yield partial(
-                            client.download_url,
+                        attr["url"] = yield get_url(
                             attr["pickcode"],
                             async_=async_,
                             **request_kwargs,
                         )
-                yield Yield(attr, identity=True)
-    return run_gen_step_iter(gen_step, async_=async_)
+                yield Yield(attr, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -643,6 +656,12 @@ def iter_subtitles_with_url(
     """
     if isinstance(client, str):
         client = P115Client(client, check_for_relogin=True)
+    if not isinstance(client, P115Client) or app == "open":
+        get_url: Callable[..., P115URL] = client.download_url_open
+    elif app in ("", "web", "desktop", "harmony"):
+        get_url = client.download_url
+    else:
+        get_url = partial(client.download_url, app=app)
     def gen_step():
         nonlocal suffixes
         if isinstance(suffixes, str):
@@ -706,32 +725,30 @@ def iter_subtitles_with_url(
                 }
             finally:
                 yield client.fs_delete(scid, async_=async_, **request_kwargs)
-        if subtitles:
-            for attr in items:
-                attr["url"] = subtitles[attr["sha1"]]
-                yield Yield(attr, identity=True)
-        else:
-            for attr in items:
-                if attr.get("violated", False):
-                    if attr["size"] < 1024 * 1024 * 115:
-                        attr["url"] = yield partial(
-                            client.download_url,
+            if subtitles:
+                for attr in items:
+                    attr["url"] = subtitles[attr["sha1"]]
+                    yield Yield(attr, may_await=False)
+            else:
+                for attr in items:
+                    if attr.get("violated", False):
+                        if attr["size"] < 1024 * 1024 * 115:
+                            attr["url"] = yield get_url(
+                                attr["pickcode"],
+                                use_web_api=True,
+                                async_=async_,
+                                **request_kwargs,
+                            )
+                        else:
+                            warn(f"unable to get url for {attr!r}", category=P115Warning)
+                    else:
+                        attr["url"] = yield get_url(
                             attr["pickcode"],
-                            use_web_api=True,
                             async_=async_,
                             **request_kwargs,
                         )
-                    else:
-                        warn(f"unable to get url for {attr!r}", category=P115Warning)
-                    else:
-                        attr["url"] = yield partial(
-                            client.download_url,
-                            attr["pickcode"],
-                            async_=async_,
-                            **request_kwargs,
-                        )
-                    yield Yield(attr, identity=True)
-    return run_gen_step_iter(gen_step, async_=async_)
+                    yield Yield(attr, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -816,13 +833,13 @@ def iter_subtitle_batches(
                 check_response(resp)
                 yield YieldFrom(
                     filter(lambda info: "file_id" in info, resp["data"]["list"]),
-                    identity=True,
+                    may_await=False,
                 )
             except (StopIteration, StopAsyncIteration):
                 pass
             finally:
                 yield client.fs_delete(scid, async_=async_, **request_kwargs)
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -1063,7 +1080,7 @@ def make_strm(
         if use_abspath is not None:
             params["path_already"] = path_already
         yield (async_batch if async_ else thread_batch)(
-            lambda attr: run_gen_step(save(attr), async_=async_),
+            lambda attr: run_gen_step(save(attr), simple=True, async_=async_),
             (iter_files if use_abspath is None else iter_files_with_path)(
                 client,
                 cid,
@@ -1100,7 +1117,7 @@ def make_strm(
             "ignore": ignored,
             "remove": removed,
         }
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -1168,7 +1185,7 @@ def iter_download_nodes(
             resp = yield get_nodes(payload)
             check_response(resp)
             data = resp["data"]
-            yield YieldFrom(data["list"], identity=True)
+            yield YieldFrom(data["list"], may_await=False)
             if not data["has_next_page"]:
                 break
             else:
@@ -1247,11 +1264,11 @@ def iter_download_nodes(
                     break
                 elif isinstance(ls, BaseException):
                     raise ls
                yield YieldFrom(ls, identity=True)
-                yield YieldFrom(ls, identity=True)
+                yield YieldFrom(ls, may_await=False)
         finally:
             yield shutdown
     if pickcode:
-        return run_gen_step_iter(gen_step(pickcode), async_=async_)
+        return run_gen_step_iter(gen_step(pickcode), simple=True, async_=async_)
     else:
         def chain():
             with with_iter_next(iterdir(
@@ -1268,13 +1285,13 @@ def iter_download_nodes(
                 if not files:
                     yield Yield(
                         {"fid": str(attr["id"]), "pid": "0", "fn": attr["name"]},
-                        identity=True,
+                        may_await=False,
                     )
                 yield YieldFrom(
-                    run_gen_step_iter(gen_step(attr["pickcode"]), async_=async_),
-                    identity=True,
+                    run_gen_step_iter(gen_step(attr["pickcode"]), simple=True, async_=async_),
+                    may_await=False,
                 )
-    return run_gen_step_iter(chain, async_=async_)
+    return run_gen_step_iter(chain, simple=True, async_=async_)
 
 
 @overload
@@ -1419,11 +1436,11 @@ def iter_download_files(
                     "pickcode": attr["pickcode"],
                     "size": attr["size"],
                     **defaults,
-                }, identity=True)
+                }, may_await=False)
             for pickcode in pickcodes:
                 yield YieldFrom(
-                    run_gen_step_iter(gen_step(pickcode), async_=async_),
-                    identity=True,
+                    run_gen_step_iter(gen_step(pickcode), simple=True, async_=async_),
+                    may_await=False,
                 )
             return
         if not pickcode:
@@ -1461,7 +1478,7 @@ def iter_download_files(
             finally:
                 ancestors_loaded = True
         if async_:
-            task: Any = create_task(run_gen_step(load_ancestors, async_=True))
+            task: Any = create_task(run_gen_step(load_ancestors, simple=True, async_=True))
         else:
             task = run_as_thread(run_gen_step, load_ancestors)
         cache: list[dict] = []
@@ -1478,9 +1495,9 @@ def iter_download_files(
             while True:
                 info = yield get_next
                 if ancestors_loaded is None:
-                    yield Yield(norm_attr(info), identity=True)
+                    yield Yield(norm_attr(info), may_await=False)
                 elif ancestors_loaded:
-                    yield YieldFrom(map(norm_attr, cache), identity=True)
+                    yield YieldFrom(map(norm_attr, cache), may_await=False)
                     cache.clear()
                     if async_:
                         yield task
@@ -1494,8 +1511,8 @@ def iter_download_files(
             yield task
         else:
             task.result()
-        yield YieldFrom(map(norm_attr, cache), identity=True)
-    return run_gen_step_iter(gen_step, async_=async_)
+        yield YieldFrom(map(norm_attr, cache), may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -1571,5 +1588,5 @@ def get_remaining_open_count(
         for f in cache:
             f.close()
         return len(cache)
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
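Note on the recurring change in this file: the block added near the top of iter_files_with_url, iter_images_with_url and iter_subtitles_with_url replaces the per-call partial(client.download_url, ...) constructions with a URL getter that is picked once per invocation. Below is a minimal standalone sketch of that selection logic, mirroring the added lines above; the helper name pick_url_getter is hypothetical and not part of the package, while P115Client and P115URL are the classes exported by p115client.

from functools import partial
from typing import Callable

from p115client import P115Client, P115URL


def pick_url_getter(client, app: str = "") -> Callable[..., P115URL]:
    # hypothetical free-standing version of the dispatch added in 0.0.5.11
    if not isinstance(client, P115Client) or app == "open":
        # open-platform clients, or an explicit app="open", use the open API
        return client.download_url_open
    elif app in ("", "web", "desktop", "harmony"):
        # the web family keeps the default download_url endpoint
        return client.download_url
    else:
        # any other app name is bound onto download_url up front
        return partial(client.download_url, app=app)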
 
p115client/tool/edit.py CHANGED
@@ -74,7 +74,7 @@ def update_abstract(
     if max_workers is None or max_workers <= 0:
         max_workers = 20 if async_ else None
     def gen_step():
-        setter = partial(getattr(client, method), async_=async_, **request_kwargs)
+        setter = getattr(client, method)(async_=async_, **request_kwargs)
         def call(batch, /):
             return check_response(setter(batch, value))
         if max_workers == 1:
@@ -91,7 +91,7 @@ def update_abstract(
             chunked(ids, batch_size),
             max_workers=max_workers
         ))
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -634,5 +634,5 @@ def batch_unstar(
         async_=async_, # type: ignore
         **request_kwargs,
     )
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
@@ -95,7 +95,7 @@ def parse_export_dir_as_dict_iter(
                 stack[depth]["name"] += "\n" + line[:-1]
                 continue
             else:
-                yield Yield(stack[depth], identity=True)
+                yield Yield(stack[depth], may_await=False)
             name = m[1]
             depth = (len(line) - len(name)) // 2 - 1
             item = {
@@ -110,7 +110,7 @@ def parse_export_dir_as_dict_iter(
                 push(item)
         except (StopIteration, StopAsyncIteration):
             if depth:
-                yield Yield(stack[depth], identity=True)
+                yield Yield(stack[depth], may_await=False)
         finally:
             if close_file:
                 if async_:
@@ -120,7 +120,7 @@ def parse_export_dir_as_dict_iter(
                         yield ensure_async(close, threaded=True)
                 elif callable(close := getattr(file, "close", None)):
                     close()
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -206,7 +206,7 @@ def parse_export_dir_as_path_iter(
                 stack[depth] += "\n" + line[:-1]
                 continue
             elif depth:
-                yield Yield(stack[depth], identity=True)
+                yield Yield(stack[depth], may_await=False)
             else:
                 yield "/" if root == "根目录" else root
             name = m[1]
@@ -220,7 +220,7 @@ def parse_export_dir_as_path_iter(
                 push(path)
         except (StopIteration, StopAsyncIteration):
             if depth:
-                yield Yield(stack[depth], identity=True)
+                yield Yield(stack[depth], may_await=False)
         finally:
             if close_file:
                 if async_:
@@ -230,7 +230,7 @@ def parse_export_dir_as_path_iter(
                         yield ensure_async(close, threaded=True)
                 elif callable(close := getattr(file, "close", None)):
                     close()
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -298,7 +298,7 @@ def parse_export_dir_as_patht_iter(
                 stack[depth] += "\n" + line[:-1]
                 continue
             else:
-                yield Yield(stack[:depth+1], identity=True)
+                yield Yield(stack[:depth+1], may_await=False)
             name = m[1]
             depth = (len(line) - len(name)) // 2 - from_top_root
             try:
@@ -307,7 +307,7 @@ def parse_export_dir_as_patht_iter(
                 push(name)
         except (StopIteration, StopAsyncIteration):
             if depth:
-                yield Yield(stack[:depth+1], identity=True)
+                yield Yield(stack[:depth+1], may_await=False)
         finally:
             if close_file:
                 if async_:
@@ -317,7 +317,7 @@ def parse_export_dir_as_patht_iter(
                         yield ensure_async(close, threaded=True)
                 elif callable(close := getattr(file, "close", None)):
                     close()
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -406,7 +406,7 @@ def export_dir(
             payload["layer_limit"] = layer_limit
         resp = yield client.fs_export_dir(payload, async_=async_, **request_kwargs)
         return check_response(resp)["data"]["export_id"]
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -486,7 +486,7 @@ def export_dir_result(
                 raise TimeoutError(export_id)
             if check_interval:
                 yield do_sleep(min(check_interval, remaining_seconds))
-    return run_gen_step(gen_step, async_=async_)
+    return run_gen_step(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -566,8 +566,9 @@ def export_dir_parse_iter(
         parse_iter = partial(parse_export_dir_as_path_iter, async_=True)
     else:
         parse_iter = parse_export_dir_as_path_iter
+    get_url = client.download_url
     def gen_step():
-        nonlocal export_id
+        nonlocal export_id, delete
         if not export_id:
             export_id = yield export_dir(
                 client,
@@ -589,7 +590,7 @@ def export_dir_parse_iter(
             )
         else:
             result = yield context(
-                lambda *a: export_dir_result(
+                lambda *_: export_dir_result(
                     client,
                     export_id,
                     timeout=timeout,
@@ -610,16 +611,14 @@ def export_dir_parse_iter(
             delete = False
         try:
             try:
-                url: str = yield partial(
-                    client.download_url,
+                url: str = yield get_url(
                     pickcode,
                     use_web_api=True,
                     async_=async_,
                     **request_kwargs,
                 )
             except OSError:
-                url = yield partial(
-                    client.download_url,
+                url = yield get_url(
                     pickcode,
                     async_=async_,
                     **request_kwargs,
@@ -630,7 +629,7 @@ def export_dir_parse_iter(
                 file_wrapper: IO = AsyncTextIOWrapper(AsyncBufferedReader(file), encoding="utf-16", newline="\n")
             else:
                 file_wrapper = TextIOWrapper(BufferedReader(file), encoding="utf-16", newline="\n")
-            yield YieldFrom(parse_iter(file_wrapper), identity=True) # type: ignore
+            yield YieldFrom(parse_iter(file_wrapper), may_await=False) # type: ignore
         finally:
             if async_:
                 if callable(aclose := getattr(file, "aclose", None)):
@@ -646,5 +645,5 @@ def export_dir_parse_iter(
             async_=async_, # type: ignore
             **request_kwargs,
         )
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
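
The other change repeated in every hunk above is mechanical: Yield(...) and YieldFrom(...) now take may_await=False where they previously took identity=True, and run_gen_step / run_gen_step_iter are called with simple=True. A minimal sketch of the new calling convention, assuming the helpers are importable from the iterutils module (as in p115client's own sources) and that a matching version of that dependency is installed; the generator below is illustrative only:

from iterutils import Yield, run_gen_step_iter


def iter_squares(n: int, async_: bool = False):
    # generator-step style used throughout p115client: one gen_step drives
    # both the sync and the async code path
    def gen_step():
        for i in range(n):
            # 0.0.5.11 style: may_await=False (previously identity=True)
            yield Yield(i * i, may_await=False)
    # 0.0.5.11 style: simple=True is now passed alongside async_
    return run_gen_step_iter(gen_step, simple=True, async_=async_)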
 
@@ -141,20 +141,17 @@ def iter_fs_files(
             warn(message, category=P115Warning)
             count = count_new
         if callback is not None:
-            if async_:
-                resp["callback"] = yield partial(callback, resp)
-            else:
-                resp["callback"] = callback(resp)
+            resp["callback"] = yield callback(resp)
         return resp
     def gen_step():
         while True:
-            resp = yield run_gen_step(get_files(payload), async_=async_)
+            resp = yield run_gen_step(get_files(payload), simple=True, async_=async_)
             payload["limit"] = page_size
-            yield Yield(resp, identity=True)
+            yield Yield(resp, may_await=False)
             payload["offset"] += len(resp["data"])
             if payload["offset"] >= count:
                 break
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 def iter_fs_files_threaded(
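
In iter_fs_files the callback handling above no longer branches on async_: the callback's return value is yielded back to the step runner, so an awaitable returned by an async callback is presumably awaited by the same machinery that drives the rest of the step. A hedged usage sketch; the module path p115client.tool.fs_files, the cookie file name and the plain-int payload are assumptions for illustration, not part of this diff:

from p115client import P115Client
from p115client.tool.fs_files import iter_fs_files  # module path assumed


def on_page(resp: dict) -> int:
    # plain synchronous callback; its return value is stored in resp["callback"]
    return len(resp.get("data", ()))


client = P115Client(open("115-cookies.txt", encoding="latin-1").read())
for resp in iter_fs_files(client, 0, callback=on_page):
    # each per-page response already carries the callback's result
    print(resp["callback"], "entries in this page")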
@@ -105,7 +105,7 @@ def iter_history_list_once(
             if from_id and event_id <= from_id or from_time and int(event["update_time"]) < from_time:
                 return
             if event_id not in seen:
-                yield Yield(event, identity=True)
+                yield Yield(event, may_await=False)
                 seen_add(event_id)
         offset += len(events)
         if offset >= int(resp["data"]["total"]):
@@ -119,7 +119,7 @@ def iter_history_list_once(
             ts_last_call = time()
             resp = yield history_list(payload, async_=async_)
             events = check_response(resp)["data"]["list"]
-    return run_gen_step_iter(gen_step, async_=async_)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 
 
 @overload
@@ -219,6 +219,6 @@ def iter_history_list(
                 from_id = int(event["id"])
                 from_time = int(event["update_time"])
                 sub_first_loop = False
-            yield Yield(event, identity=True)
-    return run_gen_step_iter(gen_step, async_=async_)
+            yield Yield(event, may_await=False)
+    return run_gen_step_iter(gen_step, simple=True, async_=async_)
 