p115client 0.0.5.11__py3-none-any.whl → 0.0.5.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -145,7 +145,7 @@ def batch_get_url(
  for id, info in resp["data"].items()
  if info["url"]
  }
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)


  @overload
@@ -227,8 +227,8 @@ def iter_url_batches(
  sha1=info["sha1"],
  is_directory=False,
  headers=headers,
- ), may_await=False)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ ))
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -376,8 +376,7 @@ def iter_files_with_url(
  app=app,
  user_agent=user_agent,
  **params, # type: ignore
- ),
- may_await=False,
+ )
  )
  return
  if headers := request_kwargs.get("headers"):
@@ -386,7 +385,7 @@ def iter_files_with_url(
  request_kwargs["headers"] = {"user-agent": user_agent}
  with with_iter_next(it) as get_next:
  while True:
- attr = yield get_next
+ attr = yield get_next()
  if attr.get("violated", False):
  if attr["size"] < 1024 * 1024 * 115:
  attr["url"] = yield get_url(
@@ -403,8 +402,8 @@ def iter_files_with_url(
  async_=async_,
  **request_kwargs,
  )
- yield Yield(attr, may_await=False)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ yield Yield(attr)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -536,13 +535,12 @@ def iter_images_with_url(
  suffixes=suffix,
  app=app,
  **params, # type: ignore
- ),
- may_await=False,
+ )
  )
  return
  with with_iter_next(it) as get_next:
  while True:
- attr = yield get_next
+ attr = yield get_next()
  try:
  attr["url"] = reduce_image_url_layers(attr["thumb"])
  except KeyError:
@@ -562,8 +560,8 @@ def iter_images_with_url(
  async_=async_,
  **request_kwargs,
  )
- yield Yield(attr, may_await=False)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ yield Yield(attr)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -689,7 +687,7 @@ def iter_subtitles_with_url(
  do_next = anext if async_ else next
  with with_iter_next(it) as get_next:
  while True:
- items: tuple[dict] = yield get_next
+ items: tuple[dict] = yield get_next()
  resp = yield client.fs_mkdir(
  f"subtitle-{uuid4()}",
  async_=async_,
@@ -728,7 +726,7 @@ def iter_subtitles_with_url(
  if subtitles:
  for attr in items:
  attr["url"] = subtitles[attr["sha1"]]
- yield Yield(attr, may_await=False)
+ yield Yield(attr)
  else:
  for attr in items:
  if attr.get("violated", False):
@@ -747,8 +745,8 @@ def iter_subtitles_with_url(
  async_=async_,
  **request_kwargs,
  )
- yield Yield(attr, may_await=False)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ yield Yield(attr)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -832,14 +830,13 @@ def iter_subtitle_batches(
  )
  check_response(resp)
  yield YieldFrom(
- filter(lambda info: "file_id" in info, resp["data"]["list"]),
- may_await=False,
+ filter(lambda info: "file_id" in info, resp["data"]["list"])
  )
  except (StopIteration, StopAsyncIteration):
  pass
  finally:
  yield client.fs_delete(scid, async_=async_, **request_kwargs)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)

  @overload
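
Every hunk in this diff, both above and below, applies the same mechanical migration to the generator-step helpers: run_gen_step and run_gen_step_iter now take may_call=False where simple=True was passed before, values pulled inside a with_iter_next block are obtained by yielding the call (yield get_next()) rather than the bare callable, and Yield/YieldFrom lose their may_await keyword. A minimal sketch of a function written in the new style follows; the function and its parameters are made up for illustration, and the import line is an assumption since no imports appear in this diff:

# Hedged sketch of the post-0.0.5.11.2 calling convention; iter_large and
# min_size are hypothetical, and the iterutils import path is assumed.
from iterutils import Yield, run_gen_step_iter, with_iter_next

def iter_large(it, min_size: int = 1024 * 1024, *, async_: bool = False):
    # `it` is any sync or async iterator of attribute dicts.
    def gen_step():
        with with_iter_next(it) as get_next:
            while True:
                attr = yield get_next()   # yield the call, not the callable
                if attr.get("size", 0) >= min_size:
                    yield Yield(attr)     # no may_await= keyword anymore
    # may_call=False replaces the former simple=True
    return run_gen_step_iter(gen_step, may_call=False, async_=async_)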
@@ -1080,7 +1077,7 @@ def make_strm(
  if use_abspath is not None:
  params["path_already"] = path_already
  yield (async_batch if async_ else thread_batch)(
- lambda attr: run_gen_step(save(attr), simple=True, async_=async_),
+ lambda attr: run_gen_step(save(attr), may_call=False, async_=async_),
  (iter_files if use_abspath is None else iter_files_with_path)(
  client,
  cid,
@@ -1117,7 +1114,7 @@ def make_strm(
  "ignore": ignored,
  "remove": removed,
  }
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)


  @overload
@@ -1185,7 +1182,7 @@ def iter_download_nodes(
  resp = yield get_nodes(payload)
  check_response(resp)
  data = resp["data"]
- yield YieldFrom(data["list"], may_await=False)
+ yield YieldFrom(data["list"])
  if not data["has_next_page"]:
  break
  else:
@@ -1259,16 +1256,16 @@ def iter_download_nodes(
  for i in range(n):
  submit(run_gen_step, request(pickcode), async_=async_).add_done_callback(countdown)
  while True:
- ls = yield get
+ ls = yield get()
  if ls is sentinel:
  break
  elif isinstance(ls, BaseException):
  raise ls
- yield YieldFrom(ls, may_await=False)
+ yield YieldFrom(ls)
  finally:
- yield shutdown
+ yield shutdown()
  if pickcode:
- return run_gen_step_iter(gen_step(pickcode), simple=True, async_=async_)
+ return run_gen_step_iter(gen_step(pickcode), may_call=False, async_=async_)
  else:
  def chain():
  with with_iter_next(iterdir(
@@ -1281,17 +1278,17 @@ def iter_download_nodes(
  **request_kwargs,
  )) as get_next:
  while True:
- attr = yield get_next
+ attr = yield get_next()
  if not files:
  yield Yield(
- {"fid": str(attr["id"]), "pid": "0", "fn": attr["name"]},
- may_await=False,
+ {"fid": str(attr["id"]), "pid": "0", "fn": attr["name"]}
  )
- yield YieldFrom(
- run_gen_step_iter(gen_step(attr["pickcode"]), simple=True, async_=async_),
- may_await=False,
- )
- return run_gen_step_iter(chain, simple=True, async_=async_)
+ yield YieldFrom(run_gen_step_iter(
+ gen_step(attr["pickcode"]),
+ may_call=False,
+ async_=async_,
+ ))
+ return run_gen_step_iter(chain, may_call=False, async_=async_)


  @overload
@@ -1427,7 +1424,7 @@ def iter_download_files(
  **request_kwargs,
  )) as get_next:
  while True:
- attr = yield get_next
+ attr = yield get_next()
  if attr["is_dir"]:
  pickcodes.append(attr["pickcode"])
  else:
@@ -1436,12 +1433,13 @@ def iter_download_files(
  "pickcode": attr["pickcode"],
  "size": attr["size"],
  **defaults,
- }, may_await=False)
+ })
  for pickcode in pickcodes:
- yield YieldFrom(
- run_gen_step_iter(gen_step(pickcode), simple=True, async_=async_),
- may_await=False,
- )
+ yield YieldFrom(run_gen_step_iter(
+ gen_step(pickcode),
+ may_call=False,
+ async_=async_,
+ ))
  return
  if not pickcode:
  resp = yield client.fs_file_skim(cid, async_=async_, **request_kwargs)
@@ -1473,12 +1471,12 @@ def iter_download_files(
  **request_kwargs,
  )) as get_next:
  while True:
- info = yield get_next
+ info = yield get_next()
  id_to_dirnode[int(info["fid"])] = DirNode(info["fn"], int(info["pid"]))
  finally:
  ancestors_loaded = True
  if async_:
- task: Any = create_task(run_gen_step(load_ancestors, simple=True, async_=True))
+ task: Any = create_task(run_gen_step(load_ancestors, may_call=False, async_=True))
  else:
  task = run_as_thread(run_gen_step, load_ancestors)
  cache: list[dict] = []
@@ -1493,11 +1491,11 @@ def iter_download_files(
  **request_kwargs,
  )) as get_next:
  while True:
- info = yield get_next
+ info = yield get_next()
  if ancestors_loaded is None:
- yield Yield(norm_attr(info), may_await=False)
+ yield Yield(norm_attr(info))
  elif ancestors_loaded:
- yield YieldFrom(map(norm_attr, cache), may_await=False)
+ yield YieldFrom(map(norm_attr, cache))
  cache.clear()
  if async_:
  yield task
@@ -1511,8 +1509,8 @@ def iter_download_files(
  yield task
  else:
  task.result()
- yield YieldFrom(map(norm_attr, cache), may_await=False)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ yield YieldFrom(map(norm_attr, cache))
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -1571,7 +1569,7 @@ def get_remaining_open_count(
  **request_kwargs,
  )) as get_next:
  while True:
- info = yield get_next
+ info = yield get_next()
  if int(info["fs"]) <= 1024 * 1024 * 200:
  continue
  try:
@@ -1588,5 +1586,5 @@ def get_remaining_open_count(
  for f in cache:
  f.close()
  return len(cache)
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)
 
p115client/tool/edit.py CHANGED
@@ -91,7 +91,7 @@ def update_abstract(
  chunked(ids, batch_size),
  max_workers=max_workers
  ))
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)


  @overload
@@ -634,5 +634,5 @@ def batch_unstar(
  async_=async_, # type: ignore
  **request_kwargs,
  )
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)
 
@@ -95,7 +95,7 @@ def parse_export_dir_as_dict_iter(
  stack[depth]["name"] += "\n" + line[:-1]
  continue
  else:
- yield Yield(stack[depth], may_await=False)
+ yield Yield(stack[depth])
  name = m[1]
  depth = (len(line) - len(name)) // 2 - 1
  item = {
@@ -110,17 +110,17 @@ def parse_export_dir_as_dict_iter(
  push(item)
  except (StopIteration, StopAsyncIteration):
  if depth:
- yield Yield(stack[depth], may_await=False)
+ yield Yield(stack[depth])
  finally:
  if close_file:
  if async_:
  if callable(aclose := getattr(file, "aclose", None)):
- yield aclose
+ yield aclose()
  elif callable(close := getattr(file, "close", None)):
  yield ensure_async(close, threaded=True)
  elif callable(close := getattr(file, "close", None)):
  close()
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -206,7 +206,7 @@ def parse_export_dir_as_path_iter(
  stack[depth] += "\n" + line[:-1]
  continue
  elif depth:
- yield Yield(stack[depth], may_await=False)
+ yield Yield(stack[depth])
  else:
  yield "/" if root == "根目录" else root
  name = m[1]
@@ -220,17 +220,17 @@ def parse_export_dir_as_path_iter(
  push(path)
  except (StopIteration, StopAsyncIteration):
  if depth:
- yield Yield(stack[depth], may_await=False)
+ yield Yield(stack[depth])
  finally:
  if close_file:
  if async_:
  if callable(aclose := getattr(file, "aclose", None)):
- yield aclose
+ yield aclose()
  elif callable(close := getattr(file, "close", None)):
  yield ensure_async(close, threaded=True)
  elif callable(close := getattr(file, "close", None)):
  close()
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -298,7 +298,7 @@ def parse_export_dir_as_patht_iter(
  stack[depth] += "\n" + line[:-1]
  continue
  else:
- yield Yield(stack[:depth+1], may_await=False)
+ yield Yield(stack[:depth+1])
  name = m[1]
  depth = (len(line) - len(name)) // 2 - from_top_root
  try:
@@ -307,17 +307,17 @@ def parse_export_dir_as_patht_iter(
  push(name)
  except (StopIteration, StopAsyncIteration):
  if depth:
- yield Yield(stack[:depth+1], may_await=False)
+ yield Yield(stack[:depth+1])
  finally:
  if close_file:
  if async_:
  if callable(aclose := getattr(file, "aclose", None)):
- yield aclose
+ yield aclose()
  elif callable(close := getattr(file, "close", None)):
  yield ensure_async(close, threaded=True)
  elif callable(close := getattr(file, "close", None)):
  close()
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)

  @overload
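
For context, the parse_export_dir_as_*_iter helpers changed above consume a 115 directory-tree export file (opened as UTF-16 text further down in export_dir_parse_iter) and yield one parsed entry per record. A hedged usage sketch for the path variant follows; the module path, argument order, and file name are assumptions, since none of them appear in this diff:

# Hypothetical usage of the sync form; with async_=True the same call would
# return an async iterator instead.
from p115client.tool.export_dir import parse_export_dir_as_path_iter  # assumed path

with open("export.txt", encoding="utf-16", newline="\n") as file:
    for path in parse_export_dir_as_path_iter(file):
        print(path)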
@@ -406,7 +406,7 @@ def export_dir(
  payload["layer_limit"] = layer_limit
  resp = yield client.fs_export_dir(payload, async_=async_, **request_kwargs)
  return check_response(resp)["data"]["export_id"]
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)


  @overload
@@ -486,7 +486,7 @@ def export_dir_result(
  raise TimeoutError(export_id)
  if check_interval:
  yield do_sleep(min(check_interval, remaining_seconds))
- return run_gen_step(gen_step, simple=True, async_=async_)
+ return run_gen_step(gen_step, may_call=False, async_=async_)

  @overload
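
The export_dir and export_dir_result hunks cover the submit-then-poll half of that workflow: export_dir returns the export_id reported by the API, and export_dir_result polls (sleeping check_interval between attempts) until the export is ready or a TimeoutError carrying that id is raised. A hedged sketch of chaining the two; the client setup and every argument except export_id are assumptions:

# Hypothetical usage; only the export_id return value and the TimeoutError
# behaviour come from the diff above.
from p115client import P115Client
from p115client.tool import export_dir, export_dir_result  # assumed re-exports

client = P115Client(open("115-cookies.txt").read())  # cookies source is an assumption
export_id = export_dir(client, 0)                    # 0: hypothetical directory id
result = export_dir_result(client, export_id)        # polls until the export completes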
@@ -629,11 +629,11 @@ def export_dir_parse_iter(
  file_wrapper: IO = AsyncTextIOWrapper(AsyncBufferedReader(file), encoding="utf-16", newline="\n")
  else:
  file_wrapper = TextIOWrapper(BufferedReader(file), encoding="utf-16", newline="\n")
- yield YieldFrom(parse_iter(file_wrapper), may_await=False) # type: ignore
+ yield YieldFrom(parse_iter(file_wrapper)) # type: ignore
  finally:
  if async_:
  if callable(aclose := getattr(file, "aclose", None)):
- yield aclose
+ yield aclose()
  elif callable(close := getattr(file, "close", None)):
  yield ensure_async(close, threaded=True)
  elif callable(close := getattr(file, "close", None)):
@@ -645,5 +645,5 @@ def export_dir_parse_iter(
  async_=async_, # type: ignore
  **request_kwargs,
  )
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)
 
@@ -145,13 +145,13 @@ def iter_fs_files(
  return resp
  def gen_step():
  while True:
- resp = yield run_gen_step(get_files(payload), simple=True, async_=async_)
+ resp = yield run_gen_step(get_files(payload), may_call=False, async_=async_)
  payload["limit"] = page_size
- yield Yield(resp, may_await=False)
+ yield Yield(resp)
  payload["offset"] += len(resp["data"])
  if payload["offset"] >= count:
  break
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)

  def iter_fs_files_threaded(
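
The iter_fs_files hunk shows what paging looks like under the new convention: each page is fetched through a nested run_gen_step call, handed to the consumer with Yield, and the offset advanced until the reported count is reached. A generic sketch of that loop; iter_pages, fetch_page, and count are hypothetical stand-ins for the diff's gen_step, get_files, and total, and the import path is assumed:

# Hedged sketch of the paging pattern; fetch_page(payload) must itself return
# a gen_step-style generator, mirroring get_files(payload) above, and payload
# is expected to carry an initial "offset".
from iterutils import Yield, run_gen_step, run_gen_step_iter

def iter_pages(fetch_page, payload: dict, count: int, page_size: int, *, async_: bool = False):
    def gen_step():
        while True:
            resp = yield run_gen_step(fetch_page(payload), may_call=False, async_=async_)
            payload["limit"] = page_size
            yield Yield(resp)
            payload["offset"] += len(resp["data"])
            if payload["offset"] >= count:
                break
    return run_gen_step_iter(gen_step, may_call=False, async_=async_)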
@@ -105,7 +105,7 @@ def iter_history_list_once(
  if from_id and event_id <= from_id or from_time and int(event["update_time"]) < from_time:
  return
  if event_id not in seen:
- yield Yield(event, may_await=False)
+ yield Yield(event)
  seen_add(event_id)
  offset += len(events)
  if offset >= int(resp["data"]["total"]):
@@ -119,7 +119,7 @@ def iter_history_list_once(
  ts_last_call = time()
  resp = yield history_list(payload, async_=async_)
  events = check_response(resp)["data"]["list"]
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)


  @overload
@@ -214,11 +214,11 @@ def iter_history_list(
  )) as get_next:
  sub_first_loop = True
  while True:
- event = yield get_next
+ event = yield get_next()
  if sub_first_loop:
  from_id = int(event["id"])
  from_time = int(event["update_time"])
  sub_first_loop = False
- yield Yield(event, may_await=False)
- return run_gen_step_iter(gen_step, simple=True, async_=async_)
+ yield Yield(event)
+ return run_gen_step_iter(gen_step, may_call=False, async_=async_)
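
All of the helpers touched in this release keep the same dual surface: the async_ flag threaded into run_gen_step_iter decides whether the caller gets a normal iterator or an async one. A hedged usage sketch for iter_history_list closes things out; the import path, client construction, and parameter list are assumptions, and only the event fields read below appear in the diff:

# Hypothetical usage; iter_history_list polls the 115 history feed and keeps
# yielding new events until interrupted.
from p115client import P115Client
from p115client.tool import iter_history_list  # assumed re-export

client = P115Client(open("115-cookies.txt").read())  # cookies source is an assumption

for event in iter_history_list(client):              # sync form
    print(event["id"], event["update_time"])

# Inside a coroutine, the same call with async_=True returns an async
# iterator, consumed with `async for` instead.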