videoconverter-worker 1.0.5__tar.gz → 1.0.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: videoconverter-worker
-Version: 1.0.5
+Version: 1.0.7
 Summary: VideoConverter Python Worker: reads tasks from the queue directory and runs splitting / subtitle removal / composition
 License: MIT
 Keywords: videoconverter,ffmpeg,worker,video
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "videoconverter-worker"
-version = "1.0.5"
+version = "1.0.7"
 description = "VideoConverter Python Worker: reads tasks from the queue directory and runs splitting / subtitle removal / composition"
 readme = "README.txt"
 requires-python = ">=3.8"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: videoconverter-worker
-Version: 1.0.5
+Version: 1.0.7
 Summary: VideoConverter Python Worker: reads tasks from the queue directory and runs splitting / subtitle removal / composition
 License: MIT
 Keywords: videoconverter,ffmpeg,worker,video
@@ -31,13 +31,26 @@ logging.basicConfig(
 logger = logging.getLogger("worker")
 
 
+def _cpu_ram_str() -> str:
+    """Return ' CPU 12% RAM 34%' when psutil is installed, otherwise an empty string. Adds no hard dependency."""
+    try:
+        import psutil
+        cpu = psutil.cpu_percent(interval=None)
+        mem = psutil.virtual_memory().percent
+        return f" CPU {cpu:.0f}% RAM {mem:.0f}%"
+    except Exception:
+        return ""
+
+
 def _desub_spinner(stop_event: threading.Event) -> None:
-    """Single-line spinner during the subtitle-removal stage so the user does not think the process hung."""
+    """Single-line spinner during the subtitle-removal stage so the user does not think the process hung. TTY only, refreshed once per second; shows CPU/RAM usage when psutil is installed."""
     i = 0
-    while not stop_event.wait(0.25):
+    while not stop_event.wait(1.0):
         dots = "." * ((i % 3) + 1)
         try:
-            sys.stderr.write("\r [INFO] worker: 去字幕中 " + dots + " ")
+            ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+            extra = _cpu_ram_str()
+            sys.stderr.write(f"\r{ts} [INFO] worker: 去字幕中 {dots}{extra} ")
             sys.stderr.flush()
         except (OSError, UnicodeEncodeError):
             break
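
Read together with the second hunk below, the change amounts to a simple pattern: probe psutil if it happens to be installed, and redraw a single stderr line once per second until the caller sets a stop event. The following is a minimal self-contained sketch of that pattern, not the package's code; the generic names and the demo driver at the bottom are illustrative only.

import sys
import threading
import time


def cpu_ram_str() -> str:
    # Optional psutil stats; returning "" when psutil is absent keeps it a soft dependency.
    try:
        import psutil
        return f" CPU {psutil.cpu_percent(interval=None):.0f}% RAM {psutil.virtual_memory().percent:.0f}%"
    except Exception:
        return ""


def spinner(stop: threading.Event) -> None:
    i = 0
    # wait(1.0) doubles as the refresh interval and the stop check.
    while not stop.wait(1.0):
        dots = "." * ((i % 3) + 1)
        ts = time.strftime("%Y-%m-%d %H:%M:%S")
        sys.stderr.write(f"\r{ts} [INFO] working {dots}{cpu_ram_str()}   ")
        sys.stderr.flush()
        i += 1


if __name__ == "__main__":
    stop = threading.Event()
    t = threading.Thread(target=spinner, args=(stop,), daemon=True)
    t.start()
    time.sleep(3)  # stand-in for the real work
    stop.set()
    t.join(timeout=1.5)
    sys.stderr.write("\n")
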
@@ -352,11 +365,23 @@ def run_simple_compose(
     chunk_list = [c for c in (metadata.get("chunks") or []) if c.get("originalPath")]
     logger.info("切分完成: %d 块,开始去字幕", len(chunk_list))
 
+    use_spinner = getattr(sys.stderr, "isatty", lambda: False)()  # run the spinner only on a real terminal, to avoid flooding the log on Colab or in the background
     stop_spinner = threading.Event()
     spinner = threading.Thread(target=_desub_spinner, args=(stop_spinner,), daemon=True)
-    spinner.start()
+
+    def _stop_spinner_newline():
+        stop_spinner.set()
+        spinner.join(timeout=1.5)
+        try:
+            sys.stderr.write("\n")
+            sys.stderr.flush()
+        except (OSError, UnicodeEncodeError):
+            pass
+
+    if use_spinner:
+        spinner.start()
     try:
-        for ch in chunk_list:
+        for i, ch in enumerate(chunk_list):
             rel = ch.get("originalPath", "")
             if not rel:
                 continue
@@ -374,17 +399,18 @@ def run_simple_compose(
             cfg["endTime"] = 0
             cfg["forceKeyframeAtStart"] = True
             run_desubtitle(cfg, str(chunk_path), str(output_file))
+            if use_spinner:
+                _stop_spinner_newline()
             meta_path = Path(output_dir) / video_id / "metadata.json"
             if meta_path.exists():
                 update_chunk_processed(str(meta_path), chunk_id, str(output_file))
+            if use_spinner and i + 1 < len(chunk_list):
+                stop_spinner = threading.Event()
+                spinner = threading.Thread(target=_desub_spinner, args=(stop_spinner,), daemon=True)
+                spinner.start()
     finally:
-        stop_spinner.set()
-        spinner.join(timeout=1.0)
-        try:
-            sys.stderr.write("\n")
-            sys.stderr.flush()
-        except (OSError, UnicodeEncodeError):
-            pass
+        if use_spinner:
+            _stop_spinner_newline()
 
     data = load_metadata(str(Path(output_dir) / video_id / "metadata.json"))
     processed = get_processed_chunks(data)
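
One detail worth noting in this hunk: a threading.Thread can only be started once, which is why the loop builds a fresh Event and Thread for each remaining chunk rather than restarting the old spinner. Below is a reduced sketch of that lifecycle under the same assumptions, with a hypothetical do_work() standing in for run_desubtitle(); it is an illustration, not the package's implementation.

import sys
import threading
import time


def spinner(stop: threading.Event) -> None:
    # Minimal stand-in for _desub_spinner: redraw one stderr line until stopped.
    while not stop.wait(1.0):
        sys.stderr.write("\rworking ...")
        sys.stderr.flush()


def do_work(chunk: str) -> None:
    time.sleep(2)  # placeholder for the real per-chunk processing


def process_chunks(chunks: list) -> None:
    use_spinner = getattr(sys.stderr, "isatty", lambda: False)()  # real terminal only
    stop = threading.Event()
    thread = threading.Thread(target=spinner, args=(stop,), daemon=True)

    def stop_with_newline() -> None:
        stop.set()
        thread.join(timeout=1.5)
        sys.stderr.write("\n")
        sys.stderr.flush()

    if use_spinner:
        thread.start()
    try:
        for i, chunk in enumerate(chunks):
            do_work(chunk)
            if use_spinner:
                stop_with_newline()  # finish this chunk's in-place line
            if use_spinner and i + 1 < len(chunks):
                # A Thread cannot be restarted, so build a fresh Event + Thread per chunk.
                stop = threading.Event()
                thread = threading.Thread(target=spinner, args=(stop,), daemon=True)
                thread.start()
    finally:
        if use_spinner:
            stop_with_newline()  # also covers an exception raised mid-chunk


if __name__ == "__main__":
    process_chunks(["chunk_000", "chunk_001"])
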