mkv2cast-1.2.7.post4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mkv2cast/__init__.py +77 -0
- mkv2cast/__main__.py +14 -0
- mkv2cast/cli.py +1886 -0
- mkv2cast/config.py +638 -0
- mkv2cast/converter.py +1454 -0
- mkv2cast/history.py +389 -0
- mkv2cast/i18n.py +179 -0
- mkv2cast/integrity.py +176 -0
- mkv2cast/json_progress.py +311 -0
- mkv2cast/locales/de/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/de/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/locales/en/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/en/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/locales/es/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/es/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/locales/fr/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/fr/LC_MESSAGES/mkv2cast.po +430 -0
- mkv2cast/locales/it/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/it/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/notifications.py +196 -0
- mkv2cast/pipeline.py +641 -0
- mkv2cast/ui/__init__.py +26 -0
- mkv2cast/ui/legacy_ui.py +136 -0
- mkv2cast/ui/rich_ui.py +462 -0
- mkv2cast/ui/simple_rich.py +243 -0
- mkv2cast/watcher.py +293 -0
- mkv2cast-1.2.7.post4.dist-info/METADATA +1411 -0
- mkv2cast-1.2.7.post4.dist-info/RECORD +31 -0
- mkv2cast-1.2.7.post4.dist-info/WHEEL +4 -0
- mkv2cast-1.2.7.post4.dist-info/entry_points.txt +2 -0
- mkv2cast-1.2.7.post4.dist-info/licenses/LICENSE +50 -0
mkv2cast/cli.py
ADDED
@@ -0,0 +1,1886 @@
"""
Command-line interface for mkv2cast.

This is the main entry point for the application.
"""

import argparse
import datetime
import fnmatch
import os
import re
import shutil
import subprocess
import sys
import threading
import time
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

from mkv2cast import __author__, __license__, __url__, __version__
from mkv2cast.config import (
    TOML_AVAILABLE,
    Config,
    apply_config_to_args,
    get_app_dirs,
    load_config_file,
    save_default_config,
)
from mkv2cast.converter import (
    build_transcode_cmd,
    decide_for,
    have_encoder,
    pick_backend,
    probe_duration_ms,
    test_amf,
    test_nvenc,
)
from mkv2cast.history import SQLITE_AVAILABLE, HistoryDB, HistoryRecorder
from mkv2cast.i18n import _, setup_i18n
from mkv2cast.integrity import integrity_check as do_integrity_check
from mkv2cast.json_progress import JSONProgressOutput, parse_ffmpeg_progress_for_json
from mkv2cast.notifications import (
    check_notification_support,
    notify_interrupted,
    notify_partial,
    notify_success,
)
from mkv2cast.ui import RICH_AVAILABLE
from mkv2cast.ui.legacy_ui import LegacyProgressUI, fmt_hms

# Conditionally import Rich UI and Pipeline
if RICH_AVAILABLE:
    from mkv2cast.pipeline import PipelineOrchestrator
    from mkv2cast.ui.rich_ui import RichProgressUI
    from mkv2cast.ui.simple_rich import SimpleRichUI

# -------------------- GLOBAL STATE --------------------

# Global app directories (initialized in main)
APP_DIRS: Dict[str, Path] = {}

# Global history database (initialized in main)
HISTORY_DB: Optional[HistoryDB] = None

# Track all running ffmpeg processes for proper cleanup
_active_processes: List[subprocess.Popen] = []
_processes_lock = threading.Lock()


def register_process(proc: subprocess.Popen) -> None:
    """Register a process for tracking."""
    with _processes_lock:
        _active_processes.append(proc)


def unregister_process(proc: subprocess.Popen) -> None:
    """Unregister a process from tracking."""
    with _processes_lock:
        if proc in _active_processes:
            _active_processes.remove(proc)


def terminate_all_processes() -> None:
    """Terminate all active processes and wait for cleanup."""
    with _processes_lock:
        procs = list(_active_processes)

    if not procs:
        return

    print(f"\n🛑 {_('Stopping')} {len(procs)} {_('processes')}...", file=sys.stderr, flush=True)

    for proc in procs:
        try:
            if proc.poll() is None:
                proc.terminate()
        except Exception:
            pass

    time.sleep(0.5)

    for proc in procs:
        try:
            if proc.poll() is None:
                proc.kill()
        except Exception:
            pass

    for proc in procs:
        try:
            proc.wait(timeout=5)
        except Exception:
            pass

    for proc in procs:
        try:
            if proc.stdout:
                proc.stdout.close()
            if proc.stderr:
                proc.stderr.close()
        except Exception:
            pass

    with _processes_lock:
        _active_processes.clear()

    print(f"✓ {_('All processes stopped')}", file=sys.stderr, flush=True)

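# Illustrative sketch (not part of the original cli.py): the registry above
# is typically paired with a SIGINT handler so that Ctrl-C tears down every
# tracked ffmpeg child before the CLI exits. `_install_sigint_cleanup` is a
# hypothetical helper shown for clarity only.
def _install_sigint_cleanup() -> None:
    import signal

    def _on_sigint(signum, frame):  # signature dictated by the signal module
        terminate_all_processes()
        sys.exit(130)  # conventional exit status after SIGINT

    signal.signal(signal.SIGINT, _on_sigint)
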
# -------------------- ARGUMENT PARSING --------------------


def parse_args(args: Optional[List[str]] = None) -> Tuple[Config, Optional[Path]]:
    """Parse command-line arguments and return config + optional file path."""
    parser = argparse.ArgumentParser(
        description=_("Smart MKV -> Cast compatible converter with VAAPI/QSV hardware acceleration."),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s                              # Process all MKV files in current directory
  %(prog)s movie.mkv                    # Process single file
  %(prog)s --debug --dryrun             # Debug mode, show commands without running
  %(prog)s --hw cpu --preset fast       # Use CPU encoding with fast preset
  %(prog)s --no-pipeline                # Disable parallel processing
  %(prog)s --encode-workers 3           # Use 3 parallel encodes
  %(prog)s -I '*sample*' -I '*.eng.*'   # Ignore sample files and English tracks
  %(prog)s -i '*French*' -i '*2024*'    # Only process French 2024 files
  %(prog)s --show-dirs                  # Show config/cache/log directories
  %(prog)s --history                    # Show recent conversion history
  %(prog)s --lang fr                    # Force French language
""",
    )

    # Version
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version=f"%(prog)s {__version__}\nAuthor: {__author__}\nLicense: {__license__}\nURL: {__url__}",
    )

    # Positional argument
    parser.add_argument("file", nargs="?", help=_("Optional .mkv file to process"))

    # Output settings
    out_group = parser.add_argument_group(_("Output settings"))
    out_group.add_argument("--suffix", default=".cast", help=_("Output file suffix (default: .cast)"))
    out_group.add_argument("--container", choices=["mkv", "mp4"], default="mkv", help=_("Output container"))

    # Scan settings
    scan_group = parser.add_argument_group(_("Scan settings"))
    scan_group.add_argument("-r", "--recursive", action="store_true", default=True)
    scan_group.add_argument("--no-recursive", action="store_false", dest="recursive")
    scan_group.add_argument("--ignore-pattern", "-I", action="append", default=[], metavar="PATTERN")
    scan_group.add_argument("--ignore-path", action="append", default=[], metavar="PATH")
    scan_group.add_argument("--include-pattern", "-i", action="append", default=[], metavar="PATTERN")
    scan_group.add_argument("--include-path", action="append", default=[], metavar="PATH")

    # Watch mode
    watch_group = parser.add_argument_group(_("Watch mode"))
    watch_group.add_argument(
        "--watch",
        "-w",
        action="store_true",
        default=False,
        help=_("Watch directory for new MKV files and convert automatically"),
    )
    watch_group.add_argument(
        "--watch-interval",
        type=float,
        default=5.0,
        help=_("Polling interval in seconds for watch mode (default: 5)"),
    )

    # Debug/test
    debug_group = parser.add_argument_group(_("Debug/test"))
    debug_group.add_argument("-d", "--debug", action="store_true", help=_("Enable debug output"))
    debug_group.add_argument("-n", "--dryrun", action="store_true", help=_("Dry run"))
    debug_group.add_argument(
        "--json-progress",
        action="store_true",
        help=_("Output JSON progress for integration with other applications"),
    )

    # Codec decisions
    codec_group = parser.add_argument_group(_("Codec decisions"))
    codec_group.add_argument("--skip-when-ok", action="store_true", default=True)
    codec_group.add_argument("--no-skip-when-ok", action="store_false", dest="skip_when_ok")
    codec_group.add_argument("--force-h264", action="store_true")
    codec_group.add_argument("--allow-hevc", action="store_true")
    codec_group.add_argument("--force-aac", action="store_true")
    codec_group.add_argument("--keep-surround", action="store_true")
    codec_group.add_argument("--no-silence", action="store_false", dest="add_silence_if_no_audio")

    # Encoding quality
    quality_group = parser.add_argument_group(_("Encoding quality"))
    quality_group.add_argument("--abr", default="192k")
    quality_group.add_argument("--crf", type=int, default=20)
    quality_group.add_argument(
        "--preset",
        default="slow",
        choices=["ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow"],
    )

    # Profiles
    profile_group = parser.add_argument_group(_("Profiles"))
    profile_group.add_argument(
        "--profile",
        choices=["fast", "balanced", "quality"],
        default=None,
        help=_("Apply encoding profile (fast, balanced, quality)"),
    )

    # Hardware acceleration
    hw_group = parser.add_argument_group(_("Hardware acceleration"))
    hw_group.add_argument("--hw", choices=["auto", "nvenc", "amf", "qsv", "vaapi", "cpu"], default="auto")
    hw_group.add_argument("--vaapi-device", default="/dev/dri/renderD128")
    hw_group.add_argument("--vaapi-qp", type=int, default=23)
    hw_group.add_argument("--qsv-quality", type=int, default=23)
    hw_group.add_argument("--nvenc-cq", type=int, default=23, help=_("NVENC constant quality (0-51)"))
    hw_group.add_argument("--amf-quality", type=int, default=23, help=_("AMD AMF quality (0-51)"))

    # Audio track selection
    audio_group = parser.add_argument_group(_("Audio track selection"))
    audio_group.add_argument(
        "--audio-lang",
        type=str,
        default=None,
        help=_("Comma-separated language codes priority (e.g., fre,fra,fr,eng)"),
    )
    audio_group.add_argument("--audio-track", type=int, default=None, help=_("Explicit audio track index (0-based)"))

    # Subtitle selection
    subtitle_group = parser.add_argument_group(_("Subtitle selection"))
    subtitle_group.add_argument(
        "--subtitle-lang",
        type=str,
        default=None,
        help=_("Comma-separated subtitle language codes (e.g., fre,eng)"),
    )
    subtitle_group.add_argument(
        "--subtitle-track", type=int, default=None, help=_("Explicit subtitle track index (0-based)")
    )
    subtitle_group.add_argument(
        "--prefer-forced-subs",
        action="store_true",
        default=True,
        help=_("Prefer forced subtitles in audio language (default: True)"),
    )
    subtitle_group.add_argument(
        "--no-forced-subs",
        action="store_true",
        default=False,
        help=_("Don't prefer forced subtitles"),
    )
    subtitle_group.add_argument("--no-subtitles", action="store_true", default=False, help=_("Disable all subtitles"))

    # Preservation
    preserve_group = parser.add_argument_group(_("Preservation"))
    preserve_group.add_argument(
        "--no-metadata",
        action="store_false",
        dest="preserve_metadata",
        default=True,
        help=_("Disable metadata preservation"),
    )
    preserve_group.add_argument(
        "--no-chapters",
        action="store_false",
        dest="preserve_chapters",
        default=True,
        help=_("Disable chapter preservation"),
    )
    preserve_group.add_argument(
        "--no-attachments",
        action="store_false",
        dest="preserve_attachments",
        default=True,
        help=_("Disable MKV attachment preservation"),
    )

    # Integrity checks
    integrity_group = parser.add_argument_group(_("Integrity checks"))
    integrity_group.add_argument("--integrity-check", action="store_true", default=True)
    integrity_group.add_argument("--no-integrity-check", action="store_false", dest="integrity_check")
    integrity_group.add_argument("--stable-wait", type=int, default=3)
    integrity_group.add_argument("--deep-check", action="store_true")

    # Disk guards
    disk_group = parser.add_argument_group(_("Disk guards"))
    disk_group.add_argument(
        "--min-free-mb",
        type=int,
        default=1024,
        help=_("Minimum free space to keep in output filesystem (MB)"),
    )
    disk_group.add_argument(
        "--min-free-tmp-mb",
        type=int,
        default=512,
        help=_("Minimum free space to keep in temp filesystem (MB)"),
    )
    disk_group.add_argument(
        "--max-output-mb",
        type=int,
        default=0,
        help=_("Maximum output size per file (MB, 0 disables)"),
    )
    disk_group.add_argument(
        "--max-output-ratio",
        type=float,
        default=0.0,
        help=_("Maximum output size ratio vs input (0 disables)"),
    )

    # UI settings
    ui_group = parser.add_argument_group(_("UI settings"))
    ui_group.add_argument("--no-progress", action="store_false", dest="progress")
    ui_group.add_argument("--bar-width", type=int, default=26)
    ui_group.add_argument("--ui-refresh-ms", type=int, default=120)
    ui_group.add_argument("--stats-period", type=float, default=0.2)

    # Pipeline mode
    pipeline_group = parser.add_argument_group(_("Pipeline mode"))
    pipeline_group.add_argument("--pipeline", action="store_true", default=True)
    pipeline_group.add_argument("--no-pipeline", action="store_false", dest="pipeline")

    # Reliability
    retry_group = parser.add_argument_group(_("Reliability"))
    retry_group.add_argument(
        "--retry-attempts",
        type=int,
        default=1,
        help=_("Number of retries after failure (0 disables)"),
    )
    retry_group.add_argument(
        "--retry-delay",
        type=float,
        default=2.0,
        help=_("Delay between retries in seconds"),
    )
    retry_group.add_argument(
        "--no-retry-fallback-cpu",
        action="store_false",
        dest="retry_fallback_cpu",
        default=True,
        help=_("Disable CPU fallback on last retry"),
    )

    # Parallelism
    parallel_group = parser.add_argument_group(_("Parallelism"))
    parallel_group.add_argument("--encode-workers", type=int, default=0, metavar="N")
    parallel_group.add_argument("--integrity-workers", type=int, default=0, metavar="N")

    # Notifications (new)
    notify_group = parser.add_argument_group(_("Notifications"))
    notify_group.add_argument(
        "--notify", action="store_true", default=True, help=_("Send desktop notification when done (default: enabled)")
    )
    notify_group.add_argument(
        "--no-notify", action="store_false", dest="notify", help=_("Disable desktop notifications")
    )

    # Internationalization (new)
    i18n_group = parser.add_argument_group(_("Internationalization"))
    i18n_group.add_argument(
        "--lang", choices=["en", "fr", "es", "it", "de"], default=None, help=_("Force language (default: auto-detect)")
    )

    # Utility commands
    util_group = parser.add_argument_group(_("Utility commands"))
    util_group.add_argument("--show-dirs", action="store_true")
    util_group.add_argument("--history", nargs="?", const=20, type=int, metavar="N")
    util_group.add_argument("--history-stats", action="store_true")
    util_group.add_argument("--clean-tmp", action="store_true")
    util_group.add_argument("--clean-logs", type=int, metavar="DAYS")
    util_group.add_argument("--clean-history", type=int, metavar="DAYS")
    util_group.add_argument("--check-requirements", action="store_true")

    parsed_args = parser.parse_args(args)

    # Setup i18n if language specified
    if parsed_args.lang:
        setup_i18n(parsed_args.lang)

    # Build config
    cfg = Config(
        suffix=parsed_args.suffix,
        container=parsed_args.container,
        recursive=parsed_args.recursive,
        ignore_patterns=parsed_args.ignore_pattern or [],
        ignore_paths=parsed_args.ignore_path or [],
        include_patterns=parsed_args.include_pattern or [],
        include_paths=parsed_args.include_path or [],
        debug=parsed_args.debug,
        dryrun=parsed_args.dryrun,
        skip_when_ok=parsed_args.skip_when_ok,
        force_h264=parsed_args.force_h264,
        allow_hevc=parsed_args.allow_hevc,
        force_aac=parsed_args.force_aac,
        keep_surround=parsed_args.keep_surround,
        add_silence_if_no_audio=parsed_args.add_silence_if_no_audio,
        abr=parsed_args.abr,
        crf=parsed_args.crf,
        preset=parsed_args.preset,
        profile=parsed_args.profile,
        hw=parsed_args.hw,
        vaapi_device=parsed_args.vaapi_device,
        vaapi_qp=parsed_args.vaapi_qp,
        qsv_quality=parsed_args.qsv_quality,
        nvenc_cq=parsed_args.nvenc_cq,
        amf_quality=parsed_args.amf_quality,
        audio_lang=parsed_args.audio_lang,
        audio_track=parsed_args.audio_track,
        subtitle_lang=parsed_args.subtitle_lang,
        subtitle_track=parsed_args.subtitle_track,
        prefer_forced_subs=not parsed_args.no_forced_subs,
        no_subtitles=parsed_args.no_subtitles,
        preserve_metadata=parsed_args.preserve_metadata,
        preserve_chapters=parsed_args.preserve_chapters,
        preserve_attachments=parsed_args.preserve_attachments,
        integrity_check=parsed_args.integrity_check,
        stable_wait=parsed_args.stable_wait,
        deep_check=parsed_args.deep_check,
        disk_min_free_mb=parsed_args.min_free_mb,
        disk_min_free_tmp_mb=parsed_args.min_free_tmp_mb,
        max_output_mb=parsed_args.max_output_mb,
        max_output_ratio=parsed_args.max_output_ratio,
        progress=parsed_args.progress,
        bar_width=parsed_args.bar_width,
        ui_refresh_ms=parsed_args.ui_refresh_ms,
        stats_period=parsed_args.stats_period,
        pipeline=parsed_args.pipeline,
        encode_workers=parsed_args.encode_workers,
        integrity_workers=parsed_args.integrity_workers,
        notify=parsed_args.notify,
        lang=parsed_args.lang,
        json_progress=parsed_args.json_progress,
        retry_attempts=parsed_args.retry_attempts,
        retry_delay_sec=parsed_args.retry_delay,
        retry_fallback_cpu=parsed_args.retry_fallback_cpu,
    )

    if cfg.profile:
        try:
            cfg.apply_profile(cfg.profile, only_if_default=False)
        except ValueError as exc:
            print(f"Warning: {exc}", file=sys.stderr)

    single = Path(parsed_args.file).expanduser() if parsed_args.file else None
    return cfg, single

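# Illustrative sketch (not part of the original cli.py): parse_args() already
# applies i18n setup and the optional encoding profile, so a caller only needs
# the returned Config plus the optional single-file Path. `_demo_parse` is a
# hypothetical helper shown for clarity only.
def _demo_parse() -> None:
    cfg, single = parse_args(["movie.mkv", "--hw", "cpu", "--preset", "fast"])
    print(cfg.hw, cfg.preset, single)  # -> cpu fast movie.mkv
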
# -------------------- FILE FILTERING --------------------


def is_our_output_or_tmp(name: str, cfg: Config) -> bool:
    """Check if filename is our output or temp file."""
    if ".tmp." in name:
        return True
    if cfg.suffix in name:
        return True
    if ".h264." in name or ".aac." in name or ".remux." in name:
        return True
    return False


def _matches_pattern(filepath: Path, patterns: List[str]) -> bool:
    """Check if filepath matches any glob patterns."""
    if not patterns:
        return False
    name = filepath.name.lower()
    for pattern in patterns:
        pattern_lower = pattern.lower()
        if fnmatch.fnmatch(name, pattern_lower):
            return True
        if "*" not in pattern and "?" not in pattern:
            if fnmatch.fnmatch(name, f"*{pattern_lower}*"):
                return True
    return False


def _matches_path(filepath: Path, paths: List[str]) -> bool:
    """Check if filepath matches any paths."""
    if not paths:
        return False
    path_str = str(filepath)
    for check_path in paths:
        check_path_normalized = check_path.rstrip("/\\")
        if "/" not in check_path and "\\" not in check_path:
            if f"/{check_path_normalized}/" in path_str or f"\\{check_path_normalized}\\" in path_str:
                return True
            if path_str.endswith(f"/{check_path_normalized}") or path_str.endswith(f"\\{check_path_normalized}"):
                return True
        else:
            if check_path_normalized in path_str:
                return True
    return False


def should_process_file(filepath: Path, cfg: Config) -> Tuple[bool, Optional[str]]:
    """Check if file should be processed based on filters."""
    if cfg.include_patterns or cfg.include_paths:
        matches_include = _matches_pattern(filepath, cfg.include_patterns) or _matches_path(filepath, cfg.include_paths)
        if not matches_include:
            return False, "no include match"

    if _matches_pattern(filepath, cfg.ignore_patterns):
        return False, "matches ignore pattern"

    if _matches_path(filepath, cfg.ignore_paths):
        return False, "in ignored path"

    return True, None


def output_exists_for_input(inp: Path, cfg: Config) -> bool:
    """Check if output already exists for input file."""
    d = inp.parent
    stem = inp.stem
    ext = cfg.container
    patterns = [f"{stem}*{cfg.suffix}.{ext}"]
    for pat in patterns:
        for p in d.glob(pat):
            if p.is_file() and ".tmp." not in p.name:
                return True
    for p in d.glob(f"{stem}*.tmp.*.{ext}"):
        if p.is_file():
            return True
    return False


def collect_targets(root: Path, single: Optional[Path], cfg: Config) -> Tuple[List[Path], List[Tuple[Path, str]]]:
    """Collect target files for processing."""
    targets: List[Path] = []
    ignored: List[Tuple[Path, str]] = []

    if single is not None:
        p = single.resolve()
        if not p.exists() or not p.is_file():
            raise RuntimeError(f"{_('File not found')}: {p}")
        if p.suffix.lower() != ".mkv":
            raise RuntimeError(_("Only .mkv files supported"))
        if p.name.startswith(".") or is_our_output_or_tmp(p.name, cfg):
            return [], []
        should, reason = should_process_file(p, cfg)
        if not should:
            ignored.append((p, reason or "filtered"))
            return [], ignored
        targets.append(p)
        return targets, ignored

    if cfg.recursive:
        for dirpath, dirnames, filenames in os.walk(root):
            d = Path(dirpath)
            new_dirnames = []
            for dn in dirnames:
                if dn.startswith("."):
                    continue
                dir_path = d / dn
                should, _reason = should_process_file(dir_path, cfg)
                if not should:
                    continue
                new_dirnames.append(dn)
            dirnames[:] = new_dirnames

            for fn in filenames:
                if not fn.lower().endswith(".mkv"):
                    continue
                if fn.startswith("."):
                    continue
                if is_our_output_or_tmp(fn, cfg):
                    continue
                p = d / fn
                should, reason = should_process_file(p, cfg)
                if not should:
                    ignored.append((p, reason or "filtered"))
                    continue
                targets.append(p)
    else:
        for p in root.glob("*.mkv"):
            if not p.is_file():
                continue
            if is_our_output_or_tmp(p.name, cfg):
                continue
            should, reason = should_process_file(p, cfg)
            if not should:
                ignored.append((p, reason or "filtered"))
                continue
            targets.append(p)

    targets.sort()
    return targets, ignored

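# Illustrative sketch (not part of the original cli.py): filter semantics in a
# nutshell -- include filters (if any) must match first, then ignore filters
# can still veto, and bare substrings are auto-wrapped in '*...*'.
# `_demo_filters` is a hypothetical helper shown for clarity only.
def _demo_filters() -> None:
    cfg, _single = parse_args(["-i", "*French*", "-I", "*sample*"])
    print(should_process_file(Path("Movie.French.2024.mkv"), cfg))    # (True, None)
    print(should_process_file(Path("Movie.French.sample.mkv"), cfg))  # (False, 'matches ignore pattern')
    print(should_process_file(Path("Movie.English.mkv"), cfg))        # (False, 'no include match')
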
# -------------------- SYSTEM DETECTION --------------------


def is_running_as_root() -> bool:
    """Check if script is running as root."""
    return os.geteuid() == 0


def get_total_ram_gb() -> int:
    """Get total RAM in GB."""
    try:
        with open("/proc/meminfo") as f:
            for line in f:
                if line.startswith("MemTotal:"):
                    parts = line.split()
                    if len(parts) >= 2:
                        kb = int(parts[1])
                        return kb // (1024 * 1024)
    except Exception:
        pass
    return 8


def get_gpu_info() -> Tuple[str, int]:
    """
    Detect GPU type and VRAM.
    Returns: (gpu_type, vram_mb)
    gpu_type: "nvidia", "amd", "intel", "unknown"
    """
    import glob

    gpu_type = "unknown"
    vram_mb = 0

    # Try to detect NVIDIA GPU
    try:
        result = subprocess.run(
            ["nvidia-smi", "--query-gpu=memory.total", "--format=csv,noheader,nounits"],
            capture_output=True,
            text=True,
            timeout=5,
        )
        if result.returncode == 0 and result.stdout.strip():
            vram_mb = int(result.stdout.strip().split("\n")[0])
            gpu_type = "nvidia"
            return gpu_type, vram_mb
    except Exception:
        pass

    # Try to detect AMD GPU via VRAM info
    try:
        result = subprocess.run(["lspci"], capture_output=True, text=True, timeout=5)
        if result.returncode == 0 and "AMD" in result.stdout and "VGA" in result.stdout:
            gpu_type = "amd"
            for path in glob.glob("/sys/class/drm/card*/device/mem_info_vram_total"):
                try:
                    with open(path) as f:
                        vram_mb = int(f.read().strip()) // (1024 * 1024)
                    break
                except Exception:
                    pass
            return gpu_type, vram_mb
    except Exception:
        pass

    # Try to detect Intel GPU
    try:
        result = subprocess.run(["lspci"], capture_output=True, text=True, timeout=5)
        if result.returncode == 0 and "Intel" in result.stdout and "VGA" in result.stdout:
            gpu_type = "intel"
            vram_mb = 2048  # Intel iGPU uses system RAM, estimate ~2GB
            return gpu_type, vram_mb
    except Exception:
        pass

    return gpu_type, vram_mb


def auto_detect_workers(backend: str) -> Tuple[int, int]:
    """
    Auto-detect optimal worker counts based on backend and system resources.
    Returns: (encode_workers, integrity_workers)
    """
    ram_gb = get_total_ram_gb()
    cores = os.cpu_count() or 4

    if backend in ("vaapi", "qsv"):
        gpu_type, vram_mb = get_gpu_info()

        # Base encode workers on VRAM (if detected) or RAM as fallback
        if vram_mb >= 8192:  # 8GB+ VRAM (high-end desktop GPU)
            encode = 3
        elif vram_mb >= 4096:  # 4GB+ VRAM (mid-range GPU)
            encode = 2
        elif ram_gb >= 32:
            encode = 2
        else:
            encode = 1
        integrity = encode + 1
    else:
        if cores >= 16 and ram_gb >= 32:
            encode = 2
        else:
            encode = 1
        integrity = 2 if cores >= 8 else 1

    return encode, integrity

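# Illustrative sketch (not part of the original cli.py): typical outcomes of
# the sizing heuristic above, assuming the probes succeed -- a VAAPI box with
# an 8 GB GPU yields (3, 4), while a 4-core CPU-only box yields (1, 1).
def _demo_workers() -> None:
    encode, integrity = auto_detect_workers("vaapi")
    print(f"encode workers: {encode}, integrity workers: {integrity}")
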
# -------------------- MULTI-USER CLEANUP (ROOT) --------------------


def get_all_users_mkv2cast_dirs() -> List[Dict[str, Any]]:
    """
    Get mkv2cast directories for all users on the system.
    Only returns users that have mkv2cast data directories.
    """
    users: List[Dict[str, Any]] = []
    home_base = Path("/home")

    if not home_base.exists():
        return users

    try:
        for entry in home_base.iterdir():
            if not entry.is_dir():
                continue

            cache_dir = entry / ".cache" / "mkv2cast"
            state_dir = entry / ".local" / "state" / "mkv2cast"
            config_dir = entry / ".config" / "mkv2cast"

            if cache_dir.exists() or state_dir.exists():
                users.append(
                    {
                        "user": entry.name,
                        "home": entry,
                        "cache": cache_dir,
                        "tmp": cache_dir / "tmp",
                        "state": state_dir,
                        "logs": state_dir / "logs",
                        "config": config_dir,
                    }
                )
    except PermissionError:
        pass

    return users


def cleanup_all_users_logs(days: int, verbose: bool = True) -> Dict[str, int]:
    """
    Clean logs for all users (requires root).
    Returns dict of {username: removed_count}.
    """
    results: Dict[str, int] = {}
    users = get_all_users_mkv2cast_dirs()

    for user_info in users:
        username = str(user_info["user"])
        logs_dir: Path = user_info["logs"]

        if not logs_dir.exists():
            continue

        cutoff = time.time() - (days * 86400)
        removed = 0

        try:
            for f in logs_dir.glob("*.log"):
                try:
                    if f.stat().st_mtime < cutoff:
                        f.unlink()
                        removed += 1
                except Exception:
                    pass

            if removed > 0 or verbose:
                results[username] = removed
        except PermissionError:
            if verbose:
                print(f"  Warning: Cannot access logs for user {username}", file=sys.stderr)

    return results


def cleanup_all_users_tmp(max_age_hours: int = 0, verbose: bool = True) -> Dict[str, int]:
    """
    Clean tmp files for all users (requires root).
    max_age_hours=0 means clean all tmp files.
    Returns dict of {username: removed_count}.
    """
    results: Dict[str, int] = {}
    users = get_all_users_mkv2cast_dirs()

    for user_info in users:
        username = str(user_info["user"])
        tmp_dir: Path = user_info["tmp"]

        if not tmp_dir.exists():
            continue

        if max_age_hours > 0:
            cutoff = time.time() - (max_age_hours * 3600)
        else:
            cutoff = time.time()

        removed = 0

        try:
            for pattern in ["*.tmp.*.mkv", "*.tmp.*.mp4"]:
                for f in tmp_dir.glob(pattern):
                    try:
                        if max_age_hours == 0 or f.stat().st_mtime < cutoff:
                            f.unlink()
                            removed += 1
                    except Exception:
                        pass

            if removed > 0 or verbose:
                results[username] = removed
        except PermissionError:
            if verbose:
                print(f"  Warning: Cannot access tmp for user {username}", file=sys.stderr)

    return results


# -------------------- LOGGING --------------------


def get_log_path(inp: Path) -> Path:
    """Generate log path for input file."""
    logs_dir = APP_DIRS.get("logs", Path.home() / ".local" / "state" / "mkv2cast" / "logs")
    logs_dir.mkdir(parents=True, exist_ok=True)

    date_str = datetime.date.today().isoformat()
    safe_name = re.sub(r"[^\w\-.]", "_", inp.stem)[:80]
    return logs_dir / f"{date_str}_{safe_name}.log"


def get_tmp_path(inp: Path, worker_id: int, tag: str, cfg: Config) -> Path:
    """Generate temp path for encoding."""
    tmp_dir = APP_DIRS.get("tmp", Path.home() / ".cache" / "mkv2cast" / "tmp")
    tmp_dir.mkdir(parents=True, exist_ok=True)

    stem = inp.stem
    ext = cfg.container
    suffix = cfg.suffix
    pid = os.getpid()

    return tmp_dir / f"{stem}{tag}{suffix}.tmp.{pid}.{worker_id}.{ext}"

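# Note (not part of the original cli.py): examples of the names these helpers
# produce, assuming pid 4242, suffix ".cast" and container "mkv":
#   get_log_path(Path("My Movie.mkv"))
#       -> .../logs/2025-01-01_My_Movie.log
#   get_tmp_path(Path("My Movie.mkv"), 0, ".h264.aac", cfg)
#       -> .../tmp/My Movie.h264.aac.cast.tmp.4242.0.mkv
# The ".tmp.<pid>.<worker>" infix is what is_our_output_or_tmp() keys on.
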
# -------------------- UTILITY COMMANDS --------------------


def check_requirements() -> int:
    """Check system requirements."""
    print(f"mkv2cast v{__version__} - {_('Requirements Check')}")
    print("=" * 50)
    print()

    all_ok = True

    print(f"{_('System requirements')} ({_('mandatory')}):")
    print("-" * 40)

    # Check ffmpeg
    try:
        result = subprocess.run(["ffmpeg", "-version"], capture_output=True, text=True, timeout=5)
        if result.returncode == 0:
            version_line = result.stdout.split("\n")[0] if result.stdout else "unknown"
            print(f"  ✓ ffmpeg: {version_line}")
        else:
            print("  ✗ ffmpeg: installed but returned error")
            all_ok = False
    except FileNotFoundError:
        print("  ✗ ffmpeg: NOT FOUND")
        all_ok = False
    except Exception as e:
        print(f"  ✗ ffmpeg: error - {e}")
        all_ok = False

    # Check ffprobe
    try:
        result = subprocess.run(["ffprobe", "-version"], capture_output=True, text=True, timeout=5)
        if result.returncode == 0:
            print("  ✓ ffprobe: found")
        else:
            print("  ✗ ffprobe: error")
            all_ok = False
    except FileNotFoundError:
        print("  ✗ ffprobe: NOT FOUND")
        all_ok = False
    except Exception:
        all_ok = False

    # Python version
    py_version = sys.version_info
    if py_version >= (3, 8):
        print(f"  ✓ Python: {py_version.major}.{py_version.minor}.{py_version.micro}")
    else:
        print(f"  ✗ Python: {py_version.major}.{py_version.minor} (need 3.8+)")
        all_ok = False

    print()
    print(f"{_('Optional dependencies')}:")
    print("-" * 40)

    print(f"  {'✓' if RICH_AVAILABLE else '○'} rich: {'installed' if RICH_AVAILABLE else 'NOT INSTALLED'}")
    print(f"  {'✓' if TOML_AVAILABLE else '○'} TOML support: {'available' if TOML_AVAILABLE else 'not available'}")
    print(f"  {'✓' if SQLITE_AVAILABLE else '○'} SQLite: {'available' if SQLITE_AVAILABLE else 'not available'}")

    notif = check_notification_support()
    print(f"  {'✓' if notif['any'] else '○'} Notifications: {'available' if notif['any'] else 'not available'}")

    print()
    print(f"{_('Hardware acceleration')}:")
    print("-" * 40)

    vaapi_device = Path("/dev/dri/renderD128")
    if vaapi_device.exists():
        print(f"  ✓ VAAPI device: {vaapi_device}")
        if have_encoder("h264_vaapi"):
            print("  ✓ h264_vaapi encoder: available")
        else:
            print("  ○ h264_vaapi encoder: not available")
    else:
        print("  ○ VAAPI device: not found")

    if have_encoder("h264_qsv"):
        print("  ✓ h264_qsv encoder: available")
    else:
        print("  ○ h264_qsv encoder: not available")

    # Check NVIDIA NVENC
    try:
        subprocess.run(["nvidia-smi"], capture_output=True, timeout=5.0, check=True)
        nvidia_available = True
    except (subprocess.CalledProcessError, FileNotFoundError, subprocess.TimeoutExpired):
        nvidia_available = False

    if nvidia_available:
        print("  ✓ NVIDIA GPU: detected")
        if have_encoder("h264_nvenc"):
            if test_nvenc():
                print("  ✓ h264_nvenc encoder: available and working")
            else:
                print("  ○ h264_nvenc encoder: available but test failed")
        else:
            print("  ○ h264_nvenc encoder: not available")
    else:
        print("  ○ NVIDIA GPU: not detected")

    # Check AMD AMF
    if have_encoder("h264_amf"):
        if test_amf():
            print("  ✓ h264_amf encoder: available and working")
        else:
            print("  ○ h264_amf encoder: available but test failed")
    else:
        print("  ○ h264_amf encoder: not available")

    print()
    if all_ok:
        print(f"✓ {_('All requirements satisfied')}")
        return 0
    else:
        print(f"✗ {_('Some requirements missing')}")
        return 1

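# Note (not part of the original cli.py): check_requirements() doubles as a
# shell-friendly probe -- 0 means ffmpeg, ffprobe and Python 3.8+ are all
# usable, 1 means something mandatory is missing. Assuming the console script
# is installed as `mkv2cast`:
#   $ mkv2cast --check-requirements && echo "ready to convert"
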
def handle_utility_commands(cfg: Config, args: argparse.Namespace) -> Optional[int]:
    """Handle utility commands that exit immediately."""
    global APP_DIRS, HISTORY_DB

    if args.check_requirements:
        return check_requirements()

    APP_DIRS = get_app_dirs()

    if args.show_dirs:
        print(f"mkv2cast {_('directories')}:")
        print()
        print(f"{_('User directories')} (XDG):")
        print(f"  Config: {APP_DIRS['config']}")
        print(f"  State:  {APP_DIRS['state']}")
        print(f"  Logs:   {APP_DIRS['logs']}")
        print(f"  Cache:  {APP_DIRS['cache']}")
        print(f"  Tmp:    {APP_DIRS['tmp']}")
        return 0

    if args.clean_tmp:
        if is_running_as_root():
            print(_("Running as root - cleaning tmp for all users..."))
            results = cleanup_all_users_tmp(max_age_hours=0, verbose=True)
            total = sum(results.values())
            for user, count in results.items():
                if count > 0:
                    print(f"  {user}: {count} {_('files removed')}")
            print(f"{_('Total removed')}: {total} {_('temp files')}")
        else:
            tmp_dir = APP_DIRS.get("tmp")
            if tmp_dir and tmp_dir.exists():
                removed = 0
                for pattern in ["*.tmp.*.mkv", "*.tmp.*.mp4"]:
                    for f in tmp_dir.glob(pattern):
                        try:
                            f.unlink()
                            removed += 1
                        except Exception:
                            pass
                print(f"{_('Removed')} {removed} {_('temp files')}")
        return 0

    if args.clean_logs is not None:
        if is_running_as_root():
            print(_("Running as root - cleaning logs for all users..."))
            results = cleanup_all_users_logs(args.clean_logs, verbose=True)
            total = sum(results.values())
            for user, count in results.items():
                if count > 0:
                    print(f"  {user}: {count} {_('files removed')}")
            print(f"{_('Total removed')}: {total} {_('log files older than')} {args.clean_logs} {_('days')}")
        else:
            logs_dir = APP_DIRS.get("logs")
            if logs_dir and logs_dir.exists():
                cutoff = time.time() - (args.clean_logs * 86400)
                removed = 0
                for f in logs_dir.glob("*.log"):
                    try:
                        if f.stat().st_mtime < cutoff:
                            f.unlink()
                            removed += 1
                    except Exception:
                        pass
                print(f"{_('Removed')} {removed} {_('log files older than')} {args.clean_logs} {_('days')}")
        return 0

    HISTORY_DB = HistoryDB(APP_DIRS["state"])

    if args.clean_history is not None:
        removed = HISTORY_DB.clean_old(args.clean_history)
        print(f"{_('Removed')} {removed} {_('history entries')}")
        return 0

    if args.history is not None:
        limit = min(max(1, args.history), 1000)
        recent = HISTORY_DB.get_recent(limit)

        if not recent:
            print(_("No conversion history found."))
            return 0

        term_cols = shutil.get_terminal_size((80, 20)).columns
        print(f"{_('Recent conversions')} ({_('last')} {len(recent)}):")
        print("-" * min(80, term_cols - 1))

        for entry in recent:
            inp = entry.get("input_path") or entry.get("input", "?")
            inp_name = Path(inp).name if inp else "?"
            status = entry.get("status", "?")
            started = entry.get("started_at") or entry.get("started", "?")
            if started and len(started) > 19:
                started = started[:19]

            status_icon = {"done": "✓", "failed": "✗", "skipped": "⊘", "running": "⚙"}.get(status, "?")

            max_name_len = max(20, term_cols - 45)
            if len(inp_name) > max_name_len:
                inp_name = inp_name[: max_name_len - 3] + "..."

            print(f"  {status_icon} [{started}] {status:8} {inp_name}")
        return 0

    if args.history_stats:
        stats = HISTORY_DB.get_stats()
        by_status = stats.get("by_status", {})

        print(f"{_('Conversion statistics')}:")
        print("-" * 40)
        total = sum(by_status.values())
        print(f"  {_('Total conversions')}: {total}")
        for status, count in sorted(by_status.items()):
            pct = (count / total * 100) if total > 0 else 0
            print(f"  {status:12} {count:5} ({pct:.1f}%)")

        avg_time = stats.get("avg_encode_time", 0)
        total_time = stats.get("total_encode_time", 0)
        print(f"  {_('Average encode time')}: {avg_time:.1f}s")
        print(f"  {_('Total encode time')}: {total_time:.1f}s ({total_time / 3600:.1f}h)")
        return 0

    return None

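# Note (not part of the original cli.py): sample rendering of the --history
# listing produced above, with invented file names and timestamps:
#   ✓ [2025-01-01T20:14:03] done     Movie.French.2024.mkv
#   ⊘ [2025-01-01T20:15:11] skipped  Show.S01E01.mkv
#   ✗ [2025-01-01T20:18:47] failed   Broken.Recording.mkv
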
# -------------------- MAIN --------------------


def main_json_progress(single: Optional[Path], cfg: Config) -> Tuple[int, int, int, int, bool]:
    """JSON progress output mode for integration with external tools.

    Outputs structured JSON to stdout for each progress update.
    """
    from mkv2cast.config import CFG as global_cfg

    global_cfg.__dict__.update(cfg.__dict__)

    ok, skipped, failed, interrupted_count = 0, 0, 0, 0
    interrupted = False

    root = Path(".").resolve()
    targets, _ignored = collect_targets(root, single, cfg)
    if not targets:
        json_out = JSONProgressOutput()
        json_out.start(0, pick_backend(cfg), 1, 1)
        json_out.complete()
        return 0, 0, 0, 0, False

    backend = pick_backend(cfg)
    history = HistoryRecorder(HISTORY_DB, backend)
    json_out = JSONProgressOutput()

    # Probe durations for all files
    for inp in targets:
        dur_ms = probe_duration_ms(inp)
        json_out.file_queued(inp, dur_ms)

    json_out.start(len(targets), backend, 1, 1)

    for inp in targets:
        if is_our_output_or_tmp(str(inp), cfg):
            continue

        history.start(inp)
        integrity_time = 0.0

        log_path = get_log_path(inp)

        # Integrity check
        if cfg.integrity_check:
            json_out.file_checking(inp)
            success, integrity_time = do_integrity_check(inp, True, cfg.stable_wait, cfg.deep_check)
            json_out.file_check_done(inp)
            if not success:
                json_out.file_done(inp, skipped=True, error="integrity failed")
                history.finish(inp, "skipped", error_msg="integrity failed", integrity_time=integrity_time)
                skipped += 1
                continue

        try:
            d = decide_for(inp, cfg)
        except Exception as e:
            json_out.file_done(inp, error=str(e))
            history.finish(inp, "failed", error_msg=str(e), integrity_time=integrity_time)
            failed += 1
            continue

        if (not d.need_v) and (not d.need_a) and cfg.skip_when_ok:
            json_out.file_done(inp, skipped=True)
            history.finish(inp, "skipped", error_msg="compatible", integrity_time=integrity_time)
            skipped += 1
            continue

        tag = ""
        if d.need_v:
            tag += ".h264"
        if d.need_a:
            tag += ".aac"
        if not tag:
            tag = ".remux"

        final = inp.parent / f"{inp.stem}{tag}{cfg.suffix}.{cfg.container}"
        if final.exists():
            json_out.file_done(inp, output_path=final, skipped=True)
            history.finish(inp, "skipped", output_path=final, error_msg="output exists", integrity_time=integrity_time)
            skipped += 1
            continue

        tmp = get_tmp_path(inp, 0, tag, cfg)
        dur_ms = probe_duration_ms(inp)

        cmd, _stage = build_transcode_cmd(inp, d, backend, tmp, log_path, cfg)

        if cfg.dryrun:
            json_out.file_done(inp, skipped=True)
            history.finish(inp, "skipped", error_msg="dryrun", integrity_time=integrity_time)
            skipped += 1
            continue

        json_out.file_encoding_start(inp, dur_ms)
        start_encode = time.time()

        try:
            proc = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
            if proc.stderr is None:
                raise RuntimeError("Failed to capture stderr")

            for line in proc.stderr:
                progress_data = parse_ffmpeg_progress_for_json(line)
                if progress_data:
                    json_out.file_progress(
                        inp,
                        frame=progress_data.get("frame", 0),
                        fps=progress_data.get("fps", 0.0),
                        time_ms=progress_data.get("time_ms", 0),
                        bitrate=progress_data.get("bitrate", ""),
                        speed=progress_data.get("speed", ""),
                        size_bytes=progress_data.get("size_bytes", 0),
                    )

            proc.wait()
            rc = proc.returncode

        except KeyboardInterrupt:
            interrupted = True
            interrupted_count += 1
            encode_time = time.time() - start_encode
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            json_out.file_done(inp, error="interrupted")
            history.finish(
                inp,
                "interrupted",
                error_msg="interrupted",
                encode_time=encode_time,
                integrity_time=integrity_time,
            )
            break
        except Exception as e:
            failed += 1
            encode_time = time.time() - start_encode
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            json_out.file_done(inp, error=str(e))
            history.finish(inp, "failed", error_msg=str(e), encode_time=encode_time, integrity_time=integrity_time)
            continue

        encode_time = time.time() - start_encode

        if rc == 0:
            try:
                shutil.move(str(tmp), str(final))
                ok += 1
                json_out.file_done(inp, output_path=final)
                output_size = final.stat().st_size if final.exists() else 0
                history.finish(
                    inp,
                    "done",
                    output_path=final,
                    encode_time=encode_time,
                    integrity_time=integrity_time,
                    output_size=output_size,
                )
            except Exception as e:
                failed += 1
                if tmp.exists():
                    tmp.unlink(missing_ok=True)
                json_out.file_done(inp, error=f"move failed: {e}")
                history.finish(
                    inp,
                    "failed",
                    error_msg=f"move failed: {e}",
                    encode_time=encode_time,
                    integrity_time=integrity_time,
                )
        else:
            failed += 1
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            json_out.file_done(inp, error=f"ffmpeg returned {rc}")
            history.finish(
                inp,
                "failed",
                error_msg=f"ffmpeg returned {rc}",
                encode_time=encode_time,
                integrity_time=integrity_time,
            )

    json_out.complete()
    return ok, skipped, failed, interrupted_count, interrupted

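# Illustrative sketch (not part of the original cli.py): a minimal consumer
# for --json-progress mode, assuming each stdout line is a standalone JSON
# object (the exact event schema lives in mkv2cast/json_progress.py; the
# "event" and "file" keys below are assumptions).
def _demo_consume_json_progress() -> None:
    import json

    proc = subprocess.Popen(
        [sys.executable, "-m", "mkv2cast", "--json-progress"],
        stdout=subprocess.PIPE,
        text=True,
    )
    assert proc.stdout is not None
    for line in proc.stdout:
        try:
            event = json.loads(line)
        except ValueError:
            continue  # skip any non-JSON noise
        print(event.get("event"), event.get("file"))
    proc.wait()
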
def main_legacy(single: Optional[Path], cfg: Config) -> Tuple[int, int, int, int, bool]:
    """Legacy sequential mode. Returns (ok, skipped, failed, interrupted, was_interrupted)."""
    from mkv2cast.config import CFG as global_cfg

    global_cfg.__dict__.update(cfg.__dict__)

    root = Path(".").resolve()
    backend = pick_backend(cfg)
    history = HistoryRecorder(HISTORY_DB, backend)
    print(f"{_('Backend selected')}: {backend}", flush=True)

    ui = LegacyProgressUI(cfg.progress, cfg.bar_width)
    targets, ignored_files = collect_targets(root, single, cfg)

    if not targets:
        print(_("No MKV files to process."), flush=True)
        return 0, 0, 0, 0, False

    ok = 0
    skipped = 0
    failed = 0
    interrupted = False

    for _i, inp in enumerate(targets, start=1):
        history.start(inp)
        integrity_time = 0.0

        if output_exists_for_input(inp, cfg):
            skipped += 1
            history.finish(inp, "skipped", error_msg="output exists", integrity_time=integrity_time)
            continue

        log_path = get_log_path(inp)
        ui.log(f"==> {inp}")

        # Integrity check
        success, integrity_time = do_integrity_check(
            inp, enabled=cfg.integrity_check, stable_wait=cfg.stable_wait, deep_check=cfg.deep_check, log_path=log_path
        )
        if not success:
            skipped += 1
            ui.log(f" SKIP: {_('integrity check failed')}")
            history.finish(inp, "skipped", error_msg="integrity check failed", integrity_time=integrity_time)
            continue

        try:
            d = decide_for(inp, cfg)
        except Exception as e:
            failed += 1
            ui.log(f" FAILED: {e}")
            history.finish(inp, "failed", error_msg=str(e), integrity_time=integrity_time)
            continue

        if (not d.need_v) and (not d.need_a) and cfg.skip_when_ok:
            skipped += 1
            ui.log(f" OK: {_('compatible')}")
            history.finish(inp, "skipped", error_msg="compatible", integrity_time=integrity_time)
            continue

        tag = ""
        if d.need_v:
            tag += ".h264"
        if d.need_a:
            tag += ".aac"
        if not tag:
            tag = ".remux"

        final = inp.parent / f"{inp.stem}{tag}{cfg.suffix}.{cfg.container}"
        if final.exists():
            skipped += 1
            history.finish(inp, "skipped", error_msg="output exists", integrity_time=integrity_time)
            continue

        tmp = get_tmp_path(inp, 0, tag, cfg)
        ui.log(f" -> {final}")

        cmd, stage = build_transcode_cmd(inp, d, backend, tmp, log_path, cfg)

        if cfg.dryrun:
            ui.log(f"DRYRUN: {' '.join(cmd)}")
            skipped += 1
            history.finish(inp, "skipped", error_msg="dryrun", integrity_time=integrity_time)
            continue

        probe_duration_ms(inp)  # result unused here: legacy mode has no live progress bar
        start_encode = time.time()

        try:
            result = subprocess.run(cmd, capture_output=True, timeout=86400)  # 24 h hard cap per file
            rc = result.returncode
        except KeyboardInterrupt:
            interrupted = True
            encode_time = time.time() - start_encode
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            history.finish(
                inp,
                "interrupted",
                error_msg="interrupted",
                encode_time=encode_time,
                integrity_time=integrity_time,
            )
            break
        except Exception as e:
            failed += 1
            encode_time = time.time() - start_encode
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            ui.log(f" FAILED: {e}")
            history.finish(inp, "failed", error_msg=str(e), encode_time=encode_time, integrity_time=integrity_time)
            continue

        encode_time = time.time() - start_encode

        if rc == 0:
            try:
                shutil.move(str(tmp), str(final))
                ok += 1
                ui.log(" DONE")
                output_size = final.stat().st_size if final.exists() else 0
                history.finish(
                    inp,
                    "done",
                    output_path=final,
                    encode_time=encode_time,
                    integrity_time=integrity_time,
                    output_size=output_size,
                )
            except Exception as e:
                failed += 1
                if tmp.exists():
                    tmp.unlink(missing_ok=True)
                ui.log(f" FAILED: {e}")
                history.finish(
                    inp,
                    "failed",
                    error_msg=str(e),
                    encode_time=encode_time,
                    integrity_time=integrity_time,
                )
        else:
            failed += 1
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            ui.log(f" FAILED (rc={rc})")
            history.finish(
                inp,
                "failed",
                error_msg=f"ffmpeg returned {rc}",
                encode_time=encode_time,
                integrity_time=integrity_time,
            )

    return ok, skipped, failed, 1 if interrupted else 0, interrupted

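The tag logic above fully determines the output name: ".h264" and/or ".aac" record which streams were transcoded, ".remux" marks a container-only pass, and cfg.suffix plus cfg.container finish the file name. A standalone sketch of the same rule follows; the ".castok" suffix and "mp4" container are placeholder defaults for illustration, the real values come from the config.

```python
from pathlib import Path


def output_name(inp: Path, need_v: bool, need_a: bool,
                suffix: str = ".castok", container: str = "mp4") -> Path:
    """Mirror the naming rule used by main_legacy/main_rich above."""
    tag = ""
    if need_v:
        tag += ".h264"
    if need_a:
        tag += ".aac"
    if not tag:
        tag = ".remux"
    return inp.parent / f"{inp.stem}{tag}{suffix}.{container}"


# output_name(Path("Movie.mkv"), need_v=True, need_a=False)
# -> Path("Movie.h264.castok.mp4")
```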
def main_rich(single: Optional[Path], cfg: Config) -> Tuple[int, int, int, int, bool]:
    """Rich UI sequential mode. Returns (ok, skipped, failed, interrupted, was_interrupted)."""
    from mkv2cast.config import CFG as global_cfg

    global_cfg.__dict__.update(cfg.__dict__)

    start_time = time.time()
    ui = SimpleRichUI(cfg.progress)
    root = Path(".").resolve()
    backend = pick_backend(cfg)
    history = HistoryRecorder(HISTORY_DB, backend)

    ui.console.print(f"[bold]mkv2cast[/bold] v{__version__}")
    ui.console.print(f"[dim]{_('Backend')}:[/dim] [cyan]{backend}[/cyan]")
    ui.console.print()

    targets, ignored_files = collect_targets(root, single, cfg)

    if not targets:
        ui.console.print(f"[yellow]{_('No MKV files to process.')}[/yellow]")
        return 0, 0, 0, 0, False

    total_files = len(targets)
    ui.console.print(f"[dim]{_('Found')} {total_files} {_('file(s) to process')}[/dim]")

    ok = 0
    skipped = 0
    failed = 0
    interrupted = False

    for idx, inp in enumerate(targets, start=1):
        history.start(inp)
        integrity_time = 0.0

        if output_exists_for_input(inp, cfg):
            skipped += 1
            history.finish(inp, "skipped", error_msg="output exists", integrity_time=integrity_time)
            continue

        log_path = get_log_path(inp)

        # Integrity check
        success, integrity_time = do_integrity_check(
            inp, enabled=cfg.integrity_check, stable_wait=cfg.stable_wait, deep_check=cfg.deep_check, log_path=log_path
        )
        if not success:
            ui.log_file_start(inp, inp)
            ui.log_skip(_("integrity check failed"))
            history.finish(inp, "skipped", error_msg="integrity check failed", integrity_time=integrity_time)
            continue

        try:
            d = decide_for(inp, cfg)
        except Exception as e:
            ui.log_file_start(inp, inp)
            ui.log_error(str(e))
            history.finish(inp, "failed", error_msg=str(e), integrity_time=integrity_time)
            continue

        if (not d.need_v) and (not d.need_a) and cfg.skip_when_ok:
            ui.log_file_start(inp, inp)
            ui.log_compatible()
            history.finish(inp, "skipped", error_msg="compatible", integrity_time=integrity_time)
            continue

        tag = ""
        if d.need_v:
            tag += ".h264"
        if d.need_a:
            tag += ".aac"
        if not tag:
            tag = ".remux"

        final = inp.parent / f"{inp.stem}{tag}{cfg.suffix}.{cfg.container}"
        if final.exists():
            skipped += 1
            history.finish(inp, "skipped", error_msg="output exists", integrity_time=integrity_time)
            continue

        tmp = get_tmp_path(inp, 0, tag, cfg)
        ui.log_file_start(inp, final)

        cmd, stage = build_transcode_cmd(inp, d, backend, tmp, log_path, cfg)

        if cfg.dryrun:
            ui.console.print(f" [dim]DRYRUN: {' '.join(cmd)}[/dim]")
            skipped += 1
            history.finish(inp, "skipped", error_msg="dryrun", integrity_time=integrity_time)
            continue

        dur_ms = probe_duration_ms(inp)
        start_encode = time.time()

        try:
            rc, stderr = ui.run_ffmpeg_with_progress(cmd, stage, dur_ms, idx, total_files)
        except KeyboardInterrupt:
            interrupted = True
            encode_time = time.time() - start_encode
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            history.finish(
                inp,
                "interrupted",
                error_msg="interrupted",
                encode_time=encode_time,
                integrity_time=integrity_time,
            )
            break
        except Exception as e:
            ui.log_error(str(e))
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            encode_time = time.time() - start_encode
            history.finish(inp, "failed", error_msg=str(e), encode_time=encode_time, integrity_time=integrity_time)
            continue

        encode_time = time.time() - start_encode

        if rc == 0:
            try:
                shutil.move(str(tmp), str(final))
                output_size = final.stat().st_size if final.exists() else 0
                ui.log_success(encode_time, output_size)
                history.finish(
                    inp,
                    "done",
                    output_path=final,
                    encode_time=encode_time,
                    integrity_time=integrity_time,
                    output_size=output_size,
                )
            except Exception as e:
                ui.log_error(str(e))
                if tmp.exists():
                    tmp.unlink(missing_ok=True)
                history.finish(
                    inp,
                    "failed",
                    error_msg=str(e),
                    encode_time=encode_time,
                    integrity_time=integrity_time,
                )
        else:
            ui.log_error(f"ffmpeg exit code {rc}")
            if tmp.exists():
                tmp.unlink(missing_ok=True)
            history.finish(
                inp,
                "failed",
                error_msg=f"ffmpeg returned {rc}",
                encode_time=encode_time,
                integrity_time=integrity_time,
            )

    ok, skipped, failed, _processed = ui.get_stats()

    # Print summary
    ui.print_summary(time.time() - start_time)

    return ok, skipped, failed, 1 if interrupted else 0, interrupted

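main_rich delegates live progress to SimpleRichUI.run_ffmpeg_with_progress, which lives in mkv2cast/ui/simple_rich.py and is not shown in this file. The sketch below illustrates only the general technique such a method can rely on: read ffmpeg's key=value progress stream and scale out_time_us (microseconds on current ffmpeg builds) against the duration probed earlier. It assumes the command was built with "-progress pipe:1 -nostats", which this file does not confirm.

```python
import subprocess


def run_with_percent(cmd: list, dur_ms: int) -> int:
    """Run ffmpeg, printing a rough percentage from its progress stream."""
    proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, text=True
    )
    assert proc.stdout is not None
    for line in proc.stdout:
        key, _, value = line.strip().partition("=")
        if key == "out_time_us" and value.isdigit() and dur_ms > 0:
            # out_time_us is microseconds; dur_ms is milliseconds.
            pct = min(100.0, int(value) / 1000 / dur_ms * 100)
            print(f"\r{pct:5.1f}%", end="", flush=True)
    print()
    return proc.wait()
```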
def main_pipeline(single: Optional[Path], cfg: Config) -> Tuple[int, int, int, int, bool]:
    """Pipeline mode with parallel workers. Returns (ok, skipped, failed, interrupted_count, was_interrupted)."""
    from mkv2cast.config import CFG as global_cfg

    global_cfg.__dict__.update(cfg.__dict__)

    root = Path(".").resolve()
    backend = pick_backend(cfg)
    history = HistoryRecorder(HISTORY_DB, backend)

    # Collect targets
    targets, ignored_files = collect_targets(root, single, cfg)

    if not targets:
        print(_("No MKV files to process."), flush=True)
        return 0, 0, 0, 0, False

    # Determine worker counts
    if cfg.encode_workers == 0 or cfg.integrity_workers == 0:
        auto_encode, auto_integrity = auto_detect_workers(backend)
        encode_workers = cfg.encode_workers if cfg.encode_workers > 0 else auto_encode
        integrity_workers = cfg.integrity_workers if cfg.integrity_workers > 0 else auto_integrity
    else:
        encode_workers = cfg.encode_workers
        integrity_workers = cfg.integrity_workers

    # Create Rich UI
    ui = RichProgressUI(total_files=len(targets), encode_workers=encode_workers, integrity_workers=integrity_workers)

    # Header
    ui.console.print(f"[bold]mkv2cast[/bold] v{__version__}")
    ui.console.print(f"[dim]{_('Backend')}:[/dim] [cyan]{backend}[/cyan]")
    ui.console.print(f"[dim]{_('Workers')}:[/dim] {encode_workers} {_('encode')}, {integrity_workers} {_('integrity')}")
    ui.console.print(f"[dim]{_('Files')}:[/dim] {len(targets)} {_('to process')}")
    ui.console.print()

    # Create and run pipeline
    pipeline = PipelineOrchestrator(
        targets=targets,
        backend=backend,
        ui=ui,
        cfg=cfg,
        history=history,
        encode_workers=encode_workers,
        integrity_workers=integrity_workers,
        get_log_path=get_log_path,
        get_tmp_path=lambda inp, wid, tag: get_tmp_path(inp, wid, tag, cfg),
        output_exists_fn=output_exists_for_input,
    )

    ok, skipped, failed, interrupted = pipeline.run()

    # Print final summary
    ui.console.print()
    ui.console.print(f"[bold]═══ {_('Summary')} ═══[/bold]")
    ui.console.print(f" [green]✓ {_('Converted')}:[/green] {ok}")
    ui.console.print(f" [yellow]⊘ {_('Skipped')}:[/yellow] {skipped}")
    ui.console.print(f" [red]✗ {_('Failed')}:[/red] {failed}")

    return ok, skipped, failed, 1 if interrupted else 0, interrupted

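auto_detect_workers is defined elsewhere in this module, so the real heuristic is not visible in this excerpt. Purely as an illustration of the kind of policy the call implies (hardware encoders sustain only a few concurrent sessions, while integrity checks are I/O-bound and can fan out wider), here is an assumed sketch; the backend substrings and every number are guesses, not the package's values.

```python
import os


def auto_detect_workers_sketch(backend: str) -> tuple:
    """Illustrative only: return (encode_workers, integrity_workers)."""
    cores = os.cpu_count() or 2
    if "nvenc" in backend or "amf" in backend:
        encode = 2  # consumer GPUs typically cap concurrent encode sessions
    else:
        encode = max(1, cores // 4)  # software x264 is already multithreaded
    integrity = max(1, cores // 2)  # integrity reads are mostly I/O-bound
    return encode, integrity
```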
def run_watch_mode(single: Optional[Path], cfg: Config, interval: float) -> int:
    """Run in watch mode, monitoring directory for new MKV files."""
    from mkv2cast.watcher import watch_directory

    # Determine watch path
    if single and single.is_dir():
        watch_path = single
    elif single and single.is_file():
        watch_path = single.parent
    else:
        watch_path = Path.cwd()

    print(f"mkv2cast v{__version__} - {_('Watch Mode')}")
    print()

    backend = pick_backend(cfg)
    history = HistoryRecorder(HISTORY_DB, backend)

    def convert_single(filepath: Path) -> None:
        """Convert a single file when detected."""
        from mkv2cast.converter import convert_file

        print(f"[{_('NEW')}] {filepath.name}")

        history.start(filepath)
        start_encode = time.time()

        success, output_path, message = convert_file(
            input_path=filepath,
            cfg=cfg,
            backend=backend,
        )

        encode_time = time.time() - start_encode

        if success:
            if output_path:
                print(f" [✓] {_('Converted')}: {output_path.name}")
            else:
                print(f" [⊘] {_('Skipped')}: {message}")
        else:
            print(f" [✗] {_('Failed')}: {message}")

        skip_reason = message
        is_skip = (
            output_path is None
            or skip_reason.startswith("Already compatible")
            or skip_reason.startswith("Output already exists")
            or skip_reason.startswith("DRYRUN:")
        )

        if success and not is_skip and output_path:
            output_size = output_path.stat().st_size if output_path.exists() else 0
            history.finish(
                filepath,
                "done",
                output_path=output_path,
                output_size=output_size,
                encode_time=encode_time,
            )
        elif success:
            history.finish(filepath, "skipped", error_msg=skip_reason, encode_time=encode_time)
        else:
            history.finish(filepath, "failed", error_msg=message, encode_time=encode_time)

    try:
        watch_directory(
            path=watch_path,
            convert_callback=convert_single,
            cfg=cfg,
            interval=interval,
        )
    except KeyboardInterrupt:
        print(f"\n{_('Interrupted')}")

    return 0

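watch_directory comes from mkv2cast/watcher.py and its internals are outside this file. A minimal sketch of the polling pattern such a watcher typically implements follows: scan for new .mkv files and require the size to hold steady for one interval before firing the callback (the same idea as the stable_wait option used by the integrity check). All names and details below are illustrative.

```python
import time
from pathlib import Path
from typing import Callable


def poll_watch(path: Path, callback: Callable[[Path], None],
               interval: float = 5.0) -> None:
    """Illustrative loop: fire callback once per new, size-stable .mkv file."""
    seen = {}  # Path -> last handled size
    while True:
        for f in sorted(path.glob("*.mkv")):
            size = f.stat().st_size
            if seen.get(f) == size:
                continue  # already handled at this size
            time.sleep(interval)  # crude write-stability check
            if f.exists() and f.stat().st_size == size:
                seen[f] = size
                callback(f)
        time.sleep(interval)
```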
def main() -> int:
    """Main entry point."""
    global APP_DIRS, HISTORY_DB

    # Initialize i18n
    setup_i18n()

    # Parse arguments
    cfg, single = parse_args()

    # Update global config
    from mkv2cast.config import CFG as global_cfg

    global_cfg.__dict__.update(cfg.__dict__)

    # Initialize directories
    APP_DIRS = get_app_dirs()

    # Create default config if needed
    save_default_config(APP_DIRS["config"])

    # Load config file
    file_config = load_config_file(APP_DIRS["config"])
    if file_config:
        apply_config_to_args(file_config, cfg)
    if cfg.profile:
        try:
            cfg.apply_profile(cfg.profile, only_if_default=True)
        except ValueError as exc:
            print(f"Warning: {exc}", file=sys.stderr)

    # Initialize history database
    HISTORY_DB = HistoryDB(APP_DIRS["state"])

    # Handle utility commands
    _parser = argparse.ArgumentParser(add_help=False)
    _parser.add_argument("--show-dirs", action="store_true")
    _parser.add_argument("--history", nargs="?", const=20, type=int)
    _parser.add_argument("--history-stats", action="store_true")
    _parser.add_argument("--clean-tmp", action="store_true")
    _parser.add_argument("--clean-logs", type=int)
    _parser.add_argument("--clean-history", type=int)
    _parser.add_argument("--check-requirements", action="store_true")
    _util_args, _remaining = _parser.parse_known_args()

    result = handle_utility_commands(cfg, _util_args)
    if result is not None:
        return result

    # Handle watch mode
    _watch_parser = argparse.ArgumentParser(add_help=False)
    _watch_parser.add_argument("--watch", "-w", action="store_true")
    _watch_parser.add_argument("--watch-interval", type=float, default=5.0)
    _watch_args, _watch_rest = _watch_parser.parse_known_args()  # avoid shadowing gettext's _()

    if _watch_args.watch:
        return run_watch_mode(single, cfg, _watch_args.watch_interval)

    # Run main processing
    start_time = time.time()

    # Choose UI mode:
    # - JSON progress mode if --json-progress enabled
    # - Pipeline mode with Rich UI if Rich available and --pipeline enabled
    # - Simple Rich mode if Rich available but --no-pipeline
    # - Legacy mode if Rich not available
    use_json = cfg.json_progress
    use_pipeline = RICH_AVAILABLE and cfg.pipeline and cfg.progress and not use_json
    use_simple_rich = RICH_AVAILABLE and not cfg.pipeline and cfg.progress and not use_json

    if use_json:
        ok, skipped, failed, interrupted_count, was_interrupted = main_json_progress(single, cfg)
        total_time = fmt_hms(time.time() - start_time)
    elif use_pipeline:
        ok, skipped, failed, interrupted_count, was_interrupted = main_pipeline(single, cfg)
        total_time = fmt_hms(time.time() - start_time)
        # Summary is printed by main_pipeline
    elif use_simple_rich:
        ok, skipped, failed, interrupted_count, was_interrupted = main_rich(single, cfg)
        total_time = fmt_hms(time.time() - start_time)
        # Summary is printed by main_rich
    else:
        ok, skipped, failed, interrupted_count, was_interrupted = main_legacy(single, cfg)
        total_time = fmt_hms(time.time() - start_time)

        # Print summary (legacy mode)
        print()
        trans_summary = _("Summary")
        trans_ok = _("Transcoded OK")
        trans_skipped = _("Skipped")
        trans_failed = _("Failed")
        trans_time = _("Total time")
        print(f"=== {trans_summary} ===")
        print(f"{trans_ok}: {ok}")
        print(f"{trans_skipped}: {skipped}")
        print(f"{trans_failed}: {failed}")
        print(f"{trans_time}: {total_time}")

    # Send notification if enabled
    if cfg.notify:
        if was_interrupted:
            notify_interrupted()
        elif failed == 0 and ok > 0:
            if cfg.notify_on_success:
                notify_success(ok, total_time)
        elif failed > 0:
            if cfg.notify_on_failure:
                notify_partial(ok, failed, skipped, total_time)
        elif ok > 0 or skipped > 0:
            if cfg.notify_on_success:
                notify_partial(ok, failed, skipped, total_time)

    if was_interrupted:
        return 130
    return 0 if failed == 0 else 2


if __name__ == "__main__":
    sys.exit(main())
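The exit-code contract at the end of main() (0 for a clean run, 2 if any file failed, 130 on interruption) is what wrapper scripts should key on. A small example of driving it programmatically; "python -m mkv2cast" works because the package ships a __main__.py.

```python
import subprocess
import sys

# Run mkv2cast against the current directory and react to its exit code.
rc = subprocess.run([sys.executable, "-m", "mkv2cast"]).returncode
if rc == 130:
    print("run was interrupted")
elif rc == 2:
    print("finished with at least one failure")
else:
    print("all files converted or skipped cleanly")
```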