mkv2cast 1.2.7.post4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mkv2cast/__init__.py +77 -0
- mkv2cast/__main__.py +14 -0
- mkv2cast/cli.py +1886 -0
- mkv2cast/config.py +638 -0
- mkv2cast/converter.py +1454 -0
- mkv2cast/history.py +389 -0
- mkv2cast/i18n.py +179 -0
- mkv2cast/integrity.py +176 -0
- mkv2cast/json_progress.py +311 -0
- mkv2cast/locales/de/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/de/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/locales/en/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/en/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/locales/es/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/es/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/locales/fr/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/fr/LC_MESSAGES/mkv2cast.po +430 -0
- mkv2cast/locales/it/LC_MESSAGES/mkv2cast.mo +0 -0
- mkv2cast/locales/it/LC_MESSAGES/mkv2cast.po +382 -0
- mkv2cast/notifications.py +196 -0
- mkv2cast/pipeline.py +641 -0
- mkv2cast/ui/__init__.py +26 -0
- mkv2cast/ui/legacy_ui.py +136 -0
- mkv2cast/ui/rich_ui.py +462 -0
- mkv2cast/ui/simple_rich.py +243 -0
- mkv2cast/watcher.py +293 -0
- mkv2cast-1.2.7.post4.dist-info/METADATA +1411 -0
- mkv2cast-1.2.7.post4.dist-info/RECORD +31 -0
- mkv2cast-1.2.7.post4.dist-info/WHEEL +4 -0
- mkv2cast-1.2.7.post4.dist-info/entry_points.txt +2 -0
- mkv2cast-1.2.7.post4.dist-info/licenses/LICENSE +50 -0
mkv2cast/history.py
ADDED
|
@@ -0,0 +1,389 @@
|
|
|
1
|
+
"""
|
|
2
|
+
History database for mkv2cast conversions.
|
|
3
|
+
|
|
4
|
+
Tracks all conversions with timestamps, durations, and results.
|
|
5
|
+
Uses SQLite as primary storage with JSONL fallback if SQLite is unavailable.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import datetime
|
|
9
|
+
import json
|
|
10
|
+
import threading
|
|
11
|
+
import time
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Dict, List, Optional, Tuple
|
|
14
|
+
|
|
15
|
+
# SQLite support (usually available, but check anyway)
|
|
16
|
+
try:
|
|
17
|
+
import sqlite3
|
|
18
|
+
|
|
19
|
+
SQLITE_AVAILABLE = True
|
|
20
|
+
except ImportError:
|
|
21
|
+
SQLITE_AVAILABLE = False
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class HistoryDB:
    """History storage with SQLite primary and JSONL text fallback.

    Each conversion is tracked from start to finish so the CLI can show
    recent activity and aggregate statistics.  With SQLite available the
    data lives in ``<state_dir>/history.db``; otherwise entries are
    appended to ``<state_dir>/history.log`` as JSON lines, where a start
    line and its later finish line share the same pseudo-ID.
    """

    def __init__(self, state_dir: Path):
        """Set up storage below *state_dir* (the directory must exist)."""
        self.state_dir = state_dir
        self._use_sqlite = SQLITE_AVAILABLE

        if self._use_sqlite:
            self._db_path = state_dir / "history.db"
            self._init_sqlite()
        else:
            self._log_path = state_dir / "history.log"

    def _connect(self) -> "sqlite3.Connection":
        """Open a fresh connection to the history database."""
        return sqlite3.connect(str(self._db_path))

    def _append_jsonl(self, entry: dict) -> None:
        """Append one JSON object as a line to the fallback log."""
        with self._log_path.open("a", encoding="utf-8") as f:
            f.write(json.dumps(entry) + "\n")

    def _read_jsonl(self) -> List[dict]:
        """Parse the JSONL log, skipping blank or corrupt lines."""
        if not self._log_path.exists():
            return []
        entries: List[dict] = []
        with self._log_path.open("r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                try:
                    entries.append(json.loads(line))
                except json.JSONDecodeError:
                    pass
        return entries

    def _init_sqlite(self) -> None:
        """Create the conversions table and its indexes if missing."""
        conn = self._connect()
        try:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS conversions (
                    id INTEGER PRIMARY KEY,
                    input_path TEXT NOT NULL,
                    output_path TEXT,
                    input_size INTEGER,
                    output_size INTEGER,
                    duration_ms INTEGER,
                    started_at TEXT NOT NULL,
                    finished_at TEXT,
                    status TEXT NOT NULL,
                    backend TEXT,
                    error_msg TEXT,
                    encode_time_s REAL,
                    integrity_time_s REAL
                )
            """)
            conn.execute("CREATE INDEX IF NOT EXISTS idx_started ON conversions(started_at)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_status ON conversions(status)")
            conn.commit()
        finally:
            # Close even when schema creation raises, so no handle leaks.
            conn.close()

    def record_start(self, input_path: Path, backend: str, input_size: int = 0) -> int:
        """Record conversion start, return entry ID.

        The returned ID is later handed to :meth:`record_finish`.  In the
        JSONL fallback a millisecond wall-clock timestamp is the pseudo-ID.
        """
        started_at = datetime.datetime.now().isoformat()

        if self._use_sqlite:
            conn = self._connect()
            try:
                cur = conn.execute(
                    """INSERT INTO conversions (input_path, input_size, started_at, status, backend)
                       VALUES (?, ?, ?, ?, ?)""",
                    (str(input_path), input_size, started_at, "running", backend),
                )
                entry_id = cur.lastrowid or 0
                conn.commit()
            finally:
                conn.close()
            return entry_id

        # JSONL fallback: timestamp serves as pseudo-ID.
        entry_id = int(time.time() * 1000)
        self._append_jsonl({
            "id": entry_id,
            "input": str(input_path),
            "input_size": input_size,
            "started": started_at,
            "status": "running",
            "backend": backend,
        })
        return entry_id

    def record_finish(
        self,
        entry_id: int,
        output_path: Optional[Path],
        status: str,
        encode_time: float = 0,
        integrity_time: float = 0,
        output_size: int = 0,
        duration_ms: int = 0,
        error_msg: Optional[str] = None,
    ) -> None:
        """Update the entry created by :meth:`record_start` with results."""
        finished_at = datetime.datetime.now().isoformat()

        if self._use_sqlite:
            conn = self._connect()
            try:
                conn.execute(
                    """UPDATE conversions SET
                       output_path=?, output_size=?, duration_ms=?, finished_at=?,
                       status=?, error_msg=?, encode_time_s=?, integrity_time_s=?
                       WHERE id=?""",
                    (
                        str(output_path) if output_path else None,
                        output_size,
                        duration_ms,
                        finished_at,
                        status,
                        error_msg,
                        encode_time,
                        integrity_time,
                        entry_id,
                    ),
                )
                conn.commit()
            finally:
                conn.close()
        else:
            # JSONL: append an update line; get_recent() merges it by ID.
            self._append_jsonl({
                "id": entry_id,
                "output": str(output_path) if output_path else None,
                "output_size": output_size,
                "finished": finished_at,
                "status": status,
                "encode_time": encode_time,
                "integrity_time": integrity_time,
                "error_msg": error_msg,
            })

    def record_skip(self, input_path: Path, reason: str, backend: str) -> None:
        """Record a skipped file (start and finish in one entry)."""
        now = datetime.datetime.now().isoformat()

        if self._use_sqlite:
            conn = self._connect()
            try:
                conn.execute(
                    """INSERT INTO conversions (input_path, started_at, finished_at, status, backend, error_msg)
                       VALUES (?, ?, ?, ?, ?, ?)""",
                    (str(input_path), now, now, "skipped", backend, reason),
                )
                conn.commit()
            finally:
                conn.close()
        else:
            self._append_jsonl({
                "input": str(input_path),
                "started": now,
                "finished": now,
                "status": "skipped",
                "backend": backend,
                "reason": reason,
            })

    def get_recent(self, limit: int = 20) -> List[dict]:
        """Get the *limit* most recent conversions, newest first."""
        if self._use_sqlite:
            conn = self._connect()
            try:
                conn.row_factory = sqlite3.Row
                cur = conn.execute(
                    "SELECT * FROM conversions ORDER BY started_at DESC LIMIT ?", (limit,)
                )
                rows = [dict(row) for row in cur.fetchall()]
            finally:
                conn.close()
            return rows

        entries = self._read_jsonl()
        # Merge update lines into their start lines (same ID).  Entries
        # without an ID (e.g. skips) each need a unique key, otherwise they
        # would all collapse onto the single key None.
        merged: Dict[object, dict] = {}
        for idx, e in enumerate(entries):
            eid = e.get("id")
            key: object = eid if eid is not None else ("no-id", idx)
            if key in merged:
                merged[key].update(e)
            else:
                merged[key] = e
        # ISO-8601 strings sort chronologically; newest first.
        result = sorted(merged.values(), key=lambda x: x.get("started", ""), reverse=True)
        return result[:limit]

    def get_stats(self) -> dict:
        """Get aggregate conversion statistics."""
        if self._use_sqlite:
            conn = self._connect()
            try:
                stats: dict = {}

                # Total counts by status
                cur = conn.execute("SELECT status, COUNT(*) FROM conversions GROUP BY status")
                stats["by_status"] = {row[0]: row[1] for row in cur.fetchall()}

                # Bound parameters instead of double-quoted SQL literals
                # ("done"), which SQLite may interpret as identifiers.
                cur = conn.execute(
                    "SELECT AVG(encode_time_s), SUM(encode_time_s) FROM conversions"
                    " WHERE status = ? AND encode_time_s > 0",
                    ("done",),
                )
                row = cur.fetchone()
                stats["avg_encode_time"] = row[0] or 0
                stats["total_encode_time"] = row[1] or 0

                # Total size processed
                cur = conn.execute(
                    "SELECT SUM(input_size), SUM(output_size) FROM conversions WHERE status = ?",
                    ("done",),
                )
                row = cur.fetchone()
                stats["total_input_size"] = row[0] or 0
                stats["total_output_size"] = row[1] or 0
            finally:
                conn.close()
            return stats

        # Basic stats derived from the JSONL fallback.
        recent = self.get_recent(1000)
        result: dict = {"by_status": {}}
        for e in recent:
            s = e.get("status", "unknown")
            result["by_status"][s] = result["by_status"].get(s, 0) + 1

        done = [e for e in recent if e.get("status") == "done"]
        times = [e.get("encode_time", 0) for e in done if e.get("encode_time")]
        result["avg_encode_time"] = sum(times) / len(times) if times else 0
        result["total_encode_time"] = sum(times)
        result["total_input_size"] = sum(e.get("input_size", 0) for e in done)
        result["total_output_size"] = sum(e.get("output_size", 0) for e in done)
        return result

    def clean_old(self, days: int) -> int:
        """Remove entries older than N days. Returns count removed."""
        cutoff = (datetime.datetime.now() - datetime.timedelta(days=days)).isoformat()

        if self._use_sqlite:
            conn = self._connect()
            try:
                cur = conn.execute("DELETE FROM conversions WHERE started_at < ?", (cutoff,))
                count = cur.rowcount
                conn.commit()
            finally:
                conn.close()
            return count

        # For JSONL, rewrite the file without old entries.
        if not self._log_path.exists():
            return 0
        kept: List[str] = []
        removed = 0
        with self._log_path.open("r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                try:
                    e = json.loads(line)
                except json.JSONDecodeError:
                    continue  # drop corrupt lines silently, as before
                # Finish-update lines carry only "finished"; fall back to it
                # so recent updates are not discarded as ancient.
                ts = e.get("started") or e.get("finished") or ""
                if ts >= cutoff:
                    kept.append(line)
                else:
                    removed += 1
        with self._log_path.open("w", encoding="utf-8") as f:
            for line in kept:
                f.write(line + "\n")
        return removed
|
281
|
+
|
|
282
|
+
|
|
283
|
+
class HistoryRecorder:
    """Track in-flight conversions and safely persist history updates.

    Thread-safe, best-effort wrapper around a HistoryDB: persistence
    failures are swallowed so recording never interrupts a conversion.
    With no HistoryDB supplied, every method is a no-op.
    """

    def __init__(self, history_db: Optional[HistoryDB], backend: str) -> None:
        self._history_db = history_db
        self._backend = backend
        self._lock = threading.Lock()
        # Maps str(input_path) -> (history entry id, wall-clock start time).
        self._active: Dict[str, Tuple[int, float]] = {}

    def start(self, input_path: Path, input_size: int = 0) -> int:
        """Record a conversion start and track it for later completion.

        Returns the history entry ID, or 0 when recording is disabled or
        the underlying write failed.
        """
        db = self._history_db
        if db is None:
            return 0

        size = input_size
        if size <= 0:
            # Caller did not supply a size; best-effort stat of the input.
            try:
                size = input_path.stat().st_size
            except Exception:
                size = 0

        try:
            eid = db.record_start(input_path, self._backend, size)
        except Exception:
            return 0

        with self._lock:
            self._active[str(input_path)] = (eid, time.time())
        return eid

    def finish(
        self,
        input_path: Path,
        status: str,
        output_path: Optional[Path] = None,
        encode_time: float = 0,
        integrity_time: float = 0,
        output_size: int = 0,
        duration_ms: int = 0,
        error_msg: Optional[str] = None,
    ) -> None:
        """Record completion for a tracked conversion."""
        db = self._history_db
        if db is None:
            return

        with self._lock:
            eid, began = self._active.pop(str(input_path), (0, 0.0))

        if eid <= 0:
            # Never saw a start entry; a skip can still be logged directly.
            if status == "skipped" and error_msg:
                self.skip(input_path, error_msg)
            return

        elapsed_ms = duration_ms
        if elapsed_ms <= 0 and began:
            elapsed_ms = int((time.time() - began) * 1000)

        try:
            db.record_finish(
                eid,
                output_path,
                status,
                encode_time=encode_time,
                integrity_time=integrity_time,
                output_size=output_size,
                duration_ms=elapsed_ms,
                error_msg=error_msg,
            )
        except Exception:
            pass  # best-effort: never let history writes break a conversion

    def skip(self, input_path: Path, reason: str) -> None:
        """Record a skipped file without a prior start entry."""
        db = self._history_db
        if db is None:
            return
        try:
            db.record_skip(input_path, reason, self._backend)
        except Exception:
            pass  # best-effort

    def interrupt_all(self, reason: str = "interrupted") -> None:
        """Mark any active conversions as interrupted."""
        db = self._history_db
        if db is None:
            return

        with self._lock:
            pending = list(self._active.items())
            self._active.clear()

        now = time.time()
        for _path, (eid, began) in pending:
            elapsed_ms = int((now - began) * 1000) if began else 0
            try:
                db.record_finish(
                    eid,
                    None,
                    "interrupted",
                    duration_ms=elapsed_ms,
                    error_msg=reason,
                )
            except Exception:
                pass  # best-effort
|
mkv2cast/i18n.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Internationalization (i18n) support for mkv2cast.
|
|
3
|
+
|
|
4
|
+
Uses Python's gettext module for translations.
|
|
5
|
+
Supports: English (en), French (fr), Spanish (es), Italian (it), German (de)
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import gettext
|
|
9
|
+
import locale
|
|
10
|
+
import os
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Callable, Optional
|
|
13
|
+
|
|
14
|
+
# Module-level translation function installed by setup_i18n(); stays None
# until the first setup_i18n() call (the _() helper initializes it lazily).
_current_translation: Optional[Callable[[str], str]] = None

# Language codes accepted by setup_i18n()/detect_system_language();
# matching catalogs live under the package's locales/ directory.
SUPPORTED_LANGUAGES = ["en", "fr", "es", "it", "de"]
DEFAULT_LANGUAGE = "en"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def get_locales_dir() -> Path:
    """Return the package's ``locales`` directory (next to this module)."""
    package_dir = Path(__file__).parent
    return package_dir / "locales"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def detect_system_language() -> str:
    """
    Detect the system language from environment.

    Checks MKV2CAST_LANG first, then the standard locale variables, then
    falls back to locale.getlocale(), and finally DEFAULT_LANGUAGE.

    Returns:
        A supported language code (e.g., 'fr', 'en', 'es').
    """
    # Check environment variables in order of priority
    for env_var in ["MKV2CAST_LANG", "LANGUAGE", "LC_ALL", "LC_MESSAGES", "LANG"]:
        value = os.environ.get(env_var, "")
        if not value:
            continue
        # LANGUAGE may hold a colon-separated priority list (e.g. 'fr:en',
        # per GNU gettext); the others hold one locale (e.g. 'fr_FR.UTF-8').
        for candidate in value.split(":"):
            # Extract language code (e.g., 'fr_FR.UTF-8' -> 'fr')
            lang_code = candidate.split("_")[0].split(".")[0].lower()
            if lang_code in SUPPORTED_LANGUAGES:
                return lang_code

    # Try locale module (use newer API to avoid deprecation warning)
    try:
        # getlocale avoids the deprecated getdefaultlocale (Python 3.11+)
        loc = locale.getlocale()[0]
        if loc:
            lang_code = loc.split("_")[0].lower()
            if lang_code in SUPPORTED_LANGUAGES:
                return lang_code
    except Exception:
        pass

    return DEFAULT_LANGUAGE
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def setup_i18n(lang: Optional[str] = None) -> Callable[[str], str]:
    """
    Configure internationalization and return the translation function.

    Args:
        lang: Language code (e.g., 'fr', 'en'). If None, auto-detect from
            the system environment.

    Returns:
        Translation function that takes a string and returns translated string.
    """
    global _current_translation

    selected = detect_system_language() if lang is None else lang
    # Normalize e.g. 'fr_FR.UTF-8' down to the bare language code 'fr'.
    selected = selected.lower().split("_")[0].split(".")[0]
    if selected not in SUPPORTED_LANGUAGES:
        selected = DEFAULT_LANGUAGE

    try:
        catalog = gettext.translation(
            "mkv2cast",
            localedir=get_locales_dir(),
            languages=[selected, DEFAULT_LANGUAGE],
            fallback=True,
        )
        _current_translation = catalog.gettext
    except Exception:
        # No usable catalog: fall back to returning strings unchanged.
        def identity_fn(x: str) -> str:
            return x

        _current_translation = identity_fn

    return _current_translation if _current_translation else (lambda x: x)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _(message: str) -> str:
    """
    Translate a message.

    This is the main translation function. Import and use as:
        from mkv2cast.i18n import _
        print(_("Processing file..."))

    Args:
        message: The message to translate (in English).

    Returns:
        Translated message, or original if no translation found.
    """
    # Lazily configure translations on first use.  No ``global`` statement
    # is needed: _current_translation is only read here, and setup_i18n()
    # performs the module-level assignment itself.
    if _current_translation is None:
        setup_i18n()

    return _current_translation(message) if _current_translation else message
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def get_current_language() -> str:
    """Get the currently configured language code.

    NOTE(review): this re-runs environment detection rather than returning
    the language most recently passed to setup_i18n(); if setup_i18n() was
    called with an explicit override, the two can disagree — confirm intent.
    """
    return detect_system_language()
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def ngettext(singular: str, plural: str, n: int) -> str:
    """
    Translate a message with singular/plural forms.

    Args:
        singular: Singular form of the message.
        plural: Plural form of the message.
        n: Count to determine which form to use.

    Returns:
        Appropriate translated form based on count.
    """
    # Two-form English plural rule only; languages with richer plural
    # rules would require gettext.ngettext with a compiled catalog.
    form = singular if n == 1 else plural
    return _(form)
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
# Translation strings catalog
# These are the strings that need translation in the application.
# Presumably a reference list for keeping the .po catalogs in sync —
# TODO confirm against the locales/ tooling.  Entries containing
# {placeholders} are formatted by callers after translation.
TRANSLATION_CATALOG = [
    # General
    "Processing file: (unknown)",
    "Conversion complete",
    "Conversion failed",
    "Skipped: {reason}",
    "No MKV files to process.",
    "Backend selected: {backend}",
    # Progress
    "Checking integrity...",
    "Encoding...",
    "Waiting for file stability...",
    "Done",
    "Failed",
    "Skipped",
    # Summary
    "Summary",
    "Total files seen",
    "Transcoded OK",
    "Skipped",
    "Failed",
    "Interrupted",
    "Total time",
    # Notifications
    "mkv2cast - Conversion Complete",
    "Successfully converted {count} file(s)",
    "mkv2cast - Conversion Failed",
    "Failed to convert {count} file(s)",
    # Errors
    "File not found: {path}",
    "Integrity check failed",
    "Output already exists",
    "ffmpeg error (rc={rc})",
    # Help texts
    "Smart MKV to Chromecast-compatible converter with hardware acceleration",
    "Process all MKV files in current directory",
    "Process single file",
    "Enable debug output",
    "Dry run - show commands without executing",
]
|