talks-reducer 0.6.3__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- talks_reducer/__about__.py +1 -1
- talks_reducer/cli.py +9 -3
- talks_reducer/{gui.py → gui/__init__.py} +375 -1192
- talks_reducer/gui/__main__.py +8 -0
- talks_reducer/gui/discovery.py +126 -0
- talks_reducer/gui/layout.py +526 -0
- talks_reducer/gui/preferences.py +113 -0
- talks_reducer/gui/remote.py +356 -0
- talks_reducer/gui/theme.py +269 -0
- talks_reducer/models.py +1 -1
- talks_reducer/pipeline.py +142 -92
- talks_reducer/server.py +52 -4
- talks_reducer/service_client.py +56 -4
- {talks_reducer-0.6.3.dist-info → talks_reducer-0.7.0.dist-info}/METADATA +14 -5
- talks_reducer-0.7.0.dist-info/RECORD +29 -0
- talks_reducer-0.6.3.dist-info/RECORD +0 -23
- {talks_reducer-0.6.3.dist-info → talks_reducer-0.7.0.dist-info}/WHEEL +0 -0
- {talks_reducer-0.6.3.dist-info → talks_reducer-0.7.0.dist-info}/entry_points.txt +0 -0
- {talks_reducer-0.6.3.dist-info → talks_reducer-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {talks_reducer-0.6.3.dist-info → talks_reducer-0.7.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,113 @@
|
|
1
|
+
"""Utilities for locating and persisting GUI preference settings."""
|
2
|
+
|
3
|
+
from __future__ import annotations
|
4
|
+
|
5
|
+
import json
|
6
|
+
import os
|
7
|
+
import sys
|
8
|
+
from pathlib import Path
|
9
|
+
from typing import Mapping, MutableMapping, Optional
|
10
|
+
|
11
|
+
|
12
|
+
def determine_config_path(
    platform: Optional[str] = None,
    env: Optional[Mapping[str, str]] = None,
    home: Optional[Path] = None,
) -> Path:
    """Return the path to the GUI settings file for the current platform.

    Args:
        platform: Platform identifier; defaults to ``sys.platform``.
        env: Environment mapping; defaults to ``os.environ``.
        home: Home directory override; defaults to ``Path.home()``.

    Returns:
        ``<base>/talks-reducer/settings.json`` where ``<base>`` is the
        platform's conventional per-user configuration directory.
    """

    plat = sys.platform if platform is None else platform
    environ = os.environ if env is None else env
    user_home = Path.home() if home is None else Path(home)

    if plat == "win32":
        # Prefer %APPDATA%; fall back to the standard Roaming location.
        roaming = environ.get("APPDATA")
        base = Path(roaming) if roaming else user_home / "AppData" / "Roaming"
    elif plat == "darwin":
        base = user_home / "Library" / "Application Support"
    else:
        # Linux and everything else: honour XDG, defaulting to ~/.config.
        config_home = environ.get("XDG_CONFIG_HOME")
        base = Path(config_home) if config_home else user_home / ".config"

    return base / "talks-reducer" / "settings.json"
|
36
|
+
|
37
|
+
|
38
|
+
def load_settings(config_path: Path) -> dict[str, object]:
    """Load settings from *config_path*, returning an empty dict on failure.

    Any read error (missing file, permission problem) or malformed JSON is
    treated as "no saved settings".  Non-object JSON payloads (lists,
    scalars) are likewise discarded so callers always receive a dict.
    """

    try:
        with config_path.open("r", encoding="utf-8") as handle:
            data = json.load(handle)
    # FileNotFoundError is a subclass of OSError, so the original separate
    # clause for it was dead weight; one clause covers both cases.
    except (OSError, json.JSONDecodeError):
        return {}

    return data if isinstance(data, dict) else {}
|
52
|
+
|
53
|
+
|
54
|
+
class GUIPreferences:
    """In-memory representation of GUI preferences backed by JSON storage."""

    def __init__(
        self,
        config_path: Path,
        settings: Optional[MutableMapping[str, object]] = None,
    ) -> None:
        self._config_path = config_path
        # Hydrate from disk only when no mapping was supplied by the caller.
        self._settings: MutableMapping[str, object] = (
            load_settings(config_path) if settings is None else settings
        )

    @property
    def data(self) -> MutableMapping[str, object]:
        """Return the underlying mutable mapping of settings."""

        return self._settings

    def get(self, key: str, default: object) -> object:
        """Return the setting *key*, storing *default* when missing."""

        if key not in self._settings:
            # Remember the default in memory; persisting is left to save().
            self._settings[key] = default
        return self._settings[key]

    def get_float(self, key: str, default: float) -> float:
        """Return *key* as a float, normalising persisted string values."""

        stored = self.get(key, default)
        try:
            result = float(stored)
        except (TypeError, ValueError):
            result = float(default)

        # Rewrite the entry (and the file) when the stored form differs,
        # e.g. a legacy string "2.5" becomes the number 2.5 on disk.
        if self._settings.get(key) != result:
            self._settings[key] = result
            self.save()

        return result

    def update(self, key: str, value: object) -> None:
        """Persist the provided *value* when it differs from the stored value."""

        if self._settings.get(key) != value:
            self._settings[key] = value
            self.save()

    def save(self) -> None:
        """Write the current settings to disk, creating parent directories.

        Write failures are deliberately swallowed: persistence is
        best-effort and must never crash the GUI.
        """

        try:
            self._config_path.parent.mkdir(parents=True, exist_ok=True)
            with self._config_path.open("w", encoding="utf-8") as fh:
                json.dump(self._settings, fh, indent=2, sort_keys=True)
        except OSError:
            pass
|
@@ -0,0 +1,356 @@
|
|
1
|
+
"""Utilities for interacting with Talks Reducer remote servers."""
|
2
|
+
|
3
|
+
from __future__ import annotations
|
4
|
+
|
5
|
+
import importlib
|
6
|
+
import os
|
7
|
+
import time
|
8
|
+
import urllib.error
|
9
|
+
import urllib.parse
|
10
|
+
import urllib.request
|
11
|
+
from pathlib import Path
|
12
|
+
from typing import TYPE_CHECKING, Callable, Dict, List, Optional
|
13
|
+
|
14
|
+
from ..pipeline import ProcessingAborted
|
15
|
+
|
16
|
+
if TYPE_CHECKING: # pragma: no cover - imported for type checking only
|
17
|
+
from . import TalksReducerGUI
|
18
|
+
|
19
|
+
|
20
|
+
def normalize_server_url(server_url: str) -> str:
    """Return *server_url* with a scheme and default path when missing."""

    parts = urllib.parse.urlsplit(server_url)
    if not parts.scheme:
        # Bare "host[:port]" parses as a path; re-parse with a scheme.
        parts = urllib.parse.urlsplit("http://" + server_url)

    authority = parts.netloc or parts.path
    if not authority:
        # Nothing usable could be parsed; hand back the input untouched.
        return server_url

    # Keep an explicit path only when the authority parsed normally.
    kept_path = parts.path if parts.netloc else ""
    return urllib.parse.urlunsplit(
        (parts.scheme, authority, kept_path or "/", "", "")
    )
|
34
|
+
|
35
|
+
|
36
|
+
def format_server_host(server_url: str) -> str:
    """Return the host label for *server_url* suitable for log messages."""

    parts = urllib.parse.urlsplit(server_url)
    if not parts.scheme:
        # Bare "host[:port]" parses as a path; re-parse with a scheme.
        parts = urllib.parse.urlsplit("http://" + server_url)

    label = parts.netloc or parts.path or server_url
    if parts.netloc and parts.path not in ("", "/"):
        label = parts.netloc + parts.path

    # Drop trailing slashes and any ":port" suffix for a compact label.
    label = label.rstrip("/").split(":")[0]
    return label or server_url
|
49
|
+
|
50
|
+
|
51
|
+
def ping_server(server_url: str, *, timeout: float = 5.0) -> bool:
    """Return ``True`` if *server_url* responds with an HTTP status."""

    request = urllib.request.Request(
        normalize_server_url(server_url),
        headers={"User-Agent": "talks-reducer-gui"},
        method="GET",
    )

    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:  # type: ignore[arg-type]
            # Newer response objects expose .status; fall back to getcode().
            status = getattr(response, "status", None)
            if status is None:
                status = response.getcode()
            # Anything below 500 (including 4xx) proves the server is alive.
            return status is not None and 200 <= int(status) < 500
    except (urllib.error.URLError, ValueError):
        return False
|
71
|
+
|
72
|
+
|
73
|
+
def check_remote_server(
    server_url: str,
    *,
    success_status: str,
    waiting_status: str,
    failure_status: str,
    on_log: Callable[[str], None],
    on_status: Callable[[str, str], None],
    success_message: Optional[str] = None,
    waiting_message_template: str = "Waiting server {host} (attempt {attempt}/{max_attempts})",
    failure_message: Optional[str] = None,
    stop_check: Optional[Callable[[], bool]] = None,
    on_stop: Optional[Callable[[], None]] = None,
    switch_to_local_on_failure: bool = False,
    alert_on_failure: bool = False,
    warning_title: str = "Server unavailable",
    warning_message: Optional[str] = None,
    max_attempts: int = 5,
    delay: float = 1.0,
    on_switch_to_local: Optional[Callable[[], None]] = None,
    on_alert: Optional[Callable[[str, str], None]] = None,
    ping: Callable[[str], bool] = ping_server,
    sleep: Callable[[float], None] = time.sleep,
) -> bool:
    """Ping *server_url* until it responds or attempts are exhausted.

    Progress is reported through *on_log*/*on_status*; ``True`` is returned
    as soon as *ping* succeeds.  When every attempt fails, the optional
    switch-to-local and alert hooks fire before ``False`` is returned.
    *ping* and *sleep* are injectable for testing.
    """

    host = format_server_host(server_url)
    context = {"host": host, "max_attempts": max_attempts}

    ready_text = (
        success_message.format(**context)
        if success_message
        else f"Server {host} is ready"
    )
    unreachable_text = (
        failure_message.format(**context)
        if failure_message
        else f"Server {host} is unreachable"
    )

    def _halt_requested() -> bool:
        # Honour a user-initiated stop, invoking the stop hook when present.
        if stop_check and stop_check():
            if on_stop:
                on_stop()
            return True
        return False

    attempt = 0
    while attempt < max_attempts:
        attempt += 1

        if _halt_requested():
            return False

        if ping(server_url):
            on_log(ready_text)
            on_status(success_status, ready_text)
            return True

        if attempt >= max_attempts:
            break  # no point announcing a wait after the final attempt

        progress = waiting_message_template.format(
            attempt=attempt, max_attempts=max_attempts, host=host
        )
        on_log(progress)
        on_status(waiting_status, progress)
        if _halt_requested():
            return False
        if delay:
            sleep(delay)

    on_log(unreachable_text)
    on_status(failure_status, unreachable_text)

    if switch_to_local_on_failure and on_switch_to_local:
        on_switch_to_local()

    if alert_on_failure and on_alert:
        alert_text = (
            warning_message.format(**context) if warning_message else unreachable_text
        )
        on_alert(warning_title, alert_text)

    return False
|
150
|
+
|
151
|
+
|
152
|
+
def check_remote_server_for_gui(
    gui: "TalksReducerGUI",
    server_url: str,
    *,
    success_status: str,
    waiting_status: str,
    failure_status: str,
    success_message: Optional[str] = None,
    waiting_message_template: str = "Waiting server {host} (attempt {attempt}/{max_attempts})",
    failure_message: Optional[str] = None,
    stop_check: Optional[Callable[[], bool]] = None,
    on_stop: Optional[Callable[[], None]] = None,
    switch_to_local_on_failure: bool = False,
    alert_on_failure: bool = False,
    warning_title: str = "Server unavailable",
    warning_message: Optional[str] = None,
    max_attempts: int = 5,
    delay: float = 1.0,
) -> bool:
    """GUI-aware wrapper around :func:`check_remote_server`.

    Every callback is marshalled onto the UI thread through
    ``gui._schedule_on_ui_thread`` so the polling loop may safely run on a
    background worker.  Lambda default arguments pin the current values and
    avoid late-binding surprises.
    """

    def forward_log(message: str) -> None:
        gui._schedule_on_ui_thread(lambda msg=message: gui._append_log(msg))

    def forward_status(status: str, message: str) -> None:
        gui._schedule_on_ui_thread(lambda s=status, m=message: gui._set_status(s, m))

    switch_handler: Optional[Callable[[], None]] = None
    if switch_to_local_on_failure:

        def switch_handler() -> None:
            gui._schedule_on_ui_thread(lambda: gui.processing_mode_var.set("local"))

    alert_handler: Optional[Callable[[str, str], None]] = None
    if alert_on_failure:

        def alert_handler(title: str, message: str) -> None:
            gui._schedule_on_ui_thread(
                lambda t=title, m=message: gui.messagebox.showwarning(t, m)
            )

    return check_remote_server(
        server_url,
        success_status=success_status,
        waiting_status=waiting_status,
        failure_status=failure_status,
        success_message=success_message,
        waiting_message_template=waiting_message_template,
        failure_message=failure_message,
        stop_check=stop_check,
        on_stop=on_stop,
        switch_to_local_on_failure=switch_to_local_on_failure,
        alert_on_failure=alert_on_failure,
        warning_title=warning_title,
        warning_message=warning_message,
        max_attempts=max_attempts,
        delay=delay,
        on_log=forward_log,
        on_status=forward_status,
        on_switch_to_local=switch_handler,
        on_alert=alert_handler,
        ping=lambda url: gui._ping_server(url),
        sleep=time.sleep,
    )
|
220
|
+
|
221
|
+
|
222
|
+
def process_files_via_server(
    gui: "TalksReducerGUI",
    files: List[str],
    args: Dict[str, object],
    server_url: str,
    *,
    open_after_convert: bool,
    default_remote_destination: Callable[[Path, bool], Path],
    parse_summary: Callable[[str], tuple[Optional[float], Optional[float]]],
) -> bool:
    """Send *files* to the configured server for processing.

    Runs on a worker thread: all widget interaction goes through
    ``gui._schedule_on_ui_thread``.  Returns ``True`` when every file was
    processed, ``False`` on the first failure, and raises
    :class:`ProcessingAborted` when the user requests a stop.
    """

    def _ensure_not_stopped() -> None:
        # Convert the GUI's stop flag into an exception so the abort
        # propagates out of this worker immediately.
        if gui._stop_requested:
            raise ProcessingAborted("Remote processing cancelled by user.")

    # Import lazily so the GUI still starts when gradio_client is absent.
    try:
        service_module = importlib.import_module("talks_reducer.service_client")
    except ModuleNotFoundError as exc:
        gui._append_log(f"Server client unavailable: {exc}")
        gui._schedule_on_ui_thread(
            lambda: gui.messagebox.showerror(
                "Server unavailable",
                "Remote processing requires the gradio_client package.",
            )
        )
        gui._schedule_on_ui_thread(lambda: gui._set_status("Error"))
        return False

    host_label = format_server_host(server_url)
    gui._schedule_on_ui_thread(
        lambda: gui._set_status("waiting", f"Waiting server {host_label}...")
    )

    # NOTE(review): success_status="waiting" and waiting_status="Error" look
    # swapped relative to their parameter names — confirm the intended
    # status tokens against TalksReducerGUI._set_status.
    available = check_remote_server_for_gui(
        gui,
        server_url,
        success_status="waiting",
        waiting_status="Error",
        failure_status="Error",
        failure_message=(
            "Server {host} is unreachable after {max_attempts} attempts. Switching to local mode."
        ),
        stop_check=lambda: gui._stop_requested,
        on_stop=_ensure_not_stopped,
        switch_to_local_on_failure=True,
        alert_on_failure=True,
        warning_message=(
            "Server {host} is not reachable. Switching to local processing mode."
        ),
    )

    _ensure_not_stopped()

    if not available:
        return False

    # A caller-supplied output path only makes sense for a single upload.
    output_override = args.get("output_file") if len(files) == 1 else None
    allowed_remote_keys = {
        "output_file",
        "small",
        "silent_threshold",
        "sounded_speed",
        "silent_speed",
    }
    # Warn once about options the remote endpoint does not accept.
    ignored = [key for key in args if key not in allowed_remote_keys]
    if ignored:
        ignored_options = ", ".join(sorted(ignored))
        gui._append_log(f"Server mode ignores the following options: {ignored_options}")

    small_mode = bool(args.get("small", False))

    for index, file in enumerate(files, start=1):
        _ensure_not_stopped()
        basename = os.path.basename(file)
        gui._append_log(f"Uploading {index}/{len(files)}: {basename} to {server_url}")
        input_path = Path(file)

        # Resolve the destination: an override directory gets the default
        # file name appended; otherwise use the computed default path.
        if output_override is not None:
            output_path = Path(output_override)
            if output_path.is_dir():
                output_path = (
                    output_path
                    / default_remote_destination(input_path, small=small_mode).name
                )
        else:
            output_path = default_remote_destination(input_path, small=small_mode)

        try:
            destination, summary, log_text = service_module.send_video(
                input_path=input_path,
                output_path=output_path,
                server_url=server_url,
                small=small_mode,
                silent_threshold=args.get("silent_threshold"),
                sounded_speed=args.get("sounded_speed"),
                silent_speed=args.get("silent_speed"),
                stream_updates=True,
                log_callback=gui._append_log,
                should_cancel=lambda: gui._stop_requested,
            )
            _ensure_not_stopped()
        except ProcessingAborted:
            # User cancellation is handled by the caller, not reported here.
            raise
        except Exception as exc:  # pragma: no cover - network safeguard
            error_detail = f"{exc.__class__.__name__}: {exc}"
            error_msg = f"Processing failed: {error_detail}"
            gui._append_log(error_msg)
            gui._schedule_on_ui_thread(lambda: gui._set_status("Error"))
            gui._schedule_on_ui_thread(
                lambda: gui.messagebox.showerror(
                    "Server error", f"Failed to process {basename}: {error_detail}"
                )
            )
            return False

        # Record results for the "open last output" button and status line.
        gui._last_output = Path(destination)
        time_ratio, size_ratio = parse_summary(summary)
        gui._last_time_ratio = time_ratio
        gui._last_size_ratio = size_ratio
        for line in summary.splitlines():
            gui._append_log(line)
        if log_text.strip():
            gui._append_log("Server log:")
            for line in log_text.splitlines():
                gui._append_log(line)
        if open_after_convert:
            # Bind the path as a default so later iterations don't rebind it.
            gui._schedule_on_ui_thread(
                lambda path=gui._last_output: gui._open_in_file_manager(path)
            )

    gui._append_log("All jobs finished successfully.")
    gui._schedule_on_ui_thread(lambda: gui.open_button.configure(state=gui.tk.NORMAL))
    gui._schedule_on_ui_thread(gui._clear_input_files)
    return True
|