talks-reducer 0.5.5__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- talks_reducer/__about__.py +1 -1
- talks_reducer/cli.py +194 -33
- talks_reducer/discovery.py +42 -12
- talks_reducer/gui.py +694 -168
- talks_reducer/pipeline.py +1 -1
- talks_reducer/server.py +110 -23
- talks_reducer/server_tray.py +230 -18
- talks_reducer/service_client.py +258 -4
- talks_reducer/version_utils.py +22 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/METADATA +18 -7
- talks_reducer-0.6.1.dist-info/RECORD +22 -0
- talks_reducer-0.5.5.dist-info/RECORD +0 -21
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/WHEEL +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/entry_points.txt +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/licenses/LICENSE +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/top_level.txt +0 -0
talks_reducer/pipeline.py
CHANGED
@@ -211,7 +211,7 @@ def speed_up_video(
     audio_sample_count = audio_data.shape[0]
     max_audio_volume = audio_utils.get_max_volume(audio_data)

-    reporter.log("\
+    reporter.log("\nInformation:")
     reporter.log(f"- Max Audio Volume: {max_audio_volume}")

     samples_per_frame = wav_sample_rate / frame_rate
talks_reducer/server.py
CHANGED
@@ -5,10 +5,13 @@ from __future__ import annotations
 import argparse
 import atexit
 import shutil
+import socket
 import tempfile
 from contextlib import AbstractContextManager, suppress
 from pathlib import Path
-from
+from queue import SimpleQueue
+from threading import Thread
+from typing import Callable, Iterator, Optional, Sequence

 import gradio as gr

@@ -16,6 +19,7 @@ from talks_reducer.ffmpeg import FFmpegNotFoundError
 from talks_reducer.models import ProcessingOptions, ProcessingResult
 from talks_reducer.pipeline import speed_up_video
 from talks_reducer.progress import ProgressHandle, SignalProgressReporter
+from talks_reducer.version_utils import resolve_version


 class _GradioProgressHandle(AbstractContextManager[ProgressHandle]):
@@ -86,10 +90,12 @@ class GradioProgressReporter(SignalProgressReporter):
         self,
         progress_callback: Optional[Callable[[int, int, str], None]] = None,
         *,
+        log_callback: Optional[Callable[[str], None]] = None,
         max_log_lines: int = 500,
     ) -> None:
         super().__init__()
         self._progress_callback = progress_callback
+        self._log_callback = log_callback
         self._max_log_lines = max_log_lines
         self._active_desc = "Processing"
         self.logs: list[str] = []
@@ -103,6 +109,8 @@ class GradioProgressReporter(SignalProgressReporter):
         self.logs.append(text)
         if len(self.logs) > self._max_log_lines:
             self.logs = self.logs[-self._max_log_lines :]
+        if self._log_callback is not None:
+            self._log_callback(text)

     def task(
         self,
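Note: the `log_callback` hook added in this hunk hands each log line to the caller as soon as `log()` appends it, which is what the streaming web UI later in this diff relies on. A minimal sketch of wiring it up, assuming only the constructor signature and `log()` behaviour visible above (the queue consumer is illustrative, not part of the package):

    from queue import SimpleQueue

    from talks_reducer.server import GradioProgressReporter

    # Forward every reported line onto a queue so another thread or a
    # generator can drain it incrementally instead of reading reporter.logs
    # only after the run finishes.
    events: SimpleQueue = SimpleQueue()
    reporter = GradioProgressReporter(log_callback=events.put)

    reporter.log("probe line")  # appended to reporter.logs and pushed to the queue
    print(events.get())         # -> "probe line"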
@@ -136,6 +144,8 @@ class GradioProgressReporter(SignalProgressReporter):
             self._progress_callback(bounded_current, total_value, display_desc)


+_FAVICON_PATH = Path(__file__).resolve().parent.parent / "docs" / "assets" / "icon.ico"
+_FAVICON_PATH_STR = str(_FAVICON_PATH) if _FAVICON_PATH.exists() else None
 _WORKSPACES: list[Path] = []


@@ -157,6 +167,26 @@ def _cleanup_workspaces() -> None:
     _WORKSPACES.clear()


+def _describe_server_host() -> str:
+    """Return a human-readable description of the server hostname and IP."""
+
+    hostname = socket.gethostname().strip()
+    ip_address = ""
+
+    with suppress(OSError):
+        resolved_ip = socket.gethostbyname(hostname or "localhost")
+        if resolved_ip:
+            ip_address = resolved_ip
+
+    if hostname and ip_address and hostname != ip_address:
+        return f"{hostname} ({ip_address})"
+    if ip_address:
+        return ip_address
+    if hostname:
+        return hostname
+    return "unknown"
+
+
 def _build_output_path(input_path: Path, workspace: Path, small: bool) -> Path:
     """Mirror the CLI output naming scheme inside the workspace directory."""

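For context, the `_describe_server_host()` helper added above reduces to the standard-library lookup sketched below; the printed label is an invented example of the "hostname (ip)" string the web UI banner displays, not real output:

    import socket
    from contextlib import suppress

    hostname = socket.gethostname().strip()
    ip_address = ""
    with suppress(OSError):
        ip_address = socket.gethostbyname(hostname or "localhost")

    # Same preference order as the helper: "hostname (ip)", then ip, then hostname.
    if hostname and ip_address and hostname != ip_address:
        label = f"{hostname} ({ip_address})"
    else:
        label = ip_address or hostname or "unknown"
    print(label)  # e.g. "render-box (192.168.1.20)" (example values)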
@@ -213,7 +243,7 @@ def process_video(
     file_path: Optional[str],
     small_video: bool,
     progress: Optional[gr.Progress] = gr.Progress(track_tqdm=False),
-) -> tuple[Optional[str], str, str, Optional[str]]:
+) -> Iterator[tuple[Optional[str], str, str, Optional[str]]]:
     """Run the Talks Reducer pipeline for a single uploaded file."""

     if not file_path:
@@ -235,7 +265,15 @@ def process_video(

     progress_callback = _callback

-
+    events: "SimpleQueue[tuple[str, object]]" = SimpleQueue()
+
+    def _log_callback(message: str) -> None:
+        events.put(("log", message))
+
+    reporter = GradioProgressReporter(
+        progress_callback=progress_callback,
+        log_callback=_log_callback,
+    )

     options = ProcessingOptions(
         input_file=input_path,
@@ -244,38 +282,86 @@ def process_video(
         small=small_video,
     )

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def _worker() -> None:
+        try:
+            result = speed_up_video(options, reporter=reporter)
+        except FFmpegNotFoundError as exc:  # pragma: no cover - depends on runtime env
+            events.put(("error", gr.Error(str(exc))))
+        except FileNotFoundError as exc:
+            events.put(("error", gr.Error(str(exc))))
+        except Exception as exc:  # pragma: no cover - defensive fallback
+            reporter.log(f"Error: {exc}")
+            events.put(("error", gr.Error(f"Failed to process the video: {exc}")))
+        else:
+            reporter.log("Processing complete.")
+            events.put(("result", result))
+        finally:
+            events.put(("done", None))
+
+    worker = Thread(target=_worker, daemon=True)
+    worker.start()
+
+    collected_logs: list[str] = []
+    final_result: Optional[ProcessingResult] = None
+    error: Optional[gr.Error] = None
+
+    while True:
+        kind, payload = events.get()
+        if kind == "log":
+            text = str(payload).strip()
+            if text:
+                collected_logs.append(text)
+                yield (
+                    gr.update(),
+                    "\n".join(collected_logs),
+                    gr.update(),
+                    gr.update(),
+                )
+        elif kind == "result":
+            final_result = payload  # type: ignore[assignment]
+        elif kind == "error":
+            error = payload  # type: ignore[assignment]
+        elif kind == "done":
+            break
+
+    worker.join()
+
+    if error is not None:
+        raise error
+
+    if final_result is None:
+        raise gr.Error("Failed to process the video.")
+
+    log_text = "\n".join(collected_logs)
+    summary = _format_summary(final_result)
+
+    yield (
+        str(final_result.output_file),
         log_text,
         summary,
-        str(
+        str(final_result.output_file),
     )


 def build_interface() -> gr.Blocks:
     """Construct the Gradio Blocks application for the simple web UI."""

-
+    server_identity = _describe_server_host()
+
+    app_version = resolve_version()
+    version_suffix = (
+        f" v{app_version}" if app_version and app_version != "unknown" else ""
+    )
+
+    with gr.Blocks(title=f"Talks Reducer Web UI{version_suffix}") as demo:
         gr.Markdown(
-            """
-            ## Talks Reducer
+            f"""
+            ## Talks Reducer Web UI{version_suffix}
             Drop a video into the zone below or click to browse. **Small video** is enabled
             by default to apply the 720p/128k preset before processing starts—clear it to
             keep the original resolution.
+
+            Video will be rendered on server **{server_identity}**.
             """.strip()
         )

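The rewritten `process_video` above turns the handler into a generator: a daemon thread runs `speed_up_video` and pushes ("log", "result", "error", "done") events onto a `SimpleQueue`, while the generator drains the queue and yields partial update tuples so the log pane refreshes while rendering is still in progress. A stripped-down sketch of that worker/queue/generator pattern without Gradio, where `slow_job` and its log messages are invented stand-ins for the real pipeline call:

    from queue import SimpleQueue
    from threading import Thread


    def slow_job(log):
        # Stand-in for speed_up_video: emit a few progress lines, then return a value.
        for step in ("demux", "analyze", "encode"):
            log(f"step: {step}")
        return "output.mp4"


    def run_streaming():
        events: SimpleQueue = SimpleQueue()

        def worker():
            try:
                result = slow_job(lambda message: events.put(("log", message)))
            except Exception as exc:
                events.put(("error", exc))
            else:
                events.put(("result", result))
            finally:
                events.put(("done", None))

        Thread(target=worker, daemon=True).start()

        final = None
        error = None
        while True:
            kind, payload = events.get()
            if kind == "log":
                yield (None, payload)  # partial update while the worker runs
            elif kind == "result":
                final = payload
            elif kind == "error":
                error = payload
            elif kind == "done":
                break
        if error is not None:
            raise error
        yield (final, "done")  # final update once the worker has finished


    for update in run_streaming():
        print(update)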
@@ -337,6 +423,7 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
         server_port=args.port,
         share=args.share,
         inbrowser=not args.no_browser,
+        favicon_path=_FAVICON_PATH_STR,
     )


talks_reducer/server_tray.py
CHANGED
@@ -4,6 +4,7 @@ from __future__ import annotations

 import argparse
 import atexit
+import base64
 import logging
 import subprocess
 import sys
@@ -12,55 +13,256 @@ import time
 import webbrowser
 from contextlib import suppress
 from importlib import resources
+from io import BytesIO
 from pathlib import Path
-from typing import Any, Optional, Sequence
+from typing import Any, Iterator, Optional, Sequence
+from urllib.parse import urlsplit, urlunsplit

 from PIL import Image

 from .server import build_interface
+from .version_utils import resolve_version

 try:  # pragma: no cover - import guarded for clearer error message at runtime
     import pystray
 except ModuleNotFoundError as exc:  # pragma: no cover - handled in ``main``
     PYSTRAY_IMPORT_ERROR = exc
     pystray = None  # type: ignore[assignment]
+except Exception as exc:  # pragma: no cover - handled in ``main``
+    PYSTRAY_IMPORT_ERROR = exc
+    pystray = None  # type: ignore[assignment]
 else:
     PYSTRAY_IMPORT_ERROR = None


 LOGGER = logging.getLogger(__name__)
+APP_VERSION = resolve_version()


 def _guess_local_url(host: Optional[str], port: int) -> str:
     """Return the URL the server is most likely reachable at locally."""

-    if host in (None, "", "0.0.0.0"
-        hostname = "
+    if host in (None, "", "0.0.0.0"):
+        hostname = "127.0.0.1"
+    elif host == "::":
+        hostname = "::1"
     else:
         hostname = host
     return f"http://{hostname}:{port}/"


+def _normalize_local_url(url: str, host: Optional[str], port: int) -> str:
+    """Rewrite *url* when a wildcard host should map to the loopback address."""
+
+    if host not in (None, "", "0.0.0.0"):
+        return url
+
+    try:
+        parsed = urlsplit(url)
+    except ValueError:
+        return _guess_local_url(host, port)
+
+    hostname = parsed.hostname or ""
+    if hostname in ("", "0.0.0.0"):
+        netloc = f"127.0.0.1:{parsed.port or port}"
+        return urlunsplit(
+            (
+                parsed.scheme or "http",
+                netloc,
+                parsed.path or "/",
+                parsed.query,
+                parsed.fragment,
+            )
+        )
+
+    return url
+
+
+def _iter_icon_candidates() -> Iterator[Path]:
+    """Yield possible tray icon paths ordered from most to least specific."""
+
+    module_path = Path(__file__).resolve()
+    package_root = module_path.parent
+    project_root = package_root.parent
+
+    frozen_root: Optional[Path] = None
+    frozen_value = getattr(sys, "_MEIPASS", None)
+    if frozen_value:
+        with suppress(Exception):
+            frozen_root = Path(str(frozen_value)).resolve()
+
+    executable_root: Optional[Path] = None
+    with suppress(Exception):
+        executable_root = Path(sys.executable).resolve().parent
+
+    launcher_root: Optional[Path] = None
+    with suppress(Exception):
+        launcher_root = Path(sys.argv[0]).resolve().parent
+
+    base_roots: list[Path] = []
+    for candidate in (
+        package_root,
+        project_root,
+        frozen_root,
+        executable_root,
+        launcher_root,
+    ):
+        if candidate and candidate not in base_roots:
+            base_roots.append(candidate)
+
+    expanded_roots: list[Path] = []
+    suffixes = (
+        Path(""),
+        Path("_internal"),
+        Path("Contents") / "Resources",
+        Path("Resources"),
+    )
+    for root in base_roots:
+        for suffix in suffixes:
+            candidate_root = (root / suffix).resolve()
+            if candidate_root not in expanded_roots:
+                expanded_roots.append(candidate_root)
+
+    if sys.platform == "win32":
+        icon_names = ("icon.ico", "icon.png")
+    else:
+        icon_names = ("icon.png", "icon.ico")
+    relative_paths = (
+        Path("docs") / "assets",
+        Path("assets"),
+        Path("talks_reducer") / "assets",
+        Path(""),
+    )
+
+    seen: set[Path] = set()
+    for root in expanded_roots:
+        if not root.exists():
+            continue
+        for relative in relative_paths:
+            for icon_name in icon_names:
+                candidate = (root / relative / icon_name).resolve()
+                if candidate in seen:
+                    continue
+                seen.add(candidate)
+                yield candidate
+
+
 def _load_icon() -> Image.Image:
-    """Load the tray icon image, falling back to
+    """Load the tray icon image, falling back to the embedded pen artwork."""

     LOGGER.debug("Attempting to load tray icon image.")

-
-
-
-
+    for candidate in _iter_icon_candidates():
+        LOGGER.debug("Checking icon candidate at %s", candidate)
+        if candidate.exists():
+            try:
+                with Image.open(candidate) as image:
+                    loaded = image.copy()
+            except Exception as exc:  # pragma: no cover - diagnostic log
+                LOGGER.warning("Failed to load tray icon from %s: %s", candidate, exc)
+            else:
+                LOGGER.debug("Loaded tray icon from %s", candidate)
+                return loaded
+
+    LOGGER.warning("Falling back to generated tray icon; packaged image not found")
+    image = Image.new("RGBA", (64, 64), color=(37, 99, 235, 255))
+    image.putpixel((0, 0), (255, 255, 255, 255))
+    image.putpixel((63, 63), (17, 24, 39, 255))
+    return image
+

-
+_EMBEDDED_ICON_BASE64 = (
+    "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAAARnQU1BAACx"
+    "jwv8YQUAAAAJcEhZcwAADsIAAA7CARUoSoAAAA3MSURBVHhe5Zt7cB31dcc/Z/deyVfPK11JDsLW"
+    "xAEmMJDGk+mEFGza8oiZ8l+GSWiwMTBJQzDpA2xT4kcKpsWxoSl0SiCTEAPFDYWkEJtCeYSnG0oh"
+    "sQHbacCWbcmyZOlKV5L1vLt7+sdvf3d3rx6hnfwT6Tuzurvnd36Pc37nd37nd3YlzON38LWkUjtF"
+    "xfmMKYxEnRMA2Bv4/spC9/H9llRSQP3C1nOcVPp5EWeRFdgoYI4Ib6HaG3jFFYWerl9iFVDb2FSX"
+    "zlS/gcinZltgcwaqH/iT4388m9vzOQcgtSCzCZFPlfPNWYic5aQr7wCQbMtpS5x0xT5xnFoALXcg"
+    "cwwxJz0WFItLHXFTXxCRWkueLxCRjLipqxxELkruVXMbkYiCiFzmiMi5IKBzzt9/FPyeo+giKAsk"
+    "5gkU6hwUp7xg7sNGlODMj5VfjlJ8q/Nw9pOIFDAP7QCQSAHzzweCwgLmFUqznfQBCvR3mmtgYi7v"
+    "jKX1Lo5Vh0bKQJdy3VXCbWsczlqk5DuUYG77BpWG1rZAXFf6OwIeuD3F6osqqXSFIwMeD71U5M77"
+    "fHAg12qSIwqIfjSfEahSOH40RknRtHgxwf/hwCVAfhgYLC/5f6AZcpWhDCIEQYA0tLYFk+rKSFVA"
+    "+3cyfLzOAR9wwRN483CRbT8usmu34jZDthIClSRy2MahfiBMth1lNu3bOFz55/PyMgIj//rEzz+"
+    "LzvJLV7ykU6drkBvN3zpIuGK3xcy6WBKtisOEUEVRMKUmU3JCRRGhEdeDXj9V9DUCEqkAB3zXSqa"
+    "A97fmmFxjYv6CmHqjjQUxpWn3y5y7Q+K8CFkW8FxZjeB/s4j7Hj4YVatWoUTaqswOMg99/w9d265"
+    "4zcqwRHoy8NNK4QtqzNks6lo6QrJvJ/YSSgbU2liDEPHiUn+8sFxfrIPcjVGAaETVPzA+AHbiW1T"
+    "JyGbElYvq+TDuzN88xsOhS6l/4SajKFdFzEURsc545zzuHzFipLwANn6ev761vVs3LSZfEd7oqwc"
+    "gQKjyp/+cZpsQxr1FQ3HGN2Hl29+sWW+hrTk8+LWCq6/LAUDkf4cAClNt4FVcImuQBHOyDr8zZUZ"
+    "Xv1BJVd8Xsh3wtCETlF8UCzS1JiloqIiWQBUV1dz6/p13HzLWvo62n+DJQmZSjuAj4AEW7KO7cW0"
+    "FxFKZwHDbqi2qk1Tq4b3gZD2hIvOTPPoX2XYcVcKLwv5TjMjFvW11fzXnjc4fLg9IsZQU1PDxg3f"
+    "ZNU1q+k7Nr0SLOX1d4toMUBcQVwQR2KX2cfshVtGiz2TgsnxgBf3Gqdu23cztfXf8lREquCmS9PU"
+    "VzpGAyLGNsRah0SqCiDjCkuXpLh6mUtVLuDFnyljo1BVY+qMDRUYHhll+bJlVFdXlwSzyGQynH/+"
+    "Z/ng0GH2vvUm1dmGKQ6ushaeelNpqfapSgUMDHnkC2XXgE9+0Cdf8MkPfuQLPv3hc1/BPnt0nvR4"
+    "5MVJbn8kINdq5FNVJNvaFoz7jrg55cDWDG21rvEFYpIk0RqIz1LMXlJQBPZ8WGT7k0X+/Vkl1QwN"
+    "GYfeY+2svGY1d2/fxsKWllj9CMeOHeOGG9fw7DO7aVq8ZMoWGSgUjjPFpGdFfNUkVpCQM9kPsNvg"
+    "jAqYUttsKeYpPhhBRCEF/ePKLrtbHILcIod8ZzurVl/L9m3fnlEJ7e3trLr2eva89kpJCdavWH14"
+    "Cv4MOnDEKCo+TWp28lI7TshnYeOAcAk44lYpa2JLoOQAw0pJR2nM3FyGoj5UucLSj7usXJ6iuing"
+    "hZd8qGvk3bdeoftkL8suvGDa5dDQ0MCyCy/gtT0/p/1/DlBd32DaDIUa6ISJYaU4zLTX5DQ0S5+Y"
+    "EGrDlG8cJmZQlWzr4mDcdyXdpLx/l7EAmx+0b4aMjFbS6W5DZ2mfXSgK7PnAY9OOSd54T2DQWMLd"
+    "27fR0txsx5HAgYMH+dKXV/L+3n00LW7DV2WgD+5a6XDZZ1wq09YijCkIgtqNSs07RvuLwNCY8MRr"
+    "Hv/wZEDTonBrDSEIgfo2EHJIN4cKqDEKMN1Ys7d34fYYM6U4bGAj9k+FsK/TY+nN49TiMNzVzvVf"
+    "+Qrbtm4ll8uVVwdg//79fOGqq2k/0U0xn+HbX3O55coMbrr84BqXJu6vYnDg1LDPhodGuW+30hjT"
+    "u10CpVYdjU+pifvjwmu4HRIzzSSmUphUzlnocs2FDsMnlKa2T/DQ97/PbRs2UChMH9yfe+65/Nl1"
+    "qynmewDl4k+7uGkHDZ2AerFAxwuDIC8wv6Wgx1wUlZoalys+68LENOOzgZACKRVzyFE7k/bNsFFC"
+    "uXKtyVv+uPO2dIAJT+kfVqgKNQdkFmSmthdDKp0q3Re9qOHEBhENLjnzoQzm1tDHi2U8MTix5qce"
+    "cEqXjQHKoDrz22PHnCOee89j93NKrkHo62jn5rVr2WLH7WTr68trAHC4vZ0dj+6E+maoEJ76T49T"
+    "w54Jglwb3IQBjpsMhMSRMGAKeVzoOenx2M98yJb3ZCDZ0AfUNSm/uCvDongcENOb2iURyitmfcRK"
+    "wnsxscGJ4YB/frXI+ns86hc6DHa1c8vadWzatJH6urqQP4kjR4/y9TU3AdwzO6meam3Ws0fwlTv"
+    "8dwzu2luM4elvm746ueFS5c6VKQil5wwM2KOJ7ReERiZhCf3+Dz139DUFDlBK1mgJg7QMd+hvkl5"
+    "J6GAqP34FphQQJyA8f5jvvLCAY9NjxZ59y2l4XSHgePt3LJuHZs3bqRuBuGPHj3GV792Ay/8x7M0"
+    "tS0hCEcrQL4AnCIpcKk0jtI0ReUN0FST3AEgcoKSbW3TUd8h26S8szXDohq37Jha3okR2n7WUpp1"
+    "F97v9vmnXZM88GgAdUJzvdDb0c7adevZtHHDjMIf6+jghq+v4dlndpUCIRvcYKPByWnkp0zeEAvS"
+    "UOUmaeWIKyAY8x2ZsgRKnOGvxgQO6YIx997RgCd+XmTN/R70Co2LwBGz5tetv5VNGzdQO100AnR3"
+    "d3PjTd/g33785JRQ2BHomwR64IKlSk2FEtgNOWaKcatU4KVjgg5Ac8vM0aOJA2IKqG1SfmktILCC"
+    "x2rHFCCYGZ8EXv/A486dE7zyMmQWCpm04ervPMJNf/4X/O2WO2ac+ZMnT3Lz2nU89ugjCbO3GPTg"
+    "EzXw4A1plp6ZxgkFjH+4k7BPMX/6BpV7nxrnH3crueYpBgJJC1isY75LTU7ZuzVuAWaKBbPNRekm"
+    "M+sf5n2+99wk2x/0YQHkmqJtc6LoMdLTycGDv+Lssz9Z1rVBPp9n7br17PjhQ9MKT5ihfnxzii9e"
+    "XAWx4/a0a93+qDkWd5wosmLzKAcHoTGclDisBTgmxSmJg0KpQZsHCK1B0krBC3h4zwRnrR1j+4M+"
+    "DadDYy4yQ4CRoRGW/9HFnHbaxyJiDP39/WzYtHlW4Q1JOXuxOZtoEMsI2Webq1A1fstmtXxlYYPL"
+    "5860znMahCKWIsGE/Gh0qdljPQfeOORxzb1jXHtbEXcIcotKAXKiAbeqko6uE4yOjUXEEEb4TTz4"
+    "3ftnFJ5wPYPQ1RfmJ0uXIPYwFvqD0l1YjgPDoz4f9ACZ8paTcAQlUKXSSR4XCQchFXB0KGDLT8ZZ"
+    "ft0Eu55VcouE+srQ5EWSH0wC9ZkFHPn1QZ5++mk8zyvRe3t72bj5Wzxw//3G4c0gPHZCcvCdp4oc"
+    "PDTBZFHxvGCaK0YvKr6v9Pf77Hh+nDfegcYF5S0nIdnWNi2qw0ha+fC+DGdkXWN/Lpzy4Ll3i6zd"
+    "McnRfbFssF2CFmVLkdBvDBw/ym0bNvCHy5fTPzDAYzt/xDO7np515uNwBPpOQbYavrhUqF1gT6kR"
+    "yleuIvyiHV5+W8mVvnicikQc4Lou+Y6Av7vF5YbLF5BJCQe6PO796SSPPB5AA+Sqky9GSsdQylZA"
+    "QgkwcPxIRHCqaTq9ZUrWZzYIcMqHiUGTmJ0VttnsR5j5eBzguq4ISl+HcsmlQnOD8KOXFfqYVYu/"
+    "yzAK8CMFEJpt/xAwArUtUOHOTeGJWUBpF7Dhb2MdNJ4G6bksfOw+2gbLdoC5jGhi5+UXYnFIbAmU"
+    "7eXzBeWxz7xD6QuR+aSIhBMUkcnY8zyD4iAcM1vgXN30ZsWgg2r4D0TzaBFECd/3HFV9IaLPIyWY"
+    "4O9lRz3vp6hO/5pmDsJOsqoW1fefcAonT3SoBg9ZHzCnraAkmqJB8Hihp+s9B8AfH9uigf46wTwn"
+    "Ec5+oN3qFTdgzwJD/X0Dge9frar9EV/4eczvOkrfMYRvulXH1PdXFk6eOEb8MFTo7nxbPe9yDYJD"
+    "iS9BbTLOPMxymfIoUxc+xzqfymOo09RKtBmrHD7GCYYo4Thtf8lxm11eg6Az8Ip/MtDd+ZIlJ96f"
+    "jJ8a6qqoXLBTHLca4TxESgll03Akb7leTKchzSYvEwMPRUzwmMpRu3bwlseUSVxAq5C4kJYU664E"
+    "c8yf0CB4WIuTXy6cPPF+vHjaOgD1Laed6abSVyJyCfDpQLW57B2xTB89WXI4qhJCmrUutbRpEG/Z"
+    "tjEDa4ktORoRkR5F96P6ivr+k4WeroOl0hj+F2nUsotZ+OvIAAAAAElFTkSuQmCC"
+)
+
+
+def _load_embedded_icon() -> Image.Image:
+    """Decode and return the embedded Talks Reducer tray icon."""
+
+    data = base64.b64decode(_EMBEDDED_ICON_BASE64)
+    with Image.open(BytesIO(data)) as image:
+        return image.copy()
+
+
+def _load_icon() -> Image.Image:
+    """Load the tray icon image, falling back to the embedded pen artwork."""
+
+    LOGGER.debug("Attempting to load tray icon image.")
+
+    for candidate in _iter_icon_candidates():
         LOGGER.debug("Checking icon candidate at %s", candidate)
         if candidate.exists():
             try:
-
+                with Image.open(candidate) as image:
+                    loaded = image.copy()
             except Exception as exc:  # pragma: no cover - diagnostic log
                 LOGGER.warning("Failed to load tray icon from %s: %s", candidate, exc)
             else:
                 LOGGER.debug("Loaded tray icon from %s", candidate)
-                return
+                return loaded

     with suppress(FileNotFoundError):
         resource_icon = resources.files("talks_reducer") / "assets" / "icon.png"
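Among the helpers added above, `_normalize_local_url` exists because Gradio may report a `local_url` that embeds the wildcard bind address (0.0.0.0), which a browser cannot open directly. A quick illustration of the intended rewrite using the same `urlsplit`/`urlunsplit` approach; the URLs and port are made-up examples:

    from urllib.parse import urlsplit, urlunsplit


    def to_loopback(url: str, default_port: int) -> str:
        # Swap a non-routable wildcard host for 127.0.0.1, keeping scheme, port and path.
        parsed = urlsplit(url)
        if (parsed.hostname or "") in ("", "0.0.0.0"):
            netloc = f"127.0.0.1:{parsed.port or default_port}"
            return urlunsplit(
                (parsed.scheme or "http", netloc, parsed.path or "/", parsed.query, parsed.fragment)
            )
        return url


    print(to_loopback("http://0.0.0.0:7860/", 7860))   # -> http://127.0.0.1:7860/
    print(to_loopback("http://my-host:7860/", 7860))   # unchanged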
@@ -68,15 +270,17 @@ def _load_icon() -> Image.Image:
             LOGGER.debug("Loading tray icon from package resources")
             with resource_icon.open("rb") as handle:
                 try:
-
+                    with Image.open(handle) as image:
+                        return image.copy()
                 except Exception as exc:  # pragma: no cover - diagnostic log
                     LOGGER.warning(
                         "Failed to load tray icon from package resources: %s", exc
                     )

     LOGGER.warning("Falling back to generated tray icon; packaged image not found")
-    # Fallback to a simple accent-colored square to avoid import errors
     image = Image.new("RGBA", (64, 64), color=(37, 99, 235, 255))
+    image.putpixel((0, 0), (255, 255, 255, 255))
+    image.putpixel((63, 63), (17, 24, 39, 255))
     return image


@@ -130,9 +334,9 @@ class _ServerTrayApplication:
         )

         self._server_handle = server
-
-
-        )
+        fallback_url = _guess_local_url(self._host, self._port)
+        local_url = getattr(server, "local_url", fallback_url)
+        self._local_url = _normalize_local_url(local_url, self._host, self._port)
         self._share_url = getattr(server, "share_url", None)
         self._ready_event.set()
         LOGGER.info("Server ready at %s", self._local_url)
@@ -261,7 +465,12 @@ class _ServerTrayApplication:
             return

         icon_image = _load_icon()
+        version_suffix = (
+            f" v{APP_VERSION}" if APP_VERSION and APP_VERSION != "unknown" else ""
+        )
+        version_label = f"Talks Reducer{version_suffix}"
         menu = pystray.Menu(
+            pystray.MenuItem(version_label, None, enabled=False),
             pystray.MenuItem(
                 "Open GUI",
                 self._launch_gui,
@@ -271,7 +480,10 @@ class _ServerTrayApplication:
             pystray.MenuItem("Quit", self._handle_quit),
         )
         self._icon = pystray.Icon(
-            "talks-reducer",
+            "talks-reducer",
+            icon_image,
+            f"{version_label} Server",
+            menu=menu,
         )

         if self._tray_mode == "pystray-detached":
@@ -337,7 +549,7 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
         description="Launch the Talks Reducer server with a system tray icon."
     )
     parser.add_argument(
-        "--host", dest="host", default=
+        "--host", dest="host", default="0.0.0.0", help="Custom host to bind."
     )
     parser.add_argument(
         "--port",