talks_reducer-0.5.5-py3-none-any.whl → talks_reducer-0.6.0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- talks_reducer/__about__.py +1 -1
- talks_reducer/cli.py +192 -15
- talks_reducer/discovery.py +42 -12
- talks_reducer/gui.py +637 -162
- talks_reducer/pipeline.py +1 -1
- talks_reducer/server.py +103 -22
- talks_reducer/server_tray.py +216 -17
- talks_reducer/service_client.py +258 -4
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.0.dist-info}/METADATA +13 -4
- talks_reducer-0.6.0.dist-info/RECORD +21 -0
- talks_reducer-0.5.5.dist-info/RECORD +0 -21
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.0.dist-info}/WHEEL +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.0.dist-info}/entry_points.txt +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.0.dist-info}/licenses/LICENSE +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.0.dist-info}/top_level.txt +0 -0
talks_reducer/pipeline.py
CHANGED
@@ -211,7 +211,7 @@ def speed_up_video(
     audio_sample_count = audio_data.shape[0]
     max_audio_volume = audio_utils.get_max_volume(audio_data)

-    reporter.log("\
+    reporter.log("\nInformation:")
     reporter.log(f"- Max Audio Volume: {max_audio_volume}")

     samples_per_frame = wav_sample_rate / frame_rate
talks_reducer/server.py
CHANGED
@@ -5,10 +5,13 @@ from __future__ import annotations
 import argparse
 import atexit
 import shutil
+import socket
 import tempfile
 from contextlib import AbstractContextManager, suppress
 from pathlib import Path
-from
+from queue import SimpleQueue
+from threading import Thread
+from typing import Callable, Iterator, Optional, Sequence

 import gradio as gr

@@ -86,10 +89,12 @@ class GradioProgressReporter(SignalProgressReporter):
         self,
         progress_callback: Optional[Callable[[int, int, str], None]] = None,
         *,
+        log_callback: Optional[Callable[[str], None]] = None,
         max_log_lines: int = 500,
     ) -> None:
         super().__init__()
         self._progress_callback = progress_callback
+        self._log_callback = log_callback
         self._max_log_lines = max_log_lines
         self._active_desc = "Processing"
         self.logs: list[str] = []
@@ -103,6 +108,8 @@ class GradioProgressReporter(SignalProgressReporter):
         self.logs.append(text)
         if len(self.logs) > self._max_log_lines:
             self.logs = self.logs[-self._max_log_lines :]
+        if self._log_callback is not None:
+            self._log_callback(text)

     def task(
         self,
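
The two hunks above give the reporter an optional `log_callback` that mirrors every appended line to a subscriber while still keeping a bounded in-memory buffer. A minimal, self-contained sketch of that pattern (the `BufferedReporter` class below is illustrative, not the package's actual class):

```python
from typing import Callable, Optional


class BufferedReporter:
    """Minimal stand-in for a reporter that mirrors log lines to a callback."""

    def __init__(
        self,
        *,
        log_callback: Optional[Callable[[str], None]] = None,
        max_log_lines: int = 500,
    ) -> None:
        self._log_callback = log_callback
        self._max_log_lines = max_log_lines
        self.logs: list[str] = []

    def log(self, text: str) -> None:
        self.logs.append(text)
        # Keep the buffer bounded to the newest lines.
        if len(self.logs) > self._max_log_lines:
            self.logs = self.logs[-self._max_log_lines:]
        # Mirror the line to the optional subscriber (e.g. a UI stream).
        if self._log_callback is not None:
            self._log_callback(text)


reporter = BufferedReporter(log_callback=print)
reporter.log("Information:")  # printed immediately and also kept in reporter.logs
```

In the server, that subscriber is the queue bridge shown further down, so log lines reach the browser while the pipeline is still running.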
@@ -136,6 +143,8 @@ class GradioProgressReporter(SignalProgressReporter):
             self._progress_callback(bounded_current, total_value, display_desc)


+_FAVICON_PATH = Path(__file__).resolve().parent.parent / "docs" / "assets" / "icon.ico"
+_FAVICON_PATH_STR = str(_FAVICON_PATH) if _FAVICON_PATH.exists() else None
 _WORKSPACES: list[Path] = []

@@ -157,6 +166,26 @@ def _cleanup_workspaces() -> None:
     _WORKSPACES.clear()


+def _describe_server_host() -> str:
+    """Return a human-readable description of the server hostname and IP."""
+
+    hostname = socket.gethostname().strip()
+    ip_address = ""
+
+    with suppress(OSError):
+        resolved_ip = socket.gethostbyname(hostname or "localhost")
+        if resolved_ip:
+            ip_address = resolved_ip
+
+    if hostname and ip_address and hostname != ip_address:
+        return f"{hostname} ({ip_address})"
+    if ip_address:
+        return ip_address
+    if hostname:
+        return hostname
+    return "unknown"
+
+
 def _build_output_path(input_path: Path, workspace: Path, small: bool) -> Path:
     """Mirror the CLI output naming scheme inside the workspace directory."""

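
`_describe_server_host` is only best-effort: `socket.gethostbyname` may raise `OSError` on machines whose hostname does not resolve, hence the `suppress` guard. A standalone sketch of the same lookup:

```python
import socket
from contextlib import suppress


def describe_host() -> str:
    """Best-effort 'hostname (ip)' label for display in a UI banner."""
    hostname = socket.gethostname().strip()
    ip_address = ""
    with suppress(OSError):  # name resolution may fail on some machines
        ip_address = socket.gethostbyname(hostname or "localhost")
    if hostname and ip_address and hostname != ip_address:
        return f"{hostname} ({ip_address})"
    return ip_address or hostname or "unknown"


print(describe_host())  # e.g. "build-box (192.168.1.20)"
```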
@@ -213,7 +242,7 @@ def process_video(
     file_path: Optional[str],
     small_video: bool,
     progress: Optional[gr.Progress] = gr.Progress(track_tqdm=False),
-) -> tuple[Optional[str], str, str, Optional[str]]:
+) -> Iterator[tuple[Optional[str], str, str, Optional[str]]]:
     """Run the Talks Reducer pipeline for a single uploaded file."""

     if not file_path:
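
Changing the return annotation from a tuple to `Iterator[...]` turns `process_video` into a generator, which Gradio treats as a streaming event handler: every `yield` pushes an intermediate update to the bound output components, and `gr.update()` leaves a component unchanged. A minimal sketch of that behaviour, assuming a recent Gradio release (the component names here are illustrative):

```python
import time

import gradio as gr


def slow_job(name: str):
    """Generator handler: every yield refreshes the bound outputs immediately."""
    lines = []
    for step in range(3):
        time.sleep(1)
        lines.append(f"step {step} done for {name}")
        # gr.update() leaves the first output untouched while the log streams.
        yield gr.update(), "\n".join(lines)
    yield f"Hello, {name}!", "\n".join(lines)


with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")
    result = gr.Textbox(label="Result")
    log = gr.Textbox(label="Log", lines=5)
    gr.Button("Run").click(slow_job, inputs=name, outputs=[result, log])

# demo.launch()
```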
@@ -235,7 +264,15 @@ def process_video(

     progress_callback = _callback

-
+    events: "SimpleQueue[tuple[str, object]]" = SimpleQueue()
+
+    def _log_callback(message: str) -> None:
+        events.put(("log", message))
+
+    reporter = GradioProgressReporter(
+        progress_callback=progress_callback,
+        log_callback=_log_callback,
+    )

     options = ProcessingOptions(
         input_file=input_path,
@@ -244,38 +281,81 @@ def process_video(
         small=small_video,
     )

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def _worker() -> None:
+        try:
+            result = speed_up_video(options, reporter=reporter)
+        except FFmpegNotFoundError as exc:  # pragma: no cover - depends on runtime env
+            events.put(("error", gr.Error(str(exc))))
+        except FileNotFoundError as exc:
+            events.put(("error", gr.Error(str(exc))))
+        except Exception as exc:  # pragma: no cover - defensive fallback
+            reporter.log(f"Error: {exc}")
+            events.put(("error", gr.Error(f"Failed to process the video: {exc}")))
+        else:
+            reporter.log("Processing complete.")
+            events.put(("result", result))
+        finally:
+            events.put(("done", None))
+
+    worker = Thread(target=_worker, daemon=True)
+    worker.start()
+
+    collected_logs: list[str] = []
+    final_result: Optional[ProcessingResult] = None
+    error: Optional[gr.Error] = None
+
+    while True:
+        kind, payload = events.get()
+        if kind == "log":
+            text = str(payload).strip()
+            if text:
+                collected_logs.append(text)
+                yield (
+                    gr.update(),
+                    "\n".join(collected_logs),
+                    gr.update(),
+                    gr.update(),
+                )
+        elif kind == "result":
+            final_result = payload  # type: ignore[assignment]
+        elif kind == "error":
+            error = payload  # type: ignore[assignment]
+        elif kind == "done":
+            break
+
+    worker.join()
+
+    if error is not None:
+        raise error
+
+    if final_result is None:
+        raise gr.Error("Failed to process the video.")
+
+    log_text = "\n".join(collected_logs)
+    summary = _format_summary(final_result)
+
+    yield (
+        str(final_result.output_file),
         log_text,
         summary,
-        str(
+        str(final_result.output_file),
     )


 def build_interface() -> gr.Blocks:
     """Construct the Gradio Blocks application for the simple web UI."""

+    server_identity = _describe_server_host()
+
     with gr.Blocks(title="Talks Reducer Web UI") as demo:
         gr.Markdown(
-            """
-            ## Talks Reducer
+            f"""
+            ## Talks Reducer Web UI
             Drop a video into the zone below or click to browse. **Small video** is enabled
             by default to apply the 720p/128k preset before processing starts—clear it to
             keep the original resolution.
+
+            Video will be rendered on server **{server_identity}**.
             """.strip()
         )

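
The body of `process_video` above is a producer/consumer pattern: the pipeline runs on a daemon thread, posts typed events (`log`, `result`, `error`, `done`) onto a `SimpleQueue`, and the generator drains the queue so that errors are re-raised on the caller's thread. A framework-free sketch of the same shape, with a placeholder `fake_pipeline` standing in for `speed_up_video`:

```python
from queue import SimpleQueue
from threading import Thread
from typing import Callable, Iterator, Optional


def run_streaming(work: Callable[[Callable[[str], None]], str]) -> Iterator[str]:
    """Run `work(log)` on a worker thread and yield its log lines as they arrive."""
    events: "SimpleQueue[tuple[str, object]]" = SimpleQueue()

    def _worker() -> None:
        try:
            result = work(lambda msg: events.put(("log", msg)))
        except Exception as exc:  # forward the failure to the consuming thread
            events.put(("error", exc))
        else:
            events.put(("result", result))
        finally:
            events.put(("done", None))

    Thread(target=_worker, daemon=True).start()

    error: Optional[BaseException] = None
    while True:
        kind, payload = events.get()
        if kind == "log":
            yield str(payload)
        elif kind == "result":
            yield f"result: {payload}"
        elif kind == "error":
            error = payload  # type: ignore[assignment]
        elif kind == "done":
            break
    if error is not None:
        raise error


def fake_pipeline(log: Callable[[str], None]) -> str:
    log("starting")
    log("finished")
    return "output.mp4"


for line in run_streaming(fake_pipeline):
    print(line)
```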
@@ -337,6 +417,7 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
         server_port=args.port,
         share=args.share,
         inbrowser=not args.no_browser,
+        favicon_path=_FAVICON_PATH_STR,
     )

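
Because `_FAVICON_PATH_STR` falls back to `None` when `docs/assets/icon.ico` is not shipped next to the installed package, passing it to `launch()` degrades gracefully. A hedged sketch of the same guard, assuming Gradio's `favicon_path` keyword (the relative `assets/icon.ico` path below is only illustrative):

```python
from pathlib import Path

import gradio as gr

# Resolve an optional icon shipped next to this module; keep None when absent.
_FAVICON = Path(__file__).resolve().parent / "assets" / "icon.ico"
_FAVICON_STR = str(_FAVICON) if _FAVICON.exists() else None

with gr.Blocks() as demo:
    gr.Markdown("## Demo")

# favicon_path=None simply keeps Gradio's default browser-tab icon.
demo.launch(favicon_path=_FAVICON_STR)
```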
talks_reducer/server_tray.py
CHANGED
@@ -4,6 +4,7 @@ from __future__ import annotations

 import argparse
 import atexit
+import base64
 import logging
 import subprocess
 import sys
@@ -12,8 +13,10 @@ import time
 import webbrowser
 from contextlib import suppress
 from importlib import resources
+from io import BytesIO
 from pathlib import Path
-from typing import Any, Optional, Sequence
+from typing import Any, Iterator, Optional, Sequence
+from urllib.parse import urlsplit, urlunsplit

 from PIL import Image

@@ -24,6 +27,9 @@ try: # pragma: no cover - import guarded for clearer error message at runtime
 except ModuleNotFoundError as exc:  # pragma: no cover - handled in ``main``
     PYSTRAY_IMPORT_ERROR = exc
     pystray = None  # type: ignore[assignment]
+except Exception as exc:  # pragma: no cover - handled in ``main``
+    PYSTRAY_IMPORT_ERROR = exc
+    pystray = None  # type: ignore[assignment]
 else:
     PYSTRAY_IMPORT_ERROR = None

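
The broadened `except Exception` matters because importing `pystray` can fail for reasons other than the package being absent (for example, backend initialisation problems on a headless system, depending on the platform); the error is stored so `main` can report it instead of crashing at import time. A generic sketch of this optional-dependency guard:

```python
from typing import Optional

TRAY_IMPORT_ERROR: Optional[BaseException]

try:  # optional dependency; may fail for reasons beyond "not installed"
    import pystray
except ModuleNotFoundError as exc:
    TRAY_IMPORT_ERROR = exc
    pystray = None  # type: ignore[assignment]
except Exception as exc:  # e.g. a backend that cannot initialise on this system
    TRAY_IMPORT_ERROR = exc
    pystray = None  # type: ignore[assignment]
else:
    TRAY_IMPORT_ERROR = None


def require_tray() -> None:
    """Raise a readable error instead of an import-time crash."""
    if pystray is None:
        raise RuntimeError(f"System tray support unavailable: {TRAY_IMPORT_ERROR}")
```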
@@ -34,33 +40,224 @@ LOGGER = logging.getLogger(__name__)
 def _guess_local_url(host: Optional[str], port: int) -> str:
     """Return the URL the server is most likely reachable at locally."""

-    if host in (None, "", "0.0.0.0"
-        hostname = "
+    if host in (None, "", "0.0.0.0"):
+        hostname = "127.0.0.1"
+    elif host == "::":
+        hostname = "::1"
     else:
         hostname = host
     return f"http://{hostname}:{port}/"


+def _normalize_local_url(url: str, host: Optional[str], port: int) -> str:
+    """Rewrite *url* when a wildcard host should map to the loopback address."""
+
+    if host not in (None, "", "0.0.0.0"):
+        return url
+
+    try:
+        parsed = urlsplit(url)
+    except ValueError:
+        return _guess_local_url(host, port)
+
+    hostname = parsed.hostname or ""
+    if hostname in ("", "0.0.0.0"):
+        netloc = f"127.0.0.1:{parsed.port or port}"
+        return urlunsplit(
+            (
+                parsed.scheme or "http",
+                netloc,
+                parsed.path or "/",
+                parsed.query,
+                parsed.fragment,
+            )
+        )
+
+    return url
+
+
+def _iter_icon_candidates() -> Iterator[Path]:
+    """Yield possible tray icon paths ordered from most to least specific."""
+
+    module_path = Path(__file__).resolve()
+    package_root = module_path.parent
+    project_root = package_root.parent
+
+    frozen_root: Optional[Path] = None
+    frozen_value = getattr(sys, "_MEIPASS", None)
+    if frozen_value:
+        with suppress(Exception):
+            frozen_root = Path(str(frozen_value)).resolve()
+
+    executable_root: Optional[Path] = None
+    with suppress(Exception):
+        executable_root = Path(sys.executable).resolve().parent
+
+    launcher_root: Optional[Path] = None
+    with suppress(Exception):
+        launcher_root = Path(sys.argv[0]).resolve().parent
+
+    base_roots: list[Path] = []
+    for candidate in (
+        package_root,
+        project_root,
+        frozen_root,
+        executable_root,
+        launcher_root,
+    ):
+        if candidate and candidate not in base_roots:
+            base_roots.append(candidate)
+
+    expanded_roots: list[Path] = []
+    suffixes = (
+        Path(""),
+        Path("_internal"),
+        Path("Contents") / "Resources",
+        Path("Resources"),
+    )
+    for root in base_roots:
+        for suffix in suffixes:
+            candidate_root = (root / suffix).resolve()
+            if candidate_root not in expanded_roots:
+                expanded_roots.append(candidate_root)
+
+    icon_names = ("icon.png", "icon.ico")
+    relative_paths = (
+        Path("docs") / "assets",
+        Path("assets"),
+        Path("talks_reducer") / "assets",
+        Path(""),
+    )
+
+    seen: set[Path] = set()
+    for root in expanded_roots:
+        if not root.exists():
+            continue
+        for relative in relative_paths:
+            for icon_name in icon_names:
+                candidate = (root / relative / icon_name).resolve()
+                if candidate in seen:
+                    continue
+                seen.add(candidate)
+                yield candidate
+
+
 def _load_icon() -> Image.Image:
-    """Load the tray icon image, falling back to
+    """Load the tray icon image, falling back to the embedded pen artwork."""

     LOGGER.debug("Attempting to load tray icon image.")

-
-
-
-
+    for candidate in _iter_icon_candidates():
+        LOGGER.debug("Checking icon candidate at %s", candidate)
+        if candidate.exists():
+            try:
+                with Image.open(candidate) as image:
+                    loaded = image.copy()
+            except Exception as exc:  # pragma: no cover - diagnostic log
+                LOGGER.warning("Failed to load tray icon from %s: %s", candidate, exc)
+            else:
+                LOGGER.debug("Loaded tray icon from %s", candidate)
+                return loaded
+
+    LOGGER.warning("Falling back to generated tray icon; packaged image not found")
+    image = Image.new("RGBA", (64, 64), color=(37, 99, 235, 255))
+    image.putpixel((0, 0), (255, 255, 255, 255))
+    image.putpixel((63, 63), (17, 24, 39, 255))
+    return image
+
+
+_EMBEDDED_ICON_BASE64 = (
+    "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAAARnQU1BAACx"
+    "jwv8YQUAAAAJcEhZcwAADsIAAA7CARUoSoAAAA3MSURBVHhe5Zt7cB31dcc/Z/feyVfPK11JDsLW"
+    "xAEmMJDGk+mEFGza8oiZ8l+GSWiwMTBJQzDpA2xT4kcKpsWxoSl0SiCTEAPFDYWkEJtCeYSnG0oh"
+    "sQHbacCWbcmyZOlKV5L1vLt7+sdvf3d3rx6hnfwT6Tuzurvnd36Pc37nd37nd3YlzID65o+1OOn0"
+    "FSCXIXKeQJuq1pTz/RYggJYTZ8Fs/AIgIqOgHaq8B/p8UCzuHuzt7ilnxlaIo64+W+VU1a4Tx7lR"
+    "RFoiFgUx9xL2r/Hq4ZAibsMfqw2qhiMhgiJhu4qG3YQ9qGGSkF9j/YdFEcIyiOrFnntVg+/6Y6e2"
+    "DQ0MjMSLEgrILmz9pKRSO0Wcz5jCSNQ5AQ32Br6/stB9fL8llRRQv7D1HCeVfl7EWWQFNgqYI8Jb"
+    "qPYGXnFFoafrl1gF1DY21aUz1W8g8qnZFticgeoH/uT4Hwz29uQdgNSCzCZEPlXON2chcpaTrrwD"
+    "QLItpy1x0hX7xHFqAbTcgcwxxJz0WFAsLnXETX1BRGoteb5ARDLipq5yELkkuVfNbUQiCiJymSMi"
+    "54KAzjl//1Hwe46ii6AskJgnUKhzUJzygrkPG1GCMz9WfjlK8a3Ow9lPIlLAPLQDQCIFzD8fCAkL"
+    "mFcozXbSByjQ32mugYm5vDOW1rs4Vh0aKANdynVXCbetcThrkZLvUIK57RtUGlrbAnFd6e8IeOD2"
+    "FKsvqqTSFY4MeDz0UpE77/PBgVyrSY4oIPrRfEagSuH40RglRdPixQT/hwOXAPlhYLC85P+BZshV"
+    "hjKIEAQB0tDaFkyqKyNVAe3fyfDxOgd8wAVP4M3DRbb9uMiu3YrbDNlKCFSSy2MahfiBMth1lNu3"
+    "bOFz55/PyMgIj//rEzz+LzvJLV7ykU6drkBvN3zpIuGK3xcy6WBKtisOEUEVRMKUmU3JCRRGhEde"
+    "DXj9V9DUCEqkAB3zXSqaA97fmmFxjYv6CmHqjjQUxpWn3y5y7Q+K8CFkW8FxZjeB/s4j7Hj4YVat"
+    "WoUTaqswOMg99/w9d2654zcqwRHoy8NNK4QtqzNks6lo6QrJvJ/YSSgbU2liDEPHiUn+8sFxfrIP"
+    "cjVGAaETVPzA+AHbiW1TJyGbElYvq+TDuzN88xsOhS6l/4SajKFdFzEURsc545zzuHzFipLwANn6"
+    "ev761vVs3LSZfEd7oqwcgQKjyp/+cZpsQxr1FQ3HGN2Hl29+sWW+hrTk8+LWCq6/LAUDkf4cAClN"
+    "t4FVcImuQBHOyDr8zZUZXv1BJVd8Xsh3wtCETlF8UCzS1JiloqIiWQBUV1dz6/p13HzLWvo62n+D"
+    "JQmZSjuAj4AEW7KO7cW0FxFKZwHDbqi2qk1Tq4b3gZD2hIvOTPPoX2XYcVcKLwv5TjMjFvW11fzX"
+    "njc4fLg9IsZQU1PDxg3fZNU1q+k7Nr0SLOX1d4toMUBcQVwQR2KX2cfshVtGiz2TgsnxgBf3Gqdu"
+    "23cztfXf8lREquCmS9PUVzpGAyLGNsRah0SqCiDjCkuXpLh6mUtVLuDFnyljo1BVY+qMDRUYHhll"
+    "+bJlVFdXlwSzyGQynH/+Z/ng0GH2vvUm1dmGKQ6ushaeelNpqfapSgUMDHnkC2XXgE9+0Cdf8MkP"
+    "euQLPv3hc1/BPnt0nvR45MVJbn8kINdq5FNVJNvaFoz7jrg55cDWDG21rvEFYpIk0RqIz1LMXlJQ"
+    "BPZ8WGT7k0X+/Vkl1QwNGYfeY+2svGY1d2/fxsKWllj9CMeOHeOGG9fw7DO7aVq8ZMoWGSgUjjPF"
+    "pGdFfNUkVpCQM9kPsNvgjAqYUttsKeYpPhhBRCEF/ePKLrtbHILcIod8ZzurVl/L9m3fnlEJ7e3t"
+    "rLr2eva89kpJCdavWH14Cv4MOnDEKCo+TWp28lI7TshnYeOAcAk44lYpa2JLoOQAw0pJR2nM3FyG"
+    "oj5UucLSj7usXJ6iuinghZd8qGvk3bdeoftkL8suvGDa5dDQ0MCyCy/gtT0/p/1/DlBd32DaDIUa"
+    "6ISJYaU4zLTX5DQ0S5+YEGrDlG8cJmZQlWzr4mDcdyXdpLx/l7EAmx+0b4aMjFbS6W5DZ2mfXSgK"
+    "7PnAY9OOSd54T2DQWMLd27fR0txsx5HAgYMH+dKXV/L+3n00LW7DV2WgD+5a6XDZZ1wq09YijCkI"
+    "gtqNSs07RvuLwNCY8MRrHv/wZEDTonBrDSEIgfo2EHJIN4cKqDEKMN1Ys7d34fYYM6U4bGAj9k+F"
+    "sK/TY+nN49TiMNzVzvVf+Qrbtm4ll8uVVwdg//79fOGqq2k/0U0xn+HbX3O55coMbrr84BqXJu6v"
+    "YnDg1LDPhodGuW+30hjTu10CpVYdjU+pifvjwmu4HRIzzSSmUphUzlnocs2FDsMnlKa2T/DQ97/P"
+    "bRs2UChMH9yfe+65/Nl1qynmewDl4k+7uGkHDZ2AerFAxwuDIC8wv6Wgx1wUlZoalys+68LENOOz"
+    "gZACKRVzyFE7k/bNsFFCuXKtyVv+uPO2dIAJT+kfVqgKNQdkFmSmthdDKp0q3Re9qOHEBhENLjnz"
+    "oQzm1tDHi2U8MTix5qcecEqXjQHKoDrz22PHnCOee89j93NKrkHo62jn5rVr2WLH7WTr68trAHC4"
+    "vZ0dj+6E+maoEJ76T49Tw54Jglwb3IQBjpsMhMSRMGAKeVzoOenx2M98yJb3ZCDZ0AfUNSm/uCvD"
+    "ongcENOb2iURyitmfcRKwnsxscGJ4YB/frXI+ns86hc6DHa1c8vadWzatJH6urqQP4kjR4/y9TU3"
+    "8dwzu2luM4elvm746ueFS5c6VKQil5wwM2KOJ7ReERiZhCf3+Dz139DUFDlBK1mgJg7QMd+hvkl5"
+    "J6GAqP34FphQQJyA8f5jvvLCAY9NjxZ59y2l4XSHgePt3LJuHZs3bqRuBuGPHj3GV792Ay/8x7M0"
+    "tS0hCEcrQL4AnCIpcKk0jtI0ReUN0FST3AEgcoKSbW3TUd8h26S8szXDohq37Jha3okR2n7WUpp1"
+    "F97v9vmnXZM88GgAdUJzvdDb0c7adevZtHHDjMIf6+jghq+v4dlndpUCIRvcYKPByWnkp0zeEAvS"
+    "UOUmaeWIKyAY8x2ZsgRKnOGvxgQO6YIx997RgCd+XmTN/R70Co2LwBGz5tetv5VNGzdQO100AnR3"
+    "d3PjTd/g33785JRQ2BHomwR64IKlSk2FEtgNOWaKcatU4KVjgg5Ac8vM0aOJA2IKqG1SfmktILCC"
+    "x2rHFCCYGZ8EXv/A486dE7zyMmQWCpm04ervPMJNf/4X/O2WO2ac+ZMnT3Lz2nU89ugjCbO3GPTg"
+    "EzXw4A1plp6ZxgkFjH+4k7BPMX/6BpV7nxrnH3crueYpBgJJC1isY75LTU7ZuzVuAWaKBbPNRekm"
+    "M+sf5n2+99wk2x/0YQHkmqJtc6LoMdLTycGDv+Lssz9Z1rVBPp9n7br17PjhQ9MKT5ihfnxzii9e"
+    "XAWx4/a0a93+qDkWd5wosmLzKAcHoTGclDisBTgmxSmJg0KpQZsHCK1B0krBC3h4zwRnrR1j+4M+"
+    "DadDYy4yQ4CRoRGW/9HFnHbaxyJiDP39/WzYtHlW4Q1JOXuxOZtoEMsI2Webq1A1fstmtXxlYYPL"
+    "5860znMahCKWIsGE/Gh0qdljPQfeOORxzb1jXHtbEXcIcotKAXKiAbeqko6uE4yOjUXEEEb4TTz4"
+    "3ftnFJ5wPYPQ1RfmJ0uXIPYwFvqD0l1YjgPDoz4f9ACZ8paTcAQlUKXSSR4XCQchFXB0KGDLT8ZZ"
+    "ft0Eu55VcouE+srQ5EWSH0wC9ZkFHPn1QZ5++mk8zyvRe3t72bj5Wzxw//3G4c0gPHZCcvCdp4oc"
+    "PDTBZFHxvGCaK0YvKr6v9Pf77Hh+nDfegcYF5S0nIdnWNi2qw0ha+fC+DGdkXWN/Lpzy4Ll3i6zd"
+    "McnRfbFssF2CFmVLkdBvDBw/ym0bNvCHy5fTPzDAYzt/xDO7np515uNwBPpOQbYavrhUqF1gT6kR"
+    "yleuIvyiHV5+W8mVvnicikQc4Lou+Y6Av7vF5YbLF5BJCQe6PO796SSPPB5AA+Sqky9GSsdQylZA"
+    "QgkwcPxIRHCqaTq9ZUrWZzYIcMqHiUGTmJ0VttnsR5j5eBzguq4ISl+HcsmlQnOD8KOXFfqYVYu/"
+    "yzAK8CMFEJpt/xAwArUtUOHOTeGJWUBpF7Dhb2MdNJ4G6bksfOw+2gbLdoC5jGhi5+UXYnFIbAmU"
+    "7eXzBeWxz7xD6QuR+aSIhBMUkcnY8zyD4iAcM1vgXN30ZsWgg2r4D0TzaBFECd/3HFV9IaLPIyWY"
+    "4O9lRz3vp6hO/5pmDsJOsqoW1fefcAonT3SoBg9ZHzCnraAkmqJB8Hihp+s9B8AfH9uigf46wTwn"
+    "Ec5+oN3qFTdgzwJD/X0Dge9frar9EV/4eczvOkrfMYRvulXH1PdXFk6eOEb8MFTo7nxbPe9yDYJD"
+    "iS9BbTLOPMxymfIoUxc+xzqfymOo09RKtBmrHD7GCYYo4Thtf8lxm11eg6Az8Ip/MtDd+ZIlJ56f"
+    "jJ8a6qqoXLBTHLca4TxESgll03Akb7leTKchzSYvEwMPRUzwmMpRu3bwlseUSVxAq5C4kJYU664E"
+    "c8yf0CB4WIuTXy6cPPF+vHjaOgD1Lced6abSVyJyCfDpQLW57B2xTB89WXI4qhJCmrUutbRpEG/Z"
+    "tjEDa4ktORoRkR5F96P6ivr+k4WeroOl0hj+F2nUsotZ+OvIAAAAAElFTkSuQmCC"
+)
+
+
+def _load_embedded_icon() -> Image.Image:
+    """Decode and return the embedded Talks Reducer tray icon."""
+
+    data = base64.b64decode(_EMBEDDED_ICON_BASE64)
+    with Image.open(BytesIO(data)) as image:
+        return image.copy()
+
+
+def _load_icon() -> Image.Image:
+    """Load the tray icon image, falling back to the embedded pen artwork."""
+
+    LOGGER.debug("Attempting to load tray icon image.")

-    for candidate in
+    for candidate in _iter_icon_candidates():
         LOGGER.debug("Checking icon candidate at %s", candidate)
         if candidate.exists():
             try:
-
+                with Image.open(candidate) as image:
+                    loaded = image.copy()
             except Exception as exc:  # pragma: no cover - diagnostic log
                 LOGGER.warning("Failed to load tray icon from %s: %s", candidate, exc)
             else:
                 LOGGER.debug("Loaded tray icon from %s", candidate)
-                return
+                return loaded

     with suppress(FileNotFoundError):
         resource_icon = resources.files("talks_reducer") / "assets" / "icon.png"
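
The new `_normalize_local_url` helper exists because a server bound to the wildcard address reports a URL such as `http://0.0.0.0:9005/`, which is not browsable; the host part is swapped for the loopback address while scheme, path and query are preserved. A focused sketch of that rewrite (the default port below is only an illustrative assumption):

```python
from urllib.parse import urlsplit, urlunsplit


def loopback_url(url: str, default_port: int = 9005) -> str:
    """Rewrite a wildcard-host URL so it can be opened in a local browser."""
    parts = urlsplit(url)
    if (parts.hostname or "") not in ("", "0.0.0.0"):
        return url  # already points at a concrete host
    netloc = f"127.0.0.1:{parts.port or default_port}"
    return urlunsplit(
        (parts.scheme or "http", netloc, parts.path or "/", parts.query, parts.fragment)
    )


print(loopback_url("http://0.0.0.0:9005/"))  # -> http://127.0.0.1:9005/
```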
@@ -68,15 +265,17 @@ def _load_icon() -> Image.Image:
             LOGGER.debug("Loading tray icon from package resources")
             with resource_icon.open("rb") as handle:
                 try:
-
+                    with Image.open(handle) as image:
+                        return image.copy()
                 except Exception as exc:  # pragma: no cover - diagnostic log
                     LOGGER.warning(
                         "Failed to load tray icon from package resources: %s", exc
                     )

     LOGGER.warning("Falling back to generated tray icon; packaged image not found")
-    # Fallback to a simple accent-colored square to avoid import errors
     image = Image.new("RGBA", (64, 64), color=(37, 99, 235, 255))
+    image.putpixel((0, 0), (255, 255, 255, 255))
+    image.putpixel((63, 63), (17, 24, 39, 255))
     return image

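
The `image.copy()` calls introduced in the last two hunks guard against a PIL pitfall: `Image.open` is lazy, so returning the image object after its underlying file handle has been closed can fail when the pixels are finally read. A small sketch of the resource-based load used above (it assumes Pillow and an installed package that bundles `talks_reducer/assets/icon.png`, the same path the module itself probes):

```python
from importlib import resources

from PIL import Image


def load_packaged_icon() -> Image.Image:
    """Load an image bundled with the package, detached from its file handle."""
    icon_path = resources.files("talks_reducer") / "assets" / "icon.png"
    with icon_path.open("rb") as handle:
        with Image.open(handle) as image:
            # copy() forces the pixel data to load before the handle closes.
            return image.copy()
```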
@@ -130,9 +329,9 @@ class _ServerTrayApplication:
         )

         self._server_handle = server
-
-
-        )
+        fallback_url = _guess_local_url(self._host, self._port)
+        local_url = getattr(server, "local_url", fallback_url)
+        self._local_url = _normalize_local_url(local_url, self._host, self._port)
         self._share_url = getattr(server, "share_url", None)
         self._ready_event.set()
         LOGGER.info("Server ready at %s", self._local_url)
@@ -337,7 +536,7 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
         description="Launch the Talks Reducer server with a system tray icon."
     )
     parser.add_argument(
-        "--host", dest="host", default=
+        "--host", dest="host", default="0.0.0.0", help="Custom host to bind."
     )
     parser.add_argument(
         "--port",
|