talks-reducer 0.5.5-py3-none-any.whl → 0.6.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- talks_reducer/__about__.py +1 -1
- talks_reducer/cli.py +194 -33
- talks_reducer/discovery.py +42 -12
- talks_reducer/gui.py +694 -168
- talks_reducer/pipeline.py +1 -1
- talks_reducer/server.py +110 -23
- talks_reducer/server_tray.py +230 -18
- talks_reducer/service_client.py +258 -4
- talks_reducer/version_utils.py +22 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/METADATA +18 -7
- talks_reducer-0.6.1.dist-info/RECORD +22 -0
- talks_reducer-0.5.5.dist-info/RECORD +0 -21
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/WHEEL +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/entry_points.txt +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/licenses/LICENSE +0 -0
- {talks_reducer-0.5.5.dist-info → talks_reducer-0.6.1.dist-info}/top_level.txt +0 -0
talks_reducer/__about__.py
CHANGED
talks_reducer/cli.py
CHANGED
@@ -4,23 +4,21 @@ from __future__ import annotations
 
 import argparse
 import os
+import shutil
+import subprocess
 import sys
 import time
 from importlib import import_module
-from importlib.metadata import PackageNotFoundError, version
 from pathlib import Path
-from typing import Dict, List, Optional, Sequence
+from typing import Dict, List, Optional, Sequence, Tuple
 
 from . import audio
 
-try:
-    from .__about__ import __version__ as _about_version
-except Exception:  # pragma: no cover - fallback if metadata file missing
-    _about_version = ""
 from .ffmpeg import FFmpegNotFoundError
 from .models import ProcessingOptions, default_temp_folder
 from .pipeline import speed_up_video
 from .progress import TqdmProgressReporter
+from .version_utils import resolve_version
 
 
 def _build_parser() -> argparse.ArgumentParser:
@@ -31,7 +29,7 @@ def _build_parser() -> argparse.ArgumentParser:
     )
 
     # Add version argument
-    pkg_version = _resolve_version()
+    pkg_version = resolve_version()
 
     parser.add_argument(
         "--version",
@@ -104,21 +102,20 @@ def _build_parser() -> argparse.ArgumentParser:
         default=None,
         help="Process videos via a Talks Reducer server at the provided base URL (for example, http://localhost:9005).",
     )
+    parser.add_argument(
+        "--host",
+        dest="host",
+        default=None,
+        help="Shortcut for --url when targeting a Talks Reducer server on port 9005 (for example, localhost).",
+    )
+    parser.add_argument(
+        "--server-stream",
+        action="store_true",
+        help="Stream remote progress updates when using --url.",
+    )
     return parser
 
 
-def _resolve_version() -> str:
-    """Determine the package version for CLI reporting."""
-
-    if _about_version:
-        return _about_version
-
-    try:
-        return version("talks-reducer")
-    except (PackageNotFoundError, Exception):
-        return "unknown"
-
-
 def gather_input_files(paths: List[str]) -> List[str]:
     """Expand provided paths into a flat list of files that contain audio streams."""
 
@@ -146,21 +143,22 @@ def _print_total_time(start_time: float) -> None:
 
 def _process_via_server(
     files: Sequence[str], parsed_args: argparse.Namespace, *, start_time: float
-) -> None:
-    """Upload *files* to the configured server and download the results."""
+) -> Tuple[bool, Optional[str]]:
+    """Upload *files* to the configured server and download the results.
+
+    Returns a tuple of (success, error_message). When *success* is ``False``,
+    ``error_message`` contains the reason and the caller should fall back to the
+    local processing pipeline.
+    """
 
     try:
         from . import service_client
     except ImportError as exc:  # pragma: no cover - optional dependency guard
-        print(
-            "Server mode requires the gradio_client dependency." f" ({exc})",
-            file=sys.stderr,
-        )
-        sys.exit(1)
+        return False, ("Server mode requires the gradio_client dependency. " f"({exc})")
 
     server_url = parsed_args.server_url
     if not server_url:
-        return
+        return False, "Server URL was not provided."
 
     output_override: Optional[Path] = None
     if parsed_args.output_file and len(files) == 1:
@@ -195,23 +193,58 @@ def _process_via_server(
         print(
             f"Processing file {index}/{len(files)} '{basename}' via server {server_url}"
         )
+        printed_log_header = False
+        progress_state: dict[str, tuple[Optional[int], Optional[int], str]] = {}
+        stream_updates = bool(getattr(parsed_args, "server_stream", False))
+
+        def _stream_server_log(line: str) -> None:
+            nonlocal printed_log_header
+            if not printed_log_header:
+                print("\nServer log:", flush=True)
+                printed_log_header = True
+            print(line, flush=True)
+
+        def _stream_progress(
+            desc: str, current: Optional[int], total: Optional[int], unit: str
+        ) -> None:
+            key = desc or "Processing"
+            state = (current, total, unit)
+            if progress_state.get(key) == state:
+                return
+            progress_state[key] = state
+
+            parts: list[str] = []
+            if current is not None and total and total > 0:
+                percent = (current / total) * 100
+                parts.append(f"{current}/{total}")
+                parts.append(f"{percent:.1f}%")
+            elif current is not None:
+                parts.append(str(current))
+            if unit:
+                parts.append(unit)
+            message = " ".join(parts).strip()
+            print(f"{key}: {message or 'update'}", flush=True)
+
         try:
             destination, summary, log_text = service_client.send_video(
                 input_path=Path(file),
                 output_path=output_override,
                 server_url=server_url,
                 small=bool(parsed_args.small),
+                log_callback=_stream_server_log,
+                stream_updates=stream_updates,
+                progress_callback=_stream_progress if stream_updates else None,
             )
         except Exception as exc:  # pragma: no cover - network failure safeguard
-            print(f"Failed to process {basename} via server: {exc}", file=sys.stderr)
-            sys.exit(1)
+            return False, f"Failed to process {basename} via server: {exc}"
 
         print(summary)
         print(f"Saved processed video to {destination}")
-        if log_text.strip():
+        if log_text.strip() and not printed_log_header:
            print("\nServer log:\n" + log_text)
 
     _print_total_time(start_time)
+    return True, None
 
 
 def _launch_gui(argv: Sequence[str]) -> bool:
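For context, a minimal sketch (not part of the package) of driving the streaming hooks above directly through `service_client.send_video`. The keyword names and the three-value return mirror the call in this hunk; the input path and server URL are placeholders.

```python
# Hypothetical usage sketch, assuming only the send_video() keywords and
# return values shown in the diff above.
from pathlib import Path

from talks_reducer import service_client


def on_log(line: str) -> None:
    # Mirrors _stream_server_log: echo each server log line as it arrives.
    print("[server]", line)


def on_progress(desc: str, current, total, unit) -> None:
    # Mirrors _stream_progress: report progress updates per stage.
    print(f"{desc or 'Processing'}: {current}/{total} {unit}")


destination, summary, log_text = service_client.send_video(
    input_path=Path("talk.mp4"),          # placeholder input file
    output_path=None,                     # let the server choose the output name
    server_url="http://localhost:9005",   # placeholder server
    small=False,
    log_callback=on_log,
    stream_updates=True,
    progress_callback=on_progress,
)
print(summary)
print("Saved to", destination)
```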
@@ -230,7 +263,7 @@ def _launch_gui(argv: Sequence[str]) -> bool:
 
 
 def _launch_server(argv: Sequence[str]) -> bool:
-    """Attempt to launch the Gradio
+    """Attempt to launch the Gradio server with the provided arguments."""
 
     try:
         server_module = import_module(".server", __package__)
@@ -245,6 +278,103 @@ def _launch_server(argv: Sequence[str]) -> bool:
         return True
 
 
+def _find_server_tray_binary() -> Optional[Path]:
+    """Return the best available path to the server tray executable."""
+
+    binary_name = "talks-reducer-server-tray"
+    candidates: List[Path] = []
+
+    which_path = shutil.which(binary_name)
+    if which_path:
+        candidates.append(Path(which_path))
+
+    try:
+        launcher_dir = Path(sys.argv[0]).resolve().parent
+    except Exception:
+        launcher_dir = None
+
+    potential_names = [binary_name]
+    if sys.platform == "win32":
+        potential_names = [f"{binary_name}.exe", binary_name]
+
+    if launcher_dir is not None:
+        for name in potential_names:
+            candidates.append(launcher_dir / name)
+
+    for candidate in candidates:
+        if candidate and candidate.exists() and os.access(candidate, os.X_OK):
+            return candidate
+
+    return None
+
+
+def _should_hide_subprocess_console() -> bool:
+    """Return ``True`` when a detached Windows launch should hide the console."""
+
+    if sys.platform != "win32":
+        return False
+
+    try:
+        import ctypes
+    except Exception:  # pragma: no cover - optional runtime dependency
+        return False
+
+    try:
+        get_console_window = ctypes.windll.kernel32.GetConsoleWindow  # type: ignore[attr-defined]
+    except Exception:  # pragma: no cover - platform specific guard
+        return False
+
+    try:
+        handle = get_console_window()
+    except Exception:  # pragma: no cover - defensive fallback
+        return False
+
+    return handle == 0
+
+
+def _launch_server_tray_binary(argv: Sequence[str]) -> bool:
+    """Launch the packaged server tray executable when available."""
+
+    command = _find_server_tray_binary()
+    if command is None:
+        return False
+
+    tray_args = [str(command), *list(argv)]
+
+    run_kwargs: Dict[str, object] = {"check": False}
+
+    if sys.platform == "win32":
+        no_window_flag = getattr(subprocess, "CREATE_NO_WINDOW", 0)
+        if no_window_flag and _should_hide_subprocess_console():
+            run_kwargs["creationflags"] = no_window_flag
+
+    try:
+        result = subprocess.run(tray_args, **run_kwargs)
+    except OSError:
+        return False
+
+    return result.returncode == 0
+
+
+def _launch_server_tray(argv: Sequence[str]) -> bool:
+    """Attempt to launch the server tray helper with the provided arguments."""
+
+    if _launch_server_tray_binary(argv):
+        return True
+
+    try:
+        tray_module = import_module(".server_tray", __package__)
+    except ImportError:
+        return False
+
+    tray_main = getattr(tray_module, "main", None)
+    if tray_main is None:
+        return False
+
+    tray_main(list(argv))
+    return True
+
+
 def main(argv: Optional[Sequence[str]] = None) -> None:
     """Entry point for the command line interface.
 
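A small illustrative sketch of the launch order these helpers implement; only the function names shown in the hunk above are assumed, and the empty argument list is a placeholder.

```python
# Hypothetical sketch of the fallback chain added in 0.6.1.
from talks_reducer.cli import _launch_server_tray

# 1. _launch_server_tray_binary() looks for a talks-reducer-server-tray
#    executable on PATH or next to the launcher and runs it via subprocess,
#    hiding the console window on detached Windows launches.
# 2. If that fails, the in-process talks_reducer.server_tray module's main()
#    is imported and invoked instead.
if not _launch_server_tray([]):
    print("Server tray mode is unavailable.")
```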
@@ -256,6 +386,14 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
     else:
         argv_list = list(argv)
 
+    if "--server" in argv_list:
+        index = argv_list.index("--server")
+        tray_args = argv_list[index + 1 :]
+        if not _launch_server_tray(tray_args):
+            print("Server tray mode is unavailable.", file=sys.stderr)
+            sys.exit(1)
+        return
+
     if argv_list and argv_list[0] in {"server", "serve"}:
         if not _launch_server(argv_list[1:]):
             print("Gradio server mode is unavailable.", file=sys.stderr)
@@ -272,6 +410,11 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
 
     parser = _build_parser()
     parsed_args = parser.parse_args(argv_list)
+
+    host_value = getattr(parsed_args, "host", None)
+    if host_value:
+        parsed_args.server_url = f"http://{host_value}:9005"
+
     start_time = time.time()
 
     files = gather_input_files(parsed_args.input_file)
@@ -281,15 +424,33 @@ def main(argv: Optional[Sequence[str]] = None) -> None:
     }
     del args["input_file"]
 
+    if "host" in args:
+        del args["host"]
+
     if len(files) > 1 and "output_file" in args:
         del args["output_file"]
 
+    fallback_messages: List[str] = []
     if parsed_args.server_url:
-        _process_via_server(files, parsed_args, start_time=start_time)
-        return
+        server_success, error_message = _process_via_server(
+            files, parsed_args, start_time=start_time
+        )
+        if server_success:
+            return
+
+        fallback_reason = error_message or "Server processing is unavailable."
+        print(fallback_reason, file=sys.stderr)
+        fallback_messages.append(fallback_reason)
+
+        fallback_notice = "Falling back to local processing pipeline."
+        print(fallback_notice, file=sys.stderr)
+        fallback_messages.append(fallback_notice)
 
     reporter = TqdmProgressReporter()
 
+    for message in fallback_messages:
+        reporter.log(message)
+
     for index, file in enumerate(files):
         print(f"Processing file {index + 1}/{len(files)} '{os.path.basename(file)}'")
         local_options = dict(args)
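Taken together, a hedged usage sketch of the new CLI surface in 0.6.1; the host and input file are placeholders, and the behaviour described in the comments follows the hunks above.

```python
# Hypothetical sketch, not taken from the package documentation.
from talks_reducer.cli import main

# --host expands to --url http://<host>:9005 before processing starts, and
# --server-stream opts in to streamed progress updates from the server. If the
# server call fails, main() now prints the reason and falls back to the local
# pipeline instead of exiting, while `--server` hands control to the tray helper.
main(["--host", "192.168.1.50", "--server-stream", "input.mp4"])
```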
talks_reducer/discovery.py
CHANGED
@@ -7,11 +7,21 @@ import socket
 from concurrent.futures import ThreadPoolExecutor
 from contextlib import closing
 from http.client import HTTPConnection
-from typing import Iterable, Iterator, List, Optional, Set
+from typing import Callable, Iterable, Iterator, List, Optional, Set
 
 DEFAULT_PORT = 9005
 DEFAULT_TIMEOUT = 0.4
 
+_EXCLUDED_HOSTS = {"127.0.0.1", "localhost", "0.0.0.0"}
+
+
+def _should_include_host(host: Optional[str]) -> bool:
+    """Return ``True`` when *host* should be scanned for discovery."""
+
+    if not host:
+        return False
+    return host not in _EXCLUDED_HOSTS
+
 
 def _iter_local_ipv4_addresses() -> Iterator[str]:
     """Yield IPv4 addresses that belong to the local machine."""
@@ -43,16 +53,19 @@ def _iter_local_ipv4_addresses() -> Iterator[str]:
 def _build_default_host_candidates(prefix_length: int = 24) -> List[str]:
     """Return a list of host candidates based on detected local networks."""
 
-    hosts: Set[str] =
+    hosts: Set[str] = set()
 
     for address in _iter_local_ipv4_addresses():
-        hosts.add(address)
+        if _should_include_host(address):
+            hosts.add(address)
         try:
             network = ipaddress.ip_network(f"{address}/{prefix_length}", strict=False)
         except ValueError:
             continue
         for host in network.hosts():
-            hosts.add(str(host))
+            host_str = str(host)
+            if _should_include_host(host_str):
+                hosts.add(host_str)
 
     return sorted(hosts)
 
@@ -82,36 +95,53 @@ def _probe_host(host: str, port: int, timeout: float) -> Optional[str]:
         return None
 
 
+ProgressCallback = Callable[[int, int], None]
+
+
 def discover_servers(
     *,
     port: int = DEFAULT_PORT,
     timeout: float = DEFAULT_TIMEOUT,
     hosts: Optional[Iterable[str]] = None,
+    progress_callback: Optional[ProgressCallback] = None,
 ) -> List[str]:
     """Scan *hosts* for running Talks Reducer servers on *port*.
 
     When *hosts* is omitted, the local /24 networks derived from available IPv4
-    addresses are scanned. ``localhost
-
+    addresses are scanned. ``127.0.0.1``, ``localhost``, and ``0.0.0.0`` are
+    excluded to avoid duplicating local endpoints. The optional
+    *progress_callback* receives the number of scanned hosts and the total
+    candidate count whenever discovery advances. The function returns a sorted
+    list of unique base URLs.
     """
 
     if hosts is None:
-        candidates = _build_default_host_candidates()
+        candidates = sorted(
+            {
+                host
+                for host in _build_default_host_candidates()
+                if _should_include_host(host)
+            }
+        )
     else:
-        candidates = sorted(
-    if "127.0.0.1" not in candidates:
-        candidates.append("127.0.0.1")
-    if "localhost" not in candidates:
-        candidates.append("localhost")
+        candidates = sorted({host for host in hosts if _should_include_host(host)})
 
     results: List[str] = []
+    total = len(candidates)
+
+    if progress_callback is not None:
+        progress_callback(0, total)
 
     with ThreadPoolExecutor(max_workers=32) as executor:
+        scanned = 0
         for url in executor.map(
             lambda host: _probe_host(host, port, timeout), candidates
         ):
             if url and url not in results:
                 results.append(url)
+            scanned += 1
+            if progress_callback is not None:
+                progress_callback(scanned, total)
 
     return results
 
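A minimal sketch of the updated discovery API with the new `progress_callback` hook; the callback semantics follow the hunk above, and the timeout shown is simply the module default.

```python
# Hypothetical usage sketch for talks_reducer.discovery in 0.6.1.
from talks_reducer.discovery import discover_servers


def report(scanned: int, total: int) -> None:
    # Called with (0, total) before scanning starts, then after each probed host.
    print(f"scanned {scanned}/{total} hosts", end="\r", flush=True)


servers = discover_servers(timeout=0.4, progress_callback=report)
print()
print("Found servers:", servers)
```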