talks-reducer 0.7.1-py3-none-any.whl → 0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@ import shutil
 import time
 from contextlib import suppress
 from pathlib import Path
-from typing import Callable, Optional, Sequence, Tuple
+from typing import Any, AsyncIterator, Callable, Optional, Sequence, Tuple
 
 from gradio_client import Client
 from gradio_client import file as gradio_file
@@ -20,6 +20,55 @@ except ImportError: # pragma: no cover - allow running as script
     from talks_reducer.pipeline import ProcessingAborted
 
 
+class StreamingJob:
+    """Adapter that provides a consistent interface for streaming jobs."""
+
+    def __init__(self, job: Any) -> None:
+        self._job = job
+
+    @property
+    def raw(self) -> Any:
+        """Return the wrapped job instance."""
+
+        return self._job
+
+    @property
+    def supports_streaming(self) -> bool:
+        """Return ``True`` when the remote job can stream async updates."""
+
+        communicator = getattr(self._job, "communicator", None)
+        return communicator is not None
+
+    async def async_iter_updates(self) -> AsyncIterator[Any]:
+        """Yield updates from the wrapped job asynchronously."""
+
+        async for update in self._job:  # type: ignore[async-for]
+            yield update
+
+    def status(self) -> Any:
+        """Return the latest status update from the job when available."""
+
+        status_method = getattr(self._job, "status", None)
+        if callable(status_method):
+            return status_method()
+        raise AttributeError("Wrapped job does not expose a status() method")
+
+    def outputs(self) -> Any:
+        """Return cached outputs from the job when available."""
+
+        outputs_method = getattr(self._job, "outputs", None)
+        if callable(outputs_method):
+            return outputs_method()
+        raise AttributeError("Wrapped job does not expose an outputs() method")
+
+    def cancel(self) -> None:
+        """Cancel the remote job when supported."""
+
+        cancel_method = getattr(self._job, "cancel", None)
+        if callable(cancel_method):
+            cancel_method()
+
+
 def send_video(
     input_path: Path,
     output_path: Optional[Path],
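
The `StreamingJob` adapter above gives the rest of the client one interface whether or not the underlying `gradio_client` job can stream, and it degrades gracefully when the wrapped object lacks `status()`, `outputs()`, or `cancel()`. A minimal sketch of that behaviour around a hand-rolled stub (the stub is illustrative, not shipped with the package; the import assumes the diff above lands in `talks_reducer/service_client.py`):

```python
from talks_reducer.service_client import StreamingJob  # assumed module location


class _FakeJob:
    """Sketch only: stands in for a gradio_client job object."""

    communicator = object()  # a non-None communicator marks the job as streamable

    def status(self):
        return "running"

    def outputs(self):
        return [("out.mp4", "log text", "summary", "ratio")]


job = StreamingJob(_FakeJob())
assert job.supports_streaming      # checks getattr(raw_job, "communicator", None)
print(job.status())                # delegates to the wrapped job's status()
print(job.outputs()[-1])           # delegates to outputs(); AttributeError if absent
job.cancel()                       # the stub has no cancel(), so this is a no-op
```
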
@@ -35,6 +84,10 @@ def send_video(
     progress_callback: Optional[
         Callable[[str, Optional[int], Optional[int], str], None]
     ] = None,
+    client_factory: Optional[Callable[[str], Client]] = None,
+    job_factory: Optional[
+        Callable[[Client, Tuple[Any, ...], dict[str, Any]], Any]
+    ] = None,
 ) -> Tuple[Path, str, str]:
     """Upload *input_path* to the Gradio server and download the processed video.
 
@@ -45,15 +98,23 @@ def send_video(
     if not input_path.exists():
         raise FileNotFoundError(f"Input file does not exist: {input_path}")
 
-    client = Client(server_url)
-    job = client.submit(
+    client_builder = client_factory or Client
+    client = client_builder(server_url)
+    submit_args: Tuple[Any, ...] = (
         gradio_file(str(input_path)),
         bool(small),
         silent_threshold,
         sounded_speed,
         silent_speed,
-        api_name="/process_video",
     )
+    submit_kwargs: dict[str, Any] = {"api_name": "/process_video"}
+
+    if job_factory is not None:
+        job = job_factory(client, submit_args, submit_kwargs)
+    else:
+        job = client.submit(*submit_args, **submit_kwargs)
+
+    streaming_job = StreamingJob(job)
 
     cancelled = False
 
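
The new `client_factory` and `job_factory` hooks make the network layer injectable: `client_factory` receives the server URL and must return a `Client`-like object, while `job_factory` receives that client plus the positional and keyword arguments that would otherwise go straight to `client.submit()`. A hedged sketch of plugging them in (the factory names are illustrative, not part of the package):

```python
from typing import Any, Dict, Tuple

from gradio_client import Client


def my_client_factory(server_url: str) -> Client:
    # Build the client however you like; send_video only calls this with the URL.
    return Client(server_url)


def my_job_factory(client: Client, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Any:
    # Forward to the normal submit call, or return a recorded/stub job in tests.
    return client.submit(*args, **kwargs)


# Both hooks are passed as keyword arguments, e.g.:
# send_video(input_path, output_path, ..., client_factory=my_client_factory,
#            job_factory=my_job_factory)
```
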
@@ -62,7 +123,7 @@ def send_video(
         if should_cancel and should_cancel():
             if not cancelled:
                 with suppress(Exception):
-                    job.cancel()
+                    streaming_job.cancel()
                 cancelled = True
             raise ProcessingAborted("Remote processing cancelled by user.")
 
@@ -85,7 +146,7 @@ def send_video(
     if should_cancel is not None:
         stream_kwargs["cancel_callback"] = _cancel_if_requested
     consumed_stream = _stream_job_updates(
-        job,
+        streaming_job,
         _emit_new_lines,
         **stream_kwargs,
     )
@@ -190,7 +251,7 @@ def _emit_progress_update(
 
 
 async def _pump_job_updates(
-    job,
+    job: StreamingJob,
     emit_log: Callable[[str], None],
     progress_callback: Optional[
         Callable[[str, Optional[int], Optional[int], str], None]
@@ -199,7 +260,7 @@ async def _pump_job_updates(
 ) -> None:
     """Consume asynchronous updates from *job* and emit logs and progress."""
 
-    async for update in job:  # type: ignore[async-for]
+    async for update in job.async_iter_updates():
         if cancel_callback:
             cancel_callback()
         update_type = getattr(update, "type", "status")
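
`_pump_job_updates` now consumes updates through `StreamingJob.async_iter_updates()` instead of iterating the raw job. Because the adapter simply re-yields whatever the wrapped object produces, it can be driven by any async-iterable stand-in; a small self-contained sketch (the stub update objects are illustrative):

```python
import asyncio
from types import SimpleNamespace


class _StubJob:
    """Async-iterable stand-in that yields two fake Gradio-style updates."""

    def __aiter__(self):
        self._updates = iter(
            [
                SimpleNamespace(type="log", log="encoding chunk 1"),
                SimpleNamespace(type="status"),
            ]
        )
        return self

    async def __anext__(self):
        try:
            return next(self._updates)
        except StopIteration:
            raise StopAsyncIteration from None


async def _drain() -> None:
    job = StreamingJob(_StubJob())
    async for update in job.async_iter_updates():
        print(getattr(update, "type", "status"))


asyncio.run(_drain())  # prints "log" then "status"
```
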
@@ -242,15 +303,18 @@ def _poll_job_updates(
 ) -> None:
     """Poll *job* for outputs and status updates when async streaming is unavailable."""
 
+    streaming_job = job if isinstance(job, StreamingJob) else StreamingJob(job)
+    raw_job = streaming_job.raw
+
     while True:
         if cancel_callback:
             cancel_callback()
-        if job.done():
+        if hasattr(raw_job, "done") and raw_job.done():
            break
 
         status: Optional[StatusUpdate] = None
         with suppress(Exception):
-            status = job.status()  # type: ignore[assignment]
+            status = streaming_job.status()  # type: ignore[assignment]
 
         if status is not None:
             if progress_callback:
@@ -268,7 +332,7 @@ def _poll_job_updates(
 
         outputs = []
         with suppress(Exception):
-            outputs = job.outputs()
+            outputs = streaming_job.outputs()
         if outputs:
             latest = outputs[-1]
             if isinstance(latest, (list, tuple)) and len(latest) == 4:
@@ -280,7 +344,7 @@ def _poll_job_updates(
 
 
 def _stream_job_updates(
-    job,
+    job: StreamingJob,
     emit_log: Callable[[str], None],
     *,
     progress_callback: Optional[
@@ -294,8 +358,7 @@ def _stream_job_updates(
     generator-based fallback should be used.
     """
 
-    communicator = getattr(job, "communicator", None)
-    if communicator is None:
+    if not job.supports_streaming:
         return False
 
     try:
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: talks-reducer
-Version: 0.7.1
+Version: 0.8.0
 Summary: CLI for speeding up long-form talks by removing silence
 Author: Talks Reducer Maintainers
 License-Expression: MIT
@@ -26,7 +26,7 @@ Requires-Dist: bump-my-version>=0.5.0; extra == "dev"
 Requires-Dist: pyinstaller>=6.4.0; extra == "dev"
 Dynamic: license-file
 
-# Talks Reducer
+# Talks Reducer [![Coverage Status](https://coveralls.io/repos/github/popstas/talks-reducer/badge.svg?branch=master)](https://coveralls.io/github/popstas/talks-reducer?branch=master)
 
 Talks Reducer shortens long-form presentations by removing silent gaps and optionally re-encoding them to smaller files. The
 project was renamed from **jumpcutter** to emphasize its focus on conference talks and screencasts.
@@ -174,6 +174,28 @@ The helper wraps the Gradio API exposed by `server.py`, waits for processing to
 path you provide. Pass `--small` to mirror the **Small video** checkbox or `--print-log` to stream the server log after the
 download finishes.
 
+## Faster PyInstaller builds
+
+PyInstaller spends most of its time walking imports. To keep GUI builds snappy:
+
+- Create a dedicated virtual environment for packaging the GUI and install only
+  the runtime dependencies you need (for example `pip install -r
+  requirements.txt -r scripts/requirements-pyinstaller.txt`). Avoid installing
+  heavy ML stacks such as Torch or TensorFlow in that environment so PyInstaller
+  never attempts to analyze them.
+- Use the committed `talks-reducer.spec` file via `./scripts/build-gui.sh`.
+  The spec excludes Torch, TensorFlow, TensorBoard, torchvision/torchaudio,
+  Pandas, Qt bindings, setuptools' vendored helpers, and other bulky modules
+  that previously slowed the analysis stage. Set
+  `PYINSTALLER_EXTRA_EXCLUDES=module1,module2` if you need to drop additional
+  imports for an experimental build.
+- Keep optional imports in the codebase lazy (wrapped in `try/except` or moved
+  inside functions) so the analyzer only sees the dependencies required for the
+  shipping GUI.
+
+The script keeps incremental build artifacts in `build/` between runs. Pass
+`--clean` to `scripts/build-gui.sh` when you want a full rebuild.
+
 ## Contributing
 See `CONTRIBUTION.md` for development setup details and guidance on sharing improvements.
 
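
The README section added above mentions `PYINSTALLER_EXTRA_EXCLUDES` and a curated exclude list in `talks-reducer.spec`. The spec file itself is not part of this diff, so the following is only a hypothetical sketch of how such a spec (spec files are plain Python) could merge the environment variable into its excludes:

```python
import os

# Curated excludes named in the README; the real list lives in talks-reducer.spec.
BASE_EXCLUDES = [
    "torch",
    "tensorflow",
    "tensorboard",
    "torchvision",
    "torchaudio",
    "pandas",
]

# PYINSTALLER_EXTRA_EXCLUDES=module1,module2 drops extra imports for one-off builds.
extra = os.environ.get("PYINSTALLER_EXTRA_EXCLUDES", "")
excludes = BASE_EXCLUDES + [name.strip() for name in extra.split(",") if name.strip()]

# Inside a spec file, this list would be handed to Analysis(..., excludes=excludes).
```
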
@@ -0,0 +1,33 @@
+talks_reducer/__about__.py,sha256=utyM7vPaN3BCqdzGixouWTlzTCZvliX_-gFu6cJYQEQ,92
+talks_reducer/__init__.py,sha256=Kzh1hXaw6Vq3DyTqrnJGOq8pn0P8lvaDcsg1bFUjFKk,208
+talks_reducer/__main__.py,sha256=azR_vh8HFPLaOnh-L6gUFWsL67I6iHtbeH5rQhsipGY,299
+talks_reducer/audio.py,sha256=sjHMeY0H9ESG-Gn5BX0wFRBX7sXjWwsgS8u9Vb0bJ88,4396
+talks_reducer/chunks.py,sha256=IpdZxRFPURSG5wP-OQ_p09CVP8wcKwIFysV29zOTSWI,2959
+talks_reducer/cli.py,sha256=0vf6KvZnU1_xweqF5RGd3xg_-CsaHaFSwputrD4Br9s,18833
+talks_reducer/discovery.py,sha256=nuD7AyPbytc34nq9awYdSeJukAPVfFXVtNJK8i6JqY4,6384
+talks_reducer/ffmpeg.py,sha256=dsHBOBcr5XCSg0q3xmzLOcibBiEdyrXdEQa-ze5vQsM,12551
+talks_reducer/icons.py,sha256=jHjpjPS4Kib9WXXAA4BYQdWoztePdQ2ClT55YlHYMY8,3822
+talks_reducer/models.py,sha256=a1cHCVTNTJYh9I437CuANiaz5R_s-uECeGyK7WB67HQ,2018
+talks_reducer/pipeline.py,sha256=jiN0IETmZXfABWMCNObRicTXnJYT-Go8q4_f4-x2ir8,15132
+talks_reducer/progress.py,sha256=Mh43M6VWhjjUv9CI22xfD2EJ_7Aq3PCueqefQ9Bd5-o,4565
+talks_reducer/server.py,sha256=AuCAyBGkL3knAHJZoozNFXQJg0HKTc8rDSTgG0vZbXU,17767
+talks_reducer/server_tray.py,sha256=98AUPHhI0jSQHMZRz-xOUjVSD2HQExX_84LrHd2Rf-Y,18397
+talks_reducer/service_client.py,sha256=dZDfkA20s8SiBWevVXzYWqbfYwJV-BlhUje2ilKzOPg,14873
+talks_reducer/version_utils.py,sha256=TkYrTznVb2JqxFXzVzPd6PEnYP2MH7dxKl1J4-3DjMA,755
+talks_reducer/gui/__init__.py,sha256=1J57-lH-UGMaB-rNCWDaQbQWo_amF_IyDyQmsIwsszM,523
+talks_reducer/gui/__main__.py,sha256=_YX3qLkcORZUlpvYZL1RfI5840_9xzzQ9oP4Zoif6f8,147
+talks_reducer/gui/app.py,sha256=i0cb9GTAMIq9vgIo6rk8_b7VsJ3MhDUcoxhBua9OhRc,53006
+talks_reducer/gui/discovery.py,sha256=af9FLWJLBzI7ehrw3Gg4a9WhYSATt7LNYrkCWcJ7CDA,4111
+talks_reducer/gui/layout.py,sha256=k1FvZ55I96ue96Debf63dtX3-6bmnWL8PbsSQ6cT6qE,16742
+talks_reducer/gui/preferences.py,sha256=ahDLPNIzvL71sw8WvgY9-TV_kaWTl_JTkn1gf2Z1EaA,3531
+talks_reducer/gui/progress.py,sha256=wvIyqkpsC7qNvDlcf4G7Ts6P-VM_dPkcJqSvyYr2af8,2388
+talks_reducer/gui/remote.py,sha256=kpwYqizupw8szoIAf2O25xcZNoIK2ILryQMmANvI5PI,12330
+talks_reducer/gui/startup.py,sha256=mJNLVd7ooYHaUuYnym06AK4hz8mJZbxzwgI3PQCQ3Ds,6588
+talks_reducer/gui/theme.py,sha256=ueqpPVPOwnLkeHlOlWkUcdcoClJrAqz9LWT79p33Xic,7718
+talks_reducer/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+talks_reducer-0.8.0.dist-info/licenses/LICENSE,sha256=jN17mHNR3e84awmH3AbpWBcBDBzPxEH0rcOFoj1s7sQ,1124
+talks_reducer-0.8.0.dist-info/METADATA,sha256=XVLE5qg3XNgRFaFl9cAJ2nAFAoB2MLki-GUrOPfhync,10160
+talks_reducer-0.8.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+talks_reducer-0.8.0.dist-info/entry_points.txt,sha256=X2pjoh2vWBXXExVWorv1mbA1aTEVP3fyuZH4AixqZK4,208
+talks_reducer-0.8.0.dist-info/top_level.txt,sha256=pJWGcy__LR9JIEKH3QJyFmk9XrIsiFtqvuMNxFdIzDU,14
+talks_reducer-0.8.0.dist-info/RECORD,,
@@ -1,29 +0,0 @@
-talks_reducer/__about__.py,sha256=wwxwKZDkQSt_KMTilch61vXb8z9MEha3gfTpYfebr70,92
-talks_reducer/__init__.py,sha256=Kzh1hXaw6Vq3DyTqrnJGOq8pn0P8lvaDcsg1bFUjFKk,208
-talks_reducer/__main__.py,sha256=azR_vh8HFPLaOnh-L6gUFWsL67I6iHtbeH5rQhsipGY,299
-talks_reducer/audio.py,sha256=sjHMeY0H9ESG-Gn5BX0wFRBX7sXjWwsgS8u9Vb0bJ88,4396
-talks_reducer/chunks.py,sha256=IpdZxRFPURSG5wP-OQ_p09CVP8wcKwIFysV29zOTSWI,2959
-talks_reducer/cli.py,sha256=MeL5feATcJc0bGPDuW_L3HgepKQUi335zs3HVg47WrE,16474
-talks_reducer/discovery.py,sha256=BJ-iMir65cJMs0u-_EYdknBQT_grvCZaJNOx1xGi2PU,4590
-talks_reducer/ffmpeg.py,sha256=dsHBOBcr5XCSg0q3xmzLOcibBiEdyrXdEQa-ze5vQsM,12551
-talks_reducer/models.py,sha256=a1cHCVTNTJYh9I437CuANiaz5R_s-uECeGyK7WB67HQ,2018
-talks_reducer/pipeline.py,sha256=OGZG_3G1fh6LFQw9NuhnLq7gwJ5YcJ6l76QNWJydD7c,13630
-talks_reducer/progress.py,sha256=Mh43M6VWhjjUv9CI22xfD2EJ_7Aq3PCueqefQ9Bd5-o,4565
-talks_reducer/server.py,sha256=CLcgEyfBjsglSG1VkqiaP-4NsZZklL5o7ayYwnNMqbs,15782
-talks_reducer/server_tray.py,sha256=GBjx7Fr18Uy5O38ZjM5VXR77ou2_AEUqx2wN8MOZuss,23380
-talks_reducer/service_client.py,sha256=8C2v2aNj8UAECfy1pw7oIzCK3Ktx5E6kZoNSYWH-8m8,12656
-talks_reducer/version_utils.py,sha256=TkYrTznVb2JqxFXzVzPd6PEnYP2MH7dxKl1J4-3DjMA,755
-talks_reducer/gui/__init__.py,sha256=UQJtyb87wwZyvauPo0mM_aiau9NAhKbl4ggwJoPCNC0,59870
-talks_reducer/gui/__main__.py,sha256=9YWkGopLypanfMMq_RoQjjpPScTOxA7-biqMhQq-SSM,140
-talks_reducer/gui/discovery.py,sha256=6AXPcFGXqHZNhSBE1O5PyoH_CEMCb0Jk-9JGFwyAuRk,4108
-talks_reducer/gui/layout.py,sha256=rFzNt78sf6TQzHkEBUmINdD5-iJAWkBKHkIo_v5f7iU,17146
-talks_reducer/gui/preferences.py,sha256=ahDLPNIzvL71sw8WvgY9-TV_kaWTl_JTkn1gf2Z1EaA,3531
-talks_reducer/gui/remote.py,sha256=92HebrIo009GgRD7RBriw9yR8sbYHocsPzmjPe4ybhA,12071
-talks_reducer/gui/theme.py,sha256=ueqpPVPOwnLkeHlOlWkUcdcoClJrAqz9LWT79p33Xic,7718
-talks_reducer/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-talks_reducer-0.7.1.dist-info/licenses/LICENSE,sha256=jN17mHNR3e84awmH3AbpWBcBDBzPxEH0rcOFoj1s7sQ,1124
-talks_reducer-0.7.1.dist-info/METADATA,sha256=tiabxtCJb9UUqczxKpW0j27BWPWmfMfBAmb2n6jRt_U,8810
-talks_reducer-0.7.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-talks_reducer-0.7.1.dist-info/entry_points.txt,sha256=X2pjoh2vWBXXExVWorv1mbA1aTEVP3fyuZH4AixqZK4,208
-talks_reducer-0.7.1.dist-info/top_level.txt,sha256=pJWGcy__LR9JIEKH3QJyFmk9XrIsiFtqvuMNxFdIzDU,14
-talks_reducer-0.7.1.dist-info/RECORD,,