DeepFabric 4.10.1__py3-none-any.whl → 4.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfabric/cli.py +83 -27
- deepfabric/cloud_upload.py +1 -1
- deepfabric/config.py +6 -4
- deepfabric/constants.py +1 -1
- deepfabric/dataset_manager.py +264 -62
- deepfabric/generator.py +687 -82
- deepfabric/graph.py +25 -1
- deepfabric/llm/retry_handler.py +28 -9
- deepfabric/progress.py +42 -0
- deepfabric/topic_manager.py +22 -2
- deepfabric/topic_model.py +26 -0
- deepfabric/tree.py +41 -16
- deepfabric/tui.py +448 -349
- deepfabric/utils.py +4 -1
- {deepfabric-4.10.1.dist-info → deepfabric-4.11.0.dist-info}/METADATA +3 -1
- {deepfabric-4.10.1.dist-info → deepfabric-4.11.0.dist-info}/RECORD +19 -19
- {deepfabric-4.10.1.dist-info → deepfabric-4.11.0.dist-info}/licenses/LICENSE +1 -1
- {deepfabric-4.10.1.dist-info → deepfabric-4.11.0.dist-info}/WHEEL +0 -0
- {deepfabric-4.10.1.dist-info → deepfabric-4.11.0.dist-info}/entry_points.txt +0 -0
deepfabric/tui.py
CHANGED
@@ -1,8 +1,10 @@
 import contextlib
 import json
+import math
 import os
 import re

+from abc import abstractmethod
 from collections import deque
 from dataclasses import dataclass
 from time import monotonic
@@ -30,71 +32,6 @@ if TYPE_CHECKING:
     from .error_codes import ClassifiedError


-class TopicBuildingMixin:
-    """Mixin providing shared functionality for Tree and Graph building TUIs.
-
-    Provides common implementations for:
-    - _refresh_left(): Update events panel in left column
-    - on_error(): Handle error events from progress reporter
-    - on_step_start()/on_step_complete(): No-op handlers for step events
-    - update_status_panel(): Update status panel (requires _status_panel() in subclass)
-
-    Subclasses must have these attributes:
-    - tui: DeepFabricTUI instance
-    - live_display: Live | None
-    - live_layout: Layout | None
-    - events_log: deque
-    """
-
-    tui: "DeepFabricTUI"
-    live_display: "Live | None"
-    live_layout: "Layout | None"
-    events_log: "deque"
-
-    def stop_live(self) -> None:
-        """Stop the Live display if it's running."""
-        if self.live_display:
-            self.live_display.stop()
-            self.live_display = None
-
-    def _refresh_left(self) -> None:
-        """Update events panel in left column."""
-        if self.live_layout is not None:
-            try:
-                self.live_layout["main"]["left"]["events"].update(
-                    self.tui.build_events_panel(list(self.events_log))
-                )
-            except Exception:
-                return
-
-    def on_error(self, error: "ClassifiedError", metadata: dict[str, Any]) -> None:  # noqa: ARG002
-        """Handle error events - log to events panel."""
-        error_event = error.to_event()
-        self.events_log.append(f"X {error_event}")
-        self._refresh_left()
-
-    def on_step_start(self, step_name: str, metadata: dict[str, Any]) -> None:  # noqa: ARG002
-        """Handle step start - topic building doesn't need specific handling."""
-        pass
-
-    def on_step_complete(self, step_name: str, metadata: dict[str, Any]) -> None:  # noqa: ARG002
-        """Handle step complete - topic building doesn't need specific handling."""
-        pass
-
-    def update_status_panel(self) -> None:
-        """Update the status panel in the right column."""
-        if self.live_layout is None:
-            return
-        try:
-            self.live_layout["main"]["right"]["status"].update(self._status_panel())
-        except Exception:
-            return
-
-    def _status_panel(self) -> Panel:
-        """Create status panel - must be implemented by subclass."""
-        raise NotImplementedError
-
-
 # Constants
 STREAM_BUFFER_DISPLAY_THRESHOLD = 1000  # Show ellipsis if accumulated text exceeds this
 STREAM_TEXT_MAX_LENGTH = 8000  # Max characters to display in streaming text
@@ -111,23 +48,26 @@ TOPIC_PREVIEW_OFFSET = 13
 # Truncation limits for event log display
 EVENT_TOPIC_MAX_LENGTH = 20  # Max chars for topic names in events
 EVENT_ERROR_MAX_LENGTH = 80  # Max chars for error summaries in events
+ERROR_MESSAGE_MAX_LENGTH = 200  # Max chars for detailed error messages in simple mode


 @dataclass
 class TUISettings:
     mode: str = "rich"  # 'rich' or 'simple'
     syntax: bool = True  # enable syntax highlighting in preview
+    show_failures: bool = False  # show failure details in real-time


 _tui_settings = TUISettings()


-def configure_tui(mode: str) -> None:
+def configure_tui(mode: str, show_failures: bool = False) -> None:
     mode = (mode or "rich").lower().strip()
     if mode not in {"rich", "simple"}:
         mode = "rich"
     _tui_settings.mode = mode
     _tui_settings.syntax = mode == "rich"
+    _tui_settings.show_failures = show_failures


 def get_tui_settings() -> TUISettings:
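The reworked configure_tui() threads the new show_failures flag into the module-level TUISettings returned by get_tui_settings(), alongside the existing rule that syntax highlighting is only enabled in rich mode. A minimal usage sketch, assuming deepfabric 4.11.0 is installed; the CLI flag that feeds show_failures is not part of this file's diff and is only implied:

    from deepfabric.tui import configure_tui, get_tui_settings

    # Headless/simple mode with immediate failure details.
    configure_tui(mode="simple", show_failures=True)

    settings = get_tui_settings()
    print(settings.mode)           # "simple"
    print(settings.syntax)         # False - highlighting is tied to rich mode
    print(settings.show_failures)  # True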
@@ -290,44 +230,105 @@ class DeepFabricTUI:
         self.console.print(f"• {message}", style="blue")


-class
-    """
+class TopicGenerationTUI(StreamObserver):
+    """Abstract base for Tree and Graph building TUIs.
+
+    Provides shared initialization, layout setup, streaming, retry handling,
+    and event management. Subclasses customize via template methods.
+
+    Subclasses must implement:
+    - _get_title() -> str
+    - _get_subtitle(model_name) -> str
+    - _get_footer_description() -> str
+    - _topic_model_type() -> str ('tree' or 'graph')
+    - _status_panel() -> Panel
+    """

     def __init__(self, tui: DeepFabricTUI):
         self.tui = tui
         self.console = tui.console
         self.progress = None
         self.overall_task = None
-        self.generated_paths = 0
         self.failed_attempts = 0
-        self.current_depth = 0
-        self.max_depth = 0
         self.stream_buffer = deque(maxlen=2000)
-        self.live_display = None
+        self.live_display: Live | None = None
         self.live_layout: Layout | None = None
         self.events_log = deque(maxlen=EVENT_LOG_MAX_LINES)
-        self.simple_mode = False
         self.current_topic_path: list[str] | None = None
         self.root_topic: str | None = None
+        self.max_depth = 0
+        self.current_depth = 0
+        self._is_simple = get_tui_settings().mode == "simple"
+        self.simple_progress: Progress | None = None
+        self.simple_task = None
+
+    # ---- Template methods for subclass customization ----
+
+    @abstractmethod
+    def _get_title(self) -> str:
+        """Return header title for this TUI."""
+        ...
+
+    @abstractmethod
+    def _get_subtitle(self, model_name: str) -> str:
+        """Return header subtitle for this TUI."""
+        ...
+
+    @abstractmethod
+    def _get_footer_description(self) -> str:
+        """Return footer progress description."""
+        ...
+
+    @abstractmethod
+    def _topic_model_type(self) -> str:
+        """Return 'tree' or 'graph' for context panel."""
+        ...
+
+    @abstractmethod
+    def _status_panel(self) -> Panel:
+        """Create status panel - must be implemented by subclass."""
+        ...
+
+    @abstractmethod
+    def _get_simple_total(self, depth: int, degree: int) -> int:
+        """Return the total for the simple mode progress bar."""
+        ...
+
+    # ---- Lifecycle ----

     def start_building(self, model_name: str, depth: int, degree: int, root_topic: str) -> None:
-        """Start the
+        """Start the building process. Handles both simple and rich modes."""
         self.max_depth = depth
         self.root_topic = root_topic

-
-
-
-
-
-
-
-        self.console.print(
+        header_panel = self.tui.create_header(
+            self._get_title(),
+            self._get_subtitle(model_name),
+        )
+
+        # Simple/headless mode: print config summary, optional progress bar, no Live
+        if self._is_simple:
+            self.console.print("\n[bold cyan]Topic Generation[/bold cyan]")
+            self.tui.info(f"Model: {model_name}")
+            self.tui.info(f"Topic configuration: depth={depth}, degree={degree}")
             self.console.print()
-            self.
+            total = self._get_simple_total(depth, degree)
+            if self.console.is_terminal:
+                self.simple_progress = Progress(
+                    SpinnerColumn(),
+                    TextColumn("[progress.description]{task.description}"),
+                    BarColumn(),
+                    MofNCompleteColumn(table_column=Column(justify="right")),
+                    TimeElapsedColumn(),
+                    console=self.console,
+                )
+                self.simple_task = self.simple_progress.add_task(
+                    self._get_footer_description(), total=total
+                )
+                self.simple_progress.start()
             return

-        #
+        # Rich mode: build full two-pane layout with footer
         self.progress = Progress(
             SpinnerColumn(),
             TextColumn(
@@ -338,17 +339,13 @@ class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
             TimeElapsedColumn(),
             console=self.console,
         )
-
+
         layout = Layout(name="root")
         layout.split(Layout(name="main"), Layout(name="footer", size=3))
         left = Layout(name="left", ratio=3)
         right = Layout(name="right", ratio=2)
         right.minimum_size = STREAM_PANEL_WIDTH

-        header_panel = self.tui.create_header(
-            "DeepFabric Tree Generation",
-            f"Building hierarchical topic structure with {model_name}",
-        )
         stats = {"Model": model_name, "Depth": f"{depth}", "Degree": f"{degree}"}
         stats_table = self.tui.create_stats_table(stats)
         params_panel = Panel(stats_table, title="Generation Parameters", border_style="dim")
@@ -363,7 +360,7 @@ class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
         left["params"].update(params_panel)
         left["context"].update(self._context_panel())
         left["events"].update(self.tui.build_events_panel(list(self.events_log)))
-
+
         right.split(
             Layout(name="status", size=8),
             Layout(name="preview"),
@@ -372,59 +369,109 @@ class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
         right["status"].update(self._status_panel())
         right["preview"].update(self.tui.build_stream_panel("Waiting for generation..."))

-        # Start Live display with layout
         self.live_layout = layout
-
+        footer_desc = self._get_footer_description()
         self.footer_progress = self.tui.create_footer(layout, title="Run Status")
-        self.footer_task = self.footer_progress.add_task(
+        self.footer_task = self.footer_progress.add_task(footer_desc, total=depth)

         self.live_display = Live(layout, console=self.console, refresh_per_second=15, screen=True)
         self.live_display.start()
-        self.overall_task = self.progress.add_task(f"
-
-    def start_depth_level(self, depth: int) -> None:
-        """Update progress for new depth level."""
-        self.current_depth = depth
-        if self.progress and self.overall_task is not None:
-            self.progress.update(
-                self.overall_task,
-                description=f"Building topic tree (depth {depth}/{self.max_depth})",
-            )
-        self.events_log.append(f"→ Depth {depth}/{self.max_depth} started")
-        self._refresh_left()
-        # Advance footer on each depth start (only after first)
-        self.update_status_panel()
+        self.overall_task = self.progress.add_task(f"{footer_desc} (depth 1/{depth})")

-    def
-        """
-        self.
-
-
+    def stop_live(self) -> None:
+        """Stop the Live display if it's running."""
+        if self.live_display:
+            self.live_display.stop()
+            self.live_display = None

-    def
-        """
-        if
-
+    def advance_simple_progress(self, advance: int = 1, description: str = "") -> None:
+        """Advance the simple mode progress bar."""
+        if self.simple_progress is not None and self.simple_task is not None:
+            with contextlib.suppress(Exception):
+                if description:
+                    self.simple_progress.update(
+                        self.simple_task, advance=advance, description=description
+                    )
+                else:
+                    self.simple_progress.update(self.simple_task, advance=advance)
+
+    def stop_simple_progress(self) -> None:
+        """Stop the simple mode progress bar."""
+        if self.simple_progress is not None:
+            self.simple_progress.stop()
+            self.simple_progress = None
+
+    def _simple_print(self, message: str) -> None:
+        """Print a message in simple mode, routing through the progress bar if active."""
+        if self.simple_progress is not None:
+            self.simple_progress.console.print(message)
         else:
-            self.
-        # Log succinct outcome
-        status = "ok" if success else "fail"
-        self.events_log.append(f"✓ Subtree {status} (+{generated_count} paths)")
-        self._refresh_left()
-        self.update_status_panel()
-        # Advance footer on completed depth
-        with contextlib.suppress(Exception):
-            self.footer_progress.update(self.footer_task, advance=1)
+            self.console.print(message)

-
-
-
-
+    # ---- Panel refresh helpers ----
+
+    def _refresh_left(self) -> None:
+        """Update events panel in left column."""
+        if self.live_layout is not None:
+            try:
+                self.live_layout["main"]["left"]["events"].update(
+                    self.tui.build_events_panel(list(self.events_log))
+                )
+            except Exception:
+                return
+
+    def update_status_panel(self) -> None:
+        """Update the status panel in the right column."""
+        if self.live_layout is None:
+            return
+        try:
+            self.live_layout["main"]["right"]["status"].update(self._status_panel())
+        except Exception:
+            return
+
+    def _context_panel(self) -> Panel:
+        return self.tui.build_context_panel(
+            root_topic=self.root_topic,
+            topic_model_type=self._topic_model_type(),
+            path=self.current_topic_path,
+        )
+
+    def _refresh_context(self) -> None:
+        if self.live_layout is not None:
+            try:
+                self.live_layout["main"]["left"]["context"].update(self._context_panel())
+            except Exception:
+                return
+
+    # ---- StreamObserver event handlers ----
+
+    def on_error(self, error: "ClassifiedError", metadata: dict[str, Any]) -> None:  # noqa: ARG002
+        """Handle error events - log to events panel."""
+        error_event = error.to_event()
+        self.events_log.append(f"X {error_event}")
         self._refresh_left()
-
+
+        # In simple mode with --show-failures, print detailed error immediately
+        if self._is_simple and get_tui_settings().show_failures:
+            self.tui.console.print(f"[red]✗ FAILURE:[/red] {error_event}")
+            if error.message and error.message != error_event:
+                msg = (
+                    error.message[:ERROR_MESSAGE_MAX_LENGTH] + "..."
+                    if len(error.message) > ERROR_MESSAGE_MAX_LENGTH
+                    else error.message
+                )
+                self.tui.console.print(f"  [dim]{msg}[/dim]")
+
+    def on_step_start(self, step_name: str, metadata: dict[str, Any]) -> None:  # noqa: ARG002
+        """Handle step start - topic building doesn't need specific handling."""
+        pass
+
+    def on_step_complete(self, step_name: str, metadata: dict[str, Any]) -> None:  # noqa: ARG002
+        """Handle step complete - topic building doesn't need specific handling."""
+        pass

     def on_stream_chunk(self, _source: str, chunk: str, _metadata: dict[str, Any]) -> None:
-        """Handle streaming text from
+        """Handle streaming text from topic generation."""
         self.stream_buffer.append(chunk)
         if self.live_display and self.live_layout is not None:
             accumulated_text = "".join(self.stream_buffer)
@@ -469,7 +516,7 @@ class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
         error_summary: str,
         metadata: dict[str, Any],
     ) -> None:
-        """Handle retry events
+        """Handle retry events by logging a concise message."""
         _ = metadata  # Unused for now
         try:
             self.events_log.append(
@@ -480,33 +527,103 @@ class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
             # Swallow errors to avoid breaking progress reporting
             return

-    def
-
-
-
-
-
+    def on_llm_retry(
+        self,
+        provider: str,
+        attempt: int,
+        wait: float,
+        error_summary: str,
+        metadata: dict[str, Any],
+    ) -> None:
+        """Handle LLM API retry events (rate limits, transient errors)."""
+        _ = metadata
+        try:
+            short_msg = f"↻ {provider} retry (attempt {attempt}), backoff {wait:.1f}s"
+            self.events_log.append(short_msg)
+            self._refresh_left()

-
-
-
-
-
-
+            if self._is_simple:
+                self._simple_print(f"  [yellow]{short_msg}: {error_summary}[/yellow]")
+        except Exception:
+            return
+
+
+class TreeBuildingTUI(TopicGenerationTUI):
+    """TUI for tree building operations."""
+
+    def __init__(self, tui: DeepFabricTUI):
+        super().__init__(tui)
+        self.generated_paths = 0
+
+    def _get_title(self) -> str:
+        return "DeepFabric Tree Generation"
+
+    def _get_subtitle(self, model_name: str) -> str:
+        return f"Building hierarchical topic structure with {model_name}"
+
+    def _get_footer_description(self) -> str:
+        return "Building topic tree"
+
+    def _topic_model_type(self) -> str:
+        return "tree"
+
+    def _get_simple_total(self, depth: int, degree: int) -> int:
+        if degree <= 1:
+            return depth
+        return (degree**depth - 1) // (degree - 1)
+
+    def start_depth_level(self, depth: int) -> None:
+        """Update progress for new depth level."""
+        self.current_depth = depth
+        if self.progress and self.overall_task is not None:
+            self.progress.update(
+                self.overall_task,
+                description=f"Building topic tree (depth {depth}/{self.max_depth})",
+            )
+        self.events_log.append(f"→ Depth {depth}/{self.max_depth} started")
+        self._refresh_left()
+        self.update_status_panel()
+
+    def start_subtree_generation(self, node_path: list[str], _num_subtopics: int) -> None:
+        """Log subtree generation without updating progress to avoid flicker."""
+        self.current_topic_path = node_path
+        self._refresh_context()
+
+    def complete_subtree_generation(self, success: bool, generated_count: int) -> None:
+        """Track completion without updating progress bar."""
+        if success:
+            self.generated_paths += generated_count
+        else:
+            self.failed_attempts += 1
+        # Log succinct outcome
+        status = "ok" if success else "fail"
+        self.events_log.append(f"✓ Subtree {status} (+{generated_count} paths)")
+        self._refresh_left()
+        self.update_status_panel()
+        # Advance footer on completed depth
+        with contextlib.suppress(Exception):
+            self.footer_progress.update(self.footer_task, advance=1)
+
+    def add_failure(self) -> None:
+        """Record a generation failure."""
+        self.failed_attempts += 1
+        self.advance_simple_progress()
+        self.events_log.append("✗ Generation failed")
+        self._refresh_left()
+        self.update_status_panel()

     def finish_building(self, total_paths: int, failed_generations: int) -> None:
         """Finish the tree building process."""
         if self.live_display:
             self.live_display.stop()
+        self.stop_simple_progress()

         # Final summary
         self.console.print()
         if failed_generations > 0:
-            self.tui.warning(f"
+            self.tui.warning(f"Created {total_paths} unique topics ({failed_generations} failed)")
         else:
-            self.tui.success("
-
-        self.tui.info(f"Generated {total_paths} total paths")
+            self.tui.success(f"Created {total_paths} unique topics")
         self.events_log.append("✓ Tree building completed")
         self.update_status_panel()
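TreeBuildingTUI._get_simple_total() sizes the simple-mode progress bar with the closed form of a geometric series: (degree**depth - 1) // (degree - 1) equals 1 + degree + degree**2 + ... + degree**(depth - 1), with a fallback to depth when degree <= 1. A standalone check of the formula from the hunk above (this sketch does not import DeepFabric):

    def simple_total(depth: int, degree: int) -> int:
        if degree <= 1:
            return depth
        # Closed form of 1 + degree + degree**2 + ... + degree**(depth - 1)
        return (degree**depth - 1) // (degree - 1)

    assert simple_total(3, 3) == 1 + 3 + 9      # 13
    assert simple_total(4, 2) == 1 + 2 + 4 + 8  # 15
    assert simple_total(5, 1) == 5              # degenerate case: degree 1
    print(simple_total(3, 3), simple_total(4, 2))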
@@ -522,101 +639,37 @@ class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
         return Panel(table, title="Status", border_style="dim", padding=(0, 1))


-class GraphBuildingTUI(
-    """TUI for graph building operations
+class GraphBuildingTUI(TopicGenerationTUI):
+    """TUI for graph building operations."""

     def __init__(self, tui: DeepFabricTUI):
-
-        self.console = tui.console
-        self.progress = None
-        self.overall_task = None
+        super().__init__(tui)
         self.nodes_count = 1  # Start with root
         self.edges_count = 0
-        self.failed_attempts = 0
-        self.stream_buffer = deque(maxlen=2000)
-        self.live_display = None
-        self.live_layout: Layout | None = None
-        self.events_log = deque(maxlen=EVENT_LOG_MAX_LINES)
-        self.simple_mode = False
-        self.current_topic_path: list[str] | None = None
-        self.root_topic: str | None = None

-    def
-        "
-        self.max_depth = depth
-        self.current_depth = 0
-        self.root_topic = root_topic
-        # If simple/headless mode, print static header and return
-        if get_tui_settings().mode == "simple":
-            header = self.tui.create_header(
-                "DeepFabric Graph Generation",
-                f"Building interconnected topic structure with {model_name}",
-            )
-            self.console.print(header)
-            self.console.print(f"Configuration: depth={depth}, degree={degree}")
-            self.console.print()
-            self.simple_mode = True
-            return
+    def _get_title(self) -> str:
+        return "DeepFabric Graph Generation"

-
-
-            SpinnerColumn(),
-            TextColumn(
-                "[bold blue]{task.description}",
-                table_column=Column(ratio=1, overflow="ellipsis"),
-            ),
-            BarColumn(bar_width=None),
-            MofNCompleteColumn(),
-            TimeElapsedColumn(),
-            console=self.console,
-        )
-        # Two-pane layout: left header + events; right status + preview with footer at bottom
-        layout = Layout(name="root")
-        layout.split(Layout(name="main"), Layout(name="footer", size=3))
-        left = Layout(name="left", ratio=3)
-        right = Layout(name="right", ratio=2)
-        right.minimum_size = STREAM_PANEL_WIDTH
+    def _get_subtitle(self, model_name: str) -> str:
+        return f"Building interconnected topic structure with {model_name}"

-
-
-            f"Building interconnected topic structure with {model_name}",
-        )
-        stats = {"Model": model_name, "Depth": f"{depth}", "Degree": f"{degree}"}
-        stats_table = self.tui.create_stats_table(stats)
-        params_panel = Panel(stats_table, title="Generation Parameters", border_style="dim")
-
-        left.split(
-            Layout(name="header", size=4),
-            Layout(name="params", size=5),
-            Layout(name="context", size=5),
-            Layout(name="events"),
-        )
-        left["header"].update(header_panel)
-        left["params"].update(params_panel)
-        left["context"].update(self._context_panel())
-        left["events"].update(self.tui.build_events_panel(list(self.events_log)))
-        # Right column: status + preview (preview fills remaining space)
-        right.split(
-            Layout(name="status", size=8),
-            Layout(name="preview"),
-        )
-        layout["main"].split_row(left, right)
-        right["status"].update(self._status_panel())
-        right["preview"].update(self.tui.build_stream_panel("Waiting for generation..."))
+    def _get_footer_description(self) -> str:
+        return "Building topic graph"

-
-
-        self.footer_task = self.footer_progress.add_task("Building topic graph", total=depth)
+    def _topic_model_type(self) -> str:
+        return "graph"

-
-
-        self.live_display.start()
-        self.overall_task = self.progress.add_task(" Building topic graph", total=depth)
+    def _get_simple_total(self, depth: int, degree: int) -> int:  # noqa: ARG002
+        return depth

     def start_depth_level(self, depth: int, leaf_count: int) -> None:
         """Update for new depth level."""
-        if self.
-
+        if self._is_simple:
+            desc = f"Depth {depth}/{self.max_depth} ({leaf_count} nodes)"
+            if self.simple_progress is not None:
+                self.advance_simple_progress(advance=0, description=desc)
+            else:
+                self.console.print(f"  Depth {depth}: expanding {leaf_count} nodes...")
         elif self.progress and self.overall_task is not None:
             self.progress.update(
                 self.overall_task,
@@ -637,10 +690,13 @@ class GraphBuildingTUI(TopicBuildingMixin, StreamObserver):

     def complete_depth_level(self, depth: int) -> None:
         """Complete a depth level."""
-        if self.
-            self.
-
-
+        if self._is_simple:
+            if self.simple_progress is not None:
+                self.advance_simple_progress()
+            else:
+                self.console.print(
+                    f"  Depth {depth} complete (nodes: {self.nodes_count}, edges: {self.edges_count})"
+                )
         elif self.progress and self.overall_task is not None:
             self.progress.advance(self.overall_task, 1)
         self.events_log.append(f"✓ Depth {depth} complete")
@@ -653,12 +709,12 @@ class GraphBuildingTUI(TopicBuildingMixin, StreamObserver):
     def add_failure(self, node_topic: str) -> None:
         """Record a generation failure."""
         self.failed_attempts += 1
-        if self.
+        if self._is_simple:
             if len(node_topic) > EVENT_ERROR_MAX_LENGTH:
                 topic_display = node_topic[:EVENT_ERROR_MAX_LENGTH] + "..."
             else:
                 topic_display = node_topic
-            self.
+            self._simple_print(f"  [red]✗ Node expansion failed: {topic_display}[/red]")
         self.events_log.append("✗ Node expansion failed")
         self._refresh_left()

@@ -692,9 +748,9 @@ class GraphBuildingTUI(TopicBuildingMixin, StreamObserver):
         else:
             error_display = error_summary

-        # In simple mode, print
-        if self.
-            self.
+        # In simple mode, print through progress-aware helper
+        if self._is_simple:
+            self._simple_print(
                 f"  [yellow]↻ Retry {attempt}/{max_attempts} '{topic_display}': {error_display}[/yellow]"
             )

@@ -706,100 +762,35 @@ class GraphBuildingTUI(TopicBuildingMixin, StreamObserver):
             # Best-effort, swallow errors to avoid breaking progress reporting
             return

-    def on_retry(
-        self,
-        sample_idx: int,
-        attempt: int,
-        max_attempts: int,
-        error_summary: str,
-        metadata: dict[str, Any],
-    ) -> None:
-        """Handle retry events from the progress reporter.
-
-        Provides a minimal implementation so GraphBuildingTUI is not abstract;
-        logs a concise retry message to the events panel.
-        """
-        _ = metadata  # Unused for now
-        try:
-            self.events_log.append(
-                f"↻ Retry sample {sample_idx} attempt {attempt}/{max_attempts}: {error_summary}"
-            )
-            self._refresh_left()
-        except Exception:
-            # Best-effort, swallow errors to avoid breaking progress reporting
-            return
-
-    def on_stream_chunk(self, _source: str, chunk: str, _metadata: dict[str, Any]) -> None:
-        """Handle streaming text from graph generation."""
-        self.stream_buffer.append(chunk)
-        if self.live_display and self.live_layout is not None:
-            accumulated_text = "".join(self.stream_buffer)
-            if len(accumulated_text) > STREAM_TEXT_MAX_LENGTH:
-                accumulated_text = "..." + accumulated_text[-STREAM_TEXT_MAX_LENGTH:]
-            display_text = accumulated_text.replace("\r", "")
-            display_text = re.sub(r"[^\S\n]+", " ", display_text)
-
-            # Compute dynamic preview lines based on terminal height
-            # Use TOPIC_PREVIEW_OFFSET for tree/graph TUIs (simpler layout)
-            terminal_height = self.console.size.height
-            target_lines = max(MIN_PREVIEW_LINES, terminal_height - TOPIC_PREVIEW_OFFSET)
-            lines = display_text.splitlines()
-
-            # Handle low-newline content (like JSON) to fill panel properly
-            if len(lines) >= int(target_lines / 2):
-                # Plenty of newlines: take the last N lines
-                visible_lines = lines[-target_lines:]
-            else:
-                # Low-newline content: take a character tail and then split
-                approx_right_cols = max(40, int(self.console.size.width * 0.42))
-                char_tail = max(800, approx_right_cols * max(8, target_lines - 2))
-                tail = display_text[-char_tail:]
-                visible_lines = tail.splitlines()[-target_lines:]
-
-            visible = "\n".join(visible_lines)
-
-            # Update the streaming panel
-            try:
-                container = self.live_layout["main"]["right"]["preview"]
-            except Exception:
-                container = self.live_layout["main"]["right"]
-            container.update(self.tui.build_stream_panel(visible))
-
-    def _context_panel(self) -> Panel:
-        return self.tui.build_context_panel(
-            root_topic=self.root_topic,
-            topic_model_type="graph",
-            path=self.current_topic_path,
-        )
-
-    def _refresh_context(self) -> None:
-        if self.live_layout is not None:
-            try:
-                self.live_layout["main"]["left"]["context"].update(self._context_panel())
-            except Exception:
-                return
-
     def finish_building(self, failed_generations: int) -> None:
         """Finish the graph building process."""
         if self.live_display:
             self.live_display.stop()
+        self.stop_simple_progress()

-        # Show final stats
         self.console.print()
-
-
-
-
-
-
-
-
-
-        # Final summary
-        if failed_generations > 0:
-            self.tui.warning(f"Graph building complete with {failed_generations} failures")
+        if self._is_simple:
+            # One-liner summary for simple/headless mode
+            if failed_generations > 0:
+                self.tui.warning(
+                    f"Created {self.nodes_count} unique topics ({failed_generations} failed)"
+                )
+            else:
+                self.tui.success(f"Created {self.nodes_count} unique topics")
         else:
-
+            # Rich mode: show detailed stats panel
+            stats_table = self.tui.create_stats_table(
+                {
+                    "Total Nodes": self.nodes_count,
+                    "Total Edges": self.edges_count,
+                    "Failed Attempts": self.failed_attempts,
+                }
+            )
+            self.console.print(Panel(stats_table, title="Final Statistics", border_style="dim"))
+            if failed_generations > 0:
+                self.tui.warning(f"Graph building complete with {failed_generations} failures")
+            else:
+                self.tui.success("Graph building completed successfully")
         self.events_log.append("✓ Graph building completed")
         self.update_status_panel()

@@ -808,7 +799,7 @@ class GraphBuildingTUI(TopicBuildingMixin, StreamObserver):
         table = Table(show_header=False, box=None, padding=(0, 1))
         table.add_column(style="cyan", no_wrap=True)
         table.add_column(style="white")
-        table.add_row("Depth:", f"{self.current_depth}/{
+        table.add_row("Depth:", f"{self.current_depth}/{self.max_depth}")
         table.add_row("Nodes:", str(self.nodes_count))
         table.add_row("Edges:", str(self.edges_count))
         if self.failed_attempts:
@@ -853,6 +844,9 @@ class DatasetGenerationTUI(StreamObserver):
         self.last_checkpoint_samples = 0
         self._resumed_from_checkpoint = False  # Set by set_checkpoint_resume_status()
         self._stop_requested = False  # Set when graceful stop requested via Ctrl+C
+        self._is_cycle_based = False  # Set by init_status; controls "Cycle" vs "Step" labels
+        self._is_simple = get_tui_settings().mode != "rich"
+        self.simple_progress: Progress | None = None  # Set by dataset_manager for simple mode
         # Retry tracking for simple mode
         self.step_retries: list[dict] = []  # Retries in current step

@@ -872,26 +866,69 @@ class DatasetGenerationTUI(StreamObserver):
         return self.progress

     def build_generation_panels(
-        self,
+        self,
+        model_name: str,
+        num_steps: int,
+        batch_size: int,
+        total_samples: int | None = None,
+        is_cycle_based: bool = False,
+        unique_topics: int = 0,
+        final_cycle_size: int = 0,
+        checkpoint_interval: int = 0,
     ) -> tuple[Panel, Panel]:
-        """Return header and parameters panels for layout use (no direct printing).
+        """Return header and parameters panels for layout use (no direct printing).
+
+        Args:
+            model_name: Name of the LLM model being used.
+            num_steps: Number of steps (step-based) or cycles (cycle-based).
+            batch_size: Batch size (step-based) or concurrency (cycle-based).
+            total_samples: Explicit total samples count. If None, calculated as num_steps * batch_size.
+            is_cycle_based: If True, display "Cycles" and "Concurrency" instead of "Steps" and "Batch Size".
+            unique_topics: Number of unique topics (cycle-based).
+            final_cycle_size: Size of the final cycle (cycle-based).
+            checkpoint_interval: Checkpoint interval in samples.
+        """
         header = self.tui.create_header(
             "DeepFabric Dataset Generation",
             f"Creating synthetic traces with {model_name}",
         )
-        stats = {
-            "Model": model_name,
-            "Steps": num_steps,
-            "Batch Size": batch_size,
-            "Total Samples": num_steps * batch_size,
-        }
-        stats_table = self.tui.create_stats_table(stats)
-        params_panel = Panel(stats_table, title="Generation Parameters", border_style="dim")

-
-
-
+        display_total = total_samples if total_samples is not None else num_steps * batch_size
+
+        lines = [f"[cyan]Model:[/] {model_name}"]
+
+        if is_cycle_based:
+            lines.append(
+                f"[cyan]Number samples:[/] {display_total}, [cyan]Concurrency:[/] {batch_size}"
+            )
+            cycles_line = (
+                f"[cyan]Cycles needed:[/] {num_steps} "
+                f"({display_total} samples ÷ {unique_topics} unique topics)"
+            )
+            if final_cycle_size and unique_topics and final_cycle_size < unique_topics:
+                cycles_line += f", final cycle: {final_cycle_size} topics (partial)"
+            lines.append(cycles_line)
+            log_msg = f"Start • cycles={num_steps} concurrency={batch_size} total={display_total}"
+        else:
+            lines.append(
+                f"[cyan]Number samples:[/] {display_total}, [cyan]Batch size:[/] {batch_size}"
+            )
+            log_msg = f"Start • steps={num_steps} batch={batch_size} total={display_total}"
+
+        if checkpoint_interval and checkpoint_interval > 0:
+            total_cp = math.ceil(display_total / checkpoint_interval)
+            lines.append(
+                f"[cyan]Checkpoint:[/] every {checkpoint_interval} samples "
+                f"({total_cp} total checkpoints)"
+            )
+
+        params_panel = Panel(
+            Text.from_markup("\n".join(lines)),
+            title="Generation Parameters",
+            border_style="dim",
         )
+
+        self.events_log.append(log_msg)
         return header, params_panel

     def on_stream_chunk(self, _source: str, chunk: str, _metadata: dict[str, Any]) -> None:
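build_generation_panels() now also reports how many checkpoints a run will produce, computed with math.ceil over the sample total, which is why "import math" was added at the top of the file. A quick standalone illustration with made-up numbers (not DeepFabric defaults):

    import math

    display_total = 500        # total samples to generate
    checkpoint_interval = 150  # samples between checkpoints

    total_cp = math.ceil(display_total / checkpoint_interval)
    print(f"Checkpoint: every {checkpoint_interval} samples ({total_cp} total checkpoints)")
    # -> Checkpoint: every 150 samples (4 total checkpoints)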
@@ -1016,8 +1053,17 @@ class DatasetGenerationTUI(StreamObserver):
         self.stream_buffer.clear()

     # Deprecated printer retained for backward compatibility
-    def show_generation_header(
-
+    def show_generation_header(
+        self,
+        model_name: str,
+        num_steps: int,
+        batch_size: int,
+        total_samples: int | None = None,
+        is_cycle_based: bool = False,
+    ) -> None:
+        header, params_panel = self.build_generation_panels(
+            model_name, num_steps, batch_size, total_samples, is_cycle_based
+        )
         self.console.print(header)
         self.console.print(params_panel)
         self.console.print()
@@ -1039,11 +1085,16 @@ class DatasetGenerationTUI(StreamObserver):

     # --- Status Panel helpers ---
     def init_status(
-        self,
+        self,
+        total_steps: int,
+        total_samples: int,
+        checkpoint_enabled: bool = False,
+        is_cycle_based: bool = False,
     ) -> None:
         self.status_total_steps = total_steps
         self.status_total_samples = total_samples
         self.status_current_step = 0
+        self._is_cycle_based = is_cycle_based
         # Preserve samples_done and failed_total if resuming from checkpoint
         if not getattr(self, "_resumed_from_checkpoint", False):
             self.status_samples_done = 0
@@ -1103,21 +1154,29 @@ class DatasetGenerationTUI(StreamObserver):
         table = Table(show_header=False, box=None, padding=(0, 1))
         table.add_column(style="cyan", no_wrap=True)
         table.add_column(style="white")
-
+        label = "Cycle:" if self._is_cycle_based else "Step:"
+        last_label = "Last Cycle:" if self._is_cycle_based else "Last Step:"
+        table.add_row(label, f"{self.status_current_step}/{self.status_total_steps}")
         if self.status_last_step_duration > 0:
-            table.add_row(
+            table.add_row(last_label, f"{self.status_last_step_duration:0.1f}s")
         table.add_row("Generated:", f"{self.status_samples_done}/{self.status_total_samples}")
         if self.status_failed_total:
             table.add_row("Failed:", str(self.status_failed_total))
         if self.checkpoint_enabled:
             if self.checkpoint_count > 0:
                 table.add_row(
-                    "Checkpoints:",
+                    "Checkpoints:",
+                    f"{self.checkpoint_count} ({self.last_checkpoint_samples} samples)",
                 )
             else:
                 table.add_row("Checkpoints:", "0 (enabled)")
         if self._stop_requested:
-
+            if self.checkpoint_enabled:
+                table.add_row("[yellow]Stopping:[/yellow]", "[yellow]at next checkpoint[/yellow]")
+            else:
+                table.add_row(
+                    "[yellow]Stopping:[/yellow]", "[yellow]saving partial results[/yellow]"
+                )
         return Panel(table, title="Status", border_style="dim", padding=(0, 1))

     def update_status_panel(self) -> None:
@@ -1231,6 +1290,18 @@ class DatasetGenerationTUI(StreamObserver):
         # Log to events panel with error indicator
         self.log_event(f"X {error_event}")

+        # In simple mode with --show-failures, print detailed error immediately
+        if self._is_simple and get_tui_settings().show_failures:
+            self.tui.console.print(f"[red]✗ FAILURE:[/red] {error_event}")
+            if error.message and error.message != error_event:
+                # Show truncated full message if different from event
+                msg = (
+                    error.message[:ERROR_MESSAGE_MAX_LENGTH] + "..."
+                    if len(error.message) > ERROR_MESSAGE_MAX_LENGTH
+                    else error.message
+                )
+                self.tui.console.print(f"  [dim]{msg}[/dim]")
+
     def on_retry(
         self,
         sample_idx: int,
@@ -1256,7 +1327,7 @@ class DatasetGenerationTUI(StreamObserver):
         """
         _ = metadata  # Unused for now

-        if
+        if self._is_simple:
             # Simple mode: track for summary at step completion
             self.step_retries.append(
                 {
@@ -1288,6 +1359,34 @@ class DatasetGenerationTUI(StreamObserver):
             return f"{total_retries} retry for sample {list(samples_with_retries)[0]}"
         return f"{total_retries} retries across {len(samples_with_retries)} samples"

+    def on_llm_retry(
+        self,
+        provider: str,
+        attempt: int,
+        wait: float,
+        error_summary: str,
+        metadata: dict[str, Any],
+    ) -> None:
+        """Handle LLM API retry events (rate limits, transient errors)."""
+        _ = metadata
+        try:
+            short_msg = f"↻ {provider} retry (attempt {attempt}), backoff {wait:.1f}s"
+            self.events_log.append(short_msg)
+            if self.live_layout is not None:
+                self.live_layout["main"]["left"]["events"].update(
+                    self.tui.build_events_panel(list(self.events_log))
+                )
+
+            if self._is_simple:
+                if self.simple_progress is not None:
+                    self.simple_progress.console.print(
+                        f"  [yellow]{short_msg}: {error_summary}[/yellow]"
+                    )
+                else:
+                    self.console.print(f"  [yellow]{short_msg}: {error_summary}[/yellow]")
+        except Exception:
+            return
+

 # Global TUI instances
 _tui_instance = None
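Both TopicGenerationTUI and DatasetGenerationTUI now expose an on_llm_retry(provider, attempt, wait, error_summary, metadata) hook for LLM API retries. The matching changes in deepfabric/llm/retry_handler.py (+28 -9) are not shown in this section, so the dispatch side is not visible here; the sketch below only exercises the observer-side signature with a hypothetical standalone observer and a simulated backoff loop:

    from typing import Any


    class LoggingRetryObserver:
        """Hypothetical observer matching the on_llm_retry signature above."""

        def on_llm_retry(
            self,
            provider: str,
            attempt: int,
            wait: float,
            error_summary: str,
            metadata: dict[str, Any],
        ) -> None:
            # Mirror the TUI's concise event format: provider, attempt, backoff.
            print(f"↻ {provider} retry (attempt {attempt}), backoff {wait:.1f}s: {error_summary}")


    observer = LoggingRetryObserver()
    for attempt in range(1, 4):
        observer.on_llm_retry("openai", attempt, 2.0**attempt, "rate limit (429)", {})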