DeepFabric 4.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfabric/__init__.py +70 -0
- deepfabric/__main__.py +6 -0
- deepfabric/auth.py +382 -0
- deepfabric/builders.py +303 -0
- deepfabric/builders_agent.py +1304 -0
- deepfabric/cli.py +1288 -0
- deepfabric/config.py +899 -0
- deepfabric/config_manager.py +251 -0
- deepfabric/constants.py +94 -0
- deepfabric/dataset_manager.py +534 -0
- deepfabric/error_codes.py +581 -0
- deepfabric/evaluation/__init__.py +47 -0
- deepfabric/evaluation/backends/__init__.py +32 -0
- deepfabric/evaluation/backends/ollama_backend.py +137 -0
- deepfabric/evaluation/backends/tool_call_parsers.py +409 -0
- deepfabric/evaluation/backends/transformers_backend.py +326 -0
- deepfabric/evaluation/evaluator.py +845 -0
- deepfabric/evaluation/evaluators/__init__.py +13 -0
- deepfabric/evaluation/evaluators/base.py +104 -0
- deepfabric/evaluation/evaluators/builtin/__init__.py +5 -0
- deepfabric/evaluation/evaluators/builtin/tool_calling.py +93 -0
- deepfabric/evaluation/evaluators/registry.py +66 -0
- deepfabric/evaluation/inference.py +155 -0
- deepfabric/evaluation/metrics.py +397 -0
- deepfabric/evaluation/parser.py +304 -0
- deepfabric/evaluation/reporters/__init__.py +13 -0
- deepfabric/evaluation/reporters/base.py +56 -0
- deepfabric/evaluation/reporters/cloud_reporter.py +195 -0
- deepfabric/evaluation/reporters/file_reporter.py +61 -0
- deepfabric/evaluation/reporters/multi_reporter.py +56 -0
- deepfabric/exceptions.py +67 -0
- deepfabric/factory.py +26 -0
- deepfabric/generator.py +1084 -0
- deepfabric/graph.py +545 -0
- deepfabric/hf_hub.py +214 -0
- deepfabric/kaggle_hub.py +219 -0
- deepfabric/llm/__init__.py +41 -0
- deepfabric/llm/api_key_verifier.py +534 -0
- deepfabric/llm/client.py +1206 -0
- deepfabric/llm/errors.py +105 -0
- deepfabric/llm/rate_limit_config.py +262 -0
- deepfabric/llm/rate_limit_detector.py +278 -0
- deepfabric/llm/retry_handler.py +270 -0
- deepfabric/metrics.py +212 -0
- deepfabric/progress.py +262 -0
- deepfabric/prompts.py +290 -0
- deepfabric/schemas.py +1000 -0
- deepfabric/spin/__init__.py +6 -0
- deepfabric/spin/client.py +263 -0
- deepfabric/spin/models.py +26 -0
- deepfabric/stream_simulator.py +90 -0
- deepfabric/tools/__init__.py +5 -0
- deepfabric/tools/defaults.py +85 -0
- deepfabric/tools/loader.py +87 -0
- deepfabric/tools/mcp_client.py +677 -0
- deepfabric/topic_manager.py +303 -0
- deepfabric/topic_model.py +20 -0
- deepfabric/training/__init__.py +35 -0
- deepfabric/training/api_key_prompt.py +302 -0
- deepfabric/training/callback.py +363 -0
- deepfabric/training/metrics_sender.py +301 -0
- deepfabric/tree.py +438 -0
- deepfabric/tui.py +1267 -0
- deepfabric/update_checker.py +166 -0
- deepfabric/utils.py +150 -0
- deepfabric/validation.py +143 -0
- deepfabric-4.4.0.dist-info/METADATA +702 -0
- deepfabric-4.4.0.dist-info/RECORD +71 -0
- deepfabric-4.4.0.dist-info/WHEEL +4 -0
- deepfabric-4.4.0.dist-info/entry_points.txt +2 -0
- deepfabric-4.4.0.dist-info/licenses/LICENSE +201 -0
deepfabric/tui.py
ADDED
|
@@ -0,0 +1,1267 @@
|
|
|
1
|
+
import contextlib
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
|
|
6
|
+
from collections import deque
|
|
7
|
+
from dataclasses import dataclass
|
|
8
|
+
from time import monotonic
|
|
9
|
+
from typing import TYPE_CHECKING, Any
|
|
10
|
+
|
|
11
|
+
from rich.align import Align
|
|
12
|
+
from rich.console import Console, RenderableType
|
|
13
|
+
from rich.layout import Layout
|
|
14
|
+
from rich.live import Live
|
|
15
|
+
from rich.panel import Panel
|
|
16
|
+
from rich.progress import (
|
|
17
|
+
BarColumn,
|
|
18
|
+
MofNCompleteColumn,
|
|
19
|
+
Progress,
|
|
20
|
+
SpinnerColumn,
|
|
21
|
+
TextColumn,
|
|
22
|
+
TimeElapsedColumn,
|
|
23
|
+
)
|
|
24
|
+
from rich.table import Column, Table
|
|
25
|
+
from rich.text import Text
|
|
26
|
+
|
|
27
|
+
from .progress import StreamObserver
|
|
28
|
+
|
|
29
|
+
if TYPE_CHECKING:
|
|
30
|
+
from .error_codes import ClassifiedError
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TopicBuildingMixin:
    """Shared behavior for the Tree and Graph topic-building TUIs.

    Supplies common implementations for:
    - _refresh_left(): repaint the events panel in the left column
    - on_error(): append classified errors to the event log
    - on_step_start()/on_step_complete(): no-op step handlers
    - update_status_panel(): repaint the right-column status panel
      (delegates to _status_panel(), which subclasses must provide)

    Host classes are expected to define these attributes:
    - tui: DeepFabricTUI instance
    - live_layout: Layout | None
    - events_log: deque
    """

    tui: "DeepFabricTUI"
    live_layout: "Layout | None"
    events_log: "deque"

    def _refresh_left(self) -> None:
        """Repaint the events panel inside the left column, if a layout is live."""
        if self.live_layout is None:
            return
        # Layout lookups can fail if the layout shape differs; ignore quietly
        # so rendering glitches never interrupt generation.
        with contextlib.suppress(Exception):
            panel = self.tui.build_events_panel(list(self.events_log))
            self.live_layout["main"]["left"]["events"].update(panel)

    def on_error(self, error: "ClassifiedError", metadata: dict[str, Any]) -> None:  # noqa: ARG002
        """Record a classified error in the event log and repaint the panel."""
        self.events_log.append(f"X {error.to_event()}")
        self._refresh_left()

    def on_step_start(self, step_name: str, metadata: dict[str, Any]) -> None:  # noqa: ARG002
        """Step-start events carry no meaning during topic building; ignore."""

    def on_step_complete(self, step_name: str, metadata: dict[str, Any]) -> None:  # noqa: ARG002
        """Step-complete events carry no meaning during topic building; ignore."""

    def update_status_panel(self) -> None:
        """Repaint the status panel in the right column, if a layout is live."""
        if self.live_layout is None:
            return
        with contextlib.suppress(Exception):
            self.live_layout["main"]["right"]["status"].update(self._status_panel())

    def _status_panel(self) -> Panel:
        """Build the status panel; concrete TUIs must override this."""
        raise NotImplementedError
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
# Constants
STREAM_BUFFER_DISPLAY_THRESHOLD = 1000  # Show ellipsis if accumulated text exceeds this
STREAM_TEXT_MAX_LENGTH = 8000  # Max characters to display in streaming text
STREAM_PANEL_WIDTH = 64  # Minimum width for the right-hand streaming pane
EVENT_LOG_MAX_LINES = 8  # Rolling window of events shown in the Events panel
STREAM_RENDER_THROTTLE_S = 0.06  # Min seconds between streaming re-renders
STREAM_FIXED_LINES = 16  # Fixed visible lines for streaming preview (used by all previews)
MIN_PREVIEW_LINES = 4  # Minimum preview lines to enforce
# Vertical space occupied by other UI elements when calculating dynamic preview height.
# Accounts for: footer (3) + status panel (8) + panel borders and margins (~9)
PREVIEW_VERTICAL_OFFSET = 20
# Offset for Graph/Tree TUIs which have simpler layouts: footer (3) + status (8) + borders (2)
TOPIC_PREVIEW_OFFSET = 13
# Truncation limits for event log display
EVENT_TOPIC_MAX_LENGTH = 20  # Max chars for topic names in events
EVENT_ERROR_MAX_LENGTH = 80  # Max chars for error summaries in events
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
@dataclass
class TUISettings:
    """Process-wide TUI configuration toggles."""

    # Display mode: 'rich' (full Live layout) or 'simple' (plain prints).
    mode: str = "rich"  # 'rich' or 'simple'
    # Whether to enable syntax highlighting in the streaming preview.
    syntax: bool = True  # enable syntax highlighting in preview


# Module-level singleton: written by configure_tui(), read via get_tui_settings().
_tui_settings = TUISettings()
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def configure_tui(mode: str) -> None:
    """Set the global TUI mode; anything unrecognized falls back to 'rich'.

    Syntax highlighting is enabled only in 'rich' mode.
    """
    normalized = (mode or "rich").lower().strip()
    if normalized not in {"rich", "simple"}:
        normalized = "rich"
    _tui_settings.mode = normalized
    _tui_settings.syntax = normalized == "rich"
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def get_tui_settings() -> TUISettings:
    """Return the process-wide TUI settings singleton."""
    return _tui_settings
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def get_preview_lines() -> int:
    """Preview height in lines; the DF_TUI_PREVIEW_LINES env var overrides it.

    Non-numeric values, or values at or below MIN_PREVIEW_LINES, fall back
    to STREAM_FIXED_LINES.
    """
    raw = os.getenv("DF_TUI_PREVIEW_LINES", str(STREAM_FIXED_LINES))
    try:
        requested = int(raw)
    except Exception:  # noqa: BLE001
        return STREAM_FIXED_LINES
    if requested > MIN_PREVIEW_LINES:
        return requested
    return STREAM_FIXED_LINES
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
class DeepFabricTUI:
    """Main TUI controller for DeepFabric operations.

    Owns the rich Console and provides panel/table builders shared by the
    tree- and graph-building TUIs, plus styled message helpers.
    """

    def __init__(self, console: Console | None = None):
        """Store the given console, or create a fresh rich Console."""
        self.console = console or Console()

    def create_header(self, title: str, subtitle: str = "") -> Panel:
        """Build a styled header panel with an optional dim subtitle."""
        header_text = Text(title, style="bold cyan")
        if subtitle:
            header_text.append(f"\n{subtitle}", style="dim")
        return Panel(header_text, border_style="bright_blue", padding=(0, 1))

    def build_stream_panel(
        self, content: RenderableType | str, title: str = "Streaming Preview"
    ) -> Panel:
        """Build the compact right-hand panel showing recent streaming output.

        Accepts any Rich renderable (Text, Syntax, Group, etc.) or a plain
        string, which is rendered dimmed.
        """
        body: RenderableType
        if isinstance(content, str):
            body = Text(content, style="dim")
        else:
            body = content
        # Top-align so content hugs the top edge when the panel expands.
        return Panel(
            Align(body, vertical="top"),
            title=title,
            border_style="dim",
            padding=(0, 1),
            expand=True,
        )

    def build_events_panel(self, events: list[str], title: str = "Events") -> Panel:
        """Build a compact events panel for the left column.

        Entries are colorized by their leading marker:
        X = error (red), T+ = tool success (green), T- = tool failure (red),
        checkmark = success (green), T = generic tool event (cyan).
        Only the newest EVENT_LOG_MAX_LINES entries are shown, newest last.
        """
        if not events:
            body = Text("Waiting...", style="dim")
        else:
            body = Text()
            for idx, entry in enumerate(events[-EVENT_LOG_MAX_LINES:]):
                if idx:
                    body.append("\n")
                if entry.startswith("X "):
                    body.append("X ", style="bold red")
                    body.append(entry[2:])
                elif entry.startswith("T+ "):
                    # Tool execution success - green
                    body.append("TOOL: ", style="bold green")
                    body.append(entry[3:])
                elif entry.startswith("T- "):
                    # Tool execution failure - red
                    body.append("TOOL ", style="bold red")
                    body.append(entry[3:])
                elif entry.startswith(("✓ ", "✔ ")):
                    body.append(entry[0] + " ", style="bold green")
                    body.append(entry[2:])
                elif entry.startswith("T "):
                    # Fallback for generic tool events - cyan
                    body.append("T ", style="bold cyan")
                    body.append(entry[2:])
                else:
                    body.append(entry)
        return Panel(body, title=title, border_style="dim", padding=(0, 1), expand=True)

    def create_footer(self, layout: Layout, title: str = "Run Status") -> Progress:
        """Attach a footer progress panel to the provided root layout.

        The root layout must expose 'main' and 'footer' children. Returns the
        Progress instance so callers can add tasks and advance them.
        """
        footer_progress = Progress(
            SpinnerColumn(),
            TextColumn(
                "[bold blue]{task.description}",
                table_column=Column(ratio=1, overflow="ellipsis"),
            ),
            BarColumn(bar_width=None),
            MofNCompleteColumn(),
            TimeElapsedColumn(),
            console=self.console,
        )
        footer_panel = Panel(footer_progress, title=title, border_style="dim", padding=(0, 1))
        layout["footer"].update(footer_panel)
        return footer_progress

    def create_stats_table(self, stats: dict[str, Any]) -> Table:
        """Build a borderless two-column key/value statistics table."""
        stats_table = Table(show_header=False, box=None, padding=(0, 1))
        stats_table.add_column(style="cyan", no_wrap=True)
        stats_table.add_column(style="white")
        for label, value in stats.items():
            stats_table.add_row(f"{label}:", str(value))
        return stats_table

    def build_context_panel(
        self,
        *,
        root_topic: str | None,
        topic_model_type: str | None,
        path: list[str] | None,
    ) -> Panel:
        """Build a small context panel describing the current topic path."""
        table = Table(show_header=False, box=None, padding=(0, 1))
        table.add_column(style="cyan", no_wrap=True)
        table.add_column(style="white")

        # Trees call their current element a leaf; graphs call it a node.
        label_current = "Current Leaf" if topic_model_type == "tree" else "Current Node"

        if root_topic:
            table.add_row("Root Topic:", root_topic)

        if not path:
            table.add_row("Status:", "Waiting for first sample...")
        else:
            current = path[-1]
            parent = path[-2] if len(path) > 1 else "-"
            table.add_row(f"{label_current}:", current)
            table.add_row("Parent:", str(parent))
            table.add_row("Path:", " → ".join(path))

        return Panel(table, title="Context", border_style="dim", padding=(0, 1))

    def success(self, message: str) -> None:
        """Print a success message in green."""
        self.console.print(f" {message}", style="green")

    def warning(self, message: str) -> None:
        """Print a warning message in yellow."""
        self.console.print(f"⚠️ {message}", style="yellow")

    def error(self, message: str) -> None:
        """Print an error message in red."""
        self.console.print(f"❌ {message}", style="red")

    def info(self, message: str) -> None:
        """Print an informational message in blue."""
        self.console.print(f" {message}", style="blue")
|
|
283
|
+
|
|
284
|
+
|
|
285
|
+
class TreeBuildingTUI(TopicBuildingMixin, StreamObserver):
    """TUI for tree building operations with simplified progress and streaming.

    In 'rich' mode it drives a full-screen Live layout (header/params/context/
    events on the left, status/streaming preview on the right, footer progress
    bar at the bottom). In 'simple' mode it falls back to plain console prints.
    """

    def __init__(self, tui: DeepFabricTUI):
        """Initialize counters, buffers, and display state (no rendering yet)."""
        self.tui = tui
        self.console = tui.console
        self.progress = None
        self.overall_task = None
        self.generated_paths = 0
        self.failed_attempts = 0
        self.current_depth = 0
        self.max_depth = 0
        self.stream_buffer = deque(maxlen=2000)
        self.live_display = None
        self.live_layout: Layout | None = None
        self.events_log = deque(maxlen=EVENT_LOG_MAX_LINES)
        self.simple_mode = False
        self.current_topic_path: list[str] | None = None
        self.root_topic: str | None = None
        # Fix: previously only assigned in start_building()'s rich path, so
        # simple mode left these attributes undefined (AttributeError was
        # silently swallowed by contextlib.suppress in complete_subtree_generation).
        self.footer_progress: Progress | None = None
        self.footer_task = None

    def start_building(self, model_name: str, depth: int, degree: int, root_topic: str) -> None:
        """Start the tree building process and set up the display."""
        self.max_depth = depth
        self.root_topic = root_topic

        # If simple/headless mode, print static header and return without Live
        if get_tui_settings().mode == "simple":
            header_panel = self.tui.create_header(
                "DeepFabric Tree Generation",
                f"Building hierarchical topic structure with {model_name}",
            )
            self.console.print(header_panel)
            self.console.print(f"Configuration: depth={depth}, degree={degree}")
            self.console.print()
            self.simple_mode = True
            return

        # Create simple progress display with indeterminate progress.
        # NOTE(review): this Progress is never placed in the layout (only
        # footer_progress is rendered); it still tracks the description text.
        self.progress = Progress(
            SpinnerColumn(),
            TextColumn(
                "[bold blue]{task.description}",
                table_column=Column(ratio=1, overflow="ellipsis"),
            ),
            BarColumn(bar_width=None),
            TimeElapsedColumn(),
            console=self.console,
        )
        # Two-pane layout: left header + progress + events; right status + preview
        layout = Layout(name="root")
        layout.split(Layout(name="main"), Layout(name="footer", size=3))
        left = Layout(name="left", ratio=3)
        right = Layout(name="right", ratio=2)
        right.minimum_size = STREAM_PANEL_WIDTH

        header_panel = self.tui.create_header(
            "DeepFabric Tree Generation",
            f"Building hierarchical topic structure with {model_name}",
        )
        stats = {"Model": model_name, "Depth": f"{depth}", "Degree": f"{degree}"}
        stats_table = self.tui.create_stats_table(stats)
        params_panel = Panel(stats_table, title="Generation Parameters", border_style="dim")

        left.split(
            Layout(name="header", size=4),
            Layout(name="params", size=5),
            Layout(name="context", size=5),
            Layout(name="events"),
        )
        left["header"].update(header_panel)
        left["params"].update(params_panel)
        left["context"].update(self._context_panel())
        left["events"].update(self.tui.build_events_panel(list(self.events_log)))
        # Right column: status + preview (preview fills remaining space)
        right.split(
            Layout(name="status", size=8),
            Layout(name="preview"),
        )
        layout["main"].split_row(left, right)
        right["status"].update(self._status_panel())
        right["preview"].update(self.tui.build_stream_panel("Waiting for generation..."))

        # Start Live display with layout
        self.live_layout = layout
        # Footer progress: one tick per completed depth level
        self.footer_progress = self.tui.create_footer(layout, title="Run Status")
        self.footer_task = self.footer_progress.add_task("Building topic tree", total=depth)

        self.live_display = Live(layout, console=self.console, refresh_per_second=15, screen=True)
        self.live_display.start()
        self.overall_task = self.progress.add_task(f"Building topic tree (depth 1/{depth})")

    def start_depth_level(self, depth: int) -> None:
        """Update progress and event log for a new depth level."""
        self.current_depth = depth
        if self.progress and self.overall_task is not None:
            self.progress.update(
                self.overall_task,
                description=f"Building topic tree (depth {depth}/{self.max_depth})",
            )
        self.events_log.append(f"→ Depth {depth}/{self.max_depth} started")
        self._refresh_left()
        self.update_status_panel()

    def start_subtree_generation(self, node_path: list[str], _num_subtopics: int) -> None:
        """Record the path being expanded; no progress update to avoid flicker."""
        self.current_topic_path = node_path
        self._refresh_context()

    def complete_subtree_generation(self, success: bool, generated_count: int) -> None:
        """Track subtree completion without updating the main progress bar."""
        if success:
            self.generated_paths += generated_count
        else:
            self.failed_attempts += 1
        # Log succinct outcome.
        # NOTE(review): the "✓" prefix is used even when status is "fail" —
        # confirm whether a "✗" marker was intended for failures.
        status = "ok" if success else "fail"
        self.events_log.append(f"✓ Subtree {status} (+{generated_count} paths)")
        self._refresh_left()
        self.update_status_panel()
        # Advance footer on completed depth (rich mode only; footer_progress
        # stays None in simple mode).
        if self.footer_progress is not None and self.footer_task is not None:
            with contextlib.suppress(Exception):
                self.footer_progress.update(self.footer_task, advance=1)

    def add_failure(self) -> None:
        """Record a generation failure."""
        self.failed_attempts += 1
        self.events_log.append("✗ Generation failed")
        self._refresh_left()
        self.update_status_panel()

    def on_stream_chunk(self, _source: str, chunk: str, _metadata: dict[str, Any]) -> None:
        """Handle streaming text from tree generation and repaint the preview."""
        self.stream_buffer.append(chunk)
        if self.live_display and self.live_layout is not None:
            accumulated_text = "".join(self.stream_buffer)
            # Trim to last N chars for performance
            if len(accumulated_text) > STREAM_TEXT_MAX_LENGTH:
                accumulated_text = "..." + accumulated_text[-STREAM_TEXT_MAX_LENGTH:]
            # Clean CRs, normalize spaces but preserve newlines
            display_text = accumulated_text.replace("\r", "")
            display_text = re.sub(r"[^\S\n]+", " ", display_text)

            # Compute dynamic preview lines based on terminal height
            # Use TOPIC_PREVIEW_OFFSET for tree/graph TUIs (simpler layout)
            terminal_height = self.console.size.height
            target_lines = max(MIN_PREVIEW_LINES, terminal_height - TOPIC_PREVIEW_OFFSET)
            lines = display_text.splitlines()

            # Handle low-newline content (like JSON) to fill panel properly
            if len(lines) >= int(target_lines / 2):
                # Plenty of newlines: take the last N lines
                visible_lines = lines[-target_lines:]
            else:
                # Low-newline content: take a character tail and then split.
                # 0.42 approximates the right column's share of terminal width.
                approx_right_cols = max(40, int(self.console.size.width * 0.42))
                char_tail = max(800, approx_right_cols * max(8, target_lines - 2))
                tail = display_text[-char_tail:]
                visible_lines = tail.splitlines()[-target_lines:]

            visible = "\n".join(visible_lines)

            # Update right-hand panel (nested preview); fall back to the whole
            # right column if the nested layout name is missing.
            try:
                container = self.live_layout["main"]["right"]["preview"]
            except Exception:
                container = self.live_layout["main"]["right"]
            container.update(self.tui.build_stream_panel(visible))

    def on_retry(
        self,
        sample_idx: int,
        attempt: int,
        max_attempts: int,
        error_summary: str,
        metadata: dict[str, Any],
    ) -> None:
        """Handle retry events from the progress reporter by logging a concise message."""
        _ = metadata  # Unused for now
        try:
            self.events_log.append(
                f"↻ Retry sample {sample_idx} attempt {attempt}/{max_attempts}: {error_summary}"
            )
            self._refresh_left()
        except Exception:
            # Swallow errors to avoid breaking progress reporting
            return

    def _context_panel(self) -> Panel:
        """Build the context panel for the current tree path."""
        return self.tui.build_context_panel(
            root_topic=self.root_topic,
            topic_model_type="tree",
            path=self.current_topic_path,
        )

    def _refresh_context(self) -> None:
        """Repaint the context panel in the left column, if live."""
        if self.live_layout is not None:
            try:
                self.live_layout["main"]["left"]["context"].update(self._context_panel())
            except Exception:
                return

    def finish_building(self, total_paths: int, failed_generations: int) -> None:
        """Stop the live display and print a final summary."""
        if self.live_display:
            self.live_display.stop()

        # Final summary
        self.console.print()
        if failed_generations > 0:
            self.tui.warning(f"Tree building complete with {failed_generations} failures")
        else:
            self.tui.success("Tree building completed successfully")

        self.tui.info(f"Generated {total_paths} total paths")
        self.events_log.append("✓ Tree building completed")
        self.update_status_panel()

    # ---- Status panel for Tree ----
    def _status_panel(self) -> Panel:
        """Build the status panel showing depth, node count, and failures."""
        table = Table(show_header=False, box=None, padding=(0, 1))
        table.add_column(style="cyan", no_wrap=True)
        table.add_column(style="white")
        table.add_row("Depth:", f"{self.current_depth}/{self.max_depth}")
        table.add_row("Nodes:", str(self.generated_paths))
        if self.failed_attempts:
            table.add_row("Failed:", str(self.failed_attempts))
        return Panel(table, title="Status", border_style="dim", padding=(0, 1))
|
|
515
|
+
|
|
516
|
+
|
|
517
|
+
class GraphBuildingTUI(TopicBuildingMixin, StreamObserver):
|
|
518
|
+
"""TUI for graph building operations with simplified progress and streaming."""
|
|
519
|
+
|
|
520
|
+
def __init__(self, tui: DeepFabricTUI):
|
|
521
|
+
self.tui = tui
|
|
522
|
+
self.console = tui.console
|
|
523
|
+
self.progress = None
|
|
524
|
+
self.overall_task = None
|
|
525
|
+
self.nodes_count = 1 # Start with root
|
|
526
|
+
self.edges_count = 0
|
|
527
|
+
self.failed_attempts = 0
|
|
528
|
+
self.stream_buffer = deque(maxlen=2000)
|
|
529
|
+
self.live_display = None
|
|
530
|
+
self.live_layout: Layout | None = None
|
|
531
|
+
self.events_log = deque(maxlen=EVENT_LOG_MAX_LINES)
|
|
532
|
+
self.simple_mode = False
|
|
533
|
+
self.current_topic_path: list[str] | None = None
|
|
534
|
+
self.root_topic: str | None = None
|
|
535
|
+
|
|
536
|
+
    def start_building(self, model_name: str, depth: int, degree: int, root_topic: str) -> None:
        """Start the graph building process.

        In 'simple' mode, prints a static header and returns. Otherwise builds
        the two-pane Live layout (header/params/context/events left; status +
        streaming preview right; footer progress bar) and starts the display.
        """
        self.max_depth = depth
        self.current_depth = 0
        self.root_topic = root_topic
        # If simple/headless mode, print static header and return
        if get_tui_settings().mode == "simple":
            header = self.tui.create_header(
                "DeepFabric Graph Generation",
                f"Building interconnected topic structure with {model_name}",
            )
            self.console.print(header)
            self.console.print(f"Configuration: depth={depth}, degree={degree}")
            self.console.print()
            self.simple_mode = True
            return

        # Create simple progress display.
        # NOTE(review): this Progress is updated by start_depth_level /
        # complete_depth_level but appears not to be placed into the layout
        # (only footer_progress is rendered) — confirm whether that is intended.
        self.progress = Progress(
            SpinnerColumn(),
            TextColumn(
                "[bold blue]{task.description}",
                table_column=Column(ratio=1, overflow="ellipsis"),
            ),
            BarColumn(bar_width=None),
            MofNCompleteColumn(),
            TimeElapsedColumn(),
            console=self.console,
        )
        # Two-pane layout: left header + events; right status + preview with footer at bottom
        layout = Layout(name="root")
        layout.split(Layout(name="main"), Layout(name="footer", size=3))
        left = Layout(name="left", ratio=3)
        right = Layout(name="right", ratio=2)
        # Keep the streaming pane readable on narrow terminals.
        right.minimum_size = STREAM_PANEL_WIDTH

        header_panel = self.tui.create_header(
            "DeepFabric Graph Generation",
            f"Building interconnected topic structure with {model_name}",
        )
        stats = {"Model": model_name, "Depth": f"{depth}", "Degree": f"{degree}"}
        stats_table = self.tui.create_stats_table(stats)
        params_panel = Panel(stats_table, title="Generation Parameters", border_style="dim")

        left.split(
            Layout(name="header", size=4),
            Layout(name="params", size=5),
            Layout(name="context", size=5),
            Layout(name="events"),
        )
        left["header"].update(header_panel)
        left["params"].update(params_panel)
        left["context"].update(self._context_panel())
        left["events"].update(self.tui.build_events_panel(list(self.events_log)))
        # Right column: status + preview (preview fills remaining space)
        right.split(
            Layout(name="status", size=8),
            Layout(name="preview"),
        )
        layout["main"].split_row(left, right)
        right["status"].update(self._status_panel())
        right["preview"].update(self.tui.build_stream_panel("Waiting for generation..."))

        # Footer progress: one tick per completed depth level
        self.footer_progress = self.tui.create_footer(layout, title="Run Status")
        self.footer_task = self.footer_progress.add_task("Building topic graph", total=depth)

        # Assign the layout before starting Live so observers can repaint panels.
        self.live_layout = layout
        self.live_display = Live(layout, console=self.console, refresh_per_second=15, screen=True)
        self.live_display.start()
        self.overall_task = self.progress.add_task(" Building topic graph", total=depth)
|
|
607
|
+
|
|
608
|
+
def start_depth_level(self, depth: int, leaf_count: int) -> None:
|
|
609
|
+
"""Update for new depth level."""
|
|
610
|
+
if self.simple_mode:
|
|
611
|
+
self.console.print(f" Depth {depth}: expanding {leaf_count} nodes...")
|
|
612
|
+
elif self.progress and self.overall_task is not None:
|
|
613
|
+
self.progress.update(
|
|
614
|
+
self.overall_task,
|
|
615
|
+
description=f" Building graph - depth {depth} ({leaf_count} nodes to expand)",
|
|
616
|
+
)
|
|
617
|
+
self.events_log.append(f"→ Depth {depth} start ({leaf_count} nodes)")
|
|
618
|
+
self._refresh_left()
|
|
619
|
+
self.current_depth = depth
|
|
620
|
+
self.update_status_panel()
|
|
621
|
+
|
|
622
|
+
def complete_node_expansion(
|
|
623
|
+
self, node_topic: str, subtopics_added: int, connections_added: int
|
|
624
|
+
) -> None:
|
|
625
|
+
"""Track node expansion."""
|
|
626
|
+
_ = node_topic # Mark as intentionally unused
|
|
627
|
+
self.nodes_count += subtopics_added
|
|
628
|
+
self.edges_count += subtopics_added + connections_added
|
|
629
|
+
|
|
630
|
+
def complete_depth_level(self, depth: int) -> None:
|
|
631
|
+
"""Complete a depth level."""
|
|
632
|
+
if self.simple_mode:
|
|
633
|
+
self.console.print(
|
|
634
|
+
f" Depth {depth} complete (nodes: {self.nodes_count}, edges: {self.edges_count})"
|
|
635
|
+
)
|
|
636
|
+
elif self.progress and self.overall_task is not None:
|
|
637
|
+
self.progress.advance(self.overall_task, 1)
|
|
638
|
+
self.events_log.append(f"✓ Depth {depth} complete")
|
|
639
|
+
self._refresh_left()
|
|
640
|
+
self.update_status_panel()
|
|
641
|
+
# Advance footer on depth complete
|
|
642
|
+
with contextlib.suppress(Exception):
|
|
643
|
+
self.footer_progress.update(self.footer_task, advance=1)
|
|
644
|
+
|
|
645
|
+
    def add_failure(self, node_topic: str) -> None:
        """Record a generation failure.

        Args:
            node_topic: Topic of the node whose expansion failed.
        """
        self.failed_attempts += 1
        if self.simple_mode:
            # Truncate long topics so the console line stays readable.
            if len(node_topic) > EVENT_ERROR_MAX_LENGTH:
                topic_display = node_topic[:EVENT_ERROR_MAX_LENGTH] + "..."
            else:
                topic_display = node_topic
            self.console.print(f" [red]✗ Node expansion failed: {topic_display}[/red]")
        # The events panel gets a generic entry (no topic) in either mode.
        self.events_log.append("✗ Node expansion failed")
        self._refresh_left()
|
|
656
|
+
|
|
657
|
+
def on_node_retry(
|
|
658
|
+
self,
|
|
659
|
+
node_topic: str,
|
|
660
|
+
attempt: int,
|
|
661
|
+
max_attempts: int,
|
|
662
|
+
error_summary: str,
|
|
663
|
+
metadata: dict[str, Any],
|
|
664
|
+
) -> None:
|
|
665
|
+
"""Handle node expansion retry events from the progress reporter.
|
|
666
|
+
|
|
667
|
+
Args:
|
|
668
|
+
node_topic: Topic of the node being expanded
|
|
669
|
+
attempt: Current attempt number (1-based)
|
|
670
|
+
max_attempts: Total number of attempts allowed
|
|
671
|
+
error_summary: Brief description of the error
|
|
672
|
+
metadata: Additional context
|
|
673
|
+
"""
|
|
674
|
+
_ = metadata # Unused for now
|
|
675
|
+
try:
|
|
676
|
+
# Truncate node topic for display
|
|
677
|
+
if len(node_topic) > EVENT_TOPIC_MAX_LENGTH:
|
|
678
|
+
topic_display = node_topic[:EVENT_TOPIC_MAX_LENGTH] + "..."
|
|
679
|
+
else:
|
|
680
|
+
topic_display = node_topic
|
|
681
|
+
# Truncate error summary
|
|
682
|
+
if len(error_summary) > EVENT_ERROR_MAX_LENGTH:
|
|
683
|
+
error_display = error_summary[:EVENT_ERROR_MAX_LENGTH] + "..."
|
|
684
|
+
else:
|
|
685
|
+
error_display = error_summary
|
|
686
|
+
|
|
687
|
+
# In simple mode, print directly to console
|
|
688
|
+
if self.simple_mode:
|
|
689
|
+
self.console.print(
|
|
690
|
+
f" [yellow]↻ Retry {attempt}/{max_attempts} '{topic_display}': {error_display}[/yellow]"
|
|
691
|
+
)
|
|
692
|
+
|
|
693
|
+
self.events_log.append(
|
|
694
|
+
f"↻ Retry {attempt}/{max_attempts} '{topic_display}': {error_display}"
|
|
695
|
+
)
|
|
696
|
+
self._refresh_left()
|
|
697
|
+
except Exception:
|
|
698
|
+
# Best-effort, swallow errors to avoid breaking progress reporting
|
|
699
|
+
return
|
|
700
|
+
|
|
701
|
+
def on_retry(
|
|
702
|
+
self,
|
|
703
|
+
sample_idx: int,
|
|
704
|
+
attempt: int,
|
|
705
|
+
max_attempts: int,
|
|
706
|
+
error_summary: str,
|
|
707
|
+
metadata: dict[str, Any],
|
|
708
|
+
) -> None:
|
|
709
|
+
"""Handle retry events from the progress reporter.
|
|
710
|
+
|
|
711
|
+
Provides a minimal implementation so GraphBuildingTUI is not abstract;
|
|
712
|
+
logs a concise retry message to the events panel.
|
|
713
|
+
"""
|
|
714
|
+
_ = metadata # Unused for now
|
|
715
|
+
try:
|
|
716
|
+
self.events_log.append(
|
|
717
|
+
f"↻ Retry sample {sample_idx} attempt {attempt}/{max_attempts}: {error_summary}"
|
|
718
|
+
)
|
|
719
|
+
self._refresh_left()
|
|
720
|
+
except Exception:
|
|
721
|
+
# Best-effort, swallow errors to avoid breaking progress reporting
|
|
722
|
+
return
|
|
723
|
+
|
|
724
|
+
    def on_stream_chunk(self, _source: str, chunk: str, _metadata: dict[str, Any]) -> None:
        """Handle streaming text from graph generation.

        Appends the chunk to the rolling buffer and, when a live layout is
        active, repaints the right-hand streaming preview panel with a tail
        of the accumulated text sized to the terminal.
        """
        self.stream_buffer.append(chunk)
        if self.live_display and self.live_layout is not None:
            accumulated_text = "".join(self.stream_buffer)
            # Keep only the most recent tail of the stream.
            if len(accumulated_text) > STREAM_TEXT_MAX_LENGTH:
                accumulated_text = "..." + accumulated_text[-STREAM_TEXT_MAX_LENGTH:]
            display_text = accumulated_text.replace("\r", "")
            # Collapse runs of non-newline whitespace to single spaces.
            display_text = re.sub(r"[^\S\n]+", " ", display_text)

            # Compute dynamic preview lines based on terminal height
            # Use TOPIC_PREVIEW_OFFSET for tree/graph TUIs (simpler layout)
            terminal_height = self.console.size.height
            target_lines = max(MIN_PREVIEW_LINES, terminal_height - TOPIC_PREVIEW_OFFSET)
            lines = display_text.splitlines()

            # Handle low-newline content (like JSON) to fill panel properly
            if len(lines) >= int(target_lines / 2):
                # Plenty of newlines: take the last N lines
                visible_lines = lines[-target_lines:]
            else:
                # Low-newline content: take a character tail and then split
                approx_right_cols = max(40, int(self.console.size.width * 0.42))
                char_tail = max(800, approx_right_cols * max(8, target_lines - 2))
                tail = display_text[-char_tail:]
                visible_lines = tail.splitlines()[-target_lines:]

            visible = "\n".join(visible_lines)

            # Update the streaming panel
            # Support both the split layout (right.preview) and the legacy
            # single right pane.
            try:
                container = self.live_layout["main"]["right"]["preview"]
            except Exception:
                container = self.live_layout["main"]["right"]
            container.update(self.tui.build_stream_panel(visible))
|
|
759
|
+
|
|
760
|
+
def _context_panel(self) -> Panel:
|
|
761
|
+
return self.tui.build_context_panel(
|
|
762
|
+
root_topic=self.root_topic,
|
|
763
|
+
topic_model_type="graph",
|
|
764
|
+
path=self.current_topic_path,
|
|
765
|
+
)
|
|
766
|
+
|
|
767
|
+
def _refresh_context(self) -> None:
|
|
768
|
+
if self.live_layout is not None:
|
|
769
|
+
try:
|
|
770
|
+
self.live_layout["main"]["left"]["context"].update(self._context_panel())
|
|
771
|
+
except Exception:
|
|
772
|
+
return
|
|
773
|
+
|
|
774
|
+
    def finish_building(self, failed_generations: int) -> None:
        """Finish the graph building process.

        Stops the live display, prints the final statistics table, and logs
        a success or warning summary depending on the failure count.

        Args:
            failed_generations: Number of node expansions that ultimately failed.
        """
        if self.live_display:
            self.live_display.stop()

        # Show final stats
        self.console.print()
        stats_table = self.tui.create_stats_table(
            {
                "Total Nodes": self.nodes_count,
                "Total Edges": self.edges_count,
                "Failed Attempts": self.failed_attempts,
            }
        )
        self.console.print(Panel(stats_table, title="Final Statistics", border_style="dim"))

        # Final summary
        if failed_generations > 0:
            self.tui.warning(f"Graph building complete with {failed_generations} failures")
        else:
            self.tui.success("Graph building completed successfully")
        self.events_log.append("✓ Graph building completed")
        self.update_status_panel()
|
|
797
|
+
|
|
798
|
+
# ---- Status panel for Graph ----
|
|
799
|
+
def _status_panel(self) -> Panel:
|
|
800
|
+
table = Table(show_header=False, box=None, padding=(0, 1))
|
|
801
|
+
table.add_column(style="cyan", no_wrap=True)
|
|
802
|
+
table.add_column(style="white")
|
|
803
|
+
table.add_row("Depth:", f"{self.current_depth}/{getattr(self, 'max_depth', 0)}")
|
|
804
|
+
table.add_row("Nodes:", str(self.nodes_count))
|
|
805
|
+
table.add_row("Edges:", str(self.edges_count))
|
|
806
|
+
if self.failed_attempts:
|
|
807
|
+
table.add_row("Failed:", str(self.failed_attempts))
|
|
808
|
+
return Panel(table, title="Status", border_style="dim", padding=(0, 1))
|
|
809
|
+
|
|
810
|
+
|
|
811
|
+
class DatasetGenerationTUI(StreamObserver):
    """Enhanced TUI for dataset generation with rich integration and streaming display."""

    # Both are populated externally by dataset_manager once a Live session starts.
    live_display: Live | None
    live_layout: Layout | None

    def __init__(self, tui: DeepFabricTUI):
        """Bind this generation TUI to the shared base TUI and reset all state."""
        self.tui = tui
        self.console = tui.console
        self.stream_buffer = deque(maxlen=2000)  # Last ~2000 chars of streaming text
        self.current_step = ""
        self.current_sample_type = ""  # Track the type of sample being generated
        self.live_display = None  # Will be set by dataset_manager
        self.live_layout = None  # Provided by dataset_manager
        self.progress: Progress | None = None
        self.stream_text = Text("Waiting for generation...", style="dim")  # Streaming content
        self.events_log = deque(maxlen=EVENT_LOG_MAX_LINES)
        # Context tracking
        self.root_topic_prompt: str | None = None
        self.topic_model_type: str | None = None  # 'tree' or 'graph'
        self.current_topic_path: list[str] | None = None
        # Render throttling state for the streaming preview panel.
        self._last_render_t = 0.0
        self._last_visible_key = ""
        # Status tracking
        self.status_total_steps = 0
        self.status_current_step = 0
        self.status_total_samples = 0
        self.status_samples_done = 0
        self.status_failed_total = 0
        self.status_step_started_at = 0.0
        # Retry tracking for simple mode
        self.step_retries: list[dict] = []  # Retries in current step

    def create_rich_progress(self) -> Progress:
        """Create a rich progress bar for dataset generation (without TimeRemainingColumn)."""
        self.progress = Progress(
            SpinnerColumn(),
            TextColumn(
                "[bold blue]{task.description}",
                table_column=Column(ratio=1, overflow="ellipsis"),
            ),
            BarColumn(bar_width=None),
            MofNCompleteColumn(),
            TimeElapsedColumn(),
            console=self.console,
        )
        return self.progress

    def build_generation_panels(
        self, model_name: str, num_steps: int, batch_size: int
    ) -> tuple[Panel, Panel]:
        """Return header and parameters panels for layout use (no direct printing)."""
        header = self.tui.create_header(
            "DeepFabric Dataset Generation",
            f"Creating synthetic traces with {model_name}",
        )
        stats = {
            "Model": model_name,
            "Steps": num_steps,
            "Batch Size": batch_size,
            "Total Samples": num_steps * batch_size,
        }
        stats_table = self.tui.create_stats_table(stats)
        params_panel = Panel(stats_table, title="Generation Parameters", border_style="dim")

        # Seed events log
        self.events_log.append(
            f"Start • steps={num_steps} batch={batch_size} total={num_steps * batch_size}"
        )
        return header, params_panel

    def on_stream_chunk(self, _source: str, chunk: str, _metadata: dict[str, Any]) -> None:
        """Handle incoming streaming text chunks from LLM.

        Args:
            source: Source identifier (e.g., "user_question", "tool_sim_weather")
            chunk: Text chunk from LLM
            metadata: Additional context
        """
        # Append chunk to buffer (deque auto-trims to maxlen)
        self.stream_buffer.append(chunk)

        # Update the live display if it's running
        if self.live_display and self.live_layout is not None:
            self.update_stream_panel()

    def on_step_start(self, step_name: str, metadata: dict[str, Any]) -> None:
        """Update current step display.

        Args:
            step_name: Human-readable step name
            metadata: Additional context (sample_idx, conversation_type, etc.)
        """
        # Update current step
        self.current_step = step_name

        # Extract and update sample type from metadata if available
        if "conversation_type" in metadata:
            conv_type = metadata["conversation_type"]
            # Map conversation types to friendly names
            type_map = {
                "basic": "Basic Q&A",
                "chain_of_thought": "Chain of Thought",
                "single_turn_agent": "Single-Turn Agent (Tool Calling)",
                "multi_turn_agent": "Multi-Turn Agent (Tool Calling)",
            }
            self.current_sample_type = type_map.get(conv_type, conv_type)
        elif "agent_mode" in metadata:
            agent_mode = metadata["agent_mode"]
            if agent_mode == "single_turn":
                self.current_sample_type = "Single-Turn Agent (Tool Calling)"
            elif agent_mode == "multi_turn":
                self.current_sample_type = "Multi-Turn Agent (Tool Calling)"
            else:
                self.current_sample_type = f"Agent ({agent_mode})"

        # Update current topic path if provided
        topic_path = metadata.get("topic_path") if isinstance(metadata, dict) else None
        if topic_path:
            # Ensure list[str]
            try:
                self.current_topic_path = list(topic_path)
            except Exception:  # noqa: BLE001
                self.current_topic_path = None
            self.update_context_panel()

        # Don't print anything - the progress bar already shows progress
        # Just silently update internal state

    def on_step_complete(self, step_name: str, metadata: dict[str, Any]) -> None:
        """Handle step completion.

        Args:
            step_name: Human-readable step name
            metadata: Additional context
        """
        # Could add completion markers or timing info here if desired
        pass

    def on_tool_execution(self, tool_name: str, success: bool, metadata: dict[str, Any]) -> None:
        """Handle tool execution events from Spin.

        Args:
            tool_name: Name of the tool executed
            success: Whether execution succeeded
            metadata: Additional context (arguments, error_type, etc.)
        """
        # Format arguments preview
        args = metadata.get("arguments", {})
        args_preview = self._format_args_preview(args, max_len=20)

        # Use prefix patterns that build_events_panel recognizes for coloring
        # T+ = green (success), T- = red (failure)
        if success:
            self.log_event(f"T+ {tool_name}({args_preview})")
        else:
            error_type = metadata.get("error_type", "error")
            self.log_event(f"T- {tool_name}({args_preview}) -> {error_type}")

    def _format_args_preview(self, args: dict[str, Any], max_len: int = 20) -> str:
        """Format tool arguments as truncated JSON preview.

        Args:
            args: Tool arguments dictionary
            max_len: Maximum length before truncation

        Returns:
            Truncated JSON string with ellipsis if needed
        """
        if not args:
            return ""
        try:
            json_str = json.dumps(args, separators=(",", ":"))
            if len(json_str) <= max_len:
                return json_str
            return json_str[:max_len] + "..."
        except Exception:  # noqa: BLE001
            # Non-serializable arguments: fall back to a placeholder.
            return "..."

    def get_stream_display(self) -> str:
        """Build the streaming text display from buffer.

        Returns:
            Formatted string of recent LLM output
        """
        if not self.stream_buffer:
            return "[dim italic]Waiting for generation...[/dim italic]"

        # Get recent text from buffer
        recent_text = "".join(self.stream_buffer)

        # Truncate if too long and add ellipsis
        max_display_length = 300
        if len(recent_text) > max_display_length:
            recent_text = "..." + recent_text[-max_display_length:]

        return f"[dim]{recent_text}[/dim]"

    def clear_stream_buffer(self) -> None:
        """Clear the streaming text buffer (e.g., between samples)."""
        self.stream_buffer.clear()

    # Deprecated printer retained for backward compatibility
    def show_generation_header(self, model_name: str, num_steps: int, batch_size: int) -> None:
        """Print the generation header and parameter panels directly to the console."""
        header, params_panel = self.build_generation_panels(model_name, num_steps, batch_size)
        self.console.print(header)
        self.console.print(params_panel)
        self.console.print()

    def _context_panel(self) -> Panel:
        """Build the context panel from the tracked topic prompt/path state."""
        return self.tui.build_context_panel(
            root_topic=self.root_topic_prompt,
            topic_model_type=self.topic_model_type,
            path=self.current_topic_path,
        )

    def update_context_panel(self) -> None:
        """Repaint the left-hand context panel; no-op without a live layout."""
        if self.live_layout is None:
            return
        try:
            self.live_layout["main"]["left"]["context"].update(self._context_panel())
        except Exception:
            return

    # --- Status Panel helpers ---
    def init_status(self, total_steps: int, total_samples: int) -> None:
        """Reset status counters at the start of a generation run."""
        self.status_total_steps = total_steps
        self.status_total_samples = total_samples
        self.status_current_step = 0
        self.status_samples_done = 0
        self.status_failed_total = 0
        self.status_step_started_at = 0.0

    def status_step_start(self, step: int, total_steps: int | None = None) -> None:
        """Mark the start of a generation step and stamp its start time."""
        self.status_current_step = step
        if total_steps is not None:
            self.status_total_steps = total_steps
        self.status_step_started_at = monotonic()
        self.update_status_panel()

    def status_step_complete(self, samples_generated: int, failed_in_step: int = 0) -> None:
        """Fold a finished step's sample/failure counts into the running totals."""
        self.status_samples_done += max(0, int(samples_generated))
        self.status_failed_total += max(0, int(failed_in_step))
        self.update_status_panel()

    def _status_panel(self) -> Panel:
        """Render the status table (step, elapsed, generated, failures)."""
        elapsed = 0.0
        if self.status_step_started_at:
            elapsed = max(0.0, monotonic() - self.status_step_started_at)
        table = Table(show_header=False, box=None, padding=(0, 1))
        table.add_column(style="cyan", no_wrap=True)
        table.add_column(style="white")
        table.add_row("Step:", f"{self.status_current_step}/{self.status_total_steps}")
        table.add_row("Step Elapsed:", f"{elapsed:0.1f}s")
        table.add_row("Generated:", f"{self.status_samples_done}/{self.status_total_samples}")
        if self.status_failed_total:
            table.add_row("Failed:", str(self.status_failed_total))
        return Panel(table, title="Status", border_style="dim", padding=(0, 1))

    def update_status_panel(self) -> None:
        """Repaint the right-hand status panel; no-op without a live layout."""
        if self.live_layout is None:
            return
        try:
            self.live_layout["main"]["right"]["status"].update(self._status_panel())
        except Exception:
            return

    def success(self, message: str) -> None:
        """Display a success message."""
        self.tui.success(message)

    def warning(self, message: str) -> None:
        """Display a warning message."""
        self.tui.warning(message)

    def error(self, message: str) -> None:
        """Display an error message."""
        self.tui.error(message)

    def info(self, message: str) -> None:
        """Display an info message."""
        self.tui.info(message)

    # --- Compact two-pane helpers ---
    def update_stream_panel(self) -> None:
        """Refresh the right-hand streaming panel with current buffer text."""
        if self.live_layout is None:
            return

        # Throttle: avoid re-rendering too frequently
        now = monotonic()
        if now - getattr(self, "_last_render_t", 0.0) < STREAM_RENDER_THROTTLE_S:  # noqa: PLR2004
            return

        # Build multi-line text; show the last N lines based on terminal height
        accumulated_text = "".join(self.stream_buffer)
        if len(accumulated_text) > STREAM_TEXT_MAX_LENGTH:
            accumulated_text = "..." + accumulated_text[-STREAM_TEXT_MAX_LENGTH:]

        normalized = accumulated_text.replace("\r", "")
        # Collapse runs of non-newline whitespace to single spaces.
        normalized = re.sub(r"[^\S\n]+", " ", normalized)

        # Calculate target lines based on terminal height
        terminal_height = self.console.size.height
        target_lines = max(MIN_PREVIEW_LINES, terminal_height - PREVIEW_VERTICAL_OFFSET)
        lines = normalized.splitlines()
        if len(lines) >= int(target_lines / 2):
            # Plenty of newlines: take the last N lines
            visible_lines = lines[-target_lines:]
        else:
            # Low-newline content: take a character tail and then split
            approx_right_cols = max(40, int(self.console.size.width * 0.42))
            char_tail = max(800, approx_right_cols * max(8, target_lines - 2))
            tail = normalized[-char_tail:]
            visible_lines = tail.splitlines()[-target_lines:]

        # Flexible-height layout handles stability; render just the last N lines
        visible = "\n".join(visible_lines)

        # Skip update if content suffix hasn't changed
        key = visible[-200:]
        if key == getattr(self, "_last_visible_key", ""):
            return
        self._last_visible_key = key

        # Build simple dim text renderable (syntax highlighting removed)
        renderable = Text(visible, style="dim")

        title = (
            f"Streaming Preview • {self.current_sample_type}"
            if self.current_sample_type
            else "Streaming Preview"
        )
        # Support both old layout (right only) and new split layout (right.preview)
        try:
            container = self.live_layout["main"]["right"]["preview"]
        except Exception:
            container = self.live_layout["main"]["right"]
        container.update(self.tui.build_stream_panel(renderable, title=title))
        self._last_render_t = now

    def log_event(self, message: str) -> None:
        """Append an event to the left-side event log and refresh."""
        self.events_log.append(message)
        if self.live_layout is not None:
            self.live_layout["main"]["left"]["events"].update(
                self.tui.build_events_panel(list(self.events_log))
            )

    def on_error(self, error: "ClassifiedError", metadata: dict[str, Any]) -> None:
        """Handle error events from the progress reporter.

        Displays concise error information in the Events panel using
        standardized DeepFabric error codes.

        Args:
            error: ClassifiedError with error code and details
            metadata: Additional context (sample_idx, etc.)
        """
        # Format concise error message for Events panel
        error_event = error.to_event()

        # Add sample context if available
        sample_idx = metadata.get("sample_idx")
        if sample_idx is not None:
            error_event = f"[{sample_idx}] {error_event}"

        # Log to events panel with error indicator
        self.log_event(f"X {error_event}")

    def on_retry(
        self,
        sample_idx: int,
        attempt: int,
        max_attempts: int,
        error_summary: str,
        metadata: dict[str, Any],
    ) -> None:
        """Handle retry events from the progress reporter.

        In rich mode, we don't log individual retries to avoid cluttering the
        events panel - the streaming preview shows activity and final errors
        are logged via on_error.

        In simple mode, tracks retries for display at step completion.

        Args:
            sample_idx: 1-based sample index
            attempt: Current attempt number (1-based)
            max_attempts: Total number of attempts allowed
            error_summary: Brief description of the validation error
            metadata: Additional context
        """
        _ = metadata  # Unused for now

        if get_tui_settings().mode != "rich":
            # Simple mode: track for summary at step completion
            self.step_retries.append(
                {
                    "sample_idx": sample_idx,
                    "attempt": attempt,
                    "max_attempts": max_attempts,
                    "error_summary": error_summary,
                }
            )

    def clear_step_retries(self) -> None:
        """Clear retry tracking for the current step."""
        self.step_retries.clear()

    def get_step_retry_summary(self) -> str | None:
        """Get a summary of retries in the current step.

        Returns:
            Summary string or None if no retries occurred
        """
        if not self.step_retries:
            return None

        # Count unique samples that had retries
        samples_with_retries = {r["sample_idx"] for r in self.step_retries}
        total_retries = len(self.step_retries)

        if len(samples_with_retries) == 1:
            # NOTE(review): wording stays singular ("retry") even when
            # total_retries > 1 for the same sample — confirm if intended.
            return f"{total_retries} retry for sample {list(samples_with_retries)[0]}"
        return f"{total_retries} retries across {len(samples_with_retries)} samples"
|
|
1237
|
+
|
|
1238
|
+
|
|
1239
|
+
# Global TUI instances
# Lazily created singletons; accessed only through get_tui()/get_dataset_tui().
_tui_instance: DeepFabricTUI | None = None
_dataset_tui_instance: DatasetGenerationTUI | None = None
|
|
1242
|
+
|
|
1243
|
+
|
|
1244
|
+
def get_tui() -> DeepFabricTUI:
    """Return the process-wide DeepFabricTUI, creating it on first use."""
    global _tui_instance  # noqa: PLW0603
    if _tui_instance is not None:
        return _tui_instance
    _tui_instance = DeepFabricTUI()
    return _tui_instance
|
|
1250
|
+
|
|
1251
|
+
|
|
1252
|
+
def get_tree_tui() -> TreeBuildingTUI:
    """Construct a fresh tree building TUI bound to the shared base TUI."""
    base = get_tui()
    return TreeBuildingTUI(base)
|
|
1255
|
+
|
|
1256
|
+
|
|
1257
|
+
def get_graph_tui() -> GraphBuildingTUI:
    """Construct a fresh graph building TUI bound to the shared base TUI."""
    base = get_tui()
    return GraphBuildingTUI(base)
|
|
1260
|
+
|
|
1261
|
+
|
|
1262
|
+
def get_dataset_tui() -> DatasetGenerationTUI:
    """Get the global dataset generation TUI instance (singleton)."""
    global _dataset_tui_instance  # noqa: PLW0603
    if _dataset_tui_instance is not None:
        return _dataset_tui_instance
    _dataset_tui_instance = DatasetGenerationTUI(get_tui())
    return _dataset_tui_instance
|