DeepFabric 4.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfabric/__init__.py +70 -0
- deepfabric/__main__.py +6 -0
- deepfabric/auth.py +382 -0
- deepfabric/builders.py +303 -0
- deepfabric/builders_agent.py +1304 -0
- deepfabric/cli.py +1288 -0
- deepfabric/config.py +899 -0
- deepfabric/config_manager.py +251 -0
- deepfabric/constants.py +94 -0
- deepfabric/dataset_manager.py +534 -0
- deepfabric/error_codes.py +581 -0
- deepfabric/evaluation/__init__.py +47 -0
- deepfabric/evaluation/backends/__init__.py +32 -0
- deepfabric/evaluation/backends/ollama_backend.py +137 -0
- deepfabric/evaluation/backends/tool_call_parsers.py +409 -0
- deepfabric/evaluation/backends/transformers_backend.py +326 -0
- deepfabric/evaluation/evaluator.py +845 -0
- deepfabric/evaluation/evaluators/__init__.py +13 -0
- deepfabric/evaluation/evaluators/base.py +104 -0
- deepfabric/evaluation/evaluators/builtin/__init__.py +5 -0
- deepfabric/evaluation/evaluators/builtin/tool_calling.py +93 -0
- deepfabric/evaluation/evaluators/registry.py +66 -0
- deepfabric/evaluation/inference.py +155 -0
- deepfabric/evaluation/metrics.py +397 -0
- deepfabric/evaluation/parser.py +304 -0
- deepfabric/evaluation/reporters/__init__.py +13 -0
- deepfabric/evaluation/reporters/base.py +56 -0
- deepfabric/evaluation/reporters/cloud_reporter.py +195 -0
- deepfabric/evaluation/reporters/file_reporter.py +61 -0
- deepfabric/evaluation/reporters/multi_reporter.py +56 -0
- deepfabric/exceptions.py +67 -0
- deepfabric/factory.py +26 -0
- deepfabric/generator.py +1084 -0
- deepfabric/graph.py +545 -0
- deepfabric/hf_hub.py +214 -0
- deepfabric/kaggle_hub.py +219 -0
- deepfabric/llm/__init__.py +41 -0
- deepfabric/llm/api_key_verifier.py +534 -0
- deepfabric/llm/client.py +1206 -0
- deepfabric/llm/errors.py +105 -0
- deepfabric/llm/rate_limit_config.py +262 -0
- deepfabric/llm/rate_limit_detector.py +278 -0
- deepfabric/llm/retry_handler.py +270 -0
- deepfabric/metrics.py +212 -0
- deepfabric/progress.py +262 -0
- deepfabric/prompts.py +290 -0
- deepfabric/schemas.py +1000 -0
- deepfabric/spin/__init__.py +6 -0
- deepfabric/spin/client.py +263 -0
- deepfabric/spin/models.py +26 -0
- deepfabric/stream_simulator.py +90 -0
- deepfabric/tools/__init__.py +5 -0
- deepfabric/tools/defaults.py +85 -0
- deepfabric/tools/loader.py +87 -0
- deepfabric/tools/mcp_client.py +677 -0
- deepfabric/topic_manager.py +303 -0
- deepfabric/topic_model.py +20 -0
- deepfabric/training/__init__.py +35 -0
- deepfabric/training/api_key_prompt.py +302 -0
- deepfabric/training/callback.py +363 -0
- deepfabric/training/metrics_sender.py +301 -0
- deepfabric/tree.py +438 -0
- deepfabric/tui.py +1267 -0
- deepfabric/update_checker.py +166 -0
- deepfabric/utils.py +150 -0
- deepfabric/validation.py +143 -0
- deepfabric-4.4.0.dist-info/METADATA +702 -0
- deepfabric-4.4.0.dist-info/RECORD +71 -0
- deepfabric-4.4.0.dist-info/WHEEL +4 -0
- deepfabric-4.4.0.dist-info/entry_points.txt +2 -0
- deepfabric-4.4.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import traceback
|
|
3
|
+
|
|
4
|
+
from typing import TYPE_CHECKING
|
|
5
|
+
|
|
6
|
+
from .config import DeepFabricConfig
|
|
7
|
+
from .constants import (
|
|
8
|
+
TOPIC_TREE_DEFAULT_DEGREE,
|
|
9
|
+
TOPIC_TREE_DEFAULT_DEPTH,
|
|
10
|
+
TOPIC_TREE_DEFAULT_TEMPERATURE,
|
|
11
|
+
)
|
|
12
|
+
from .exceptions import ConfigurationError
|
|
13
|
+
from .factory import create_topic_generator
|
|
14
|
+
from .graph import Graph
|
|
15
|
+
from .topic_model import TopicModel
|
|
16
|
+
from .tree import Tree
|
|
17
|
+
from .tui import get_graph_tui, get_tree_tui, get_tui
|
|
18
|
+
from .utils import read_topic_tree_from_jsonl
|
|
19
|
+
|
|
20
|
+
if TYPE_CHECKING:
|
|
21
|
+
from .topic_model import TopicModel
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _ensure_not_running_loop(func_name: str) -> None:
|
|
25
|
+
"""Raise a helpful error if invoked from an active asyncio loop."""
|
|
26
|
+
try:
|
|
27
|
+
loop = asyncio.get_running_loop()
|
|
28
|
+
except RuntimeError:
|
|
29
|
+
return
|
|
30
|
+
|
|
31
|
+
if loop.is_running():
|
|
32
|
+
msg = (
|
|
33
|
+
f"{func_name} cannot be called from within an active event loop. "
|
|
34
|
+
f"Use `{func_name}_async` instead."
|
|
35
|
+
)
|
|
36
|
+
raise RuntimeError(msg)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
async def _process_graph_events(graph: Graph, debug: bool = False) -> dict | None:
    """Consume a graph build's event stream and forward progress to the graph TUI.

    Args:
        graph: Graph object whose ``build_async`` event stream is consumed.
        debug: When True, print per-node failure details after the build and
            a full traceback on errors.

    Returns:
        The final ``build_complete`` event dictionary, or None if the stream
        ended without one.

    Raises:
        Exception: Re-raises any error from the underlying build after
            reporting it to the TUI.
    """
    from .progress import ProgressReporter  # noqa: PLC0415

    def _as_int(value) -> int:
        # Event payload fields may arrive as str or int; anything else -> 0.
        return int(value) if isinstance(value, str | int) else 0

    tui = get_graph_tui()

    # Create and attach progress reporter for streaming
    progress_reporter = ProgressReporter()
    progress_reporter.attach(tui)
    graph.progress_reporter = progress_reporter

    tui_started = False

    final_event = None
    try:
        async for event in graph.build_async():
            if event["event"] == "depth_start":
                if not tui_started:
                    # Defer the TUI header until the first real event arrives.
                    tui.start_building(
                        graph.model_name, graph.depth, graph.degree, graph.topic_prompt
                    )
                    tui_started = True
                tui.start_depth_level(
                    _as_int(event["depth"]),
                    _as_int(event.get("leaf_count", 0)),
                )
            elif event["event"] == "node_expanded":
                tui.complete_node_expansion(
                    event["node_topic"],
                    _as_int(event["subtopics_added"]),
                    _as_int(event.get("connections_added", 0)),
                )
            elif event["event"] == "depth_complete":
                tui.complete_depth_level(_as_int(event["depth"]))
            elif event["event"] == "build_complete":
                failed_generations = _as_int(event.get("failed_generations", 0))
                tui.finish_building(failed_generations)
                final_event = event

                if debug and failed_generations > 0 and hasattr(graph, "failed_generations"):
                    get_tui().error("\nDebug: Graph generation failures:")
                    for idx, failure in enumerate(graph.failed_generations, 1):
                        node_id = failure.get("node_id", "unknown")
                        attempts = failure.get("attempts", 0)
                        last_error = failure.get("last_error", "Unknown error")
                        get_tui().error(f" [{idx}] Node ID: {node_id}, Attempts: {attempts}")
                        get_tui().error(f" Error: {last_error}")
    except Exception as e:
        if debug:
            get_tui().error(f"Debug: Full traceback:\n{traceback.format_exc()}")
        get_tui().error(f"Graph build failed: {str(e)}")
        raise
    else:
        return final_event
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
async def _process_tree_events(tree: Tree, debug: bool = False) -> dict | None:
    """Consume a tree build's event stream and forward progress to the tree TUI.

    Args:
        tree: Tree object whose ``build_async`` event stream is consumed.
        debug: When True, print per-path failure details after the build and
            a full traceback on errors.

    Returns:
        The final ``build_complete`` event dictionary, or None if the stream
        ended without one.

    Raises:
        Exception: Re-raises any error from the underlying build after
            reporting it to the TUI.
    """
    from .progress import ProgressReporter  # noqa: PLC0415

    def _as_int(value) -> int:
        # Event payload fields may arrive as str or int; anything else -> 0.
        return int(value) if isinstance(value, str | int) else 0

    tui = get_tree_tui()

    # Create and attach progress reporter for streaming
    progress_reporter = ProgressReporter()
    progress_reporter.attach(tui)
    tree.progress_reporter = progress_reporter

    final_event = None
    try:
        async for event in tree.build_async():
            if event["event"] == "build_start":
                tui.start_building(
                    event["model_name"],
                    _as_int(event["depth"]),
                    _as_int(event["degree"]),
                    tree.topic_prompt,
                )
            elif event["event"] == "subtopics_generated":
                if not event["success"]:
                    tui.add_failure()
                    if debug and "error" in event:
                        get_tui().error(f"Debug: Tree generation failure - {event['error']}")
            elif event["event"] == "build_complete":
                failed_generations = _as_int(event["failed_generations"])
                tui.finish_building(_as_int(event["total_paths"]), failed_generations)
                final_event = event

                if debug and failed_generations > 0 and hasattr(tree, "failed_generations"):
                    get_tui().error("\nDebug: Tree generation failures:")
                    for idx, failure in enumerate(tree.failed_generations, 1):
                        get_tui().error(
                            f" [{idx}] Path: {' -> '.join(failure.get('node_path', []))}"
                        )
                        get_tui().error(f" Error: {failure.get('error', 'Unknown error')}")
    except Exception as e:
        if debug:
            get_tui().error(f"Debug: Full traceback:\n{traceback.format_exc()}")
        get_tui().error(f"Tree build failed: {str(e)}")
        raise
    else:
        return final_event
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def handle_graph_events(graph: Graph, debug: bool = False) -> dict | None:
    """
    Build graph with TUI progress.

    Synchronous wrapper: drives the build via asyncio.run(), so it must NOT
    be called from inside an already-running event loop — use
    handle_graph_events_async there instead (the guard below enforces this).

    Args:
        graph: Graph object to build
        debug: Enable debug output

    Returns:
        Final build event dictionary or None

    Raises:
        Exception: If graph build fails
    """
    _ensure_not_running_loop("handle_graph_events")
    return asyncio.run(_process_graph_events(graph, debug=debug))
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
async def handle_graph_events_async(graph: Graph, debug: bool = False) -> dict | None:
    """Async variant of handle_graph_events; safe to await inside an event loop."""
    return await _process_graph_events(graph, debug=debug)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def handle_tree_events(tree: Tree, debug: bool = False) -> dict | None:
    """
    Build tree with TUI progress.

    Synchronous wrapper: drives the build via asyncio.run(), so it must NOT
    be called from inside an already-running event loop — use
    handle_tree_events_async there instead (the guard below enforces this).

    Args:
        tree: Tree object to build
        debug: Enable debug output

    Returns:
        Final build event dictionary or None

    Raises:
        Exception: If tree build fails
    """
    _ensure_not_running_loop("handle_tree_events")
    return asyncio.run(_process_tree_events(tree, debug=debug))
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
async def handle_tree_events_async(tree: Tree, debug: bool = False) -> dict | None:
    """Async variant of handle_tree_events; safe to await inside an event loop."""
    return await _process_tree_events(tree, debug=debug)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def load_or_build_topic_model(
    config: DeepFabricConfig,
    topics_load: str | None = None,
    topics_overrides: dict | None = None,
    provider: str | None = None,
    model: str | None = None,
    base_url: str | None = None,
    debug: bool = False,
) -> TopicModel:
    """
    Load topic model from file or build new one.

    Args:
        config: DeepFabricConfig object
        topics_load: Path to existing topics file (JSONL for tree, JSON for graph)
        topics_overrides: Override parameters for topic generation
        provider: LLM provider (only used when loading a tree from JSONL)
        model: Model name (only used when loading a tree from JSONL)
        base_url: Base URL for LLM API (only used when loading a tree from JSONL)
        debug: Enable debug output

    Returns:
        TopicModel (Tree or Graph)

    Raises:
        ConfigurationError: If loading or building fails
    """
    tui = get_tui()

    if topics_load:
        # Determine mode from config or file extension
        # NOTE(review): config mode "graph" OR a ".json" suffix both force the
        # graph path — a tree saved with a .json extension would be misread as
        # a graph; confirm callers always use .jsonl for trees.
        is_graph = config.topics.mode == "graph" or topics_load.endswith(".json")

        if is_graph:
            tui.info(f"Reading topic graph from JSON file: {topics_load}")
            topics_params = config.get_topics_params(**(topics_overrides or {}))
            return Graph.from_json(topics_load, topics_params)
        tui.info(f"Reading topic tree from JSONL file: {topics_load}")
        dict_list = read_topic_tree_from_jsonl(topics_load)

        # Fallback provider/model when the caller did not supply them.
        final_provider = provider or "ollama"
        final_model = model or "mistral:latest"

        # Placeholder constructor arguments: the tree's structure comes from
        # the JSONL file via from_dict_list() below, so topic_prompt/degree/
        # depth/temperature here are presumably never used for generation —
        # TODO confirm against Tree's implementation.
        topic_model = Tree(
            topic_prompt="default",
            provider=final_provider,
            model_name=final_model,
            topic_system_prompt="",
            degree=TOPIC_TREE_DEFAULT_DEGREE,
            depth=TOPIC_TREE_DEFAULT_DEPTH,
            temperature=TOPIC_TREE_DEFAULT_TEMPERATURE,
            base_url=base_url,
        )
        topic_model.from_dict_list(dict_list)
        return topic_model

    # Build new topic model
    topic_model = create_topic_generator(config, topics_overrides=topics_overrides)

    # Build with appropriate event handler (synchronous wrappers; will raise
    # if called from inside a running event loop).
    if isinstance(topic_model, Graph):
        handle_graph_events(topic_model, debug=debug)
    elif isinstance(topic_model, Tree):
        handle_tree_events(topic_model, debug=debug)

    return topic_model
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
def save_topic_model(
    topic_model: TopicModel,
    config: DeepFabricConfig,
    topics_save_as: str | None = None,
) -> None:
    """
    Persist a topic model to disk.

    Trees are written as JSONL (default ``topic_tree.jsonl``); graphs as JSON
    (default ``topic_graph.json``). The explicit override wins over the
    config-supplied path, which wins over the default.

    Args:
        topic_model: TopicModel to save (Tree or Graph)
        config: DeepFabricConfig object
        topics_save_as: Override path for saving topics

    Raises:
        ConfigurationError: If saving fails
    """
    tui = get_tui()

    if isinstance(topic_model, Tree):
        try:
            destination = topics_save_as or config.topics.save_as or "topic_tree.jsonl"
            topic_model.save(destination)
            tui.success(f"Topic tree saved to {destination}")
            tui.info(f"Total paths: {len(topic_model.tree_paths)}")
        except Exception as e:
            raise ConfigurationError(f"Error saving topic tree: {str(e)}") from e

    elif isinstance(topic_model, Graph):
        try:
            destination = topics_save_as or config.topics.save_as or "topic_graph.json"
            topic_model.save(destination)
            tui.success(f"Topic graph saved to {destination}")
        except Exception as e:
            raise ConfigurationError(f"Error saving topic graph: {str(e)}") from e
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class TopicModel(ABC):
    """Abstract base class for topic models like Tree and Graph.

    Concrete subclasses must provide an async builder and a way to enumerate
    every root-to-leaf topic path.
    """

    @abstractmethod
    async def build_async(self) -> None:
        """Asynchronously build the topic model."""
        raise NotImplementedError

    def build(self) -> None:  # pragma: no cover - legacy compatibility
        """Deprecated synchronous entry point kept for legacy compatibility.

        Always raises: the synchronous API was removed in favor of
        build_async().

        Raises:
            RuntimeError: Unconditionally, directing callers to build_async().
        """
        msg = "TopicModel.build() is no longer supported. Use build_async() instead."
        raise RuntimeError(msg)

    @abstractmethod
    def get_all_paths(self) -> list[list[str]]:
        """Returns all the paths in the topic model."""
        raise NotImplementedError
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"""DeepFabric training metrics logging.
|
|
2
|
+
|
|
3
|
+
This module provides integration with HuggingFace Trainer and TRL trainers
|
|
4
|
+
to log training metrics to the DeepFabric SaaS backend.
|
|
5
|
+
|
|
6
|
+
Features:
|
|
7
|
+
- Non-blocking async metrics sending
|
|
8
|
+
- Notebook-friendly API key prompts (like wandb)
|
|
9
|
+
- Graceful handling of failures without impacting training
|
|
10
|
+
|
|
11
|
+
Usage:
|
|
12
|
+
from deepfabric.training import DeepFabricCallback
|
|
13
|
+
|
|
14
|
+
trainer = Trainer(
|
|
15
|
+
model=model,
|
|
16
|
+
args=training_args,
|
|
17
|
+
train_dataset=train_dataset,
|
|
18
|
+
)
|
|
19
|
+
trainer.add_callback(DeepFabricCallback(trainer))
|
|
20
|
+
trainer.train()
|
|
21
|
+
|
|
22
|
+
Environment Variables:
|
|
23
|
+
DEEPFABRIC_API_KEY: API key for authentication
|
|
24
|
+
DEEPFABRIC_API_URL: SaaS backend URL (default: https://api.deepfabric.ai)
|
|
25
|
+
"""
|
|
26
|
+
|
|
27
|
+
from __future__ import annotations
|
|
28
|
+
|
|
29
|
+
from .callback import DeepFabricCallback
|
|
30
|
+
from .metrics_sender import MetricsSender
|
|
31
|
+
|
|
32
|
+
__all__ = [
|
|
33
|
+
"DeepFabricCallback",
|
|
34
|
+
"MetricsSender",
|
|
35
|
+
]
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
"""API key prompt for notebooks and terminals."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
|
|
10
|
+
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
# Cache for API key to avoid repeated prompts
|
|
13
|
+
_api_key_cache: str | None = None
|
|
14
|
+
_api_key_checked: bool = False
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _is_notebook() -> bool:
|
|
18
|
+
"""Check if running in Jupyter/Colab notebook."""
|
|
19
|
+
try:
|
|
20
|
+
from IPython import get_ipython # type: ignore # noqa: PLC0415
|
|
21
|
+
|
|
22
|
+
shell = get_ipython()
|
|
23
|
+
if shell is None:
|
|
24
|
+
is_nb = False
|
|
25
|
+
else:
|
|
26
|
+
shell_name = shell.__class__.__name__
|
|
27
|
+
# ZMQInteractiveShell = Jupyter, Shell = Colab
|
|
28
|
+
is_nb = shell_name in ("ZMQInteractiveShell", "Shell", "Google Colab")
|
|
29
|
+
except (NameError, AttributeError, ImportError):
|
|
30
|
+
return False
|
|
31
|
+
else:
|
|
32
|
+
return is_nb
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _is_colab() -> bool:
|
|
36
|
+
"""Check if running in Google Colab specifically."""
|
|
37
|
+
try:
|
|
38
|
+
import google.colab # type: ignore # noqa: F401, PLC0415
|
|
39
|
+
except ImportError:
|
|
40
|
+
return False
|
|
41
|
+
else:
|
|
42
|
+
return True
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _is_interactive_terminal() -> bool:
|
|
46
|
+
"""Check if running in interactive terminal."""
|
|
47
|
+
try:
|
|
48
|
+
return sys.stdin is not None and sys.stdin.isatty()
|
|
49
|
+
except Exception:
|
|
50
|
+
return False
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _show_notebook_prompt() -> str | None:
    """Show inline widget in Jupyter/Colab (like wandb).

    Renders a styled header plus a password field with Submit/Skip buttons,
    then polls until the user responds or a 5-minute timeout elapses.

    Returns:
        API key string or None if skipped
    """
    try:
        import ipywidgets as widgets  # type: ignore # noqa: PLC0415

        from IPython.display import HTML, display  # type: ignore # noqa: PLC0415
    except ImportError:
        logger.debug("ipywidgets not available, falling back to terminal prompt")
        return None

    # Result container for callback; mutated by the button handlers below so
    # the polling loop can observe the outcome.
    result = {"key": None, "submitted": False}

    # Create styled input widget
    api_key_input = widgets.Password(
        placeholder="Enter your DeepFabric API key",
        description="",
        layout=widgets.Layout(width="300px"),
        style={"description_width": "0px"},
    )

    submit_button = widgets.Button(
        description="Submit",
        button_style="primary",
        layout=widgets.Layout(width="80px"),
    )

    skip_button = widgets.Button(
        description="Skip",
        button_style="",
        layout=widgets.Layout(width="80px"),
        tooltip="Disable logging for this session",
    )

    status_output = widgets.Output()

    def on_submit(_button):  # noqa: ARG001
        # Accept only non-empty keys; keep prompting otherwise.
        key = api_key_input.value.strip()
        if key:
            result["key"] = key
            result["submitted"] = True
            with status_output:
                status_output.clear_output()
                print("API key set. Training metrics will be logged to DeepFabric.")
        else:
            with status_output:
                status_output.clear_output()
                print("Please enter a valid API key.")

    def on_skip(_button):  # noqa: ARG001
        # Explicit opt-out: record a submission with no key.
        result["key"] = None
        result["submitted"] = True
        with status_output:
            status_output.clear_output()
            print("Logging disabled for this session.")

    submit_button.on_click(on_submit)
    skip_button.on_click(on_skip)

    # Display styled header
    display(
        HTML(
            """
    <div style="background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
                border-radius: 8px; padding: 16px; margin: 8px 0; color: white;">
        <div style="display: flex; align-items: center; margin-bottom: 8px;">
            <svg width="24" height="24" viewBox="0 0 24 24" fill="white" style="margin-right: 8px;">
                <path d="M12 2L2 7l10 5 10-5-10-5zM2 17l10 5 10-5M2 12l10 5 10-5"/>
            </svg>
            <span style="font-size: 18px; font-weight: 600;">DeepFabric Training Metrics</span>
        </div>
        <p style="margin: 0; font-size: 14px; opacity: 0.9;">
            Enter your API key to automatically log training metrics.<br>
            Get your key at <a href="https://app.deepfabric.ai/settings/api"
               target="_blank" style="color: #fff; text-decoration: underline;">
               app.deepfabric.ai/settings/api</a>
        </p>
    </div>
    """
        )
    )

    # Display input widgets
    input_box = widgets.HBox(
        [api_key_input, submit_button, skip_button],
        layout=widgets.Layout(margin="8px 0"),
    )
    display(input_box)
    display(status_output)

    # Wait for user input (with timeout)
    timeout = 300  # 5 minutes
    start = time.monotonic()

    # NOTE(review): this sleep-based busy-wait blocks the kernel while the
    # cell runs; in some Jupyter frontends widget click events are only
    # delivered when the kernel is idle, which could make this loop always
    # time out — confirm behavior in the targeted environments.
    while not result["submitted"] and (time.monotonic() - start) < timeout:
        time.sleep(0.1)

    if not result["submitted"]:
        # Timeout - treat as skip
        with status_output:
            status_output.clear_output()
            print("Prompt timed out. Logging disabled for this session.")
        return None

    return result["key"]
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _show_colab_prompt() -> str | None:
    """Show Colab-specific prompt using getpass.

    Displays a styled HTML header, then reads the key via getpass so the
    value is not echoed. Any failure (missing IPython, interrupted input)
    degrades to None rather than raising.

    Returns:
        API key string or None if skipped
    """
    try:
        from getpass import getpass  # noqa: PLC0415

        from IPython.display import HTML, display  # type: ignore # noqa: PLC0415

        display(
            HTML(
                """
    <div style="background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
                border-radius: 8px; padding: 16px; margin: 8px 0; color: white;">
        <div style="font-size: 18px; font-weight: 600; margin-bottom: 8px;">
            DeepFabric Training Metrics
        </div>
        <p style="margin: 0; font-size: 14px; opacity: 0.9;">
            Enter your API key below to log training metrics.<br>
            Get your key at <a href="https://app.deepfabric.ai/settings/api"
               target="_blank" style="color: #fff;">app.deepfabric.ai/settings/api</a><br>
            <em>Press Enter without typing to skip.</em>
        </p>
    </div>
    """
            )
        )

        key = getpass("DeepFabric API Key: ").strip()
    except Exception as e:
        logger.debug(f"Colab prompt failed: {e}")
        return None
    else:
        # Empty input is treated as an explicit skip.
        return key if key else None
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def _show_terminal_prompt() -> str | None:
    """Prompt for a DeepFabric API key on an interactive terminal.

    Returns:
        API key string, or None when the user skips or interrupts.
    """
    separator = "=" * 60
    # Banner printed line-by-line so output matches a plain-print layout.
    banner = [
        "",
        separator,
        " DeepFabric Training Metrics",
        separator,
        "",
        " Enter your API key to log training metrics to DeepFabric.",
        " Get your key at: https://app.deepfabric.ai/settings/api",
        "",
        " Press Enter without typing to skip (disable logging).",
        "",
        separator,
    ]
    for line in banner:
        print(line)

    try:
        key = input(" API Key: ").strip()
    except (EOFError, KeyboardInterrupt):
        # Ctrl-C / Ctrl-D counts as a skip.
        print()
        print(" Logging disabled for this session.")
        print()
        return None

    print()
    if key:
        print(" API key set. Training metrics will be logged.")
    else:
        print(" Logging disabled for this session.")
    print()
    return key if key else None
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def get_api_key(force_prompt: bool = False) -> str | None:
    """Get API key from environment or prompt user.

    Priority:
    1. DEEPFABRIC_API_KEY environment variable
    2. Cached value from previous prompt
    3. Interactive prompt (notebook widget or terminal input)
    4. None (silently disable logging in non-interactive environments)

    Args:
        force_prompt: If True, prompt even if env var is set

    Returns:
        API key string or None if unavailable/skipped
    """
    global _api_key_cache, _api_key_checked  # noqa: PLW0603

    # Check environment variable first
    env_key = os.getenv("DEEPFABRIC_API_KEY")
    if env_key and not force_prompt:
        return env_key

    # Return cached value if already checked
    if _api_key_checked and not force_prompt:
        return _api_key_cache

    # Mark as checked to avoid repeated prompts
    _api_key_checked = True

    # Try interactive prompts
    if _is_colab():
        try:
            _api_key_cache = _show_colab_prompt()
        except Exception as e:
            logger.debug(f"Colab prompt failed: {e}")
        else:
            return _api_key_cache

    if _is_notebook():
        try:
            _api_key_cache = _show_notebook_prompt()
        except Exception as e:
            logger.debug(f"Notebook prompt failed: {e}")
        else:
            # NOTE(review): unlike the Colab branch, a skipped notebook prompt
            # (None) falls through to the terminal prompt below — confirm this
            # asymmetry is intentional.
            if _api_key_cache is not None:
                return _api_key_cache

    if _is_interactive_terminal():
        try:
            _api_key_cache = _show_terminal_prompt()
        except Exception as e:
            logger.debug(f"Terminal prompt failed: {e}")
        else:
            return _api_key_cache

    # Non-interactive environment - silently disable
    logger.debug("Non-interactive environment, auto-logging disabled")
    _api_key_cache = None
    return None
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def clear_api_key_cache() -> None:
    """Clear the cached API key (for testing).

    Resets both the cached key and the "already prompted" flag so the next
    get_api_key() call re-evaluates the environment and may prompt again.
    """
    global _api_key_cache, _api_key_checked  # noqa: PLW0603
    _api_key_cache = None
    _api_key_checked = False
|