synth-ai 0.2.2.dev0__py3-none-any.whl → 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synth_ai/cli/__init__.py +66 -0
- synth_ai/cli/balance.py +205 -0
- synth_ai/cli/calc.py +70 -0
- synth_ai/cli/demo.py +74 -0
- synth_ai/{cli.py → cli/legacy_root_backup.py} +60 -15
- synth_ai/cli/man.py +103 -0
- synth_ai/cli/recent.py +126 -0
- synth_ai/cli/root.py +184 -0
- synth_ai/cli/status.py +126 -0
- synth_ai/cli/traces.py +136 -0
- synth_ai/cli/watch.py +508 -0
- synth_ai/config/base_url.py +53 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/analyze_semantic_words_markdown.py +252 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/filter_traces_sft_duckdb_v2_backup.py +413 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/filter_traces_sft_turso.py +646 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/kick_off_ft_synth.py +34 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/test_crafter_react_agent_lm_synth.py +1740 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/test_crafter_react_agent_lm_synth_v2_backup.py +1318 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/filter_traces_sft_duckdb_v2_backup.py +386 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/filter_traces_sft_turso.py +580 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/run_rollouts_for_models_and_compare_v2_backup.py +1352 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/test_crafter_react_agent_openai_v2_backup.py +2551 -0
- synth_ai/environments/examples/crafter_classic/agent_demos/crafter_trace_evaluation.py +1 -1
- synth_ai/environments/examples/crafter_classic/agent_demos/old/traces/session_crafter_episode_16_15227b68-2906-416f-acc4-d6a9b4fa5828_20250725_001154.json +1363 -1
- synth_ai/environments/examples/crafter_classic/agent_demos/test_crafter_react_agent.py +3 -3
- synth_ai/environments/examples/enron/dataset/corbt___enron_emails_sample_questions/default/0.0.0/293c9fe8170037e01cc9cf5834e0cd5ef6f1a6bb/dataset_info.json +1 -0
- synth_ai/environments/examples/nethack/helpers/achievements.json +64 -0
- synth_ai/environments/examples/red/units/test_exploration_strategy.py +1 -1
- synth_ai/environments/examples/red/units/test_menu_bug_reproduction.py +5 -5
- synth_ai/environments/examples/red/units/test_movement_debug.py +2 -2
- synth_ai/environments/examples/red/units/test_retry_movement.py +1 -1
- synth_ai/environments/examples/sokoban/engine_helpers/vendored/envs/available_envs.json +122 -0
- synth_ai/environments/examples/sokoban/verified_puzzles.json +54987 -0
- synth_ai/experimental/synth_oss.py +446 -0
- synth_ai/learning/core.py +21 -0
- synth_ai/learning/gateway.py +4 -0
- synth_ai/learning/prompts/mipro.py +0 -0
- synth_ai/lm/__init__.py +3 -0
- synth_ai/lm/core/main.py +4 -0
- synth_ai/lm/core/main_v3.py +68 -13
- synth_ai/lm/core/vendor_clients.py +4 -0
- synth_ai/lm/provider_support/openai.py +11 -2
- synth_ai/lm/vendors/base.py +7 -0
- synth_ai/lm/vendors/openai_standard.py +339 -4
- synth_ai/lm/vendors/openai_standard_responses.py +243 -0
- synth_ai/lm/vendors/synth_client.py +155 -5
- synth_ai/lm/warmup.py +54 -17
- synth_ai/tracing/__init__.py +18 -0
- synth_ai/tracing_v1/__init__.py +29 -14
- synth_ai/tracing_v3/config.py +13 -7
- synth_ai/tracing_v3/db_config.py +6 -6
- synth_ai/tracing_v3/turso/manager.py +8 -8
- synth_ai/tui/__main__.py +13 -0
- synth_ai/tui/dashboard.py +329 -0
- synth_ai/v0/tracing/__init__.py +0 -0
- synth_ai/{tracing → v0/tracing}/base_client.py +3 -3
- synth_ai/{tracing → v0/tracing}/client_manager.py +1 -1
- synth_ai/{tracing → v0/tracing}/context.py +1 -1
- synth_ai/{tracing → v0/tracing}/decorators.py +11 -11
- synth_ai/v0/tracing/events/__init__.py +0 -0
- synth_ai/{tracing → v0/tracing}/events/manage.py +4 -4
- synth_ai/{tracing → v0/tracing}/events/scope.py +6 -6
- synth_ai/{tracing → v0/tracing}/events/store.py +3 -3
- synth_ai/{tracing → v0/tracing}/immediate_client.py +6 -6
- synth_ai/{tracing → v0/tracing}/log_client_base.py +2 -2
- synth_ai/{tracing → v0/tracing}/retry_queue.py +3 -3
- synth_ai/{tracing → v0/tracing}/trackers.py +2 -2
- synth_ai/{tracing → v0/tracing}/upload.py +4 -4
- synth_ai/v0/tracing_v1/__init__.py +16 -0
- synth_ai/{tracing_v1 → v0/tracing_v1}/base_client.py +3 -3
- synth_ai/{tracing_v1 → v0/tracing_v1}/client_manager.py +1 -1
- synth_ai/{tracing_v1 → v0/tracing_v1}/context.py +1 -1
- synth_ai/{tracing_v1 → v0/tracing_v1}/decorators.py +11 -11
- synth_ai/v0/tracing_v1/events/__init__.py +0 -0
- synth_ai/{tracing_v1 → v0/tracing_v1}/events/manage.py +4 -4
- synth_ai/{tracing_v1 → v0/tracing_v1}/events/scope.py +6 -6
- synth_ai/{tracing_v1 → v0/tracing_v1}/events/store.py +3 -3
- synth_ai/{tracing_v1 → v0/tracing_v1}/immediate_client.py +6 -6
- synth_ai/{tracing_v1 → v0/tracing_v1}/log_client_base.py +2 -2
- synth_ai/{tracing_v1 → v0/tracing_v1}/retry_queue.py +3 -3
- synth_ai/{tracing_v1 → v0/tracing_v1}/trackers.py +2 -2
- synth_ai/{tracing_v1 → v0/tracing_v1}/upload.py +4 -4
- {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/METADATA +98 -4
- {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/RECORD +98 -62
- /synth_ai/{tracing/events/__init__.py → environments/examples/crafter_classic/debug_translation.py} +0 -0
- /synth_ai/{tracing_v1/events/__init__.py → learning/prompts/gepa.py} +0 -0
- /synth_ai/{tracing → v0/tracing}/abstractions.py +0 -0
- /synth_ai/{tracing → v0/tracing}/config.py +0 -0
- /synth_ai/{tracing → v0/tracing}/local.py +0 -0
- /synth_ai/{tracing → v0/tracing}/utils.py +0 -0
- /synth_ai/{tracing_v1 → v0/tracing_v1}/abstractions.py +0 -0
- /synth_ai/{tracing_v1 → v0/tracing_v1}/config.py +0 -0
- /synth_ai/{tracing_v1 → v0/tracing_v1}/local.py +0 -0
- /synth_ai/{tracing_v1 → v0/tracing_v1}/utils.py +0 -0
- {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/WHEEL +0 -0
- {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/entry_points.txt +0 -0
- {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/licenses/LICENSE +0 -0
- {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/top_level.txt +0 -0
synth_ai/tracing_v3/db_config.py
CHANGED
@@ -18,7 +18,7 @@ class DatabaseConfig:
     """Centralized database configuration management."""

     # Default values from serve.sh
-    DEFAULT_DB_FILE = "synth_ai.db"
+    DEFAULT_DB_FILE = "traces/v3/synth_ai.db"
     DEFAULT_HTTP_PORT = 8080

     def __init__(
@@ -58,15 +58,15 @@ class DatabaseConfig:

         if os.path.exists(sqld_data_path):
             # sqld is managing the database
-            logger.
+            logger.debug(f"✅ Using sqld-managed database at: {sqld_data_path}")
             actual_db_path = sqld_data_path
         else:
             # Direct SQLite file
             if not os.path.exists(abs_path):
-                logger.
-                logger.
+                logger.debug(f"⚠️ Database file not found at: {abs_path}")
+                logger.debug("🔧 Make sure to run './serve.sh' to start the turso/sqld service")
             else:
-                logger.
+                logger.debug(f"📁 Using direct SQLite file at: {abs_path}")
             actual_db_path = abs_path

         # SQLite URLs need 3 slashes for absolute paths
@@ -147,7 +147,7 @@ def get_default_db_config() -> DatabaseConfig:
         # sqld is already running, don't start a new one
         sqld_running = True
         use_sqld = False
-        logger.
+        logger.debug(f"✅ Detected sqld already running on port {sqld_port}")
     except Exception as e:
         logger.debug(f"Could not check for sqld process: {e}")
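The net effect of this change is that the default trace database moves out of the package root into traces/v3/, and the path/sqld discovery messages drop from INFO to DEBUG. A minimal sketch of what that looks like from user code (not part of the package; it assumes DatabaseConfig and get_default_db_config keep the interfaces visible in this hunk):

import logging
from synth_ai.tracing_v3.db_config import DatabaseConfig, get_default_db_config

print(DatabaseConfig.DEFAULT_DB_FILE)     # "traces/v3/synth_ai.db" in 0.2.3 (was "synth_ai.db")

logging.basicConfig(level=logging.DEBUG)  # needed if you still want to see the discovery messages
config = get_default_db_config()          # detects a running sqld/turso service, else a direct SQLite file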
synth_ai/tracing_v3/turso/manager.py
CHANGED
@@ -91,7 +91,7 @@ class AsyncSQLTraceManager:
         concurrent scenarios.
         """
         if self.engine is None:
-            logger.
+            logger.debug(f"🔗 Initializing database connection to: {self.db_url}")

             # For SQLite, use NullPool to avoid connection pool issues
             # SQLite doesn't handle concurrent connections well, so we create
@@ -103,12 +103,12 @@ class AsyncSQLTraceManager:

             # Check if database file exists
             if not os.path.exists(db_path):
-                logger.
-                logger.
+                logger.debug(f"⚠️ Database file not found: {db_path}")
+                logger.debug(
                     "🔧 Make sure './serve.sh' is running to start the turso/sqld service"
                 )
             else:
-                logger.
+                logger.debug(f"✅ Found database file: {db_path}")

             # Set a high busy timeout to handle concurrent access
             # This allows SQLite to wait instead of immediately failing
@@ -145,7 +145,7 @@ class AsyncSQLTraceManager:
         if self._schema_ready:
             return

-        logger.
+        logger.debug("📊 Initializing database schema...")

         async with self.engine.begin() as conn:
             # Use a transaction to ensure atomic schema creation
@@ -154,14 +154,14 @@ class AsyncSQLTraceManager:
                 await conn.run_sync(
                     lambda sync_conn: Base.metadata.create_all(sync_conn, checkfirst=True)
                 )
-                logger.info("✅ Database schema created/verified successfully")
+                #logger.info("✅ Database schema created/verified successfully")
             except Exception as e:
                 # If tables already exist, that's fine - another worker created them
                 if "already exists" not in str(e):
                     logger.error(f"❌ Failed to create database schema: {e}")
                     raise
                 else:
-                    logger.
+                    logger.debug("✅ Database schema already exists")

         # Enable foreign keys for SQLite - critical for data integrity
         # This must be done for each connection in SQLite
@@ -183,7 +183,7 @@ class AsyncSQLTraceManager:
                         logger.warning(f"Could not create view {view_name}: {e}")

         self._schema_ready = True
-        logger.
+        #logger.debug("🎯 Database ready for use!")

     @asynccontextmanager
     async def session(self):
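The hunks above quiet AsyncSQLTraceManager's schema-setup logging (INFO lines become DEBUG or are commented out). A minimal usage sketch, mirroring how the new TUI dashboard below drives the manager; the database URL is an example value, not a guaranteed default:

import asyncio
from synth_ai.tracing_v3.turso.manager import AsyncSQLTraceManager

async def peek_sessions():
    manager = AsyncSQLTraceManager("sqlite+aiosqlite:///traces/v3/synth_ai.db")  # example URL
    await manager.initialize()   # schema creation/verification now logs at DEBUG
    df = await manager.query_traces("SELECT session_id FROM session_traces LIMIT 5")
    await manager.close()
    return df

print(asyncio.run(peek_sessions()))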
synth_ai/tui/dashboard.py
ADDED
@@ -0,0 +1,329 @@
#!/usr/bin/env python3
"""
Interactive TUI Dashboard for Synth AI experiments.

Launch with: python -m synth_ai.tui.dashboard
"""

import asyncio
import logging
from datetime import datetime
from typing import List, Optional, Dict, Any
from urllib.parse import urlparse

from textual.app import App, ComposeResult
from textual.containers import Container, Horizontal, Vertical
from textual.widgets import (
    Header, Footer, DataTable, Static, Input, Button,
    TabbedContent, TabPane, Label, ProgressBar
)
from textual.reactive import reactive
from textual.binding import Binding
from textual import on
from textual.timer import Timer

from ..tracing_v3.turso.manager import AsyncSQLTraceManager

class ExperimentRow:
    """Data structure for experiment display."""
    def __init__(self, exp_id: str, name: str, description: str,
                 created_at: datetime, sessions: int, events: int,
                 messages: int, cost: float, tokens: int):
        self.exp_id = exp_id
        self.name = name or "Unnamed"
        self.description = description or ""
        self.created_at = created_at
        self.sessions = sessions
        self.events = events
        self.messages = messages
        self.cost = cost
        self.tokens = tokens

    def to_row(self) -> List[str]:
        """Convert to table row format."""
        return [
            self.exp_id[:8],  # Shortened ID
            self.name[:20],  # Truncated name
            str(self.sessions),
            str(self.events),
            str(self.messages),
            f"${self.cost:.4f}",
            f"{self.tokens:,}",
            self.created_at.strftime("%H:%M")
        ]

class ExperimentTable(DataTable):
    """Custom DataTable for experiments with refresh capability."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.experiments: List[ExperimentRow] = []
        self.selected_exp_id: Optional[str] = None

    def setup_table(self):
        """Initialize table columns."""
        self.add_columns(
            "ID", "Name", "Sessions", "Events",
            "Messages", "Cost", "Tokens", "Time"
        )

    async def refresh_data(self, db_manager: AsyncSQLTraceManager):
        """Refresh experiment data from database."""
        try:
            # Get experiment list with stats using raw query
            df = await db_manager.query_traces("""
                SELECT
                    e.experiment_id,
                    e.name,
                    e.description,
                    e.created_at,
                    COUNT(DISTINCT st.session_id) as num_sessions,
                    COUNT(DISTINCT ev.id) as num_events,
                    COUNT(DISTINCT m.id) as num_messages,
                    SUM(CASE WHEN ev.event_type = 'cais' THEN ev.cost_usd ELSE 0 END) / 100.0 as total_cost,
                    SUM(CASE WHEN ev.event_type = 'cais' THEN ev.total_tokens ELSE 0 END) as total_tokens
                FROM experiments e
                LEFT JOIN session_traces st ON e.experiment_id = st.experiment_id
                LEFT JOIN events ev ON st.session_id = ev.session_id
                LEFT JOIN messages m ON st.session_id = m.session_id
                GROUP BY e.experiment_id, e.name, e.description, e.created_at
                ORDER BY e.created_at DESC
            """)

            self.experiments.clear()
            self.clear()

            if not df.empty:
                for _, row in df.iterrows():
                    exp_row = ExperimentRow(
                        exp_id=row['experiment_id'],
                        name=row['name'],
                        description=row['description'],
                        created_at=row['created_at'],
                        sessions=int(row['num_sessions'] or 0),
                        events=int(row['num_events'] or 0),
                        messages=int(row['num_messages'] or 0),
                        cost=float(row['total_cost'] or 0.0),
                        tokens=int(row['total_tokens'] or 0)
                    )
                    self.experiments.append(exp_row)
                    self.add_row(*exp_row.to_row(), key=exp_row.exp_id)

        except Exception as e:
            logging.error(f"Failed to refresh experiments: {e}")

    def get_selected_experiment(self) -> Optional[ExperimentRow]:
        """Get currently selected experiment."""
        if self.cursor_row >= 0 and self.cursor_row < len(self.experiments):
            return self.experiments[self.cursor_row]
        return None

class ExperimentDetail(Static):
    """Detailed view of selected experiment."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.current_experiment: Optional[ExperimentRow] = None

    def update_experiment(self, experiment: Optional[ExperimentRow]):
        """Update the displayed experiment details."""
        self.current_experiment = experiment
        if experiment:
            details = f"""
🔬 **{experiment.name}**
ID: {experiment.exp_id}
Description: {experiment.description or 'No description'}

📊 **Statistics**
Sessions: {experiment.sessions}
Events: {experiment.events}
Messages: {experiment.messages}
Cost: ${experiment.cost:.4f}
Tokens: {experiment.tokens:,}

🕒 **Created**: {experiment.created_at.strftime('%Y-%m-%d %H:%M:%S')}
""".strip()
        else:
            details = "Select an experiment to view details"

        self.update(details)

class DatabaseStatus(Static):
    """Display database connection status."""

    connection_status = reactive("🔴 Disconnected")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def render(self) -> str:
        return f"Database: {self.connection_status}"

    def set_connected(self, url: str):
        parsed = urlparse(url)
        host_info = f"{parsed.hostname}:{parsed.port}" if parsed.port else str(parsed.hostname)
        self.connection_status = f"🟢 Connected ({host_info})"

    def set_disconnected(self, error: str = ""):
        error_text = f" - {error}" if error else ""
        self.connection_status = f"🔴 Disconnected{error_text}"

class SynthDashboard(App):
    """Main Synth AI TUI Dashboard application."""

    CSS = """
    Screen {
        layout: grid;
        grid-size: 2 3;
        grid-gutter: 1;
    }

    #header {
        column-span: 2;
        height: 3;
    }

    #experiments-table {
        row-span: 2;
    }

    #experiment-detail {
        height: 1fr;
    }

    #status-bar {
        column-span: 2;
        height: 3;
    }

    ExperimentTable {
        height: 100%;
    }

    ExperimentDetail {
        border: solid $primary;
        padding: 1;
        height: 100%;
    }

    DatabaseStatus {
        height: 1;
        padding: 0 1;
    }
    """

    BINDINGS = [
        Binding("q", "quit", "Quit"),
        Binding("r", "refresh", "Refresh"),
        Binding("d", "toggle_debug", "Debug"),
        ("ctrl+c", "quit", "Quit"),
    ]

    def __init__(self, db_url: str = "sqlite+aiosqlite:///./synth_ai.db/dbs/default/data"):
        super().__init__()
        self.db_url = db_url
        self.db_manager: Optional[AsyncSQLTraceManager] = None
        self.refresh_timer: Optional[Timer] = None

    def compose(self) -> ComposeResult:
        """Create the UI layout."""
        yield Header(show_clock=True)

        with Container(id="experiments-table"):
            yield Static("🧪 Experiments", classes="section-title")
            yield ExperimentTable(id="experiments")

        with Container(id="experiment-detail"):
            yield Static("📋 Details", classes="section-title")
            yield ExperimentDetail(id="detail")

        with Container(id="status-bar"):
            yield DatabaseStatus(id="db-status")
            yield Footer()

    async def on_mount(self) -> None:
        """Initialize the app when mounted."""
        # Setup database connection
        try:
            self.db_manager = AsyncSQLTraceManager(self.db_url)
            await self.db_manager.initialize()

            db_status = self.query_one("#db-status", DatabaseStatus)
            db_status.set_connected(self.db_url)

        except Exception as e:
            logging.error(f"Failed to connect to database: {e}")
            db_status = self.query_one("#db-status", DatabaseStatus)
            db_status.set_disconnected(str(e))

        # Setup experiment table
        exp_table = self.query_one("#experiments", ExperimentTable)
        exp_table.setup_table()

        # Initial data load
        await self.action_refresh()

        # Start auto-refresh timer (every 5 seconds)
        self.refresh_timer = self.set_interval(5.0, self._auto_refresh)

    async def _auto_refresh(self) -> None:
        """Auto-refresh data periodically."""
        if self.db_manager:
            exp_table = self.query_one("#experiments", ExperimentTable)
            await exp_table.refresh_data(self.db_manager)

    async def action_refresh(self) -> None:
        """Manual refresh action."""
        if self.db_manager:
            exp_table = self.query_one("#experiments", ExperimentTable)
            await exp_table.refresh_data(self.db_manager)

    async def action_quit(self) -> None:
        """Quit the application."""
        if self.refresh_timer:
            self.refresh_timer.stop()
        if self.db_manager:
            await self.db_manager.close()
        self.exit()

    def action_toggle_debug(self) -> None:
        """Toggle debug mode."""
        # Could add debug panel or logging level toggle
        pass

    @on(DataTable.RowHighlighted, "#experiments")
    def on_experiment_selected(self, event: DataTable.RowHighlighted) -> None:
        """Handle experiment selection."""
        exp_table = self.query_one("#experiments", ExperimentTable)
        selected_exp = exp_table.get_selected_experiment()

        detail_panel = self.query_one("#detail", ExperimentDetail)
        detail_panel.update_experiment(selected_exp)

def main():
    """Main entry point for the dashboard."""
    import argparse

    parser = argparse.ArgumentParser(description="Synth AI Interactive Dashboard")
    parser.add_argument(
        "-u", "--url",
        default="sqlite+libsql://http://127.0.0.1:8080",
        help="Database URL (default: sqlite+libsql://http://127.0.0.1:8080)"
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Enable debug logging"
    )

    args = parser.parse_args()

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)

    # Run the dashboard
    app = SynthDashboard(db_url=args.url)
    app.run()

if __name__ == "__main__":
    main()
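The dashboard can also be started programmatically instead of via `python -m synth_ai.tui.dashboard`; a minimal sketch (not part of the package), reusing the argparse default URL shown above:

from synth_ai.tui.dashboard import SynthDashboard

app = SynthDashboard(db_url="sqlite+libsql://http://127.0.0.1:8080")
app.run()  # "r" refreshes, "q" or Ctrl+C quits, per the BINDINGS defined above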
synth_ai/{tracing → v0/tracing}/base_client.py
CHANGED
@@ -4,9 +4,9 @@ from abc import ABC, abstractmethod
 from dataclasses import dataclass
 from typing import Any, Dict, Optional

-from
-from
-from
+from .abstractions import Event
+from .config import TracingConfig
+from .events.store import event_store

 logger = logging.getLogger(__name__)

synth_ai/{tracing → v0/tracing}/decorators.py
CHANGED
@@ -7,9 +7,9 @@ from functools import wraps
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Literal, ParamSpec, TypeVar, Union

 if TYPE_CHECKING:
-    from
+    from .trackers import SynthTrackerAsync, SynthTrackerSync

-from
+from .abstractions import (
     AgentComputeStep,
     ArbitraryInputs,
     ArbitraryOutputs,
@@ -18,28 +18,28 @@ from synth_ai.tracing.abstractions import (
     MessageInputs,
     MessageOutputs,
 )
-from
-from
+from .config import EventManagement, LoggingMode, Origin, TracingConfig
+from .context import (
     get_current_context,
     trace_context,
 )
-from
-from
-from
+from .events.manage import set_current_event
+from .events.store import event_store
+from .immediate_client import (
     AsyncImmediateLogClient,
     ImmediateLogClient,
 )
-from
+from .local import (
     _local,
     active_events_var,
     logger,
 )
-from
-from
+from .retry_queue import initialize_retry_queue, retry_queue
+from .trackers import (
     synth_tracker_async,
     synth_tracker_sync,
 )
-from
+from .utils import get_system_id

 logger = logging.getLogger(__name__)

synth_ai/{tracing → v0/tracing}/events/manage.py
CHANGED
@@ -1,9 +1,9 @@
 import time
 from typing import Literal, Optional

-from
-from
-from
+from ..abstractions import Event
+from ..events.store import event_store
+from ..local import _local, logger


 def get_current_event(event_type: str) -> "Event":
@@ -75,7 +75,7 @@ def set_current_event(event: Optional["Event"], decorator_type: Literal["sync",
         _local.active_events[unique_key] = event  # Unique key for async

     else:
-        from
+        from ..local import (
             active_events_var,
             system_id_var,
             system_instance_id_var,
synth_ai/{tracing → v0/tracing}/events/scope.py
CHANGED
@@ -1,20 +1,20 @@
 import time
 from contextlib import contextmanager

-from
-from
-from
+from ..abstractions import Event
+from ..config import LoggingMode
+from ..decorators import (
     _local,
     clear_current_event,
     get_tracing_config,
     set_current_event,
 )
-from
-from
+from ..events.store import event_store
+from ..immediate_client import (
     AsyncImmediateLogClient,
     ImmediateLogClient,
 )
-from
+from ..local import (
     system_id_var,
     system_instance_id_var,
     system_name_var,
synth_ai/{tracing → v0/tracing}/events/store.py
CHANGED
@@ -4,8 +4,8 @@ import time
 from threading import RLock  # Change this import
 from typing import Any, Dict, List

-from
-from
+from ..abstractions import Event, EventPartitionElement, SystemTrace
+from ..local import (  # Import context variables
     _local,
     active_events_var,
 )
@@ -36,7 +36,7 @@ class EventStore:
         # Get system_instance_metadata from context if available
         system_instance_metadata = {}
         try:
-            from
+            from ..context import get_current_context

             context = get_current_context()
             if (
synth_ai/{tracing → v0/tracing}/immediate_client.py
CHANGED
@@ -5,10 +5,10 @@ from typing import Dict

 import httpx

-from
-from
-from
-from
+from .abstractions import Event
+from .client_manager import ClientManager
+from .config import TracingConfig
+from .log_client_base import BaseAsyncLogClient, BaseLogClient

 logger = logging.getLogger(__name__)

@@ -22,7 +22,7 @@ class ImmediateLogClient(BaseLogClient):

     def send_event(self, event: Event, system_info: Dict[str, str]) -> bool:
         """Send a single event with retries and fallback"""
-        from
+        from .retry_queue import (
             retry_queue,  # Import here to avoid circular import
         )

@@ -73,7 +73,7 @@ class AsyncImmediateLogClient(BaseAsyncLogClient):

     async def send_event(self, event: Event, system_info: Dict[str, str]) -> bool:
         """Send a single event with retries and fallback (async version)"""
-        from
+        from .retry_queue import retry_queue

         if not self.config.api_key:
             logger.error("No API key provided")
synth_ai/{tracing → v0/tracing}/retry_queue.py
CHANGED
@@ -5,8 +5,8 @@ from collections import deque
 from dataclasses import dataclass
 from typing import Dict, List, Optional, Tuple

-from
-from
+from .abstractions import Event
+from .config import TracingConfig

 logger = logging.getLogger(__name__)

@@ -88,7 +88,7 @@ class RetryQueue:
         failure_count = 0

         try:
-            from
+            from .immediate_client import (
                 ImmediateLogClient,  # Import here to avoid circular import
             )

synth_ai/{tracing → v0/tracing}/trackers.py
CHANGED
@@ -4,8 +4,8 @@ from typing import Any, Dict, List, Literal, Optional, Tuple, Union

 from pydantic import BaseModel

-from
-from
+from .config import VALID_TYPES, Message, ModelParams
+from .local import _local

 # Existing SynthTrackerSync and SynthTrackerAsync classes...
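For downstream code, the practical consequence of the moves above is that the legacy tracing modules now live under synth_ai.v0. A migration sketch (not part of the package); whether the new synth_ai/tracing/__init__.py (+18 lines) still re-exports the old names is an assumption, so the explicit v0 path is the safe choice:

from synth_ai.v0.tracing.abstractions import Event
from synth_ai.v0.tracing.events.store import event_store

# Pre-0.2.3 equivalents:
# from synth_ai.tracing.abstractions import Event
# from synth_ai.tracing.events.store import event_store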