synth-ai 0.2.2.dev0__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff compares two publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (98)
  1. synth_ai/cli/__init__.py +66 -0
  2. synth_ai/cli/balance.py +205 -0
  3. synth_ai/cli/calc.py +70 -0
  4. synth_ai/cli/demo.py +74 -0
  5. synth_ai/{cli.py → cli/legacy_root_backup.py} +60 -15
  6. synth_ai/cli/man.py +103 -0
  7. synth_ai/cli/recent.py +126 -0
  8. synth_ai/cli/root.py +184 -0
  9. synth_ai/cli/status.py +126 -0
  10. synth_ai/cli/traces.py +136 -0
  11. synth_ai/cli/watch.py +508 -0
  12. synth_ai/config/base_url.py +53 -0
  13. synth_ai/environments/examples/crafter_classic/agent_demos/analyze_semantic_words_markdown.py +252 -0
  14. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/filter_traces_sft_duckdb_v2_backup.py +413 -0
  15. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/filter_traces_sft_turso.py +646 -0
  16. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/kick_off_ft_synth.py +34 -0
  17. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/test_crafter_react_agent_lm_synth.py +1740 -0
  18. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_modal_ft/test_crafter_react_agent_lm_synth_v2_backup.py +1318 -0
  19. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/filter_traces_sft_duckdb_v2_backup.py +386 -0
  20. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/filter_traces_sft_turso.py +580 -0
  21. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/run_rollouts_for_models_and_compare_v2_backup.py +1352 -0
  22. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_openai_ft/test_crafter_react_agent_openai_v2_backup.py +2551 -0
  23. synth_ai/environments/examples/crafter_classic/agent_demos/crafter_trace_evaluation.py +1 -1
  24. synth_ai/environments/examples/crafter_classic/agent_demos/old/traces/session_crafter_episode_16_15227b68-2906-416f-acc4-d6a9b4fa5828_20250725_001154.json +1363 -1
  25. synth_ai/environments/examples/crafter_classic/agent_demos/test_crafter_react_agent.py +3 -3
  26. synth_ai/environments/examples/enron/dataset/corbt___enron_emails_sample_questions/default/0.0.0/293c9fe8170037e01cc9cf5834e0cd5ef6f1a6bb/dataset_info.json +1 -0
  27. synth_ai/environments/examples/nethack/helpers/achievements.json +64 -0
  28. synth_ai/environments/examples/red/units/test_exploration_strategy.py +1 -1
  29. synth_ai/environments/examples/red/units/test_menu_bug_reproduction.py +5 -5
  30. synth_ai/environments/examples/red/units/test_movement_debug.py +2 -2
  31. synth_ai/environments/examples/red/units/test_retry_movement.py +1 -1
  32. synth_ai/environments/examples/sokoban/engine_helpers/vendored/envs/available_envs.json +122 -0
  33. synth_ai/environments/examples/sokoban/verified_puzzles.json +54987 -0
  34. synth_ai/experimental/synth_oss.py +446 -0
  35. synth_ai/learning/core.py +21 -0
  36. synth_ai/learning/gateway.py +4 -0
  37. synth_ai/learning/prompts/mipro.py +0 -0
  38. synth_ai/lm/__init__.py +3 -0
  39. synth_ai/lm/core/main.py +4 -0
  40. synth_ai/lm/core/main_v3.py +68 -13
  41. synth_ai/lm/core/vendor_clients.py +4 -0
  42. synth_ai/lm/provider_support/openai.py +11 -2
  43. synth_ai/lm/vendors/base.py +7 -0
  44. synth_ai/lm/vendors/openai_standard.py +339 -4
  45. synth_ai/lm/vendors/openai_standard_responses.py +243 -0
  46. synth_ai/lm/vendors/synth_client.py +155 -5
  47. synth_ai/lm/warmup.py +54 -17
  48. synth_ai/tracing/__init__.py +18 -0
  49. synth_ai/tracing_v1/__init__.py +29 -14
  50. synth_ai/tracing_v3/config.py +13 -7
  51. synth_ai/tracing_v3/db_config.py +6 -6
  52. synth_ai/tracing_v3/turso/manager.py +8 -8
  53. synth_ai/tui/__main__.py +13 -0
  54. synth_ai/tui/dashboard.py +329 -0
  55. synth_ai/v0/tracing/__init__.py +0 -0
  56. synth_ai/{tracing → v0/tracing}/base_client.py +3 -3
  57. synth_ai/{tracing → v0/tracing}/client_manager.py +1 -1
  58. synth_ai/{tracing → v0/tracing}/context.py +1 -1
  59. synth_ai/{tracing → v0/tracing}/decorators.py +11 -11
  60. synth_ai/v0/tracing/events/__init__.py +0 -0
  61. synth_ai/{tracing → v0/tracing}/events/manage.py +4 -4
  62. synth_ai/{tracing → v0/tracing}/events/scope.py +6 -6
  63. synth_ai/{tracing → v0/tracing}/events/store.py +3 -3
  64. synth_ai/{tracing → v0/tracing}/immediate_client.py +6 -6
  65. synth_ai/{tracing → v0/tracing}/log_client_base.py +2 -2
  66. synth_ai/{tracing → v0/tracing}/retry_queue.py +3 -3
  67. synth_ai/{tracing → v0/tracing}/trackers.py +2 -2
  68. synth_ai/{tracing → v0/tracing}/upload.py +4 -4
  69. synth_ai/v0/tracing_v1/__init__.py +16 -0
  70. synth_ai/{tracing_v1 → v0/tracing_v1}/base_client.py +3 -3
  71. synth_ai/{tracing_v1 → v0/tracing_v1}/client_manager.py +1 -1
  72. synth_ai/{tracing_v1 → v0/tracing_v1}/context.py +1 -1
  73. synth_ai/{tracing_v1 → v0/tracing_v1}/decorators.py +11 -11
  74. synth_ai/v0/tracing_v1/events/__init__.py +0 -0
  75. synth_ai/{tracing_v1 → v0/tracing_v1}/events/manage.py +4 -4
  76. synth_ai/{tracing_v1 → v0/tracing_v1}/events/scope.py +6 -6
  77. synth_ai/{tracing_v1 → v0/tracing_v1}/events/store.py +3 -3
  78. synth_ai/{tracing_v1 → v0/tracing_v1}/immediate_client.py +6 -6
  79. synth_ai/{tracing_v1 → v0/tracing_v1}/log_client_base.py +2 -2
  80. synth_ai/{tracing_v1 → v0/tracing_v1}/retry_queue.py +3 -3
  81. synth_ai/{tracing_v1 → v0/tracing_v1}/trackers.py +2 -2
  82. synth_ai/{tracing_v1 → v0/tracing_v1}/upload.py +4 -4
  83. {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/METADATA +98 -4
  84. {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/RECORD +98 -62
  85. /synth_ai/{tracing/events/__init__.py → environments/examples/crafter_classic/debug_translation.py} +0 -0
  86. /synth_ai/{tracing_v1/events/__init__.py → learning/prompts/gepa.py} +0 -0
  87. /synth_ai/{tracing → v0/tracing}/abstractions.py +0 -0
  88. /synth_ai/{tracing → v0/tracing}/config.py +0 -0
  89. /synth_ai/{tracing → v0/tracing}/local.py +0 -0
  90. /synth_ai/{tracing → v0/tracing}/utils.py +0 -0
  91. /synth_ai/{tracing_v1 → v0/tracing_v1}/abstractions.py +0 -0
  92. /synth_ai/{tracing_v1 → v0/tracing_v1}/config.py +0 -0
  93. /synth_ai/{tracing_v1 → v0/tracing_v1}/local.py +0 -0
  94. /synth_ai/{tracing_v1 → v0/tracing_v1}/utils.py +0 -0
  95. {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/WHEEL +0 -0
  96. {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/entry_points.txt +0 -0
  97. {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/licenses/LICENSE +0 -0
  98. {synth_ai-0.2.2.dev0.dist-info → synth_ai-0.2.3.dist-info}/top_level.txt +0 -0
synth_ai/cli/recent.py ADDED
@@ -0,0 +1,126 @@
+ #!/usr/bin/env python3
+ """
+ CLI: experiments active in the last K hours with summary stats.
+ """
+
+ import asyncio
+ from datetime import datetime, timedelta
+ from typing import Optional
+
+ import click
+ from rich.console import Console
+ from rich.table import Table
+ from rich import box
+
+
+ def _fmt_int(v) -> str:
+     try:
+         return f"{int(v):,}"
+     except Exception:
+         return "0"
+
+
+ def _fmt_money(v) -> str:
+     try:
+         return f"${float(v or 0.0):.4f}"
+     except Exception:
+         return "$0.0000"
+
+
+ def _fmt_time(v) -> str:
+     try:
+         return str(v)
+     except Exception:
+         return "-"
+
+
+ async def _fetch_recent(db_url: str, hours: float):
+     from synth_ai.tracing_v3.turso.manager import AsyncSQLTraceManager
+
+     start_time = datetime.now() - timedelta(hours=hours)
+
+     db = AsyncSQLTraceManager(db_url)
+     await db.initialize()
+     try:
+         query = """
+             WITH windowed_sessions AS (
+                 SELECT *
+                 FROM session_traces
+                 WHERE created_at >= :start_time
+             )
+             SELECT
+                 e.experiment_id,
+                 e.name,
+                 e.description,
+                 MIN(ws.created_at) AS window_start,
+                 MAX(ws.created_at) AS window_end,
+                 COUNT(DISTINCT ws.session_id) AS runs,
+                 COUNT(DISTINCT ev.id) AS events,
+                 COUNT(DISTINCT m.id) AS messages,
+                 SUM(CASE WHEN ev.event_type = 'cais' THEN ev.cost_usd ELSE 0 END) / 100.0 AS cost_usd,
+                 SUM(CASE WHEN ev.event_type = 'cais' THEN ev.total_tokens ELSE 0 END) AS tokens
+             FROM windowed_sessions ws
+             LEFT JOIN experiments e ON ws.experiment_id = e.experiment_id
+             LEFT JOIN events ev ON ws.session_id = ev.session_id
+             LEFT JOIN messages m ON ws.session_id = m.session_id
+             GROUP BY e.experiment_id, e.name, e.description
+             ORDER BY window_end DESC
+         """
+         df = await db.query_traces(query, {"start_time": start_time})
+         return df
+     finally:
+         await db.close()
+
+
+ def register(cli):
+     @cli.command()
+     @click.option(
+         "--url",
+         "db_url",
+         default="sqlite+aiosqlite:///./synth_ai.db/dbs/default/data",
+         help="Database URL",
+     )
+     @click.option("--hours", default=24.0, type=float, help="Look back window in hours")
+     @click.option("--limit", default=20, type=int, help="Max experiments to display")
+     def recent(db_url: str, hours: float, limit: int):
+         """List experiments with activity in the last K hours with summary stats."""
+
+         console = Console()
+
+         async def _run():
+             df = await _fetch_recent(db_url, hours)
+
+             table = Table(title=f"Experiments in last {hours:g}h", header_style="bold", box=box.SIMPLE)
+             for col in ["Experiment", "Runs", "First", "Last", "Events", "Msgs", "Cost", "Tokens"]:
+                 table.add_column(col, justify="right" if col in {"Runs", "Events", "Msgs", "Tokens"} else "left")
+
+             if df is None or df.empty:
+                 table.add_row("-", "0", "-", "-", "-", "-", "-", "-")
+             else:
+                 count = 0
+                 for _, r in df.iterrows():
+                     if count >= limit:
+                         break
+                     count += 1
+                     name = r.get("name") or "Unnamed"
+                     exp_disp = f"{name[:28]} [dim]({_short(r.get('experiment_id'))})[/dim]"
+                     table.add_row(
+                         exp_disp,
+                         _fmt_int(r.get("runs", 0)),
+                         _fmt_time(r.get("window_start")),
+                         _fmt_time(r.get("window_end")),
+                         _fmt_int(r.get("events", 0)),
+                         _fmt_int(r.get("messages", 0)),
+                         _fmt_money(r.get("cost_usd", 0.0)),
+                         _fmt_int(r.get("tokens", 0)),
+                     )
+
+             console.print(table)
+
+         def _short(exp_id) -> str:
+             try:
+                 return str(exp_id)[:8]
+             except Exception:
+                 return ""
+
+         asyncio.run(_run())
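Each new command module exposes a register(cli) hook rather than importing the root group at definition time. A minimal sketch of how synth_ai/cli/__init__.py presumably wires these hooks into the click group defined in root.py; only the modules shown in this diff are listed, and the actual wiring may differ:

# Hypothetical wiring sketch; the real synth_ai/cli/__init__.py may differ.
from synth_ai.cli.root import cli
from synth_ai.cli import recent, status, traces

for module in (recent, status, traces):
    module.register(cli)  # attaches each module's @cli.command() to the shared group

if __name__ == "__main__":
    cli()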
synth_ai/cli/root.py ADDED
@@ -0,0 +1,184 @@
+ #!/usr/bin/env python3
+ """
+ Canonical CLI entrypoint for Synth AI (moved from synth_ai/cli.py).
+ """
+
+ from __future__ import annotations
+
+ import logging
+ import os
+ import shutil
+ import signal
+ import subprocess
+ import sys
+ import time
+ from typing import Optional
+
+ import click
+
+
+ def find_sqld_binary() -> Optional[str]:
+     sqld_path = shutil.which("sqld")
+     if sqld_path:
+         return sqld_path
+     common_paths = [
+         "/usr/local/bin/sqld",
+         "/usr/bin/sqld",
+         os.path.expanduser("~/.local/bin/sqld"),
+         os.path.expanduser("~/bin/sqld"),
+     ]
+     for path in common_paths:
+         if os.path.exists(path) and os.access(path, os.X_OK):
+             return path
+     return None
+
+
+ def install_sqld() -> str:
+     click.echo("🔧 sqld not found. Installing...")
+     script = """#!/bin/bash
+ set -e
+ SQLD_VERSION="v0.26.2"
+ OS=$(uname -s | tr '[:upper:]' '[:lower:]')
+ ARCH=$(uname -m)
+ case "$ARCH" in
+     x86_64) ARCH="x86_64" ;;
+     aarch64|arm64) ARCH="aarch64" ;;
+     *) echo "Unsupported architecture: $ARCH"; exit 1 ;;
+ esac
+ URL="https://github.com/tursodatabase/libsql/releases/download/libsql-server-${SQLD_VERSION}/sqld-${OS}-${ARCH}.tar.xz"
+ TMP_DIR=$(mktemp -d)
+ cd "$TMP_DIR"
+ curl -L -o sqld.tar.xz "$URL"
+ tar -xf sqld.tar.xz
+ mkdir -p ~/.local/bin
+ mv sqld ~/.local/bin/
+ chmod +x ~/.local/bin/sqld
+ cd -
+ rm -rf "$TMP_DIR"
+ """
+     path = "/tmp/install_sqld.sh"
+     with open(path, "w") as f:
+         f.write(script)
+     subprocess.run(["bash", path], check=True)
+     os.unlink(path)
+     local_bin = os.path.expanduser("~/.local/bin")
+     if local_bin not in os.environ.get("PATH", ""):
+         os.environ["PATH"] = f"{local_bin}:{os.environ.get('PATH', '')}"
+     return os.path.expanduser("~/.local/bin/sqld")
+
+
+ @click.group()
+ def cli():
+     """Synth AI - Software for aiding the best and multiplying the will."""
+
+
+ @cli.command()
+ @click.option("--db-file", default="traces/v3/synth_ai.db", help="Database file path")
+ @click.option("--sqld-port", default=8080, type=int, help="Port for sqld HTTP interface")
+ @click.option("--env-port", default=8901, type=int, help="Port for environment service")
+ @click.option("--no-sqld", is_flag=True, help="Skip starting sqld daemon")
+ @click.option("--no-env", is_flag=True, help="Skip starting environment service")
+ @click.option("--reload/--no-reload", default=False, help="Enable auto-reload (default: off). Or set SYNTH_RELOAD=1")
+ @click.option("--force/--no-force", default=True, help="Kill any process already bound to --env-port without prompting")
+ def serve(db_file: str, sqld_port: int, env_port: int, no_sqld: bool, no_env: bool, reload: bool, force: bool):
+     logging.basicConfig(level=logging.INFO, format="%(message)s")
+     processes = []
+
+     def signal_handler(sig, frame):
+         click.echo("\n🛑 Shutting down services...")
+         for proc in processes:
+             if proc.poll() is None:
+                 proc.terminate()
+                 try:
+                     proc.wait(timeout=5)
+                 except subprocess.TimeoutExpired:
+                     proc.kill()
+         sys.exit(0)
+
+     signal.signal(signal.SIGINT, signal_handler)
+     signal.signal(signal.SIGTERM, signal_handler)
+
+     if not no_sqld:
+         try:
+             result = subprocess.run(["pgrep", "-f", f"sqld.*--http-listen-addr.*:{sqld_port}"], capture_output=True, text=True)
+             if result.returncode != 0:
+                 sqld_bin = find_sqld_binary() or install_sqld()
+                 click.echo(f"🗄️ Starting sqld (local only) on port {sqld_port}")
+                 proc = subprocess.Popen([sqld_bin, "--db-path", db_file, "--http-listen-addr", f"127.0.0.1:{sqld_port}"], stdout=open("sqld.log", "w"), stderr=subprocess.STDOUT)
+                 processes.append(proc)
+                 time.sleep(2)
+         except FileNotFoundError:
+             pass
+
+     if not no_env:
+         click.echo("")
+         click.echo(f"🚀 Starting Synth-AI Environment Service on port {env_port}")
+         click.echo("")
+
+         # Ensure port is free
+         try:
+             import socket
+             with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                 in_use = s.connect_ex(("127.0.0.1", env_port)) == 0
+         except Exception:
+             in_use = False
+         if in_use:
+             pids: list[str] = []
+             try:
+                 out = subprocess.run(["lsof", "-ti", f":{env_port}"], capture_output=True, text=True)
+                 if out.returncode == 0 and out.stdout.strip():
+                     pids = [p for p in out.stdout.strip().splitlines() if p]
+             except FileNotFoundError:
+                 pids = []
+             if force:
+                 if pids:
+                     subprocess.run(["kill", "-9", *pids], check=False)
+                     time.sleep(0.5)
+             else:
+                 suffix = f" PIDs: {', '.join(pids)}" if pids else ""
+                 if click.confirm(f"⚠️ Port {env_port} is in use.{suffix} Kill and continue?", default=True):
+                     if pids:
+                         subprocess.run(["kill", "-9", *pids], check=False)
+                         time.sleep(0.5)
+                 else:
+                     click.echo("❌ Aborting.")
+                     sys.exit(1)
+
+         env = os.environ.copy()
+         env["SYNTH_LOGGING"] = "true"
+         click.echo("📦 Environment:")
+         click.echo(f" Python: {sys.executable}")
+         click.echo(f" Working directory: {os.getcwd()}")
+         click.echo("")
+         click.echo("🔄 Starting services...")
+         click.echo(f" - sqld daemon: http://127.0.0.1:{sqld_port}")
+         click.echo(f" - Environment service: http://127.0.0.1:{env_port}")
+         click.echo("")
+         click.echo("💡 Tips:")
+         click.echo(" - Check sqld.log if database issues occur")
+         click.echo(" - Use Ctrl+C to stop all services")
+         reload_enabled = reload or (os.getenv("SYNTH_RELOAD", "0") == "1")
+         click.echo(" - Auto-reload ENABLED (code changes restart service)" if reload_enabled else " - Auto-reload DISABLED (stable in-memory sessions)")
+         click.echo("")
+
+         uvicorn_cmd = [
+             sys.executable, "-m", "uvicorn", "synth_ai.environments.service.app:app",
+             "--host", "0.0.0.0", "--port", str(env_port), "--log-level", "info",
+         ]
+         if reload_enabled:
+             uvicorn_cmd.append("--reload")
+             if os.path.exists("synth_ai"):
+                 uvicorn_cmd.extend(["--reload-dir", "synth_ai"])
+         proc = subprocess.Popen(uvicorn_cmd, env=env)
+         processes.append(proc)
+
+     if processes:
+         click.echo("\n✨ All services started! Press Ctrl+C to stop.")
+         try:
+             for proc in processes:
+                 proc.wait()
+         except KeyboardInterrupt:
+             pass
+     else:
+         click.echo("No services to start.")
+
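For a quick smoke test of the new group and of serve's option parsing, click's built-in test runner can invoke it in-process; with both services disabled the command returns immediately after printing "No services to start." (illustrative sketch only, not part of the package's test suite):

# Illustrative smoke test using click's CliRunner.
from click.testing import CliRunner
from synth_ai.cli.root import cli

runner = CliRunner()
result = runner.invoke(cli, ["serve", "--no-sqld", "--no-env"])
assert result.exit_code == 0
print(result.output)  # expected to end with "No services to start."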
synth_ai/cli/status.py ADDED
@@ -0,0 +1,126 @@
+ #!/usr/bin/env python3
+ """
+ CLI: status of agent runs/versions and environment service.
+ """
+
+ import asyncio
+ from typing import Optional
+
+ import click
+ from rich.console import Console
+ from rich.panel import Panel
+ from rich.table import Table
+ from rich import box
+ import requests
+
+
+ async def _db_stats(db_url: str) -> dict:
+     from synth_ai.tracing_v3.turso.manager import AsyncSQLTraceManager
+
+     db = AsyncSQLTraceManager(db_url)
+     await db.initialize()
+     try:
+         out: dict = {}
+         # Totals
+         totals = await db.query_traces(
+             """
+             SELECT
+                 (SELECT COUNT(*) FROM session_traces) AS sessions,
+                 (SELECT COUNT(*) FROM experiments) AS experiments,
+                 (SELECT COUNT(*) FROM events) AS events,
+                 (SELECT COUNT(*) FROM messages) AS messages,
+                 (SELECT COALESCE(SUM(CASE WHEN event_type='cais' THEN cost_usd ELSE 0 END),0)/100.0 FROM events) AS total_cost_usd,
+                 (SELECT COALESCE(SUM(CASE WHEN event_type='cais' THEN total_tokens ELSE 0 END),0) FROM events) AS total_tokens
+             """
+         )
+         if not totals.empty:
+             out["totals"] = totals.iloc[0].to_dict()
+         else:
+             out["totals"] = {}
+
+         # Systems summary
+         systems = await db.query_traces(
+             """
+             SELECT system_type, COUNT(*) as count FROM systems GROUP BY system_type
+             """
+         )
+         out["systems"] = systems
+
+         versions = await db.query_traces(
+             """
+             SELECT COUNT(*) as version_count FROM system_versions
+             """
+         )
+         if not versions.empty:
+             out["version_count"] = int(versions.iloc[0]["version_count"])
+         else:
+             out["version_count"] = 0
+         return out
+     finally:
+         await db.close()
+
+
+ def register(cli):
+     @cli.command()
+     @click.option(
+         "--url",
+         "db_url",
+         default="sqlite+aiosqlite:///./synth_ai.db/dbs/default/data",
+         help="Database URL",
+     )
+     @click.option("--service-url", default="http://127.0.0.1:8901", help="Environment service URL")
+     def status(db_url: str, service_url: str):
+         """Show DB stats, agent/environment system counts, and env service health."""
+         console = Console()
+
+         async def _run():
+             # DB
+             stats = await _db_stats(db_url)
+
+             # Env service
+             health_text = "[red]unreachable[/red]"
+             envs_list = []
+             try:
+                 r = requests.get(f"{service_url}/health", timeout=2)
+                 if r.ok:
+                     data = r.json()
+                     health_text = "[green]ok[/green]"
+                     envs_list = data.get("supported_environments", [])
+                 else:
+                     health_text = f"[red]{r.status_code}[/red]"
+             except Exception:
+                 pass
+
+             # Render
+             totals = stats.get("totals", {})
+             lines = []
+             lines.append(f"DB: [dim]{db_url}[/dim]")
+             lines.append(
+                 f"Experiments: {int(totals.get('experiments', 0)):,} "
+                 f"Sessions: {int(totals.get('sessions', 0)):,} "
+                 f"Events: {int(totals.get('events', 0)):,} "
+                 f"Messages: {int(totals.get('messages', 0)):,}"
+             )
+             lines.append(
+                 f"Cost: ${float(totals.get('total_cost_usd', 0.0) or 0.0):.4f} "
+                 f"Tokens: {int(totals.get('total_tokens', 0)):,}"
+             )
+             lines.append("")
+             lines.append(f"Env Service: {health_text} [dim]{service_url}[/dim]")
+             if envs_list:
+                 lines.append("Environments: " + ", ".join(sorted(envs_list)[:10]) + (" ..." if len(envs_list) > 10 else ""))
+
+             panel_main = Panel("\n".join(lines), title="Synth AI Status", border_style="cyan")
+             console.print(panel_main)
+
+             # Systems table
+             sys_df = stats.get("systems")
+             if sys_df is not None and not sys_df.empty:
+                 tbl = Table(title=f"Systems (versions: {stats.get('version_count', 0)})", box=box.SIMPLE, header_style="bold")
+                 tbl.add_column("Type")
+                 tbl.add_column("Count", justify="right")
+                 for _, r in sys_df.iterrows():
+                     tbl.add_row(str(r.get("system_type", "-")), f"{int(r.get('count', 0)):,}")
+                 console.print(tbl)
+
+         asyncio.run(_run())
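Because _db_stats is a module-level coroutine, the same aggregates can be pulled without going through click. A small sketch, assuming the default local trace database path used by the CLI options above:

# Hypothetical direct use of the status helper outside the CLI.
import asyncio
from synth_ai.cli.status import _db_stats

stats = asyncio.run(_db_stats("sqlite+aiosqlite:///./synth_ai.db/dbs/default/data"))
print(stats["totals"])         # sessions, experiments, events, messages, cost, tokens
print(stats["version_count"])  # row count of system_versions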
synth_ai/cli/traces.py ADDED
@@ -0,0 +1,136 @@
+ #!/usr/bin/env python3
+ """
+ CLI: basic info about traces (runs).
+ """
+
+ import os
+ import asyncio
+ from typing import Optional, Dict, Tuple, List
+
+ import click
+ from rich.console import Console
+ from rich.table import Table
+ from rich import box
+
+
+ def register(cli):
+     @cli.command()
+     @click.option(
+         "--url",
+         "db_url",
+         default="sqlite+aiosqlite:///./synth_ai.db/dbs/default/data",
+         help="Database URL",
+     )
+     @click.option("--limit", default=25, type=int, help="Max sessions to display")
+     def traces(db_url: str, limit: int):
+         """Show local trace DBs, traces per DB, and per-system counts."""
+         console = Console()
+
+         async def _run():
+             from synth_ai.tracing_v3.turso.manager import AsyncSQLTraceManager
+
+             # Discover DBs under ./synth_ai.db/dbs (or override via env)
+             root = os.getenv("SYNTH_TRACES_ROOT", "./synth_ai.db/dbs")
+             if not os.path.isdir(root):
+                 console.print(f"[red]No DB root found:[/red] {root}")
+                 return
+
+             entries: List[Tuple[str, str]] = []
+             for name in sorted(os.listdir(root)):
+                 path = os.path.join(root, name)
+                 data_path = os.path.join(path, "data")
+                 if os.path.isdir(path) and os.path.isfile(data_path):
+                     entries.append((name, os.path.abspath(path)))
+
+             if not entries:
+                 console.print("[dim]No trace databases found.[/dim]")
+                 return
+
+             def _dir_size_bytes(dir_path: str) -> int:
+                 total = 0
+                 for dp, _, files in os.walk(dir_path):
+                     for fn in files:
+                         fp = os.path.join(dp, fn)
+                         try:
+                             total += os.path.getsize(fp)
+                         except OSError:
+                             pass
+                 return total
+
+             async def db_counts(db_dir: str) -> Tuple[int, Dict[str, int], int, Optional[str], int]:
+                 data_file = os.path.join(db_dir, "data")
+                 mgr = AsyncSQLTraceManager(f"sqlite+aiosqlite:///{data_file}")
+                 await mgr.initialize()
+                 try:
+                     traces_df = await mgr.query_traces("SELECT COUNT(*) AS c FROM session_traces")
+                     traces_count = int(traces_df.iloc[0]["c"]) if traces_df is not None and not traces_df.empty else 0
+                     try:
+                         systems_df = await mgr.query_traces(
+                             "SELECT system_type, COUNT(*) AS c FROM systems GROUP BY system_type"
+                         )
+                         system_counts = {
+                             str(r["system_type"] or "-"): int(r["c"] or 0)
+                             for _, r in systems_df.iterrows()
+                         } if systems_df is not None and not systems_df.empty else {}
+                     except Exception:
+                         system_counts = {}
+                     try:
+                         exps_df = await mgr.query_traces("SELECT COUNT(*) AS c FROM experiments")
+                         exps_count = int(exps_df.iloc[0]["c"]) if exps_df is not None and not exps_df.empty else 0
+                     except Exception:
+                         exps_count = 0
+                     try:
+                         last_df = await mgr.query_traces("SELECT MAX(created_at) AS last_created_at FROM session_traces")
+                         last_created = (
+                             str(last_df.iloc[0]["last_created_at"]) if last_df is not None and not last_df.empty else None
+                         )
+                     except Exception:
+                         last_created = None
+                     size_bytes = _dir_size_bytes(db_dir)
+                     return traces_count, system_counts, exps_count, last_created, size_bytes
+                 finally:
+                     await mgr.close()
+
+             results = []
+             for name, db_dir in entries:
+                 try:
+                     counts = await db_counts(db_dir)
+                 except Exception:
+                     counts = (0, {}, 0, None, 0)
+                 results.append((name, counts))
+
+             # DB summary table
+             summary = Table(title="Trace Databases", box=box.SIMPLE, header_style="bold")
+             for col in ["DB", "Traces", "Experiments", "Last Activity", "Size (GB)"]:
+                 summary.add_column(col, justify="right" if col in {"Traces", "Experiments"} else "left")
+
+             aggregate_systems: Dict[str, int] = {}
+             total_bytes = 0
+             for name, (traces_count, system_counts, experiments_count, last_created_at, size_bytes) in results:
+                 total_bytes += int(size_bytes or 0)
+                 gb = (int(size_bytes or 0) / (1024**3))
+                 summary.add_row(
+                     name,
+                     f"{traces_count:,}",
+                     f"{experiments_count:,}",
+                     str(last_created_at or "-"),
+                     f"{gb:.2f}",
+                 )
+                 for k, v in system_counts.items():
+                     aggregate_systems[k] = aggregate_systems.get(k, 0) + int(v)
+             console.print(summary)
+
+             # Total storage line
+             total_gb = total_bytes / (1024**3)
+             console.print(f"[dim]Total storage across DBs:[/dim] [bold]{total_gb:.2f} GB[/bold]")
+
+             # Per-system aggregate across DBs
+             if aggregate_systems:
+                 st = Table(title="Per-System (all DBs)", box=box.SIMPLE, header_style="bold")
+                 st.add_column("System")
+                 st.add_column("Count", justify="right")
+                 for sys_name, count in sorted(aggregate_systems.items(), key=lambda x: (-x[1], x[0])):
+                     st.add_row(sys_name or "-", f"{int(count):,}")
+                 console.print(st)
+
+         asyncio.run(_run())
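The traces command only picks up subdirectories of SYNTH_TRACES_ROOT (default ./synth_ai.db/dbs) that contain a file literally named "data". A hypothetical sketch of that layout, e.g. for exercising the command against an empty tree; the real "data" files are sqlite/libSQL databases created by sqld and the v3 tracer, and empty placeholders simply show up with zero counts because db_counts swallows query errors:

# Hypothetical on-disk layout scanned by the traces command above.
from pathlib import Path

root = Path("./synth_ai.db/dbs")  # default; override with SYNTH_TRACES_ROOT
for name in ("default", "experiment_a"):
    (root / name).mkdir(parents=True, exist_ok=True)
    (root / name / "data").touch()  # placeholder only; normally a real database file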