code-data-ark 2.0.11__tar.gz → 2.0.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/.flake8 +2 -0
  2. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/PKG-INFO +1 -1
  3. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/__init__.py +1 -1
  4. code_data_ark-2.0.12/cda/ui/actions.py +64 -0
  5. code_data_ark-2.0.12/cda/ui/db/__init__.py +31 -0
  6. code_data_ark-2.0.12/cda/ui/db/base.py +69 -0
  7. code_data_ark-2.0.12/cda/ui/db/memory.py +14 -0
  8. code_data_ark-2.0.12/cda/ui/db/overview.py +84 -0
  9. code_data_ark-2.0.12/cda/ui/db/search.py +27 -0
  10. code_data_ark-2.0.12/cda/ui/db/sessions.py +125 -0
  11. code_data_ark-2.0.12/cda/ui/db/signals.py +49 -0
  12. code_data_ark-2.0.12/cda/ui/db/tokens.py +33 -0
  13. code_data_ark-2.0.12/cda/ui/db/tools.py +47 -0
  14. code_data_ark-2.0.12/cda/ui/db/workspaces.py +33 -0
  15. code_data_ark-2.0.12/cda/ui/routes.py +181 -0
  16. code_data_ark-2.0.12/cda/ui/static/__init__.py +0 -0
  17. code_data_ark-2.0.12/cda/ui/static/web.css +892 -0
  18. code_data_ark-2.0.12/cda/ui/static/web.js +862 -0
  19. code_data_ark-2.0.12/cda/ui/templates.py +418 -0
  20. code_data_ark-2.0.12/cda/ui/web.py +28 -0
  21. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/pyproject.toml +1 -1
  22. code_data_ark-2.0.11/cda/ui/web.py +0 -2908
  23. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/.github/workflows/ci.yml +0 -0
  24. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/.gitignore +0 -0
  25. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/bin/release.py +0 -0
  26. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/__main__.py +0 -0
  27. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/kernel/__init__.py +0 -0
  28. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/kernel/control_db.py +0 -0
  29. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/kernel/paths.py +0 -0
  30. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/kernel/pmf_kernel.py +0 -0
  31. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/kernel/selfcheck.py +0 -0
  32. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/__init__.py +0 -0
  33. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/embed.py +0 -0
  34. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/extract.py +0 -0
  35. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/ingest.py +0 -0
  36. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/parse_edits.py +0 -0
  37. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/reconstruct.py +0 -0
  38. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/pipeline/watcher.py +0 -0
  39. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/ui/__init__.py +0 -0
  40. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/ui/cli.py +0 -0
  41. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/changelog.md +0 -0
  42. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/contributing.md +0 -0
  43. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/docs/architecture.md +0 -0
  44. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/docs/examples/usage.md +0 -0
  45. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/docs/pmf_kernel.md +0 -0
  46. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/docs/roadmap.md +0 -0
  47. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/license +0 -0
  48. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/makefile +0 -0
  49. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/readme.md +0 -0
  50. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/tests/test_basic.py +0 -0
  51. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/tests/test_selfcheck.py +0 -0
  52. {code_data_ark-2.0.11 → code_data_ark-2.0.12}/version +0 -0

{code_data_ark-2.0.11 → code_data_ark-2.0.12}/.flake8
@@ -17,3 +17,5 @@ extend-ignore =
  per-file-ignores =
  # Conditional import at bottom of watcher.py main guard
  cda/pipeline/watcher.py:E402
+ # HTML/SVG template strings are inherently long
+ cda/ui/templates.py:E501

{code_data_ark-2.0.11 → code_data_ark-2.0.12}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: code-data-ark
- Version: 2.0.11
+ Version: 2.0.12
  Summary: Code Data Ark — local observability and intelligence platform for VS Code + Copilot Chat sessions
  Project-URL: Homepage, https://github.com/goCosmix/cda
  Project-URL: Repository, https://github.com/goCosmix/cda.git

{code_data_ark-2.0.11 → code_data_ark-2.0.12}/cda/__init__.py
@@ -1,3 +1,3 @@
  """Code Data Ark — local observability and intelligence platform for VS Code + Copilot Chat sessions."""

- __version__ = "2.0.11"
+ __version__ = "2.0.12"

code_data_ark-2.0.12/cda/ui/actions.py
@@ -0,0 +1,64 @@
+ import subprocess
+ import sys
+ import threading
+ from datetime import datetime
+ from typing import Any, Dict
+
+
+ ACTION_STATE: Dict[str, Any] = {}
+ ACTION_LOCK = threading.Lock()
+
+
+ def run_action_background(action_id, action_name):
+     """Execute pipeline action in background thread."""
+     with ACTION_LOCK:
+         ACTION_STATE[action_id] = {
+             "status": "running",
+             "action": action_name,
+             "started_at": datetime.now().isoformat(),
+             "output": ""
+         }
+
+     try:
+         if action_name == "sync":
+             result = subprocess.run(
+                 [sys.executable, "-m", "cda.pipeline.ingest"],
+                 capture_output=True,
+                 text=True,
+                 timeout=300
+             )
+         elif action_name == "reconstruct":
+             result = subprocess.run(
+                 [sys.executable, "-m", "cda.pipeline.reconstruct"],
+                 capture_output=True,
+                 text=True,
+                 timeout=300
+             )
+         elif action_name == "embed-build":
+             result = subprocess.run(
+                 [sys.executable, "-m", "cda.pipeline.embed", "build"],
+                 capture_output=True,
+                 text=True,
+                 timeout=600
+             )
+         elif action_name == "watch-start":
+             result = subprocess.run(
+                 [sys.executable, "-m", "cda.pipeline.watcher", "start"],
+                 capture_output=True,
+                 text=True,
+                 timeout=30
+             )
+         else:
+             result = None
+
+         with ACTION_LOCK:
+             if result:
+                 ACTION_STATE[action_id]["status"] = "completed" if result.returncode == 0 else "failed"
+                 ACTION_STATE[action_id]["output"] = result.stdout + result.stderr
+                 ACTION_STATE[action_id]["returncode"] = result.returncode
+                 ACTION_STATE[action_id]["completed_at"] = datetime.now().isoformat()
+     except Exception as e:
+         with ACTION_LOCK:
+             ACTION_STATE[action_id]["status"] = "error"
+             ACTION_STATE[action_id]["output"] = str(e)
+             ACTION_STATE[action_id]["completed_at"] = datetime.now().isoformat()
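
The actions module records background pipeline runs in the module-level ACTION_STATE dict guarded by ACTION_LOCK. A minimal sketch of how a caller could drive it, assuming the caller generates its own action id (the real wiring lives in the new cda/ui/routes.py, which is not shown above):

    import threading
    import uuid

    from cda.ui.actions import ACTION_LOCK, ACTION_STATE, run_action_background

    # Start a "sync" run without blocking the caller.
    action_id = str(uuid.uuid4())
    threading.Thread(
        target=run_action_background,
        args=(action_id, "sync"),
        daemon=True,
    ).start()

    # Poll progress under the same lock the worker uses.
    with ACTION_LOCK:
        status = ACTION_STATE.get(action_id, {}).get("status")
    # status is "running", then "completed", "failed", or "error"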

code_data_ark-2.0.12/cda/ui/db/__init__.py
@@ -0,0 +1,31 @@
+ from .base import get_db, query_rows, query_one, safe_rows, safe_one, table_exists, execute_stmt
+ from .overview import get_overview
+ from .sessions import get_sessions, get_session_detail
+ from .search import get_search_results
+ from .workspaces import get_workspaces, get_workspace_detail
+ from .memory import get_memory
+ from .tools import get_tool_calls, get_vfs
+ from .signals import get_alerts, get_behavioral_signals
+ from .tokens import get_tokens
+
+ __all__ = [
+     "get_db",
+     "query_rows",
+     "query_one",
+     "safe_rows",
+     "safe_one",
+     "table_exists",
+     "execute_stmt",
+     "get_overview",
+     "get_sessions",
+     "get_session_detail",
+     "get_search_results",
+     "get_workspaces",
+     "get_workspace_detail",
+     "get_memory",
+     "get_tool_calls",
+     "get_vfs",
+     "get_alerts",
+     "get_behavioral_signals",
+     "get_tokens",
+ ]
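
The package __init__ flattens the per-page modules into a single import surface, so route handlers can pull everything from cda.ui.db. A small usage sketch (not taken from routes.py):

    from cda.ui.db import get_overview, get_sessions

    overview = get_overview()        # {"stats": ..., "heat_distribution": ..., ...} or {"error": ...}
    second_page = get_sessions(limit=25, offset=25)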

code_data_ark-2.0.12/cda/ui/db/base.py
@@ -0,0 +1,69 @@
+ import sqlite3
+ from cda.kernel.paths import DB_PATH
+
+
+ def get_db():
+     """Get database connection with proper settings."""
+     conn = sqlite3.connect(str(DB_PATH), timeout=10)
+     conn.row_factory = sqlite3.Row
+     conn.execute("PRAGMA journal_mode=WAL")
+     conn.execute("PRAGMA synchronous=NORMAL")
+     return conn
+
+
+ def query_rows(sql, params=()):
+     """Execute SELECT and return rows as dicts."""
+     try:
+         conn = get_db()
+         cursor = conn.execute(sql, params)
+         rows = [dict(row) for row in cursor.fetchall()]
+         conn.close()
+         return rows
+     except Exception as e:
+         return {"error": str(e)}
+
+
+ def query_one(sql, params=()):
+     """Execute SELECT and return single row or None."""
+     rows = query_rows(sql, params)
+     if isinstance(rows, dict) and "error" in rows:
+         return rows
+     return rows[0] if rows else None
+
+
+ def safe_rows(rows):
+     """Normalize query_rows output to an array for APIs."""
+     if isinstance(rows, dict) and "error" in rows:
+         return []
+     return rows or []
+
+
+ def safe_one(row):
+     """Normalize query_one output to a dict or None."""
+     if isinstance(row, dict) and "error" in row:
+         return None
+     return row
+
+
+ def table_exists(table_name):
+     """Return True if a table exists in the current database."""
+     try:
+         row = query_one(
+             "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
+             (table_name,)
+         )
+         return bool(row)
+     except Exception:
+         return False
+
+
+ def execute_stmt(sql, params=()):
+     """Execute INSERT/UPDATE/DELETE statement."""
+     try:
+         conn = get_db()
+         conn.execute(sql, params)
+         conn.commit()
+         conn.close()
+         return {"ok": True}
+     except Exception as e:
+         return {"error": str(e)}
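
These helpers report failures as an {"error": ...} dict instead of raising, and safe_rows/safe_one collapse that case for callers that want a plain list or dict. A short illustration of the convention, assuming an existing database at DB_PATH:

    from cda.ui.db.base import query_rows, safe_rows, table_exists

    rows = query_rows("SELECT session_id, title FROM sessions LIMIT 5")
    if isinstance(rows, dict) and "error" in rows:
        # failures come back as a dict, not an exception
        print("query failed:", rows["error"])

    # safe_rows turns the error case into an empty list for API responses
    titles = [r["title"] for r in safe_rows(rows)]

    # optional tables are probed before SQL is built against them
    if table_exists("session_analysis"):
        print("heat scores available")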

code_data_ark-2.0.12/cda/ui/db/memory.py
@@ -0,0 +1,14 @@
+ from .base import query_rows
+
+
+ def get_memory():
+     """Get all memory files."""
+     try:
+         memory = query_rows("""
+             SELECT id, scope, workspace_id, session_id, filename, size_bytes, ingested_at
+             FROM memory_files
+             ORDER BY ingested_at DESC
+         """)
+         return {"memory": memory}
+     except Exception as e:
+         return {"error": str(e)}

code_data_ark-2.0.12/cda/ui/db/overview.py
@@ -0,0 +1,84 @@
+ from .base import query_rows, query_one, safe_rows, safe_one, table_exists
+
+
+ def get_overview():
+     """Dashboard overview stats."""
+     try:
+         has_analysis = table_exists('session_analysis')
+         has_exchanges = table_exists('exchanges')
+         has_signals = table_exists('exchange_signals')
+         has_alerts = table_exists('anomaly_alerts')
+
+         stats = query_one(f"""
+             SELECT
+                 (SELECT COUNT(*) FROM sessions) as total_sessions,
+                 {("(SELECT COUNT(*) FROM exchanges)" if has_exchanges else "0")} as total_exchanges,
+                 {("(SELECT AVG(heat_score) FROM session_analysis WHERE heat_score IS NOT NULL)" if has_analysis else "0")} as avg_heat,
+                 {("(SELECT COUNT(*) FROM session_analysis WHERE heat_score >= 50)" if has_analysis else "0")} as critical_sessions,
+                 {("(SELECT COUNT(*) FROM anomaly_alerts)" if has_alerts else "0")} as alert_count,
+                 (SELECT COUNT(*) FROM workspaces) as workspace_count,
+                 (SELECT MAX(created_at) FROM sessions) as last_session
+         """)
+
+         heat_dist = safe_rows(query_rows("""
+             SELECT
+                 CASE
+                     WHEN heat_score < 20 THEN '0-19'
+                     WHEN heat_score < 40 THEN '20-39'
+                     WHEN heat_score < 60 THEN '40-59'
+                     WHEN heat_score < 80 THEN '60-79'
+                     ELSE '80-100'
+                 END as range,
+                 COUNT(*) as count
+             FROM session_analysis
+             WHERE heat_score IS NOT NULL
+             GROUP BY range
+             ORDER BY range
+         """)) if has_analysis else []
+
+         keywords = safe_rows(query_rows("""
+             SELECT matched_keyword as keyword, SUM(count) as total_count
+             FROM (
+                 SELECT matched_keyword, COUNT(*) as count
+                 FROM exchange_signals
+                 WHERE matched_keyword IS NOT NULL
+                 GROUP BY matched_keyword
+             )
+             GROUP BY matched_keyword
+             ORDER BY total_count DESC
+             LIMIT 15
+         """)) if has_signals else []
+
+         exchange_count_expr = (
+             "(SELECT COUNT(*) FROM exchanges WHERE exchanges.session_id = s.session_id)"
+             if has_exchanges else "0"
+         )
+         if has_analysis:
+             recent = safe_rows(query_rows(f"""
+                 SELECT s.session_id as id, s.title, sa.heat_score,
+                     {exchange_count_expr} as exchange_count,
+                     s.created_at
+                 FROM sessions s
+                 LEFT JOIN session_analysis sa ON sa.session_id = s.session_id
+                 ORDER BY s.created_at DESC
+                 LIMIT 10
+             """))
+         else:
+             recent = safe_rows(query_rows(f"""
+                 SELECT s.session_id as id, s.title, NULL as heat_score,
+                     {exchange_count_expr} as exchange_count,
+                     s.created_at
+                 FROM sessions s
+                 ORDER BY s.created_at DESC
+                 LIMIT 10
+             """))
+
+         stats = safe_one(stats)
+         return {
+             "stats": dict(stats) if stats else {},
+             "heat_distribution": heat_dist,
+             "keywords": keywords,
+             "recent_sessions": recent
+         }
+     except Exception as e:
+         return {"error": str(e)}

code_data_ark-2.0.12/cda/ui/db/search.py
@@ -0,0 +1,27 @@
+ from .base import query_rows
+
+
+ def get_search_results(query, limit=50):
+     """Full-text search across exchanges."""
+     try:
+         results = query_rows("""
+             SELECT
+                 e.session_id,
+                 s.title,
+                 sa.heat_score,
+                 e.id as exchange_id,
+                 e.exchange_index,
+                 e.user_message,
+                 e.response_text,
+                 e.user_ts
+             FROM fts_exchanges fts
+             JOIN exchanges e ON fts.rowid = e.id
+             JOIN sessions s ON e.session_id = s.session_id
+             LEFT JOIN session_analysis sa ON sa.session_id = e.session_id
+             WHERE fts_exchanges MATCH ?
+             ORDER BY rank
+             LIMIT ?
+         """, (query, limit))
+         return {"results": results, "query": query, "count": len(results)}
+     except Exception as e:
+         return {"error": str(e)}
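
get_search_results passes the query string straight through to MATCH on the fts_exchanges table (the ORDER BY rank suggests an FTS5 index), so callers can use full-text query syntax directly. A hedged usage sketch, assuming the index has been populated by the ingest pipeline:

    from cda.ui.db.search import get_search_results

    hits = get_search_results('"rate limit" OR timeout', limit=20)
    if "error" in hits:
        print("search failed:", hits["error"])
    else:
        for hit in hits["results"]:
            print(hit["session_id"], hit["exchange_index"], hit["title"])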

code_data_ark-2.0.12/cda/ui/db/sessions.py
@@ -0,0 +1,125 @@
+ from .base import query_rows, query_one, safe_rows, safe_one, table_exists
+
+
+ def get_sessions(limit=50, offset=0):
+     """List all sessions with heat scores."""
+     try:
+         has_analysis = table_exists('session_analysis')
+         has_exchanges = table_exists('exchanges')
+         exchange_count_expr = (
+             "(SELECT COUNT(*) FROM exchanges WHERE exchanges.session_id = s.session_id)"
+             if has_exchanges else "0"
+         )
+
+         if has_analysis:
+             sessions = safe_rows(query_rows(f"""
+                 SELECT s.session_id as id, s.title, sa.heat_score, s.workspace_id,
+                     {exchange_count_expr} as exchange_count,
+                     s.created_at
+                 FROM sessions s
+                 LEFT JOIN session_analysis sa ON sa.session_id = s.session_id
+                 ORDER BY s.created_at DESC
+                 LIMIT ? OFFSET ?
+             """, (limit, offset)))
+         else:
+             sessions = safe_rows(query_rows(f"""
+                 SELECT s.session_id as id, s.title, NULL as heat_score, s.workspace_id,
+                     {exchange_count_expr} as exchange_count,
+                     s.created_at
+                 FROM sessions s
+                 ORDER BY s.created_at DESC
+                 LIMIT ? OFFSET ?
+             """, (limit, offset)))
+
+         total = safe_one(query_one("SELECT COUNT(*) as count FROM sessions"))
+
+         return {
+             "sessions": sessions,
+             "total": total["count"] if total else 0,
+             "limit": limit,
+             "offset": offset
+         }
+     except Exception as e:
+         return {"error": str(e)}
+
+
+ def get_session_detail(session_id):
+     """Get full session with all exchanges and signals."""
+     if not session_id:
+         return {"error": "Missing session_id"}
+
+     try:
+         has_exchanges = table_exists('exchanges')
+         has_tool_calls = table_exists('tool_calls')
+         has_vfs = table_exists('vfs')
+         has_alerts = table_exists('anomaly_alerts')
+         has_signals = table_exists('exchange_signals')
+         has_analysis = table_exists('session_analysis')
+
+         session = safe_one(query_one(
+             "SELECT * FROM sessions WHERE session_id = ?", (session_id,)
+         ))
+         if not session:
+             return {"error": "Session not found"}
+
+         exchanges = safe_rows(query_rows("""
+             SELECT id, exchange_index, user_message as user_input, response_text as assistant_response,
+                 tool_calls, tool_call_count, ingested_at as created_at
+             FROM exchanges
+             WHERE session_id = ?
+             ORDER BY ingested_at ASC
+         """, (session_id,))) if has_exchanges else []
+
+         tool_calls = safe_rows(query_rows("""
+             SELECT id, session_id, exchange_index, request_id, tool_call_id, tool_name,
+                 file_path, arguments_json, has_output, ingested_at
+             FROM tool_calls
+             WHERE session_id = ?
+             ORDER BY ingested_at ASC
+         """, (session_id,))) if has_tool_calls else []
+
+         vfs_entries = safe_rows(query_rows("""
+             SELECT id, source_type, source_path, filename, content_type, size_bytes, sha256, ingested_at
+             FROM vfs
+             WHERE session_id = ?
+             ORDER BY filename ASC
+         """, (session_id,))) if has_vfs else []
+
+         alerts = safe_rows(query_rows("""
+             SELECT id, alert_type, severity, message, created_at
+             FROM anomaly_alerts
+             WHERE session_id = ?
+             ORDER BY created_at DESC
+         """, (session_id,))) if has_alerts else []
+
+         signals = safe_rows(query_rows("""
+             SELECT * FROM exchange_signals
+             WHERE session_id = ?
+             ORDER BY ts DESC
+         """, (session_id,))) if has_signals else []
+
+         signal_summary = safe_rows(query_rows("""
+             SELECT signal_type, COUNT(*) as count
+             FROM exchange_signals
+             WHERE session_id = ?
+             GROUP BY signal_type
+         """, (session_id,))) if has_signals else []
+
+         analysis = safe_one(query_one("""
+             SELECT * FROM session_analysis
+             WHERE session_id = ?
+             LIMIT 1
+         """, (session_id,))) if has_analysis else None
+
+         return {
+             "session": dict(session),
+             "analysis": analysis,
+             "exchanges": exchanges,
+             "tool_calls": tool_calls,
+             "vfs": vfs_entries,
+             "alerts": alerts,
+             "signals": signals,
+             "signal_summary": signal_summary
+         }
+     except Exception as e:
+         return {"error": str(e)}

code_data_ark-2.0.12/cda/ui/db/signals.py
@@ -0,0 +1,49 @@
+ from .base import query_rows, query_one
+
+
+ def get_alerts(limit=50):
+     """Get anomaly alerts."""
+     try:
+         alerts = query_rows("""
+             SELECT id, session_id, alert_type, message, severity, created_at
+             FROM anomaly_alerts
+             ORDER BY created_at DESC
+             LIMIT ?
+         """, (limit,))
+
+         session_titles = {}
+         for alert in alerts:
+             if alert["session_id"] not in session_titles:
+                 sess = query_one(
+                     "SELECT title FROM sessions WHERE session_id = ?",
+                     (alert["session_id"],)
+                 )
+                 session_titles[alert["session_id"]] = sess["title"] if sess else "Unknown"
+
+         for alert in alerts:
+             alert["session_title"] = session_titles.get(alert["session_id"], "Unknown")
+
+         return {"alerts": alerts}
+     except Exception as e:
+         return {"error": str(e)}
+
+
+ def get_behavioral_signals(session_id=None):
+     """Get behavioral signal analysis."""
+     try:
+         if session_id:
+             signals = query_rows("""
+                 SELECT signal_type, COUNT(*) as count
+                 FROM exchange_signals
+                 WHERE session_id = ?
+                 GROUP BY signal_type
+             """, (session_id,))
+         else:
+             signals = query_rows("""
+                 SELECT signal_type, COUNT(*) as count
+                 FROM exchange_signals
+                 GROUP BY signal_type
+             """)
+         return {"signals": signals}
+     except Exception as e:
+         return {"error": str(e)}

code_data_ark-2.0.12/cda/ui/db/tokens.py
@@ -0,0 +1,33 @@
+ from .base import query_rows
+
+
+ def get_tokens(session_id=None):
+     """Get token usage analysis."""
+     try:
+         if session_id:
+             tokens = query_rows("""
+                 SELECT
+                     SUM(prompt_tokens) as total_prompt,
+                     SUM(completion_tokens) as total_completion,
+                     SUM(cached_tokens) as total_cached,
+                     SUM(prompt_tokens + completion_tokens) as total_tokens,
+                     COUNT(*) as turn_count,
+                     GROUP_CONCAT(DISTINCT model_id) as models
+                 FROM token_usage
+                 WHERE session_id = ?
+             """, (session_id,))
+         else:
+             tokens = query_rows("""
+                 SELECT
+                     SUM(prompt_tokens) as total_prompt,
+                     SUM(completion_tokens) as total_completion,
+                     SUM(cached_tokens) as total_cached,
+                     SUM(prompt_tokens + completion_tokens) as total_tokens,
+                     COUNT(*) as turn_count,
+                     COUNT(DISTINCT session_id) as session_count,
+                     GROUP_CONCAT(DISTINCT model_id) as models
+                 FROM token_usage
+             """)
+         return {"tokens": tokens}
+     except Exception as e:
+         return {"error": str(e)}

code_data_ark-2.0.12/cda/ui/db/tools.py
@@ -0,0 +1,47 @@
+ from .base import query_rows
+
+
+ def get_tool_calls(query_str=None, limit=50):
+     """Search tool calls."""
+     try:
+         if query_str:
+             results = query_rows("""
+                 SELECT tc.id, tc.session_id, tc.exchange_index, tc.request_id,
+                     tc.tool_call_id, tc.tool_name, tc.file_path,
+                     tc.arguments_json, tc.has_output, tc.ingested_at,
+                     s.title as session_title
+                 FROM tool_calls tc
+                 JOIN sessions s ON tc.session_id = s.session_id
+                 WHERE tc.tool_name LIKE ? OR tc.arguments_json LIKE ?
+                 ORDER BY tc.ingested_at DESC
+                 LIMIT ?
+             """, (f"%{query_str}%", f"%{query_str}%", limit))
+         else:
+             results = query_rows("""
+                 SELECT tc.id, tc.session_id, tc.exchange_index, tc.request_id,
+                     tc.tool_call_id, tc.tool_name, tc.file_path,
+                     tc.arguments_json, tc.has_output, tc.ingested_at,
+                     s.title as session_title
+                 FROM tool_calls tc
+                 JOIN sessions s ON tc.session_id = s.session_id
+                 ORDER BY tc.ingested_at DESC
+                 LIMIT ?
+             """, (limit,))
+         return {"tool_calls": results, "query": query_str, "count": len(results)}
+     except Exception as e:
+         return {"error": str(e)}
+
+
+ def get_vfs(session_id):
+     """List VFS files for a session."""
+     try:
+         vfs = query_rows("""
+             SELECT id, session_id, source_type, source_path, filename,
+                 content_type, size_bytes, sha256, ingested_at
+             FROM vfs
+             WHERE session_id = ?
+             ORDER BY filename
+         """, (session_id,))
+         return {"vfs": vfs, "session_id": session_id}
+     except Exception as e:
+         return {"error": str(e)}

code_data_ark-2.0.12/cda/ui/db/workspaces.py
@@ -0,0 +1,33 @@
+ from .base import query_rows
+
+
+ def get_workspaces():
+     """List all workspaces with session counts."""
+     try:
+         workspaces = query_rows("""
+             SELECT w.workspace_id, w.uri, w.name, w.type, w.session_count,
+                 (SELECT MAX(s.created_at) FROM sessions s
+                  WHERE s.workspace_id = w.workspace_id) as last_session
+             FROM workspaces w
+             ORDER BY w.session_count DESC
+         """)
+         return {"workspaces": workspaces}
+     except Exception as e:
+         return {"error": str(e)}
+
+
+ def get_workspace_detail(workspace_id):
+     """Get all sessions for a workspace."""
+     try:
+         sessions = query_rows("""
+             SELECT s.session_id as id, s.title, sa.heat_score,
+                 (SELECT COUNT(*) FROM exchanges WHERE exchanges.session_id = s.session_id) as exchange_count,
+                 s.created_at
+             FROM sessions s
+             LEFT JOIN session_analysis sa ON sa.session_id = s.session_id
+             WHERE s.workspace_id = ?
+             ORDER BY s.created_at DESC
+         """, (workspace_id,))
+         return {"workspace_id": workspace_id, "sessions": sessions}
+     except Exception as e:
+         return {"error": str(e)}
+ return {"error": str(e)}