code-data-ark 2.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cda/__init__.py +3 -0
- cda/kernel/__init__.py +0 -0
- cda/kernel/control_db.py +151 -0
- cda/kernel/pmf_kernel.py +364 -0
- cda/kernel/selfcheck.py +299 -0
- cda/pipeline/__init__.py +0 -0
- cda/pipeline/embed.py +694 -0
- cda/pipeline/extract.py +1064 -0
- cda/pipeline/ingest.py +673 -0
- cda/pipeline/parse_edits.py +250 -0
- cda/pipeline/reconstruct.py +536 -0
- cda/pipeline/watcher.py +783 -0
- cda/ui/__init__.py +0 -0
- cda/ui/cli.py +2587 -0
- cda/ui/web.py +2848 -0
- code_data_ark-2.0.2.dist-info/METADATA +495 -0
- code_data_ark-2.0.2.dist-info/RECORD +20 -0
- code_data_ark-2.0.2.dist-info/WHEEL +4 -0
- code_data_ark-2.0.2.dist-info/entry_points.txt +2 -0
- code_data_ark-2.0.2.dist-info/licenses/license +21 -0
cda/ui/cli.py
ADDED
|
@@ -0,0 +1,2587 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
cda — Code Direct Ask
|
|
4
|
+
CLI for querying, searching, and managing the Code Data Ark session database.
|
|
5
|
+
|
|
6
|
+
Commands:
|
|
7
|
+
cda search <query> Full-text search across all exchanges
|
|
8
|
+
cda code-search <pattern> Search code symbols and content
|
|
9
|
+
cda sessions List all sessions (newest first)
|
|
10
|
+
cda session <id> Show all exchanges in a session
|
|
11
|
+
cda exchange <id> <idx> Show one full exchange with tool calls
|
|
12
|
+
cda workspaces List all registered workspaces
|
|
13
|
+
cda workspace <id> Show sessions for a workspace
|
|
14
|
+
cda memory Show all memory files
|
|
15
|
+
cda tools <query> Search tool call arguments and names
|
|
16
|
+
cda replay <id> Print a session as a readable conversation
|
|
17
|
+
cda stats System-wide stats and coverage summary
|
|
18
|
+
cda status Watcher daemon status and queue information
|
|
19
|
+
cda watch start Start the live watcher daemon
|
|
20
|
+
cda watch stop Stop the watcher daemon
|
|
21
|
+
cda watch restart Restart the watcher daemon
|
|
22
|
+
cda pmf services List embedded PMF kernel services
|
|
23
|
+
cda pmf start <service> Start a service (ui, watcher, sync, etc.)
|
|
24
|
+
cda pmf stop <service> Stop a service
|
|
25
|
+
cda pmf restart <service> Restart a service
|
|
26
|
+
cda pmf logs <service> Tail service logs
|
|
27
|
+
cda check Run a full self-diagnostic. The system checks itself.
|
|
28
|
+
cda serve Start the local web UI on port 10001
|
|
29
|
+
cda sync Full re-ingest from disk (rebuilds entire DB)
|
|
30
|
+
cda reconstruct Re-run reconstruction and FTS rebuild only
|
|
31
|
+
cda embed build Build semantic embeddings and session intelligence
|
|
32
|
+
cda query <sql> Raw SQL query against the DB
|
|
33
|
+
cda export <id> Export a session as JSON, JSONL, or text
|
|
34
|
+
cda vfs ls <session_id> List VFS blobs for a session
|
|
35
|
+
cda vfs cat <vfs_id> Print decompressed content of a VFS blob
|
|
36
|
+
cda policy allow <pattern> Add an allow pattern for search results
|
|
37
|
+
cda policy deny <pattern> Add a deny pattern for search results
|
|
38
|
+
cda policy list List current policies
|
|
39
|
+
cda signals [session] Show behavioral signals
|
|
40
|
+
cda heat [session] Frustration and heat analysis
|
|
41
|
+
cda behavior Aggregate behavioral intelligence
|
|
42
|
+
cda saved Sessions that recovered from high heat
|
|
43
|
+
cda tokens [session] Token usage analysis
|
|
44
|
+
cda compactions [session] Context compaction events
|
|
45
|
+
cda edits Edit session analytics
|
|
46
|
+
cda semantic-search <query> Semantic search using embeddings
|
|
47
|
+
cda similar <session> Find sessions similar to a session
|
|
48
|
+
cda summarize <session> Show session summary, topics, and recommendations
|
|
49
|
+
cda topics Show semantic topic tags
|
|
50
|
+
cda alerts <session> Show semantic anomaly alerts
|
|
51
|
+
cda recommend <session> Show session recommendations
|
|
52
|
+
"""
|
|
53
|
+
|
|
54
|
+
import os
|
|
55
|
+
import sys
|
|
56
|
+
import json
|
|
57
|
+
import gzip
|
|
58
|
+
import sqlite3
|
|
59
|
+
import subprocess
|
|
60
|
+
import textwrap
|
|
61
|
+
import datetime
|
|
62
|
+
from pathlib import Path
|
|
63
|
+
from cda.pipeline.reconstruct import decompress_vfs
|
|
64
|
+
from cda.kernel.pmf_kernel import PMFKernel, PMFKernelError
|
|
65
|
+
|
|
66
|
+
import click
|
|
67
|
+
|
|
68
|
+
# Package-relative paths
PACKAGE_DIR = Path(__file__).resolve().parent
# NOTE(review): three parents above cda/ui/ lands outside the package itself;
# presumably the Ark checkout/install root — confirm this holds for wheel installs.
ARK_DIR = PACKAGE_DIR.parent.parent.parent
# All runtime state (database, pidfiles, logs, queue) lives under <ark>/local.
LOCAL_DIR = ARK_DIR / "local"
# Main SQLite database produced by the ingest/reconstruct pipeline.
DB_PATH = LOCAL_DIR / "data" / "cda.db"
# Pidfile for the background watcher daemon (checked by `cda status`).
PID_FILE = LOCAL_DIR / "run" / "watcher.pid"
# Pidfile and log for the background web UI service.
UI_PID_FILE = LOCAL_DIR / "run" / "ui.pid"
UI_LOG_FILE = LOCAL_DIR / "logs" / "ui.log"
# Pipeline entry-point scripts, run as subprocesses by sync / embed build.
WATCHER = PACKAGE_DIR.parent / "pipeline" / "watcher.py"
INGEST = PACKAGE_DIR.parent / "pipeline" / "ingest.py"
RECON = PACKAGE_DIR.parent / "pipeline" / "reconstruct.py"
EXTRACT = PACKAGE_DIR.parent / "pipeline" / "extract.py"
EMBED = PACKAGE_DIR.parent / "pipeline" / "embed.py"

# Shared PMF kernel instance used by every service-management command below.
kernel = PMFKernel()
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
# ─────────────────────────────────────────────
|
|
86
|
+
# ANSI colors (no dep)
|
|
87
|
+
# ─────────────────────────────────────────────
|
|
88
|
+
|
|
89
|
+
class C:
    """ANSI escape codes for terminal styling (no third-party dependency)."""
    RESET = "\033[0m"
    BOLD = "\033[1m"
    DIM = "\033[2m"
    RED = "\033[91m"
    GREEN = "\033[92m"
    YELLOW = "\033[93m"
    BLUE = "\033[94m"
    MAGENTA = "\033[95m"
    CYAN = "\033[96m"
    WHITE = "\033[97m"


def _paint(code, s):
    """Wrap *s* in the given ANSI *code* followed by a reset."""
    return f"{code}{s}{C.RESET}"


def bold(s):
    return _paint(C.BOLD, s)


def dim(s):
    return _paint(C.DIM, s)


def green(s):
    return _paint(C.GREEN, s)


def yellow(s):
    return _paint(C.YELLOW, s)


def red(s):
    return _paint(C.RED, s)


def cyan(s):
    return _paint(C.CYAN, s)


def magenta(s):
    return _paint(C.MAGENTA, s)


def blue(s):
    return _paint(C.BLUE, s)


def hr(char="─", width=80):
    """Return a dim horizontal rule made of *width* repetitions of *char*."""
    return dim(char * width)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def fmt_ts(iso_or_ms):
|
|
116
|
+
"""Format ISO timestamp or ms-since-epoch to readable local time."""
|
|
117
|
+
if not iso_or_ms:
|
|
118
|
+
return dim("—")
|
|
119
|
+
try:
|
|
120
|
+
if isinstance(iso_or_ms, (int, float)):
|
|
121
|
+
dt = datetime.datetime.fromtimestamp(iso_or_ms / 1000)
|
|
122
|
+
else:
|
|
123
|
+
dt = datetime.datetime.fromisoformat(str(iso_or_ms).replace("Z", "+00:00"))
|
|
124
|
+
dt = dt.astimezone()
|
|
125
|
+
return dt.strftime("%Y-%m-%d %H:%M:%S")
|
|
126
|
+
except Exception:
|
|
127
|
+
return str(iso_or_ms)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def fmt_size(n):
    """Human-readable byte count: B, whole KB, or MB with one decimal."""
    if n is None:
        return "—"
    if n < 1024:
        return f"{n}B"
    if n < 1024 ** 2:
        return f"{n // 1024}KB"
    return f"{n / 1024 / 1024:.1f}MB"
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def truncate(s, n=80):
    """Collapse newlines, strip, and clip to *n* chars with a trailing ellipsis."""
    flat = (s or "").replace("\n", " ").strip()
    if len(flat) > n:
        return flat[:n] + "…"
    return flat
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def table(rows, headers, widths=None):
    """Print a simple aligned table.

    rows: sequence of row tuples, one element per header.
    headers: column titles.
    widths: optional fixed column widths; auto-computed from content when None.
    """
    if not rows:
        click.echo(dim(" (no results)"))
        return
    # Auto widths
    cols = len(headers)
    if widths is None:
        # Each column is as wide as its widest cell or its header, whichever is larger.
        widths = [max(len(str(headers[i])), max(len(str(r[i])) for r in rows)) for i in range(cols)]
    header_line = " " + " ".join(bold(str(headers[i]).ljust(widths[i])) for i in range(cols))
    click.echo(header_line)
    # Rule under the header: one run of box-drawing dashes per column.
    click.echo(" " + dim(" ".join("─" * widths[i] for i in range(cols))))
    for row in rows:
        click.echo(" " + " ".join(str(row[i]).ljust(widths[i]) for i in range(cols)))
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
# ─────────────────────────────────────────────
|
|
159
|
+
# DB
|
|
160
|
+
# ─────────────────────────────────────────────
|
|
161
|
+
|
|
162
|
+
def db():
    """Open the Ark SQLite database with tuned pragmas, or exit if absent."""
    if not DB_PATH.exists():
        click.echo(red(f"DB not found: {DB_PATH}"))
        click.echo(f"Run: {bold('cda sync')} to initialize")
        sys.exit(1)
    conn = sqlite3.connect(str(DB_PATH), timeout=10)
    conn.row_factory = sqlite3.Row
    # Read-heavy CLI workload: WAL + relaxed sync + generous cache/mmap.
    for pragma in (
        "PRAGMA journal_mode=WAL",
        "PRAGMA synchronous=NORMAL",
        "PRAGMA cache_size=-2000",
        "PRAGMA mmap_size=268435456",
        "PRAGMA temp_store=MEMORY",
    ):
        conn.execute(pragma)
    return conn
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def short_id(s, n=8):
    """Return the first *n* characters of *s*; '' when *s* is falsy."""
    if not s:
        return ""
    return s[:n]
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def _decode_vfs_text(blob):
    """Best-effort conversion of a stored VFS blob into a text string.

    Attempts decompression via decompress_vfs, falling back to the raw
    blob; then decodes as UTF-8, then latin-1, and finally returns ''.
    """
    if not blob:
        return ""
    try:
        payload = decompress_vfs(blob)
    except Exception:
        # Blob may already be stored uncompressed.
        payload = blob
    if isinstance(payload, str):
        return payload
    for codec in ("utf-8", "latin-1"):
        try:
            return payload.decode(codec)
        except Exception:
            pass
    return ""
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def _code_search_snippet(text, match, radius=80):
|
|
199
|
+
start = max(0, match.start() - radius)
|
|
200
|
+
end = min(len(text), match.end() + radius)
|
|
201
|
+
snippet = text[start:end].replace("\n", " ")
|
|
202
|
+
return snippet.strip()
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def import_embed_module():
    """Import (and reload) the semantic-embedding pipeline module.

    Returns:
        The freshly reloaded ``cda.pipeline.embed`` module.

    Raises:
        RuntimeError: if the module or its heavy optional dependencies
            (e.g. sentence-transformers) are unavailable.
    """
    try:
        import importlib
        # BUG FIX: the embed module lives at cda.pipeline.embed (see the
        # EMBED path constant and the package layout); "cda.embed" does
        # not exist, so the old import always failed.
        import cda.pipeline.embed as embed
        # Reload so a long-lived CLI process picks up on-disk changes.
        return importlib.reload(embed)
    except Exception as exc:
        raise RuntimeError(
            "Semantic intelligence requires the embed module and its dependencies. "
            "Install sentence-transformers and retry. "
            f"Details: {exc}"
        ) from exc
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
# ─────────────────────────────────────────────
|
|
219
|
+
# CLI root
|
|
220
|
+
# ─────────────────────────────────────────────
|
|
221
|
+
|
|
222
|
+
class CDAGroup(click.Group):
    """Click group that renders a banner-style help screen instead of click's default."""

    def format_help(self, ctx, formatter):
        rule = "=" * 80
        click.echo(rule)
        click.echo(" CDA \u2014 Code Direct Ask")
        click.echo(rule)
        click.echo(" System : cda")
        click.echo(f" Runtime : {DB_PATH}")
        click.echo(" Status : active\n")
        click.echo("Usage:")

        # Gather (name, one-line summary) for every visible subcommand.
        entries = []
        for name in self.list_commands(ctx):
            command = self.get_command(ctx, name)
            if command is None or command.hidden:
                continue
            entries.append((name, command.get_short_help_str(80) or ""))

        if entries:
            width = max(len(name) for name, _ in entries)
            for name, summary in entries:
                click.echo(f" cda {name.ljust(width)} {summary}")
            click.echo("")
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
@click.group(cls=CDAGroup, invoke_without_command=True)
@click.pass_context
@click.version_option("1.0.0", prog_name="cda")
def cli(ctx):
    """Root command group; shows the banner help when run without a subcommand."""
    if ctx.invoked_subcommand is not None:
        return
    click.echo(ctx.get_help())
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
# ─────────────────────────────────────────────
|
|
255
|
+
# STATS
|
|
256
|
+
# ─────────────────────────────────────────────
|
|
257
|
+
|
|
258
|
+
@cli.command()
def stats():
    """System-wide stats and coverage summary."""
    # One connection reused for every count below; closed at the end.
    conn = db()
    click.echo()
    click.echo(bold(" Code Data Ark ") + dim(str(DB_PATH)))
    click.echo(hr())

    # (table_name, human label) pairs. Missing tables are skipped silently so
    # the command still works on a partially-built database.
    tables = [
        ("workspaces", "Registered workspaces"),
        ("sessions", "Total sessions"),
        ("exchanges", "Reconstructed exchanges"),
        ("tool_calls", "Indexed tool calls"),
        ("edit_sessions", "Edit sessions parsed"),
        ("edited_files", "Edited files tracked"),
        ("transcript_events", "Transcript events"),
        ("chat_messages", "Chat messages"),
        ("vfs", "VFS blobs"),
        ("state_items", "state.vscdb items"),
        ("memory_files", "Memory files"),
        ("embeddings", "Semantic embeddings"),
        ("session_summaries", "Session summaries"),
        ("anomaly_alerts", "Anomaly alerts"),
        ("recommendations", "Recommendations"),
    ]
    for tbl, label in tables:
        try:
            # Table names come from the fixed list above, not user input, so
            # the f-string interpolation into SQL is safe here.
            n = conn.execute(f"SELECT COUNT(*) FROM {tbl}").fetchone()[0]
            click.echo(f" {label:<30} {bold(str(n)):>10}")
        except Exception:
            pass

    click.echo()
    # VFS breakdown
    click.echo(bold(" VFS by type:"))
    for r in conn.execute("SELECT source_type, COUNT(*) n, SUM(size_bytes) total FROM vfs GROUP BY source_type ORDER BY total DESC").fetchall():
        click.echo(f" {r['source_type']:<22} {r['n']:>6} files {fmt_size(r['total']):>10} raw")

    click.echo()
    # Coverage
    # How many sessions have each artefact type captured.
    has_t = conn.execute("SELECT COUNT(*) FROM session_storage WHERE has_transcript=1").fetchone()[0]
    has_c = conn.execute("SELECT COUNT(*) FROM session_storage WHERE has_chat_session=1").fetchone()[0]
    has_e = conn.execute("SELECT COUNT(*) FROM session_storage WHERE has_edit_session=1").fetchone()[0]
    has_to = conn.execute("SELECT COUNT(*) FROM session_storage WHERE has_tool_outputs=1").fetchone()[0]
    click.echo(bold(" Session coverage:"))
    click.echo(f" Transcripts: {has_t}")
    click.echo(f" Chat sessions: {has_c}")
    click.echo(f" Edit sessions: {has_e}")
    click.echo(f" Tool outputs: {has_to}")

    db_size = DB_PATH.stat().st_size
    click.echo()
    click.echo(f" DB size: {bold(fmt_size(db_size))}")
    click.echo()
    conn.close()
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
# ─────────────────────────────────────────────
|
|
316
|
+
# STATUS (watcher)
|
|
317
|
+
# ─────────────────────────────────────────────
|
|
318
|
+
|
|
319
|
+
@cli.command()
def status():
    """Watcher daemon status and queue information."""
    # BUG FIX: removed leftover debug output
    # (`print("STATUS COMMAND CALLED")`) that polluted every invocation.
    click.echo()
    if PID_FILE.exists():
        pid = PID_FILE.read_text().strip()
        try:
            # Signal 0 probes for process existence without delivering anything.
            os.kill(int(pid), 0)
            click.echo(f" Watcher: {green('RUNNING')} pid={bold(pid)}")
        except PermissionError:
            # BUG FIX: EPERM means the process exists but belongs to another
            # user — it IS running; previously this exception propagated.
            click.echo(f" Watcher: {green('RUNNING')} pid={bold(pid)}")
        except (ProcessLookupError, ValueError):
            click.echo(f" Watcher: {red('DEAD')} (stale pid file: {pid})")
    else:
        click.echo(f" Watcher: {yellow('STOPPED')}")
        click.echo(f" Start with: {bold('cda watch start')}")

    # Queue status
    queue_dir = LOCAL_DIR / "queue"
    if queue_dir.exists():
        pending = len(list(queue_dir.glob("*.json")))
        completed = len(list(queue_dir.glob("*.completed")))
        click.echo(f" Queue: {pending} pending, {completed} completed")
        if pending > 0:
            # Show last pending operation
            pending_files = sorted(queue_dir.glob("*.json"))
            if pending_files:
                try:
                    data = json.loads(pending_files[-1].read_text())
                    click.echo(f" Last pending: {data.get('type', 'unknown')} at {fmt_ts(data.get('timestamp'))}")
                except Exception:
                    # Best-effort peek; a corrupt queue entry is not fatal here.
                    pass
    else:
        click.echo(f" Queue: {dim('not initialized')}")

    # Last activity from file_offsets
    try:
        conn = db()
        row = conn.execute("SELECT MAX(updated_at) FROM file_offsets").fetchone()
        if row and row[0]:
            click.echo(f" Last offset update: {fmt_ts(row[0])}")
        row2 = conn.execute("SELECT MAX(ingested_at) FROM transcript_events").fetchone()
        if row2 and row2[0]:
            click.echo(f" Last event ingested: {fmt_ts(row2[0])}")
        conn.close()
    except Exception:
        # The DB may not exist yet; status should still render.
        pass
    click.echo()
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
@cli.command("serve")
@click.option("--host", default="127.0.0.1", show_default=True, help="Local host to bind the web UI")
@click.option("--port", default=10001, show_default=True, help="Local port for the web UI")
def serve(host, port):
    """Start the local web UI for Code Data Ark in the foreground."""
    click.echo(yellow(f" Starting local web UI at http://{host}:{port}"))
    click.echo(yellow(" Use `cda ui start` to launch it as a background service."))
    try:
        import importlib
        import cda.ui.web as web
        # Reload so an already-imported module picks up on-disk changes.
        web = importlib.reload(web)
    except Exception as exc:
        click.echo(red(" Failed to start web UI. Ensure the package is installed and importable."))
        click.echo(red(f" Details: {exc}"))
        return
    web.start_server(host=host, port=port)
|
|
384
|
+
|
|
385
|
+
|
|
386
|
+
@cli.group()
def ui():
    """Manage the Code Data Ark web UI as a background service."""
    # Container group only; subcommands do the work.
|
|
390
|
+
|
|
391
|
+
|
|
392
|
+
def _ui_is_running():
    """Return (running, pid) for the background web UI process.

    Reads the UI pidfile and probes the process with signal 0.

    Returns:
        (True, pid) when the process exists, else (False, None) — also for
        a missing pidfile or an unparseable pid.
    """
    if not UI_PID_FILE.exists():
        return False, None
    pid = UI_PID_FILE.read_text().strip()
    try:
        os.kill(int(pid), 0)
        return True, int(pid)
    except PermissionError:
        # BUG FIX: EPERM means the process exists but is owned by another
        # user — it is running; previously this exception propagated.
        return True, int(pid)
    except (ProcessLookupError, ValueError):
        return False, None
|
|
401
|
+
|
|
402
|
+
|
|
403
|
+
@ui.command("start")
@click.option("--host", default="127.0.0.1", show_default=True, help="Local host to bind the web UI")
@click.option("--port", default=10001, show_default=True, help="Local port for the web UI")
def ui_start(host, port):
    """Start the web UI as a background service."""
    try:
        result = kernel.start_service("ui", options={"host": host, "port": port})
    except PMFKernelError as exc:
        click.echo(red(f" Failed to start UI: {exc}"))
        return
    click.echo(green(f" Web UI started in background at http://{host}:{port} pid={result['pid']}"))
    click.echo(yellow(f" Logs: {UI_LOG_FILE}"))
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
@ui.command("stop")
def ui_stop():
    """Stop the background web UI service."""
    try:
        result = kernel.stop_service("ui")
    except PMFKernelError as exc:
        click.echo(yellow(f" {exc}"))
        return
    click.echo(green(f" Stopped web UI pid={result['pid'] or 'unknown'}"))
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
@ui.command("status")
def ui_status():
    """Show whether the background web UI is running."""
    try:
        info = kernel.service_status("ui")
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    if info["status"] != "running":
        click.echo(yellow(" Web UI is not running."))
        click.echo(" Start it with: cda ui start")
        return
    click.echo(green(f" Web UI is running pid={info['pid']}"))
    click.echo(f" Log: {info['log_file']}")
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
@ui.command("restart")
def ui_restart():
    """Restart the background web UI service."""
    try:
        kernel.restart_service("ui")
    except PMFKernelError as exc:
        click.echo(red(f" Failed to restart UI: {exc}"))
    else:
        click.echo(green(" Web UI restarted."))
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
@cli.group()
def pmf():
    """Manage the embedded PMF kernel and Ark runtime services."""
    # Container group only; subcommands do the work.
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
@pmf.command("services")
def pmf_services():
    """List embedded PMF services and runtime status."""
    # Query the kernel before emitting any output, matching original ordering.
    rows = kernel.services()
    click.echo()
    click.echo(bold(" PMF Runtime Services"))
    click.echo(hr())
    for service in rows:
        state = service["status"]
        painted = green(state) if state == "running" else yellow(state)
        click.echo(f" {bold(service['label']):<20} {painted:<10} pid={service['pid'] or '—'}")
        click.echo(f" {service['description']}")
    click.echo()
|
|
469
|
+
|
|
470
|
+
|
|
471
|
+
@pmf.command("status")
@click.argument("service_id", required=False)
def pmf_status(service_id):
    """Show PMF runtime status for one or all services."""
    if not service_id:
        # BUG FIX: pmf_services is a click Command object; calling it directly
        # re-enters click's main() (re-parses sys.argv and raises SystemExit).
        # Invoke the underlying Python callback instead.
        pmf_services.callback()
        return
    try:
        service = kernel.service_status(service_id)
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    click.echo()
    click.echo(bold(f" {service['label']}"))
    click.echo(f" Status: {service['status']}")
    click.echo(f" PID: {service['pid'] or '—'}")
    click.echo(f" Started: {service['started_at'] or '—'}")
    click.echo(f" Log: {service['log_file'] or '—'}")
    click.echo()
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
@pmf.command("start")
@click.argument("service_id")
@click.option("--host", default="127.0.0.1", help="Host override for UI service")
@click.option("--port", default=10001, help="Port override for UI service")
def pmf_start(service_id, host, port):
    """Start a PMF-managed Ark service."""
    # Host/port overrides only apply to the web UI service.
    options = None
    if service_id == "ui":
        options = {"host": host, "port": port}
    try:
        result = kernel.start_service(service_id, options=options)
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    click.echo(green(f" Started {result['label']} pid={result['pid']}"))
|
|
503
|
+
|
|
504
|
+
|
|
505
|
+
@pmf.command("stop")
@click.argument("service_id")
def pmf_stop(service_id):
    """Stop a PMF-managed Ark service."""
    try:
        result = kernel.stop_service(service_id)
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    click.echo(green(f" Stopped {result['label']}"))
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
@pmf.command("restart")
@click.argument("service_id")
def pmf_restart(service_id):
    """Restart a PMF-managed Ark service."""
    try:
        result = kernel.restart_service(service_id)
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    click.echo(green(f" Restarted {result['label']} pid={result['pid']}"))
|
|
525
|
+
|
|
526
|
+
|
|
527
|
+
@pmf.command("logs")
@click.argument("service_id")
@click.option("--tail", default=50, show_default=True, help="Lines to tail from the log file")
def pmf_logs(service_id, tail):
    """Display the last lines from a PMF service log."""
    try:
        output = kernel.tail_log(service_id, lines=tail)
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    click.echo(output)
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
@cli.group()
def embed():
    """Build and inspect semantic intelligence."""
    # Container group only; subcommands do the work.
|
|
543
|
+
|
|
544
|
+
|
|
545
|
+
@embed.command("build")
def embed_build():
    """Build semantic embeddings and session intelligence."""
    click.echo(yellow(" Building semantic intelligence..."))
    # Run the pipeline script in-process-group so its output streams through.
    proc = subprocess.run([sys.executable, str(EMBED)], capture_output=False)
    if proc.returncode == 0:
        click.echo(green(" Embed build complete"))
    else:
        click.echo(red(" Embed build failed"))
|
|
554
|
+
|
|
555
|
+
|
|
556
|
+
@cli.command("semantic-search")
@click.argument("query")
@click.option("--limit", default=5, show_default=True, help="Maximum results")
def semantic_search(query, limit):
    """Semantic search using embeddings."""
    try:
        embed = import_embed_module()
    except RuntimeError as exc:
        click.echo(red(str(exc)))
        return
    conn = db()
    try:
        # FIX: close the connection even if the search raises (was leaked).
        results = embed.semantic_search(conn, query, top_k=limit)
    finally:
        conn.close()
    if not results:
        click.echo(dim(" No semantic results found."))
        return
    click.echo(bold(f" Top {len(results)} semantic matches:"))
    for idx, (row, score) in enumerate(results, 1):
        click.echo(f" {idx}. [{row['entity_type']}] {row['entity_id'][:16]} score={score:.4f}")
        click.echo(f" {truncate(row['content_text'], 140)}")
|
|
576
|
+
|
|
577
|
+
|
|
578
|
+
def _show_similar(session_id, limit):
    """Print up to *limit* sessions semantically similar to *session_id*.

    Shared implementation behind the `similar` and `related` commands.
    """
    try:
        embed = import_embed_module()
    except RuntimeError as exc:
        click.echo(red(str(exc)))
        return
    conn = db()
    try:
        # FIX: release the connection even if the lookup raises (was leaked).
        results = embed.find_similar_entities(conn, "session", session_id, top_k=limit)
    finally:
        conn.close()
    if not results:
        click.echo(dim(" No similar sessions found."))
        return
    click.echo(bold(f" Similar sessions to {session_id[:16]}:"))
    for idx, (row, score) in enumerate(results, 1):
        click.echo(f" {idx}. session={row['session_id'][:16]} agent={row['entity_type']} score={score:.4f}")
        click.echo(f" {truncate(row['content_text'], 140)}")
|
|
594
|
+
|
|
595
|
+
|
|
596
|
+
@cli.command("similar")
@click.argument("session_id")
@click.option("--limit", default=5, show_default=True, help="Maximum similar sessions")
def similar(session_id, limit):
    """Find sessions similar to a given session."""
    # Thin wrapper: all work happens in _show_similar (shared with `related`).
    _show_similar(session_id, limit)
|
|
602
|
+
|
|
603
|
+
|
|
604
|
+
@cli.command("related")
@click.argument("session_id")
@click.option("--limit", default=5, show_default=True, help="Maximum related sessions")
def related(session_id, limit):
    """Alias for finding sessions related by semantic similarity."""
    # Thin wrapper: identical behavior to `cda similar`.
    _show_similar(session_id, limit)
|
|
610
|
+
|
|
611
|
+
|
|
612
|
+
@cli.command("summarize")
@click.argument("session_id")
def summarize(session_id):
    """Show session summary, topic tags, and recommendations."""
    try:
        embed = import_embed_module()
    except RuntimeError as exc:
        click.echo(red(str(exc)))
        return
    conn = db()
    try:
        # FIX: close the connection even if any lookup raises (was leaked).
        summary = embed.get_session_summary(conn, session_id)
        alerts = embed.get_session_alerts(conn, session_id)
        recs = embed.get_session_recommendations(conn, session_id)
    finally:
        conn.close()
    if not summary:
        click.echo(red(" No summary available. Run cda embed build first."))
        return
    click.echo(bold(" Summary:"))
    click.echo(f" {summary['summary_text']}")
    click.echo(bold(" Topics:"))
    click.echo(f" {summary['topic_tags'] or dim('none')}")
    if alerts:
        click.echo(bold(" Alerts:"))
        for a in alerts:
            click.echo(f" [{a['severity']}] {a['message']}")
    if recs:
        click.echo(bold(" Recommendations:"))
        for r in recs:
            click.echo(f" - {r['recommendation_text']}")
|
|
641
|
+
|
|
642
|
+
|
|
643
|
+
@cli.command("topics")
@click.option("--limit", default=20, show_default=True, help="Maximum topic tags to show")
def topics(limit):
    """Show semantic topic tags."""
    try:
        embed = import_embed_module()
    except RuntimeError as exc:
        click.echo(red(str(exc)))
        return
    conn = db()
    try:
        # FIX: close the connection even if the query raises (was leaked).
        # Also renamed the local (was `topics`) to stop shadowing this command.
        tag_counts = embed.get_topic_counts(conn, limit)
    finally:
        conn.close()
    if not tag_counts:
        click.echo(dim(" No topic tags available. Run cda embed build first."))
        return
    click.echo(bold(" Topic tags:"))
    for tag, count in tag_counts:
        click.echo(f" {tag:<18} {count}")
|
|
661
|
+
|
|
662
|
+
|
|
663
|
+
@cli.command("alerts")
@click.argument("session_id")
def alerts(session_id):
    """Show semantic anomaly alerts for a session."""
    try:
        embed = import_embed_module()
    except RuntimeError as exc:
        click.echo(red(str(exc)))
        return
    conn = db()
    try:
        # FIX: close the connection even if the lookup raises (was leaked).
        # Renamed the local (was `alerts`) to stop shadowing this command.
        session_alerts = embed.get_session_alerts(conn, session_id)
    finally:
        conn.close()
    if not session_alerts:
        click.echo(dim(" No alerts found."))
        return
    click.echo(bold(" Alerts:"))
    for a in session_alerts:
        click.echo(f" [{a['severity']}] {a['message']}")
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
@cli.command("recommend")
@click.argument("session_id")
def recommend(session_id):
    """Show session recommendations."""
    try:
        embed = import_embed_module()
    except RuntimeError as exc:
        click.echo(red(str(exc)))
        return
    conn = db()
    try:
        # FIX: close the connection even if the lookup raises (was leaked).
        recs = embed.get_session_recommendations(conn, session_id)
    finally:
        conn.close()
    if not recs:
        click.echo(dim(" No recommendations found."))
        return
    click.echo(bold(" Recommendations:"))
    for r in recs:
        click.echo(f" - {r['recommendation_text']}")
|
|
701
|
+
|
|
702
|
+
|
|
703
|
+
# ─────────────────────────────────────────────
|
|
704
|
+
# WATCH
|
|
705
|
+
# ─────────────────────────────────────────────
|
|
706
|
+
|
|
707
|
+
@cli.group()
def watch():
    """Manage the live watcher daemon."""
    # Container group only; subcommands do the work.
|
|
711
|
+
|
|
712
|
+
|
|
713
|
+
@watch.command("start")
def watch_start():
    """Start the live sync watcher daemon."""
    try:
        result = kernel.start_service("watcher")
    except PMFKernelError as exc:
        click.echo(red(f" {exc}"))
        return
    click.echo(green(f" Watcher started pid={result['pid']}"))
|
|
721
|
+
|
|
722
|
+
|
|
723
|
+
@watch.command("stop")
def watch_stop():
    """Stop the live sync watcher daemon."""
    try:
        kernel.stop_service("watcher")
    except PMFKernelError as exc:
        click.echo(yellow(f" {exc}"))
    else:
        click.echo(green(" Watcher stopped"))
|
|
731
|
+
|
|
732
|
+
|
|
733
|
+
@watch.command("restart")
def watch_restart():
    """Restart the watcher daemon."""
    try:
        result = kernel.restart_service("watcher")
    except PMFKernelError as exc:
        click.echo(red(f" Failed to restart watcher: {exc}"))
        return
    click.echo(green(f" Watcher restarted pid={result['pid']}"))
|
|
741
|
+
|
|
742
|
+
|
|
743
|
+
# ─────────────────────────────────────────────
|
|
744
|
+
# SYNC / RECONSTRUCT
|
|
745
|
+
# ─────────────────────────────────────────────
|
|
746
|
+
|
|
747
|
+
@cli.command()
def sync():
    """Full re-ingest from disk (rebuilds entire DB)."""
    from cda.kernel.control_db import start_run, finish_run, log_event

    run_id = start_run(trigger="manual")
    stages_done = []
    errors = 0

    # The first three stages are mandatory and sequential: a failure aborts
    # the run immediately. Each entry: (start banner, script path, stage key,
    # success banner, failure banner, finish_run failure note).
    required_stages = [
        (" Running full ingest — this rewrites the DB...", INGEST, "ingest",
         " Ingest complete", " Ingest failed", "ingest failed"),
        (" Running reconstruction...", RECON, "reconstruct",
         " Reconstruction complete", " Reconstruction failed", "reconstruct failed"),
        (" Running analysis...", EXTRACT, "extract",
         " Analysis complete", " Analysis failed", "extract failed"),
    ]
    for start_msg, script, stage, ok_msg, fail_msg, note in required_stages:
        click.echo(yellow(start_msg))
        if not _run_stage(script):
            click.echo(red(fail_msg))
            finish_run(run_id, stages_done, {}, errors=1, exit_code=1, notes=note)
            return
        stages_done.append(stage)
        click.echo(green(ok_msg))

    # Embedding is best-effort: its failure is counted but does not abort the run.
    click.echo(yellow(" Running semantic intelligence..."))
    if _run_stage(EMBED):
        stages_done.append("embed")
    else:
        click.echo(red(" Semantic intelligence failed"))
        errors += 1

    counts = _collect_counts()
    finish_run(run_id, stages_done, counts, errors=errors, exit_code=0)
    log_event("sync.complete", detail=f"sessions={counts.get('sessions')}, exchanges={counts.get('exchanges')}")
    click.echo(green(" Done"))


def _run_stage(script):
    """Run one pipeline script as a subprocess; return True on exit code 0.

    Output is inherited (not captured) so the stage's own progress is visible.
    """
    result = subprocess.run([sys.executable, str(script)], capture_output=False)
    return result.returncode == 0


def _collect_counts():
    """Best-effort final row counts from cda.db for the run record.

    Returns an empty dict if the DB cannot be read — the run record is still
    written, just without counts.
    """
    counts = {}
    try:
        _conn = sqlite3.connect(DB_PATH)
        for key, table in (("sessions", "sessions"), ("exchanges", "exchanges"),
                           ("tool_calls", "tool_calls"), ("vfs_files", "vfs")):
            counts[key] = _conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0]
        _conn.close()
    except Exception:
        pass
    return counts
|
|
806
|
+
|
|
807
|
+
|
|
808
|
+
@cli.command()
def reconstruct():
    """Re-run session reconstruction and FTS rebuild only."""
    # Single-stage shortcut for when a full `cda sync` would be overkill.
    click.echo(yellow(" Reconstructing exchanges..."))
    cmd = [sys.executable, str(RECON)]
    subprocess.run(cmd, capture_output=False)
    click.echo(green(" Done"))
|
|
814
|
+
|
|
815
|
+
|
|
816
|
+
# ─────────────────────────────────────────────
|
|
817
|
+
# WORKSPACES
|
|
818
|
+
# ─────────────────────────────────────────────
|
|
819
|
+
|
|
820
|
+
@cli.command()
def workspaces():
    """List all registered workspaces."""
    conn = db()
    # Busiest workspaces first.
    rows = conn.execute(
        "SELECT workspace_id, name, type, session_count, uri FROM workspaces ORDER BY session_count DESC"
    ).fetchall()
    conn.close()
    click.echo()
    click.echo(bold(f" {len(rows)} workspaces"))
    click.echo(hr())
    for r in rows:
        # Dim the count when the workspace has no sessions yet.
        sessions_label = green(str(r['session_count'])) if r['session_count'] > 0 else dim("0")
        click.echo(
            f" {cyan(r['workspace_id'][:16])} "
            f"{bold(truncate(r['name'] or '?', 30)):<32} "
            f"{dim(r['type'] or '?'):<10} "
            f"{sessions_label} sessions"
        )
        # Second line: the workspace URI, truncated to one screen width.
        click.echo(f" {dim(truncate(r['uri'] or '', 70))}")
    click.echo()
|
|
841
|
+
|
|
842
|
+
|
|
843
|
+
@cli.command()
@click.argument("workspace_id")
def workspace(workspace_id):
    """Show all sessions for a workspace (partial ID ok)."""
    conn = db()
    # LEFT JOIN so sessions without a session_storage row still appear
    # (their storage flags simply render blank).
    rows = conn.execute(
        """SELECT s.session_id, s.title, s.created_at, s.last_message_at,
                  s.request_count, ss.has_transcript, ss.has_chat_session, ss.has_tool_outputs
           FROM sessions s
           LEFT JOIN session_storage ss USING(session_id)
           WHERE s.workspace_id LIKE ?
           ORDER BY s.last_message_at DESC""",
        (f"{workspace_id}%",)
    ).fetchall()
    conn.close()
    click.echo()
    click.echo(bold(f" {len(rows)} sessions in workspace {cyan(workspace_id[:16])}"))
    click.echo(hr())
    for r in rows:
        # Storage flag legend: T=transcript, C=chat session, O=tool outputs.
        flags = ""
        if r['has_transcript']: flags += green("T")
        if r['has_chat_session']: flags += cyan("C")
        if r['has_tool_outputs']: flags += yellow("O")
        click.echo(
            f" {cyan(r['session_id'][:16])} "
            f"{bold(truncate(r['title'] or 'untitled', 42)):<44} "
            f"{fmt_ts(r['last_message_at'])} "
            f"{dim(str(r['request_count'] or 0)+' evts')}"
            f" [{flags}]"
        )
    click.echo()
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
# ─────────────────────────────────────────────
|
|
877
|
+
# SESSIONS
|
|
878
|
+
# ─────────────────────────────────────────────
|
|
879
|
+
|
|
880
|
+
@cli.command()
@click.option("--workspace", "-w", default=None, help="Filter by workspace ID prefix")
@click.option("--limit", "-n", default=50, help="Max results")
def sessions(workspace, limit):
    """List sessions, newest first."""
    # Build one query instead of duplicating the whole SELECT in two branches;
    # the optional workspace prefix filter just adds a WHERE clause.
    sql = ("SELECT s.session_id, s.workspace_id, s.title, s.created_at,"
           " s.last_message_at, s.request_count FROM sessions s")
    params = []
    if workspace:
        sql += " WHERE s.workspace_id LIKE ?"
        params.append(f"{workspace}%")
    sql += " ORDER BY s.last_message_at DESC LIMIT ?"
    params.append(limit)

    conn = db()
    rows = conn.execute(sql, params).fetchall()
    conn.close()

    click.echo()
    click.echo(bold(f" {len(rows)} sessions"))
    click.echo(hr())
    for r in rows:
        click.echo(
            f" {cyan(r['session_id'][:16])} "
            f"{dim(r['workspace_id'][:8])} "
            f"{bold(truncate(r['title'] or 'untitled', 44)):<46} "
            f"{fmt_ts(r['last_message_at'])}"
        )
    click.echo()
|
|
911
|
+
|
|
912
|
+
|
|
913
|
+
# ─────────────────────────────────────────────
|
|
914
|
+
# SESSION (detail)
|
|
915
|
+
# ─────────────────────────────────────────────
|
|
916
|
+
|
|
917
|
+
@cli.command()
@click.argument("session_id")
def session(session_id):
    """Show all exchanges in a session."""
    conn = db()
    # Prefix match lets the user paste a truncated ID from a listing.
    meta = conn.execute(
        "SELECT * FROM sessions WHERE session_id LIKE ?", (f"{session_id}%",)
    ).fetchone()
    if not meta:
        click.echo(red(f" Session not found: {session_id}"))
        conn.close()
        return

    # Use the full, resolved session id from here on.
    sid = meta['session_id']
    click.echo()
    click.echo(bold(f" Session: {cyan(sid[:16])}"))
    click.echo(f" Title: {bold(meta['title'] or 'untitled')}")
    click.echo(f" Created: {fmt_ts(meta['created_at'])} Last msg: {fmt_ts(meta['last_message_at'])}")
    click.echo(hr())

    rows = conn.execute(
        """SELECT exchange_index, user_ts, user_message, reasoning_text, response_text,
                  tool_call_count, has_tool_output
           FROM exchanges WHERE session_id=? ORDER BY exchange_index""",
        (sid,)
    ).fetchall()
    conn.close()

    for r in rows:
        tc = r['tool_call_count'] or 0
        has_out = r['has_tool_output']
        # Green when tool output was captured; yellow when calls exist without output.
        tc_label = (green if has_out else yellow)(f" [{tc} tools]") if tc > 0 else ""
        # Byte lengths give a quick sense of how heavy each exchange is.
        r_len = len(r['reasoning_text'] or "")
        resp_len = len(r['response_text'] or "")
        idx_str = bold(f"[{r['exchange_index']:>2}]")
        dim_str = dim(f"reason:{r_len}b resp:{resp_len}b")
        click.echo(
            f" {idx_str} "
            f"{fmt_ts(r['user_ts'])} "
            f"{tc_label} "
            f"{dim_str}"
        )
        msg = truncate(r['user_message'] or "", 80)
        if msg:
            click.echo(f" {cyan('>')} {msg}")
    click.echo()
    click.echo(dim(f" Use: cda exchange {sid[:16]} <index> to view full exchange"))
    click.echo()
|
|
965
|
+
|
|
966
|
+
|
|
967
|
+
# ─────────────────────────────────────────────
|
|
968
|
+
# EXCHANGE (full detail)
|
|
969
|
+
# ─────────────────────────────────────────────
|
|
970
|
+
|
|
971
|
+
@cli.command()
@click.argument("session_id")
@click.argument("index", type=int)
@click.option("--tool-outputs", "-t", is_flag=True, help="Include full tool output content")
@click.option("--reasoning", "-r", is_flag=True, help="Include reasoning text")
def exchange(session_id, index, tool_outputs, reasoning):
    """Show one full exchange with all tool calls."""
    conn = db()
    # Prefix match on the session id; exact match on the exchange index.
    row = conn.execute(
        """SELECT * FROM exchanges WHERE session_id LIKE ? AND exchange_index=?""",
        (f"{session_id}%", index)
    ).fetchone()
    if not row:
        click.echo(red(f" Exchange [{index}] not found in session {session_id}"))
        conn.close()
        return

    click.echo()
    click.echo(bold(f" Exchange [{index}] — {cyan(row['session_id'][:16])}"))
    click.echo(f" {fmt_ts(row['user_ts'])}")
    click.echo(hr())

    # User message
    click.echo(bold(f"\n {cyan('USER')}"))
    for line in (row['user_message'] or "").splitlines():
        click.echo(f" {line}")

    # Reasoning (opt-in via --reasoning), wrapped at 90 columns.
    if reasoning and row['reasoning_text']:
        click.echo(bold(f"\n {magenta('REASONING')}"))
        for line in textwrap.wrap(row['reasoning_text'], 90):
            click.echo(f" {dim(line)}")

    # Tool calls are stored as a JSON array in the `tool_calls` column.
    if row['tool_call_count']:
        click.echo(bold(f"\n {yellow('TOOL CALLS')} ({row['tool_call_count']})"))
        try:
            calls = json.loads(row['tool_calls'] or "[]")
        except Exception:
            # Corrupt/missing JSON: keep the header, skip the detail.
            calls = []
        for i, tc in enumerate(calls):
            click.echo(f"\n {bold(f'[{i}]')} {yellow(tc.get('name', '?'))} {dim(tc.get('toolCallId', '')[:24])}")
            args = tc.get('arguments', {})
            if isinstance(args, dict):
                for k, v in args.items():
                    v_str = truncate(str(v), 100)
                    click.echo(f" {cyan(k)}: {v_str}")
            elif args:
                # Arguments may be a raw non-dict value (e.g. unparsed string).
                click.echo(f" {truncate(str(args), 100)}")
            success = tc.get('success')
            if success is not None:
                label = green("✓") if success else red("✗")
                click.echo(f" {label} success={success}")
            # Full output only with --tool-outputs, capped at 2000 characters.
            if tool_outputs and tc.get('output'):
                click.echo(f" {bold('output:')}")
                for line in (tc['output'] or "")[:2000].splitlines():
                    click.echo(f" {dim(line)}")

    # Assistant response
    if row['response_text']:
        click.echo(bold(f"\n {green('ASSISTANT')}"))
        for line in (row['response_text'] or "").splitlines():
            click.echo(f" {line}")

    click.echo()
    conn.close()
|
|
1037
|
+
|
|
1038
|
+
|
|
1039
|
+
# ─────────────────────────────────────────────
|
|
1040
|
+
# REPLAY
|
|
1041
|
+
# ─────────────────────────────────────────────
|
|
1042
|
+
|
|
1043
|
+
@cli.command()
@click.argument("session_id")
@click.option("--reasoning", "-r", is_flag=True, help="Include reasoning text")
def replay(session_id, reasoning):
    """Print a full session as a readable conversation."""
    conn = db()
    # Prefix match resolves a truncated id to the full session row.
    meta = conn.execute(
        "SELECT * FROM sessions WHERE session_id LIKE ?", (f"{session_id}%",)
    ).fetchone()
    if not meta:
        click.echo(red(f" Session not found: {session_id}"))
        conn.close()
        return

    sid = meta['session_id']
    rows = conn.execute(
        "SELECT * FROM exchanges WHERE session_id=? ORDER BY exchange_index",
        (sid,)
    ).fetchall()
    conn.close()

    click.echo()
    click.echo(bold(f" ═══ {meta['title'] or 'Session'} ═══"))
    click.echo(dim(f" {sid} · {fmt_ts(meta['created_at'])}"))
    click.echo()

    for r in rows:
        if r['user_message']:
            click.echo(f"{cyan(bold(' YOU'))} {dim(fmt_ts(r['user_ts']))}")
            click.echo()
            for line in (r['user_message'] or "").splitlines():
                click.echo(f" {line}")
            click.echo()

        # Reasoning is opt-in, truncated to 500 chars and wrapped at 88 cols.
        if reasoning and r['reasoning_text']:
            click.echo(f"{magenta(bold(' [thinking]'))}")
            for line in textwrap.wrap(r['reasoning_text'][:500], 88):
                click.echo(f" {dim(line)}")
            click.echo()

        # Tool usage is summarized as the first five tool names plus a count.
        if r['tool_call_count']:
            try:
                calls = json.loads(r['tool_calls'] or "[]")
            except Exception:
                calls = []
            names = ", ".join(tc.get('name', '?') for tc in calls[:5])
            more = f" +{len(calls)-5}" if len(calls) > 5 else ""
            click.echo(f" {yellow(bold(' ⚙'))} {dim(f'tools: {names}{more}')}")
            click.echo()

        if r['response_text']:
            click.echo(f"{green(bold(' CDA'))}")
            click.echo()
            for line in (r['response_text'] or "").splitlines():
                click.echo(f" {line}")
            click.echo()

        # Horizontal rule between exchanges.
        click.echo(dim(f" {'─' * 76}"))
        click.echo()
|
|
1102
|
+
|
|
1103
|
+
|
|
1104
|
+
# ─────────────────────────────────────────────
|
|
1105
|
+
# SEARCH
|
|
1106
|
+
# ─────────────────────────────────────────────
|
|
1107
|
+
|
|
1108
|
+
@cli.command()
@click.argument("query")
@click.option("--session", "-s", default=None, help="Limit to session ID prefix")
@click.option("--workspace", "-w", default=None, help="Limit to workspace ID prefix")
@click.option("--limit", "-n", default=20, help="Max results")
@click.option("--full", "-f", is_flag=True, help="Show full response text, not snippet")
def search(query, session, workspace, limit, full):
    """Full-text search across all exchanges."""
    conn = db()
    # Hyphens are FTS5 operators; quote the query so e.g. "foo-bar" matches literally.
    fts_query = f'"{query}"' if '-' in query and not query.startswith('"') else query
    # ANSI bright-yellow on/off used to highlight matches inside the snippet.
    hl_open = "\033[93m"
    hl_close = "\033[0m"
    try:
        # Bind the snippet() highlight markers as SQL parameters instead of
        # f-string-interpolating them into the statement text.
        sql = (
            "SELECT e.session_id, e.workspace_id, e.exchange_index, e.user_ts,"
            " e.user_message, e.response_text, e.tool_call_count,"
            " snippet(fts_exchanges, 4, ?, ?, '...', 20) AS snip"
            " FROM fts_exchanges"
            " JOIN exchanges e ON e.id = fts_exchanges.rowid"
            " WHERE fts_exchanges MATCH ?"
        )
        params = [hl_open, hl_close, fts_query]
        if session:
            sql += " AND e.session_id LIKE ?"
            params.append(f"{session}%")
        if workspace:
            sql += " AND e.workspace_id LIKE ?"
            params.append(f"{workspace}%")
        sql += " ORDER BY rank LIMIT ?"
        params.append(limit)
        rows = conn.execute(sql, params).fetchall()
    except Exception as e:
        # FTS syntax errors (unbalanced quotes etc.) surface here.
        click.echo(red(f" Search error: {e}"))
        conn.close()
        return

    conn.close()
    click.echo()
    click.echo(bold(f" {len(rows)} results for {cyan(repr(query))}"))
    click.echo(hr())

    # Apply the allow/deny policy over the combined user+response text.
    filtered_rows = [
        r for r in rows
        if check_policy((r['user_message'] or '') + ' ' + (r['response_text'] or ''))
    ]

    if len(filtered_rows) != len(rows):
        click.echo(yellow(f" Policy filtered: {len(rows) - len(filtered_rows)} results hidden"))
        click.echo()

    for r in filtered_rows:
        tc_label = yellow(f"[{r['tool_call_count']} tools] ") if r['tool_call_count'] else ""
        idx_str = bold(f"[{r['exchange_index']:>2}]")
        click.echo(
            f"\n {cyan(r['session_id'][:16])} "
            f"{dim(r['workspace_id'][:8])} "
            f"{idx_str} "
            f"{fmt_ts(r['user_ts'])} {tc_label}"
        )
        if r['user_message']:
            click.echo(f" {cyan('Q:')} {truncate(r['user_message'], 80)}")
        if full and r['response_text']:
            click.echo(f" {green('A:')} {truncate(r['response_text'], 120)}")
        else:
            click.echo(f" {dim(r['snip'])}")
        # Hint for drilling into the full exchange.
        click.echo(f" {dim('cda exchange ' + r['session_id'][:16] + ' ' + str(r['exchange_index']))}")

    click.echo()
|
|
1179
|
+
|
|
1180
|
+
|
|
1181
|
+
# ─────────────────────────────────────────────
|
|
1182
|
+
# TOOLS SEARCH
|
|
1183
|
+
# ─────────────────────────────────────────────
|
|
1184
|
+
|
|
1185
|
+
@cli.command()
@click.argument("query", default="")
@click.option("--limit", "-n", default=30)
@click.option("--top", is_flag=True, help="Show top tools by call count")
def tools(query, limit, top):
    """Search tool calls table. No query = show top tools by frequency."""
    conn = db()
    click.echo()

    # Frequency view: explicit --top, or no query supplied.
    if top or not query:
        rows = conn.execute(
            """SELECT tool_name, COUNT(*) n, COUNT(DISTINCT session_id) sessions,
                      SUM(has_output) with_output
               FROM tool_calls GROUP BY tool_name ORDER BY n DESC LIMIT ?""",
            (limit,)
        ).fetchall()
        conn.close()
        click.echo(bold(f" Tool call frequency (top {len(rows)})"))
        click.echo(hr())
        for r in rows:
            click.echo(
                f" {yellow(r['tool_name']):<45} "
                f"{bold(str(r['n'])):>6} calls "
                f"{r['sessions']:>4} sessions "
                f"{dim(str(r['with_output'])+' w/output')}"
            )
        click.echo()
        return

    # Detail view: substring match on tool name, touched file path, or raw
    # JSON arguments.
    q = f'%{query}%'
    rows = conn.execute(
        """SELECT session_id, exchange_index, tool_name, file_path, arguments_json, has_output
           FROM tool_calls
           WHERE tool_name LIKE ? OR file_path LIKE ? OR arguments_json LIKE ?
           ORDER BY session_id, exchange_index LIMIT ?""",
        (q, q, q, limit)
    ).fetchall()
    conn.close()

    click.echo(bold(f" {len(rows)} tool calls matching {cyan(repr(query))}"))
    click.echo(hr())
    for r in rows:
        fp = dim(f" → {r['file_path']}") if r['file_path'] else ''
        out = green(" [out]") if r['has_output'] else ''
        click.echo(
            f" {dim(r['session_id'][:16])} [{r['exchange_index']:>2}] "
            f"{yellow(r['tool_name'])}{out}{fp}"
        )
    click.echo()
|
|
1234
|
+
|
|
1235
|
+
|
|
1236
|
+
# ─────────────────────────────────────────────
|
|
1237
|
+
# EDITS
|
|
1238
|
+
# ─────────────────────────────────────────────
|
|
1239
|
+
|
|
1240
|
+
@cli.command()
@click.option("--session", "-s", default=None, help="Show edits for a specific session")
@click.option("--file", "-f", "file_query", default=None, help="Filter by file path substring")
@click.option("--changed-only", is_flag=True, help="Show only sessions with modifications")
@click.option("--limit", "-n", default=30)
def edits(session, file_query, changed_only, limit):
    """Show edit session analytics (files modified per session)."""
    conn = db()
    click.echo()

    if session:
        # Detail view for one session (exact session_id match, no prefix).
        row = conn.execute(
            "SELECT * FROM edit_sessions WHERE session_id=?", (session,)
        ).fetchone()
        if not row:
            click.echo(red(f" No edit session for: {session}"))
            conn.close()
            return
        click.echo(bold(f" Edit session: {cyan(session[:16])}"))
        click.echo(hr())
        click.echo(f" Total files: {row['total_files']}")
        click.echo(f" Modified files: {bold(str(row['modified_files']))}")
        click.echo(f" Edit rounds: {row['edit_rounds']}")
        click.echo()
        files = conn.execute(
            "SELECT file_path, language_id, was_modified FROM edited_files WHERE session_id=? ORDER BY was_modified DESC, file_path",
            (session,)
        ).fetchall()
        for f in files:
            # ✓ marks files actually changed; · marks files only touched/read.
            marker = green(" ✓ ") if f['was_modified'] else dim(" · ")
            click.echo(f"{marker}{f['file_path']} {dim(f['language_id'] or '')}")
        click.echo()
        conn.close()
        return

    if file_query:
        # Cross-session view: every edited-file record whose path contains the substring.
        rows = conn.execute(
            """SELECT ef.session_id, ef.file_path, ef.language_id, ef.was_modified
               FROM edited_files ef WHERE ef.file_path LIKE ?
               ORDER BY ef.was_modified DESC, ef.file_path LIMIT ?""",
            (f'%{file_query}%', limit)
        ).fetchall()
        conn.close()
        click.echo(bold(f" {len(rows)} file records matching {cyan(repr(file_query))}"))
        click.echo(hr())
        for r in rows:
            marker = green(" ✓ ") if r['was_modified'] else dim(" · ")
            click.echo(f"{marker}{dim(r['session_id'][:16])} {r['file_path']} {dim(r['language_id'] or '')}")
        click.echo()
        return

    # Summary view
    sql = "SELECT session_id, total_files, modified_files, edit_rounds FROM edit_sessions"
    if changed_only:
        sql += " WHERE modified_files > 0"
    sql += " ORDER BY modified_files DESC, total_files DESC LIMIT ?"
    rows = conn.execute(sql, (limit,)).fetchall()

    # Headline totals are computed over ALL edit sessions, not just this page.
    total_mod = conn.execute("SELECT SUM(modified_files) FROM edit_sessions").fetchone()[0] or 0
    total_sess = conn.execute("SELECT COUNT(*) FROM edit_sessions").fetchone()[0]
    with_changes = conn.execute("SELECT COUNT(*) FROM edit_sessions WHERE modified_files>0").fetchone()[0]
    conn.close()

    click.echo(bold(f" {total_sess} edit sessions | {with_changes} with changes | {total_mod} total modified files"))
    click.echo(hr())
    for r in rows:
        mod_label = bold(green(str(r['modified_files']))) if r['modified_files'] else dim("0")
        click.echo(
            f" {cyan(r['session_id'][:16])} "
            f"files={r['total_files']:>4} "
            f"modified={mod_label} "
            f"rounds={dim(str(r['edit_rounds']))}"
        )
    click.echo()
|
|
1315
|
+
|
|
1316
|
+
|
|
1317
|
+
# ─────────────────────────────────────────────
|
|
1318
|
+
# MEMORY
|
|
1319
|
+
# ─────────────────────────────────────────────
|
|
1320
|
+
|
|
1321
|
+
@cli.command()
@click.option("--scope", default=None, help="Filter: global | workspace | session | repo")
@click.option("--cat", default=None, help="Print content of file by filename")
def memory(scope, cat):
    """Show all memory files (global + workspace)."""
    conn = db()

    if cat:
        # Content view: first file whose name contains the given substring.
        # NOTE(review): if several filenames match, only the first row is shown.
        row = conn.execute(
            "SELECT scope, workspace_id, filename, content FROM memory_files WHERE filename LIKE ?",
            (f"%{cat}%",)
        ).fetchone()
        conn.close()
        if not row:
            click.echo(red(f" No memory file matching: {cat}"))
            return
        click.echo()
        click.echo(bold(f" [{row['scope']}] {row['filename']} {dim(row['workspace_id'] or '')}"))
        click.echo(hr())
        click.echo(row['content'])
        return

    # Listing view, optionally restricted to one scope.
    sql = "SELECT scope, workspace_id, filename, size_bytes, ingested_at FROM memory_files"
    params = []
    if scope:
        sql += " WHERE scope=?"
        params.append(scope)
    sql += " ORDER BY scope, filename"
    rows = conn.execute(sql, params).fetchall()
    conn.close()

    click.echo()
    click.echo(bold(f" {len(rows)} memory files"))
    click.echo(hr())
    for r in rows:
        # Global scope renders cyan; any other scope renders green.
        scope_label = cyan(r['scope']) if r['scope'] == 'global' else green(r['scope'])
        click.echo(
            f" [{scope_label}] "
            f"{bold(r['filename']):<40} "
            f"{fmt_size(r['size_bytes']):>8} "
            f"{dim(r['workspace_id'] or '(global)')}"
        )
    click.echo()
|
|
1364
|
+
|
|
1365
|
+
|
|
1366
|
+
# ─────────────────────────────────────────────
|
|
1367
|
+
# VFS
|
|
1368
|
+
# ─────────────────────────────────────────────
|
|
1369
|
+
|
|
1370
|
+
@cli.group()
def vfs():
    """VFS blob storage operations."""
    # Container group only; see the ls/cat/types subcommands.
    pass
|
|
1374
|
+
|
|
1375
|
+
|
|
1376
|
+
@vfs.command("ls")
@click.argument("session_id")
def vfs_ls(session_id):
    """List VFS blobs for a session."""
    # Partial session IDs are accepted via a prefix LIKE match.
    conn = db()
    blobs = conn.execute(
        """SELECT id, source_type, filename, size_bytes, sha256, ingested_at
           FROM vfs WHERE session_id LIKE ? ORDER BY source_type, filename""",
        (f"{session_id}%",)
    ).fetchall()
    conn.close()

    click.echo()
    click.echo(bold(f" {len(blobs)} VFS blobs for {cyan(session_id[:16])}"))
    click.echo(hr())
    for blob in blobs:
        line = (
            f" {bold(str(blob['id'])):>8} "
            f"{yellow(blob['source_type']):<18} "
            f"{blob['filename']:<35} "
            f"{fmt_size(blob['size_bytes']):>10} "
            f"{dim(blob['sha256'])}"
        )
        click.echo(line)
    click.echo()
|
|
1399
|
+
|
|
1400
|
+
|
|
1401
|
+
@vfs.command("cat")
@click.argument("vfs_id", type=int)
@click.option("--raw", "-r", is_flag=True, help="Print raw bytes (hex)")
@click.option("--lines", "-n", default=0, type=int, help="Limit output lines")
def vfs_cat(vfs_id, raw, lines):
    """Print decompressed content of a VFS blob."""
    conn = db()
    row = conn.execute(
        "SELECT source_type, filename, content, size_bytes FROM vfs WHERE id=?", (vfs_id,)
    ).fetchone()
    conn.close()
    if not row:
        click.echo(red(f" VFS blob {vfs_id} not found"))
        return

    # Blobs are normally gzip-compressed; fall back to the stored bytes as-is.
    blob = row['content']
    try:
        data = gzip.decompress(blob)
    except Exception:
        data = blob

    click.echo()
    click.echo(bold(f" VFS {vfs_id} {yellow(row['source_type'])} {row['filename']} {fmt_size(row['size_bytes'])}"))
    click.echo(hr())

    if raw:
        click.echo(data.hex())
        return

    text = data.decode('utf-8', errors='replace')
    if lines > 0:
        # Keep only the first N lines when --lines is given.
        text = "\n".join(text.splitlines()[:lines])
    click.echo(text)
|
|
1433
|
+
|
|
1434
|
+
|
|
1435
|
+
@vfs.command("types")
def vfs_types():
    """Summary of VFS blob types and sizes."""
    conn = db()
    summary = conn.execute(
        "SELECT source_type, COUNT(*) n, SUM(size_bytes) total, SUM(LENGTH(content)) compressed FROM vfs GROUP BY source_type ORDER BY total DESC"
    ).fetchall()
    conn.close()

    click.echo()
    click.echo(bold(" VFS storage summary"))
    click.echo(hr())
    for row in summary:
        # Stored-vs-raw ratio as a percentage; guard against an empty group.
        pct = (row['compressed'] / row['total'] * 100) if row['total'] else 0
        click.echo(
            f" {yellow(row['source_type']):<22} "
            f"{row['n']:>6} blobs "
            f"{fmt_size(row['total']):>10} raw "
            f"{fmt_size(row['compressed']):>10} stored "
            f"{dim(f'{pct:.0f}% ratio')}"
        )
    click.echo()
|
|
1456
|
+
|
|
1457
|
+
|
|
1458
|
+
# ─────────────────────────────────────────────
|
|
1459
|
+
# POLICY
|
|
1460
|
+
# ─────────────────────────────────────────────
|
|
1461
|
+
|
|
1462
|
+
@cli.group()
def policy():
    """Manage data access policies."""
    # Container group only; see the allow/deny/list subcommands.
    pass
|
|
1466
|
+
|
|
1467
|
+
|
|
1468
|
+
@policy.command("allow")
@click.argument("pattern")
def policy_allow(pattern):
    """Add an allow pattern for search results."""
    # Patterns are appended one per line to a plain text file.
    policy_file = LOCAL_DIR / "config" / "policy.txt"
    try:
        # Fix: create the config directory on first use — open(..., "a")
        # raises FileNotFoundError if the parent does not exist.
        policy_file.parent.mkdir(parents=True, exist_ok=True)
        with open(policy_file, "a") as f:
            f.write(f"ALLOW {pattern}\n")
        click.echo(green(f" Added allow pattern: {pattern}"))
    except Exception as e:
        click.echo(red(f" Error: {e}"))
|
|
1480
|
+
|
|
1481
|
+
|
|
1482
|
+
@policy.command("deny")
@click.argument("pattern")
def policy_deny(pattern):
    """Add a deny pattern for search results."""
    policy_file = LOCAL_DIR / "config" / "policy.txt"
    try:
        # Fix: create the config directory on first use — open(..., "a")
        # raises FileNotFoundError if the parent does not exist.
        policy_file.parent.mkdir(parents=True, exist_ok=True)
        with open(policy_file, "a") as f:
            f.write(f"DENY {pattern}\n")
        click.echo(green(f" Added deny pattern: {pattern}"))
    except Exception as e:
        click.echo(red(f" Error: {e}"))
|
|
1493
|
+
|
|
1494
|
+
|
|
1495
|
+
@policy.command("list")
def policy_list():
    """List current policies."""
    policy_file = LOCAL_DIR / "config" / "policy.txt"
    if not policy_file.exists():
        click.echo(dim(" No policies configured"))
        return

    click.echo()
    click.echo(bold(" Data Access Policies"))
    click.echo(hr())
    try:
        # Lines that are neither ALLOW nor DENY are silently ignored.
        with open(policy_file, "r") as f:
            for raw in f:
                entry = raw.strip()
                if entry.startswith("ALLOW "):
                    click.echo(green(f" ALLOW {entry[6:]}"))
                elif entry.startswith("DENY "):
                    click.echo(red(f" DENY {entry[5:]}"))
    except Exception as e:
        click.echo(red(f" Error reading policies: {e}"))
    click.echo()
|
|
1517
|
+
|
|
1518
|
+
|
|
1519
|
+
def check_policy(text):
    """Return True when *text* passes the ALLOW/DENY policy filters.

    Rules are read from LOCAL_DIR/config/policy.txt on every call.
    DENY patterns win over ALLOW patterns; with no policy file, no
    ALLOW rules, or on any read error the text is allowed.
    """
    policy_file = LOCAL_DIR / "config" / "policy.txt"
    if not policy_file.exists():
        return True  # no policies configured -> allow everything

    try:
        allowed, denied = [], []
        with open(policy_file, "r") as fh:
            for raw in fh:
                entry = raw.strip()
                if entry.startswith("ALLOW "):
                    allowed.append(entry[6:])
                elif entry.startswith("DENY "):
                    denied.append(entry[5:])

        # Deny rules take precedence: any match blocks the text.
        if any(pat in text for pat in denied):
            return False

        # Without an allow-list, surviving the deny check is enough.
        if not allowed:
            return True

        # With an allow-list, at least one pattern must match.
        return any(pat in text for pat in allowed)
    except Exception:
        return True  # fail open if the policy file is unreadable
|
|
1555
|
+
|
|
1556
|
+
|
|
1557
|
+
# ─────────────────────────────────────────────
|
|
1558
|
+
@cli.command()
@click.argument("pattern")
@click.option("--symbol", "-s", is_flag=True, help="Search symbols only")
@click.option("--path", "-p", type=str, help="Filter by file path pattern")
@click.option("--regex", "-r", is_flag=True, help="Treat pattern as regex")
@click.option("--workspace", "-w", type=str, help="Filter by workspace ID")
@click.option("--limit", "-l", type=int, default=50, help="Max results")
def code_search(pattern, symbol, path, regex, workspace, limit):
    """Search code symbols and content using AST-indexed data.

    With --symbol the `symbols` table is queried; otherwise decoded VFS
    blobs (edits and memory files) are scanned for the pattern.
    """
    conn = db()

    # If symbol search, use symbols table
    if symbol:
        try:
            query = "SELECT file_path, symbol_name, symbol_type, line_number, context FROM symbols WHERE 1=1"
            params = []

            if workspace:
                query += " AND workspace_id LIKE ?"
                params.append(f"{workspace}%")

            # NOTE: plain SQLite has no regex operator, so both --regex and
            # substring mode use the same LIKE match on symbol names.
            # (The original had two identical branches here; merged.)
            query += " AND symbol_name LIKE ?"
            params.append(f"%{pattern}%")

            if path:
                query += " AND file_path LIKE ?"
                params.append(f"%{path}%")

            query += f" ORDER BY symbol_name LIMIT {limit}"

            rows = conn.execute(query, params).fetchall()
        except sqlite3.OperationalError as e:
            if "no such table" in str(e):
                click.echo(yellow(" Symbols table not yet created. Run 'python extract.py' to initialize."))
                conn.close()
                return
            raise
        conn.close()

        if not rows:
            click.echo(dim(f" No symbols found matching '{pattern}'"))
            return

        click.echo()
        click.echo(bold(f" Code symbols ({len(rows)} results)"))
        click.echo(hr())
        for r in rows:
            click.echo(f" {cyan(r['symbol_type']):<10} {bold(r['symbol_name']):<30} {dim(r['file_path'])}:{r['line_number']}")
            if r['context']:
                click.echo(f" {dim(truncate(r['context'], 100))}")
        click.echo()

    else:
        import re

        query = "SELECT workspace_id, source_path, source_type, content_type, content, size_bytes FROM vfs WHERE source_type IN ('edit_content','edit_state','memory_workspace','memory_global')"  # noqa: E501
        params = []
        if workspace:
            query += " AND workspace_id LIKE ?"
            params.append(f"{workspace}%")
        if path:
            query += " AND source_path LIKE ?"
            params.append(f"%{path}%")

        rows = conn.execute(query, params).fetchall()
        results = []
        lower_pattern = pattern.lower() if not regex else None
        compiled = None
        if regex:
            try:
                compiled = re.compile(pattern, re.IGNORECASE)
            except re.error:
                click.echo(red(" Invalid regex pattern."))
                conn.close()
                return

        for workspace_id, source_path, source_type, content_type, content_blob, size_bytes in rows:
            text = _decode_vfs_text(content_blob)
            if not text:
                continue
            if regex:
                m = compiled.search(text)
                if not m:
                    continue
                snippet = _code_search_snippet(text, m)
            else:
                # Fix: single case-insensitive scan (the original did a
                # membership test plus find, scanning the text twice, and
                # computed an unused re.search result — dead code removed).
                idx = text.lower().find(lower_pattern)
                if idx < 0:
                    continue
                # Show ~80 chars of context on either side of the match.
                start = max(0, idx - 80)
                end = min(len(text), idx + len(pattern) + 80)
                snippet = text[start:end].replace("\n", " ").strip()

            results.append((workspace_id, source_path, source_type, snippet))
            if len(results) >= limit:
                break

        conn.close()

        if not results:
            click.echo(dim(f" No code content found matching '{pattern}'"))
            return

        click.echo()
        click.echo(bold(f" Code content results ({len(results)} results)"))
        click.echo(hr())
        for workspace_id, source_path, source_type, snippet in results:
            click.echo(f" {bold(source_path)} {dim(source_type)} {dim(short_id(workspace_id, 10))}")
            click.echo(f" {dim(truncate(snippet, 180))}")
        click.echo()
|
|
1674
|
+
|
|
1675
|
+
|
|
1676
|
+
# ─────────────────────────────────────────────
|
|
1677
|
+
# EXPORT
|
|
1678
|
+
# ─────────────────────────────────────────────
|
|
1679
|
+
|
|
1680
|
+
@cli.command()
@click.argument("session_id")
@click.option("--output", "-o", default=None, help="Output file path (default: stdout)")
@click.option("--format", "-f", "fmt", type=click.Choice(["json", "jsonl", "text"]), default="json")
def export(session_id, output, fmt):
    """Export a session as JSON, JSONL, or text.

    SESSION_ID may be a prefix; the first matching session row is used.
    """
    conn = db()
    # Prefix match lets users pass a short id.
    meta = conn.execute(
        "SELECT * FROM sessions WHERE session_id LIKE ?", (f"{session_id}%",)
    ).fetchone()
    if not meta:
        click.echo(red(f" Session not found: {session_id}"))
        conn.close()
        return

    sid = meta['session_id']
    rows = conn.execute(
        "SELECT * FROM exchanges WHERE session_id=? ORDER BY exchange_index", (sid,)
    ).fetchall()

    # Parse tool_calls JSON; malformed/NULL columns degrade to empty lists
    # rather than aborting the export.
    exchanges = []
    for r in rows:
        d = dict(r)
        try:
            d['tool_calls'] = json.loads(d['tool_calls'] or "[]")
        except Exception:
            d['tool_calls'] = []
        try:
            d['attachments'] = json.loads(d['attachments'] or "[]")
        except Exception:
            d['attachments'] = []
        exchanges.append(d)

    conn.close()

    if fmt == "json":
        # Single document: session metadata plus the full exchange list.
        out = json.dumps({
            "session_id": sid,
            "title": meta['title'],
            "created_at": meta['created_at'],
            "workspace_id": meta['workspace_id'],
            "exchanges": exchanges
        }, indent=2)
    elif fmt == "jsonl":
        # One exchange object per line; no session-level metadata.
        out = "\n".join(json.dumps(e) for e in exchanges)
    else:
        # Plain-text transcript with a header and a rule between exchanges.
        lines = [f"SESSION: {meta['title']}", f"ID: {sid}", f"Date: {fmt_ts(meta['created_at'])}", ""]
        for ex in exchanges:
            if ex['user_message']:
                lines.append(f"YOU [{ex['exchange_index']}] {ex['user_ts']}")
                lines.append(ex['user_message'])
                lines.append("")
            if ex['response_text']:
                lines.append("CDA")
                lines.append(ex['response_text'])
                lines.append("")
            lines.append("─" * 60)
            lines.append("")
        out = "\n".join(lines)

    if output:
        Path(output).write_text(out)
        click.echo(green(f" Exported to {output}"))
    else:
        click.echo(out)
|
|
1746
|
+
|
|
1747
|
+
|
|
1748
|
+
# ─────────────────────────────────────────────
|
|
1749
|
+
# RAW SQL QUERY
|
|
1750
|
+
# ─────────────────────────────────────────────
|
|
1751
|
+
|
|
1752
|
+
@cli.command()
@click.argument("sql")
@click.option("--limit", "-n", default=50)
def query(sql, limit):
    """Run a raw SQL query against the DB."""
    # Append a LIMIT clause unless the caller supplied one anywhere in the
    # statement (a crude check, but adequate for an interactive tool).
    if "LIMIT" not in sql.upper():
        sql = sql.rstrip(";") + f" LIMIT {limit}"
    conn = db()
    try:
        rows = conn.execute(sql).fetchall()
    except Exception as exc:
        click.echo(red(f" SQL error: {exc}"))
        conn.close()
        return
    conn.close()

    if not rows:
        click.echo(dim(" (0 rows)"))
        return

    # Render a simple column-truncated table: header from the first row's
    # keys, then each value clipped to 30 chars.
    columns = rows[0].keys()
    click.echo()
    header = " ".join(str(col)[:20] for col in columns)
    click.echo(bold(" " + header))
    click.echo(hr())
    for row in rows:
        rendered = " ".join(truncate(str(row[col]), 30) for col in columns)
        click.echo(" " + rendered)
    click.echo(dim(f"\n {len(rows)} rows"))
    click.echo()
|
|
1780
|
+
|
|
1781
|
+
|
|
1782
|
+
# ─────────────────────────────────────────────
|
|
1783
|
+
# BEHAVIORAL SIGNALS
|
|
1784
|
+
# ─────────────────────────────────────────────
|
|
1785
|
+
|
|
1786
|
+
@cli.command()
@click.argument("session_id", required=False, default=None)
@click.option("--type", "-t", "sig_type", default=None,
              help="Filter: correction|redirect|affirmation|approval|question")
@click.option("--limit", "-n", default=40)
def signals(session_id, sig_type, limit):
    """Show behavioral signals extracted from sessions.

    Lists the most recent rows from exchange_signals (optionally filtered
    by session-id prefix and/or signal type), then prints per-type totals.
    """
    conn = db()
    where = []
    args = []
    if session_id:
        # Prefix match so short ids work.
        where.append("s.session_id LIKE ?")
        args.append(session_id + "%")
    if sig_type:
        where.append("s.signal_type = ?")
        args.append(sig_type)
    clause = ("WHERE " + " AND ".join(where)) if where else ""
    # NOTE: limit is an int from click, so the f-string interpolation of
    # LIMIT is safe; user values go through ? placeholders.
    sql = f"""
        SELECT s.signal_type, s.matched_keyword, s.user_message,
               s.session_id, s.ts
        FROM exchange_signals s
        {clause}
        ORDER BY s.ts DESC
        LIMIT {limit}
    """
    rows = conn.execute(sql, args).fetchall()
    conn.close()

    if not rows:
        click.echo(dim(" (no signals found)"))
        return

    click.echo()
    # Color scheme per signal type; unknown types fall back to identity.
    TYPE_COLOR = {
        'correction': red,
        'redirect': yellow,
        'affirmation': green,
        'approval': cyan,
        'question': bold,
    }
    for r in rows:
        t = r['signal_type']
        colorize = TYPE_COLOR.get(t, lambda x: x)
        label = colorize(f"[{t:<12}]")
        kw = dim(f" kw={r['matched_keyword']:<20}")
        sid_short = r['session_id'][:8]
        msg = truncate(r['user_message'] or '', 80)
        click.echo(f" {label}{kw} {dim(sid_short)} {msg}")
    click.echo()

    # Summary by type (fresh connection; the first was already closed).
    conn = db()
    by_type = conn.execute(
        f"""SELECT signal_type, COUNT(*) as n FROM exchange_signals
            {'WHERE session_id LIKE ?' if session_id else ''}
            GROUP BY signal_type ORDER BY n DESC""",
        ([session_id + "%"] if session_id else [])
    ).fetchall()
    conn.close()
    click.echo(bold(" Signal totals:"))
    for r in by_type:
        # Bar capped at 40 cells.
        bar = "█" * min(r['n'], 40)
        click.echo(f" {r['signal_type']:<14} {r['n']:>5} {dim(bar)}")
    click.echo()
|
|
1850
|
+
|
|
1851
|
+
|
|
1852
|
+
# ─────────────────────────────────────────────
|
|
1853
|
+
# COMPACTION HISTORY
|
|
1854
|
+
# ─────────────────────────────────────────────
|
|
1855
|
+
|
|
1856
|
+
@cli.command()
@click.argument("session_id", required=False, default=None)
@click.option("--full", "-f", is_flag=True, help="Show full summary text")
@click.option("--limit", "-n", default=20)
def compactions(session_id, full, limit):
    """Show context compaction events (model self-summaries).

    Optionally filtered by session-id prefix; --full also prints up to
    800 chars of each compaction's summary text.
    """
    conn = db()
    where = "WHERE session_id LIKE ?" if session_id else ""
    args = [session_id + "%"] if session_id else []
    sql = f"""
        SELECT session_id, turn_index, summary_length,
               context_length_before, num_rounds, summary_model,
               duration_ms, summary_text, ts
        FROM compactions
        {where}
        ORDER BY ts DESC
        LIMIT {limit}
    """
    rows = conn.execute(sql, args).fetchall()
    # Global count only in the unfiltered case; when a session filter is
    # given the footer line is suppressed (total stays None).
    total = conn.execute(
        f"SELECT COUNT(*) FROM compactions {where}", args
    ).fetchone()[0] if not session_id else None
    conn.close()

    if not rows:
        click.echo(dim(" (no compactions found)"))
        return

    click.echo()
    click.echo(bold(f" {'session':<10} {'turn':>4} {'ctx_before':>10} {'rounds':>6} {'model':<25} {'summary_len':>11}"))
    click.echo(hr())
    for r in rows:
        sid_short = r['session_id'][:8]
        model = truncate(r['summary_model'] or 'unknown', 25)
        # '-' when context length is missing or zero.
        ctx = f"{r['context_length_before']:,}" if r['context_length_before'] else '-'
        click.echo(
            f" {dim(sid_short):<10} {(r['turn_index'] or 0):>4} {ctx:>10} "
            f"{(r['num_rounds'] or 0):>6} {model:<25} {(r['summary_length'] or 0):>11,}"
        )
        if full and r['summary_text']:
            wrapped = textwrap.fill(r['summary_text'][:800], width=90, initial_indent=' ', subsequent_indent=' ')
            click.echo(dim(wrapped))
            click.echo()

    click.echo()
    if total:
        click.echo(dim(f" {total} total compaction events across all sessions"))
    click.echo()
|
|
1904
|
+
|
|
1905
|
+
# ─────────────────────────────────────────────
|
|
1906
|
+
# BEHAVIOR REPORT
|
|
1907
|
+
# ─────────────────────────────────────────────
|
|
1908
|
+
|
|
1909
|
+
|
|
1910
|
+
@cli.command()
@click.option("--limit", "-n", default=20, help="Top N sessions to show")
def behavior(limit):
    """Aggregate behavioral intelligence report across all sessions.

    Sections: session health, signal distribution, top correction
    keywords, model usage, and the sessions with the most corrections.
    """
    conn = db()

    # Overall signal totals
    sig_totals = conn.execute(
        "SELECT signal_type, COUNT(*) as n FROM exchange_signals GROUP BY signal_type ORDER BY n DESC"
    ).fetchall()

    # Top correction keywords
    top_kw = conn.execute(
        """SELECT matched_keyword, COUNT(*) as n FROM exchange_signals
           WHERE signal_type='correction'
           GROUP BY matched_keyword ORDER BY n DESC LIMIT 15"""
    ).fetchall()

    # Sessions with most corrections (limit is an int from click, safe to
    # interpolate into the f-string LIMIT clause).
    worst_sessions = conn.execute(
        f"""SELECT sa.session_id, sa.total_corrections, sa.total_redirects,
               sa.total_affirmations, sa.compaction_count,
               sa.total_tokens_prompt, sa.total_tokens_completion
            FROM session_analysis sa
            WHERE sa.total_corrections > 0
            ORDER BY sa.total_corrections DESC
            LIMIT {limit}"""
    ).fetchall()

    # Session health summary
    health = conn.execute(
        """SELECT
            COUNT(*) as total,
            SUM(clean_run) as clean,
            SUM(CASE WHEN total_corrections > 0 THEN 1 ELSE 0 END) as corrected,
            SUM(total_corrections) as total_corrections,
            SUM(total_affirmations) as total_affirmations,
            SUM(compaction_count) as total_compactions,
            AVG(total_tokens_prompt) as avg_prompt_tokens
           FROM session_analysis"""
    ).fetchone()

    # Most common model
    models = conn.execute(
        "SELECT model_ids, COUNT(*) as n FROM session_analysis WHERE model_ids != '' GROUP BY model_ids ORDER BY n DESC LIMIT 5"
    ).fetchall()

    conn.close()

    click.echo()
    click.echo(bold("══════════════════════════════════════════"))
    click.echo(bold(" BEHAVIORAL INTELLIGENCE REPORT"))
    click.echo(bold("══════════════════════════════════════════"))
    click.echo()

    if health and health['total']:
        click.echo(bold(" Session Health:"))
        click.echo(f" Total sessions analyzed: {health['total']}")
        # SUM() can be NULL on empty groups; `or 0` keeps the math safe.
        pct_clean = 100 * (health['clean'] or 0) / health['total']
        pct_corrected = 100 * (health['corrected'] or 0) / health['total']
        click.echo(f" Clean runs (0 corrections): {health['clean']} ({pct_clean:.0f}%)")
        click.echo(f" Sessions with corrections: {health['corrected']} ({pct_corrected:.0f}%)")
        click.echo(f" Total corrections issued: {red(str(health['total_corrections'] or 0))}")
        click.echo(f" Total affirmations: {green(str(health['total_affirmations'] or 0))}")
        click.echo(f" Total compactions: {health['total_compactions'] or 0}")
        avg_pt = health['avg_prompt_tokens']
        if avg_pt:
            click.echo(f" Avg prompt tokens/session: {avg_pt:,.0f}")
        click.echo()

    if sig_totals:
        click.echo(bold(" Signal Distribution:"))
        for r in sig_totals:
            # 1 cell per 5 signals, capped at 50 cells.
            bar = "█" * min(r['n'] // 5, 50)
            color = red if r['signal_type'] == 'correction' else (green if r['signal_type'] == 'affirmation' else dim)
            click.echo(f" {r['signal_type']:<14} {r['n']:>5} {color(bar)}")
        click.echo()

    if top_kw:
        click.echo(bold(" Top Correction Triggers (what you typed to stop me):"))
        for r in top_kw:
            click.echo(f" {r['n']:>4}× {red(repr(r['matched_keyword']))}")
        click.echo()

    if models:
        click.echo(bold(" Models Used:"))
        for r in models:
            click.echo(f" {r['n']:>4} sessions {r['model_ids']}")
        click.echo()

    if worst_sessions:
        click.echo(bold(f" Sessions With Most Corrections (top {limit}):"))
        click.echo(bold(f" {'session':<10} {'corr':>5} {'redir':>6} {'affirm':>7} {'compact':>8} {'prompt_tok':>12}"))
        click.echo(" " + "─" * 65)
        for r in worst_sessions:
            sid_short = r['session_id'][:8]
            pt = f"{r['total_tokens_prompt']:,}" if r['total_tokens_prompt'] else '-'
            click.echo(
                f" {dim(sid_short):<10} {red(str(r['total_corrections'])):>5} "
                f"{str(r['total_redirects'] or 0):>6} {green(str(r['total_affirmations'] or 0)):>7} "
                f"{str(r['compaction_count'] or 0):>8} {pt:>12}"
            )
        click.echo()
|
|
2013
|
+
|
|
2014
|
+
|
|
2015
|
+
# ─────────────────────────────────────────────
|
|
2016
|
+
# HEAT — Frustration + Pre-correction analysis
|
|
2017
|
+
# ─────────────────────────────────────────────
|
|
2018
|
+
|
|
2019
|
+
@cli.command()
@click.argument("session_id", required=False, default=None)
@click.option("--limit", "-n", default=20, help="Top N sessions")
@click.option("--signals", "show_signals", is_flag=True, default=False, help="Show raw signal messages")
def heat(session_id, limit, show_signals):
    """Frustration and pre-correction signal analysis.

    With no args: show hottest sessions ranked by heat_score.
    With SESSION_ID: drill into that session's signals.

    NOTE(review): the --signals/show_signals flag is accepted but never
    read in this body — the per-session drill-down always prints the raw
    signal messages. Confirm whether the flag should gate that output.
    """
    conn = db()

    if session_id:
        # Drill into one session (prefix match on the id).
        sid_like = session_id + "%"
        sa = conn.execute(
            """SELECT session_id, total_corrections, total_frustrations,
                      total_pre_corrections, total_redirects, heat_score
               FROM session_analysis WHERE session_id LIKE ?""",
            (sid_like,)
        ).fetchone()
        if not sa:
            click.echo(dim(f" no data for session {session_id}"))
            conn.close()
            return

        click.echo()
        click.echo(bold(f" Heat report: {dim(sa['session_id'][:16])}"))
        click.echo(f" heat_score: {_heat_bar(sa['heat_score'])}")
        click.echo(f" corrections: {red(str(sa['total_corrections'] or 0))}")
        click.echo(f" frustrations: {red(str(sa['total_frustrations'] or 0))}")
        click.echo(f" pre-corrections: {str(sa['total_pre_corrections'] or 0)}")
        click.echo(f" redirects: {str(sa['total_redirects'] or 0)}")
        click.echo()

        # Show signals grouped by type, hottest categories first.
        for sig_type in ('frustration', 'pre_correction', 'correction'):
            rows = conn.execute(
                """SELECT matched_keyword, user_message, ts
                   FROM exchange_signals
                   WHERE session_id LIKE ? AND signal_type=?
                   ORDER BY ts""",
                (sid_like, sig_type)
            ).fetchall()
            if not rows:
                continue
            color = red if sig_type in ('frustration', 'correction') else dim
            click.echo(bold(f" {sig_type.upper()} ({len(rows)}):"))
            for r in rows:
                kw = r['matched_keyword'] or ''
                msg = (r['user_message'] or '')[:120]
                click.echo(f" {color('[' + kw + ']'):<28} {dim(msg)}")
            click.echo()

    else:
        # Global hottest sessions (limit is an int from click; safe in f-string)
        rows = conn.execute(
            f"""SELECT sa.session_id, sa.heat_score,
                   sa.total_corrections, sa.total_frustrations,
                   sa.total_pre_corrections, sa.total_redirects,
                   sa.total_affirmations, sa.compaction_count
                FROM session_analysis sa
                WHERE sa.heat_score > 0
                ORDER BY sa.heat_score DESC
                LIMIT {limit}"""
        ).fetchall()

        # Global frustration keyword frequency
        top_frustration_kw = conn.execute(
            """SELECT matched_keyword, COUNT(*) as n
               FROM exchange_signals
               WHERE signal_type IN ('frustration', 'pre_correction')
               GROUP BY matched_keyword ORDER BY n DESC LIMIT 12"""
        ).fetchall()

        # Global totals
        totals = conn.execute(
            """SELECT
                SUM(total_frustrations) as tf,
                SUM(total_pre_corrections) as tpc,
                COUNT(CASE WHEN heat_score >= 20 THEN 1 END) as hot_sessions,
                COUNT(CASE WHEN heat_score >= 50 THEN 1 END) as very_hot,
                AVG(heat_score) as avg_heat
               FROM session_analysis"""
        ).fetchone()

        conn.close()

        click.echo()
        click.echo(bold("══════════════════════════════════════════"))
        click.echo(bold(" HEAT REPORT — Frustration Intelligence"))
        click.echo(bold("══════════════════════════════════════════"))
        click.echo()

        if totals:
            click.echo(bold(" Overview:"))
            click.echo(f" Total frustration signals: {red(str(int(totals['tf'] or 0)))}")
            click.echo(f" Total pre-correction signals:{str(int(totals['tpc'] or 0))}")
            click.echo(f" Sessions with heat ≥ 20: {str(int(totals['hot_sessions'] or 0))}")
            click.echo(f" Sessions with heat ≥ 50: {red(str(int(totals['very_hot'] or 0)))}")
            click.echo(f" Average heat score: {totals['avg_heat']:.1f}" if totals['avg_heat'] else "")
            click.echo()

        if top_frustration_kw:
            click.echo(bold(" Top Frustration Triggers:"))
            for r in top_frustration_kw:
                bar = "█" * min(r['n'], 30)
                click.echo(f" {r['n']:>4}× {red(repr(r['matched_keyword'])):<32} {dim(bar)}")
            click.echo()

        if rows:
            click.echo(bold(f" Hottest Sessions (top {limit}):"))
            click.echo(bold(f" {'session':<10} {'heat':>6} {'corr':>5} {'frust':>6} {'pre':>4} {'redir':>5} {'affirm':>7}"))
            click.echo(" " + "─" * 60)
            for r in rows:
                sid_short = r['session_id'][:8]
                heat_val = r['heat_score'] or 0
                # >=50 red, >=20 plain, below that dimmed.
                heat_str = red(str(heat_val)) if heat_val >= 50 else (str(heat_val) if heat_val >= 20 else dim(str(heat_val)))
                click.echo(
                    f" {dim(sid_short):<10} {heat_str:>6} "
                    f"{red(str(r['total_corrections'] or 0)):>5} "
                    f"{red(str(r['total_frustrations'] or 0)):>6} "
                    f"{str(r['total_pre_corrections'] or 0):>4} "
                    f"{str(r['total_redirects'] or 0):>5} "
                    f"{green(str(r['total_affirmations'] or 0)):>7}"
                )
            click.echo()
        # Connection already closed above in this branch.
        return

    conn.close()
|
|
2149
|
+
|
|
2150
|
+
|
|
2151
|
+
def _heat_bar(score):
|
|
2152
|
+
"""Visual heat bar for a score 0-100."""
|
|
2153
|
+
score = score or 0
|
|
2154
|
+
filled = min(score // 5, 20)
|
|
2155
|
+
bar = "█" * filled + "░" * (20 - filled)
|
|
2156
|
+
label = f"{score:>3}/100"
|
|
2157
|
+
if score >= 50:
|
|
2158
|
+
return red(f"{bar} {label}")
|
|
2159
|
+
elif score >= 20:
|
|
2160
|
+
return f"{bar} {label}"
|
|
2161
|
+
else:
|
|
2162
|
+
return dim(f"{bar} {label}")
|
|
2163
|
+
|
|
2164
|
+
|
|
2165
|
+
# ─────────────────────────────────────────────
|
|
2166
|
+
# SAVED SESSIONS
|
|
2167
|
+
# ─────────────────────────────────────────────
|
|
2168
|
+
|
|
2169
|
+
@cli.command()
@click.option("--limit", "-n", default=20, help="Top N sessions")
@click.option("--min-heat", default=25, help="Minimum peak_heat to qualify")
@click.option("--show-antidote", "show_antidote", is_flag=True, default=False,
              help="Show full turning-point message text")
def saved(limit, min_heat, show_antidote):
    """Saved sessions — heat that recovered. The Antidote catalog.

    Shows sessions where heat peaked then the session recovered with
    affirmations/approvals. The turning_point message is the exact
    correction that worked — the Antidote.
    """
    conn = db()

    # limit/min_heat are ints from click, so the f-string interpolation
    # below is safe from injection.
    rows = conn.execute(
        f"""SELECT sa.session_id, sa.heat_score, sa.peak_heat, sa.final_heat,
               sa.total_corrections, sa.total_frustrations, sa.total_pre_corrections,
               sa.total_affirmations, sa.compaction_count,
               sa.turning_point_ts, sa.turning_point_text
            FROM session_analysis sa
            WHERE sa.saved_session = 1 AND sa.peak_heat >= {min_heat}
            ORDER BY sa.peak_heat DESC
            LIMIT {limit}"""
    ).fetchall()

    # Global stats over the same qualifying set.
    stats = conn.execute(
        f"""SELECT
            COUNT(*) as total_saved,
            AVG(peak_heat) as avg_peak,
            MAX(peak_heat) as max_peak,
            SUM(total_corrections + total_frustrations + total_pre_corrections) as total_heat_signals,
            COUNT(CASE WHEN peak_heat >= 50 THEN 1 END) as very_hot_saved
           FROM session_analysis
           WHERE saved_session = 1 AND peak_heat >= {min_heat}"""
    ).fetchone()

    # Turning-point signal type breakdown — what kind of message saved them?
    # Joins on the exact turning_point_ts recorded for each saved session.
    antidote_types = conn.execute(
        """SELECT es.signal_type, COUNT(*) as n
           FROM session_analysis sa
           JOIN exchange_signals es
             ON es.session_id = sa.session_id AND es.ts = sa.turning_point_ts
           WHERE sa.saved_session = 1
           GROUP BY es.signal_type ORDER BY n DESC"""
    ).fetchall()

    # Top matched keywords at turning points
    antidote_kws = conn.execute(
        """SELECT es.matched_keyword, es.signal_type, COUNT(*) as n
           FROM session_analysis sa
           JOIN exchange_signals es
             ON es.session_id = sa.session_id AND es.ts = sa.turning_point_ts
           WHERE sa.saved_session = 1 AND es.matched_keyword IS NOT NULL
           GROUP BY es.matched_keyword ORDER BY n DESC LIMIT 15"""
    ).fetchall()

    conn.close()

    click.echo()
    click.echo(bold("══════════════════════════════════════════════"))
    click.echo(bold(" SAVED SESSIONS — The Antidote Catalog"))
    click.echo(bold("══════════════════════════════════════════════"))
    click.echo()

    if stats and stats['total_saved']:
        click.echo(bold(" Recovery Stats:"))
        click.echo(f" Saved sessions: {green(str(stats['total_saved']))}")
        click.echo(f" Very hot saved (≥50): {green(str(stats['very_hot_saved'] or 0))}")
        click.echo(f" Avg peak heat: {stats['avg_peak']:.1f}" if stats['avg_peak'] else "")
        click.echo(f" Max peak heat: {red(str(int(stats['max_peak'] or 0)))}")
        click.echo(f" Total heat signals: {str(int(stats['total_heat_signals'] or 0))}")
        click.echo()
    else:
        click.echo(dim(f" No saved sessions found with peak_heat >= {min_heat}"))
        click.echo(dim(" Try --min-heat 15 to lower the threshold"))
        return

    if antidote_types:
        click.echo(bold(" Antidote Signal Types (what kind of message saved the session):"))
        for r in antidote_types:
            bar = "█" * min(r['n'], 25)
            click.echo(f" {r['n']:>4}× {r['signal_type']:<20} {dim(bar)}")
        click.echo()

    if antidote_kws:
        click.echo(bold(" Top Antidote Keywords (the phrases that worked):"))
        for r in antidote_kws:
            click.echo(f" {r['n']:>4}× {green(repr(r['matched_keyword'])):<30} {dim(r['signal_type'])}")
        click.echo()

    if rows:
        click.echo(bold(f" Saved Sessions (top {limit}, ranked by peak heat):"))
        click.echo(bold(f" {'session':<10} {'peak':>5} {'corr':>5} {'frust':>6} {'pre':>4} {'affirm':>7} {'compact':>8}"))
        click.echo(" " + "─" * 62)
        for r in rows:
            sid_short = r['session_id'][:8]
            peak = r['peak_heat'] or 0
            peak_str = red(str(peak)) if peak >= 50 else str(peak)
            click.echo(
                f" {dim(sid_short):<10} {peak_str:>5} "
                f"{red(str(r['total_corrections'] or 0)):>5} "
                f"{red(str(r['total_frustrations'] or 0)):>6} "
                f"{str(r['total_pre_corrections'] or 0):>4} "
                f"{green(str(r['total_affirmations'] or 0)):>7} "
                f"{str(r['compaction_count'] or 0):>8}"
            )
            if show_antidote and r['turning_point_text']:
                # Show the turning-point message (the Antidote)
                msg = r['turning_point_text'][:200].replace('\n', ' ')
                click.echo(f" {bold('Antidote:')} {dim(msg)}")
                click.echo()
        click.echo()
|
|
2282
|
+
|
|
2283
|
+
|
|
2284
|
+
# ─────────────────────────────────────────────
|
|
2285
|
+
# TOKEN USAGE
|
|
2286
|
+
# ─────────────────────────────────────────────
|
|
2287
|
+
|
|
2288
|
+
@cli.command()
@click.argument("session_id", required=False, default=None)
@click.option("--limit", "-n", default=30)
def tokens(session_id, limit):
    """Show per-request token usage for a session (or aggregate summary)."""
    # SESSION_ID may be a prefix (matched with LIKE 'prefix%').  Without it,
    # the top sessions by prompt-token volume are listed plus a grand total.
    conn = db()

    if session_id:
        try:
            rows = conn.execute(
                """SELECT turn_index, prompt_tokens, output_tokens, model_id, ts
                   FROM token_usage WHERE session_id LIKE ?
                   ORDER BY turn_index LIMIT ?""",
                (session_id + "%", limit)
            ).fetchall()
        finally:
            # Close even if the query raises — don't leak the connection.
            conn.close()
        if not rows:
            click.echo(dim(" (no token data)"))
            return
        click.echo()
        click.echo(bold(f" Token usage — session {session_id[:16]}"))
        click.echo(bold(f" {'turn':>5} {'prompt':>9} {'output':>8} model"))
        click.echo(hr())
        total_p = total_o = 0
        for r in rows:
            total_p += r['prompt_tokens'] or 0
            total_o += r['output_tokens'] or 0
            model_short = (r['model_id'] or 'unknown')[:30]
            click.echo(f" {r['turn_index']:>5} {(r['prompt_tokens'] or 0):>9,} {(r['output_tokens'] or 0):>8,} {dim(model_short)}")
        click.echo(hr())
        click.echo(bold(f" {'TOTAL':>5} {total_p:>9,} {total_o:>8,}"))
        click.echo()
    else:
        # Aggregate across all sessions.
        try:
            # LIMIT is now a bound parameter (was an f-string interpolation,
            # inconsistent with the parameterized query above).
            rows = conn.execute(
                """SELECT sa.session_id, sa.total_tokens_prompt, sa.total_tokens_completion,
                          sa.compaction_count, sa.total_corrections, sa.model_ids
                   FROM session_analysis sa
                   WHERE sa.total_tokens_prompt > 0
                   ORDER BY sa.total_tokens_prompt DESC LIMIT ?""",
                (limit,)
            ).fetchall()
            totals = conn.execute(
                "SELECT SUM(total_tokens_prompt), SUM(total_tokens_completion) FROM session_analysis"
            ).fetchone()
        finally:
            conn.close()

        if not rows:
            click.echo(dim(" (no token data)"))
            return
        click.echo()
        click.echo(bold(" Top sessions by token usage:"))
        click.echo(bold(f" {'session':<10} {'prompt':>12} {'output':>9} {'compactions':>12} {'corrections':>12}"))
        click.echo(hr())
        for r in rows:
            sid_short = r['session_id'][:8]
            pt = f"{r['total_tokens_prompt']:,}" if r['total_tokens_prompt'] else '-'
            ot = f"{r['total_tokens_completion']:,}" if r['total_tokens_completion'] else '-'
            click.echo(
                f" {dim(sid_short):<10} {pt:>12} {ot:>9} "
                f"{(r['compaction_count'] or 0):>12} {(r['total_corrections'] or 0):>12}"
            )
        # SUM() returns NULL on an empty table — guard before formatting.
        if totals and totals[0]:
            click.echo(hr())
            click.echo(bold(f" {'ALL':>10} {totals[0]:>12,} {(totals[1] or 0):>9,}"))
        click.echo()
|
|
2352
|
+
|
|
2353
|
+
|
|
2354
|
+
# ─────────────────────────────────────────────
|
|
2355
|
+
# CONTROL
|
|
2356
|
+
# ─────────────────────────────────────────────
|
|
2357
|
+
|
|
2358
|
+
@cli.group("control")
def control_group():
    """Inspect and query the control plane (identity, health, runs, events)."""
    # Click group stub — subcommands (status/health/runs/events) register
    # themselves on this group below; nothing to do at group invocation.
    pass
|
|
2362
|
+
|
|
2363
|
+
|
|
2364
|
+
@control_group.command("status")
def control_status():
    """Show control DB identity snapshot."""
    from cda.kernel.control_db import CONTROL_DB
    if not CONTROL_DB.exists():
        click.echo(red(" control.db not found — run: python control/scripts/seed.py"))
        return
    conn = sqlite3.connect(CONTROL_DB)
    try:
        # Identity rows are key/value pairs; id preserves seed insertion order.
        rows = conn.execute("SELECT key, value FROM identity ORDER BY id").fetchall()
    finally:
        # Was unclosed on query failure — always release the connection.
        conn.close()
    click.echo()
    click.echo(bold(" control plane — identity"))
    click.echo(hr())
    for key, val in rows:
        click.echo(f" {cyan(key.ljust(20))} {val or dim('—')}")
    click.echo()
|
|
2380
|
+
|
|
2381
|
+
|
|
2382
|
+
@control_group.command("health")
@click.option("--tail", default=14, show_default=True, help="Show last N check runs.")
@click.option("--check", "check_name", default=None, help="Filter to a specific check name.")
def control_health(tail, check_name):
    """Show recent selfcheck history from the health table."""
    from cda.kernel.control_db import CONTROL_DB
    if not CONTROL_DB.exists():
        click.echo(red(" control.db not found"))
        return
    conn = sqlite3.connect(CONTROL_DB)
    try:
        if check_name:
            rows = conn.execute(
                "SELECT run_at, check_name, passed, message FROM health "
                "WHERE check_name=? ORDER BY id DESC LIMIT ?",
                (check_name, tail)
            ).fetchall()
        else:
            # Last N distinct run timestamps, newest first, each run's checks
            # in insertion (id) order.  Single query replaces the previous
            # N+1 loop (one SELECT per distinct run_at); SQLite allows
            # ORDER BY/LIMIT inside an IN subquery.
            rows = conn.execute(
                "SELECT run_at, check_name, passed, message FROM health "
                "WHERE run_at IN (SELECT DISTINCT run_at FROM health "
                "ORDER BY run_at DESC LIMIT ?) "
                "ORDER BY run_at DESC, id",
                (tail,)
            ).fetchall()
    finally:
        # Always release the connection, even on query error.
        conn.close()

    if not rows:
        click.echo(dim(" (no health history yet — run cda check)"))
        return

    click.echo()
    last_ts = None
    for run_at, name, passed, msg in rows:
        ts_short = run_at[:19].replace("T", " ")
        # Print a timestamp header once per run batch.
        if ts_short != last_ts:
            click.echo(bold(f"\n {ts_short}"))
            last_ts = ts_short
        icon = green("✓") if passed else red("✗")
        click.echo(f" {icon} {cyan(name.ljust(20))} {msg or ''}")
    click.echo()
|
|
2426
|
+
|
|
2427
|
+
|
|
2428
|
+
@control_group.command("runs")
@click.option("--tail", default=10, show_default=True, help="Show last N sync runs.")
def control_runs(tail):
    """Show recent sync pipeline run history."""
    from cda.kernel.control_db import CONTROL_DB
    from datetime import datetime  # hoisted: was imported inside the row loop
    if not CONTROL_DB.exists():
        click.echo(red(" control.db not found"))
        return
    conn = sqlite3.connect(CONTROL_DB)
    try:
        rows = conn.execute(
            "SELECT started_at, finished_at, trigger, stages, sessions, exchanges, "
            "tool_calls, vfs_files, errors, exit_code, notes "
            "FROM runs ORDER BY id DESC LIMIT ?",
            (tail,)
        ).fetchall()
    finally:
        # Always release the connection, even on query error.
        conn.close()

    if not rows:
        click.echo(dim(" (no sync runs recorded yet)"))
        return

    click.echo()
    click.echo(bold(f" last {len(rows)} sync run(s)"))
    click.echo(hr())
    for r in rows:
        started, finished, trigger, stages, sessions, exchanges, tc, vfs, errs, exit_c, notes = r
        duration = ""
        if started and finished:
            try:
                s = datetime.fromisoformat(started)
                f = datetime.fromisoformat(finished)
                secs = int((f - s).total_seconds())
                duration = f" {dim(str(secs) + 's')}"
            except Exception:
                # Unparseable timestamps: just omit the duration.
                pass
        status_icon = green("✓") if (exit_c == 0) else red("✗")
        # started_at can be NULL (run crashed before finalizing the row);
        # previously this raised TypeError on the slice.
        ts = (started or "")[:19].replace("T", " ")
        click.echo(
            f" {status_icon} {cyan(ts)}{duration} "
            f"sessions={bold(str(sessions or '?'))} "
            f"exchanges={bold(str(exchanges or '?'))} "
            f"stages={dim(stages or '?')}"
        )
        if notes:
            click.echo(f" {dim(notes)}")
    click.echo()
|
|
2475
|
+
|
|
2476
|
+
|
|
2477
|
+
@control_group.command("events")
@click.option("--tail", default=20, show_default=True, help="Show last N events.")
@click.option("--kind", default=None, help="Filter by event kind (e.g. sync.complete).")
def control_events(tail, kind):
    """Show the system event log."""
    from cda.kernel.control_db import CONTROL_DB
    if not CONTROL_DB.exists():
        click.echo(red(" control.db not found"))
        return
    conn = sqlite3.connect(CONTROL_DB)
    # Build one statement instead of duplicating the query in both branches;
    # the optional kind filter only changes the WHERE clause.
    sql = "SELECT occurred_at, kind, actor, subject, detail FROM events "
    params = []
    if kind:
        sql += "WHERE kind=? "
        params.append(kind)
    sql += "ORDER BY id DESC LIMIT ?"
    params.append(tail)
    try:
        rows = conn.execute(sql, params).fetchall()
    finally:
        # Always release the connection, even on query error.
        conn.close()

    if not rows:
        click.echo(dim(" (no events recorded yet)"))
        return

    click.echo()
    click.echo(bold(f" last {len(rows)} event(s)"))
    click.echo(hr())
    for occurred_at, evkind, actor, subject, detail in rows:
        ts = occurred_at[:19].replace("T", " ")
        click.echo(
            f" {cyan(ts)} {bold(evkind.ljust(20))} "
            f"{dim(actor or '')} {subject or ''} {dim(detail or '')}"
        )
    click.echo()
|
|
2513
|
+
|
|
2514
|
+
|
|
2515
|
+
# ─────────────────────────────────────────────
|
|
2516
|
+
# SELF CHECK
|
|
2517
|
+
# ─────────────────────────────────────────────
|
|
2518
|
+
|
|
2519
|
+
@cli.command("check")
@click.option("--json", "as_json", is_flag=True, help="Output results as JSON.")
@click.option("--fail-fast", is_flag=True, help="Stop at first failure.")
def check(as_json, fail_fast):
    """Run a full self-diagnostic. The system checks itself."""
    from cda.kernel.selfcheck import CHECKS
    from cda.kernel.control_db import write_health
    from datetime import datetime, timezone

    # One timestamp for the whole run so all health rows group together.
    run_at = datetime.now(timezone.utc).isoformat()

    if not as_json:
        click.echo()
        click.echo(bold(" cda self-check"))
        click.echo(hr())

    outcomes = []
    failed_names = []

    for probe in CHECKS:
        res = probe()
        outcomes.append(res)
        ok = res["passed"]
        if not ok:
            failed_names.append(res["name"])

        if not as_json:
            mark = green("✓") if ok else red("✗")
            click.echo(f" {mark} {cyan(res['name'].ljust(18))} {res['message']}")
            if not ok and res.get("details"):
                click.echo(f" {dim(str(res['details'])[:120])}")

        if fail_fast and not ok:
            break

    # Persist results to the control DB (silent — never blocks).
    write_health(outcomes, run_at=run_at)

    all_ok = not failed_names

    if as_json:
        import json as _json
        click.echo(_json.dumps({
            "passed": all_ok,
            "checks": outcomes,
        }, indent=2))
        sys.exit(0 if all_ok else 1)

    click.echo(hr())
    if all_ok:
        click.echo(f" {green(bold('All checks passed.'))}")
    else:
        click.echo(f" {red(bold(f'{len(failed_names)} check(s) failed:'))} {', '.join(failed_names)}")
    click.echo()
    sys.exit(0 if all_ok else 1)
|
|
2575
|
+
|
|
2576
|
+
|
|
2577
|
+
# ─────────────────────────────────────────────
|
|
2578
|
+
# ENTRY
|
|
2579
|
+
# ─────────────────────────────────────────────
|
|
2580
|
+
|
|
2581
|
+
def main():
    """Main entry point for the CLI (wired up via the package's console script)."""
    # Delegates straight to the click group; click handles argv parsing.
    cli()
|
|
2584
|
+
|
|
2585
|
+
|
|
2586
|
+
# Allow direct execution (e.g. `python cli.py`) in addition to the entry point.
if __name__ == "__main__":
    main()
|