@jeganwrites/claudash 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/db.py ADDED
@@ -0,0 +1,1156 @@
1
+ import sqlite3
2
+ import json
3
+ import os
4
+ import stat
5
+ import time
6
+ import re
7
+
8
+ DB_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "usage.db")
9
+
10
+
11
def _lock_db_file():
    """Restrict the SQLite file and its WAL/SHM side files to 0600.

    The DB holds plaintext claude.ai session keys and the dashboard/sync
    auth tokens — it must not be world-readable.
    """
    owner_rw = stat.S_IRUSR | stat.S_IWUSR
    for candidate in (DB_PATH, DB_PATH + "-wal", DB_PATH + "-shm"):
        if not os.path.exists(candidate):
            continue
        try:
            os.chmod(candidate, owner_rw)
        except OSError:
            # Best-effort: chmod can fail (e.g. file owned by another user).
            pass
22
+
23
+
24
def get_conn():
    """Open a connection to the usage DB, creating its data dir if needed.

    WAL journaling plus a 5s busy timeout allow concurrent readers/writers;
    rows come back as sqlite3.Row mappings. File perms are re-tightened on
    every open via _lock_db_file().
    """
    os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
    conn = sqlite3.connect(DB_PATH, timeout=30)
    conn.row_factory = sqlite3.Row
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA busy_timeout=5000"):
        conn.execute(pragma)
    _lock_db_file()
    return conn
32
+
33
+
34
+ def _column_exists(conn, table, column):
35
+ cursor = conn.execute(f"PRAGMA table_info({table})")
36
+ return any(row[1] == column for row in cursor.fetchall())
37
+
38
+
39
+ def _table_exists(conn, table):
40
+ row = conn.execute(
41
+ "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name=?", (table,)
42
+ ).fetchone()
43
+ return row[0] > 0
44
+
45
+
46
def init_db():
    """Create/upgrade the full schema, run gated data migrations, and seed
    required settings and account rows.

    Safe to call repeatedly: all DDL is IF NOT EXISTS, column additions are
    gated on _column_exists(), and the one-time account rename is gated on a
    settings flag. Opens and closes its own connection; commits once at the
    end, then re-tightens file permissions.
    """
    conn = get_conn()

    # Core tables
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS sessions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            session_id TEXT,
            timestamp INTEGER,
            project TEXT,
            account TEXT,
            model TEXT,
            input_tokens INTEGER,
            output_tokens INTEGER,
            cache_read_tokens INTEGER,
            cache_creation_tokens INTEGER,
            cost_usd REAL,
            UNIQUE(session_id, timestamp, model)
        );
        CREATE INDEX IF NOT EXISTS idx_sessions_timestamp ON sessions(timestamp);
        CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project);
        CREATE INDEX IF NOT EXISTS idx_sessions_account ON sessions(account);

        CREATE TABLE IF NOT EXISTS alerts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            created_at INTEGER,
            level TEXT,
            project TEXT,
            message TEXT,
            seen INTEGER DEFAULT 0
        );
        CREATE INDEX IF NOT EXISTS idx_alerts_created ON alerts(created_at);

        CREATE TABLE IF NOT EXISTS claude_ai_usage (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            account_label TEXT,
            timestamp INTEGER,
            tokens_used INTEGER,
            tokens_limit INTEGER,
            window_pct REAL,
            window_start INTEGER,
            window_end INTEGER,
            status TEXT DEFAULT 'ok',
            raw_json TEXT
        );
        CREATE INDEX IF NOT EXISTS idx_claude_ai_ts ON claude_ai_usage(timestamp);
        CREATE INDEX IF NOT EXISTS idx_claude_ai_account ON claude_ai_usage(account_label);
    """)

    # --- Schema migration: add new columns to sessions ---
    # ALTER TABLE ADD COLUMN fails if the column exists, hence the gate.
    for col, typedef in [
        ("source_path", "TEXT"),
        ("compaction_detected", "INTEGER DEFAULT 0"),
        ("tokens_before_compact", "INTEGER"),
        ("tokens_after_compact", "INTEGER"),
        ("is_subagent", "INTEGER DEFAULT 0"),
        ("parent_session_id", "TEXT"),
    ]:
        if not _column_exists(conn, "sessions", col):
            conn.execute(f"ALTER TABLE sessions ADD COLUMN {col} {typedef}")

    # --- Additional index ---
    conn.execute("CREATE INDEX IF NOT EXISTS idx_sessions_model ON sessions(model)")

    # --- Scan state for incremental scanning ---
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS scan_state (
            file_path TEXT PRIMARY KEY,
            last_offset INTEGER DEFAULT 0,
            last_scanned INTEGER,
            lines_processed INTEGER DEFAULT 0
        );
    """)

    # --- One-time migration of old account values (gated) ---
    # The settings table may not exist yet on a fresh DB; the 'done' flag is
    # written further down, after the settings table is created.
    migrated = conn.execute("SELECT value FROM settings WHERE key = 'account_migration_done'").fetchone() if _table_exists(conn, "settings") else None
    if not migrated:
        conn.execute("UPDATE sessions SET account = 'personal_max' WHERE account = 'personal'")
        conn.execute("UPDATE sessions SET account = 'work_pro' WHERE account = 'work'")

    # --- Existing analytics tables ---
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS daily_snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            date TEXT,
            account TEXT,
            project TEXT,
            total_tokens INTEGER,
            total_cost_usd REAL,
            cache_hit_rate REAL,
            session_count INTEGER,
            UNIQUE(date, account, project)
        );

        CREATE TABLE IF NOT EXISTS window_burns (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            account TEXT,
            window_start INTEGER,
            window_end INTEGER,
            tokens_used INTEGER,
            tokens_limit INTEGER,
            pct_used REAL,
            hit_limit INTEGER DEFAULT 0
        );
        CREATE INDEX IF NOT EXISTS idx_wb_account ON window_burns(account);
        CREATE INDEX IF NOT EXISTS idx_wb_start ON window_burns(window_start);

        CREATE TABLE IF NOT EXISTS insights (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            created_at INTEGER,
            account TEXT,
            project TEXT,
            insight_type TEXT,
            message TEXT,
            detail_json TEXT,
            dismissed INTEGER DEFAULT 0
        );
        CREATE INDEX IF NOT EXISTS idx_insights_created ON insights(created_at);
        CREATE INDEX IF NOT EXISTS idx_insights_account ON insights(account);
        CREATE INDEX IF NOT EXISTS idx_insights_type ON insights(insight_type);
    """)

    # --- Account management tables ---
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS accounts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            account_id TEXT UNIQUE,
            label TEXT,
            plan TEXT,
            monthly_cost_usd REAL,
            window_token_limit INTEGER,
            color TEXT,
            data_paths TEXT,
            active INTEGER DEFAULT 1,
            created_at INTEGER
        );

        CREATE TABLE IF NOT EXISTS account_projects (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            account_id TEXT,
            project_name TEXT,
            keywords TEXT,
            UNIQUE(account_id, project_name)
        );
    """)

    # --- Account daily budget column ---
    if not _column_exists(conn, "accounts", "daily_budget_usd"):
        conn.execute("ALTER TABLE accounts ADD COLUMN daily_budget_usd REAL DEFAULT 0")

    # --- Waste events (waste_patterns.py) ---
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS waste_events (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            session_id TEXT,
            project TEXT,
            account TEXT,
            pattern_type TEXT,
            severity TEXT,
            turn_count INTEGER,
            token_cost REAL,
            detected_at INTEGER,
            detail_json TEXT,
            UNIQUE(session_id, pattern_type)
        );
        CREATE INDEX IF NOT EXISTS idx_waste_project ON waste_events(project);
        CREATE INDEX IF NOT EXISTS idx_waste_detected ON waste_events(detected_at);
        CREATE INDEX IF NOT EXISTS idx_waste_pattern ON waste_events(pattern_type);
    """)

    # --- Fix tracker (fix_tracker.py) ---
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS fixes (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            created_at INTEGER,
            project TEXT,
            waste_pattern TEXT,
            title TEXT,
            fix_type TEXT,
            fix_detail TEXT,
            baseline_json TEXT,
            status TEXT DEFAULT 'applied'
        );
        CREATE INDEX IF NOT EXISTS idx_fixes_project ON fixes(project);
        CREATE INDEX IF NOT EXISTS idx_fixes_created ON fixes(created_at);

        CREATE TABLE IF NOT EXISTS fix_measurements (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            fix_id INTEGER REFERENCES fixes(id),
            measured_at INTEGER,
            metrics_json TEXT,
            delta_json TEXT,
            verdict TEXT
        );
        CREATE INDEX IF NOT EXISTS idx_fm_fix ON fix_measurements(fix_id);
        CREATE INDEX IF NOT EXISTS idx_fm_measured ON fix_measurements(measured_at);
    """)

    # --- claude.ai browser tracking tables ---
    # NOTE(review): session_key is stored in plaintext; _lock_db_file() is the
    # only mitigation visible here.
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS claude_ai_accounts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            account_id TEXT UNIQUE,
            label TEXT,
            org_id TEXT,
            session_key TEXT,
            plan TEXT,
            status TEXT DEFAULT 'unconfigured',
            last_polled INTEGER,
            last_error TEXT,
            created_at INTEGER,
            updated_at INTEGER
        );

        CREATE TABLE IF NOT EXISTS claude_ai_snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            account_id TEXT,
            polled_at INTEGER,
            window_start INTEGER,
            window_end INTEGER,
            tokens_used INTEGER,
            tokens_limit INTEGER,
            messages_used INTEGER,
            messages_limit INTEGER,
            pct_used REAL,
            plan TEXT,
            raw_response TEXT,
            UNIQUE(account_id, polled_at)
        );
        CREATE INDEX IF NOT EXISTS idx_cas_account ON claude_ai_snapshots(account_id);
        CREATE INDEX IF NOT EXISTS idx_cas_polled ON claude_ai_snapshots(polled_at);
    """)

    # --- Migration: add mac_sync_mode column ---
    if not _column_exists(conn, "claude_ai_accounts", "mac_sync_mode"):
        conn.execute("ALTER TABLE claude_ai_accounts ADD COLUMN mac_sync_mode INTEGER DEFAULT 0")

    # --- Migration: add utilization columns to claude_ai_snapshots ---
    for col, typedef in [
        ("five_hour_utilization", "REAL DEFAULT 0"),
        ("seven_day_utilization", "REAL DEFAULT 0"),
        ("extra_credits_used", "REAL DEFAULT 0"),
        ("extra_credits_limit", "REAL DEFAULT 0"),
    ]:
        if not _column_exists(conn, "claude_ai_snapshots", col):
            conn.execute(f"ALTER TABLE claude_ai_snapshots ADD COLUMN {col} {typedef}")

    # --- Settings table ---
    conn.executescript("""
        CREATE TABLE IF NOT EXISTS settings (
            key TEXT PRIMARY KEY,
            value TEXT
        );
    """)
    # Seed sync_token if missing
    row = conn.execute("SELECT value FROM settings WHERE key = 'sync_token'").fetchone()
    if not row:
        import secrets
        token = secrets.token_hex(32)
        conn.execute("INSERT INTO settings (key, value) VALUES ('sync_token', ?)", (token,))

    # Seed dashboard_key if missing (required for all write endpoints)
    row = conn.execute("SELECT value FROM settings WHERE key = 'dashboard_key'").fetchone()
    if not row:
        import secrets
        key = secrets.token_hex(16)
        conn.execute("INSERT INTO settings (key, value) VALUES ('dashboard_key', ?)", (key,))

    # Mark one-time account migration as done
    conn.execute("INSERT OR IGNORE INTO settings (key, value) VALUES ('account_migration_done', '1')")

    # --- Seed from config.py if accounts table is empty ---
    count = conn.execute("SELECT COUNT(*) FROM accounts").fetchone()[0]
    if count == 0:
        _seed_from_config(conn)

    # --- Seed claude_ai_accounts for each active account if not present ---
    active_accounts = conn.execute("SELECT account_id, label, plan FROM accounts WHERE active = 1").fetchall()
    for a in active_accounts:
        exists = conn.execute("SELECT id FROM claude_ai_accounts WHERE account_id = ?", (a["account_id"],)).fetchone()
        if not exists:
            conn.execute(
                """INSERT OR IGNORE INTO claude_ai_accounts
                   (account_id, label, org_id, session_key, plan, status, created_at, updated_at)
                   VALUES (?, ?, '', '', ?, 'unconfigured', ?, ?)""",
                (a["account_id"], a["label"], a["plan"], int(time.time()), int(time.time())),
            )

    conn.commit()
    conn.close()
    _lock_db_file()
337
+
338
+
339
def _seed_from_config(conn):
    """Migrate ACCOUNTS and PROJECT_MAP from config.py into DB tables."""
    from config import ACCOUNTS as CFG_ACCOUNTS, PROJECT_MAP as CFG_PROJECTS
    now = int(time.time())

    for acct_id, acct in CFG_ACCOUNTS.items():
        # Older configs used "type" where newer ones use "plan".
        plan = acct.get("plan", acct.get("type", "max"))
        conn.execute(
            """INSERT OR IGNORE INTO accounts
               (account_id, label, plan, monthly_cost_usd, window_token_limit, color, data_paths, active, created_at)
               VALUES (?, ?, ?, ?, ?, ?, ?, 1, ?)""",
            (
                acct_id,
                acct["label"],
                plan,
                acct.get("monthly_cost_usd", 0),
                acct.get("window_token_limit", 1_000_000),
                acct.get("color", "teal"),
                json.dumps(acct.get("data_paths", [])),
                now,
            ),
        )

    for proj_name, info in CFG_PROJECTS.items():
        conn.execute(
            "INSERT OR IGNORE INTO account_projects (account_id, project_name, keywords) VALUES (?, ?, ?)",
            (info["account"], proj_name, json.dumps(info.get("keywords", []))),
        )
361
+
362
+
363
def sync_project_map_from_config(conn):
    """UPSERT config.PROJECT_MAP into account_projects so keyword edits in
    config.py actually take effect on next scan/reprocess. Adds new projects
    and updates keyword lists on existing ones."""
    from config import PROJECT_MAP as CFG_PROJECTS
    upsert_sql = (
        "INSERT INTO account_projects (account_id, project_name, keywords) VALUES (?, ?, ?) "
        "ON CONFLICT(account_id, project_name) DO UPDATE SET keywords=excluded.keywords"
    )
    for proj_name, info in CFG_PROJECTS.items():
        conn.execute(upsert_sql, (info["account"], proj_name, json.dumps(info.get("keywords", []))))
    conn.commit()
376
+
377
+
378
+ # ── Account config from DB (source of truth) ──
379
+
380
def get_accounts_config(conn=None):
    """Return accounts dict in same shape as config.ACCOUNTS, from DB.
    Falls back to config.py if DB has no active accounts."""
    owns_conn = conn is None
    if owns_conn:
        conn = get_conn()

    rows = conn.execute("SELECT * FROM accounts WHERE active = 1").fetchall()
    if owns_conn:
        conn.close()

    if not rows:
        from config import ACCOUNTS
        return dict(ACCOUNTS)

    result = {}
    for r in rows:
        try:
            paths = json.loads(r["data_paths"]) if r["data_paths"] else []
        except (json.JSONDecodeError, TypeError):
            paths = []
        try:
            budget = r["daily_budget_usd"] or 0
        except (IndexError, KeyError):
            # Column may be absent on a not-yet-migrated DB.
            budget = 0
        limit = r["window_token_limit"]
        result[r["account_id"]] = {
            "label": r["label"],
            "type": r["plan"],
            "plan": r["plan"],
            "monthly_cost_usd": r["monthly_cost_usd"] or 0,
            "window_token_limit": 1_000_000 if limit is None else limit,
            "color": r["color"] or "teal",
            # Expand ~ in paths
            "data_paths": [os.path.expanduser(p) for p in paths],
            "daily_budget_usd": budget,
        }
    return result
421
+
422
+
423
def get_project_map_config(conn=None):
    """Return project map dict in same shape as config.PROJECT_MAP, from DB.
    Falls back to config.py when the table is empty."""
    owns_conn = conn is None
    if owns_conn:
        conn = get_conn()

    rows = conn.execute("SELECT * FROM account_projects").fetchall()
    if owns_conn:
        conn.close()

    if not rows:
        from config import PROJECT_MAP
        return dict(PROJECT_MAP)

    mapping = {}
    for r in rows:
        try:
            keywords = json.loads(r["keywords"]) if r["keywords"] else []
        except (json.JSONDecodeError, TypeError):
            keywords = []
        mapping[r["project_name"]] = {"keywords": keywords, "account": r["account_id"]}
    return mapping
450
+
451
+
452
+ # ── Account CRUD ──
453
+
454
def validate_account_id(account_id):
    """Validate account_id slug: lowercase, underscores only, max 32 chars.

    Returns (ok, error_message); error_message is "" when valid.
    """
    if not account_id:
        return False, "account_id is required"
    if len(account_id) > 32:
        return False, "account_id must be <= 32 characters"
    slug_ok = re.match(r'^[a-z][a-z0-9_]*$', account_id) is not None
    if not slug_ok:
        return False, "account_id must be lowercase letters, numbers, underscores; start with letter"
    return True, ""
463
+
464
+
465
+ def create_account(conn, data):
466
+ """Create a new account. Returns (success, error_msg)."""
467
+ account_id = data.get("account_id", "")
468
+ valid, err = validate_account_id(account_id)
469
+ if not valid:
470
+ return False, err
471
+
472
+ # Check uniqueness
473
+ existing = conn.execute("SELECT id FROM accounts WHERE account_id = ?", (account_id,)).fetchone()
474
+ if existing:
475
+ return False, f"account_id '{account_id}' already exists"
476
+
477
+ data_paths = data.get("data_paths", [])
478
+ if not data_paths:
479
+ return False, "at least one data_path is required"
480
+
481
+ label = data.get("label", "")
482
+ if not label:
483
+ return False, "label is required"
484
+
485
+ conn.execute(
486
+ """INSERT INTO accounts
487
+ (account_id, label, plan, monthly_cost_usd, window_token_limit, color, data_paths, active, created_at, daily_budget_usd)
488
+ VALUES (?, ?, ?, ?, ?, ?, ?, 1, ?, ?)""",
489
+ (account_id, label, data.get("plan", "max"),
490
+ data.get("monthly_cost_usd", 0), data.get("window_token_limit", 1_000_000),
491
+ data.get("color", "teal"), json.dumps(data_paths), int(time.time()),
492
+ float(data.get("daily_budget_usd", 0) or 0)),
493
+ )
494
+ conn.commit()
495
+ return True, ""
496
+
497
+
498
def update_account(conn, account_id, data):
    """Update an existing account. Returns (success, error_msg).

    Only whitelisted fields present in `data` are written; unknown keys
    are ignored.
    """
    found = conn.execute("SELECT id FROM accounts WHERE account_id = ?", (account_id,)).fetchone()
    if not found:
        return False, f"account '{account_id}' not found"

    simple_fields = ("label", "plan", "monthly_cost_usd", "window_token_limit", "color", "daily_budget_usd")
    assignments = [(f"{field} = ?", data[field]) for field in simple_fields if field in data]
    if "data_paths" in data:
        assignments.append(("data_paths = ?", json.dumps(data["data_paths"])))

    if not assignments:
        return True, ""

    set_clause = ", ".join(clause for clause, _ in assignments)
    params = [value for _, value in assignments] + [account_id]
    conn.execute(f"UPDATE accounts SET {set_clause} WHERE account_id = ?", params)
    conn.commit()
    return True, ""
521
+
522
+
523
def delete_account(conn, account_id):
    """Soft delete (active=0). Returns (success, error_msg)."""
    if conn.execute("SELECT id FROM accounts WHERE account_id = ?", (account_id,)).fetchone() is None:
        return False, f"account '{account_id}' not found"
    conn.execute("UPDATE accounts SET active = 0 WHERE account_id = ?", (account_id,))
    conn.commit()
    return True, ""
531
+
532
+
533
def get_all_accounts(conn):
    """Return every active account as a dict, with its project list attached."""
    def _json_list(blob):
        # Tolerate NULL or malformed JSON stored in TEXT columns.
        try:
            return json.loads(blob) if blob else []
        except (json.JSONDecodeError, TypeError):
            return []

    out = []
    accounts = conn.execute("SELECT * FROM accounts WHERE active = 1 ORDER BY created_at").fetchall()
    for acct in accounts:
        project_rows = conn.execute(
            "SELECT * FROM account_projects WHERE account_id = ?", (acct["account_id"],)
        ).fetchall()
        projects = [
            {"project_name": p["project_name"], "keywords": _json_list(p["keywords"])}
            for p in project_rows
        ]

        try:
            budget = acct["daily_budget_usd"] or 0
        except (IndexError, KeyError):
            # Column may be absent on a not-yet-migrated DB.
            budget = 0

        out.append({
            "account_id": acct["account_id"],
            "label": acct["label"],
            "plan": acct["plan"],
            "monthly_cost_usd": acct["monthly_cost_usd"],
            "window_token_limit": acct["window_token_limit"],
            "color": acct["color"],
            "data_paths": _json_list(acct["data_paths"]),
            "active": acct["active"],
            "created_at": acct["created_at"],
            "daily_budget_usd": budget,
            "projects": projects,
        })
    return out
574
+
575
+
576
def get_account_projects(conn, account_id):
    """Get projects for a specific account (keywords decoded from JSON)."""
    rows = conn.execute(
        "SELECT * FROM account_projects WHERE account_id = ?", (account_id,)
    ).fetchall()
    projects = []
    for row in rows:
        try:
            keywords = json.loads(row["keywords"]) if row["keywords"] else []
        except (json.JSONDecodeError, TypeError):
            keywords = []
        projects.append({"project_name": row["project_name"], "keywords": keywords})
    return projects
590
+
591
+
592
def add_account_project(conn, account_id, project_name, keywords):
    """Add a project to an account. Returns (success, error_msg).

    Relies on the UNIQUE(account_id, project_name) constraint to detect
    duplicates.
    """
    if not project_name:
        return False, "project_name is required"
    try:
        conn.execute(
            "INSERT INTO account_projects (account_id, project_name, keywords) VALUES (?, ?, ?)",
            (account_id, project_name, json.dumps(keywords)),
        )
    except sqlite3.IntegrityError:
        return False, f"project '{project_name}' already exists for account '{account_id}'"
    conn.commit()
    return True, ""
605
+
606
+
607
def remove_account_project(conn, account_id, project_name):
    """Remove a project from an account. Returns (success, error_msg)."""
    deleted = conn.execute(
        "DELETE FROM account_projects WHERE account_id = ? AND project_name = ?",
        (account_id, project_name),
    ).rowcount
    conn.commit()
    if deleted == 0:
        return False, f"project '{project_name}' not found for account '{account_id}'"
    return True, ""
617
+
618
+
619
+ # ── Session CRUD (unchanged) ──
620
+
621
def insert_session(conn, row):
    """Insert one usage row into `sessions`, ignoring duplicates.

    `row` is a dict with required keys (session_id, timestamp, project,
    account, model, the four token counters, cost_usd) and optional
    compaction/subagent metadata.

    Returns True only when a new row was actually inserted; False when the
    row was a duplicate (UNIQUE(session_id, timestamp, model) suppressed it
    via OR IGNORE) or a sqlite error occurred.
    """
    try:
        cursor = conn.execute(
            """INSERT OR IGNORE INTO sessions
               (session_id, timestamp, project, account, model,
                input_tokens, output_tokens, cache_read_tokens,
                cache_creation_tokens, cost_usd, source_path,
                compaction_detected, tokens_before_compact, tokens_after_compact,
                is_subagent, parent_session_id)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                row["session_id"], row["timestamp"], row["project"],
                row["account"], row["model"], row["input_tokens"],
                row["output_tokens"], row["cache_read_tokens"],
                row["cache_creation_tokens"], row["cost_usd"],
                row.get("source_path", ""),
                row.get("compaction_detected", 0),
                row.get("tokens_before_compact"),
                row.get("tokens_after_compact"),
                row.get("is_subagent", 0),
                row.get("parent_session_id"),
            ),
        )
        # BUG FIX: conn.total_changes is cumulative over the connection's
        # lifetime, so `total_changes > 0` reported True for every call after
        # the first successful change on this connection — duplicates were
        # misreported as inserted. cursor.rowcount covers only this statement
        # (0 when OR IGNORE suppressed a duplicate).
        return cursor.rowcount > 0
    except sqlite3.Error:
        return False
647
+
648
+
649
def insert_waste_event(conn, session_id, project, account, pattern_type, severity,
                       turn_count, token_cost, detail=None):
    """UPSERT a waste_events row. Idempotent on (session_id, pattern_type).

    A re-detection of the same pattern for the same session refreshes
    severity, turn_count, token_cost, detected_at, and detail_json.
    `detail` (dict or None) is stored as JSON. Caller commits.
    """
    # Cleanup: the module already imports `time`; the old function-local
    # `import time as _t` was redundant.
    conn.execute(
        """INSERT INTO waste_events
           (session_id, project, account, pattern_type, severity,
            turn_count, token_cost, detected_at, detail_json)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
           ON CONFLICT(session_id, pattern_type) DO UPDATE SET
             severity=excluded.severity,
             turn_count=excluded.turn_count,
             token_cost=excluded.token_cost,
             detected_at=excluded.detected_at,
             detail_json=excluded.detail_json""",
        (session_id, project, account, pattern_type, severity,
         turn_count, token_cost, int(time.time()), json.dumps(detail or {})),
    )
667
+
668
+
669
def clear_waste_events(conn):
    """Delete every waste_events row (caller is responsible for committing)."""
    conn.execute("DELETE FROM waste_events")
671
+
672
+
673
def get_waste_events_by_project(conn, days=7):
    """Waste events detected within the last `days` days, as plain dicts."""
    cutoff = int(time.time()) - days * 86400
    cursor = conn.execute(
        "SELECT * FROM waste_events WHERE detected_at >= ?",
        (cutoff,),
    )
    return [dict(record) for record in cursor.fetchall()]
680
+
681
+
682
def update_account_daily_budget(conn, account_id, budget_usd):
    """Set an account's daily budget (None/0 coerced to 0.0) and commit."""
    amount = float(budget_usd or 0)
    conn.execute(
        "UPDATE accounts SET daily_budget_usd=? WHERE account_id=?",
        (amount, account_id),
    )
    conn.commit()
688
+
689
+
690
+ # ── Fix tracker CRUD ─────────────────────────────────────────────
691
+
692
def insert_fix(conn, project, waste_pattern, title, fix_type, fix_detail, baseline_json):
    """Record an applied fix (status='applied'), commit, and return its row id.

    `baseline_json` is any JSON-serializable object; it is stored encoded.
    """
    values = (int(time.time()), project, waste_pattern, title, fix_type,
              fix_detail, json.dumps(baseline_json))
    cursor = conn.execute(
        """INSERT INTO fixes
           (created_at, project, waste_pattern, title, fix_type,
            fix_detail, baseline_json, status)
           VALUES (?, ?, ?, ?, ?, ?, ?, 'applied')""",
        values,
    )
    conn.commit()
    return cursor.lastrowid
703
+
704
+
705
def get_fix(conn, fix_id):
    """Return one fix row as a dict, or None when the id is unknown."""
    found = conn.execute("SELECT * FROM fixes WHERE id = ?", (fix_id,)).fetchone()
    return None if found is None else dict(found)
708
+
709
+
710
def get_all_fixes(conn):
    """Return every fix row ordered most-recent first."""
    cursor = conn.execute("SELECT * FROM fixes ORDER BY created_at DESC")
    return [dict(record) for record in cursor.fetchall()]
714
+
715
+
716
def update_fix_status(conn, fix_id, status):
    """Overwrite one fix's status field and commit."""
    conn.execute("UPDATE fixes SET status = ? WHERE id = ?", (status, fix_id))
    conn.commit()
719
+
720
+
721
def insert_fix_measurement(conn, fix_id, metrics_json, delta_json, verdict):
    """Store one measurement for a fix, commit, and return its row id.

    `metrics_json` and `delta_json` are JSON-serializable objects,
    stored encoded.
    """
    values = (fix_id, int(time.time()), json.dumps(metrics_json),
              json.dumps(delta_json), verdict)
    cursor = conn.execute(
        """INSERT INTO fix_measurements
           (fix_id, measured_at, metrics_json, delta_json, verdict)
           VALUES (?, ?, ?, ?, ?)""",
        values,
    )
    conn.commit()
    return cursor.lastrowid
731
+
732
+
733
def get_fix_measurements(conn, fix_id):
    """All measurements for a fix, oldest first, as plain dicts."""
    cursor = conn.execute(
        "SELECT * FROM fix_measurements WHERE fix_id = ? ORDER BY measured_at",
        (fix_id,),
    )
    return [dict(record) for record in cursor.fetchall()]
739
+
740
+
741
def get_latest_fix_measurement(conn, fix_id):
    """Most recent measurement for a fix as a dict, or None if none exist."""
    latest = conn.execute(
        "SELECT * FROM fix_measurements WHERE fix_id = ? ORDER BY measured_at DESC LIMIT 1",
        (fix_id,),
    ).fetchone()
    return None if latest is None else dict(latest)
747
+
748
+
749
def insert_alert(conn, level, project, message):
    """Add one alert stamped with the current epoch time (no commit here)."""
    values = (int(time.time()), level, project, message)
    conn.execute(
        "INSERT INTO alerts (created_at, level, project, message) VALUES (?, ?, ?, ?)",
        values,
    )
754
+
755
+
756
def query_sessions(conn, account=None, since=None):
    """Session rows newest-first, optionally filtered by account and/or a
    minimum timestamp. account='all' (or falsy) means no account filter."""
    pieces = ["SELECT * FROM sessions WHERE 1=1"]
    args = []
    if account and account != "all":
        pieces.append(" AND account = ?")
        args.append(account)
    if since:
        pieces.append(" AND timestamp >= ?")
        args.append(since)
    pieces.append(" ORDER BY timestamp DESC")
    return conn.execute("".join(pieces), args).fetchall()
767
+
768
+
769
def query_alerts(conn, limit=20):
    """Most recent `limit` alerts, newest first."""
    sql = "SELECT * FROM alerts ORDER BY created_at DESC LIMIT ?"
    return conn.execute(sql, (limit,)).fetchall()
773
+
774
+
775
def clear_alerts(conn):
    """Delete every alert row and commit immediately."""
    conn.execute("DELETE FROM alerts")
    conn.commit()
778
+
779
+
780
def get_session_count(conn):
    """Total number of rows in the sessions table."""
    (count,) = conn.execute("SELECT COUNT(*) FROM sessions").fetchone()
    return count
782
+
783
+
784
def insert_claude_ai_usage(conn, row):
    """Append one claude.ai usage sample (dict) to claude_ai_usage; no commit.

    Required keys: account_label, timestamp, tokens_used, tokens_limit,
    window_pct, window_start, window_end, status. raw_json is optional.
    """
    values = (
        row["account_label"], row["timestamp"], row["tokens_used"],
        row["tokens_limit"], row["window_pct"], row["window_start"],
        row["window_end"], row["status"], row.get("raw_json", ""),
    )
    conn.execute(
        """INSERT INTO claude_ai_usage
           (account_label, timestamp, tokens_used, tokens_limit,
            window_pct, window_start, window_end, status, raw_json)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
796
+
797
+
798
def get_latest_claude_ai_usage(conn):
    """Most recent usage row per account_label, ordered by label."""
    latest_per_label_sql = """
        SELECT c1.* FROM claude_ai_usage c1
        INNER JOIN (
            SELECT account_label, MAX(timestamp) as max_ts
            FROM claude_ai_usage GROUP BY account_label
        ) c2 ON c1.account_label = c2.account_label AND c1.timestamp = c2.max_ts
        ORDER BY c1.account_label
    """
    return conn.execute(latest_per_label_sql).fetchall()
807
+
808
+
809
def get_claude_ai_history(conn, account_label=None, hours=24):
    """Usage samples from the last `hours` hours, ascending by timestamp;
    optionally restricted to one account_label."""
    cutoff = int(time.time()) - hours * 3600
    if account_label:
        sql = "SELECT * FROM claude_ai_usage WHERE account_label = ? AND timestamp >= ? ORDER BY timestamp"
        args = (account_label, cutoff)
    else:
        sql = "SELECT * FROM claude_ai_usage WHERE timestamp >= ? ORDER BY timestamp"
        args = (cutoff,)
    return conn.execute(sql, args).fetchall()
820
+
821
+
822
+ # --- Daily snapshots ---
823
+
824
def upsert_daily_snapshot(conn, date_str, account, project, total_tokens, total_cost, cache_hit_rate, session_count):
    """Insert or refresh the per-(date, account, project) daily rollup; no commit."""
    values = (date_str, account, project, total_tokens, total_cost, cache_hit_rate, session_count)
    conn.execute(
        """INSERT INTO daily_snapshots (date, account, project, total_tokens, total_cost_usd, cache_hit_rate, session_count)
           VALUES (?, ?, ?, ?, ?, ?, ?)
           ON CONFLICT(date, account, project) DO UPDATE SET
             total_tokens=excluded.total_tokens,
             total_cost_usd=excluded.total_cost_usd,
             cache_hit_rate=excluded.cache_hit_rate,
             session_count=excluded.session_count""",
        values,
    )
835
+
836
+
837
def get_daily_snapshots(conn, account=None, days=7):
    """Daily rollups from the last `days` days (UTC dates), ascending by date;
    account='all' (or falsy) means no account filter."""
    from datetime import datetime, timezone, timedelta
    cutoff = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d")
    sql = "SELECT * FROM daily_snapshots WHERE date >= ?"
    args = [cutoff]
    if account and account != "all":
        sql += " AND account = ?"
        args.append(account)
    return conn.execute(sql + " ORDER BY date", args).fetchall()
847
+
848
+
849
+ # --- Window burns ---
850
+
851
def insert_window_burn(conn, account, window_start, window_end, tokens_used, tokens_limit, pct_used, hit_limit):
    """Append one completed rate-limit-window record; caller commits."""
    values = (account, window_start, window_end, tokens_used, tokens_limit, pct_used, hit_limit)
    conn.execute(
        """INSERT INTO window_burns (account, window_start, window_end, tokens_used, tokens_limit, pct_used, hit_limit)
           VALUES (?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
857
+
858
+
859
def get_window_burns(conn, account=None, limit=7):
    """Last `limit` window records, newest first; optional account filter
    ('all' or falsy disables it)."""
    sql = "SELECT * FROM window_burns"
    args = []
    if account and account != "all":
        sql += " WHERE account = ?"
        args.append(account)
    sql += " ORDER BY window_start DESC LIMIT ?"
    args.append(limit)
    return conn.execute(sql, args).fetchall()
868
+
869
+
870
+ # --- Insights ---
871
+
872
def insert_insight(conn, account, project, insight_type, message, detail_json="{}"):
    """Append one non-dismissed insight stamped with current epoch time; no commit."""
    values = (int(time.time()), account, project, insight_type, message, detail_json)
    conn.execute(
        """INSERT INTO insights (created_at, account, project, insight_type, message, detail_json, dismissed)
           VALUES (?, ?, ?, ?, ?, ?, 0)""",
        values,
    )
878
+
879
+
880
def get_insights(conn, account=None, dismissed=0, limit=50):
    """Insights filtered by dismissed flag, newest first, capped at `limit`.
    With an account filter, account-less/global rows are included too."""
    sql = "SELECT * FROM insights WHERE dismissed = ?"
    args = [dismissed]
    if account and account != "all":
        sql += " AND (account = ? OR account = 'all' OR account IS NULL OR account = '')"
        args.append(account)
    sql += " ORDER BY created_at DESC LIMIT ?"
    args.append(limit)
    return conn.execute(sql, args).fetchall()
889
+
890
+
891
def dismiss_insight(conn, insight_id):
    """Mark one insight as dismissed and commit."""
    conn.execute("UPDATE insights SET dismissed = 1 WHERE id = ?", (insight_id,))
    conn.commit()
894
+
895
+
896
def get_db_size_mb():
    """Size of the DB file in megabytes (2 decimals); 0 if it is unreadable/missing."""
    try:
        size_bytes = os.path.getsize(DB_PATH)
    except OSError:
        return 0
    return round(size_bytes / (1024 * 1024), 2)
901
+
902
+
903
+ # ── claude.ai browser tracking ──
904
+
905
def get_claude_ai_accounts_all(conn):
    """All claude_ai_accounts rows as plain dicts."""
    rows = conn.execute("SELECT * FROM claude_ai_accounts").fetchall()
    return [dict(record) for record in rows]
908
+
909
+
910
def get_claude_ai_account(conn, account_id):
    """Fetch a single account row as a dict, or None when not found."""
    sql = "SELECT * FROM claude_ai_accounts WHERE account_id = ?"
    found = conn.execute(sql, (account_id,)).fetchone()
    if found is None:
        return None
    return dict(found)
913
+
914
+
915
def upsert_claude_ai_account(conn, account_id, label, org_id, session_key, plan, status):
    """Create or update the claude.ai account row keyed by account_id.

    created_at is set only on first insert; updated_at is refreshed on
    every call. Commits before returning.
    """
    now = int(time.time())
    found = conn.execute(
        "SELECT id FROM claude_ai_accounts WHERE account_id = ?", (account_id,)
    ).fetchone()
    if found is None:
        conn.execute(
            """INSERT INTO claude_ai_accounts
               (account_id, label, org_id, session_key, plan, status, created_at, updated_at)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
            (account_id, label, org_id, session_key, plan, status, now, now),
        )
    else:
        conn.execute(
            """UPDATE claude_ai_accounts
               SET label=?, org_id=?, session_key=?, plan=?, status=?, updated_at=?
               WHERE account_id=?""",
            (label, org_id, session_key, plan, status, now, account_id),
        )
    conn.commit()
933
+
934
+
935
def update_claude_ai_account_status(conn, account_id, status, last_error=None):
    """Record the latest poll result for an account and commit.

    Sets status, last_polled, last_error (None clears it) and updated_at.
    """
    stamp = int(time.time())
    sql = "UPDATE claude_ai_accounts SET status=?, last_polled=?, last_error=?, updated_at=? WHERE account_id=?"
    conn.execute(sql, (status, stamp, last_error, stamp, account_id))
    conn.commit()
942
+
943
+
944
def clear_claude_ai_session(conn, account_id):
    """Wipe the stored session key and org for an account and commit.

    The row is kept but downgraded to status 'unconfigured'.
    """
    stamp = int(time.time())
    sql = "UPDATE claude_ai_accounts SET session_key='', org_id='', status='unconfigured', updated_at=? WHERE account_id=?"
    conn.execute(sql, (stamp, account_id))
    conn.commit()
951
+
952
+
953
def insert_claude_ai_snapshot(conn, account_id, data):
    """Insert a snapshot, auto-purge old ones (keep last 200 per account).

    Missing keys in `data` default to 0 (or "" for plan/raw text), and
    falsy window bounds are coerced to 0. Commits before returning.
    """
    polled_at = int(time.time())
    values = (
        account_id,
        polled_at,
        data.get("window_start") or 0,
        data.get("window_end") or 0,
        data.get("tokens_used", 0),
        data.get("tokens_limit", 0),
        data.get("messages_used", 0),
        data.get("messages_limit", 0),
        data.get("pct_used", 0),
        data.get("plan", ""),
        data.get("raw", ""),
        data.get("five_hour_utilization", 0),
        data.get("seven_day_utilization", 0),
        data.get("extra_credits_used", 0),
        data.get("extra_credits_limit", 0),
    )
    conn.execute(
        """INSERT OR REPLACE INTO claude_ai_snapshots
           (account_id, polled_at, window_start, window_end,
            tokens_used, tokens_limit, messages_used, messages_limit,
            pct_used, plan, raw_response,
            five_hour_utilization, seven_day_utilization,
            extra_credits_used, extra_credits_limit)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
    # Keep only the 200 newest snapshots for this account.
    conn.execute(
        """DELETE FROM claude_ai_snapshots WHERE account_id = ? AND id NOT IN (
              SELECT id FROM claude_ai_snapshots WHERE account_id = ?
              ORDER BY polled_at DESC LIMIT 200
           )""",
        (account_id, account_id),
    )
    conn.commit()
984
+
985
+
986
def get_latest_claude_ai_snapshot(conn, account_id):
    """Most recent snapshot for an account as a dict, or None if none exist."""
    sql = "SELECT * FROM claude_ai_snapshots WHERE account_id = ? ORDER BY polled_at DESC LIMIT 1"
    found = conn.execute(sql, (account_id,)).fetchone()
    if found is None:
        return None
    return dict(found)
992
+
993
+
994
def get_claude_ai_snapshot_history(conn, account_id, limit=48):
    """Return the last `limit` snapshots for an account, oldest first."""
    sql = "SELECT * FROM claude_ai_snapshots WHERE account_id = ? ORDER BY polled_at DESC LIMIT ?"
    history = [dict(r) for r in conn.execute(sql, (account_id, limit)).fetchall()]
    history.reverse()  # query is newest-first; callers expect chronological order
    return history
1000
+
1001
+
1002
+ # ── Settings ──
1003
+
1004
def get_setting(conn, key):
    """Look up a settings value by key; returns None when the key is unset."""
    found = conn.execute("SELECT value FROM settings WHERE key = ?", (key,)).fetchone()
    return None if found is None else found["value"]
1007
+
1008
+
1009
def set_setting(conn, key, value):
    """Upsert one key/value pair into settings and commit.

    Relies on the UNIQUE/PRIMARY KEY constraint on settings.key for the
    ON CONFLICT clause.
    """
    sql = "INSERT INTO settings (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value=excluded.value"
    conn.execute(sql, (key, value))
    conn.commit()
1015
+
1016
+
1017
def get_real_story_insights():
    """Compute verified story cards from actual DB data. Returns a list of dicts.

    Each card is backed by an aggregate query over the local usage DB
    (sessions / waste_events), so it carries verified=True together with
    the session/event count that produced it. Opens its own connection.
    """
    conn = get_conn()
    # FIX: the connection was previously leaked if any query raised (e.g. a
    # table missing on an older schema); try/finally guarantees it is closed.
    try:
        stories = []
        now = int(time.time())
        cutoff_30d = now - 30 * 86400
        cutoff_14d = now - 14 * 86400
        # Total sessions in the 30-day window, reported on cards that span all data.
        total_analyzed = conn.execute(
            "SELECT COUNT(*) FROM sessions WHERE timestamp > ?", (cutoff_30d,)
        ).fetchone()[0]

        # STORY 1 — Model mismatch: Opus doing Sonnet-level work
        # Projects with many Opus sessions but consistently short outputs.
        rows = conn.execute(
            "SELECT project, account, AVG(output_tokens) as avg_out, COUNT(*) as sessions "
            "FROM sessions "
            "WHERE model LIKE '%opus%' AND timestamp > ? "
            "GROUP BY project "
            "HAVING avg_out < 300 AND sessions > 100 "
            "ORDER BY sessions DESC",
            (cutoff_30d,),
        ).fetchall()
        for r in rows:
            stories.append({
                "type": "model_mismatch", "badge": "Model Mismatch",
                "account": r["account"],
                "title": f"{r['project']}: Opus doing Sonnet-level work",
                "finding": (
                    f"{r['sessions']} sessions, avg {int(r['avg_out'])} tokens output — "
                    f"Opus costs 5x Sonnet for same output"
                ),
                "what_to_do": "Add to CLAUDE.md: use claude-sonnet for tasks with short expected outputs",
                "action": "Switch to claude-sonnet for short-output tasks",
                "verified": True,
                "sessions_analyzed": r["sessions"],
            })

        # STORY 2 — Floundering (repeated tool failures)
        rows = conn.execute(
            "SELECT project, account, COUNT(*) as events "
            "FROM waste_events "
            "WHERE pattern_type = 'floundering' AND detected_at > ? "
            "GROUP BY project "
            "HAVING events > 5 "
            "ORDER BY events DESC LIMIT 3",
            (cutoff_30d,),
        ).fetchall()
        for r in rows:
            stories.append({
                "type": "floundering_detected", "badge": "Got Stuck",
                "account": r["account"],
                "title": f"{r['project']}: Claude got stuck {r['events']} times",
                "finding": (
                    f"Detected {r['events']} sessions where the same tool call "
                    f"failed 3+ times consecutively"
                ),
                "what_to_do": "Add to CLAUDE.md: after 3 failed tool calls of same type, skip and log",
                "action": "After 3 failed tool calls, stop and skip — never retry blindly",
                "verified": True,
                "sessions_analyzed": r["events"],
            })

        # STORY 3 — Repeated reads of the same file within a session
        rows = conn.execute(
            "SELECT project, account, COUNT(*) as events "
            "FROM waste_events "
            "WHERE pattern_type = 'repeated_reads' AND detected_at > ? "
            "GROUP BY project "
            "HAVING events > 3 "
            "ORDER BY events DESC LIMIT 3",
            (cutoff_30d,),
        ).fetchall()
        for r in rows:
            stories.append({
                "type": "repeated_reads", "badge": "Repeated Reads",
                "account": r["account"],
                "title": f"{r['project']}: Same files read {r['events']} times",
                "finding": (
                    f"Detected {r['events']} sessions where the same file was "
                    f"read 3+ times in a single session"
                ),
                "what_to_do": "Add to CLAUDE.md: read a file once, store key info, do not re-read",
                "action": "Read each file once at session start. Pass summaries between phases.",
                "verified": True,
                "sessions_analyzed": r["events"],
            })

        # STORY 4 — Subagent cost spike: single parent with the costliest fan-out
        row = conn.execute(
            "SELECT parent_session_id, project, account, "
            "       COUNT(*) as subagent_count, "
            "       SUM(cost_usd) as subagent_cost "
            "FROM sessions "
            "WHERE is_subagent = 1 AND timestamp > ? "
            "GROUP BY parent_session_id "
            "HAVING subagent_count > 5 "
            "ORDER BY subagent_cost DESC LIMIT 1",
            (cutoff_30d,),
        ).fetchone()
        if row and row["subagent_count"]:
            stories.append({
                "type": "subagent_spike", "badge": "Sub-agent Spike",
                "account": row["account"],
                "title": f"{row['project']}: One session spawned {row['subagent_count']} sub-agents",
                "finding": (
                    f"Single parent session created {row['subagent_count']} sub-agents "
                    f"costing ${row['subagent_cost']:.2f} API equivalent — "
                    f"invisible without sub-agent tracking"
                ),
                "what_to_do": "Add max sub-agent limit to your agent orchestration",
                "verified": True,
                "sessions_analyzed": row["subagent_count"],
            })

        # STORY 5 — Daily cost spike: top day > 3x the 14-day average
        daily_rows = conn.execute(
            "SELECT date(timestamp, 'unixepoch') as day, SUM(cost_usd) as daily_cost "
            "FROM sessions WHERE timestamp > ? "
            "GROUP BY day ORDER BY daily_cost DESC",
            (cutoff_14d,),
        ).fetchall()
        if len(daily_rows) >= 3:
            avg_daily = sum(r["daily_cost"] for r in daily_rows) / len(daily_rows)
            top = daily_rows[0]
            # avg_daily > 0 guards the division; 3x threshold keeps the card rare.
            if avg_daily > 0 and top["daily_cost"] > 3 * avg_daily:
                multiplier = round(top["daily_cost"] / avg_daily, 1)
                stories.append({
                    "type": "cost_spike_day", "badge": "Cost Spike",
                    "account": None,
                    "title": f"{top['day'][5:]}: {multiplier}x your normal daily spend",
                    "finding": (
                        f"Highest day was ${top['daily_cost']:.2f} API equiv — "
                        f"{multiplier}x your 14-day average of ${avg_daily:.2f}"
                    ),
                    "what_to_do": "Check what ran that day. Add daily budget alerts to catch this earlier.",
                    "verified": True,
                    "sessions_analyzed": total_analyzed,
                })

        return stories
    finally:
        conn.close()