omni-cortex 1.17.1-py3-none-any.whl → 1.17.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. omni_cortex/__init__.py +3 -0
  2. omni_cortex/_bundled/dashboard/backend/.env.example +12 -0
  3. omni_cortex/_bundled/dashboard/backend/backfill_summaries.py +280 -0
  4. omni_cortex/_bundled/dashboard/backend/chat_service.py +631 -0
  5. omni_cortex/_bundled/dashboard/backend/database.py +1773 -0
  6. omni_cortex/_bundled/dashboard/backend/image_service.py +552 -0
  7. omni_cortex/_bundled/dashboard/backend/logging_config.py +122 -0
  8. omni_cortex/_bundled/dashboard/backend/main.py +1888 -0
  9. omni_cortex/_bundled/dashboard/backend/models.py +472 -0
  10. omni_cortex/_bundled/dashboard/backend/project_config.py +170 -0
  11. omni_cortex/_bundled/dashboard/backend/project_scanner.py +164 -0
  12. omni_cortex/_bundled/dashboard/backend/prompt_security.py +111 -0
  13. omni_cortex/_bundled/dashboard/backend/pyproject.toml +23 -0
  14. omni_cortex/_bundled/dashboard/backend/security.py +104 -0
  15. omni_cortex/_bundled/dashboard/backend/uv.lock +1110 -0
  16. omni_cortex/_bundled/dashboard/backend/websocket_manager.py +104 -0
  17. omni_cortex/_bundled/hooks/post_tool_use.py +497 -0
  18. omni_cortex/_bundled/hooks/pre_tool_use.py +277 -0
  19. omni_cortex/_bundled/hooks/session_utils.py +186 -0
  20. omni_cortex/_bundled/hooks/stop.py +219 -0
  21. omni_cortex/_bundled/hooks/subagent_stop.py +120 -0
  22. omni_cortex/_bundled/hooks/user_prompt.py +220 -0
  23. omni_cortex/categorization/__init__.py +9 -0
  24. omni_cortex/categorization/auto_tags.py +166 -0
  25. omni_cortex/categorization/auto_type.py +165 -0
  26. omni_cortex/config.py +141 -0
  27. omni_cortex/dashboard.py +238 -0
  28. omni_cortex/database/__init__.py +24 -0
  29. omni_cortex/database/connection.py +137 -0
  30. omni_cortex/database/migrations.py +210 -0
  31. omni_cortex/database/schema.py +212 -0
  32. omni_cortex/database/sync.py +421 -0
  33. omni_cortex/decay/__init__.py +7 -0
  34. omni_cortex/decay/importance.py +147 -0
  35. omni_cortex/embeddings/__init__.py +35 -0
  36. omni_cortex/embeddings/local.py +442 -0
  37. omni_cortex/models/__init__.py +20 -0
  38. omni_cortex/models/activity.py +265 -0
  39. omni_cortex/models/agent.py +144 -0
  40. omni_cortex/models/memory.py +395 -0
  41. omni_cortex/models/relationship.py +206 -0
  42. omni_cortex/models/session.py +290 -0
  43. omni_cortex/resources/__init__.py +1 -0
  44. omni_cortex/search/__init__.py +22 -0
  45. omni_cortex/search/hybrid.py +197 -0
  46. omni_cortex/search/keyword.py +204 -0
  47. omni_cortex/search/ranking.py +127 -0
  48. omni_cortex/search/semantic.py +232 -0
  49. omni_cortex/server.py +360 -0
  50. omni_cortex/setup.py +284 -0
  51. omni_cortex/tools/__init__.py +13 -0
  52. omni_cortex/tools/activities.py +453 -0
  53. omni_cortex/tools/memories.py +536 -0
  54. omni_cortex/tools/sessions.py +311 -0
  55. omni_cortex/tools/utilities.py +477 -0
  56. omni_cortex/utils/__init__.py +13 -0
  57. omni_cortex/utils/formatting.py +282 -0
  58. omni_cortex/utils/ids.py +72 -0
  59. omni_cortex/utils/timestamps.py +129 -0
  60. omni_cortex/utils/truncation.py +111 -0
  61. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/METADATA +1 -1
  62. omni_cortex-1.17.3.dist-info/RECORD +86 -0
  63. omni_cortex-1.17.1.dist-info/RECORD +0 -26
  64. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
  65. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
  66. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
  67. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
  68. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
  69. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
  70. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/main.py +0 -0
  71. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
  72. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
  73. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
  74. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
  75. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
  76. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
  77. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
  78. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
  79. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
  80. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
  81. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
  82. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/stop.py +0 -0
  83. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
  84. {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
  85. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/WHEEL +0 -0
  86. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/entry_points.txt +0 -0
  87. {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/licenses/LICENSE +0 -0
omni_cortex/_bundled/dashboard/backend/database.py
@@ -0,0 +1,1773 @@
+ """Database query functions for reading omni-cortex SQLite databases."""
+
+ import json
+ import sqlite3
+ from collections import Counter
+ from datetime import datetime, timedelta
+ from pathlib import Path
+ from typing import Optional
+
+ from models import Activity, FilterParams, Memory, MemoryStats, MemoryUpdate, Session, TimelineEntry
+
+
+ def get_connection(db_path: str) -> sqlite3.Connection:
+     """Get a read-only connection to the database."""
+     conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
+     conn.row_factory = sqlite3.Row
+     return conn
+
+
+ def get_write_connection(db_path: str) -> sqlite3.Connection:
+     """Get a writable connection to the database."""
+     conn = sqlite3.connect(db_path)
+     conn.row_factory = sqlite3.Row
+     return conn
+
+
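A quick usage sketch (an editorial illustration, not part of the packaged diff; the path is invented): the ?mode=ro URI guarantees dashboard reads can never mutate the hook-maintained database, and sqlite3.Row gives column-name access.

conn = get_connection("cortex.db")  # illustrative path
try:
    row = conn.execute("SELECT COUNT(*) AS n FROM memories").fetchone()
    print(row["n"])  # sqlite3.Row supports name-based indexing
finally:
    conn.close()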
+ def ensure_migrations(db_path: str) -> None:
+     """Ensure database has latest migrations applied.
+
+     This function checks for and applies any missing schema updates,
+     including command analytics columns and natural language summary columns.
+     """
+     conn = get_write_connection(db_path)
+
+     # Check if activities table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='activities'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return
+
+     # Check available columns
+     columns = conn.execute("PRAGMA table_info(activities)").fetchall()
+     column_names = {col[1] for col in columns}
+
+     migrations_applied = []
+
+     # Migration v1.1: Command analytics columns
+     if "command_name" not in column_names:
+         conn.executescript("""
+             ALTER TABLE activities ADD COLUMN command_name TEXT;
+             ALTER TABLE activities ADD COLUMN command_scope TEXT;
+             ALTER TABLE activities ADD COLUMN mcp_server TEXT;
+             ALTER TABLE activities ADD COLUMN skill_name TEXT;
+
+             CREATE INDEX IF NOT EXISTS idx_activities_command ON activities(command_name);
+             CREATE INDEX IF NOT EXISTS idx_activities_mcp ON activities(mcp_server);
+             CREATE INDEX IF NOT EXISTS idx_activities_skill ON activities(skill_name);
+         """)
+         migrations_applied.append("v1.1: command analytics columns")
+
+     # Migration v1.2: Natural language summary columns
+     if "summary" not in column_names:
+         conn.executescript("""
+             ALTER TABLE activities ADD COLUMN summary TEXT;
+             ALTER TABLE activities ADD COLUMN summary_detail TEXT;
+         """)
+         migrations_applied.append("v1.2: summary columns")
+
+     if migrations_applied:
+         conn.commit()
+         print(f"[Database] Applied migrations: {', '.join(migrations_applied)}")
+
+     conn.close()
+
+
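Because every ALTER is guarded by a PRAGMA column check, ensure_migrations is idempotent and safe to run on every startup; a hypothetical call (invented path):

ensure_migrations("cortex.db")  # no-op once both column sets exist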
+ def parse_tags(tags_str: Optional[str]) -> list[str]:
+     """Parse tags from JSON string."""
+     if not tags_str:
+         return []
+     try:
+         tags = json.loads(tags_str)
+         return tags if isinstance(tags, list) else []
+     except (json.JSONDecodeError, TypeError):
+         return []
+
+
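parse_tags is deliberately forgiving; illustrative inputs and outputs:

parse_tags('["python", "sqlite"]')  # -> ["python", "sqlite"]
parse_tags('{"not": "a list"}')     # -> [] (non-list JSON rejected)
parse_tags("not json")              # -> [] (decode errors swallowed)
parse_tags(None)                    # -> []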
+ def get_memories(db_path: str, filters: FilterParams) -> list[Memory]:
+     """Get memories with filtering, sorting, and pagination."""
+     conn = get_connection(db_path)
+
+     # Build query
+     query = "SELECT * FROM memories WHERE 1=1"
+     params: list = []
+
+     if filters.memory_type:
+         query += " AND type = ?"
+         params.append(filters.memory_type)
+
+     if filters.status:
+         query += " AND status = ?"
+         params.append(filters.status)
+
+     if filters.min_importance is not None:
+         query += " AND importance_score >= ?"
+         params.append(filters.min_importance)
+
+     if filters.max_importance is not None:
+         query += " AND importance_score <= ?"
+         params.append(filters.max_importance)
+
+     if filters.search:
+         query += " AND (content LIKE ? OR context LIKE ?)"
+         search_term = f"%{filters.search}%"
+         params.extend([search_term, search_term])
+
+     # Sorting
+     valid_sort_columns = ["created_at", "last_accessed", "importance_score", "access_count"]
+     sort_by = filters.sort_by if filters.sort_by in valid_sort_columns else "last_accessed"
+     sort_order = "DESC" if filters.sort_order.lower() == "desc" else "ASC"
+     query += f" ORDER BY {sort_by} {sort_order}"
+
+     # Pagination
+     query += " LIMIT ? OFFSET ?"
+     params.extend([filters.limit, filters.offset])
+
+     cursor = conn.execute(query, params)
+     rows = cursor.fetchall()
+
+     memories = []
+     for row in rows:
+         # Parse tags from JSON string
+         tags = parse_tags(row["tags"])
+
+         memories.append(
+             Memory(
+                 id=row["id"],
+                 content=row["content"],
+                 context=row["context"],
+                 type=row["type"],
+                 status=row["status"] or "fresh",
+                 importance_score=int(row["importance_score"] or 50),
+                 access_count=row["access_count"] or 0,
+                 created_at=datetime.fromisoformat(row["created_at"]),
+                 last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
+                 tags=tags,
+             )
+         )
+
+     conn.close()
+     return memories
+
+
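Only whitelisted column names reach the ORDER BY clause; all values travel as ? parameters. A hypothetical call (FilterParams is defined in models.py, so the keyword names below are inferred from the attribute accesses above):

filters = FilterParams(
    memory_type="decision",
    min_importance=60,
    search="sqlite",
    sort_by="importance_score",
    sort_order="desc",
    limit=20,
    offset=0,
)
top_decisions = get_memories("cortex.db", filters)  # illustrative path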
+ def get_memory_by_id(db_path: str, memory_id: str) -> Optional[Memory]:
+     """Get a single memory by ID."""
+     conn = get_connection(db_path)
+
+     cursor = conn.execute("SELECT * FROM memories WHERE id = ?", (memory_id,))
+     row = cursor.fetchone()
+
+     if not row:
+         conn.close()
+         return None
+
+     # Parse tags from JSON string
+     tags = parse_tags(row["tags"])
+
+     memory = Memory(
+         id=row["id"],
+         content=row["content"],
+         context=row["context"],
+         type=row["type"],
+         status=row["status"] or "fresh",
+         importance_score=int(row["importance_score"] or 50),
+         access_count=row["access_count"] or 0,
+         created_at=datetime.fromisoformat(row["created_at"]),
+         last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
+         tags=tags,
+     )
+
+     conn.close()
+     return memory
+
+
+ def get_memory_stats(db_path: str) -> MemoryStats:
+     """Get statistics about memories in the database."""
+     conn = get_connection(db_path)
+
+     # Total count
+     total = conn.execute("SELECT COUNT(*) FROM memories").fetchone()[0]
+
+     # By type
+     type_cursor = conn.execute("SELECT type, COUNT(*) as count FROM memories GROUP BY type")
+     by_type = {row["type"]: row["count"] for row in type_cursor.fetchall()}
+
+     # By status
+     status_cursor = conn.execute("SELECT status, COUNT(*) as count FROM memories GROUP BY status")
+     by_status = {(row["status"] or "fresh"): row["count"] for row in status_cursor.fetchall()}
+
+     # Average importance
+     avg_cursor = conn.execute("SELECT AVG(importance_score) FROM memories")
+     avg_importance = avg_cursor.fetchone()[0] or 0.0
+
+     # Total access count
+     access_cursor = conn.execute("SELECT SUM(access_count) FROM memories")
+     total_access = access_cursor.fetchone()[0] or 0
+
+     # Tags with counts - extract from JSON column
+     tags_cursor = conn.execute("SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''")
+     tag_counter: Counter = Counter()
+     for row in tags_cursor.fetchall():
+         tags = parse_tags(row["tags"])
+         tag_counter.update(tags)
+
+     tags = [{"name": name, "count": count} for name, count in tag_counter.most_common(50)]
+
+     conn.close()
+
+     return MemoryStats(
+         total_count=total,
+         by_type=by_type,
+         by_status=by_status,
+         avg_importance=round(avg_importance, 1),
+         total_access_count=total_access,
+         tags=tags,
+     )
+
+
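Because tags live in a JSON TEXT column, tag counts are tallied in Python with Counter rather than in SQL. Illustrative result shape (all numbers invented):

MemoryStats(
    total_count=412,
    by_type={"decision": 120, "solution": 88, "other": 204},
    by_status={"fresh": 300, "stale": 112},
    avg_importance=57.3,
    total_access_count=1984,
    tags=[{"name": "python", "count": 41}, {"name": "sqlite", "count": 17}],
)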
+ def get_activities(
+     db_path: str,
+     event_type: Optional[str] = None,
+     tool_name: Optional[str] = None,
+     limit: int = 100,
+     offset: int = 0,
+ ) -> list[Activity]:
+     """Get activity log entries with all available fields."""
+     conn = get_connection(db_path)
+
+     # Check available columns for backward compatibility
+     columns = conn.execute("PRAGMA table_info(activities)").fetchall()
+     column_names = {col[1] for col in columns}
+
+     query = "SELECT * FROM activities WHERE 1=1"
+     params: list = []
+
+     if event_type:
+         query += " AND event_type = ?"
+         params.append(event_type)
+
+     if tool_name:
+         query += " AND tool_name = ?"
+         params.append(tool_name)
+
+     query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
+     params.extend([limit, offset])
+
+     cursor = conn.execute(query, params)
+     activities = []
+
+     for row in cursor.fetchall():
+         # Parse timestamp - handle both with and without timezone
+         ts_str = row["timestamp"]
+         try:
+             ts = datetime.fromisoformat(ts_str)
+         except ValueError:
+             # Fallback for edge cases
+             ts = datetime.now()
+
+         activity_data = {
+             "id": row["id"],
+             "session_id": row["session_id"],
+             "event_type": row["event_type"],
+             "tool_name": row["tool_name"],
+             "tool_input": row["tool_input"],
+             "tool_output": row["tool_output"],
+             "success": bool(row["success"]),
+             "error_message": row["error_message"],
+             "duration_ms": row["duration_ms"],
+             "file_path": row["file_path"],
+             "timestamp": ts,
+         }
+
+         # Add command analytics fields if available
+         if "command_name" in column_names:
+             activity_data["command_name"] = row["command_name"]
+         if "command_scope" in column_names:
+             activity_data["command_scope"] = row["command_scope"]
+         if "mcp_server" in column_names:
+             activity_data["mcp_server"] = row["mcp_server"]
+         if "skill_name" in column_names:
+             activity_data["skill_name"] = row["skill_name"]
+
+         # Add summary fields if available
+         if "summary" in column_names:
+             activity_data["summary"] = row["summary"]
+         if "summary_detail" in column_names:
+             activity_data["summary_detail"] = row["summary_detail"]
+
+         activities.append(Activity(**activity_data))
+
+     conn.close()
+     return activities
+
+
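The PRAGMA probe is this module's recurring degrade-gracefully trick: the dashboard can read databases written by older hook versions without tripping over missing columns. The pattern in isolation (a hypothetical helper, not in the diff):

def has_column(conn: sqlite3.Connection, table: str, column: str) -> bool:
    # PRAGMA table_info yields one row per column; index 1 is the column name
    return column in {col[1] for col in conn.execute(f"PRAGMA table_info({table})")}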
+ def get_timeline(
+     db_path: str,
+     hours: int = 24,
+     include_memories: bool = True,
+     include_activities: bool = True,
+ ) -> list[TimelineEntry]:
+     """Get a timeline of memories and activities."""
+     conn = get_connection(db_path)
+     since = datetime.now() - timedelta(hours=hours)
+     since_str = since.isoformat()
+
+     entries: list[TimelineEntry] = []
+
+     if include_memories:
+         cursor = conn.execute(
+             "SELECT * FROM memories WHERE created_at >= ? ORDER BY created_at DESC",
+             (since_str,),
+         )
+         for row in cursor.fetchall():
+             entries.append(
+                 TimelineEntry(
+                     timestamp=datetime.fromisoformat(row["created_at"]),
+                     entry_type="memory",
+                     data={
+                         "id": row["id"],
+                         "content": row["content"][:200] + "..." if len(row["content"]) > 200 else row["content"],
+                         "type": row["type"],
+                         "importance": row["importance_score"],
+                     },
+                 )
+             )
+
+     if include_activities:
+         cursor = conn.execute(
+             "SELECT * FROM activities WHERE timestamp >= ? ORDER BY timestamp DESC",
+             (since_str,),
+         )
+         for row in cursor.fetchall():
+             entries.append(
+                 TimelineEntry(
+                     timestamp=datetime.fromisoformat(row["timestamp"]),
+                     entry_type="activity",
+                     data={
+                         "id": row["id"],
+                         "event_type": row["event_type"],
+                         "tool_name": row["tool_name"],
+                         "success": bool(row["success"]),
+                         "duration_ms": row["duration_ms"],
+                     },
+                 )
+             )
+
+     # Sort by timestamp descending
+     entries.sort(key=lambda e: e.timestamp, reverse=True)
+
+     conn.close()
+     return entries
+
+
+ def get_sessions(db_path: str, limit: int = 20) -> list[Session]:
+     """Get recent sessions."""
+     conn = get_connection(db_path)
+
+     cursor = conn.execute(
+         """
+         SELECT s.*, COUNT(a.id) as activity_count
+         FROM sessions s
+         LEFT JOIN activities a ON s.id = a.session_id
+         GROUP BY s.id
+         ORDER BY s.started_at DESC
+         LIMIT ?
+         """,
+         (limit,),
+     )
+
+     sessions = []
+     for row in cursor.fetchall():
+         sessions.append(
+             Session(
+                 id=row["id"],
+                 project_path=row["project_path"],
+                 started_at=datetime.fromisoformat(row["started_at"]),
+                 ended_at=datetime.fromisoformat(row["ended_at"]) if row["ended_at"] else None,
+                 summary=row["summary"],
+                 activity_count=row["activity_count"],
+             )
+         )
+
+     conn.close()
+     return sessions
+
+
+ def get_all_tags(db_path: str) -> list[dict]:
+     """Get all tags with their usage counts."""
+     conn = get_connection(db_path)
+
+     # Extract tags from JSON column
+     cursor = conn.execute("SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''")
+     tag_counter: Counter = Counter()
+     for row in cursor.fetchall():
+         tags = parse_tags(row["tags"])
+         tag_counter.update(tags)
+
+     tags = [{"name": name, "count": count} for name, count in tag_counter.most_common()]
+
+     conn.close()
+     return tags
+
+
+ def get_type_distribution(db_path: str) -> dict[str, int]:
+     """Get memory type distribution."""
+     conn = get_connection(db_path)
+
+     cursor = conn.execute("SELECT type, COUNT(*) as count FROM memories GROUP BY type")
+     distribution = {row["type"]: row["count"] for row in cursor.fetchall()}
+
+     conn.close()
+     return distribution
+
+
+ def search_memories(db_path: str, query: str, limit: int = 20) -> list[Memory]:
+     """Search memories using FTS if available, otherwise LIKE."""
+     conn = get_connection(db_path)
+
+     # Check if FTS table exists
+     fts_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='memories_fts'"
+     ).fetchone()
+
+     if fts_check:
+         # Use FTS search - FTS5 uses rowid to match the memories table rowid
+         # Escape special FTS5 characters and wrap in quotes for phrase search
+         safe_query = query.replace('"', '""')
+         try:
+             cursor = conn.execute(
+                 """
+                 SELECT m.* FROM memories m
+                 JOIN memories_fts fts ON m.rowid = fts.rowid
+                 WHERE memories_fts MATCH ?
+                 ORDER BY rank
+                 LIMIT ?
+                 """,
+                 (f'"{safe_query}"', limit),
+             )
+         except sqlite3.OperationalError:
+             # Fallback if FTS query fails
+             search_term = f"%{query}%"
+             cursor = conn.execute(
+                 """
+                 SELECT * FROM memories
+                 WHERE content LIKE ? OR context LIKE ?
+                 ORDER BY importance_score DESC
+                 LIMIT ?
+                 """,
+                 (search_term, search_term, limit),
+             )
+     else:
+         # Fallback to LIKE
+         search_term = f"%{query}%"
+         cursor = conn.execute(
+             """
+             SELECT * FROM memories
+             WHERE content LIKE ? OR context LIKE ?
+             ORDER BY importance_score DESC
+             LIMIT ?
+             """,
+             (search_term, search_term, limit),
+         )
+
+     memories = []
+     for row in cursor.fetchall():
+         # Parse tags from JSON string
+         tags = parse_tags(row["tags"])
+
+         memories.append(
+             Memory(
+                 id=row["id"],
+                 content=row["content"],
+                 context=row["context"],
+                 type=row["type"],
+                 status=row["status"] or "fresh",
+                 importance_score=int(row["importance_score"] or 50),
+                 access_count=row["access_count"] or 0,
+                 created_at=datetime.fromisoformat(row["created_at"]),
+                 last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
+                 tags=tags,
+             )
+         )
+
+     conn.close()
+     return memories
+
+
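Doubling embedded quotes and wrapping the result in quotes turns every search into an FTS5 phrase query, so user input such as AND or * cannot raise operator syntax errors; anything that still fails falls through to the LIKE path. What MATCH receives (illustrative):

# user input      ->  MATCH argument
# vector search   ->  "vector search"     (one phrase, not two AND-ed terms)
# say "hello"     ->  "say ""hello"""     (inner quotes doubled)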
+ def update_memory(db_path: str, memory_id: str, updates: MemoryUpdate) -> Optional[Memory]:
+     """Update a memory and return the updated record."""
+     conn = get_write_connection(db_path)
+
+     # Build update query dynamically based on provided fields
+     update_fields = []
+     params = []
+
+     if updates.content is not None:
+         update_fields.append("content = ?")
+         params.append(updates.content)
+
+     if updates.context is not None:
+         update_fields.append("context = ?")
+         params.append(updates.context)
+
+     if updates.memory_type is not None:
+         update_fields.append("type = ?")
+         params.append(updates.memory_type)
+
+     if updates.status is not None:
+         update_fields.append("status = ?")
+         params.append(updates.status)
+
+     if updates.importance_score is not None:
+         update_fields.append("importance_score = ?")
+         params.append(updates.importance_score)
+
+     if updates.tags is not None:
+         update_fields.append("tags = ?")
+         params.append(json.dumps(updates.tags))
+
+     if not update_fields:
+         conn.close()
+         return get_memory_by_id(db_path, memory_id)
+
+     # Add updated timestamp
+     update_fields.append("last_accessed = ?")
+     params.append(datetime.now().isoformat())
+
+     # Add memory_id to params
+     params.append(memory_id)
+
+     query = f"UPDATE memories SET {', '.join(update_fields)} WHERE id = ?"
+     cursor = conn.execute(query, params)
+     conn.commit()
+
+     if cursor.rowcount == 0:
+         conn.close()
+         return None
+
+     conn.close()
+     return get_memory_by_id(db_path, memory_id)
+
+
+ def delete_memory(db_path: str, memory_id: str) -> bool:
+     """Delete a memory by ID. Returns True if deleted, False if not found."""
+     conn = get_write_connection(db_path)
+
+     # Also delete related entries in memory_relationships
+     conn.execute(
+         "DELETE FROM memory_relationships WHERE source_memory_id = ? OR target_memory_id = ?",
+         (memory_id, memory_id),
+     )
+
+     cursor = conn.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
+     conn.commit()
+
+     deleted = cursor.rowcount > 0
+     conn.close()
+     return deleted
+
+
+ # --- Stats Functions for Dashboard Charts ---
+
+
+ def get_activity_heatmap(db_path: str, days: int = 90) -> list[dict]:
+     """Get activity counts grouped by day for heatmap visualization."""
+     conn = get_connection(db_path)
+     query = """
+         SELECT date(timestamp) as date, COUNT(*) as count
+         FROM activities
+         WHERE timestamp >= date('now', ?)
+         GROUP BY date(timestamp)
+         ORDER BY date
+     """
+     cursor = conn.execute(query, (f'-{days} days',))
+     result = [{"date": row["date"], "count": row["count"]} for row in cursor.fetchall()]
+     conn.close()
+     return result
+
+
+ def get_tool_usage(db_path: str, limit: int = 10) -> list[dict]:
+     """Get tool usage statistics with success rates."""
+     conn = get_connection(db_path)
+     query = """
+         SELECT
+             tool_name,
+             COUNT(*) as count,
+             SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
+         FROM activities
+         WHERE tool_name IS NOT NULL AND tool_name != ''
+         GROUP BY tool_name
+         ORDER BY count DESC
+         LIMIT ?
+     """
+     cursor = conn.execute(query, (limit,))
+     result = [
+         {
+             "tool_name": row["tool_name"],
+             "count": row["count"],
+             "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
+         }
+         for row in cursor.fetchall()
+     ]
+     conn.close()
+     return result
+
+
+ def get_memory_growth(db_path: str, days: int = 30) -> list[dict]:
+     """Get memory creation over time with cumulative totals."""
+     conn = get_connection(db_path)
+     query = """
+         WITH daily_counts AS (
+             SELECT date(created_at) as date, COUNT(*) as count
+             FROM memories
+             WHERE created_at >= date('now', ?)
+             GROUP BY date(created_at)
+         )
+         SELECT
+             date,
+             count,
+             SUM(count) OVER (ORDER BY date) as cumulative
+         FROM daily_counts
+         ORDER BY date
+     """
+     cursor = conn.execute(query, (f'-{days} days',))
+     result = [
+         {"date": row["date"], "count": row["count"], "cumulative": row["cumulative"]}
+         for row in cursor.fetchall()
+     ]
+     conn.close()
+     return result
+
+
+ def get_recent_sessions(db_path: str, limit: int = 5) -> list[dict]:
+     """Get recent sessions with activity counts."""
+     conn = get_connection(db_path)
+     query = """
+         SELECT
+             s.id,
+             s.project_path,
+             s.started_at,
+             s.ended_at,
+             s.summary,
+             COUNT(DISTINCT a.id) as activity_count
+         FROM sessions s
+         LEFT JOIN activities a ON a.session_id = s.id
+         GROUP BY s.id
+         ORDER BY s.started_at DESC
+         LIMIT ?
+     """
+     cursor = conn.execute(query, (limit,))
+     result = [
+         {
+             "id": row["id"],
+             "project_path": row["project_path"],
+             "started_at": row["started_at"],
+             "ended_at": row["ended_at"],
+             "summary": row["summary"],
+             "activity_count": row["activity_count"],
+         }
+         for row in cursor.fetchall()
+     ]
+     conn.close()
+     return result
+
+
+ def bulk_update_memory_status(db_path: str, memory_ids: list[str], status: str) -> int:
+     """Update status for multiple memories. Returns count updated."""
+     if not memory_ids:
+         return 0
+     conn = get_write_connection(db_path)
+     placeholders = ','.join('?' * len(memory_ids))
+     query = f"UPDATE memories SET status = ?, last_accessed = datetime('now') WHERE id IN ({placeholders})"
+     cursor = conn.execute(query, [status] + memory_ids)
+     conn.commit()
+     count = cursor.rowcount
+     conn.close()
+     return count
+
+
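SQLite has no array parameters, so the IN list is synthesized from ? placeholders while the values themselves stay parameterized. A hypothetical call (invented ids and path):

bulk_update_memory_status("cortex.db", ["mem_a", "mem_b", "mem_c"], "archived")
# executes: UPDATE memories SET status = ?, last_accessed = datetime('now')
#           WHERE id IN (?,?,?)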
+ def get_memories_needing_review(db_path: str, days_threshold: int = 30, limit: int = 50) -> list[Memory]:
+     """Get memories that haven't been accessed recently and may need review."""
+     conn = get_connection(db_path)
+     query = """
+         SELECT * FROM memories
+         WHERE last_accessed < date('now', ?)
+         OR last_accessed IS NULL
+         ORDER BY last_accessed ASC NULLS FIRST, importance_score DESC
+         LIMIT ?
+     """
+     cursor = conn.execute(query, (f'-{days_threshold} days', limit))
+
+     memories = []
+     for row in cursor.fetchall():
+         tags = parse_tags(row["tags"])
+         memories.append(
+             Memory(
+                 id=row["id"],
+                 content=row["content"],
+                 context=row["context"],
+                 type=row["type"],
+                 status=row["status"] or "fresh",
+                 importance_score=int(row["importance_score"] or 50),
+                 access_count=row["access_count"] or 0,
+                 created_at=datetime.fromisoformat(row["created_at"]),
+                 last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
+                 tags=tags,
+             )
+         )
+
+     conn.close()
+     return memories
+
+
+ def get_relationships(db_path: str, memory_id: Optional[str] = None) -> list[dict]:
+     """Get memory relationships for graph visualization."""
+     conn = get_connection(db_path)
+
+     # Check if memory_relationships table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return []
+
+     query = """
+         SELECT
+             r.source_memory_id as source_id,
+             r.target_memory_id as target_id,
+             r.relationship_type,
+             r.strength,
+             ms.content as source_content,
+             ms.type as source_type,
+             mt.content as target_content,
+             mt.type as target_type
+         FROM memory_relationships r
+         JOIN memories ms ON r.source_memory_id = ms.id
+         JOIN memories mt ON r.target_memory_id = mt.id
+     """
+
+     try:
+         if memory_id:
+             query += " WHERE r.source_memory_id = ? OR r.target_memory_id = ?"
+             cursor = conn.execute(query, (memory_id, memory_id))
+         else:
+             cursor = conn.execute(query)
+
+         result = [dict(row) for row in cursor.fetchall()]
+     except Exception as e:
+         print(f"[Database] Error querying relationships: {e}")
+         result = []
+     finally:
+         conn.close()
+
+     return result
+
+
+ def get_relationship_graph(db_path: str, center_id: Optional[str] = None, depth: int = 2) -> dict:
+     """Get graph data with nodes and edges for D3 visualization."""
+     relationships = get_relationships(db_path, center_id)
+
+     nodes = {}
+     edges = []
+
+     for rel in relationships:
+         # Add source node
+         if rel["source_id"] not in nodes:
+             nodes[rel["source_id"]] = {
+                 "id": rel["source_id"],
+                 "content": rel["source_content"][:100] if rel["source_content"] else "",
+                 "type": rel["source_type"],
+             }
+         # Add target node
+         if rel["target_id"] not in nodes:
+             nodes[rel["target_id"]] = {
+                 "id": rel["target_id"],
+                 "content": rel["target_content"][:100] if rel["target_content"] else "",
+                 "type": rel["target_type"],
+             }
+         # Add edge
+         edges.append({
+             "source": rel["source_id"],
+             "target": rel["target_id"],
+             "type": rel["relationship_type"],
+             "strength": rel["strength"] or 1.0,
+         })
+
+     return {"nodes": list(nodes.values()), "edges": edges}
+
+
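The returned shape plugs straight into a D3 force layout: each node appears once, and edges reference node ids. Illustrative output (contents invented):

{
    "nodes": [
        {"id": "mem_a", "content": "Chose WAL mode for the hook DB", "type": "decision"},
        {"id": "mem_b", "content": "WAL needs periodic checkpoints", "type": "solution"},
    ],
    "edges": [
        {"source": "mem_b", "target": "mem_a", "type": "derived_from", "strength": 0.8},
    ],
}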
+ # --- Command Analytics Functions ---
+
+
+ def get_command_usage(db_path: str, scope: Optional[str] = None, days: int = 30) -> list[dict]:
+     """Get slash command usage statistics aggregated by command_name.
+
+     Args:
+         db_path: Path to database
+         scope: Filter by scope ('universal', 'project', or None for all)
+         days: Number of days to look back
+
+     Returns:
+         List of command usage entries with counts and success rates
+     """
+     conn = get_connection(db_path)
+
+     # Check if command_name column exists
+     columns = conn.execute("PRAGMA table_info(activities)").fetchall()
+     column_names = [col[1] for col in columns]
+     if "command_name" not in column_names:
+         conn.close()
+         return []
+
+     query = """
+         SELECT
+             command_name,
+             command_scope,
+             COUNT(*) as count,
+             SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate,
+             AVG(duration_ms) as avg_duration_ms
+         FROM activities
+         WHERE command_name IS NOT NULL
+         AND command_name != ''
+         AND timestamp >= date('now', ?)
+     """
+     params = [f'-{days} days']
+
+     if scope:
+         query += " AND command_scope = ?"
+         params.append(scope)
+
+     query += " GROUP BY command_name, command_scope ORDER BY count DESC"
+
+     cursor = conn.execute(query, params)
+     result = [
+         {
+             "command_name": row["command_name"],
+             "command_scope": row["command_scope"] or "unknown",
+             "count": row["count"],
+             "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
+             "avg_duration_ms": round(row["avg_duration_ms"]) if row["avg_duration_ms"] else None,
+         }
+         for row in cursor.fetchall()
+     ]
+     conn.close()
+     return result
+
+
+ def get_skill_usage(db_path: str, scope: Optional[str] = None, days: int = 30) -> list[dict]:
+     """Get skill usage statistics aggregated by skill_name.
+
+     Args:
+         db_path: Path to database
+         scope: Filter by scope ('universal', 'project', or None for all)
+         days: Number of days to look back
+
+     Returns:
+         List of skill usage entries with counts and success rates
+     """
+     conn = get_connection(db_path)
+
+     # Check if skill_name column exists
+     columns = conn.execute("PRAGMA table_info(activities)").fetchall()
+     column_names = [col[1] for col in columns]
+     if "skill_name" not in column_names:
+         conn.close()
+         return []
+
+     query = """
+         SELECT
+             skill_name,
+             command_scope,
+             COUNT(*) as count,
+             SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate,
+             AVG(duration_ms) as avg_duration_ms
+         FROM activities
+         WHERE skill_name IS NOT NULL
+         AND skill_name != ''
+         AND timestamp >= date('now', ?)
+     """
+     params = [f'-{days} days']
+
+     if scope:
+         query += " AND command_scope = ?"
+         params.append(scope)
+
+     query += " GROUP BY skill_name, command_scope ORDER BY count DESC"
+
+     cursor = conn.execute(query, params)
+     result = [
+         {
+             "skill_name": row["skill_name"],
+             "skill_scope": row["command_scope"] or "unknown",
+             "count": row["count"],
+             "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
+             "avg_duration_ms": round(row["avg_duration_ms"]) if row["avg_duration_ms"] else None,
+         }
+         for row in cursor.fetchall()
+     ]
+     conn.close()
+     return result
+
+
+ def get_mcp_usage(db_path: str, days: int = 30) -> list[dict]:
+     """Get MCP server usage statistics.
+
+     Args:
+         db_path: Path to database
+         days: Number of days to look back
+
+     Returns:
+         List of MCP server usage entries with tool counts and call totals
+     """
+     conn = get_connection(db_path)
+
+     # Check if mcp_server column exists
+     columns = conn.execute("PRAGMA table_info(activities)").fetchall()
+     column_names = [col[1] for col in columns]
+     if "mcp_server" not in column_names:
+         conn.close()
+         return []
+
+     query = """
+         SELECT
+             mcp_server,
+             COUNT(DISTINCT tool_name) as tool_count,
+             COUNT(*) as total_calls,
+             SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
+         FROM activities
+         WHERE mcp_server IS NOT NULL
+         AND mcp_server != ''
+         AND timestamp >= date('now', ?)
+         GROUP BY mcp_server
+         ORDER BY total_calls DESC
+     """
+     cursor = conn.execute(query, (f'-{days} days',))
+     result = [
+         {
+             "mcp_server": row["mcp_server"],
+             "tool_count": row["tool_count"],
+             "total_calls": row["total_calls"],
+             "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
+         }
+         for row in cursor.fetchall()
+     ]
+     conn.close()
+     return result
+
+
+ def get_activity_detail(db_path: str, activity_id: str) -> Optional[dict]:
+     """Get full activity details including complete input/output.
+
+     Args:
+         db_path: Path to database
+         activity_id: Activity ID
+
+     Returns:
+         Full activity details or None if not found
+     """
+     conn = get_connection(db_path)
+     cursor = conn.execute("SELECT * FROM activities WHERE id = ?", (activity_id,))
+     row = cursor.fetchone()
+
+     if not row:
+         conn.close()
+         return None
+
+     # Get column names for safe access
+     column_names = [description[0] for description in cursor.description]
+
+     result = {
+         "id": row["id"],
+         "session_id": row["session_id"],
+         "event_type": row["event_type"],
+         "tool_name": row["tool_name"],
+         "tool_input_full": row["tool_input"],
+         "tool_output_full": row["tool_output"],
+         "success": bool(row["success"]),
+         "error_message": row["error_message"],
+         "duration_ms": row["duration_ms"],
+         "file_path": row["file_path"],
+         "timestamp": row["timestamp"],
+     }
+
+     # Add command analytics fields if they exist
+     if "command_name" in column_names:
+         result["command_name"] = row["command_name"]
+     if "command_scope" in column_names:
+         result["command_scope"] = row["command_scope"]
+     if "mcp_server" in column_names:
+         result["mcp_server"] = row["mcp_server"]
+     if "skill_name" in column_names:
+         result["skill_name"] = row["skill_name"]
+
+     # Add summary fields if they exist
+     if "summary" in column_names:
+         result["summary"] = row["summary"]
+     if "summary_detail" in column_names:
+         result["summary_detail"] = row["summary_detail"]
+
+     conn.close()
+     return result
+
+
+ def create_memory(
+     db_path: str,
+     content: str,
+     memory_type: str = "other",
+     context: Optional[str] = None,
+     tags: Optional[list[str]] = None,
+     importance_score: int = 50,
+     related_memory_ids: Optional[list[str]] = None,
+ ) -> str:
+     """Create a new memory and return its ID.
+
+     Args:
+         db_path: Path to the database file
+         content: Memory content
+         memory_type: Type of memory (e.g., 'decision', 'solution', 'conversation')
+         context: Additional context
+         tags: List of tags
+         importance_score: Importance score (1-100)
+         related_memory_ids: IDs of related memories to create relationships with
+
+     Returns:
+         The ID of the created memory
+     """
+     import uuid
+
+     conn = get_write_connection(db_path)
+
+     # Generate ID
+     memory_id = f"mem_{int(datetime.now().timestamp() * 1000)}_{uuid.uuid4().hex[:8]}"
+     now = datetime.now().isoformat()
+
+     # Insert memory
+     conn.execute(
+         """
+         INSERT INTO memories (id, content, context, type, status, importance_score, access_count, created_at, last_accessed, updated_at, tags)
+         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+         """,
+         (
+             memory_id,
+             content,
+             context,
+             memory_type,
+             "fresh",
+             importance_score,
+             0,
+             now,
+             now,
+             now,
+             json.dumps(tags) if tags else None,
+         ),
+     )
+
+     # Create relationships if related_memory_ids provided
+     if related_memory_ids:
+         # Check if memory_relationships table exists
+         table_check = conn.execute(
+             "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
+         ).fetchone()
+
+         if table_check:
+             for related_id in related_memory_ids:
+                 try:
+                     conn.execute(
+                         """
+                         INSERT INTO memory_relationships (source_memory_id, target_memory_id, relationship_type, strength)
+                         VALUES (?, ?, ?, ?)
+                         """,
+                         (memory_id, related_id, "derived_from", 0.8),
+                     )
+                 except Exception:
+                     # Ignore if related memory doesn't exist
+                     pass
+
+     conn.commit()
+     conn.close()
+
+     return memory_id
+
+
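A hypothetical call, showing the generated ID scheme (millisecond timestamp plus an eight-character uuid4 suffix):

mem_id = create_memory(
    "cortex.db",  # illustrative path
    content="FTS search falls back to LIKE when memories_fts is absent",
    memory_type="decision",
    tags=["search", "sqlite"],
    importance_score=70,
)
# mem_id resembles "mem_1714070000123_9f8e7d6c"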
+ # --- User Message Functions for Style Tab ---
+
+
+ def get_user_messages(
+     db_path: str,
+     session_id: Optional[str] = None,
+     search: Optional[str] = None,
+     has_code_blocks: Optional[bool] = None,
+     has_questions: Optional[bool] = None,
+     has_commands: Optional[bool] = None,
+     tone_filter: Optional[str] = None,
+     sort_by: str = "timestamp",
+     sort_order: str = "desc",
+     limit: int = 50,
+     offset: int = 0,
+ ) -> list[dict]:
+     """Get user messages with filtering, sorting, and pagination.
+
+     Args:
+         db_path: Path to database
+         session_id: Filter by session
+         search: Search in content
+         has_code_blocks: Filter messages with/without code blocks
+         has_questions: Filter messages with/without questions
+         has_commands: Filter messages with/without slash commands
+         tone_filter: Filter by tone indicator (e.g., 'polite', 'urgent', 'technical')
+         sort_by: Sort by column (timestamp, word_count, char_count)
+         sort_order: 'asc' or 'desc'
+         limit: Maximum results
+         offset: Pagination offset
+
+     Returns:
+         List of user message dictionaries
+     """
+     conn = get_connection(db_path)
+
+     # Check if user_messages table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return []
+
+     query = "SELECT * FROM user_messages WHERE 1=1"
+     params: list = []
+
+     if session_id:
+         query += " AND session_id = ?"
+         params.append(session_id)
+
+     if search:
+         query += " AND content LIKE ?"
+         params.append(f"%{search}%")
+
+     if has_code_blocks is not None:
+         query += " AND has_code_blocks = ?"
+         params.append(1 if has_code_blocks else 0)
+
+     if has_questions is not None:
+         query += " AND has_questions = ?"
+         params.append(1 if has_questions else 0)
+
+     if has_commands is not None:
+         query += " AND has_commands = ?"
+         params.append(1 if has_commands else 0)
+
+     if tone_filter:
+         # Search within JSON array of tone_indicators
+         query += " AND tone_indicators LIKE ?"
+         params.append(f'%"{tone_filter}"%')
+
+     # Sorting
+     valid_sort_columns = ["timestamp", "word_count", "char_count", "line_count"]
+     sort_by = sort_by if sort_by in valid_sort_columns else "timestamp"
+     sort_order = "DESC" if sort_order.lower() == "desc" else "ASC"
+     query += f" ORDER BY {sort_by} {sort_order}"
+
+     # Pagination
+     query += " LIMIT ? OFFSET ?"
+     params.extend([limit, offset])
+
+     cursor = conn.execute(query, params)
+     messages = []
+
+     for row in cursor.fetchall():
+         # Parse tone_indicators from JSON
+         tone_indicators = []
+         if row["tone_indicators"]:
+             try:
+                 tone_indicators = json.loads(row["tone_indicators"])
+             except (json.JSONDecodeError, TypeError):
+                 pass
+
+         # Get primary tone (first in the list) for frontend compatibility
+         primary_tone = tone_indicators[0] if tone_indicators else None
+
+         messages.append({
+             "id": row["id"],
+             "session_id": row["session_id"],
+             "created_at": row["timestamp"],  # Frontend expects created_at
+             "timestamp": row["timestamp"],  # Keep for backward compatibility
+             "content": row["content"],
+             "word_count": row["word_count"],
+             "char_count": row["char_count"],
+             "line_count": row["line_count"],
+             "has_code_blocks": bool(row["has_code_blocks"]),
+             "has_questions": bool(row["has_questions"]),
+             "has_commands": bool(row["has_commands"]),
+             "tone": primary_tone,  # Frontend expects single tone string
+             "tone_indicators": tone_indicators,
+             "project_path": row["project_path"],
+         })
+
+     conn.close()
+     return messages
+
+
+ def get_user_message_count(
+     db_path: str,
+     session_id: Optional[str] = None,
+ ) -> int:
+     """Get total count of user messages.
+
+     Args:
+         db_path: Path to database
+         session_id: Optional filter by session
+
+     Returns:
+         Count of user messages
+     """
+     conn = get_connection(db_path)
+
+     # Check if user_messages table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return 0
+
+     query = "SELECT COUNT(*) FROM user_messages"
+     params = []
+
+     if session_id:
+         query += " WHERE session_id = ?"
+         params.append(session_id)
+
+     count = conn.execute(query, params).fetchone()[0]
+     conn.close()
+     return count
+
+
+ def delete_user_message(db_path: str, message_id: str) -> bool:
+     """Delete a single user message.
+
+     Args:
+         db_path: Path to database
+         message_id: Message ID to delete
+
+     Returns:
+         True if deleted, False if not found
+     """
+     conn = get_write_connection(db_path)
+
+     cursor = conn.execute("DELETE FROM user_messages WHERE id = ?", (message_id,))
+     conn.commit()
+
+     deleted = cursor.rowcount > 0
+     conn.close()
+     return deleted
+
+
+ def delete_user_messages_bulk(db_path: str, message_ids: list[str]) -> int:
+     """Delete multiple user messages.
+
+     Args:
+         db_path: Path to database
+         message_ids: List of message IDs to delete
+
+     Returns:
+         Count of messages deleted
+     """
+     if not message_ids:
+         return 0
+
+     conn = get_write_connection(db_path)
+     placeholders = ','.join('?' * len(message_ids))
+     query = f"DELETE FROM user_messages WHERE id IN ({placeholders})"
+     cursor = conn.execute(query, message_ids)
+     conn.commit()
+
+     count = cursor.rowcount
+     conn.close()
+     return count
+
+
+ def get_style_profile(db_path: str, project_path: Optional[str] = None) -> Optional[dict]:
+     """Get user style profile.
+
+     Args:
+         db_path: Path to database
+         project_path: Project-specific profile, or None for global
+
+     Returns:
+         Style profile dictionary or None if not found
+     """
+     conn = get_connection(db_path)
+
+     # Check if user_style_profiles table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='user_style_profiles'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return None
+
+     if project_path:
+         query = "SELECT * FROM user_style_profiles WHERE project_path = ? ORDER BY updated_at DESC LIMIT 1"
+         cursor = conn.execute(query, (project_path,))
+     else:
+         query = "SELECT * FROM user_style_profiles WHERE project_path IS NULL ORDER BY updated_at DESC LIMIT 1"
+         cursor = conn.execute(query)
+
+     row = cursor.fetchone()
+     conn.close()
+
+     if not row:
+         return None
+
+     # Parse JSON fields
+     def parse_json_field(value):
+         if not value:
+             return None
+         try:
+             return json.loads(value)
+         except (json.JSONDecodeError, TypeError):
+             return None
+
+     return {
+         "id": row["id"],
+         "project_path": row["project_path"],
+         "total_messages": row["total_messages"],
+         "avg_word_count": row["avg_word_count"],
+         "avg_char_count": row["avg_char_count"],
+         "common_phrases": parse_json_field(row["common_phrases"]),
+         "vocabulary_richness": row["vocabulary_richness"],
+         "formality_score": row["formality_score"],
+         "question_frequency": row["question_frequency"],
+         "command_frequency": row["command_frequency"],
+         "code_block_frequency": row["code_block_frequency"],
+         "punctuation_style": parse_json_field(row["punctuation_style"]),
+         "greeting_patterns": parse_json_field(row["greeting_patterns"]),
+         "instruction_style": parse_json_field(row["instruction_style"]),
+         "sample_messages": parse_json_field(row["sample_messages"]),
+         "created_at": row["created_at"],
+         "updated_at": row["updated_at"],
+     }
+
+
+ def get_style_samples(db_path: str, limit: int = 10) -> list[dict]:
+     """Get sample user messages for style analysis preview.
+
+     Returns a diverse selection of messages showcasing different styles.
+
+     Args:
+         db_path: Path to database
+         limit: Maximum samples to return
+
+     Returns:
+         List of sample messages with style indicators
+     """
+     conn = get_connection(db_path)
+
+     # Check if user_messages table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return []
+
+     # Get a diverse sample: some recent, some with code, some with questions
+     samples = []
+
+     # Recent messages
+     cursor = conn.execute(
+         "SELECT * FROM user_messages ORDER BY timestamp DESC LIMIT ?",
+         (limit // 3,)
+     )
+     for row in cursor.fetchall():
+         samples.append(_row_to_sample(row))
+
+     # Messages with code blocks
+     cursor = conn.execute(
+         "SELECT * FROM user_messages WHERE has_code_blocks = 1 ORDER BY timestamp DESC LIMIT ?",
+         (limit // 3,)
+     )
+     for row in cursor.fetchall():
+         sample = _row_to_sample(row)
+         if sample["id"] not in [s["id"] for s in samples]:
+             samples.append(sample)
+
+     # Longer messages (likely more substantive)
+     cursor = conn.execute(
+         "SELECT * FROM user_messages WHERE word_count > 20 ORDER BY word_count DESC LIMIT ?",
+         (limit // 3,)
+     )
+     for row in cursor.fetchall():
+         sample = _row_to_sample(row)
+         if sample["id"] not in [s["id"] for s in samples]:
+             samples.append(sample)
+
+     conn.close()
+     return samples[:limit]
+
+
+ def _row_to_sample(row) -> dict:
+     """Convert a database row to a sample message dict."""
+     tone_indicators = []
+     if row["tone_indicators"]:
+         try:
+             tone_indicators = json.loads(row["tone_indicators"])
+         except (json.JSONDecodeError, TypeError):
+             pass
+
+     return {
+         "id": row["id"],
+         "timestamp": row["timestamp"],
+         "content_preview": row["content"][:200] + "..." if len(row["content"]) > 200 else row["content"],
+         "word_count": row["word_count"],
+         "has_code_blocks": bool(row["has_code_blocks"]),
+         "has_questions": bool(row["has_questions"]),
+         "tone_indicators": tone_indicators,
+     }
+
+
+ def get_style_samples_by_category(db_path: str, samples_per_tone: int = 3) -> dict:
+     """Get sample user messages grouped by style category.
+
+     Maps tone_indicators to frontend categories:
+     - professional: direct, polite, formal tones
+     - casual: casual tones
+     - technical: technical tones
+     - creative: unique patterns, inquisitive tones
+
+     Args:
+         db_path: Path to database
+         samples_per_tone: Max samples per category
+
+     Returns:
+         Dict with professional, casual, technical, creative lists
+     """
+     conn = get_connection(db_path)
+
+     # Check if user_messages table exists
+     table_check = conn.execute(
+         "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
+     ).fetchone()
+
+     if not table_check:
+         conn.close()
+         return {
+             "professional": [],
+             "casual": [],
+             "technical": [],
+             "creative": []
+         }
+
+     result = {
+         "professional": [],
+         "casual": [],
+         "technical": [],
+         "creative": []
+     }
+
+     # Mapping from tone_indicators to categories
+     tone_to_category = {
+         "direct": "professional",
+         "polite": "professional",
+         "formal": "professional",
+         "casual": "casual",
+         "technical": "technical",
+         "inquisitive": "creative",
+         "urgent": "professional",
+     }
+
+     # Get all messages with tone indicators
+     cursor = conn.execute(
+         """SELECT content, tone_indicators FROM user_messages
+         WHERE tone_indicators IS NOT NULL AND tone_indicators != '[]'
+         ORDER BY timestamp DESC LIMIT 200"""
+     )
+
+     for row in cursor.fetchall():
+         content = row["content"]
+         try:
+             tones = json.loads(row["tone_indicators"]) if row["tone_indicators"] else []
+         except (json.JSONDecodeError, TypeError):
+             tones = []
+
+         # Map to categories
+         for tone in tones:
+             category = tone_to_category.get(tone.lower(), "creative")
+             if len(result[category]) < samples_per_tone:
+                 # Truncate content for preview
+                 preview = content[:200] + "..." if len(content) > 200 else content
+                 if preview not in result[category]:
+                     result[category].append(preview)
+                 break  # Only add to first matching category
+
+     # Fill any empty categories with recent messages
+     if any(len(v) == 0 for v in result.values()):
+         cursor = conn.execute(
+             "SELECT content FROM user_messages ORDER BY timestamp DESC LIMIT ?",
+             (samples_per_tone * 4,)
+         )
+         fallback_messages = [
+             row["content"][:200] + "..." if len(row["content"]) > 200 else row["content"]
+             for row in cursor.fetchall()
+         ]
+
+         for category in result:
+             if len(result[category]) == 0 and fallback_messages:
+                 # Take messages for empty categories
+                 for msg in fallback_messages[:samples_per_tone]:
+                     if msg not in [m for v in result.values() for m in v]:
+                         result[category].append(msg)
+
+     conn.close()
+     return result
+
+
1534
+ def compute_style_profile_from_messages(db_path: str) -> Optional[dict]:
1535
+ """Compute a style profile from user_messages table.
1536
+
1537
+ This is used when no pre-computed profile exists.
1538
+
1539
+ Returns format expected by frontend StyleProfileCard:
1540
+ - total_messages: int
1541
+ - avg_word_count: float
1542
+ - primary_tone: str
1543
+ - question_percentage: float
1544
+ - tone_distribution: dict[str, int]
1545
+ - style_markers: list[str]
1546
+ """
1547
+ conn = get_connection(db_path)
1548
+
1549
+ # Check if user_messages table exists
1550
+ table_check = conn.execute(
1551
+ "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
1552
+ ).fetchone()
1553
+
1554
+ if not table_check:
1555
+ conn.close()
1556
+ return None
1557
+
1558
+ # Get total count and averages
1559
+ stats = conn.execute(
1560
+ """SELECT
1561
+ COUNT(*) as total,
1562
+ AVG(word_count) as avg_words,
1563
+ AVG(char_count) as avg_chars,
1564
+ SUM(CASE WHEN has_questions = 1 THEN 1 ELSE 0 END) as question_count
1565
+ FROM user_messages"""
1566
+ ).fetchone()
1567
+
1568
+ if not stats or stats["total"] == 0:
1569
+ conn.close()
1570
+ return None
1571
+
1572
+ total_messages = stats["total"]
1573
+ avg_word_count = stats["avg_words"] or 0
1574
+ question_percentage = (stats["question_count"] / total_messages * 100) if total_messages > 0 else 0
1575
+
1576
+ # Compute tone distribution
1577
+ tone_distribution = {}
1578
+ cursor = conn.execute(
1579
+ "SELECT tone_indicators FROM user_messages WHERE tone_indicators IS NOT NULL AND tone_indicators != '[]'"
1580
+ )
1581
+ for row in cursor.fetchall():
1582
+ try:
1583
+ tones = json.loads(row["tone_indicators"]) if row["tone_indicators"] else []
1584
+ for tone in tones:
1585
+ tone_lower = tone.lower()
1586
+ tone_distribution[tone_lower] = tone_distribution.get(tone_lower, 0) + 1
1587
+ except (json.JSONDecodeError, TypeError):
1588
+ pass
1589
+
1590
+ # Determine primary tone (most common)
1591
+ primary_tone = "direct"
1592
+ if tone_distribution:
1593
+ primary_tone = max(tone_distribution, key=tone_distribution.get)
1594
+
1595
+ # Generate style markers based on the data
1596
+ style_markers = []
1597
+
1598
+ if avg_word_count < 15:
1599
+ style_markers.append("Concise")
1600
+ elif avg_word_count > 40:
1601
+ style_markers.append("Detailed")
1602
+ else:
1603
+ style_markers.append("Balanced length")
1604
+
1605
+ if question_percentage > 40:
1606
+ style_markers.append("Question-driven")
1607
+ elif question_percentage < 10:
1608
+ style_markers.append("Statement-focused")
1609
+
1610
+ # Check for code usage
1611
+ code_stats = conn.execute(
1612
+ "SELECT SUM(CASE WHEN has_code_blocks = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as code_pct FROM user_messages"
1613
+ ).fetchone()
1614
+ if code_stats and code_stats["code_pct"] and code_stats["code_pct"] > 20:
1615
+ style_markers.append("Code-heavy")
1616
+
1617
+ # Add primary tone to markers
1618
+ tone_labels = {
1619
+ "direct": "Direct",
1620
+ "polite": "Polite",
1621
+ "technical": "Technical",
1622
+ "casual": "Casual",
1623
+ "inquisitive": "Inquisitive",
1624
+ "urgent": "Urgent",
1625
+ }
1626
+ if primary_tone in tone_labels:
1627
+ style_markers.append(tone_labels[primary_tone])
1628
+
1629
+ if not style_markers:
1630
+ style_markers.append("Building profile...")
1631
+
1632
+ # Get sample messages to show the AI how the user actually writes
1633
+ sample_messages = []
1634
+ cursor = conn.execute(
1635
+ """SELECT content FROM user_messages
1636
+ WHERE length(content) > 20 AND length(content) < 500
1637
+ AND has_commands = 0
1638
+ ORDER BY timestamp DESC LIMIT 5"""
1639
+ )
1640
+ for row in cursor.fetchall():
1641
+ sample_messages.append(row["content"])
1642
+
1643
+ conn.close()
1644
+
1645
+ return {
1646
+ "totalMessages": total_messages,
1647
+ "avgWordCount": round(avg_word_count, 1),
1648
+ "primaryTone": primary_tone,
1649
+ "questionPercentage": round(question_percentage, 1),
1650
+ "toneDistribution": tone_distribution,
1651
+ "styleMarkers": style_markers,
1652
+ "sampleMessages": sample_messages,
1653
+ }
1654
+
1655
+
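Note the camelCase keys: unlike the snake_case dicts elsewhere in this module, this return value is shaped for the frontend StyleProfileCard. Illustrative output (numbers invented):

{
    "totalMessages": 321,
    "avgWordCount": 18.4,
    "primaryTone": "technical",
    "questionPercentage": 22.7,
    "toneDistribution": {"technical": 140, "direct": 95, "casual": 30},
    "styleMarkers": ["Balanced length", "Code-heavy", "Technical"],
    "sampleMessages": ["can you profile the fts query before we cache it"],
}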
+ # --- Agent Query Functions ---
+
+
+ def get_agents(
+     db_path: str,
+     agent_type: Optional[str] = None,
+     limit: int = 50,
+     active_only: bool = False
+ ) -> list[dict]:
+     """Get agents with recent activity counts."""
+     conn = get_connection(db_path)
+
+     query = """
+         SELECT
+             a.*,
+             COALESCE(recent.count, 0) as recent_activity_count,
+             CASE WHEN a.last_seen > datetime('now', '-5 minutes') THEN 1 ELSE 0 END as is_active
+         FROM agents a
+         LEFT JOIN (
+             SELECT agent_id, COUNT(*) as count
+             FROM activities
+             WHERE timestamp > datetime('now', '-1 hour')
+             GROUP BY agent_id
+         ) recent ON recent.agent_id = a.id
+         WHERE 1=1
+     """
+     params = []
+
+     if agent_type:
+         query += " AND a.type = ?"
+         params.append(agent_type)
+
+     if active_only:
+         query += " AND a.last_seen > datetime('now', '-5 minutes')"
+
+     query += " ORDER BY a.last_seen DESC LIMIT ?"
+     params.append(limit)
+
+     cursor = conn.execute(query, params)
+     results = [dict(row) for row in cursor.fetchall()]
+     conn.close()
+     return results
+
+
+ def get_agent_by_id(db_path: str, agent_id: str) -> Optional[dict]:
+     """Get single agent by ID."""
+     conn = get_connection(db_path)
+
+     cursor = conn.execute("""
+         SELECT
+             a.*,
+             CASE WHEN a.last_seen > datetime('now', '-5 minutes') THEN 1 ELSE 0 END as is_active
+         FROM agents a
+         WHERE a.id = ?
+     """, (agent_id,))
+
+     row = cursor.fetchone()
+     conn.close()
+     return dict(row) if row else None
+
+
+ def get_agent_tool_breakdown(db_path: str, agent_id: str) -> list[dict]:
+     """Get tool usage breakdown for an agent."""
+     conn = get_connection(db_path)
+
+     cursor = conn.execute("""
+         SELECT
+             tool_name,
+             COUNT(*) as count,
+             AVG(duration_ms) as avg_duration_ms,
+             SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*) as success_rate
+         FROM activities
+         WHERE agent_id = ? AND tool_name IS NOT NULL
+         GROUP BY tool_name
+         ORDER BY count DESC
+     """, (agent_id,))
+
+     results = [dict(row) for row in cursor.fetchall()]
+     conn.close()
+     return results
+
+
+ def get_agent_files_touched(db_path: str, agent_id: str, limit: int = 50) -> list[str]:
+     """Get list of files an agent has touched."""
+     conn = get_connection(db_path)
+
+     # Files from file_path column
+     cursor = conn.execute("""
+         SELECT DISTINCT file_path
+         FROM activities
+         WHERE agent_id = ?
+         AND file_path IS NOT NULL
+         AND file_path != ''
+         LIMIT ?
+     """, (agent_id, limit))
+
+     files = [row[0] for row in cursor.fetchall()]
+     conn.close()
+     return files
+
+
+ def get_agent_parent(db_path: str, agent_id: str) -> Optional[str]:
+     """Find which agent spawned this subagent (via Task tool)."""
+     conn = get_connection(db_path)
+
+     # Look for Task tool call that created this agent
+     cursor = conn.execute("""
+         SELECT agent_id
+         FROM activities
+         WHERE tool_name = 'Task'
+         AND tool_output LIKE ?
+         ORDER BY timestamp DESC
+         LIMIT 1
+     """, (f'%{agent_id}%',))
+
+     row = cursor.fetchone()
+     conn.close()
+     return row[0] if row else None
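Every helper above is a plain function over a database path, which keeps endpoint wiring thin. A hypothetical FastAPI route sketch (the real routes live in main.py; the names and path here are illustrative):

from fastapi import FastAPI

app = FastAPI()
DB_PATH = "cortex.db"  # illustrative; the backend resolves the real path

@app.get("/api/agents/{agent_id}")
def agent_detail(agent_id: str) -> dict:
    agent = get_agent_by_id(DB_PATH, agent_id)
    if agent is None:
        return {"error": "not found"}
    agent["tools"] = get_agent_tool_breakdown(DB_PATH, agent_id)
    agent["files"] = get_agent_files_touched(DB_PATH, agent_id)
    agent["parent"] = get_agent_parent(DB_PATH, agent_id)
    return agent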