omni-cortex 1.12.0-py3-none-any.whl → 1.12.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +57 -3
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/database.py +1430 -1094
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/main.py +1592 -1381
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/models.py +370 -285
- {omni_cortex-1.12.0.dist-info → omni_cortex-1.12.1.dist-info}/METADATA +1 -1
- omni_cortex-1.12.1.dist-info/RECORD +26 -0
- omni_cortex-1.12.0.dist-info/RECORD +0 -26
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/hooks/stop.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
- {omni_cortex-1.12.0.dist-info → omni_cortex-1.12.1.dist-info}/WHEEL +0 -0
- {omni_cortex-1.12.0.dist-info → omni_cortex-1.12.1.dist-info}/entry_points.txt +0 -0
- {omni_cortex-1.12.0.dist-info → omni_cortex-1.12.1.dist-info}/licenses/LICENSE +0 -0
{omni_cortex-1.12.0.data → omni_cortex-1.12.1.data}/data/share/omni-cortex/dashboard/backend/database.py
@@ -1,1094 +1,1430 @@
-"""Database query functions for reading omni-cortex SQLite databases."""
-
-import json
-import sqlite3
-from collections import Counter
-from datetime import datetime, timedelta
-from pathlib import Path
-from typing import Optional
-
-from models import Activity, FilterParams, Memory, MemoryStats, MemoryUpdate, Session, TimelineEntry
-
-
-def get_connection(db_path: str) -> sqlite3.Connection:
-    """Get a read-only connection to the database."""
-    conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
-    conn.row_factory = sqlite3.Row
-    return conn
-
-
-def get_write_connection(db_path: str) -> sqlite3.Connection:
-    """Get a writable connection to the database."""
-    conn = sqlite3.connect(db_path)
-    conn.row_factory = sqlite3.Row
-    return conn
-
-
-def ensure_migrations(db_path: str) -> None:
-    """Ensure database has latest migrations applied.
-
-    This function checks for and applies any missing schema updates,
-    including command analytics columns and natural language summary columns.
-    """
-    conn = get_write_connection(db_path)
-
-    # Check if activities table exists
-    table_check = conn.execute(
-        "SELECT name FROM sqlite_master WHERE type='table' AND name='activities'"
-    ).fetchone()
-
-    if not table_check:
-        conn.close()
-        return
-
-    # Check available columns
-    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
-    column_names = {col[1] for col in columns}
-
-    migrations_applied = []
-
-    # Migration v1.1: Command analytics columns
-    if "command_name" not in column_names:
-        conn.executescript("""
-            ALTER TABLE activities ADD COLUMN command_name TEXT;
-            ALTER TABLE activities ADD COLUMN command_scope TEXT;
-            ALTER TABLE activities ADD COLUMN mcp_server TEXT;
-            ALTER TABLE activities ADD COLUMN skill_name TEXT;
-
-            CREATE INDEX IF NOT EXISTS idx_activities_command ON activities(command_name);
-            CREATE INDEX IF NOT EXISTS idx_activities_mcp ON activities(mcp_server);
-            CREATE INDEX IF NOT EXISTS idx_activities_skill ON activities(skill_name);
-        """)
-        migrations_applied.append("v1.1: command analytics columns")
-
-    # Migration v1.2: Natural language summary columns
-    if "summary" not in column_names:
-        conn.executescript("""
-            ALTER TABLE activities ADD COLUMN summary TEXT;
-            ALTER TABLE activities ADD COLUMN summary_detail TEXT;
-        """)
-        migrations_applied.append("v1.2: summary columns")
-
-    if migrations_applied:
-        conn.commit()
-        print(f"[Database] Applied migrations: {', '.join(migrations_applied)}")
-
-    conn.close()
-
-
-def parse_tags(tags_str: Optional[str]) -> list[str]:
-    """Parse tags from JSON string."""
-    if not tags_str:
-        return []
-    try:
-        tags = json.loads(tags_str)
-        return tags if isinstance(tags, list) else []
-    except (json.JSONDecodeError, TypeError):
-        return []
-
-
-def get_memories(db_path: str, filters: FilterParams) -> list[Memory]:
-    """Get memories with filtering, sorting, and pagination."""
-    conn = get_connection(db_path)
-
-    # Build query
-    query = "SELECT * FROM memories WHERE 1=1"
-    params: list = []
-
-    if filters.memory_type:
-        query += " AND type = ?"
-        params.append(filters.memory_type)
-
-    if filters.status:
-        query += " AND status = ?"
-        params.append(filters.status)
-
-    if filters.min_importance is not None:
-        query += " AND importance_score >= ?"
-        params.append(filters.min_importance)
-
-    if filters.max_importance is not None:
-        query += " AND importance_score <= ?"
-        params.append(filters.max_importance)
-
-    if filters.search:
-        query += " AND (content LIKE ? OR context LIKE ?)"
-        search_term = f"%{filters.search}%"
-        params.extend([search_term, search_term])
-
-    # Sorting
-    valid_sort_columns = ["created_at", "last_accessed", "importance_score", "access_count"]
-    sort_by = filters.sort_by if filters.sort_by in valid_sort_columns else "last_accessed"
-    sort_order = "DESC" if filters.sort_order.lower() == "desc" else "ASC"
-    query += f" ORDER BY {sort_by} {sort_order}"
-
-    # Pagination
-    query += " LIMIT ? OFFSET ?"
-    params.extend([filters.limit, filters.offset])
-
-    cursor = conn.execute(query, params)
-    rows = cursor.fetchall()
-
-    memories = []
-    for row in rows:
-        # Parse tags from JSON string
-        tags = parse_tags(row["tags"])
-
-        memories.append(
-            Memory(
-                id=row["id"],
-                content=row["content"],
-                context=row["context"],
-                type=row["type"],
-                status=row["status"] or "fresh",
-                importance_score=int(row["importance_score"] or 50),
-                access_count=row["access_count"] or 0,
-                created_at=datetime.fromisoformat(row["created_at"]),
-                last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
-                tags=tags,
-            )
-        )
-
-    conn.close()
-    return memories
-
-
-def get_memory_by_id(db_path: str, memory_id: str) -> Optional[Memory]:
-    """Get a single memory by ID."""
-    conn = get_connection(db_path)
-
-    cursor = conn.execute("SELECT * FROM memories WHERE id = ?", (memory_id,))
-    row = cursor.fetchone()
-
-    if not row:
-        conn.close()
-        return None
-
-    # Parse tags from JSON string
-    tags = parse_tags(row["tags"])
-
-    memory = Memory(
-        id=row["id"],
-        content=row["content"],
-        context=row["context"],
-        type=row["type"],
-        status=row["status"] or "fresh",
-        importance_score=int(row["importance_score"] or 50),
-        access_count=row["access_count"] or 0,
-        created_at=datetime.fromisoformat(row["created_at"]),
-        last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
-        tags=tags,
-    )
-
-    conn.close()
-    return memory
-
-
-def get_memory_stats(db_path: str) -> MemoryStats:
-    """Get statistics about memories in the database."""
-    conn = get_connection(db_path)
-
-    # Total count
-    total = conn.execute("SELECT COUNT(*) FROM memories").fetchone()[0]
-
-    # By type
-    type_cursor = conn.execute("SELECT type, COUNT(*) as count FROM memories GROUP BY type")
-    by_type = {row["type"]: row["count"] for row in type_cursor.fetchall()}
-
-    # By status
-    status_cursor = conn.execute("SELECT status, COUNT(*) as count FROM memories GROUP BY status")
-    by_status = {(row["status"] or "fresh"): row["count"] for row in status_cursor.fetchall()}
-
-    # Average importance
-    avg_cursor = conn.execute("SELECT AVG(importance_score) FROM memories")
-    avg_importance = avg_cursor.fetchone()[0] or 0.0
-
-    # Total access count
-    access_cursor = conn.execute("SELECT SUM(access_count) FROM memories")
-    total_access = access_cursor.fetchone()[0] or 0
-
-    # Tags with counts - extract from JSON column
-    tags_cursor = conn.execute("SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''")
-    tag_counter: Counter = Counter()
-    for row in tags_cursor.fetchall():
-        tags = parse_tags(row["tags"])
-        tag_counter.update(tags)
-
-    tags = [{"name": name, "count": count} for name, count in tag_counter.most_common(50)]
-
-    conn.close()
-
-    return MemoryStats(
-        total_count=total,
-        by_type=by_type,
-        by_status=by_status,
-        avg_importance=round(avg_importance, 1),
-        total_access_count=total_access,
-        tags=tags,
-    )
-
-
-def get_activities(
-    db_path: str,
-    event_type: Optional[str] = None,
-    tool_name: Optional[str] = None,
-    limit: int = 100,
-    offset: int = 0,
-) -> list[Activity]:
-    """Get activity log entries with all available fields."""
-    conn = get_connection(db_path)
-
-    # Check available columns for backward compatibility
-    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
-    column_names = {col[1] for col in columns}
-
-    query = "SELECT * FROM activities WHERE 1=1"
-    params: list = []
-
-    if event_type:
-        query += " AND event_type = ?"
-        params.append(event_type)
-
-    if tool_name:
-        query += " AND tool_name = ?"
-        params.append(tool_name)
-
-    query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
-    params.extend([limit, offset])
-
-    cursor = conn.execute(query, params)
-    activities = []
-
-    for row in cursor.fetchall():
-        # Parse timestamp - handle both with and without timezone
-        ts_str = row["timestamp"]
-        try:
-            ts = datetime.fromisoformat(ts_str)
-        except ValueError:
-            # Fallback for edge cases
-            ts = datetime.now()
-
-        activity_data = {
-            "id": row["id"],
-            "session_id": row["session_id"],
-            "event_type": row["event_type"],
-            "tool_name": row["tool_name"],
-            "tool_input": row["tool_input"],
-            "tool_output": row["tool_output"],
-            "success": bool(row["success"]),
-            "error_message": row["error_message"],
-            "duration_ms": row["duration_ms"],
-            "file_path": row["file_path"],
-            "timestamp": ts,
-        }
-
-        # Add command analytics fields if available
-        if "command_name" in column_names:
-            activity_data["command_name"] = row["command_name"]
-        if "command_scope" in column_names:
-            activity_data["command_scope"] = row["command_scope"]
-        if "mcp_server" in column_names:
-            activity_data["mcp_server"] = row["mcp_server"]
-        if "skill_name" in column_names:
-            activity_data["skill_name"] = row["skill_name"]
-
-        # Add summary fields if available
-        if "summary" in column_names:
-            activity_data["summary"] = row["summary"]
-        if "summary_detail" in column_names:
-            activity_data["summary_detail"] = row["summary_detail"]
-
-        activities.append(Activity(**activity_data))
-
-    conn.close()
-    return activities
-
-
-def get_timeline(
-    db_path: str,
-    hours: int = 24,
-    include_memories: bool = True,
-    include_activities: bool = True,
-) -> list[TimelineEntry]:
-    """Get a timeline of memories and activities."""
-    conn = get_connection(db_path)
-    since = datetime.now() - timedelta(hours=hours)
-    since_str = since.isoformat()
-
-    entries: list[TimelineEntry] = []
-
-    if include_memories:
-        cursor = conn.execute(
-            "SELECT * FROM memories WHERE created_at >= ? ORDER BY created_at DESC",
-            (since_str,),
-        )
-        for row in cursor.fetchall():
-            entries.append(
-                TimelineEntry(
-                    timestamp=datetime.fromisoformat(row["created_at"]),
-                    entry_type="memory",
-                    data={
-                        "id": row["id"],
-                        "content": row["content"][:200] + "..." if len(row["content"]) > 200 else row["content"],
-                        "type": row["type"],
-                        "importance": row["importance_score"],
-                    },
-                )
-            )
-
-    if include_activities:
-        cursor = conn.execute(
-            "SELECT * FROM activities WHERE timestamp >= ? ORDER BY timestamp DESC",
-            (since_str,),
-        )
-        for row in cursor.fetchall():
-            entries.append(
-                TimelineEntry(
-                    timestamp=datetime.fromisoformat(row["timestamp"]),
-                    entry_type="activity",
-                    data={
-                        "id": row["id"],
-                        "event_type": row["event_type"],
-                        "tool_name": row["tool_name"],
-                        "success": bool(row["success"]),
-                        "duration_ms": row["duration_ms"],
-                    },
-                )
-            )
-
-    # Sort by timestamp descending
-    entries.sort(key=lambda e: e.timestamp, reverse=True)
-
-    conn.close()
-    return entries
-
-
-def get_sessions(db_path: str, limit: int = 20) -> list[Session]:
-    """Get recent sessions."""
-    conn = get_connection(db_path)
-
-    cursor = conn.execute(
-        """
-        SELECT s.*, COUNT(a.id) as activity_count
-        FROM sessions s
-        LEFT JOIN activities a ON s.id = a.session_id
-        GROUP BY s.id
-        ORDER BY s.started_at DESC
-        LIMIT ?
-        """,
-        (limit,),
-    )
-
-    sessions = []
-    for row in cursor.fetchall():
-        sessions.append(
-            Session(
-                id=row["id"],
-                project_path=row["project_path"],
-                started_at=datetime.fromisoformat(row["started_at"]),
-                ended_at=datetime.fromisoformat(row["ended_at"]) if row["ended_at"] else None,
-                summary=row["summary"],
-                activity_count=row["activity_count"],
-            )
-        )
-
-    conn.close()
-    return sessions
-
-
-def get_all_tags(db_path: str) -> list[dict]:
-    """Get all tags with their usage counts."""
-    conn = get_connection(db_path)
-
-    # Extract tags from JSON column
-    cursor = conn.execute("SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''")
-    tag_counter: Counter = Counter()
-    for row in cursor.fetchall():
-        tags = parse_tags(row["tags"])
-        tag_counter.update(tags)
-
-    tags = [{"name": name, "count": count} for name, count in tag_counter.most_common()]
-
-    conn.close()
-    return tags
-
-
-def get_type_distribution(db_path: str) -> dict[str, int]:
-    """Get memory type distribution."""
-    conn = get_connection(db_path)
-
-    cursor = conn.execute("SELECT type, COUNT(*) as count FROM memories GROUP BY type")
-    distribution = {row["type"]: row["count"] for row in cursor.fetchall()}
-
-    conn.close()
-    return distribution
-
-
-def search_memories(db_path: str, query: str, limit: int = 20) -> list[Memory]:
-    """Search memories using FTS if available, otherwise LIKE."""
-    conn = get_connection(db_path)
-
-    # Check if FTS table exists
-    fts_check = conn.execute(
-        "SELECT name FROM sqlite_master WHERE type='table' AND name='memories_fts'"
-    ).fetchone()
-
-    if fts_check:
-        # Use FTS search - FTS5 uses rowid to match the memories table rowid
-        # Escape special FTS5 characters and wrap in quotes for phrase search
-        safe_query = query.replace('"', '""')
-        try:
-            cursor = conn.execute(
-                """
-                SELECT m.* FROM memories m
-                JOIN memories_fts fts ON m.rowid = fts.rowid
-                WHERE memories_fts MATCH ?
-                ORDER BY rank
-                LIMIT ?
-                """,
-                (f'"{safe_query}"', limit),
-            )
-        except sqlite3.OperationalError:
-            # Fallback if FTS query fails
-            search_term = f"%{query}%"
-            cursor = conn.execute(
-                """
-                SELECT * FROM memories
-                WHERE content LIKE ? OR context LIKE ?
-                ORDER BY importance_score DESC
-                LIMIT ?
-                """,
-                (search_term, search_term, limit),
-            )
-    else:
-        # Fallback to LIKE
-        search_term = f"%{query}%"
-        cursor = conn.execute(
-            """
-            SELECT * FROM memories
-            WHERE content LIKE ? OR context LIKE ?
-            ORDER BY importance_score DESC
-            LIMIT ?
-            """,
-            (search_term, search_term, limit),
-        )
-
-    memories = []
-    for row in cursor.fetchall():
-        # Parse tags from JSON string
-        tags = parse_tags(row["tags"])
-
-        memories.append(
-            Memory(
-                id=row["id"],
-                content=row["content"],
-                context=row["context"],
-                type=row["type"],
-                status=row["status"] or "fresh",
-                importance_score=int(row["importance_score"] or 50),
-                access_count=row["access_count"] or 0,
-                created_at=datetime.fromisoformat(row["created_at"]),
-                last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
-                tags=tags,
-            )
-        )
-
-    conn.close()
-    return memories
-
-
-def update_memory(db_path: str, memory_id: str, updates: MemoryUpdate) -> Optional[Memory]:
-    """Update a memory and return the updated record."""
-    conn = get_write_connection(db_path)
-
-    # Build update query dynamically based on provided fields
-    update_fields = []
-    params = []
-
-    if updates.content is not None:
-        update_fields.append("content = ?")
-        params.append(updates.content)
-
-    if updates.context is not None:
-        update_fields.append("context = ?")
-        params.append(updates.context)
-
-    if updates.memory_type is not None:
-        update_fields.append("type = ?")
-        params.append(updates.memory_type)
-
-    if updates.status is not None:
-        update_fields.append("status = ?")
-        params.append(updates.status)
-
-    if updates.importance_score is not None:
-        update_fields.append("importance_score = ?")
-        params.append(updates.importance_score)
-
-    if updates.tags is not None:
-        update_fields.append("tags = ?")
-        params.append(json.dumps(updates.tags))
-
-    if not update_fields:
-        conn.close()
-        return get_memory_by_id(db_path, memory_id)
-
-    # Add updated timestamp
-    update_fields.append("last_accessed = ?")
-    params.append(datetime.now().isoformat())
-
-    # Add memory_id to params
-    params.append(memory_id)
-
-    query = f"UPDATE memories SET {', '.join(update_fields)} WHERE id = ?"
-    cursor = conn.execute(query, params)
-    conn.commit()
-
-    if cursor.rowcount == 0:
-        conn.close()
-        return None
-
-    conn.close()
-    return get_memory_by_id(db_path, memory_id)
-
-
-def delete_memory(db_path: str, memory_id: str) -> bool:
-    """Delete a memory by ID. Returns True if deleted, False if not found."""
-    conn = get_write_connection(db_path)
-
-    # Also delete related entries in memory_relationships
-    conn.execute(
-        "DELETE FROM memory_relationships WHERE source_id = ? OR target_id = ?",
-        (memory_id, memory_id),
-    )
-
-    cursor = conn.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
-    conn.commit()
-
-    deleted = cursor.rowcount > 0
-    conn.close()
-    return deleted
-
-
-# --- Stats Functions for Dashboard Charts ---
-
-
-def get_activity_heatmap(db_path: str, days: int = 90) -> list[dict]:
-    """Get activity counts grouped by day for heatmap visualization."""
-    conn = get_connection(db_path)
-    query = """
-        SELECT date(timestamp) as date, COUNT(*) as count
-        FROM activities
-        WHERE timestamp >= date('now', ?)
-        GROUP BY date(timestamp)
-        ORDER BY date
-    """
-    cursor = conn.execute(query, (f'-{days} days',))
-    result = [{"date": row["date"], "count": row["count"]} for row in cursor.fetchall()]
-    conn.close()
-    return result
-
-
-def get_tool_usage(db_path: str, limit: int = 10) -> list[dict]:
-    """Get tool usage statistics with success rates."""
-    conn = get_connection(db_path)
-    query = """
-        SELECT
-            tool_name,
-            COUNT(*) as count,
-            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
-        FROM activities
-        WHERE tool_name IS NOT NULL AND tool_name != ''
-        GROUP BY tool_name
-        ORDER BY count DESC
-        LIMIT ?
-    """
-    cursor = conn.execute(query, (limit,))
-    result = [
-        {
-            "tool_name": row["tool_name"],
-            "count": row["count"],
-            "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
-        }
-        for row in cursor.fetchall()
-    ]
-    conn.close()
-    return result
-
-
-def get_memory_growth(db_path: str, days: int = 30) -> list[dict]:
-    """Get memory creation over time with cumulative totals."""
-    conn = get_connection(db_path)
-    query = """
-        WITH daily_counts AS (
-            SELECT date(created_at) as date, COUNT(*) as count
-            FROM memories
-            WHERE created_at >= date('now', ?)
-            GROUP BY date(created_at)
-        )
-        SELECT
-            date,
-            count,
-            SUM(count) OVER (ORDER BY date) as cumulative
-        FROM daily_counts
-        ORDER BY date
-    """
-    cursor = conn.execute(query, (f'-{days} days',))
-    result = [
-        {"date": row["date"], "count": row["count"], "cumulative": row["cumulative"]}
-        for row in cursor.fetchall()
-    ]
-    conn.close()
-    return result
-
-
-def get_recent_sessions(db_path: str, limit: int = 5) -> list[dict]:
-    """Get recent sessions with activity counts and memory counts."""
-    conn = get_connection(db_path)
-    query = """
-        SELECT
-            s.id,
-            s.project_path,
-            s.started_at,
-            s.ended_at,
-            s.summary,
-            COUNT(DISTINCT a.id) as activity_count
-        FROM sessions s
-        LEFT JOIN activities a ON a.session_id = s.id
-        GROUP BY s.id
-        ORDER BY s.started_at DESC
-        LIMIT ?
-    """
-    cursor = conn.execute(query, (limit,))
-    result = [
-        {
-            "id": row["id"],
-            "project_path": row["project_path"],
-            "started_at": row["started_at"],
-            "ended_at": row["ended_at"],
-            "summary": row["summary"],
-            "activity_count": row["activity_count"],
-        }
-        for row in cursor.fetchall()
-    ]
-    conn.close()
-    return result
-
-
-def bulk_update_memory_status(db_path: str, memory_ids: list[str], status: str) -> int:
-    """Update status for multiple memories. Returns count updated."""
-    if not memory_ids:
-        return 0
-    conn = get_write_connection(db_path)
-    placeholders = ','.join('?' * len(memory_ids))
-    query = f"UPDATE memories SET status = ?, last_accessed = datetime('now') WHERE id IN ({placeholders})"
-    cursor = conn.execute(query, [status] + memory_ids)
-    conn.commit()
-    count = cursor.rowcount
-    conn.close()
-    return count
-
-
-def get_memories_needing_review(db_path: str, days_threshold: int = 30, limit: int = 50) -> list[Memory]:
-    """Get memories that haven't been accessed recently and may need review."""
-    conn = get_connection(db_path)
-    query = """
-        SELECT * FROM memories
-        WHERE last_accessed < date('now', ?)
-            OR last_accessed IS NULL
-        ORDER BY last_accessed ASC NULLS FIRST, importance_score DESC
-        LIMIT ?
-    """
-    cursor = conn.execute(query, (f'-{days_threshold} days', limit))
-
-    memories = []
-    for row in cursor.fetchall():
-        tags = parse_tags(row["tags"])
-        memories.append(
-            Memory(
-                id=row["id"],
-                content=row["content"],
-                context=row["context"],
-                type=row["type"],
-                status=row["status"] or "fresh",
-                importance_score=int(row["importance_score"] or 50),
-                access_count=row["access_count"] or 0,
-                created_at=datetime.fromisoformat(row["created_at"]),
-                last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
-                tags=tags,
-            )
-        )
-
-    conn.close()
-    return memories
-
-
-def get_relationships(db_path: str, memory_id: Optional[str] = None) -> list[dict]:
-    """Get memory relationships for graph visualization."""
-    conn = get_connection(db_path)
-
-    # Check if memory_relationships table exists
-    table_check = conn.execute(
-        "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
-    ).fetchone()
-
-    if not table_check:
-        conn.close()
-        return []
-
-    query = """
-        SELECT
-            r.source_memory_id as source_id,
-            r.target_memory_id as target_id,
-            r.relationship_type,
-            r.strength,
-            ms.content as source_content,
-            ms.type as source_type,
-            mt.content as target_content,
-            mt.type as target_type
-        FROM memory_relationships r
-        JOIN memories ms ON r.source_memory_id = ms.id
-        JOIN memories mt ON r.target_memory_id = mt.id
-    """
-
-    try:
-        if memory_id:
-            query += " WHERE r.source_memory_id = ? OR r.target_memory_id = ?"
-            cursor = conn.execute(query, (memory_id, memory_id))
-        else:
-            cursor = conn.execute(query)
-
-        result = [dict(row) for row in cursor.fetchall()]
-    except Exception as e:
-        print(f"[Database] Error querying relationships: {e}")
-        result = []
-    finally:
-        conn.close()
-
-    return result
-
-
-def get_relationship_graph(db_path: str, center_id: Optional[str] = None, depth: int = 2) -> dict:
-    """Get graph data with nodes and edges for D3 visualization."""
-    relationships = get_relationships(db_path, center_id)
-
-    nodes = {}
-    edges = []
-
-    for rel in relationships:
-        # Add source node
-        if rel["source_id"] not in nodes:
-            nodes[rel["source_id"]] = {
-                "id": rel["source_id"],
-                "content": rel["source_content"][:100] if rel["source_content"] else "",
-                "type": rel["source_type"],
-            }
-        # Add target node
-        if rel["target_id"] not in nodes:
-            nodes[rel["target_id"]] = {
-                "id": rel["target_id"],
-                "content": rel["target_content"][:100] if rel["target_content"] else "",
-                "type": rel["target_type"],
-            }
-        # Add edge
-        edges.append({
-            "source": rel["source_id"],
-            "target": rel["target_id"],
-            "type": rel["relationship_type"],
-            "strength": rel["strength"] or 1.0,
-        })
-
-    return {"nodes": list(nodes.values()), "edges": edges}
-
-
-# --- Command Analytics Functions ---
-
-
-def get_command_usage(db_path: str, scope: Optional[str] = None, days: int = 30) -> list[dict]:
-    """Get slash command usage statistics aggregated by command_name.
-
-    Args:
-        db_path: Path to database
-        scope: Filter by scope ('universal', 'project', or None for all)
-        days: Number of days to look back
-
-    Returns:
-        List of command usage entries with counts and success rates
-    """
-    conn = get_connection(db_path)
-
-    # Check if command_name column exists
-    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
-    column_names = [col[1] for col in columns]
-    if "command_name" not in column_names:
-        conn.close()
-        return []
-
-    query = """
-        SELECT
-            command_name,
-            command_scope,
-            COUNT(*) as count,
-            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate,
-            AVG(duration_ms) as avg_duration_ms
-        FROM activities
-        WHERE command_name IS NOT NULL
-            AND command_name != ''
-            AND timestamp >= date('now', ?)
-    """
-    params = [f'-{days} days']
-
-    if scope:
-        query += " AND command_scope = ?"
-        params.append(scope)
-
-    query += " GROUP BY command_name, command_scope ORDER BY count DESC"
-
-    cursor = conn.execute(query, params)
-    result = [
-        {
-            "command_name": row["command_name"],
-            "command_scope": row["command_scope"] or "unknown",
-            "count": row["count"],
-            "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
-            "avg_duration_ms": round(row["avg_duration_ms"]) if row["avg_duration_ms"] else None,
-        }
-        for row in cursor.fetchall()
-    ]
-    conn.close()
-    return result
-
-
-def get_skill_usage(db_path: str, scope: Optional[str] = None, days: int = 30) -> list[dict]:
-    """Get skill usage statistics aggregated by skill_name.
-
-    Args:
-        db_path: Path to database
-        scope: Filter by scope ('universal', 'project', or None for all)
-        days: Number of days to look back
-
-    Returns:
-        List of skill usage entries with counts and success rates
-    """
-    conn = get_connection(db_path)
-
-    # Check if skill_name column exists
-    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
-    column_names = [col[1] for col in columns]
-    if "skill_name" not in column_names:
-        conn.close()
-        return []
-
-    query = """
-        SELECT
-            skill_name,
-            command_scope,
-            COUNT(*) as count,
-            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate,
-            AVG(duration_ms) as avg_duration_ms
-        FROM activities
-        WHERE skill_name IS NOT NULL
-            AND skill_name != ''
-            AND timestamp >= date('now', ?)
-    """
-    params = [f'-{days} days']
-
-    if scope:
-        query += " AND command_scope = ?"
-        params.append(scope)
-
-    query += " GROUP BY skill_name, command_scope ORDER BY count DESC"
-
-    cursor = conn.execute(query, params)
-    result = [
-        {
-            "skill_name": row["skill_name"],
-            "skill_scope": row["command_scope"] or "unknown",
-            "count": row["count"],
-            "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
-            "avg_duration_ms": round(row["avg_duration_ms"]) if row["avg_duration_ms"] else None,
-        }
-        for row in cursor.fetchall()
-    ]
-    conn.close()
-    return result
-
-
-def get_mcp_usage(db_path: str, days: int = 30) -> list[dict]:
-    """Get MCP server usage statistics.
-
-    Args:
-        db_path: Path to database
-        days: Number of days to look back
-
-    Returns:
-        List of MCP server usage entries with tool counts and call totals
-    """
-    conn = get_connection(db_path)
-
-    # Check if mcp_server column exists
-    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
-    column_names = [col[1] for col in columns]
-    if "mcp_server" not in column_names:
-        conn.close()
-        return []
-
-    query = """
-        SELECT
-            mcp_server,
-            COUNT(DISTINCT tool_name) as tool_count,
-            COUNT(*) as total_calls,
-            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
-        FROM activities
-        WHERE mcp_server IS NOT NULL
-            AND mcp_server != ''
-            AND timestamp >= date('now', ?)
-        GROUP BY mcp_server
-        ORDER BY total_calls DESC
-    """
-    cursor = conn.execute(query, (f'-{days} days',))
-    result = [
-        {
-            "mcp_server": row["mcp_server"],
-            "tool_count": row["tool_count"],
-            "total_calls": row["total_calls"],
-            "success_rate": round(row["success_rate"], 2) if row["success_rate"] else 1.0,
-        }
-        for row in cursor.fetchall()
-    ]
-    conn.close()
-    return result
-
-
-def get_activity_detail(db_path: str, activity_id: str) -> Optional[dict]:
-    """Get full activity details including complete input/output.
-
-    Args:
-        db_path: Path to database
-        activity_id: Activity ID
-
-    Returns:
-        Full activity details or None if not found
-    """
-    conn = get_connection(db_path)
-    cursor = conn.execute("SELECT * FROM activities WHERE id = ?", (activity_id,))
-    row = cursor.fetchone()
-
-    if not row:
-        conn.close()
-        return None
-
-    # Get column names for safe access
-    column_names = [description[0] for description in cursor.description]
-
-    result = {
-        "id": row["id"],
-        "session_id": row["session_id"],
-        "event_type": row["event_type"],
-        "tool_name": row["tool_name"],
-        "tool_input_full": row["tool_input"],
-        "tool_output_full": row["tool_output"],
-        "success": bool(row["success"]),
-        "error_message": row["error_message"],
-        "duration_ms": row["duration_ms"],
-        "file_path": row["file_path"],
-        "timestamp": row["timestamp"],
-    }
-
-    # Add command analytics fields if they exist
-    if "command_name" in column_names:
-        result["command_name"] = row["command_name"]
-    if "command_scope" in column_names:
-        result["command_scope"] = row["command_scope"]
-    if "mcp_server" in column_names:
-        result["mcp_server"] = row["mcp_server"]
-    if "skill_name" in column_names:
-        result["skill_name"] = row["skill_name"]
-
-    # Add summary fields if they exist
-    if "summary" in column_names:
-        result["summary"] = row["summary"]
-    if "summary_detail" in column_names:
-        result["summary_detail"] = row["summary_detail"]
-
-    conn.close()
-    return result
-
-
-def create_memory(
-    db_path: str,
-    content: str,
-    memory_type: str = "other",
-    context: Optional[str] = None,
-    tags: Optional[list[str]] = None,
-    importance_score: int = 50,
-    related_memory_ids: Optional[list[str]] = None,
-) -> str:
-    """Create a new memory and return its ID.
-
-    Args:
-        db_path: Path to the database file
-        content: Memory content
-        memory_type: Type of memory (e.g., 'decision', 'solution', 'conversation')
-        context: Additional context
-        tags: List of tags
-        importance_score: Importance score (1-100)
-        related_memory_ids: IDs of related memories to create relationships with
-
-    Returns:
-        The ID of the created memory
-    """
-    import uuid
-
-    conn = get_write_connection(db_path)
-
-    # Generate ID
-    memory_id = f"mem_{int(datetime.now().timestamp() * 1000)}_{uuid.uuid4().hex[:8]}"
-    now = datetime.now().isoformat()
-
-    # Insert memory
-    conn.execute(
-        """
-        INSERT INTO memories (id, content, context, type, status, importance_score, access_count, created_at, last_accessed, updated_at, tags)
-        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-        """,
-        (
-            memory_id,
-            content,
-            context,
-            memory_type,
-            "fresh",
-            importance_score,
-            0,
-            now,
-            now,
-            now,
-            json.dumps(tags) if tags else None,
-        ),
-    )
-
-    # Create relationships if related_memory_ids provided
-    if related_memory_ids:
-        # Check if memory_relationships table exists
-        table_check = conn.execute(
-            "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
-        ).fetchone()
-
-        if table_check:
-            for related_id in related_memory_ids:
-                try:
-                    conn.execute(
-                        """
-                        INSERT INTO memory_relationships (source_memory_id, target_memory_id, relationship_type, strength)
-                        VALUES (?, ?, ?, ?)
-                        """,
-                        (memory_id, related_id, "derived_from", 0.8),
-                    )
-                except Exception:
-                    # Ignore if related memory doesn't exist
-                    pass
-
-    conn.commit()
-    conn.close()
-
-    return memory_id
1
|
+
"""Database query functions for reading omni-cortex SQLite databases."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import sqlite3
|
|
5
|
+
from collections import Counter
|
|
6
|
+
from datetime import datetime, timedelta
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Optional
|
|
9
|
+
|
|
10
|
+
from models import Activity, FilterParams, Memory, MemoryStats, MemoryUpdate, Session, TimelineEntry
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def get_connection(db_path: str) -> sqlite3.Connection:
|
|
14
|
+
"""Get a read-only connection to the database."""
|
|
15
|
+
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
|
|
16
|
+
conn.row_factory = sqlite3.Row
|
|
17
|
+
return conn
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def get_write_connection(db_path: str) -> sqlite3.Connection:
|
|
21
|
+
"""Get a writable connection to the database."""
|
|
22
|
+
conn = sqlite3.connect(db_path)
|
|
23
|
+
conn.row_factory = sqlite3.Row
|
|
24
|
+
return conn
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def ensure_migrations(db_path: str) -> None:
|
|
28
|
+
"""Ensure database has latest migrations applied.
|
|
29
|
+
|
|
30
|
+
This function checks for and applies any missing schema updates,
|
|
31
|
+
including command analytics columns and natural language summary columns.
|
|
32
|
+
"""
|
|
33
|
+
conn = get_write_connection(db_path)
|
|
34
|
+
|
|
35
|
+
# Check if activities table exists
|
|
36
|
+
table_check = conn.execute(
|
|
37
|
+
"SELECT name FROM sqlite_master WHERE type='table' AND name='activities'"
|
|
38
|
+
).fetchone()
|
|
39
|
+
|
|
40
|
+
if not table_check:
|
|
41
|
+
conn.close()
|
|
42
|
+
return
|
|
43
|
+
|
|
44
|
+
# Check available columns
|
|
45
|
+
columns = conn.execute("PRAGMA table_info(activities)").fetchall()
|
|
46
|
+
column_names = {col[1] for col in columns}
|
|
47
|
+
|
|
48
|
+
migrations_applied = []
|
|
49
|
+
|
|
50
|
+
# Migration v1.1: Command analytics columns
|
|
51
|
+
if "command_name" not in column_names:
|
|
52
|
+
conn.executescript("""
|
|
53
|
+
ALTER TABLE activities ADD COLUMN command_name TEXT;
|
|
54
|
+
ALTER TABLE activities ADD COLUMN command_scope TEXT;
|
|
55
|
+
ALTER TABLE activities ADD COLUMN mcp_server TEXT;
|
|
56
|
+
ALTER TABLE activities ADD COLUMN skill_name TEXT;
|
|
57
|
+
|
|
58
|
+
CREATE INDEX IF NOT EXISTS idx_activities_command ON activities(command_name);
|
|
59
|
+
CREATE INDEX IF NOT EXISTS idx_activities_mcp ON activities(mcp_server);
|
|
60
|
+
CREATE INDEX IF NOT EXISTS idx_activities_skill ON activities(skill_name);
|
|
61
|
+
""")
|
|
62
|
+
migrations_applied.append("v1.1: command analytics columns")
|
|
63
|
+
|
|
64
|
+
# Migration v1.2: Natural language summary columns
|
|
65
|
+
if "summary" not in column_names:
|
|
66
|
+
conn.executescript("""
|
|
67
|
+
ALTER TABLE activities ADD COLUMN summary TEXT;
|
|
68
|
+
ALTER TABLE activities ADD COLUMN summary_detail TEXT;
|
|
69
|
+
""")
|
|
70
|
+
migrations_applied.append("v1.2: summary columns")
|
|
71
|
+
|
|
72
|
+
if migrations_applied:
|
|
73
|
+
conn.commit()
|
|
74
|
+
print(f"[Database] Applied migrations: {', '.join(migrations_applied)}")
|
|
75
|
+
|
|
76
|
+
conn.close()
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def parse_tags(tags_str: Optional[str]) -> list[str]:
|
|
80
|
+
"""Parse tags from JSON string."""
|
|
81
|
+
if not tags_str:
|
|
82
|
+
return []
|
|
83
|
+
try:
|
|
84
|
+
tags = json.loads(tags_str)
|
|
85
|
+
return tags if isinstance(tags, list) else []
|
|
86
|
+
except (json.JSONDecodeError, TypeError):
|
|
87
|
+
return []
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def get_memories(db_path: str, filters: FilterParams) -> list[Memory]:
|
|
91
|
+
"""Get memories with filtering, sorting, and pagination."""
|
|
92
|
+
conn = get_connection(db_path)
|
|
93
|
+
|
|
94
|
+
# Build query
|
|
95
|
+
query = "SELECT * FROM memories WHERE 1=1"
|
|
96
|
+
params: list = []
|
|
97
|
+
|
|
98
|
+
if filters.memory_type:
|
|
99
|
+
query += " AND type = ?"
|
|
100
|
+
params.append(filters.memory_type)
|
|
101
|
+
|
|
102
|
+
if filters.status:
|
|
103
|
+
query += " AND status = ?"
|
|
104
|
+
params.append(filters.status)
|
|
105
|
+
|
|
106
|
+
if filters.min_importance is not None:
|
|
107
|
+
query += " AND importance_score >= ?"
|
|
108
|
+
params.append(filters.min_importance)
|
|
109
|
+
|
|
110
|
+
if filters.max_importance is not None:
|
|
111
|
+
query += " AND importance_score <= ?"
|
|
112
|
+
params.append(filters.max_importance)
|
|
113
|
+
|
|
114
|
+
if filters.search:
|
|
115
|
+
query += " AND (content LIKE ? OR context LIKE ?)"
|
|
116
|
+
search_term = f"%{filters.search}%"
|
|
117
|
+
params.extend([search_term, search_term])
|
|
118
|
+
|
|
119
|
+
# Sorting
|
|
120
|
+
valid_sort_columns = ["created_at", "last_accessed", "importance_score", "access_count"]
|
|
121
|
+
sort_by = filters.sort_by if filters.sort_by in valid_sort_columns else "last_accessed"
|
|
122
|
+
sort_order = "DESC" if filters.sort_order.lower() == "desc" else "ASC"
|
|
123
|
+
query += f" ORDER BY {sort_by} {sort_order}"
|
|
124
|
+
|
|
125
|
+
# Pagination
|
|
126
|
+
query += " LIMIT ? OFFSET ?"
|
|
127
|
+
params.extend([filters.limit, filters.offset])
|
|
128
|
+
|
|
129
|
+
cursor = conn.execute(query, params)
|
|
130
|
+
rows = cursor.fetchall()
|
|
131
|
+
|
|
132
|
+
memories = []
|
|
133
|
+
for row in rows:
|
|
134
|
+
# Parse tags from JSON string
|
|
135
|
+
tags = parse_tags(row["tags"])
|
|
136
|
+
|
|
137
|
+
memories.append(
|
|
138
|
+
Memory(
|
|
139
|
+
id=row["id"],
|
|
140
|
+
content=row["content"],
|
|
141
|
+
context=row["context"],
|
|
142
|
+
type=row["type"],
|
|
143
|
+
status=row["status"] or "fresh",
|
|
144
|
+
importance_score=int(row["importance_score"] or 50),
|
|
145
|
+
access_count=row["access_count"] or 0,
|
|
146
|
+
created_at=datetime.fromisoformat(row["created_at"]),
|
|
147
|
+
last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
|
|
148
|
+
tags=tags,
|
|
149
|
+
)
|
|
150
|
+
)
|
|
151
|
+
|
|
152
|
+
conn.close()
|
|
153
|
+
return memories
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def get_memory_by_id(db_path: str, memory_id: str) -> Optional[Memory]:
|
|
157
|
+
"""Get a single memory by ID."""
|
|
158
|
+
conn = get_connection(db_path)
|
|
159
|
+
|
|
160
|
+
cursor = conn.execute("SELECT * FROM memories WHERE id = ?", (memory_id,))
|
|
161
|
+
row = cursor.fetchone()
|
|
162
|
+
|
|
163
|
+
if not row:
|
|
164
|
+
conn.close()
|
|
165
|
+
return None
|
|
166
|
+
|
|
167
|
+
# Parse tags from JSON string
|
|
168
|
+
tags = parse_tags(row["tags"])
|
|
169
|
+
|
|
170
|
+
memory = Memory(
|
|
171
|
+
id=row["id"],
|
|
172
|
+
content=row["content"],
|
|
173
|
+
context=row["context"],
|
|
174
|
+
type=row["type"],
|
|
175
|
+
status=row["status"] or "fresh",
|
|
176
|
+
importance_score=int(row["importance_score"] or 50),
|
|
177
|
+
access_count=row["access_count"] or 0,
|
|
178
|
+
created_at=datetime.fromisoformat(row["created_at"]),
|
|
179
|
+
last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
|
|
180
|
+
tags=tags,
|
|
181
|
+
)
|
|
182
|
+
|
|
183
|
+
conn.close()
|
|
184
|
+
return memory
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def get_memory_stats(db_path: str) -> MemoryStats:
|
|
188
|
+
"""Get statistics about memories in the database."""
|
|
189
|
+
conn = get_connection(db_path)
|
|
190
|
+
|
|
191
|
+
# Total count
|
|
192
|
+
total = conn.execute("SELECT COUNT(*) FROM memories").fetchone()[0]
|
|
193
|
+
|
|
194
|
+
# By type
|
|
195
|
+
type_cursor = conn.execute("SELECT type, COUNT(*) as count FROM memories GROUP BY type")
|
|
196
|
+
by_type = {row["type"]: row["count"] for row in type_cursor.fetchall()}
|
|
197
|
+
|
|
198
|
+
# By status
|
|
199
|
+
status_cursor = conn.execute("SELECT status, COUNT(*) as count FROM memories GROUP BY status")
|
|
200
|
+
by_status = {(row["status"] or "fresh"): row["count"] for row in status_cursor.fetchall()}
|
|
201
|
+
|
|
202
|
+
# Average importance
|
|
203
|
+
avg_cursor = conn.execute("SELECT AVG(importance_score) FROM memories")
|
|
204
|
+
avg_importance = avg_cursor.fetchone()[0] or 0.0
|
|
205
|
+
|
|
206
|
+
# Total access count
|
|
207
|
+
access_cursor = conn.execute("SELECT SUM(access_count) FROM memories")
|
|
208
|
+
total_access = access_cursor.fetchone()[0] or 0
|
|
209
|
+
|
|
210
|
+
# Tags with counts - extract from JSON column
|
|
211
|
+
tags_cursor = conn.execute("SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''")
|
|
212
|
+
tag_counter: Counter = Counter()
|
|
213
|
+
for row in tags_cursor.fetchall():
|
|
214
|
+
tags = parse_tags(row["tags"])
|
|
215
|
+
tag_counter.update(tags)
|
|
216
|
+
|
|
217
|
+
tags = [{"name": name, "count": count} for name, count in tag_counter.most_common(50)]
|
|
218
|
+
|
|
219
|
+
conn.close()
|
|
220
|
+
|
|
221
|
+
return MemoryStats(
|
|
222
|
+
total_count=total,
|
|
223
|
+
by_type=by_type,
|
|
224
|
+
by_status=by_status,
|
|
225
|
+
avg_importance=round(avg_importance, 1),
|
|
226
|
+
total_access_count=total_access,
|
|
227
|
+
tags=tags,
|
|
228
|
+
)
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def get_activities(
|
|
232
|
+
db_path: str,
|
|
233
|
+
event_type: Optional[str] = None,
|
|
234
|
+
tool_name: Optional[str] = None,
|
|
235
|
+
limit: int = 100,
|
|
236
|
+
offset: int = 0,
|
|
237
|
+
) -> list[Activity]:
|
|
238
|
+
"""Get activity log entries with all available fields."""
|
|
239
|
+
conn = get_connection(db_path)
|
|
240
|
+
|
|
241
|
+
# Check available columns for backward compatibility
|
|
242
|
+
columns = conn.execute("PRAGMA table_info(activities)").fetchall()
|
|
243
|
+
column_names = {col[1] for col in columns}
|
|
244
|
+
|
|
245
|
+
query = "SELECT * FROM activities WHERE 1=1"
|
|
246
|
+
params: list = []
|
|
247
|
+
|
|
248
|
+
if event_type:
|
|
249
|
+
query += " AND event_type = ?"
|
|
250
|
+
params.append(event_type)
|
|
251
|
+
|
|
252
|
+
if tool_name:
|
|
253
|
+
query += " AND tool_name = ?"
|
|
254
|
+
params.append(tool_name)
|
|
255
|
+
|
|
256
|
+
query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
|
|
257
|
+
params.extend([limit, offset])
|
|
258
|
+
|
|
259
|
+
cursor = conn.execute(query, params)
|
|
260
|
+
activities = []
|
|
261
|
+
|
|
262
|
+
for row in cursor.fetchall():
|
|
263
|
+
# Parse timestamp - handle both with and without timezone
|
|
264
|
+
ts_str = row["timestamp"]
|
|
265
|
+
try:
|
|
266
|
+
ts = datetime.fromisoformat(ts_str)
|
|
267
|
+
except ValueError:
|
|
268
|
+
# Fallback for edge cases
|
|
269
|
+
ts = datetime.now()
|
|
270
|
+
|
|
271
|
+
activity_data = {
|
|
272
|
+
"id": row["id"],
|
|
273
|
+
"session_id": row["session_id"],
|
|
274
|
+
"event_type": row["event_type"],
|
|
275
|
+
"tool_name": row["tool_name"],
|
|
276
|
+
"tool_input": row["tool_input"],
|
|
277
|
+
"tool_output": row["tool_output"],
|
|
278
|
+
"success": bool(row["success"]),
|
|
279
|
+
"error_message": row["error_message"],
|
|
280
|
+
"duration_ms": row["duration_ms"],
|
|
281
|
+
"file_path": row["file_path"],
|
|
282
|
+
"timestamp": ts,
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
# Add command analytics fields if available
|
|
286
|
+
if "command_name" in column_names:
|
|
287
|
+
activity_data["command_name"] = row["command_name"]
|
|
288
|
+
if "command_scope" in column_names:
|
|
289
|
+
activity_data["command_scope"] = row["command_scope"]
|
|
290
|
+
if "mcp_server" in column_names:
|
|
291
|
+
activity_data["mcp_server"] = row["mcp_server"]
|
|
292
|
+
if "skill_name" in column_names:
|
|
293
|
+
activity_data["skill_name"] = row["skill_name"]
|
|
294
|
+
|
|
295
|
+
# Add summary fields if available
|
|
296
|
+
if "summary" in column_names:
|
|
297
|
+
activity_data["summary"] = row["summary"]
|
|
298
|
+
if "summary_detail" in column_names:
|
|
299
|
+
activity_data["summary_detail"] = row["summary_detail"]
|
|
300
|
+
|
|
301
|
+
activities.append(Activity(**activity_data))
|
|
302
|
+
|
|
303
|
+
conn.close()
|
|
304
|
+
return activities
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+


def get_timeline(
    db_path: str,
    hours: int = 24,
    include_memories: bool = True,
    include_activities: bool = True,
) -> list[TimelineEntry]:
    """Get a timeline of memories and activities."""
    conn = get_connection(db_path)
    since = datetime.now() - timedelta(hours=hours)
    since_str = since.isoformat()

    entries: list[TimelineEntry] = []

    if include_memories:
        cursor = conn.execute(
            "SELECT * FROM memories WHERE created_at >= ? ORDER BY created_at DESC",
            (since_str,),
        )
        for row in cursor.fetchall():
            entries.append(
                TimelineEntry(
                    timestamp=datetime.fromisoformat(row["created_at"]),
                    entry_type="memory",
                    data={
                        "id": row["id"],
                        "content": row["content"][:200] + "..." if len(row["content"]) > 200 else row["content"],
                        "type": row["type"],
                        "importance": row["importance_score"],
                    },
                )
            )

    if include_activities:
        cursor = conn.execute(
            "SELECT * FROM activities WHERE timestamp >= ? ORDER BY timestamp DESC",
            (since_str,),
        )
        for row in cursor.fetchall():
            entries.append(
                TimelineEntry(
                    timestamp=datetime.fromisoformat(row["timestamp"]),
                    entry_type="activity",
                    data={
                        "id": row["id"],
                        "event_type": row["event_type"],
                        "tool_name": row["tool_name"],
                        "success": bool(row["success"]),
                        "duration_ms": row["duration_ms"],
                    },
                )
            )

    # Sort by timestamp descending
    entries.sort(key=lambda e: e.timestamp, reverse=True)

    conn.close()
    return entries
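

# Usage sketch (illustrative only): a memories-only view of the last week;
# entries come back pre-sorted newest-first, so no further sorting is needed.
#
#     week = get_timeline("cortex.db", hours=24 * 7, include_activities=False)
#     latest = week[0].timestamp if week else None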


def get_sessions(db_path: str, limit: int = 20) -> list[Session]:
    """Get recent sessions."""
    conn = get_connection(db_path)

    cursor = conn.execute(
        """
        SELECT s.*, COUNT(a.id) as activity_count
        FROM sessions s
        LEFT JOIN activities a ON s.id = a.session_id
        GROUP BY s.id
        ORDER BY s.started_at DESC
        LIMIT ?
        """,
        (limit,),
    )

    sessions = []
    for row in cursor.fetchall():
        sessions.append(
            Session(
                id=row["id"],
                project_path=row["project_path"],
                started_at=datetime.fromisoformat(row["started_at"]),
                ended_at=datetime.fromisoformat(row["ended_at"]) if row["ended_at"] else None,
                summary=row["summary"],
                activity_count=row["activity_count"],
            )
        )

    conn.close()
    return sessions


def get_all_tags(db_path: str) -> list[dict]:
    """Get all tags with their usage counts."""
    conn = get_connection(db_path)

    # Extract tags from JSON column
    cursor = conn.execute("SELECT tags FROM memories WHERE tags IS NOT NULL AND tags != ''")
    tag_counter: Counter = Counter()
    for row in cursor.fetchall():
        tags = parse_tags(row["tags"])
        tag_counter.update(tags)

    tags = [{"name": name, "count": count} for name, count in tag_counter.most_common()]

    conn.close()
    return tags


def get_type_distribution(db_path: str) -> dict[str, int]:
    """Get memory type distribution."""
    conn = get_connection(db_path)

    cursor = conn.execute("SELECT type, COUNT(*) as count FROM memories GROUP BY type")
    distribution = {row["type"]: row["count"] for row in cursor.fetchall()}

    conn.close()
    return distribution


def search_memories(db_path: str, query: str, limit: int = 20) -> list[Memory]:
    """Search memories using FTS if available, otherwise LIKE."""
    conn = get_connection(db_path)

    # Check if FTS table exists
    fts_check = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='memories_fts'"
    ).fetchone()

    if fts_check:
        # Use FTS search - FTS5 uses rowid to match the memories table rowid
        # Escape special FTS5 characters and wrap in quotes for phrase search
        safe_query = query.replace('"', '""')
        try:
            cursor = conn.execute(
                """
                SELECT m.* FROM memories m
                JOIN memories_fts fts ON m.rowid = fts.rowid
                WHERE memories_fts MATCH ?
                ORDER BY rank
                LIMIT ?
                """,
                (f'"{safe_query}"', limit),
            )
        except sqlite3.OperationalError:
            # Fallback if FTS query fails
            search_term = f"%{query}%"
            cursor = conn.execute(
                """
                SELECT * FROM memories
                WHERE content LIKE ? OR context LIKE ?
                ORDER BY importance_score DESC
                LIMIT ?
                """,
                (search_term, search_term, limit),
            )
    else:
        # Fallback to LIKE
        search_term = f"%{query}%"
        cursor = conn.execute(
            """
            SELECT * FROM memories
            WHERE content LIKE ? OR context LIKE ?
            ORDER BY importance_score DESC
            LIMIT ?
            """,
            (search_term, search_term, limit),
        )

    memories = []
    for row in cursor.fetchall():
        # Parse tags from JSON string
        tags = parse_tags(row["tags"])

        memories.append(
            Memory(
                id=row["id"],
                content=row["content"],
                context=row["context"],
                type=row["type"],
                status=row["status"] or "fresh",
                importance_score=int(row["importance_score"] or 50),
                access_count=row["access_count"] or 0,
                created_at=datetime.fromisoformat(row["created_at"]),
                last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
                tags=tags,
            )
        )

    conn.close()
    return memories
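

# Usage sketch (illustrative only): because the query is wrapped in double
# quotes before MATCH, FTS5 treats it as a phrase, so words like AND/OR in
# user input are matched literally rather than interpreted as operators.
#
#     hits = search_memories("cortex.db", "retry AND backoff", limit=5)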


def update_memory(db_path: str, memory_id: str, updates: MemoryUpdate) -> Optional[Memory]:
    """Update a memory and return the updated record."""
    conn = get_write_connection(db_path)

    # Build update query dynamically based on provided fields
    update_fields = []
    params = []

    if updates.content is not None:
        update_fields.append("content = ?")
        params.append(updates.content)

    if updates.context is not None:
        update_fields.append("context = ?")
        params.append(updates.context)

    if updates.memory_type is not None:
        update_fields.append("type = ?")
        params.append(updates.memory_type)

    if updates.status is not None:
        update_fields.append("status = ?")
        params.append(updates.status)

    if updates.importance_score is not None:
        update_fields.append("importance_score = ?")
        params.append(updates.importance_score)

    if updates.tags is not None:
        update_fields.append("tags = ?")
        params.append(json.dumps(updates.tags))

    if not update_fields:
        conn.close()
        return get_memory_by_id(db_path, memory_id)

    # Touch the last_accessed timestamp
    update_fields.append("last_accessed = ?")
    params.append(datetime.now().isoformat())

    # Add memory_id to params
    params.append(memory_id)

    query = f"UPDATE memories SET {', '.join(update_fields)} WHERE id = ?"
    cursor = conn.execute(query, params)
    conn.commit()

    if cursor.rowcount == 0:
        conn.close()
        return None

    conn.close()
    return get_memory_by_id(db_path, memory_id)
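

# Usage sketch (illustrative only; the ID and status value are hypothetical):
# only fields set on the MemoryUpdate model end up in the SET clause, so a
# partial update leaves all other columns intact.
#
#     updated = update_memory("cortex.db", "mem_123", MemoryUpdate(status="stale"))
#     assert updated is None or updated.status == "stale"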


def delete_memory(db_path: str, memory_id: str) -> bool:
    """Delete a memory by ID. Returns True if deleted, False if not found."""
    conn = get_write_connection(db_path)

    # Also delete related entries in memory_relationships
    conn.execute(
        "DELETE FROM memory_relationships WHERE source_id = ? OR target_id = ?",
        (memory_id, memory_id),
    )

    cursor = conn.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
    conn.commit()

    deleted = cursor.rowcount > 0
    conn.close()
    return deleted


# --- Stats Functions for Dashboard Charts ---


def get_activity_heatmap(db_path: str, days: int = 90) -> list[dict]:
    """Get activity counts grouped by day for heatmap visualization."""
    conn = get_connection(db_path)
    query = """
        SELECT date(timestamp) as date, COUNT(*) as count
        FROM activities
        WHERE timestamp >= date('now', ?)
        GROUP BY date(timestamp)
        ORDER BY date
    """
    cursor = conn.execute(query, (f'-{days} days',))
    result = [{"date": row["date"], "count": row["count"]} for row in cursor.fetchall()]
    conn.close()
    return result
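

# Usage sketch (illustrative only): the modifier string is bound as a SQL
# parameter to date('now', ?), so days=90 becomes date('now', '-90 days').
#
#     heatmap = get_activity_heatmap("cortex.db", days=90)
#     # -> [{"date": "2025-01-01", "count": 12}, ...]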


def get_tool_usage(db_path: str, limit: int = 10) -> list[dict]:
    """Get tool usage statistics with success rates."""
    conn = get_connection(db_path)
    query = """
        SELECT
            tool_name,
            COUNT(*) as count,
            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
        FROM activities
        WHERE tool_name IS NOT NULL AND tool_name != ''
        GROUP BY tool_name
        ORDER BY count DESC
        LIMIT ?
    """
    cursor = conn.execute(query, (limit,))
    result = [
        {
            "tool_name": row["tool_name"],
            "count": row["count"],
            # Check against None so a genuine 0.0 rate is not reported as 1.0
            "success_rate": round(row["success_rate"], 2) if row["success_rate"] is not None else 1.0,
        }
        for row in cursor.fetchall()
    ]
    conn.close()
    return result
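

# Usage sketch (illustrative only): success_rate is a 0.0-1.0 fraction
# computed in SQL, so rendering it as a percentage is left to the caller.
#
#     for tool in get_tool_usage("cortex.db", limit=5):
#         print(f'{tool["tool_name"]}: {tool["success_rate"]:.0%} of {tool["count"]}')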


def get_memory_growth(db_path: str, days: int = 30) -> list[dict]:
    """Get memory creation over time with cumulative totals."""
    conn = get_connection(db_path)
    query = """
        WITH daily_counts AS (
            SELECT date(created_at) as date, COUNT(*) as count
            FROM memories
            WHERE created_at >= date('now', ?)
            GROUP BY date(created_at)
        )
        SELECT
            date,
            count,
            SUM(count) OVER (ORDER BY date) as cumulative
        FROM daily_counts
        ORDER BY date
    """
    cursor = conn.execute(query, (f'-{days} days',))
    result = [
        {"date": row["date"], "count": row["count"], "cumulative": row["cumulative"]}
        for row in cursor.fetchall()
    ]
    conn.close()
    return result
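

# Usage sketch (illustrative only): the running total comes from the
# SUM(count) OVER (ORDER BY date) window, so each row already carries the
# cumulative figure a chart needs.
#
#     growth = get_memory_growth("cortex.db", days=30)
#     totals = [(g["date"], g["cumulative"]) for g in growth]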


def get_recent_sessions(db_path: str, limit: int = 5) -> list[dict]:
    """Get recent sessions with activity counts."""
    conn = get_connection(db_path)
    query = """
        SELECT
            s.id,
            s.project_path,
            s.started_at,
            s.ended_at,
            s.summary,
            COUNT(DISTINCT a.id) as activity_count
        FROM sessions s
        LEFT JOIN activities a ON a.session_id = s.id
        GROUP BY s.id
        ORDER BY s.started_at DESC
        LIMIT ?
    """
    cursor = conn.execute(query, (limit,))
    result = [
        {
            "id": row["id"],
            "project_path": row["project_path"],
            "started_at": row["started_at"],
            "ended_at": row["ended_at"],
            "summary": row["summary"],
            "activity_count": row["activity_count"],
        }
        for row in cursor.fetchall()
    ]
    conn.close()
    return result


def bulk_update_memory_status(db_path: str, memory_ids: list[str], status: str) -> int:
    """Update status for multiple memories. Returns count updated."""
    if not memory_ids:
        return 0
    conn = get_write_connection(db_path)
    placeholders = ','.join('?' * len(memory_ids))
    query = f"UPDATE memories SET status = ?, last_accessed = datetime('now') WHERE id IN ({placeholders})"
    cursor = conn.execute(query, [status] + memory_ids)
    conn.commit()
    count = cursor.rowcount
    conn.close()
    return count
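

# Usage sketch (illustrative only; IDs are hypothetical): the IN clause is
# built from one '?' placeholder per ID, so the values themselves are never
# interpolated into the SQL string.
#
#     archived = bulk_update_memory_status("cortex.db", ["mem_1", "mem_2"], "archived")
#     assert archived <= 2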


def get_memories_needing_review(db_path: str, days_threshold: int = 30, limit: int = 50) -> list[Memory]:
    """Get memories that haven't been accessed recently and may need review."""
    conn = get_connection(db_path)
    query = """
        SELECT * FROM memories
        WHERE last_accessed < date('now', ?)
           OR last_accessed IS NULL
        ORDER BY last_accessed ASC NULLS FIRST, importance_score DESC
        LIMIT ?
    """
    cursor = conn.execute(query, (f'-{days_threshold} days', limit))

    memories = []
    for row in cursor.fetchall():
        tags = parse_tags(row["tags"])
        memories.append(
            Memory(
                id=row["id"],
                content=row["content"],
                context=row["context"],
                type=row["type"],
                status=row["status"] or "fresh",
                importance_score=int(row["importance_score"] or 50),
                access_count=row["access_count"] or 0,
                created_at=datetime.fromisoformat(row["created_at"]),
                last_accessed=datetime.fromisoformat(row["last_accessed"]) if row["last_accessed"] else None,
                tags=tags,
            )
        )

    conn.close()
    return memories


def get_relationships(db_path: str, memory_id: Optional[str] = None) -> list[dict]:
    """Get memory relationships for graph visualization."""
    conn = get_connection(db_path)

    # Check if memory_relationships table exists
    table_check = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
    ).fetchone()

    if not table_check:
        conn.close()
        return []

    query = """
        SELECT
            r.source_memory_id as source_id,
            r.target_memory_id as target_id,
            r.relationship_type,
            r.strength,
            ms.content as source_content,
            ms.type as source_type,
            mt.content as target_content,
            mt.type as target_type
        FROM memory_relationships r
        JOIN memories ms ON r.source_memory_id = ms.id
        JOIN memories mt ON r.target_memory_id = mt.id
    """

    try:
        if memory_id:
            query += " WHERE r.source_memory_id = ? OR r.target_memory_id = ?"
            cursor = conn.execute(query, (memory_id, memory_id))
        else:
            cursor = conn.execute(query)

        result = [dict(row) for row in cursor.fetchall()]
    except Exception as e:
        print(f"[Database] Error querying relationships: {e}")
        result = []
    finally:
        conn.close()

    return result


def get_relationship_graph(db_path: str, center_id: Optional[str] = None, depth: int = 2) -> dict:
    """Get graph data with nodes and edges for D3 visualization."""
    relationships = get_relationships(db_path, center_id)

    nodes = {}
    edges = []

    for rel in relationships:
        # Add source node
        if rel["source_id"] not in nodes:
            nodes[rel["source_id"]] = {
                "id": rel["source_id"],
                "content": rel["source_content"][:100] if rel["source_content"] else "",
                "type": rel["source_type"],
            }
        # Add target node
        if rel["target_id"] not in nodes:
            nodes[rel["target_id"]] = {
                "id": rel["target_id"],
                "content": rel["target_content"][:100] if rel["target_content"] else "",
                "type": rel["target_type"],
            }
        # Add edge
        edges.append({
            "source": rel["source_id"],
            "target": rel["target_id"],
            "type": rel["relationship_type"],
            "strength": rel["strength"] or 1.0,
        })

    return {"nodes": list(nodes.values()), "edges": edges}
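

# Usage sketch (illustrative only; the center ID is hypothetical): the
# returned shape maps onto a D3 force layout, with nodes deduplicated by
# memory ID.
#
#     graph = get_relationship_graph("cortex.db", center_id="mem_123")
#     # -> {"nodes": [{"id": ..., "content": ..., "type": ...}, ...],
#     #     "edges": [{"source": ..., "target": ..., "type": ..., "strength": ...}, ...]}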


# --- Command Analytics Functions ---


def get_command_usage(db_path: str, scope: Optional[str] = None, days: int = 30) -> list[dict]:
    """Get slash command usage statistics aggregated by command_name.

    Args:
        db_path: Path to database
        scope: Filter by scope ('universal', 'project', or None for all)
        days: Number of days to look back

    Returns:
        List of command usage entries with counts and success rates
    """
    conn = get_connection(db_path)

    # Check if command_name column exists
    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
    column_names = [col[1] for col in columns]
    if "command_name" not in column_names:
        conn.close()
        return []

    query = """
        SELECT
            command_name,
            command_scope,
            COUNT(*) as count,
            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate,
            AVG(duration_ms) as avg_duration_ms
        FROM activities
        WHERE command_name IS NOT NULL
          AND command_name != ''
          AND timestamp >= date('now', ?)
    """
    params = [f'-{days} days']

    if scope:
        query += " AND command_scope = ?"
        params.append(scope)

    query += " GROUP BY command_name, command_scope ORDER BY count DESC"

    cursor = conn.execute(query, params)
    result = [
        {
            "command_name": row["command_name"],
            "command_scope": row["command_scope"] or "unknown",
            "count": row["count"],
            "success_rate": round(row["success_rate"], 2) if row["success_rate"] is not None else 1.0,
            "avg_duration_ms": round(row["avg_duration_ms"]) if row["avg_duration_ms"] is not None else None,
        }
        for row in cursor.fetchall()
    ]
    conn.close()
    return result
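

# Usage sketch (illustrative only): scope narrows the aggregation, so this
# returns only project-level slash commands from the last quarter.
#
#     project_cmds = get_command_usage("cortex.db", scope="project", days=90)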


def get_skill_usage(db_path: str, scope: Optional[str] = None, days: int = 30) -> list[dict]:
    """Get skill usage statistics aggregated by skill_name.

    Args:
        db_path: Path to database
        scope: Filter by scope ('universal', 'project', or None for all)
        days: Number of days to look back

    Returns:
        List of skill usage entries with counts and success rates
    """
    conn = get_connection(db_path)

    # Check if skill_name column exists
    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
    column_names = [col[1] for col in columns]
    if "skill_name" not in column_names:
        conn.close()
        return []

    query = """
        SELECT
            skill_name,
            command_scope,
            COUNT(*) as count,
            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate,
            AVG(duration_ms) as avg_duration_ms
        FROM activities
        WHERE skill_name IS NOT NULL
          AND skill_name != ''
          AND timestamp >= date('now', ?)
    """
    params = [f'-{days} days']

    if scope:
        query += " AND command_scope = ?"
        params.append(scope)

    query += " GROUP BY skill_name, command_scope ORDER BY count DESC"

    cursor = conn.execute(query, params)
    result = [
        {
            "skill_name": row["skill_name"],
            "skill_scope": row["command_scope"] or "unknown",
            "count": row["count"],
            "success_rate": round(row["success_rate"], 2) if row["success_rate"] is not None else 1.0,
            "avg_duration_ms": round(row["avg_duration_ms"]) if row["avg_duration_ms"] is not None else None,
        }
        for row in cursor.fetchall()
    ]
    conn.close()
    return result


def get_mcp_usage(db_path: str, days: int = 30) -> list[dict]:
    """Get MCP server usage statistics.

    Args:
        db_path: Path to database
        days: Number of days to look back

    Returns:
        List of MCP server usage entries with tool counts and call totals
    """
    conn = get_connection(db_path)

    # Check if mcp_server column exists
    columns = conn.execute("PRAGMA table_info(activities)").fetchall()
    column_names = [col[1] for col in columns]
    if "mcp_server" not in column_names:
        conn.close()
        return []

    query = """
        SELECT
            mcp_server,
            COUNT(DISTINCT tool_name) as tool_count,
            COUNT(*) as total_calls,
            SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) * 1.0 / COUNT(*) as success_rate
        FROM activities
        WHERE mcp_server IS NOT NULL
          AND mcp_server != ''
          AND timestamp >= date('now', ?)
        GROUP BY mcp_server
        ORDER BY total_calls DESC
    """
    cursor = conn.execute(query, (f'-{days} days',))
    result = [
        {
            "mcp_server": row["mcp_server"],
            "tool_count": row["tool_count"],
            "total_calls": row["total_calls"],
            "success_rate": round(row["success_rate"], 2) if row["success_rate"] is not None else 1.0,
        }
        for row in cursor.fetchall()
    ]
    conn.close()
    return result
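

# Usage sketch (illustrative only): all three analytics helpers degrade to an
# empty list on databases created before these columns existed, so callers
# can render an empty chart instead of handling a schema error.
#
#     servers = get_mcp_usage("cortex.db", days=30)
#     busiest = servers[0]["mcp_server"] if servers else None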


def get_activity_detail(db_path: str, activity_id: str) -> Optional[dict]:
    """Get full activity details including complete input/output.

    Args:
        db_path: Path to database
        activity_id: Activity ID

    Returns:
        Full activity details or None if not found
    """
    conn = get_connection(db_path)
    cursor = conn.execute("SELECT * FROM activities WHERE id = ?", (activity_id,))
    row = cursor.fetchone()

    if not row:
        conn.close()
        return None

    # Get column names for safe access
    column_names = [description[0] for description in cursor.description]

    result = {
        "id": row["id"],
        "session_id": row["session_id"],
        "event_type": row["event_type"],
        "tool_name": row["tool_name"],
        "tool_input_full": row["tool_input"],
        "tool_output_full": row["tool_output"],
        "success": bool(row["success"]),
        "error_message": row["error_message"],
        "duration_ms": row["duration_ms"],
        "file_path": row["file_path"],
        "timestamp": row["timestamp"],
    }

    # Add command analytics fields if they exist
    if "command_name" in column_names:
        result["command_name"] = row["command_name"]
    if "command_scope" in column_names:
        result["command_scope"] = row["command_scope"]
    if "mcp_server" in column_names:
        result["mcp_server"] = row["mcp_server"]
    if "skill_name" in column_names:
        result["skill_name"] = row["skill_name"]

    # Add summary fields if they exist
    if "summary" in column_names:
        result["summary"] = row["summary"]
    if "summary_detail" in column_names:
        result["summary_detail"] = row["summary_detail"]

    conn.close()
    return result


def create_memory(
    db_path: str,
    content: str,
    memory_type: str = "other",
    context: Optional[str] = None,
    tags: Optional[list[str]] = None,
    importance_score: int = 50,
    related_memory_ids: Optional[list[str]] = None,
) -> str:
    """Create a new memory and return its ID.

    Args:
        db_path: Path to the database file
        content: Memory content
        memory_type: Type of memory (e.g., 'decision', 'solution', 'conversation')
        context: Additional context
        tags: List of tags
        importance_score: Importance score (1-100)
        related_memory_ids: IDs of related memories to create relationships with

    Returns:
        The ID of the created memory
    """
    import uuid

    conn = get_write_connection(db_path)

    # Generate ID
    memory_id = f"mem_{int(datetime.now().timestamp() * 1000)}_{uuid.uuid4().hex[:8]}"
    now = datetime.now().isoformat()

    # Insert memory
    conn.execute(
        """
        INSERT INTO memories (id, content, context, type, status, importance_score, access_count, created_at, last_accessed, updated_at, tags)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
        (
            memory_id,
            content,
            context,
            memory_type,
            "fresh",
            importance_score,
            0,
            now,
            now,
            now,
            json.dumps(tags) if tags else None,
        ),
    )

    # Create relationships if related_memory_ids provided
    if related_memory_ids:
        # Check if memory_relationships table exists
        table_check = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_relationships'"
        ).fetchone()

        if table_check:
            for related_id in related_memory_ids:
                try:
                    conn.execute(
                        """
                        INSERT INTO memory_relationships (source_memory_id, target_memory_id, relationship_type, strength)
                        VALUES (?, ?, ?, ?)
                        """,
                        (memory_id, related_id, "derived_from", 0.8),
                    )
                except Exception:
                    # Ignore if related memory doesn't exist
                    pass

    conn.commit()
    conn.close()

    return memory_id
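

# Usage sketch (illustrative only; content and tags are made up): IDs combine
# a millisecond timestamp with a short uuid4 suffix, so two memories created
# in the same millisecond still get distinct IDs.
#
#     new_id = create_memory(
#         "cortex.db",
#         "Switched the dashboard to WAL mode",
#         memory_type="decision",
#         tags=["sqlite", "performance"],
#     )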


# --- User Message Functions for Style Tab ---


def get_user_messages(
    db_path: str,
    session_id: Optional[str] = None,
    search: Optional[str] = None,
    has_code_blocks: Optional[bool] = None,
    has_questions: Optional[bool] = None,
    has_commands: Optional[bool] = None,
    tone_filter: Optional[str] = None,
    sort_by: str = "timestamp",
    sort_order: str = "desc",
    limit: int = 50,
    offset: int = 0,
) -> list[dict]:
    """Get user messages with filtering, sorting, and pagination.

    Args:
        db_path: Path to database
        session_id: Filter by session
        search: Search in content
        has_code_blocks: Filter messages with/without code blocks
        has_questions: Filter messages with/without questions
        has_commands: Filter messages with/without slash commands
        tone_filter: Filter by tone indicator (e.g., 'polite', 'urgent', 'technical')
        sort_by: Sort by column (timestamp, word_count, char_count)
        sort_order: 'asc' or 'desc'
        limit: Maximum results
        offset: Pagination offset

    Returns:
        List of user message dictionaries
    """
    conn = get_connection(db_path)

    # Check if user_messages table exists
    table_check = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
    ).fetchone()

    if not table_check:
        conn.close()
        return []

    query = "SELECT * FROM user_messages WHERE 1=1"
    params: list = []

    if session_id:
        query += " AND session_id = ?"
        params.append(session_id)

    if search:
        query += " AND content LIKE ?"
        params.append(f"%{search}%")

    if has_code_blocks is not None:
        query += " AND has_code_blocks = ?"
        params.append(1 if has_code_blocks else 0)

    if has_questions is not None:
        query += " AND has_questions = ?"
        params.append(1 if has_questions else 0)

    if has_commands is not None:
        query += " AND has_commands = ?"
        params.append(1 if has_commands else 0)

    if tone_filter:
        # Search within JSON array of tone_indicators
        query += " AND tone_indicators LIKE ?"
        params.append(f'%"{tone_filter}"%')

    # Sorting - whitelist sortable columns to keep the f-string safe
    valid_sort_columns = ["timestamp", "word_count", "char_count", "line_count"]
    sort_by = sort_by if sort_by in valid_sort_columns else "timestamp"
    sort_order = "DESC" if sort_order.lower() == "desc" else "ASC"
    query += f" ORDER BY {sort_by} {sort_order}"

    # Pagination
    query += " LIMIT ? OFFSET ?"
    params.extend([limit, offset])

    cursor = conn.execute(query, params)
    messages = []

    for row in cursor.fetchall():
        # Parse tone_indicators from JSON
        tone_indicators = []
        if row["tone_indicators"]:
            try:
                tone_indicators = json.loads(row["tone_indicators"])
            except (json.JSONDecodeError, TypeError):
                pass

        messages.append({
            "id": row["id"],
            "session_id": row["session_id"],
            "timestamp": row["timestamp"],
            "content": row["content"],
            "word_count": row["word_count"],
            "char_count": row["char_count"],
            "line_count": row["line_count"],
            "has_code_blocks": bool(row["has_code_blocks"]),
            "has_questions": bool(row["has_questions"]),
            "has_commands": bool(row["has_commands"]),
            "tone_indicators": tone_indicators,
            "project_path": row["project_path"],
        })

    conn.close()
    return messages
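

# Usage sketch (illustrative only): filters are ANDed together, so this pulls
# question-style messages that also contain code blocks, longest first.
#
#     msgs = get_user_messages(
#         "cortex.db", has_questions=True, has_code_blocks=True,
#         sort_by="word_count", sort_order="desc",
#     )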


def get_user_message_count(
    db_path: str,
    session_id: Optional[str] = None,
) -> int:
    """Get total count of user messages.

    Args:
        db_path: Path to database
        session_id: Optional filter by session

    Returns:
        Count of user messages
    """
    conn = get_connection(db_path)

    # Check if user_messages table exists
    table_check = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
    ).fetchone()

    if not table_check:
        conn.close()
        return 0

    query = "SELECT COUNT(*) FROM user_messages"
    params = []

    if session_id:
        query += " WHERE session_id = ?"
        params.append(session_id)

    count = conn.execute(query, params).fetchone()[0]
    conn.close()
    return count


def delete_user_message(db_path: str, message_id: str) -> bool:
    """Delete a single user message.

    Args:
        db_path: Path to database
        message_id: Message ID to delete

    Returns:
        True if deleted, False if not found
    """
    conn = get_write_connection(db_path)

    cursor = conn.execute("DELETE FROM user_messages WHERE id = ?", (message_id,))
    conn.commit()

    deleted = cursor.rowcount > 0
    conn.close()
    return deleted


def delete_user_messages_bulk(db_path: str, message_ids: list[str]) -> int:
    """Delete multiple user messages.

    Args:
        db_path: Path to database
        message_ids: List of message IDs to delete

    Returns:
        Count of messages deleted
    """
    if not message_ids:
        return 0

    conn = get_write_connection(db_path)
    placeholders = ','.join('?' * len(message_ids))
    query = f"DELETE FROM user_messages WHERE id IN ({placeholders})"
    cursor = conn.execute(query, message_ids)
    conn.commit()

    count = cursor.rowcount
    conn.close()
    return count


def get_style_profile(db_path: str, project_path: Optional[str] = None) -> Optional[dict]:
    """Get user style profile.

    Args:
        db_path: Path to database
        project_path: Project-specific profile, or None for global

    Returns:
        Style profile dictionary or None if not found
    """
    conn = get_connection(db_path)

    # Check if user_style_profiles table exists
    table_check = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='user_style_profiles'"
    ).fetchone()

    if not table_check:
        conn.close()
        return None

    if project_path:
        query = "SELECT * FROM user_style_profiles WHERE project_path = ? ORDER BY updated_at DESC LIMIT 1"
        cursor = conn.execute(query, (project_path,))
    else:
        query = "SELECT * FROM user_style_profiles WHERE project_path IS NULL ORDER BY updated_at DESC LIMIT 1"
        cursor = conn.execute(query)

    row = cursor.fetchone()
    conn.close()

    if not row:
        return None

    # Parse JSON fields
    def parse_json_field(value):
        if not value:
            return None
        try:
            return json.loads(value)
        except (json.JSONDecodeError, TypeError):
            return None

    return {
        "id": row["id"],
        "project_path": row["project_path"],
        "total_messages": row["total_messages"],
        "avg_word_count": row["avg_word_count"],
        "avg_char_count": row["avg_char_count"],
        "common_phrases": parse_json_field(row["common_phrases"]),
        "vocabulary_richness": row["vocabulary_richness"],
        "formality_score": row["formality_score"],
        "question_frequency": row["question_frequency"],
        "command_frequency": row["command_frequency"],
        "code_block_frequency": row["code_block_frequency"],
        "punctuation_style": parse_json_field(row["punctuation_style"]),
        "greeting_patterns": parse_json_field(row["greeting_patterns"]),
        "instruction_style": parse_json_field(row["instruction_style"]),
        "sample_messages": parse_json_field(row["sample_messages"]),
        "created_at": row["created_at"],
        "updated_at": row["updated_at"],
    }
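

# Usage sketch (illustrative only): passing no project_path selects the
# global profile (project_path IS NULL) rather than matching any project.
#
#     profile = get_style_profile("cortex.db")
#     phrases = profile["common_phrases"] if profile else []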


def get_style_samples(db_path: str, limit: int = 10) -> list[dict]:
    """Get sample user messages for style analysis preview.

    Returns a diverse selection of messages showcasing different styles.

    Args:
        db_path: Path to database
        limit: Maximum samples to return

    Returns:
        List of sample messages with style indicators
    """
    conn = get_connection(db_path)

    # Check if user_messages table exists
    table_check = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='user_messages'"
    ).fetchone()

    if not table_check:
        conn.close()
        return []

    # Get a diverse sample: some recent, some with code, some with questions
    samples = []

    # Recent messages
    cursor = conn.execute(
        "SELECT * FROM user_messages ORDER BY timestamp DESC LIMIT ?",
        (limit // 3,)
    )
    for row in cursor.fetchall():
        samples.append(_row_to_sample(row))

    # Messages with code blocks
    cursor = conn.execute(
        "SELECT * FROM user_messages WHERE has_code_blocks = 1 ORDER BY timestamp DESC LIMIT ?",
        (limit // 3,)
    )
    for row in cursor.fetchall():
        sample = _row_to_sample(row)
        if sample["id"] not in [s["id"] for s in samples]:
            samples.append(sample)

    # Longer messages (likely more substantive)
    cursor = conn.execute(
        "SELECT * FROM user_messages WHERE word_count > 20 ORDER BY word_count DESC LIMIT ?",
        (limit // 3,)
    )
    for row in cursor.fetchall():
        sample = _row_to_sample(row)
        if sample["id"] not in [s["id"] for s in samples]:
            samples.append(sample)

    conn.close()
    return samples[:limit]


def _row_to_sample(row) -> dict:
    """Convert a database row to a sample message dict."""
    tone_indicators = []
    if row["tone_indicators"]:
        try:
            tone_indicators = json.loads(row["tone_indicators"])
        except (json.JSONDecodeError, TypeError):
            pass

    return {
        "id": row["id"],
        "timestamp": row["timestamp"],
        "content_preview": row["content"][:200] + "..." if len(row["content"]) > 200 else row["content"],
        "word_count": row["word_count"],
        "has_code_blocks": bool(row["has_code_blocks"]),
        "has_questions": bool(row["has_questions"]),
        "tone_indicators": tone_indicators,
    }