omni-cortex 1.4.0__tar.gz → 1.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/PKG-INFO +1 -1
- omni_cortex-1.6.0/dashboard/backend/backfill_summaries.py +280 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/chat_service.py +5 -1
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/database.py +94 -16
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/image_service.py +4 -4
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/main.py +56 -2
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/models.py +8 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/websocket_manager.py +22 -0
- omni_cortex-1.6.0/hooks/post_tool_use.py +335 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/hooks/pre_tool_use.py +130 -1
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/__init__.py +1 -1
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/database/migrations.py +6 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/database/schema.py +8 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/tools/activities.py +132 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/tools/memories.py +13 -2
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/utils/formatting.py +43 -6
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/pyproject.toml +1 -1
- omni_cortex-1.6.0/scripts/check-venv.py +106 -0
- omni_cortex-1.4.0/hooks/post_tool_use.py +0 -160
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/.gitignore +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/LICENSE +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/README.md +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/security.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/hooks/stop.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/categorization/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/categorization/auto_tags.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/categorization/auto_type.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/config.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/dashboard.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/database/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/database/connection.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/database/sync.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/decay/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/decay/importance.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/embeddings/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/embeddings/local.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/models/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/models/activity.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/models/agent.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/models/memory.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/models/relationship.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/models/session.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/resources/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/search/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/search/hybrid.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/search/keyword.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/search/ranking.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/search/semantic.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/server.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/setup.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/tools/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/tools/sessions.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/tools/utilities.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/utils/__init__.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/utils/ids.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/utils/timestamps.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/omni_cortex/utils/truncation.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/scripts/import_ken_memories.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/scripts/populate_session_data.py +0 -0
- {omni_cortex-1.4.0 → omni_cortex-1.6.0}/scripts/setup.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: omni-cortex
|
|
3
|
-
Version: 1.
|
|
3
|
+
Version: 1.6.0
|
|
4
4
|
Summary: Give Claude Code a perfect memory - auto-logs everything, searches smartly, and gets smarter over time
|
|
5
5
|
Project-URL: Homepage, https://github.com/AllCytes/Omni-Cortex
|
|
6
6
|
Project-URL: Repository, https://github.com/AllCytes/Omni-Cortex
|
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
"""Backfill utility for generating activity summaries.
|
|
2
|
+
|
|
3
|
+
This module provides functions to retroactively generate natural language
|
|
4
|
+
summaries for existing activity records that don't have them.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import sqlite3
|
|
9
|
+
import sys
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Optional
|
|
12
|
+
|
|
13
|
+
# Add parent paths for imports
|
|
14
|
+
sys.path.insert(0, str(Path(__file__).parent.parent.parent / "src"))
|
|
15
|
+
|
|
16
|
+
from database import get_write_connection, ensure_migrations
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def generate_activity_summary(
|
|
20
|
+
tool_name: Optional[str],
|
|
21
|
+
tool_input: Optional[str],
|
|
22
|
+
success: bool,
|
|
23
|
+
file_path: Optional[str],
|
|
24
|
+
event_type: str,
|
|
25
|
+
) -> tuple[str, str]:
|
|
26
|
+
"""Generate natural language summary for an activity.
|
|
27
|
+
|
|
28
|
+
Returns:
|
|
29
|
+
tuple of (short_summary, detailed_summary)
|
|
30
|
+
"""
|
|
31
|
+
short = ""
|
|
32
|
+
detail = ""
|
|
33
|
+
|
|
34
|
+
# Parse tool input if available
|
|
35
|
+
input_data = {}
|
|
36
|
+
if tool_input:
|
|
37
|
+
try:
|
|
38
|
+
input_data = json.loads(tool_input)
|
|
39
|
+
except (json.JSONDecodeError, TypeError):
|
|
40
|
+
pass
|
|
41
|
+
|
|
42
|
+
# Generate summaries based on tool type
|
|
43
|
+
if tool_name == "Read":
|
|
44
|
+
path = input_data.get("file_path", file_path or "unknown file")
|
|
45
|
+
filename = Path(path).name if path else "file"
|
|
46
|
+
short = f"Read file: {filename}"
|
|
47
|
+
detail = f"Reading contents of {path}"
|
|
48
|
+
|
|
49
|
+
elif tool_name == "Write":
|
|
50
|
+
path = input_data.get("file_path", file_path or "unknown file")
|
|
51
|
+
filename = Path(path).name if path else "file"
|
|
52
|
+
short = f"Write file: {filename}"
|
|
53
|
+
detail = f"Writing/creating file at {path}"
|
|
54
|
+
|
|
55
|
+
elif tool_name == "Edit":
|
|
56
|
+
path = input_data.get("file_path", file_path or "unknown file")
|
|
57
|
+
filename = Path(path).name if path else "file"
|
|
58
|
+
short = f"Edit file: {filename}"
|
|
59
|
+
detail = f"Editing {path} - replacing text content"
|
|
60
|
+
|
|
61
|
+
elif tool_name == "Bash":
|
|
62
|
+
cmd = input_data.get("command", "")[:50]
|
|
63
|
+
short = f"Run command: {cmd}..."
|
|
64
|
+
detail = f"Executing bash command: {input_data.get('command', 'unknown')}"
|
|
65
|
+
|
|
66
|
+
elif tool_name == "Grep":
|
|
67
|
+
pattern = input_data.get("pattern", "")
|
|
68
|
+
short = f"Search for: {pattern[:30]}"
|
|
69
|
+
detail = f"Searching codebase for pattern: {pattern}"
|
|
70
|
+
|
|
71
|
+
elif tool_name == "Glob":
|
|
72
|
+
pattern = input_data.get("pattern", "")
|
|
73
|
+
short = f"Find files: {pattern[:30]}"
|
|
74
|
+
detail = f"Finding files matching pattern: {pattern}"
|
|
75
|
+
|
|
76
|
+
elif tool_name == "Skill":
|
|
77
|
+
skill = input_data.get("skill", "unknown")
|
|
78
|
+
short = f"Run skill: /{skill}"
|
|
79
|
+
detail = f"Executing slash command /{skill}"
|
|
80
|
+
|
|
81
|
+
elif tool_name == "Task":
|
|
82
|
+
desc = input_data.get("description", "task")
|
|
83
|
+
short = f"Spawn agent: {desc[:30]}"
|
|
84
|
+
detail = f"Launching sub-agent for: {input_data.get('prompt', desc)[:100]}"
|
|
85
|
+
|
|
86
|
+
elif tool_name == "WebSearch":
|
|
87
|
+
query = input_data.get("query", "")
|
|
88
|
+
short = f"Web search: {query[:30]}"
|
|
89
|
+
detail = f"Searching the web for: {query}"
|
|
90
|
+
|
|
91
|
+
elif tool_name == "WebFetch":
|
|
92
|
+
url = input_data.get("url", "")
|
|
93
|
+
short = f"Fetch URL: {url[:40]}"
|
|
94
|
+
detail = f"Fetching content from: {url}"
|
|
95
|
+
|
|
96
|
+
elif tool_name == "TodoWrite":
|
|
97
|
+
todos = input_data.get("todos", [])
|
|
98
|
+
count = len(todos) if isinstance(todos, list) else 0
|
|
99
|
+
short = f"Update todo list: {count} items"
|
|
100
|
+
detail = f"Managing task list with {count} items"
|
|
101
|
+
|
|
102
|
+
elif tool_name == "AskUserQuestion":
|
|
103
|
+
questions = input_data.get("questions", [])
|
|
104
|
+
count = len(questions) if isinstance(questions, list) else 1
|
|
105
|
+
short = f"Ask user: {count} question(s)"
|
|
106
|
+
detail = f"Prompting user for input with {count} question(s)"
|
|
107
|
+
|
|
108
|
+
elif tool_name and tool_name.startswith("mcp__"):
|
|
109
|
+
parts = tool_name.split("__")
|
|
110
|
+
server = parts[1] if len(parts) > 1 else "unknown"
|
|
111
|
+
tool = parts[2] if len(parts) > 2 else tool_name
|
|
112
|
+
short = f"MCP call: {server}/{tool}"
|
|
113
|
+
detail = f"Calling {tool} tool from MCP server {server}"
|
|
114
|
+
|
|
115
|
+
elif tool_name == "cortex_remember" or (tool_name and "remember" in tool_name.lower()):
|
|
116
|
+
params = input_data.get("params", {})
|
|
117
|
+
content = params.get("content", "") if isinstance(params, dict) else ""
|
|
118
|
+
short = f"Store memory: {content[:30]}..." if content else "Store memory"
|
|
119
|
+
detail = f"Saving to memory system: {content[:100]}" if content else "Saving to memory system"
|
|
120
|
+
|
|
121
|
+
elif tool_name == "cortex_recall" or (tool_name and "recall" in tool_name.lower()):
|
|
122
|
+
params = input_data.get("params", {})
|
|
123
|
+
query = params.get("query", "") if isinstance(params, dict) else ""
|
|
124
|
+
short = f"Recall: {query[:30]}" if query else "Recall memories"
|
|
125
|
+
detail = f"Searching memories for: {query}" if query else "Retrieving memories"
|
|
126
|
+
|
|
127
|
+
elif tool_name == "NotebookEdit":
|
|
128
|
+
path = input_data.get("notebook_path", "")
|
|
129
|
+
filename = Path(path).name if path else "notebook"
|
|
130
|
+
short = f"Edit notebook: {filename}"
|
|
131
|
+
detail = f"Editing Jupyter notebook {path}"
|
|
132
|
+
|
|
133
|
+
else:
|
|
134
|
+
short = f"{event_type}: {tool_name or 'unknown'}"
|
|
135
|
+
detail = f"Activity type {event_type} with tool {tool_name}"
|
|
136
|
+
|
|
137
|
+
# Add status suffix for failures
|
|
138
|
+
if not success:
|
|
139
|
+
short = f"[FAILED] {short}"
|
|
140
|
+
detail = f"[FAILED] {detail}"
|
|
141
|
+
|
|
142
|
+
return short, detail
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def backfill_activity_summaries(db_path: str) -> int:
|
|
146
|
+
"""Generate summaries for activities that don't have them.
|
|
147
|
+
|
|
148
|
+
Args:
|
|
149
|
+
db_path: Path to the SQLite database
|
|
150
|
+
|
|
151
|
+
Returns:
|
|
152
|
+
Number of activities updated
|
|
153
|
+
"""
|
|
154
|
+
# First ensure migrations are applied
|
|
155
|
+
ensure_migrations(db_path)
|
|
156
|
+
|
|
157
|
+
conn = get_write_connection(db_path)
|
|
158
|
+
|
|
159
|
+
# Check if summary column exists
|
|
160
|
+
columns = conn.execute("PRAGMA table_info(activities)").fetchall()
|
|
161
|
+
column_names = {col[1] for col in columns}
|
|
162
|
+
|
|
163
|
+
if "summary" not in column_names:
|
|
164
|
+
print(f"[Backfill] Summary column not found in {db_path}, skipping")
|
|
165
|
+
conn.close()
|
|
166
|
+
return 0
|
|
167
|
+
|
|
168
|
+
cursor = conn.execute("""
|
|
169
|
+
SELECT id, tool_name, tool_input, success, file_path, event_type
|
|
170
|
+
FROM activities
|
|
171
|
+
WHERE summary IS NULL OR summary = ''
|
|
172
|
+
""")
|
|
173
|
+
|
|
174
|
+
count = 0
|
|
175
|
+
for row in cursor.fetchall():
|
|
176
|
+
short, detail = generate_activity_summary(
|
|
177
|
+
row["tool_name"],
|
|
178
|
+
row["tool_input"],
|
|
179
|
+
bool(row["success"]),
|
|
180
|
+
row["file_path"],
|
|
181
|
+
row["event_type"],
|
|
182
|
+
)
|
|
183
|
+
|
|
184
|
+
conn.execute(
|
|
185
|
+
"""
|
|
186
|
+
UPDATE activities
|
|
187
|
+
SET summary = ?, summary_detail = ?
|
|
188
|
+
WHERE id = ?
|
|
189
|
+
""",
|
|
190
|
+
(short, detail, row["id"]),
|
|
191
|
+
)
|
|
192
|
+
count += 1
|
|
193
|
+
|
|
194
|
+
if count % 100 == 0:
|
|
195
|
+
conn.commit()
|
|
196
|
+
print(f"[Backfill] Processed {count} activities...")
|
|
197
|
+
|
|
198
|
+
conn.commit()
|
|
199
|
+
conn.close()
|
|
200
|
+
return count
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def backfill_mcp_servers(db_path: str) -> int:
|
|
204
|
+
"""Extract and populate mcp_server for existing activities.
|
|
205
|
+
|
|
206
|
+
Args:
|
|
207
|
+
db_path: Path to the SQLite database
|
|
208
|
+
|
|
209
|
+
Returns:
|
|
210
|
+
Number of activities updated
|
|
211
|
+
"""
|
|
212
|
+
# First ensure migrations are applied
|
|
213
|
+
ensure_migrations(db_path)
|
|
214
|
+
|
|
215
|
+
conn = get_write_connection(db_path)
|
|
216
|
+
|
|
217
|
+
# Check if mcp_server column exists
|
|
218
|
+
columns = conn.execute("PRAGMA table_info(activities)").fetchall()
|
|
219
|
+
column_names = {col[1] for col in columns}
|
|
220
|
+
|
|
221
|
+
if "mcp_server" not in column_names:
|
|
222
|
+
print(f"[Backfill] mcp_server column not found in {db_path}, skipping")
|
|
223
|
+
conn.close()
|
|
224
|
+
return 0
|
|
225
|
+
|
|
226
|
+
cursor = conn.execute("""
|
|
227
|
+
SELECT id, tool_name FROM activities
|
|
228
|
+
WHERE tool_name LIKE 'mcp__%'
|
|
229
|
+
AND (mcp_server IS NULL OR mcp_server = '')
|
|
230
|
+
""")
|
|
231
|
+
|
|
232
|
+
count = 0
|
|
233
|
+
for row in cursor.fetchall():
|
|
234
|
+
parts = row["tool_name"].split("__")
|
|
235
|
+
if len(parts) >= 2:
|
|
236
|
+
server = parts[1]
|
|
237
|
+
conn.execute(
|
|
238
|
+
"UPDATE activities SET mcp_server = ? WHERE id = ?",
|
|
239
|
+
(server, row["id"]),
|
|
240
|
+
)
|
|
241
|
+
count += 1
|
|
242
|
+
|
|
243
|
+
conn.commit()
|
|
244
|
+
conn.close()
|
|
245
|
+
return count
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def backfill_all(db_path: str) -> dict:
|
|
249
|
+
"""Run all backfill operations on a database.
|
|
250
|
+
|
|
251
|
+
Args:
|
|
252
|
+
db_path: Path to the SQLite database
|
|
253
|
+
|
|
254
|
+
Returns:
|
|
255
|
+
Dictionary with counts of updated records
|
|
256
|
+
"""
|
|
257
|
+
print(f"[Backfill] Starting backfill for {db_path}")
|
|
258
|
+
|
|
259
|
+
results = {
|
|
260
|
+
"summaries": backfill_activity_summaries(db_path),
|
|
261
|
+
"mcp_servers": backfill_mcp_servers(db_path),
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
print(f"[Backfill] Complete: {results['summaries']} summaries, {results['mcp_servers']} MCP servers")
|
|
265
|
+
return results
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
if __name__ == "__main__":
|
|
269
|
+
# Allow running from command line with database path as argument
|
|
270
|
+
if len(sys.argv) < 2:
|
|
271
|
+
print("Usage: python backfill_summaries.py <path-to-database>")
|
|
272
|
+
sys.exit(1)
|
|
273
|
+
|
|
274
|
+
db_path = sys.argv[1]
|
|
275
|
+
if not Path(db_path).exists():
|
|
276
|
+
print(f"Error: Database not found at {db_path}")
|
|
277
|
+
sys.exit(1)
|
|
278
|
+
|
|
279
|
+
results = backfill_all(db_path)
|
|
280
|
+
print(f"Backfill complete: {results}")
|
|
@@ -214,9 +214,13 @@ async def save_conversation(
|
|
|
214
214
|
client = get_client()
|
|
215
215
|
if client:
|
|
216
216
|
try:
|
|
217
|
+
# Escape content to prevent injection in summary generation
|
|
218
|
+
safe_content = xml_escape(content[:2000])
|
|
217
219
|
summary_prompt = f"""Summarize this conversation in one concise sentence (max 100 chars):
|
|
218
220
|
|
|
219
|
-
|
|
221
|
+
<conversation>
|
|
222
|
+
{safe_content}
|
|
223
|
+
</conversation>
|
|
220
224
|
|
|
221
225
|
Summary:"""
|
|
222
226
|
response = client.models.generate_content(
|
|
@@ -24,6 +24,58 @@ def get_write_connection(db_path: str) -> sqlite3.Connection:
|
|
|
24
24
|
return conn
|
|
25
25
|
|
|
26
26
|
|
|
27
|
+
def ensure_migrations(db_path: str) -> None:
|
|
28
|
+
"""Ensure database has latest migrations applied.
|
|
29
|
+
|
|
30
|
+
This function checks for and applies any missing schema updates,
|
|
31
|
+
including command analytics columns and natural language summary columns.
|
|
32
|
+
"""
|
|
33
|
+
conn = get_write_connection(db_path)
|
|
34
|
+
|
|
35
|
+
# Check if activities table exists
|
|
36
|
+
table_check = conn.execute(
|
|
37
|
+
"SELECT name FROM sqlite_master WHERE type='table' AND name='activities'"
|
|
38
|
+
).fetchone()
|
|
39
|
+
|
|
40
|
+
if not table_check:
|
|
41
|
+
conn.close()
|
|
42
|
+
return
|
|
43
|
+
|
|
44
|
+
# Check available columns
|
|
45
|
+
columns = conn.execute("PRAGMA table_info(activities)").fetchall()
|
|
46
|
+
column_names = {col[1] for col in columns}
|
|
47
|
+
|
|
48
|
+
migrations_applied = []
|
|
49
|
+
|
|
50
|
+
# Migration v1.1: Command analytics columns
|
|
51
|
+
if "command_name" not in column_names:
|
|
52
|
+
conn.executescript("""
|
|
53
|
+
ALTER TABLE activities ADD COLUMN command_name TEXT;
|
|
54
|
+
ALTER TABLE activities ADD COLUMN command_scope TEXT;
|
|
55
|
+
ALTER TABLE activities ADD COLUMN mcp_server TEXT;
|
|
56
|
+
ALTER TABLE activities ADD COLUMN skill_name TEXT;
|
|
57
|
+
|
|
58
|
+
CREATE INDEX IF NOT EXISTS idx_activities_command ON activities(command_name);
|
|
59
|
+
CREATE INDEX IF NOT EXISTS idx_activities_mcp ON activities(mcp_server);
|
|
60
|
+
CREATE INDEX IF NOT EXISTS idx_activities_skill ON activities(skill_name);
|
|
61
|
+
""")
|
|
62
|
+
migrations_applied.append("v1.1: command analytics columns")
|
|
63
|
+
|
|
64
|
+
# Migration v1.2: Natural language summary columns
|
|
65
|
+
if "summary" not in column_names:
|
|
66
|
+
conn.executescript("""
|
|
67
|
+
ALTER TABLE activities ADD COLUMN summary TEXT;
|
|
68
|
+
ALTER TABLE activities ADD COLUMN summary_detail TEXT;
|
|
69
|
+
""")
|
|
70
|
+
migrations_applied.append("v1.2: summary columns")
|
|
71
|
+
|
|
72
|
+
if migrations_applied:
|
|
73
|
+
conn.commit()
|
|
74
|
+
print(f"[Database] Applied migrations: {', '.join(migrations_applied)}")
|
|
75
|
+
|
|
76
|
+
conn.close()
|
|
77
|
+
|
|
78
|
+
|
|
27
79
|
def parse_tags(tags_str: Optional[str]) -> list[str]:
|
|
28
80
|
"""Parse tags from JSON string."""
|
|
29
81
|
if not tags_str:
|
|
@@ -183,9 +235,13 @@ def get_activities(
|
|
|
183
235
|
limit: int = 100,
|
|
184
236
|
offset: int = 0,
|
|
185
237
|
) -> list[Activity]:
|
|
186
|
-
"""Get activity log entries."""
|
|
238
|
+
"""Get activity log entries with all available fields."""
|
|
187
239
|
conn = get_connection(db_path)
|
|
188
240
|
|
|
241
|
+
# Check available columns for backward compatibility
|
|
242
|
+
columns = conn.execute("PRAGMA table_info(activities)").fetchall()
|
|
243
|
+
column_names = {col[1] for col in columns}
|
|
244
|
+
|
|
189
245
|
query = "SELECT * FROM activities WHERE 1=1"
|
|
190
246
|
params: list = []
|
|
191
247
|
|
|
@@ -212,21 +268,37 @@ def get_activities(
|
|
|
212
268
|
# Fallback for edge cases
|
|
213
269
|
ts = datetime.now()
|
|
214
270
|
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
271
|
+
activity_data = {
|
|
272
|
+
"id": row["id"],
|
|
273
|
+
"session_id": row["session_id"],
|
|
274
|
+
"event_type": row["event_type"],
|
|
275
|
+
"tool_name": row["tool_name"],
|
|
276
|
+
"tool_input": row["tool_input"],
|
|
277
|
+
"tool_output": row["tool_output"],
|
|
278
|
+
"success": bool(row["success"]),
|
|
279
|
+
"error_message": row["error_message"],
|
|
280
|
+
"duration_ms": row["duration_ms"],
|
|
281
|
+
"file_path": row["file_path"],
|
|
282
|
+
"timestamp": ts,
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
# Add command analytics fields if available
|
|
286
|
+
if "command_name" in column_names:
|
|
287
|
+
activity_data["command_name"] = row["command_name"]
|
|
288
|
+
if "command_scope" in column_names:
|
|
289
|
+
activity_data["command_scope"] = row["command_scope"]
|
|
290
|
+
if "mcp_server" in column_names:
|
|
291
|
+
activity_data["mcp_server"] = row["mcp_server"]
|
|
292
|
+
if "skill_name" in column_names:
|
|
293
|
+
activity_data["skill_name"] = row["skill_name"]
|
|
294
|
+
|
|
295
|
+
# Add summary fields if available
|
|
296
|
+
if "summary" in column_names:
|
|
297
|
+
activity_data["summary"] = row["summary"]
|
|
298
|
+
if "summary_detail" in column_names:
|
|
299
|
+
activity_data["summary_detail"] = row["summary_detail"]
|
|
300
|
+
|
|
301
|
+
activities.append(Activity(**activity_data))
|
|
230
302
|
|
|
231
303
|
conn.close()
|
|
232
304
|
return activities
|
|
@@ -933,6 +1005,12 @@ def get_activity_detail(db_path: str, activity_id: str) -> Optional[dict]:
|
|
|
933
1005
|
if "skill_name" in column_names:
|
|
934
1006
|
result["skill_name"] = row["skill_name"]
|
|
935
1007
|
|
|
1008
|
+
# Add summary fields if they exist
|
|
1009
|
+
if "summary" in column_names:
|
|
1010
|
+
result["summary"] = row["summary"]
|
|
1011
|
+
if "summary_detail" in column_names:
|
|
1012
|
+
result["summary_detail"] = row["summary_detail"]
|
|
1013
|
+
|
|
936
1014
|
conn.close()
|
|
937
1015
|
return result
|
|
938
1016
|
|
|
@@ -209,9 +209,9 @@ Tags: {', '.join(memory.tags) if memory.tags else 'N/A'}
|
|
|
209
209
|
if preset_prompt:
|
|
210
210
|
parts.append(f"\nImage style guidance:\n{preset_prompt}")
|
|
211
211
|
|
|
212
|
-
# Add user's custom prompt
|
|
212
|
+
# Add user's custom prompt (escaped to prevent injection)
|
|
213
213
|
if request.custom_prompt:
|
|
214
|
-
parts.append(f"\nUser request: {request.custom_prompt}")
|
|
214
|
+
parts.append(f"\nUser request: {xml_escape(request.custom_prompt)}")
|
|
215
215
|
|
|
216
216
|
parts.append("\nGenerate a professional, high-quality image optimized for social media sharing.")
|
|
217
217
|
|
|
@@ -461,10 +461,10 @@ Tags: {', '.join(memory.tags) if memory.tags else 'N/A'}
|
|
|
461
461
|
"parts": parts
|
|
462
462
|
})
|
|
463
463
|
|
|
464
|
-
# Add refinement prompt
|
|
464
|
+
# Add refinement prompt (escaped to prevent injection)
|
|
465
465
|
contents.append({
|
|
466
466
|
"role": "user",
|
|
467
|
-
"parts": [{"text": refinement_prompt}]
|
|
467
|
+
"parts": [{"text": xml_escape(refinement_prompt)}]
|
|
468
468
|
})
|
|
469
469
|
|
|
470
470
|
# Configure - use defaults or provided values
|
|
@@ -32,6 +32,7 @@ except ImportError:
|
|
|
32
32
|
from database import (
|
|
33
33
|
bulk_update_memory_status,
|
|
34
34
|
delete_memory,
|
|
35
|
+
ensure_migrations,
|
|
35
36
|
get_activities,
|
|
36
37
|
get_activity_detail,
|
|
37
38
|
get_activity_heatmap,
|
|
@@ -134,6 +135,7 @@ class DatabaseChangeHandler(FileSystemEventHandler):
|
|
|
134
135
|
self.loop = loop
|
|
135
136
|
self._debounce_task: Optional[asyncio.Task] = None
|
|
136
137
|
self._last_path: Optional[str] = None
|
|
138
|
+
self._last_activity_count: dict[str, int] = {}
|
|
137
139
|
|
|
138
140
|
def on_modified(self, event):
|
|
139
141
|
if event.src_path.endswith("cortex.db") or event.src_path.endswith("global.db"):
|
|
@@ -145,9 +147,35 @@ class DatabaseChangeHandler(FileSystemEventHandler):
|
|
|
145
147
|
)
|
|
146
148
|
|
|
147
149
|
async def _debounced_notify(self):
|
|
148
|
-
await asyncio.sleep(0.
|
|
150
|
+
await asyncio.sleep(0.3) # Reduced from 0.5s for faster updates
|
|
149
151
|
if self._last_path:
|
|
150
|
-
|
|
152
|
+
db_path = self._last_path
|
|
153
|
+
|
|
154
|
+
# Broadcast general database change
|
|
155
|
+
await self.ws_manager.broadcast("database_changed", {"path": db_path})
|
|
156
|
+
|
|
157
|
+
# Fetch and broadcast latest activities (IndyDevDan pattern)
|
|
158
|
+
try:
|
|
159
|
+
# Get recent activities
|
|
160
|
+
recent = get_activities(db_path, limit=5, offset=0)
|
|
161
|
+
if recent:
|
|
162
|
+
# Broadcast each new activity
|
|
163
|
+
for activity in recent:
|
|
164
|
+
await self.ws_manager.broadcast_activity_logged(
|
|
165
|
+
db_path,
|
|
166
|
+
activity if isinstance(activity, dict) else activity.model_dump()
|
|
167
|
+
)
|
|
168
|
+
|
|
169
|
+
# Also broadcast session update
|
|
170
|
+
sessions = get_recent_sessions(db_path, limit=1)
|
|
171
|
+
if sessions:
|
|
172
|
+
session = sessions[0]
|
|
173
|
+
await self.ws_manager.broadcast_session_updated(
|
|
174
|
+
db_path,
|
|
175
|
+
session if isinstance(session, dict) else dict(session)
|
|
176
|
+
)
|
|
177
|
+
except Exception as e:
|
|
178
|
+
print(f"[WS] Error broadcasting activities: {e}")
|
|
151
179
|
|
|
152
180
|
|
|
153
181
|
# File watcher
|
|
@@ -487,6 +515,9 @@ async def list_activities(
|
|
|
487
515
|
if not Path(project).exists():
|
|
488
516
|
raise HTTPException(status_code=404, detail="Database not found")
|
|
489
517
|
|
|
518
|
+
# Ensure migrations are applied (adds summary columns if missing)
|
|
519
|
+
ensure_migrations(project)
|
|
520
|
+
|
|
490
521
|
return get_activities(project, event_type, tool_name, limit, offset)
|
|
491
522
|
|
|
492
523
|
|
|
@@ -627,6 +658,9 @@ async def get_activity_detail_endpoint(
|
|
|
627
658
|
if not Path(project).exists():
|
|
628
659
|
raise HTTPException(status_code=404, detail="Database not found")
|
|
629
660
|
|
|
661
|
+
# Ensure migrations are applied
|
|
662
|
+
ensure_migrations(project)
|
|
663
|
+
|
|
630
664
|
activity = get_activity_detail(project, activity_id)
|
|
631
665
|
if not activity:
|
|
632
666
|
raise HTTPException(status_code=404, detail="Activity not found")
|
|
@@ -634,6 +668,26 @@ async def get_activity_detail_endpoint(
|
|
|
634
668
|
return activity
|
|
635
669
|
|
|
636
670
|
|
|
671
|
+
@app.post("/api/activities/backfill-summaries")
|
|
672
|
+
async def backfill_activity_summaries_endpoint(
|
|
673
|
+
project: str = Query(..., description="Path to the database file"),
|
|
674
|
+
):
|
|
675
|
+
"""Generate summaries for existing activities that don't have them."""
|
|
676
|
+
if not Path(project).exists():
|
|
677
|
+
raise HTTPException(status_code=404, detail="Database not found")
|
|
678
|
+
|
|
679
|
+
try:
|
|
680
|
+
from backfill_summaries import backfill_all
|
|
681
|
+
results = backfill_all(project)
|
|
682
|
+
return {
|
|
683
|
+
"success": True,
|
|
684
|
+
"summaries_updated": results["summaries"],
|
|
685
|
+
"mcp_servers_updated": results["mcp_servers"],
|
|
686
|
+
}
|
|
687
|
+
except Exception as e:
|
|
688
|
+
raise HTTPException(status_code=500, detail=f"Backfill failed: {str(e)}")
|
|
689
|
+
|
|
690
|
+
|
|
637
691
|
# --- Session Context Endpoints ---
|
|
638
692
|
|
|
639
693
|
|
|
@@ -84,6 +84,14 @@ class Activity(BaseModel):
|
|
|
84
84
|
duration_ms: Optional[int] = None
|
|
85
85
|
file_path: Optional[str] = None
|
|
86
86
|
timestamp: datetime
|
|
87
|
+
# Command analytics fields
|
|
88
|
+
command_name: Optional[str] = None
|
|
89
|
+
command_scope: Optional[str] = None
|
|
90
|
+
mcp_server: Optional[str] = None
|
|
91
|
+
skill_name: Optional[str] = None
|
|
92
|
+
# Natural language summary fields
|
|
93
|
+
summary: Optional[str] = None
|
|
94
|
+
summary_detail: Optional[str] = None
|
|
87
95
|
|
|
88
96
|
|
|
89
97
|
class Session(BaseModel):
|
|
@@ -77,6 +77,28 @@ class WebSocketManager:
|
|
|
77
77
|
"""Get the number of active connections."""
|
|
78
78
|
return len(self.connections)
|
|
79
79
|
|
|
80
|
+
# Typed broadcast methods (IndyDevDan pattern)
|
|
81
|
+
async def broadcast_activity_logged(self, project: str, activity: dict[str, Any]):
|
|
82
|
+
"""Broadcast when a new activity is logged."""
|
|
83
|
+
await self.broadcast("activity_logged", {
|
|
84
|
+
"project": project,
|
|
85
|
+
"activity": activity,
|
|
86
|
+
})
|
|
87
|
+
|
|
88
|
+
async def broadcast_session_updated(self, project: str, session: dict[str, Any]):
|
|
89
|
+
"""Broadcast when a session is updated."""
|
|
90
|
+
await self.broadcast("session_updated", {
|
|
91
|
+
"project": project,
|
|
92
|
+
"session": session,
|
|
93
|
+
})
|
|
94
|
+
|
|
95
|
+
async def broadcast_stats_updated(self, project: str, stats: dict[str, Any]):
|
|
96
|
+
"""Broadcast when stats change (for charts/panels)."""
|
|
97
|
+
await self.broadcast("stats_updated", {
|
|
98
|
+
"project": project,
|
|
99
|
+
"stats": stats,
|
|
100
|
+
})
|
|
101
|
+
|
|
80
102
|
|
|
81
103
|
# Global manager instance
|
|
82
104
|
manager = WebSocketManager()
|