omni-cortex 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/.env.example +22 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +280 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/chat_service.py +315 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/database.py +1093 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/image_service.py +549 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/logging_config.py +122 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/main.py +1124 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/models.py +241 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/project_config.py +170 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/project_scanner.py +164 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/prompt_security.py +111 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/pyproject.toml +23 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/security.py +104 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/uv.lock +1110 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/websocket_manager.py +104 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/post_tool_use.py +335 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/pre_tool_use.py +333 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/stop.py +184 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/subagent_stop.py +120 -0
- omni_cortex-1.6.0.dist-info/METADATA +319 -0
- omni_cortex-1.6.0.dist-info/RECORD +24 -0
- omni_cortex-1.6.0.dist-info/WHEEL +4 -0
- omni_cortex-1.6.0.dist-info/entry_points.txt +4 -0
- omni_cortex-1.6.0.dist-info/licenses/LICENSE +21 -0
omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/pre_tool_use.py
@@ -0,0 +1,333 @@
+#!/usr/bin/env python3
+"""PreToolUse hook - logs tool call before execution.
+
+This hook is called by Claude Code before each tool is executed.
+It logs the tool name and input to the Cortex activity database.
+
+Hook configuration for settings.json:
+    {
+      "hooks": {
+        "PreToolUse": [
+          {
+            "type": "command",
+            "command": "python hooks/pre_tool_use.py"
+          }
+        ]
+      }
+    }
+"""
+
+import json
+import re
+import sys
+import os
+import sqlite3
+import time
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Optional
+
+
+# Session timeout in seconds (4 hours of inactivity = new session)
+SESSION_TIMEOUT_SECONDS = 4 * 60 * 60
+
+# Patterns for sensitive field names that should be redacted
+SENSITIVE_FIELD_PATTERNS = [
+    r'(?i)(api[_-]?key|apikey)',
+    r'(?i)(password|passwd|pwd)',
+    r'(?i)(secret|token|credential)',
+    r'(?i)(auth[_-]?token|access[_-]?token)',
+    r'(?i)(private[_-]?key|ssh[_-]?key)',
+]
+
+
+def generate_session_id() -> str:
+    """Generate a unique session ID matching the MCP format."""
+    timestamp_ms = int(time.time() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"sess_{timestamp_ms}_{random_hex}"
+
+
+def get_session_file_path() -> Path:
+    """Get the path to the current session file."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "current_session.json"
+
+
+def load_session_file() -> Optional[dict]:
+    """Load the current session from file if it exists and is valid."""
+    session_file = get_session_file_path()
+    if not session_file.exists():
+        return None
+
+    try:
+        with open(session_file, "r") as f:
+            return json.load(f)
+    except (json.JSONDecodeError, IOError):
+        return None
+
+
+def save_session_file(session_data: dict) -> None:
+    """Save the current session to file."""
+    session_file = get_session_file_path()
+    session_file.parent.mkdir(parents=True, exist_ok=True)
+
+    with open(session_file, "w") as f:
+        json.dump(session_data, f, indent=2)
+
+
+def is_session_valid(session_data: dict) -> bool:
+    """Check if a session is still valid (not timed out)."""
+    last_activity = session_data.get("last_activity_at")
+    if not last_activity:
+        return False
+
+    try:
+        last_time = datetime.fromisoformat(last_activity.replace("Z", "+00:00"))
+        now = datetime.now(timezone.utc)
+        elapsed_seconds = (now - last_time).total_seconds()
+        return elapsed_seconds < SESSION_TIMEOUT_SECONDS
+    except (ValueError, TypeError):
+        return False
+
+
+def create_session_in_db(conn: sqlite3.Connection, session_id: str, project_path: str) -> None:
+    """Create a new session record in the database."""
+    cursor = conn.cursor()
+    now = datetime.now(timezone.utc).isoformat()
+
+    # Check if sessions table exists (it might not if only activities table was created)
+    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='sessions'")
+    if cursor.fetchone() is None:
+        # Create sessions table with minimal schema
+        conn.executescript("""
+            CREATE TABLE IF NOT EXISTS sessions (
+                id TEXT PRIMARY KEY,
+                project_path TEXT NOT NULL,
+                started_at TEXT NOT NULL,
+                ended_at TEXT,
+                summary TEXT,
+                tags TEXT,
+                metadata TEXT
+            );
+            CREATE INDEX IF NOT EXISTS idx_sessions_started ON sessions(started_at DESC);
+            CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project_path);
+        """)
+        conn.commit()
+
+    cursor.execute(
+        """
+        INSERT OR IGNORE INTO sessions (id, project_path, started_at)
+        VALUES (?, ?, ?)
+        """,
+        (session_id, project_path, now),
+    )
+    conn.commit()
+
+
+def get_or_create_session(conn: sqlite3.Connection, project_path: str) -> str:
+    """Get the current session ID, creating a new one if needed.
+
+    Session management logic:
+    1. Check for existing session file
+    2. If exists and not timed out, use it and update last_activity
+    3. If doesn't exist or timed out, create new session
+
+    Returns:
+        The session ID to use for activity logging
+    """
+    session_data = load_session_file()
+    now_iso = datetime.now(timezone.utc).isoformat()
+
+    if session_data and is_session_valid(session_data):
+        # Update last activity time
+        session_data["last_activity_at"] = now_iso
+        save_session_file(session_data)
+        return session_data["session_id"]
+
+    # Create new session
+    session_id = generate_session_id()
+
+    # Create in database
+    create_session_in_db(conn, session_id, project_path)
+
+    # Save to file
+    session_data = {
+        "session_id": session_id,
+        "project_path": project_path,
+        "started_at": now_iso,
+        "last_activity_at": now_iso,
+    }
+    save_session_file(session_data)
+
+    return session_id
+
+
+def redact_sensitive_fields(data: dict) -> dict:
+    """Redact sensitive fields from a dictionary for safe logging.
+
+    Recursively processes nested dicts and lists.
+    """
+    if not isinstance(data, dict):
+        return data
+
+    result = {}
+    for key, value in data.items():
+        # Check if key matches sensitive patterns
+        is_sensitive = any(
+            re.search(pattern, str(key))
+            for pattern in SENSITIVE_FIELD_PATTERNS
+        )
+
+        if is_sensitive:
+            result[key] = '[REDACTED]'
+        elif isinstance(value, dict):
+            result[key] = redact_sensitive_fields(value)
+        elif isinstance(value, list):
+            result[key] = [
+                redact_sensitive_fields(item) if isinstance(item, dict) else item
+                for item in value
+            ]
+        else:
+            result[key] = value
+
+    return result
+
+
+def get_db_path() -> Path:
+    """Get the database path for the current project."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "cortex.db"
+
+
+def ensure_database(db_path: Path) -> sqlite3.Connection:
+    """Ensure database exists and is initialized.
+
+    Auto-creates the database and schema if it doesn't exist.
+    This enables 'out of the box' functionality.
+    """
+    db_path.parent.mkdir(parents=True, exist_ok=True)
+    conn = sqlite3.connect(str(db_path))
+
+    # Check if schema exists
+    cursor = conn.cursor()
+    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='activities'")
+    if cursor.fetchone() is None:
+        # Apply minimal schema for activities (full schema applied by MCP)
+        conn.executescript("""
+            CREATE TABLE IF NOT EXISTS activities (
+                id TEXT PRIMARY KEY,
+                session_id TEXT,
+                agent_id TEXT,
+                timestamp TEXT NOT NULL,
+                event_type TEXT NOT NULL,
+                tool_name TEXT,
+                tool_input TEXT,
+                tool_output TEXT,
+                duration_ms INTEGER,
+                success INTEGER DEFAULT 1,
+                error_message TEXT,
+                project_path TEXT,
+                file_path TEXT,
+                metadata TEXT
+            );
+            CREATE INDEX IF NOT EXISTS idx_activities_timestamp ON activities(timestamp DESC);
+            CREATE INDEX IF NOT EXISTS idx_activities_tool ON activities(tool_name);
+        """)
+        conn.commit()
+
+    return conn
+
+
+def generate_id() -> str:
+    """Generate a unique activity ID."""
+    timestamp_ms = int(datetime.now().timestamp() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"act_{timestamp_ms}_{random_hex}"
+
+
+def truncate(text: str, max_length: int = 10000) -> str:
+    """Truncate text to max length."""
+    if len(text) <= max_length:
+        return text
+    return text[:max_length - 20] + "\n... [truncated]"
+
+
+def main():
+    """Process PreToolUse hook."""
+    try:
+        # Read input from stdin with timeout protection
+        import select
+        if sys.platform != "win32":
+            # Unix: use select for timeout
+            ready, _, _ = select.select([sys.stdin], [], [], 5.0)
+            if not ready:
+                print(json.dumps({}))
+                return
+
+        # Read all input at once
+        raw_input = sys.stdin.read()
+        if not raw_input or not raw_input.strip():
+            print(json.dumps({}))
+            return
+
+        input_data = json.loads(raw_input)
+
+        # Extract data from hook input
+        tool_name = input_data.get("tool_name")
+        tool_input = input_data.get("tool_input", {})
+        agent_id = input_data.get("agent_id")
+
+        # Skip logging our own tools to prevent recursion
+        # MCP tools are named like "mcp__omni-cortex__cortex_remember"
+        if tool_name and ("cortex_" in tool_name or "omni-cortex" in tool_name):
+            print(json.dumps({}))
+            return
+
+        project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+
+        # Auto-initialize database (creates if not exists)
+        db_path = get_db_path()
+        conn = ensure_database(db_path)
+
+        # Get or create session (auto-manages session lifecycle)
+        session_id = get_or_create_session(conn, project_path)
+
+        # Redact sensitive fields before logging
+        safe_input = redact_sensitive_fields(tool_input) if isinstance(tool_input, dict) else tool_input
+
+        # Insert activity record
+        cursor = conn.cursor()
+        cursor.execute(
+            """
+            INSERT INTO activities (
+                id, session_id, agent_id, timestamp, event_type,
+                tool_name, tool_input, project_path
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                generate_id(),
+                session_id,
+                agent_id,
+                datetime.now(timezone.utc).isoformat(),
+                "pre_tool_use",
+                tool_name,
+                truncate(json.dumps(safe_input, default=str)),
+                project_path,
+            ),
+        )
+        conn.commit()
+        conn.close()
+
+        # Return empty response (no modification to tool call)
+        print(json.dumps({}))
+
+    except Exception as e:
+        # Hooks should never block - log error but continue
+        print(json.dumps({"systemMessage": f"Cortex pre_tool_use: {e}"}))
+
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
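The hook above reads one JSON payload from stdin (tool_name, tool_input, agent_id), auto-creates .omni-cortex/cortex.db under CLAUDE_PROJECT_DIR, and appends a redacted pre_tool_use row to the activities table. As a rough, illustrative sketch only (it is not part of the wheel), the following drives the script by hand and reads the logged row back; the payload values are invented, and the exact fields Claude Code supplies may differ.

# Illustrative sketch, not shipped in the package.
import json
import os
import sqlite3
import subprocess

payload = {
    "tool_name": "Write",
    # "api_key" matches a sensitive-field pattern and should be stored as "[REDACTED]"
    "tool_input": {"file_path": "notes.md", "api_key": "hunter2"},
}
env = {**os.environ, "CLAUDE_PROJECT_DIR": os.getcwd()}

# The hook prints "{}" and always exits 0.
subprocess.run(
    ["python", "hooks/pre_tool_use.py"],
    input=json.dumps(payload),
    text=True,
    env=env,
    check=True,
)

# Read back the most recent activity row the hook just inserted.
conn = sqlite3.connect(".omni-cortex/cortex.db")
row = conn.execute(
    "SELECT event_type, tool_name, tool_input FROM activities "
    "ORDER BY timestamp DESC LIMIT 1"
).fetchone()
print(row)
conn.close()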
omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/stop.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+"""Stop hook - logs session end when Claude Code stops.
+
+This hook is called when Claude Code exits or the session ends.
+It finalizes the session and generates a summary.
+
+Hook configuration for settings.json:
+    {
+      "hooks": {
+        "Stop": [
+          {
+            "type": "command",
+            "command": "python hooks/stop.py"
+          }
+        ]
+      }
+    }
+"""
+
+import json
+import sys
+import os
+import sqlite3
+from datetime import datetime, timezone
+from pathlib import Path
+
+
+def get_db_path() -> Path:
+    """Get the database path for the current project."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "cortex.db"
+
+
+def generate_id(prefix: str) -> str:
+    """Generate a unique ID."""
+    timestamp_ms = int(datetime.now().timestamp() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"{prefix}_{timestamp_ms}_{random_hex}"
+
+
+def main():
+    """Process Stop hook."""
+    try:
+        # Read input from stdin
+        input_data = json.load(sys.stdin)
+
+        db_path = get_db_path()
+
+        # Only process if database exists
+        if not db_path.exists():
+            print(json.dumps({}))
+            return
+
+        session_id = os.environ.get("CLAUDE_SESSION_ID")
+        if not session_id:
+            print(json.dumps({}))
+            return
+
+        now = datetime.now(timezone.utc).isoformat()
+
+        # Connect to database
+        conn = sqlite3.connect(str(db_path))
+        conn.row_factory = sqlite3.Row
+        cursor = conn.cursor()
+
+        # Check if session exists
+        cursor.execute("SELECT id FROM sessions WHERE id = ?", (session_id,))
+        if not cursor.fetchone():
+            print(json.dumps({}))
+            conn.close()
+            return
+
+        # End the session
+        cursor.execute(
+            "UPDATE sessions SET ended_at = ? WHERE id = ? AND ended_at IS NULL",
+            (now, session_id),
+        )
+
+        # Gather session statistics
+        cursor.execute(
+            "SELECT COUNT(*) as cnt FROM activities WHERE session_id = ?",
+            (session_id,),
+        )
+        total_activities = cursor.fetchone()["cnt"]
+
+        cursor.execute(
+            "SELECT COUNT(*) as cnt FROM memories WHERE source_session_id = ?",
+            (session_id,),
+        )
+        total_memories = cursor.fetchone()["cnt"]
+
+        # Get tools used
+        cursor.execute(
+            """
+            SELECT tool_name, COUNT(*) as cnt
+            FROM activities
+            WHERE session_id = ? AND tool_name IS NOT NULL
+            GROUP BY tool_name
+            """,
+            (session_id,),
+        )
+        tools_used = {row["tool_name"]: row["cnt"] for row in cursor.fetchall()}
+
+        # Get files modified
+        cursor.execute(
+            """
+            SELECT DISTINCT file_path
+            FROM activities
+            WHERE session_id = ? AND file_path IS NOT NULL
+            """,
+            (session_id,),
+        )
+        files_modified = [row["file_path"] for row in cursor.fetchall()]
+
+        # Get errors
+        cursor.execute(
+            """
+            SELECT error_message
+            FROM activities
+            WHERE session_id = ? AND success = 0 AND error_message IS NOT NULL
+            LIMIT 10
+            """,
+            (session_id,),
+        )
+        key_errors = [row["error_message"] for row in cursor.fetchall()]
+
+        # Create or update summary
+        cursor.execute(
+            "SELECT id FROM session_summaries WHERE session_id = ?",
+            (session_id,),
+        )
+        existing = cursor.fetchone()
+
+        if existing:
+            cursor.execute(
+                """
+                UPDATE session_summaries
+                SET key_errors = ?, files_modified = ?, tools_used = ?,
+                    total_activities = ?, total_memories_created = ?
+                WHERE session_id = ?
+                """,
+                (
+                    json.dumps(key_errors) if key_errors else None,
+                    json.dumps(files_modified) if files_modified else None,
+                    json.dumps(tools_used) if tools_used else None,
+                    total_activities,
+                    total_memories,
+                    session_id,
+                ),
+            )
+        else:
+            cursor.execute(
+                """
+                INSERT INTO session_summaries (
+                    id, session_id, key_errors, files_modified, tools_used,
+                    total_activities, total_memories_created, created_at
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    generate_id("sum"),
+                    session_id,
+                    json.dumps(key_errors) if key_errors else None,
+                    json.dumps(files_modified) if files_modified else None,
+                    json.dumps(tools_used) if tools_used else None,
+                    total_activities,
+                    total_memories,
+                    now,
+                ),
+            )
+
+        conn.commit()
+        conn.close()
+
+        print(json.dumps({}))
+
+    except Exception as e:
+        # Hooks should never block
+        print(json.dumps({"systemMessage": f"Cortex stop: {e}"}))
+
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
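stop.py finalizes the session named by CLAUDE_SESSION_ID and writes aggregate counts into session_summaries; the memories and session_summaries tables come from the full schema applied by the MCP server rather than from these hooks, so the statements above only succeed once that schema exists. Purely as an illustration under that assumption (not part of the package), a small query that inspects the most recently finalized session:

# Illustrative sketch, not shipped in the package. Assumes the full MCP schema.
import json
import sqlite3

conn = sqlite3.connect(".omni-cortex/cortex.db")
conn.row_factory = sqlite3.Row

row = conn.execute(
    """
    SELECT s.id, s.started_at, s.ended_at,
           m.total_activities, m.total_memories_created, m.tools_used
    FROM sessions s
    JOIN session_summaries m ON m.session_id = s.id
    ORDER BY s.started_at DESC
    LIMIT 1
    """
).fetchone()

if row is not None:
    print(row["id"], row["started_at"], "->", row["ended_at"])
    print("activities:", row["total_activities"], "memories:", row["total_memories_created"])
    print("tools:", json.loads(row["tools_used"]) if row["tools_used"] else {})
conn.close()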
omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/subagent_stop.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+"""SubagentStop hook - logs when a subagent completes.
+
+This hook is called when a subagent (spawned by the Task tool) finishes.
+It logs the subagent completion and any results.
+
+Hook configuration for settings.json:
+    {
+      "hooks": {
+        "SubagentStop": [
+          {
+            "type": "command",
+            "command": "python hooks/subagent_stop.py"
+          }
+        ]
+      }
+    }
+"""
+
+import json
+import sys
+import os
+import sqlite3
+from datetime import datetime, timezone
+from pathlib import Path
+
+
+def get_db_path() -> Path:
+    """Get the database path for the current project."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "cortex.db"
+
+
+def generate_id() -> str:
+    """Generate a unique activity ID."""
+    timestamp_ms = int(datetime.now().timestamp() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"act_{timestamp_ms}_{random_hex}"
+
+
+def truncate(text: str, max_length: int = 10000) -> str:
+    """Truncate text to max length."""
+    if len(text) <= max_length:
+        return text
+    return text[:max_length - 20] + "\n... [truncated]"
+
+
+def main():
+    """Process SubagentStop hook."""
+    try:
+        # Read input from stdin
+        input_data = json.load(sys.stdin)
+
+        db_path = get_db_path()
+
+        # Only log if database exists
+        if not db_path.exists():
+            print(json.dumps({}))
+            return
+
+        # Extract data from hook input
+        subagent_id = input_data.get("subagent_id")
+        subagent_type = input_data.get("subagent_type", "subagent")
+        result = input_data.get("result", {})
+
+        session_id = os.environ.get("CLAUDE_SESSION_ID")
+        project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+        now = datetime.now(timezone.utc).isoformat()
+
+        # Connect to database
+        conn = sqlite3.connect(str(db_path))
+        cursor = conn.cursor()
+
+        # Log the subagent completion as an activity
+        cursor.execute(
+            """
+            INSERT INTO activities (
+                id, session_id, agent_id, timestamp, event_type,
+                tool_name, tool_output, success, project_path
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                generate_id(),
+                session_id,
+                subagent_id,
+                now,
+                "subagent_stop",
+                f"subagent_{subagent_type}",
+                truncate(json.dumps(result, default=str)),
+                1,
+                project_path,
+            ),
+        )
+
+        # Update or create agent record
+        cursor.execute(
+            """
+            INSERT INTO agents (id, name, type, first_seen, last_seen, total_activities)
+            VALUES (?, ?, ?, ?, ?, 1)
+            ON CONFLICT(id) DO UPDATE SET
+                last_seen = ?,
+                total_activities = total_activities + 1
+            """,
+            (subagent_id, None, "subagent", now, now, now),
+        )
+
+        conn.commit()
+        conn.close()
+
+        print(json.dumps({}))
+
+    except Exception as e:
+        # Hooks should never block
+        print(json.dumps({"systemMessage": f"Cortex subagent_stop: {e}"}))
+
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
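subagent_stop.py records one subagent_stop activity per completed subagent and upserts the agent into the agents table, which is likewise part of the MCP-applied schema rather than created here. The sketch below (illustrative only, not shipped in the wheel) runs the hook twice with an invented payload to show the ON CONFLICT counter increment; the field names mirror what the script reads, but the real payload sent by Claude Code may differ.

# Illustrative sketch, not shipped in the package. Assumes cortex.db and the
# agents table already exist; otherwise the hook reports an error and exits 0.
import json
import os
import sqlite3
import subprocess

payload = {
    "subagent_id": "agent_demo_1",   # invented ID for the example
    "subagent_type": "researcher",
    "result": {"status": "done"},
}
env = {
    **os.environ,
    "CLAUDE_PROJECT_DIR": os.getcwd(),
    "CLAUDE_SESSION_ID": "sess_demo",  # invented session ID
}

for _ in range(2):
    subprocess.run(
        ["python", "hooks/subagent_stop.py"],
        input=json.dumps(payload),
        text=True,
        env=env,
        check=True,
    )

conn = sqlite3.connect(".omni-cortex/cortex.db")
print(conn.execute(
    "SELECT id, type, total_activities FROM agents WHERE id = ?",
    ("agent_demo_1",),
).fetchone())  # expected: ('agent_demo_1', 'subagent', 2)
conn.close()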