omni-cortex 1.17.1-py3-none-any.whl → 1.17.3-py3-none-any.whl
This diff compares two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- omni_cortex/__init__.py +3 -0
- omni_cortex/_bundled/dashboard/backend/.env.example +12 -0
- omni_cortex/_bundled/dashboard/backend/backfill_summaries.py +280 -0
- omni_cortex/_bundled/dashboard/backend/chat_service.py +631 -0
- omni_cortex/_bundled/dashboard/backend/database.py +1773 -0
- omni_cortex/_bundled/dashboard/backend/image_service.py +552 -0
- omni_cortex/_bundled/dashboard/backend/logging_config.py +122 -0
- omni_cortex/_bundled/dashboard/backend/main.py +1888 -0
- omni_cortex/_bundled/dashboard/backend/models.py +472 -0
- omni_cortex/_bundled/dashboard/backend/project_config.py +170 -0
- omni_cortex/_bundled/dashboard/backend/project_scanner.py +164 -0
- omni_cortex/_bundled/dashboard/backend/prompt_security.py +111 -0
- omni_cortex/_bundled/dashboard/backend/pyproject.toml +23 -0
- omni_cortex/_bundled/dashboard/backend/security.py +104 -0
- omni_cortex/_bundled/dashboard/backend/uv.lock +1110 -0
- omni_cortex/_bundled/dashboard/backend/websocket_manager.py +104 -0
- omni_cortex/_bundled/hooks/post_tool_use.py +497 -0
- omni_cortex/_bundled/hooks/pre_tool_use.py +277 -0
- omni_cortex/_bundled/hooks/session_utils.py +186 -0
- omni_cortex/_bundled/hooks/stop.py +219 -0
- omni_cortex/_bundled/hooks/subagent_stop.py +120 -0
- omni_cortex/_bundled/hooks/user_prompt.py +220 -0
- omni_cortex/categorization/__init__.py +9 -0
- omni_cortex/categorization/auto_tags.py +166 -0
- omni_cortex/categorization/auto_type.py +165 -0
- omni_cortex/config.py +141 -0
- omni_cortex/dashboard.py +238 -0
- omni_cortex/database/__init__.py +24 -0
- omni_cortex/database/connection.py +137 -0
- omni_cortex/database/migrations.py +210 -0
- omni_cortex/database/schema.py +212 -0
- omni_cortex/database/sync.py +421 -0
- omni_cortex/decay/__init__.py +7 -0
- omni_cortex/decay/importance.py +147 -0
- omni_cortex/embeddings/__init__.py +35 -0
- omni_cortex/embeddings/local.py +442 -0
- omni_cortex/models/__init__.py +20 -0
- omni_cortex/models/activity.py +265 -0
- omni_cortex/models/agent.py +144 -0
- omni_cortex/models/memory.py +395 -0
- omni_cortex/models/relationship.py +206 -0
- omni_cortex/models/session.py +290 -0
- omni_cortex/resources/__init__.py +1 -0
- omni_cortex/search/__init__.py +22 -0
- omni_cortex/search/hybrid.py +197 -0
- omni_cortex/search/keyword.py +204 -0
- omni_cortex/search/ranking.py +127 -0
- omni_cortex/search/semantic.py +232 -0
- omni_cortex/server.py +360 -0
- omni_cortex/setup.py +284 -0
- omni_cortex/tools/__init__.py +13 -0
- omni_cortex/tools/activities.py +453 -0
- omni_cortex/tools/memories.py +536 -0
- omni_cortex/tools/sessions.py +311 -0
- omni_cortex/tools/utilities.py +477 -0
- omni_cortex/utils/__init__.py +13 -0
- omni_cortex/utils/formatting.py +282 -0
- omni_cortex/utils/ids.py +72 -0
- omni_cortex/utils/timestamps.py +129 -0
- omni_cortex/utils/truncation.py +111 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/METADATA +1 -1
- omni_cortex-1.17.3.dist-info/RECORD +86 -0
- omni_cortex-1.17.1.dist-info/RECORD +0 -26
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/main.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/stop.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.3.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/WHEEL +0 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/entry_points.txt +0 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.3.dist-info}/licenses/LICENSE +0 -0
omni_cortex/_bundled/hooks/pre_tool_use.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python3
+"""PreToolUse hook - logs tool call before execution.
+
+This hook is called by Claude Code before each tool is executed.
+It logs the tool name and input to the Cortex activity database.
+
+Hook configuration for settings.json:
+    {
+      "hooks": {
+        "PreToolUse": [
+          {
+            "type": "command",
+            "command": "python hooks/pre_tool_use.py"
+          }
+        ]
+      }
+    }
+"""
+
+import json
+import re
+import sys
+import os
+import sqlite3
+from datetime import datetime, timezone
+from pathlib import Path
+
+# Import shared session management
+from session_utils import get_or_create_session
+
+
+# === Tool Timing Management ===
+# Store tool start timestamps for duration calculation in post_tool_use
+
+def get_timing_file_path() -> Path:
+    """Get the path to the tool timing file."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "tool_timing.json"
+
+
+def load_timing_data() -> dict:
+    """Load current timing data from file."""
+    timing_file = get_timing_file_path()
+    if not timing_file.exists():
+        return {}
+    try:
+        with open(timing_file, "r") as f:
+            return json.load(f)
+    except (json.JSONDecodeError, IOError):
+        return {}
+
+
+def save_timing_data(data: dict) -> None:
+    """Save timing data to file."""
+    timing_file = get_timing_file_path()
+    timing_file.parent.mkdir(parents=True, exist_ok=True)
+    with open(timing_file, "w") as f:
+        json.dump(data, f)
+
+
+def record_tool_start(tool_name: str, activity_id: str, agent_id: str = None) -> None:
+    """Record the start time for a tool execution.
+
+    Args:
+        tool_name: Name of the tool being executed
+        activity_id: Unique activity ID for this tool call
+        agent_id: Optional agent ID
+    """
+    timing_data = load_timing_data()
+
+    # Use activity_id as key (unique per tool call)
+    # Also store by tool_name for simpler matching in post_tool_use
+    key = f"{tool_name}_{agent_id or 'main'}"
+
+    timing_data[key] = {
+        "activity_id": activity_id,
+        "tool_name": tool_name,
+        "agent_id": agent_id,
+        "start_time_ms": int(datetime.now(timezone.utc).timestamp() * 1000),
+        "start_time_iso": datetime.now(timezone.utc).isoformat(),
+    }
+
+    # Clean up old entries (older than 1 hour) to prevent file bloat
+    now_ms = int(datetime.now(timezone.utc).timestamp() * 1000)
+    one_hour_ms = 60 * 60 * 1000
+    timing_data = {
+        k: v for k, v in timing_data.items()
+        if now_ms - v.get("start_time_ms", 0) < one_hour_ms
+    }
+
+    save_timing_data(timing_data)
+
+
+# Patterns for sensitive field names that should be redacted
+SENSITIVE_FIELD_PATTERNS = [
+    r'(?i)(api[_-]?key|apikey)',
+    r'(?i)(password|passwd|pwd)',
+    r'(?i)(secret|token|credential)',
+    r'(?i)(auth[_-]?token|access[_-]?token)',
+    r'(?i)(private[_-]?key|ssh[_-]?key)',
+]
+
+
+def redact_sensitive_fields(data: dict) -> dict:
+    """Redact sensitive fields from a dictionary for safe logging.
+
+    Recursively processes nested dicts and lists.
+    """
+    if not isinstance(data, dict):
+        return data
+
+    result = {}
+    for key, value in data.items():
+        # Check if key matches sensitive patterns
+        is_sensitive = any(
+            re.search(pattern, str(key))
+            for pattern in SENSITIVE_FIELD_PATTERNS
+        )
+
+        if is_sensitive:
+            result[key] = '[REDACTED]'
+        elif isinstance(value, dict):
+            result[key] = redact_sensitive_fields(value)
+        elif isinstance(value, list):
+            result[key] = [
+                redact_sensitive_fields(item) if isinstance(item, dict) else item
+                for item in value
+            ]
+        else:
+            result[key] = value
+
+    return result
+
+
+def get_db_path() -> Path:
+    """Get the database path for the current project."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "cortex.db"
+
+
+def ensure_database(db_path: Path) -> sqlite3.Connection:
+    """Ensure database exists and is initialized.
+
+    Auto-creates the database and schema if it doesn't exist.
+    This enables 'out of the box' functionality.
+    """
+    db_path.parent.mkdir(parents=True, exist_ok=True)
+    conn = sqlite3.connect(str(db_path))
+
+    # Check if schema exists
+    cursor = conn.cursor()
+    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='activities'")
+    if cursor.fetchone() is None:
+        # Apply minimal schema for activities (full schema applied by MCP)
+        conn.executescript("""
+            CREATE TABLE IF NOT EXISTS activities (
+                id TEXT PRIMARY KEY,
+                session_id TEXT,
+                agent_id TEXT,
+                timestamp TEXT NOT NULL,
+                event_type TEXT NOT NULL,
+                tool_name TEXT,
+                tool_input TEXT,
+                tool_output TEXT,
+                duration_ms INTEGER,
+                success INTEGER DEFAULT 1,
+                error_message TEXT,
+                project_path TEXT,
+                file_path TEXT,
+                metadata TEXT
+            );
+            CREATE INDEX IF NOT EXISTS idx_activities_timestamp ON activities(timestamp DESC);
+            CREATE INDEX IF NOT EXISTS idx_activities_tool ON activities(tool_name);
+        """)
+        conn.commit()
+
+    return conn
+
+
+def generate_id() -> str:
+    """Generate a unique activity ID."""
+    timestamp_ms = int(datetime.now().timestamp() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"act_{timestamp_ms}_{random_hex}"
+
+
+def truncate(text: str, max_length: int = 10000) -> str:
+    """Truncate text to max length."""
+    if len(text) <= max_length:
+        return text
+    return text[:max_length - 20] + "\n... [truncated]"
+
+
+def main():
+    """Process PreToolUse hook."""
+    try:
+        # Read input from stdin with timeout protection
+        import select
+        if sys.platform != "win32":
+            # Unix: use select for timeout
+            ready, _, _ = select.select([sys.stdin], [], [], 5.0)
+            if not ready:
+                print(json.dumps({}))
+                return
+
+        # Read all input at once
+        raw_input = sys.stdin.read()
+        if not raw_input or not raw_input.strip():
+            print(json.dumps({}))
+            return
+
+        input_data = json.loads(raw_input)
+
+        # Extract data from hook input
+        tool_name = input_data.get("tool_name")
+        tool_input = input_data.get("tool_input", {})
+        agent_id = input_data.get("agent_id")
+
+        # Skip logging our own tools to prevent recursion
+        # MCP tools are named like "mcp__omni-cortex__cortex_remember"
+        if tool_name and ("cortex_" in tool_name or "omni-cortex" in tool_name):
+            print(json.dumps({}))
+            return
+
+        project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+
+        # Auto-initialize database (creates if not exists)
+        db_path = get_db_path()
+        conn = ensure_database(db_path)
+
+        # Get or create session (auto-manages session lifecycle)
+        session_id = get_or_create_session(conn, project_path)
+
+        # Redact sensitive fields before logging
+        safe_input = redact_sensitive_fields(tool_input) if isinstance(tool_input, dict) else tool_input
+
+        # Generate activity ID
+        activity_id = generate_id()
+
+        # Record tool start time for duration calculation
+        record_tool_start(tool_name, activity_id, agent_id)
+
+        # Insert activity record
+        cursor = conn.cursor()
+        cursor.execute(
+            """
+            INSERT INTO activities (
+                id, session_id, agent_id, timestamp, event_type,
+                tool_name, tool_input, project_path
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                activity_id,
+                session_id,
+                agent_id,
+                datetime.now(timezone.utc).isoformat(),
+                "pre_tool_use",
+                tool_name,
+                truncate(json.dumps(safe_input, default=str)),
+                project_path,
+            ),
+        )
+        conn.commit()
+        conn.close()
+
+        # Return empty response (no modification to tool call)
+        print(json.dumps({}))
+
+    except Exception as e:
+        # Hooks should never block - log error but continue
+        print(json.dumps({"systemMessage": f"Cortex pre_tool_use: {e}"}))
+
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
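The hook above reads one JSON payload from stdin, redacts sensitive fields, writes an activity row to `.omni-cortex/cortex.db`, and always prints `{}` so the tool call is never blocked or modified. A minimal sketch of exercising it by hand, assuming the bundled `hooks/` directory is on disk and you run from the project root (the payload values below are made up for illustration):

```python
# Hypothetical manual run: pipe a sample PreToolUse payload into the hook.
import json
import os
import subprocess

payload = {
    "tool_name": "Edit",  # any non-cortex tool gets logged
    "tool_input": {"file_path": "app.py", "api_key": "sk-fake"},  # api_key is redacted before logging
    "agent_id": None,
}
result = subprocess.run(
    ["python", "hooks/pre_tool_use.py"],
    input=json.dumps(payload),
    capture_output=True,
    text=True,
    env={**os.environ, "CLAUDE_PROJECT_DIR": os.getcwd()},
)
print(result.stdout.strip())  # expected: {} -- the hook never modifies the tool call
```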
omni_cortex/_bundled/hooks/session_utils.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python3
+"""Shared session management utilities for Claude Code hooks.
+
+This module provides session management functionality that can be shared
+across pre_tool_use.py and post_tool_use.py hooks to ensure consistent
+session tracking.
+
+Session Management Logic:
+1. Check for existing session file at `.omni-cortex/current_session.json`
+2. If session exists and is valid (not timed out), use it
+3. If no valid session, create a new one in both file and database
+4. Update last_activity_at on each use to track session activity
+"""
+
+import json
+import os
+import sqlite3
+import time
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Optional
+
+
+# Session timeout in seconds (4 hours of inactivity = new session)
+SESSION_TIMEOUT_SECONDS = 4 * 60 * 60
+
+
+def generate_session_id() -> str:
+    """Generate a unique session ID matching the MCP format.
+
+    Returns:
+        Session ID in format: sess_{timestamp_ms}_{random_hex}
+    """
+    timestamp_ms = int(time.time() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"sess_{timestamp_ms}_{random_hex}"
+
+
+def get_session_file_path() -> Path:
+    """Get the path to the current session file.
+
+    Returns:
+        Path to .omni-cortex/current_session.json
+    """
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "current_session.json"
+
+
+def load_session_file() -> Optional[dict]:
+    """Load the current session from file if it exists.
+
+    Returns:
+        Session data dict or None if file doesn't exist or is invalid
+    """
+    session_file = get_session_file_path()
+    if not session_file.exists():
+        return None
+
+    try:
+        with open(session_file, "r") as f:
+            return json.load(f)
+    except (json.JSONDecodeError, IOError):
+        return None
+
+
+def save_session_file(session_data: dict) -> None:
+    """Save the current session to file.
+
+    Args:
+        session_data: Dict containing session_id, project_path, started_at, last_activity_at
+    """
+    session_file = get_session_file_path()
+    session_file.parent.mkdir(parents=True, exist_ok=True)
+
+    with open(session_file, "w") as f:
+        json.dump(session_data, f, indent=2)
+
+
+def is_session_valid(session_data: dict) -> bool:
+    """Check if a session is still valid (not timed out).
+
+    A session is valid if:
+    - It has a last_activity_at timestamp
+    - The timestamp is within SESSION_TIMEOUT_SECONDS of now
+
+    Args:
+        session_data: Session dict with last_activity_at field
+
+    Returns:
+        True if session is valid, False otherwise
+    """
+    last_activity = session_data.get("last_activity_at")
+    if not last_activity:
+        return False
+
+    try:
+        last_time = datetime.fromisoformat(last_activity.replace("Z", "+00:00"))
+        now = datetime.now(timezone.utc)
+        elapsed_seconds = (now - last_time).total_seconds()
+        return elapsed_seconds < SESSION_TIMEOUT_SECONDS
+    except (ValueError, TypeError):
+        return False
+
+
+def create_session_in_db(conn: sqlite3.Connection, session_id: str, project_path: str) -> None:
+    """Create a new session record in the database.
+
+    Also creates the sessions table if it doesn't exist (for first-run scenarios).
+
+    Args:
+        conn: SQLite database connection
+        session_id: The session ID to create
+        project_path: The project directory path
+    """
+    cursor = conn.cursor()
+    now = datetime.now(timezone.utc).isoformat()
+
+    # Check if sessions table exists (it might not if only activities table was created)
+    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='sessions'")
+    if cursor.fetchone() is None:
+        # Create sessions table with minimal schema
+        conn.executescript("""
+            CREATE TABLE IF NOT EXISTS sessions (
+                id TEXT PRIMARY KEY,
+                project_path TEXT NOT NULL,
+                started_at TEXT NOT NULL,
+                ended_at TEXT,
+                summary TEXT,
+                tags TEXT,
+                metadata TEXT
+            );
+            CREATE INDEX IF NOT EXISTS idx_sessions_started ON sessions(started_at DESC);
+            CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project_path);
+        """)
+        conn.commit()
+
+    cursor.execute(
+        """
+        INSERT OR IGNORE INTO sessions (id, project_path, started_at)
+        VALUES (?, ?, ?)
+        """,
+        (session_id, project_path, now),
+    )
+    conn.commit()
+
+
+def get_or_create_session(conn: sqlite3.Connection, project_path: str) -> str:
+    """Get the current session ID, creating a new one if needed.
+
+    Session management logic:
+    1. Check for existing session file
+    2. If exists and not timed out, use it and update last_activity
+    3. If doesn't exist or timed out, create new session
+
+    Args:
+        conn: SQLite database connection
+        project_path: The project directory path
+
+    Returns:
+        The session ID to use for activity logging
+    """
+    session_data = load_session_file()
+    now_iso = datetime.now(timezone.utc).isoformat()
+
+    if session_data and is_session_valid(session_data):
+        # Update last activity time
+        session_data["last_activity_at"] = now_iso
+        save_session_file(session_data)
+        return session_data["session_id"]
+
+    # Create new session
+    session_id = generate_session_id()
+
+    # Create in database
+    create_session_in_db(conn, session_id, project_path)
+
+    # Save to file
+    session_data = {
+        "session_id": session_id,
+        "project_path": project_path,
+        "started_at": now_iso,
+        "last_activity_at": now_iso,
+    }
+    save_session_file(session_data)
+
+    return session_id
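session_utils.py is the piece both hooks share: it keeps the current session in `.omni-cortex/current_session.json`, mirrors it into the `sessions` table, and expires it after four hours of inactivity. A small sketch of calling the helper directly, assuming it is run with the bundled `hooks/` directory importable; the printed ID is illustrative:

```python
# Illustrative direct use of the shared helper (the same calls the hooks make).
import os
import sqlite3
from pathlib import Path

from session_utils import get_or_create_session

project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
db_path = Path(project_path) / ".omni-cortex" / "cortex.db"
db_path.parent.mkdir(parents=True, exist_ok=True)  # the hooks do this in ensure_database()
conn = sqlite3.connect(str(db_path))

session_id = get_or_create_session(conn, project_path)
print(session_id)  # e.g. sess_1700000000000_a1b2c3d4 -- sess_{timestamp_ms}_{random_hex}

# A second call within the 4-hour SESSION_TIMEOUT_SECONDS window reuses the same session.
assert get_or_create_session(conn, project_path) == session_id
conn.close()
```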
omni_cortex/_bundled/hooks/stop.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python3
+"""Stop hook - logs session end when Claude Code stops.
+
+This hook is called when Claude Code exits or the session ends.
+It finalizes the session and generates a summary.
+
+Hook configuration for settings.json:
+    {
+      "hooks": {
+        "Stop": [
+          {
+            "type": "command",
+            "command": "python hooks/stop.py"
+          }
+        ]
+      }
+    }
+"""
+
+import json
+import sys
+import os
+import sqlite3
+from datetime import datetime, timezone
+from pathlib import Path
+
+
+def get_db_path() -> Path:
+    """Get the database path for the current project."""
+    project_path = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
+    return Path(project_path) / ".omni-cortex" / "cortex.db"
+
+
+def generate_id(prefix: str) -> str:
+    """Generate a unique ID."""
+    timestamp_ms = int(datetime.now().timestamp() * 1000)
+    random_hex = os.urandom(4).hex()
+    return f"{prefix}_{timestamp_ms}_{random_hex}"
+
+
+def main():
+    """Process Stop hook."""
+    try:
+        # Read input from stdin
+        input_data = json.load(sys.stdin)
+
+        db_path = get_db_path()
+
+        # Only process if database exists
+        if not db_path.exists():
+            print(json.dumps({}))
+            return
+
+        session_id = os.environ.get("CLAUDE_SESSION_ID")
+        if not session_id:
+            print(json.dumps({}))
+            return
+
+        now = datetime.now(timezone.utc).isoformat()
+
+        # Connect to database
+        conn = sqlite3.connect(str(db_path))
+        conn.row_factory = sqlite3.Row
+        cursor = conn.cursor()
+
+        # Check if session exists
+        cursor.execute("SELECT id FROM sessions WHERE id = ?", (session_id,))
+        if not cursor.fetchone():
+            print(json.dumps({}))
+            conn.close()
+            return
+
+        # Get session start time for duration calculation
+        cursor.execute("SELECT started_at FROM sessions WHERE id = ?", (session_id,))
+        session_row = cursor.fetchone()
+        session_duration_ms = None
+
+        if session_row and session_row["started_at"]:
+            try:
+                started_at = session_row["started_at"]
+                started_dt = datetime.fromisoformat(started_at.replace("Z", "+00:00"))
+                ended_dt = datetime.now(timezone.utc)
+                session_duration_ms = int((ended_dt - started_dt).total_seconds() * 1000)
+            except (ValueError, TypeError):
+                pass
+
+        # End the session with duration
+        cursor.execute(
+            "UPDATE sessions SET ended_at = ?, duration_ms = ? WHERE id = ? AND ended_at IS NULL",
+            (now, session_duration_ms, session_id),
+        )
+
+        # Gather session statistics
+        cursor.execute(
+            "SELECT COUNT(*) as cnt FROM activities WHERE session_id = ?",
+            (session_id,),
+        )
+        total_activities = cursor.fetchone()["cnt"]
+
+        cursor.execute(
+            "SELECT COUNT(*) as cnt FROM memories WHERE source_session_id = ?",
+            (session_id,),
+        )
+        total_memories = cursor.fetchone()["cnt"]
+
+        # Get tools used
+        cursor.execute(
+            """
+            SELECT tool_name, COUNT(*) as cnt
+            FROM activities
+            WHERE session_id = ? AND tool_name IS NOT NULL
+            GROUP BY tool_name
+            """,
+            (session_id,),
+        )
+        tools_used = {row["tool_name"]: row["cnt"] for row in cursor.fetchall()}
+
+        # Get files modified
+        cursor.execute(
+            """
+            SELECT DISTINCT file_path
+            FROM activities
+            WHERE session_id = ? AND file_path IS NOT NULL
+            """,
+            (session_id,),
+        )
+        files_modified = [row["file_path"] for row in cursor.fetchall()]
+
+        # Get errors
+        cursor.execute(
+            """
+            SELECT error_message
+            FROM activities
+            WHERE session_id = ? AND success = 0 AND error_message IS NOT NULL
+            LIMIT 10
+            """,
+            (session_id,),
+        )
+        key_errors = [row["error_message"] for row in cursor.fetchall()]
+
+        # Create or update summary
+        cursor.execute(
+            "SELECT id FROM session_summaries WHERE session_id = ?",
+            (session_id,),
+        )
+        existing = cursor.fetchone()
+
+        # Calculate tool duration breakdown from activities
+        cursor.execute(
+            """
+            SELECT tool_name, SUM(duration_ms) as total_ms, COUNT(*) as cnt
+            FROM activities
+            WHERE session_id = ? AND tool_name IS NOT NULL AND duration_ms IS NOT NULL
+            GROUP BY tool_name
+            """,
+            (session_id,),
+        )
+        tool_duration_breakdown = {
+            row["tool_name"]: {"total_ms": row["total_ms"], "count": row["cnt"]}
+            for row in cursor.fetchall()
+        }
+
+        if existing:
+            cursor.execute(
+                """
+                UPDATE session_summaries
+                SET key_errors = ?, files_modified = ?, tools_used = ?,
+                    total_activities = ?, total_memories_created = ?,
+                    duration_ms = ?, tool_duration_breakdown = ?
+                WHERE session_id = ?
+                """,
+                (
+                    json.dumps(key_errors) if key_errors else None,
+                    json.dumps(files_modified) if files_modified else None,
+                    json.dumps(tools_used) if tools_used else None,
+                    total_activities,
+                    total_memories,
+                    session_duration_ms,
+                    json.dumps(tool_duration_breakdown) if tool_duration_breakdown else None,
+                    session_id,
+                ),
+            )
+        else:
+            cursor.execute(
+                """
+                INSERT INTO session_summaries (
+                    id, session_id, key_errors, files_modified, tools_used,
+                    total_activities, total_memories_created, created_at,
+                    duration_ms, tool_duration_breakdown
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    generate_id("sum"),
+                    session_id,
+                    json.dumps(key_errors) if key_errors else None,
+                    json.dumps(files_modified) if files_modified else None,
+                    json.dumps(tools_used) if tools_used else None,
+                    total_activities,
+                    total_memories,
+                    now,
+                    session_duration_ms,
+                    json.dumps(tool_duration_breakdown) if tool_duration_breakdown else None,
+                ),
+            )
+
+        conn.commit()
+        conn.close()
+
+        print(json.dumps({}))
+
+    except Exception as e:
+        # Hooks should never block
+        print(json.dumps({"systemMessage": f"Cortex stop: {e}"}))
+
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
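stop.py closes out the session row (ended_at, duration_ms) and writes or updates one row in `session_summaries` with activity counts, per-tool durations, modified files, and recent errors; the summaries table itself comes from the full schema installed by the MCP server, not from this hook. A read-back sketch, assuming a populated database at the default `.omni-cortex/cortex.db` path (the query is illustrative, not part of the package):

```python
# Illustrative read-back of the summary row written by stop.py.
import json
import sqlite3

conn = sqlite3.connect(".omni-cortex/cortex.db")
conn.row_factory = sqlite3.Row

row = conn.execute(
    """
    SELECT session_id, total_activities, total_memories_created,
           duration_ms, tools_used, tool_duration_breakdown
    FROM session_summaries
    ORDER BY created_at DESC
    LIMIT 1
    """
).fetchone()

if row is not None:
    print(row["session_id"], row["total_activities"], row["duration_ms"])
    print(json.loads(row["tools_used"] or "{}"))               # per-tool call counts
    print(json.loads(row["tool_duration_breakdown"] or "{}"))  # per-tool total_ms and count
conn.close()
```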