omni-cortex 1.17.1-py3-none-any.whl → 1.17.2-py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- omni_cortex/__init__.py +3 -0
- omni_cortex/categorization/__init__.py +9 -0
- omni_cortex/categorization/auto_tags.py +166 -0
- omni_cortex/categorization/auto_type.py +165 -0
- omni_cortex/config.py +141 -0
- omni_cortex/dashboard.py +232 -0
- omni_cortex/database/__init__.py +24 -0
- omni_cortex/database/connection.py +137 -0
- omni_cortex/database/migrations.py +210 -0
- omni_cortex/database/schema.py +212 -0
- omni_cortex/database/sync.py +421 -0
- omni_cortex/decay/__init__.py +7 -0
- omni_cortex/decay/importance.py +147 -0
- omni_cortex/embeddings/__init__.py +35 -0
- omni_cortex/embeddings/local.py +442 -0
- omni_cortex/models/__init__.py +20 -0
- omni_cortex/models/activity.py +265 -0
- omni_cortex/models/agent.py +144 -0
- omni_cortex/models/memory.py +395 -0
- omni_cortex/models/relationship.py +206 -0
- omni_cortex/models/session.py +290 -0
- omni_cortex/resources/__init__.py +1 -0
- omni_cortex/search/__init__.py +22 -0
- omni_cortex/search/hybrid.py +197 -0
- omni_cortex/search/keyword.py +204 -0
- omni_cortex/search/ranking.py +127 -0
- omni_cortex/search/semantic.py +232 -0
- omni_cortex/server.py +360 -0
- omni_cortex/setup.py +278 -0
- omni_cortex/tools/__init__.py +13 -0
- omni_cortex/tools/activities.py +453 -0
- omni_cortex/tools/memories.py +536 -0
- omni_cortex/tools/sessions.py +311 -0
- omni_cortex/tools/utilities.py +477 -0
- omni_cortex/utils/__init__.py +13 -0
- omni_cortex/utils/formatting.py +282 -0
- omni_cortex/utils/ids.py +72 -0
- omni_cortex/utils/timestamps.py +129 -0
- omni_cortex/utils/truncation.py +111 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.2.dist-info}/METADATA +1 -1
- omni_cortex-1.17.2.dist-info/RECORD +65 -0
- omni_cortex-1.17.1.dist-info/RECORD +0 -26
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/main.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/stop.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.17.1.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.2.dist-info}/WHEEL +0 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.2.dist-info}/entry_points.txt +0 -0
- {omni_cortex-1.17.1.dist-info → omni_cortex-1.17.2.dist-info}/licenses/LICENSE +0 -0
omni_cortex/dashboard.py
ADDED
@@ -0,0 +1,232 @@
+"""Dashboard CLI for Omni-Cortex.
+
+Starts the web dashboard server for viewing and managing memories.
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+import webbrowser
+from pathlib import Path
+from time import sleep
+
+
+def check_editable_install() -> bool:
+    """Check if package is installed in editable (development) mode.
+
+    Returns True if editable, False if installed from PyPI.
+    """
+    try:
+        import importlib.metadata as metadata
+        dist = metadata.distribution("omni-cortex")
+        # Editable installs have a direct_url.json with editable=true
+        # or are installed via .egg-link
+        direct_url = dist.read_text("direct_url.json")
+        if direct_url and '"editable":true' in direct_url.replace(" ", ""):
+            return True
+    except Exception:
+        pass
+
+    # Alternative check: see if we're running from source directory
+    package_dir = Path(__file__).parent
+    repo_root = package_dir.parent.parent
+    if (repo_root / "pyproject.toml").exists() and (repo_root / ".git").exists():
+        # We're in a repo, check if there's an egg-link or editable marker
+        import site
+        for site_dir in [site.getusersitepackages()] + site.getsitepackages():
+            egg_link = Path(site_dir) / "omni-cortex.egg-link"
+            if egg_link.exists():
+                return True
+            # Check for __editable__ marker (PEP 660) - any version
+            for pth_file in Path(site_dir).glob("__editable__.omni_cortex*.pth"):
+                return True
+
+    return False
+
+
+def warn_non_editable_install() -> None:
+    """Warn if not running in editable mode during development."""
+    if not check_editable_install():
+        # Check if we appear to be in a development context
+        package_dir = Path(__file__).parent
+        repo_root = package_dir.parent.parent
+        if (repo_root / "pyproject.toml").exists() and (repo_root / ".git").exists():
+            print("[Dashboard] Note: Package may not be in editable mode.")
+            print("[Dashboard] If you see import errors, run: pip install -e .")
+            print()
+
+
+def find_dashboard_dir() -> Path | None:
+    """Find the dashboard directory.
+
+    Searches in order:
+    1. Development directory (cloned repo)
+    2. Package shared-data (installed via pip)
+    3. Site-packages share location
+    """
+    package_dir = Path(__file__).parent
+
+    # Check for development directory (repo structure)
+    # Go up from src/omni_cortex to repo root, then dashboard
+    repo_root = package_dir.parent.parent
+    dashboard_in_repo = repo_root / "dashboard"
+    if dashboard_in_repo.exists() and (dashboard_in_repo / "backend" / "main.py").exists():
+        return dashboard_in_repo
+
+    # Check pip shared-data location
+    # On Unix: ~/.local/share/omni-cortex/dashboard
+    # On Windows: %APPDATA%/Python/share/omni-cortex/dashboard
+    import site
+    for site_dir in site.getsitepackages() + [site.getusersitepackages()]:
+        share_dir = Path(site_dir).parent / "share" / "omni-cortex" / "dashboard"
+        if share_dir.exists() and (share_dir / "backend" / "main.py").exists():
+            return share_dir
+
+    # Check relative to sys.prefix (virtualenv)
+    share_in_prefix = Path(sys.prefix) / "share" / "omni-cortex" / "dashboard"
+    if share_in_prefix.exists() and (share_in_prefix / "backend" / "main.py").exists():
+        return share_in_prefix
+
+    return None
+
+
+def check_dependencies() -> bool:
+    """Check if dashboard dependencies are installed."""
+    try:
+        import uvicorn  # noqa: F401
+        import fastapi  # noqa: F401
+        return True
+    except ImportError:
+        return False
+
+
+def install_dependencies() -> bool:
+    """Install dashboard dependencies."""
+    required_packages = ["uvicorn", "fastapi"]
+
+    print("[Dashboard] Installing dependencies...")
+    try:
+        subprocess.check_call(
+            [sys.executable, "-m", "pip", "install", *required_packages, "-q"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        return True
+    except subprocess.CalledProcessError:
+        return False
+
+
+def start_server(dashboard_dir: Path, host: str, port: int, no_browser: bool) -> None:
+    """Start the dashboard server."""
+    backend_dir = dashboard_dir / "backend"
+
+    # Add backend to path
+    sys.path.insert(0, str(backend_dir))
+
+    # Change to backend directory for relative imports
+    original_cwd = os.getcwd()
+    os.chdir(backend_dir)
+
+    try:
+        import uvicorn
+
+        print(f"\n[Dashboard] Starting Omni-Cortex Dashboard")
+        print(f"[Dashboard] URL: http://{host}:{port}")
+        print(f"[Dashboard] API Docs: http://{host}:{port}/docs")
+        print(f"[Dashboard] Press Ctrl+C to stop\n")
+
+        # Open browser after short delay
+        if not no_browser:
+            def open_browser():
+                sleep(1.5)
+                webbrowser.open(f"http://{host}:{port}")
+
+            import threading
+            threading.Thread(target=open_browser, daemon=True).start()
+
+        # Run the server
+        uvicorn.run(
+            "main:app",
+            host=host,
+            port=port,
+            reload=False,
+            log_level="info",
+        )
+    finally:
+        os.chdir(original_cwd)
+
+
+def main():
+    """Main entry point for omni-cortex dashboard command."""
+    # Check for potential editable install issues early
+    warn_non_editable_install()
+
+    parser = argparse.ArgumentParser(
+        description="Start the Omni-Cortex web dashboard",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+  omni-cortex dashboard                 Start on default port 8765
+  omni-cortex dashboard --port 9000     Start on custom port
+  omni-cortex dashboard --no-browser    Don't auto-open browser
+        """
+    )
+    parser.add_argument(
+        "--host",
+        default="127.0.0.1",
+        help="Host to bind to (default: 127.0.0.1)"
+    )
+    parser.add_argument(
+        "--port", "-p",
+        type=int,
+        default=8765,
+        help="Port to run on (default: 8765)"
+    )
+    parser.add_argument(
+        "--no-browser",
+        action="store_true",
+        help="Don't automatically open browser"
+    )
+
+    args = parser.parse_args()
+
+    # Find dashboard directory
+    dashboard_dir = find_dashboard_dir()
+    if not dashboard_dir:
+        print("[Dashboard] Error: Dashboard files not found.")
+        print("[Dashboard] If you installed via pip, try reinstalling:")
+        print("  pip install --force-reinstall omni-cortex")
+        print("\nOr clone the repository:")
+        print("  git clone https://github.com/AllCytes/Omni-Cortex.git")
+        sys.exit(1)
+
+    print(f"[Dashboard] Found dashboard at: {dashboard_dir}")
+
+    # Check/install dependencies
+    if not check_dependencies():
+        print("[Dashboard] Installing required dependencies...")
+        if not install_dependencies():
+            print("[Dashboard] Error: Failed to install dependencies.")
+            print("[Dashboard] Try manually: pip install uvicorn fastapi")
+            sys.exit(1)
+
+    # Check if dist exists (built frontend)
+    dist_dir = dashboard_dir / "frontend" / "dist"
+    if not dist_dir.exists():
+        print(f"[Dashboard] Warning: Frontend not built ({dist_dir})")
+        print("[Dashboard] API will work but web UI may not be available.")
+        print("[Dashboard] To build: cd dashboard/frontend && npm install && npm run build")
+
+    # Start the server
+    try:
+        start_server(dashboard_dir, args.host, args.port, args.no_browser)
+    except KeyboardInterrupt:
+        print("\n[Dashboard] Stopped")
+    except Exception as e:
+        print(f"[Dashboard] Error: {e}")
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
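For orientation, here is a minimal usage sketch (not part of the published diff) that exercises the helpers added above without going through the `omni-cortex dashboard` entry point. The module path and function names come from the new file itself; treating them as a stable, importable API is an assumption.

# Illustrative sketch only - not part of the published diff.
from omni_cortex.dashboard import check_dependencies, find_dashboard_dir

dashboard_dir = find_dashboard_dir()      # repo checkout, pip shared-data, or sys.prefix
if dashboard_dir is None:
    print("dashboard files not found")    # main() exits with status 1 in this case
elif not check_dependencies():
    print("uvicorn/fastapi missing; main() would pip-install them")
else:
    print(f"backend lives at {dashboard_dir / 'backend'}")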
omni_cortex/database/__init__.py
ADDED
@@ -0,0 +1,24 @@
+"""Database layer for Omni Cortex - SQLite with FTS5."""
+
+from .connection import get_connection, init_database, close_connection
+from .schema import SCHEMA_VERSION, get_schema_sql
+from .sync import (
+    sync_memory_to_global,
+    delete_memory_from_global,
+    search_global_memories,
+    get_global_stats,
+    sync_all_project_memories,
+)
+
+__all__ = [
+    "get_connection",
+    "init_database",
+    "close_connection",
+    "SCHEMA_VERSION",
+    "get_schema_sql",
+    "sync_memory_to_global",
+    "delete_memory_from_global",
+    "search_global_memories",
+    "get_global_stats",
+    "sync_all_project_memories",
+]
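As a quick orientation (not part of the published diff), the re-exported surface can be exercised straight from the package namespace. The sketch below uses only signatures shown in connection.py further down; the choice of the global database is arbitrary.

# Illustrative sketch only - not part of the published diff.
from omni_cortex.database import SCHEMA_VERSION, init_database, close_connection

conn = init_database(is_global=True)   # opens the global DB, applying the schema if missing
print("schema version:", SCHEMA_VERSION)
close_connection(is_global=True)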
omni_cortex/database/connection.py
ADDED
@@ -0,0 +1,137 @@
+"""SQLite connection management for Omni Cortex."""
+
+import sqlite3
+import threading
+from pathlib import Path
+from typing import Optional
+from contextlib import contextmanager
+
+from ..config import get_project_db_path, get_global_db_path, get_project_db_dir, get_global_db_dir
+from .schema import get_schema_sql, SCHEMA_VERSION
+
+
+# Thread-local storage for connections
+_local = threading.local()
+
+# Connection cache by path
+_connections: dict[str, sqlite3.Connection] = {}
+_lock = threading.Lock()
+
+
+def _configure_connection(conn: sqlite3.Connection) -> None:
+    """Configure a SQLite connection with optimal settings."""
+    conn.row_factory = sqlite3.Row
+    conn.execute("PRAGMA foreign_keys = ON")
+    conn.execute("PRAGMA journal_mode = WAL")
+    conn.execute("PRAGMA synchronous = NORMAL")
+    conn.execute("PRAGMA cache_size = -64000")  # 64MB cache
+
+
+def get_connection(db_path: Optional[Path] = None, is_global: bool = False) -> sqlite3.Connection:
+    """Get a database connection, creating it if necessary.
+
+    Args:
+        db_path: Explicit path to database file
+        is_global: If True and no db_path, use global database
+
+    Returns:
+        SQLite connection
+    """
+    if db_path is None:
+        db_path = get_global_db_path() if is_global else get_project_db_path()
+
+    path_str = str(db_path)
+
+    with _lock:
+        if path_str not in _connections:
+            # Ensure directory exists
+            db_path.parent.mkdir(parents=True, exist_ok=True)
+
+            # Create connection
+            conn = sqlite3.connect(path_str, check_same_thread=False)
+            _configure_connection(conn)
+            _connections[path_str] = conn
+
+        return _connections[path_str]
+
+
+def init_database(db_path: Optional[Path] = None, is_global: bool = False) -> sqlite3.Connection:
+    """Initialize the database with schema.
+
+    Args:
+        db_path: Explicit path to database file
+        is_global: If True and no db_path, use global database
+
+    Returns:
+        SQLite connection
+    """
+    if db_path is None:
+        if is_global:
+            db_path = get_global_db_path()
+            db_dir = get_global_db_dir()
+        else:
+            db_path = get_project_db_path()
+            db_dir = get_project_db_dir()
+    else:
+        db_dir = db_path.parent
+
+    # Ensure directory exists
+    db_dir.mkdir(parents=True, exist_ok=True)
+
+    conn = get_connection(db_path)
+
+    # Check if schema needs initialization
+    cursor = conn.cursor()
+
+    # Check if tables exist
+    cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='memories'")
+    if cursor.fetchone() is None:
+        # Apply schema
+        conn.executescript(get_schema_sql())
+
+        # Record schema version
+        from ..utils.timestamps import now_iso
+        cursor.execute(
+            "INSERT OR REPLACE INTO schema_migrations (version, applied_at) VALUES (?, ?)",
+            (SCHEMA_VERSION, now_iso())
+        )
+        conn.commit()
+
+    return conn
+
+
+def close_connection(db_path: Optional[Path] = None, is_global: bool = False) -> None:
+    """Close a database connection.
+
+    Args:
+        db_path: Explicit path to database file
+        is_global: If True and no db_path, use global database
+    """
+    if db_path is None:
+        db_path = get_global_db_path() if is_global else get_project_db_path()
+
+    path_str = str(db_path)
+
+    with _lock:
+        if path_str in _connections:
+            _connections[path_str].close()
+            del _connections[path_str]
+
+
+def close_all_connections() -> None:
+    """Close all database connections."""
+    with _lock:
+        for conn in _connections.values():
+            conn.close()
+        _connections.clear()
+
+
+@contextmanager
+def transaction(conn: sqlite3.Connection):
+    """Context manager for database transactions."""
+    try:
+        yield conn
+        conn.commit()
+    except Exception:
+        conn.rollback()
+        raise
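A minimal sketch (not part of the published diff) of the per-path connection cache and the transaction context manager defined above. The demo path and table are hypothetical and deliberately avoid the package's own schema, so only behaviour visible in this file is relied on.

# Illustrative sketch only - not part of the published diff.
from pathlib import Path
from omni_cortex.database.connection import get_connection, transaction, close_all_connections

db = Path("/tmp/omni_cortex_demo.db")    # hypothetical explicit path
conn = get_connection(db_path=db)        # cached per path; WAL, foreign keys, Row factory
with transaction(conn):                  # commits on success, rolls back and re-raises on error
    conn.execute("CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY)")
close_all_connections()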
omni_cortex/database/migrations.py
ADDED
@@ -0,0 +1,210 @@
+"""Database migration management for Omni Cortex."""
+
+import sqlite3
+from typing import Optional
+from pathlib import Path
+
+from .schema import SCHEMA_VERSION, get_schema_sql
+from .connection import get_connection
+from ..utils.timestamps import now_iso
+
+
+# Migration definitions: version -> SQL
+MIGRATIONS: dict[str, str] = {
+    # Command analytics columns for slash command/skill tracking
+    "1.1": """
+        -- Add command analytics columns to activities table
+        ALTER TABLE activities ADD COLUMN command_name TEXT;
+        ALTER TABLE activities ADD COLUMN command_scope TEXT;
+        ALTER TABLE activities ADD COLUMN mcp_server TEXT;
+        ALTER TABLE activities ADD COLUMN skill_name TEXT;
+
+        -- Create indexes for new columns
+        CREATE INDEX IF NOT EXISTS idx_activities_command ON activities(command_name);
+        CREATE INDEX IF NOT EXISTS idx_activities_mcp ON activities(mcp_server);
+        CREATE INDEX IF NOT EXISTS idx_activities_skill ON activities(skill_name);
+    """,
+    # Natural language summary columns for activity display
+    "1.2": """
+        -- Add natural language summary columns to activities table
+        ALTER TABLE activities ADD COLUMN summary TEXT;
+        ALTER TABLE activities ADD COLUMN summary_detail TEXT;
+    """,
+    # Duration tracking columns for concrete time analysis
+    "1.3": """
+        -- Add duration tracking to sessions table
+        ALTER TABLE sessions ADD COLUMN duration_ms INTEGER;
+
+        -- Add duration tracking to session_summaries table
+        ALTER TABLE session_summaries ADD COLUMN duration_ms INTEGER;
+        ALTER TABLE session_summaries ADD COLUMN tool_duration_breakdown TEXT;
+
+        -- Create index for duration queries
+        CREATE INDEX IF NOT EXISTS idx_activities_duration ON activities(duration_ms);
+        CREATE INDEX IF NOT EXISTS idx_sessions_duration ON sessions(duration_ms);
+    """,
+    # User message tracking for style analysis
+    "1.4": """
+        -- User messages table for tracking all user prompts
+        CREATE TABLE IF NOT EXISTS user_messages (
+            id TEXT PRIMARY KEY,                -- msg_{timestamp}_{random}
+            session_id TEXT,
+            timestamp TEXT NOT NULL,            -- ISO 8601
+            content TEXT NOT NULL,              -- The full user message
+            word_count INTEGER,
+            char_count INTEGER,
+            line_count INTEGER,
+            has_code_blocks INTEGER DEFAULT 0,
+            has_questions INTEGER DEFAULT 0,
+            has_commands INTEGER DEFAULT 0,     -- Starts with /
+            tone_indicators TEXT,               -- JSON: detected tone markers
+            project_path TEXT,
+            metadata TEXT,                      -- JSON for extensibility
+            FOREIGN KEY (session_id) REFERENCES sessions(id)
+        );
+
+        -- User style profile for aggregated style analysis
+        CREATE TABLE IF NOT EXISTS user_style_profiles (
+            id TEXT PRIMARY KEY,                -- profile_{timestamp}_{random}
+            project_path TEXT,                  -- NULL for global profile
+            total_messages INTEGER DEFAULT 0,
+            avg_word_count REAL,
+            avg_char_count REAL,
+            common_phrases TEXT,                -- JSON array of frequent phrases
+            vocabulary_richness REAL,           -- Type-token ratio
+            formality_score REAL,               -- 0-100 scale
+            question_frequency REAL,            -- % of messages with questions
+            command_frequency REAL,             -- % of messages starting with /
+            code_block_frequency REAL,          -- % with code blocks
+            punctuation_style TEXT,             -- JSON: punctuation patterns
+            greeting_patterns TEXT,             -- JSON: how user starts conversations
+            instruction_style TEXT,             -- JSON: how user gives instructions
+            sample_messages TEXT,               -- JSON array of representative samples
+            created_at TEXT NOT NULL,
+            updated_at TEXT NOT NULL,
+            metadata TEXT
+        );
+
+        -- Indexes for user message queries
+        CREATE INDEX IF NOT EXISTS idx_user_messages_session ON user_messages(session_id);
+        CREATE INDEX IF NOT EXISTS idx_user_messages_timestamp ON user_messages(timestamp DESC);
+        CREATE INDEX IF NOT EXISTS idx_user_messages_project ON user_messages(project_path);
+        CREATE INDEX IF NOT EXISTS idx_user_style_project ON user_style_profiles(project_path);
+    """,
+}
+
+
+def get_current_version(conn: sqlite3.Connection) -> Optional[str]:
+    """Get the current schema version from the database."""
+    try:
+        cursor = conn.execute(
+            "SELECT version FROM schema_migrations ORDER BY applied_at DESC LIMIT 1"
+        )
+        row = cursor.fetchone()
+        return row[0] if row else None
+    except sqlite3.OperationalError:
+        # Table doesn't exist yet
+        return None
+
+
+def strip_sql_comments(sql: str) -> str:
+    """Strip SQL comments from a statement while preserving inline comments in strings."""
+    import re
+    lines = sql.split('\n')
+    cleaned_lines = []
+    for line in lines:
+        # Remove line comments (-- ...) but only if not inside a string
+        # Simple approach: just strip everything after -- if not in CREATE TABLE definition
+        # For CREATE TABLE, we need to keep the structure
+        stripped = line.strip()
+        if stripped.startswith('--'):
+            continue  # Skip pure comment lines
+        # Keep lines that have SQL content (even if they have trailing comments)
+        cleaned_lines.append(line)
+    return '\n'.join(cleaned_lines)
+
+
+def apply_migration(conn: sqlite3.Connection, version: str, sql: str) -> None:
+    """Apply a single migration.
+
+    Handles ALTER TABLE ADD COLUMN gracefully - if column exists, skips it.
+    """
+    # First, strip pure comment lines from the SQL
+    sql = strip_sql_comments(sql)
+
+    # Split into individual statements and apply each
+    for statement in sql.strip().split(';'):
+        statement = statement.strip()
+        if not statement:
+            continue
+
+        # Skip if the remaining statement is just whitespace or pure comments
+        non_comment_content = '\n'.join(
+            line for line in statement.split('\n')
+            if line.strip() and not line.strip().startswith('--')
+        ).strip()
+        if not non_comment_content:
+            continue
+
+        try:
+            conn.execute(statement)
+            conn.commit()  # Commit each statement so next one sees the change
+        except sqlite3.OperationalError as e:
+            error_msg = str(e).lower()
+            # Ignore "duplicate column" errors - column already exists
+            if "duplicate column" in error_msg:
+                continue
+            # Ignore "index already exists" errors
+            if "already exists" in error_msg:
+                continue
+            # Ignore "no such column" for indexes on columns that may not exist yet
+            # (will be created in a later migration)
+            if "no such column" in error_msg and "CREATE INDEX" in statement.upper():
+                continue
+            # Ignore "table already exists" errors
+            if "already exists" in error_msg:
+                continue
+            raise
+
+    conn.execute(
+        "INSERT INTO schema_migrations (version, applied_at) VALUES (?, ?)",
+        (version, now_iso())
+    )
+    conn.commit()
+
+
+def migrate(db_path: Optional[Path] = None, is_global: bool = False) -> str:
+    """Run all pending migrations.
+
+    Returns:
+        The final schema version
+    """
+    conn = get_connection(db_path, is_global)
+    current = get_current_version(conn)
+
+    if current is None:
+        # Fresh database - apply full schema
+        conn.executescript(get_schema_sql())
+        conn.execute(
+            "INSERT INTO schema_migrations (version, applied_at) VALUES (?, ?)",
+            (SCHEMA_VERSION, now_iso())
+        )
+        conn.commit()
+        return SCHEMA_VERSION
+
+    # Apply pending migrations in order
+    versions = sorted(MIGRATIONS.keys())
+    for version in versions:
+        if version > current:
+            apply_migration(conn, version, MIGRATIONS[version])
+            current = version
+
+    return current
+
+
+def needs_migration(conn: sqlite3.Connection) -> bool:
+    """Check if database needs migration."""
+    current = get_current_version(conn)
+    if current is None:
+        return True
+    return any(v > current for v in MIGRATIONS.keys())