omni-cortex 1.17.0__py3-none-any.whl → 1.17.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omni_cortex/__init__.py +3 -0
- omni_cortex/categorization/__init__.py +9 -0
- omni_cortex/categorization/auto_tags.py +166 -0
- omni_cortex/categorization/auto_type.py +165 -0
- omni_cortex/config.py +141 -0
- omni_cortex/dashboard.py +232 -0
- omni_cortex/database/__init__.py +24 -0
- omni_cortex/database/connection.py +137 -0
- omni_cortex/database/migrations.py +210 -0
- omni_cortex/database/schema.py +212 -0
- omni_cortex/database/sync.py +421 -0
- omni_cortex/decay/__init__.py +7 -0
- omni_cortex/decay/importance.py +147 -0
- omni_cortex/embeddings/__init__.py +35 -0
- omni_cortex/embeddings/local.py +442 -0
- omni_cortex/models/__init__.py +20 -0
- omni_cortex/models/activity.py +265 -0
- omni_cortex/models/agent.py +144 -0
- omni_cortex/models/memory.py +395 -0
- omni_cortex/models/relationship.py +206 -0
- omni_cortex/models/session.py +290 -0
- omni_cortex/resources/__init__.py +1 -0
- omni_cortex/search/__init__.py +22 -0
- omni_cortex/search/hybrid.py +197 -0
- omni_cortex/search/keyword.py +204 -0
- omni_cortex/search/ranking.py +127 -0
- omni_cortex/search/semantic.py +232 -0
- omni_cortex/server.py +360 -0
- omni_cortex/setup.py +278 -0
- omni_cortex/tools/__init__.py +13 -0
- omni_cortex/tools/activities.py +453 -0
- omni_cortex/tools/memories.py +536 -0
- omni_cortex/tools/sessions.py +311 -0
- omni_cortex/tools/utilities.py +477 -0
- omni_cortex/utils/__init__.py +13 -0
- omni_cortex/utils/formatting.py +282 -0
- omni_cortex/utils/ids.py +72 -0
- omni_cortex/utils/timestamps.py +129 -0
- omni_cortex/utils/truncation.py +111 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/main.py +43 -13
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/METADATA +1 -1
- omni_cortex-1.17.2.dist-info/RECORD +65 -0
- omni_cortex-1.17.0.dist-info/RECORD +0 -26
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/stop.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/WHEEL +0 -0
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/entry_points.txt +0 -0
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
"""Database schema definitions for Omni Cortex."""
|
|
2
|
+
|
|
3
|
+
SCHEMA_VERSION = "1.0"
|
|
4
|
+
|
|
5
|
+
# Main schema SQL
|
|
6
|
+
SCHEMA_SQL = """
|
|
7
|
+
-- ============================================
|
|
8
|
+
-- OMNI CORTEX MCP DATABASE SCHEMA v1.0
|
|
9
|
+
-- ============================================
|
|
10
|
+
|
|
11
|
+
-- Sessions Table
|
|
12
|
+
CREATE TABLE IF NOT EXISTS sessions (
|
|
13
|
+
id TEXT PRIMARY KEY, -- sess_{timestamp}_{random}
|
|
14
|
+
project_path TEXT NOT NULL,
|
|
15
|
+
started_at TEXT NOT NULL, -- ISO 8601
|
|
16
|
+
ended_at TEXT,
|
|
17
|
+
summary TEXT,
|
|
18
|
+
tags TEXT, -- JSON array
|
|
19
|
+
metadata TEXT -- JSON object
|
|
20
|
+
);
|
|
21
|
+
|
|
22
|
+
-- Agents Table
|
|
23
|
+
CREATE TABLE IF NOT EXISTS agents (
|
|
24
|
+
id TEXT PRIMARY KEY, -- Agent ID from Claude Code
|
|
25
|
+
name TEXT,
|
|
26
|
+
type TEXT NOT NULL DEFAULT 'main', -- main, subagent, tool
|
|
27
|
+
first_seen TEXT NOT NULL,
|
|
28
|
+
last_seen TEXT NOT NULL,
|
|
29
|
+
total_activities INTEGER DEFAULT 0,
|
|
30
|
+
metadata TEXT
|
|
31
|
+
);
|
|
32
|
+
|
|
33
|
+
-- Activities Table (Layer 1)
|
|
34
|
+
CREATE TABLE IF NOT EXISTS activities (
|
|
35
|
+
id TEXT PRIMARY KEY, -- act_{timestamp}_{random}
|
|
36
|
+
session_id TEXT,
|
|
37
|
+
agent_id TEXT,
|
|
38
|
+
timestamp TEXT NOT NULL, -- ISO 8601 with timezone
|
|
39
|
+
event_type TEXT NOT NULL, -- pre_tool_use, post_tool_use, etc.
|
|
40
|
+
tool_name TEXT,
|
|
41
|
+
tool_input TEXT, -- JSON (truncated to 10KB)
|
|
42
|
+
tool_output TEXT, -- JSON (truncated to 10KB)
|
|
43
|
+
duration_ms INTEGER,
|
|
44
|
+
success INTEGER DEFAULT 1,
|
|
45
|
+
error_message TEXT,
|
|
46
|
+
project_path TEXT,
|
|
47
|
+
file_path TEXT,
|
|
48
|
+
metadata TEXT,
|
|
49
|
+
-- Command analytics columns (v1.1)
|
|
50
|
+
command_name TEXT,
|
|
51
|
+
command_scope TEXT,
|
|
52
|
+
mcp_server TEXT,
|
|
53
|
+
skill_name TEXT,
|
|
54
|
+
-- Natural language summaries (v1.2)
|
|
55
|
+
summary TEXT,
|
|
56
|
+
summary_detail TEXT,
|
|
57
|
+
FOREIGN KEY (session_id) REFERENCES sessions(id),
|
|
58
|
+
FOREIGN KEY (agent_id) REFERENCES agents(id)
|
|
59
|
+
);
|
|
60
|
+
|
|
61
|
+
-- Memories Table (Layer 2)
|
|
62
|
+
CREATE TABLE IF NOT EXISTS memories (
|
|
63
|
+
id TEXT PRIMARY KEY, -- mem_{timestamp}_{random}
|
|
64
|
+
content TEXT NOT NULL,
|
|
65
|
+
type TEXT NOT NULL DEFAULT 'general',
|
|
66
|
+
tags TEXT, -- JSON array
|
|
67
|
+
context TEXT,
|
|
68
|
+
|
|
69
|
+
-- Timestamps
|
|
70
|
+
created_at TEXT NOT NULL,
|
|
71
|
+
updated_at TEXT NOT NULL,
|
|
72
|
+
last_accessed TEXT NOT NULL,
|
|
73
|
+
last_verified TEXT,
|
|
74
|
+
|
|
75
|
+
-- Usage
|
|
76
|
+
access_count INTEGER DEFAULT 0,
|
|
77
|
+
|
|
78
|
+
-- Importance/Decay
|
|
79
|
+
importance_score REAL DEFAULT 50.0, -- 0-100
|
|
80
|
+
manual_importance INTEGER, -- User override
|
|
81
|
+
|
|
82
|
+
-- Freshness
|
|
83
|
+
status TEXT DEFAULT 'fresh', -- fresh, needs_review, outdated, archived
|
|
84
|
+
|
|
85
|
+
-- Attribution
|
|
86
|
+
source_session_id TEXT,
|
|
87
|
+
source_agent_id TEXT,
|
|
88
|
+
source_activity_id TEXT,
|
|
89
|
+
|
|
90
|
+
-- Project
|
|
91
|
+
project_path TEXT,
|
|
92
|
+
file_context TEXT, -- JSON array
|
|
93
|
+
|
|
94
|
+
-- Embedding
|
|
95
|
+
has_embedding INTEGER DEFAULT 0,
|
|
96
|
+
|
|
97
|
+
metadata TEXT,
|
|
98
|
+
|
|
99
|
+
FOREIGN KEY (source_session_id) REFERENCES sessions(id),
|
|
100
|
+
FOREIGN KEY (source_agent_id) REFERENCES agents(id),
|
|
101
|
+
FOREIGN KEY (source_activity_id) REFERENCES activities(id)
|
|
102
|
+
);
|
|
103
|
+
|
|
104
|
+
-- FTS5 for Full-Text Search
|
|
105
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
|
|
106
|
+
content, context, tags,
|
|
107
|
+
content=memories,
|
|
108
|
+
content_rowid=rowid,
|
|
109
|
+
tokenize='porter unicode61'
|
|
110
|
+
);
|
|
111
|
+
|
|
112
|
+
-- Triggers to keep FTS in sync with memories table
|
|
113
|
+
CREATE TRIGGER IF NOT EXISTS memories_ai AFTER INSERT ON memories BEGIN
|
|
114
|
+
INSERT INTO memories_fts(rowid, content, context, tags)
|
|
115
|
+
VALUES (NEW.rowid, NEW.content, NEW.context, NEW.tags);
|
|
116
|
+
END;
|
|
117
|
+
|
|
118
|
+
CREATE TRIGGER IF NOT EXISTS memories_ad AFTER DELETE ON memories BEGIN
|
|
119
|
+
INSERT INTO memories_fts(memories_fts, rowid, content, context, tags)
|
|
120
|
+
VALUES ('delete', OLD.rowid, OLD.content, OLD.context, OLD.tags);
|
|
121
|
+
END;
|
|
122
|
+
|
|
123
|
+
CREATE TRIGGER IF NOT EXISTS memories_au AFTER UPDATE ON memories BEGIN
|
|
124
|
+
INSERT INTO memories_fts(memories_fts, rowid, content, context, tags)
|
|
125
|
+
VALUES ('delete', OLD.rowid, OLD.content, OLD.context, OLD.tags);
|
|
126
|
+
INSERT INTO memories_fts(rowid, content, context, tags)
|
|
127
|
+
VALUES (NEW.rowid, NEW.content, NEW.context, NEW.tags);
|
|
128
|
+
END;
|
|
129
|
+
|
|
130
|
+
-- Memory Relationships
|
|
131
|
+
CREATE TABLE IF NOT EXISTS memory_relationships (
|
|
132
|
+
id TEXT PRIMARY KEY,
|
|
133
|
+
source_memory_id TEXT NOT NULL,
|
|
134
|
+
target_memory_id TEXT NOT NULL,
|
|
135
|
+
relationship_type TEXT NOT NULL, -- related_to, supersedes, derived_from, contradicts
|
|
136
|
+
strength REAL DEFAULT 1.0,
|
|
137
|
+
created_at TEXT NOT NULL,
|
|
138
|
+
metadata TEXT,
|
|
139
|
+
FOREIGN KEY (source_memory_id) REFERENCES memories(id) ON DELETE CASCADE,
|
|
140
|
+
FOREIGN KEY (target_memory_id) REFERENCES memories(id) ON DELETE CASCADE,
|
|
141
|
+
UNIQUE(source_memory_id, target_memory_id, relationship_type)
|
|
142
|
+
);
|
|
143
|
+
|
|
144
|
+
-- Activity-Memory Links
|
|
145
|
+
CREATE TABLE IF NOT EXISTS activity_memory_links (
|
|
146
|
+
activity_id TEXT NOT NULL,
|
|
147
|
+
memory_id TEXT NOT NULL,
|
|
148
|
+
link_type TEXT NOT NULL, -- created, accessed, updated, referenced
|
|
149
|
+
created_at TEXT NOT NULL,
|
|
150
|
+
PRIMARY KEY (activity_id, memory_id, link_type),
|
|
151
|
+
FOREIGN KEY (activity_id) REFERENCES activities(id) ON DELETE CASCADE,
|
|
152
|
+
FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
|
|
153
|
+
);
|
|
154
|
+
|
|
155
|
+
-- Embeddings
|
|
156
|
+
CREATE TABLE IF NOT EXISTS embeddings (
|
|
157
|
+
id TEXT PRIMARY KEY,
|
|
158
|
+
memory_id TEXT NOT NULL UNIQUE,
|
|
159
|
+
model_name TEXT NOT NULL, -- 'all-MiniLM-L6-v2'
|
|
160
|
+
vector BLOB NOT NULL, -- float32 array
|
|
161
|
+
dimensions INTEGER NOT NULL, -- 384
|
|
162
|
+
created_at TEXT NOT NULL,
|
|
163
|
+
FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
|
|
164
|
+
);
|
|
165
|
+
|
|
166
|
+
-- Session Summaries
|
|
167
|
+
CREATE TABLE IF NOT EXISTS session_summaries (
|
|
168
|
+
id TEXT PRIMARY KEY,
|
|
169
|
+
session_id TEXT NOT NULL UNIQUE,
|
|
170
|
+
key_learnings TEXT, -- JSON array
|
|
171
|
+
key_decisions TEXT, -- JSON array
|
|
172
|
+
key_errors TEXT, -- JSON array
|
|
173
|
+
files_modified TEXT, -- JSON array
|
|
174
|
+
tools_used TEXT, -- JSON object
|
|
175
|
+
total_activities INTEGER DEFAULT 0,
|
|
176
|
+
total_memories_created INTEGER DEFAULT 0,
|
|
177
|
+
created_at TEXT NOT NULL,
|
|
178
|
+
FOREIGN KEY (session_id) REFERENCES sessions(id) ON DELETE CASCADE
|
|
179
|
+
);
|
|
180
|
+
|
|
181
|
+
-- Configuration
|
|
182
|
+
CREATE TABLE IF NOT EXISTS config (
|
|
183
|
+
key TEXT PRIMARY KEY,
|
|
184
|
+
value TEXT NOT NULL,
|
|
185
|
+
updated_at TEXT NOT NULL
|
|
186
|
+
);
|
|
187
|
+
|
|
188
|
+
-- Schema Migrations
|
|
189
|
+
CREATE TABLE IF NOT EXISTS schema_migrations (
|
|
190
|
+
version TEXT PRIMARY KEY,
|
|
191
|
+
applied_at TEXT NOT NULL
|
|
192
|
+
);
|
|
193
|
+
|
|
194
|
+
-- Indexes
|
|
195
|
+
CREATE INDEX IF NOT EXISTS idx_activities_session ON activities(session_id);
|
|
196
|
+
CREATE INDEX IF NOT EXISTS idx_activities_agent ON activities(agent_id);
|
|
197
|
+
CREATE INDEX IF NOT EXISTS idx_activities_timestamp ON activities(timestamp DESC);
|
|
198
|
+
CREATE INDEX IF NOT EXISTS idx_activities_tool ON activities(tool_name);
|
|
199
|
+
CREATE INDEX IF NOT EXISTS idx_memories_type ON memories(type);
|
|
200
|
+
CREATE INDEX IF NOT EXISTS idx_memories_status ON memories(status);
|
|
201
|
+
CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project_path);
|
|
202
|
+
CREATE INDEX IF NOT EXISTS idx_memories_importance ON memories(importance_score DESC);
|
|
203
|
+
CREATE INDEX IF NOT EXISTS idx_memories_accessed ON memories(last_accessed DESC);
|
|
204
|
+
CREATE INDEX IF NOT EXISTS idx_memories_created ON memories(created_at DESC);
|
|
205
|
+
CREATE INDEX IF NOT EXISTS idx_relationships_source ON memory_relationships(source_memory_id);
|
|
206
|
+
CREATE INDEX IF NOT EXISTS idx_relationships_target ON memory_relationships(target_memory_id);
|
|
207
|
+
"""
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def get_schema_sql() -> str:
|
|
211
|
+
"""Get the complete schema SQL."""
|
|
212
|
+
return SCHEMA_SQL
|
|
@@ -0,0 +1,421 @@
|
|
|
1
|
+
"""Global index synchronization for cross-project memory search.
|
|
2
|
+
|
|
3
|
+
This module handles syncing memories from project-local databases to the
|
|
4
|
+
global database at ~/.omni-cortex/global.db, enabling cross-project search.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import logging
|
|
9
|
+
import sqlite3
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Optional
|
|
12
|
+
|
|
13
|
+
from .connection import get_connection, init_database
|
|
14
|
+
from ..config import get_global_db_path, get_project_path, load_config
|
|
15
|
+
from ..utils.timestamps import now_iso
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def sync_memory_to_global(
    memory_id: str,
    content: str,
    memory_type: str,
    tags: list[str],
    context: Optional[str],
    importance_score: float,
    status: str,
    project_path: str,
    created_at: str,
    updated_at: str,
) -> bool:
    """Sync a single memory to the global index.

    Performs an upsert keyed on the memory id: a new row is inserted with
    access_count=0 and has_embedding=0, while an existing row has its
    content/type/tags/context/updated_at/importance_score/status refreshed.
    Failures are logged and swallowed (best-effort sync).

    Args:
        memory_id: The memory ID
        content: Memory content
        memory_type: Memory type
        tags: List of tags (serialized to a JSON array for storage)
        context: Optional context
        importance_score: Importance score
        status: Memory status
        project_path: Source project path
        created_at: Creation timestamp
        updated_at: Update timestamp

    Returns:
        True if synced successfully, False when sync is disabled or fails
    """
    # Respect the user-level opt-out before touching the global database.
    config = load_config()
    if not config.global_sync_enabled:
        return False

    try:
        # is_global=True targets the shared cross-project database
        # (~/.omni-cortex/global.db per the module docstring).
        global_conn = init_database(is_global=True)

        cursor = global_conn.cursor()

        # Upsert the memory to global index
        # NOTE: on conflict, project_path/created_at/last_accessed are
        # intentionally left as originally inserted.
        cursor.execute(
            """
            INSERT INTO memories (
                id, content, type, tags, context,
                created_at, updated_at, last_accessed,
                access_count, importance_score, status,
                project_path, has_embedding
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, 0)
            ON CONFLICT(id) DO UPDATE SET
                content = excluded.content,
                type = excluded.type,
                tags = excluded.tags,
                context = excluded.context,
                updated_at = excluded.updated_at,
                importance_score = excluded.importance_score,
                status = excluded.status
            """,
            (
                memory_id,
                content,
                memory_type,
                json.dumps(tags),  # stored as a JSON array string
                context,
                created_at,
                updated_at,
                now_iso(),  # last_accessed is stamped at sync time
                importance_score,
                status,
                project_path,
            ),
        )

        global_conn.commit()
        logger.debug(f"Synced memory {memory_id} to global index")
        return True

    except Exception as e:
        # Best-effort: a failed global sync must not break the caller's
        # project-local write path, so only warn.
        logger.warning(f"Failed to sync memory {memory_id} to global: {e}")
        return False
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def delete_memory_from_global(memory_id: str) -> bool:
    """Remove a memory from the global index.

    Best-effort: when global sync is disabled, or the delete fails, this
    logs (on failure) and returns False instead of raising.

    Args:
        memory_id: The memory ID to remove

    Returns:
        True if a row was actually deleted, False otherwise
    """
    # Honor the global-sync opt-out before opening the global database.
    if not load_config().global_sync_enabled:
        return False

    try:
        conn = init_database(is_global=True)
        cur = conn.cursor()
        cur.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
        conn.commit()

        # rowcount tells us whether the id existed in the global index.
        removed = cur.rowcount > 0
        if removed:
            logger.debug(f"Removed memory {memory_id} from global index")
        return removed

    except Exception as e:
        logger.warning(f"Failed to remove memory {memory_id} from global: {e}")
        return False
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def search_global_memories(
    query: str,
    type_filter: Optional[str] = None,
    tags_filter: Optional[list[str]] = None,
    project_filter: Optional[str] = None,
    limit: int = 20,
) -> list[dict]:
    """Search memories across all projects via global index.

    Uses FTS5 full-text search ranked by bm25; if the FTS query fails
    (e.g. missing/corrupt FTS table) it falls back to a LIKE-based scan.
    Archived memories are always excluded. Any other error is logged and
    an empty list is returned.

    Args:
        query: Search query
        type_filter: Filter by memory type
        tags_filter: Filter by tags
        project_filter: Filter by project path (substring match)
        limit: Maximum results

    Returns:
        List of memory dicts with project_path included
    """
    try:
        global_conn = init_database(is_global=True)
        cursor = global_conn.cursor()

        # Escape FTS5 special characters
        fts_query = _escape_fts_query(query)

        # Build WHERE conditions
        # NOTE: params order must mirror the placeholder order in the SQL:
        # MATCH param first, then each filter, then LIMIT last.
        where_conditions = []
        params: list = [fts_query]

        if type_filter:
            where_conditions.append("m.type = ?")
            params.append(type_filter)

        if project_filter:
            where_conditions.append("m.project_path LIKE ?")
            params.append(f"%{project_filter}%")

        # Always hide archived memories from cross-project search.
        where_conditions.append("m.status != 'archived'")

        if tags_filter:
            # Match any of the requested tags against the JSON-array string
            # (tags are stored as e.g. '["a", "b"]', hence the quoted LIKE).
            tag_conditions = []
            for tag in tags_filter:
                tag_conditions.append("m.tags LIKE ?")
                params.append(f'%"{tag}"%')
            where_conditions.append(f"({' OR '.join(tag_conditions)})")

        where_sql = ""
        if where_conditions:
            where_sql = "AND " + " AND ".join(where_conditions)

        params.append(limit)

        try:
            # where_sql is built only from fixed fragments above; all user
            # values go through ? placeholders, so the f-string is safe.
            cursor.execute(
                f"""
                SELECT m.*, bm25(memories_fts) as score
                FROM memories_fts fts
                JOIN memories m ON fts.rowid = m.rowid
                WHERE memories_fts MATCH ?
                {where_sql}
                ORDER BY score
                LIMIT ?
                """,
                params,
            )
        except sqlite3.OperationalError:
            # Fallback to LIKE search if FTS fails
            return _fallback_global_search(
                global_conn, query, type_filter, tags_filter, project_filter, limit
            )

        results = []
        for row in cursor.fetchall():
            # Tags are stored as a JSON string; decode, defaulting to [].
            # NOTE(review): row["..."] access assumes the connection uses
            # sqlite3.Row (or similar) as row_factory — confirm in
            # init_database.
            tags = row["tags"]
            if tags and isinstance(tags, str):
                try:
                    tags = json.loads(tags)
                except json.JSONDecodeError:
                    tags = []

            results.append({
                "id": row["id"],
                "content": row["content"],
                "type": row["type"],
                "tags": tags,
                "context": row["context"],
                "importance_score": row["importance_score"],
                "status": row["status"],
                "project_path": row["project_path"],
                "created_at": row["created_at"],
                "updated_at": row["updated_at"],
                "score": -row["score"],  # bm25 returns negative scores
            })

        return results

    except Exception as e:
        logger.error(f"Global search failed: {e}")
        return []
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def _escape_fts_query(query: str) -> str:
|
|
233
|
+
"""Escape special characters for FTS5 query."""
|
|
234
|
+
special_chars = ['"', "'", "(", ")", "*", ":", "^", "-", "+"]
|
|
235
|
+
escaped = query
|
|
236
|
+
for char in special_chars:
|
|
237
|
+
escaped = escaped.replace(char, " ")
|
|
238
|
+
|
|
239
|
+
words = escaped.split()
|
|
240
|
+
if not words:
|
|
241
|
+
return '""'
|
|
242
|
+
|
|
243
|
+
if len(words) == 1:
|
|
244
|
+
return f'"{words[0]}"'
|
|
245
|
+
|
|
246
|
+
return " OR ".join(f'"{word}"' for word in words)
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _fallback_global_search(
    conn: sqlite3.Connection,
    query: str,
    type_filter: Optional[str],
    tags_filter: Optional[list[str]],
    project_filter: Optional[str],
    limit: int,
) -> list[dict]:
    """Fallback to LIKE search if FTS5 fails.

    Mirrors search_global_memories() filtering semantics using plain LIKE
    matching; results are ordered by importance then recency, and each row
    is given a crude relevance score (count of query words present in
    content+context).

    Args:
        conn: Open connection to the global database
        query: Raw (unescaped) search query
        type_filter: Filter by memory type
        tags_filter: Filter by tags (any-match)
        project_filter: Filter by project path substring
        limit: Maximum results

    Returns:
        List of memory dicts in the same shape as the FTS search path
    """
    words = query.lower().split()
    if not words:
        return []

    # NOTE: params must be appended in the same order the ? placeholders
    # appear in the assembled WHERE clause, with LIMIT last.
    where_conditions = []
    params: list = []

    # Match any word in content or context
    word_conditions = []
    for word in words:
        word_conditions.append("(LOWER(content) LIKE ? OR LOWER(context) LIKE ?)")
        params.extend([f"%{word}%", f"%{word}%"])
    where_conditions.append(f"({' OR '.join(word_conditions)})")

    if type_filter:
        where_conditions.append("type = ?")
        params.append(type_filter)

    if project_filter:
        where_conditions.append("project_path LIKE ?")
        params.append(f"%{project_filter}%")

    # Archived memories are never surfaced by global search.
    where_conditions.append("status != 'archived'")

    if tags_filter:
        # Tags are stored as a JSON-array string, hence the quoted LIKE.
        tag_conds = []
        for tag in tags_filter:
            tag_conds.append("tags LIKE ?")
            params.append(f'%"{tag}"%')
        where_conditions.append(f"({' OR '.join(tag_conds)})")

    params.append(limit)

    cursor = conn.cursor()
    # Only fixed fragments are interpolated; user values stay in params.
    cursor.execute(
        f"""
        SELECT *
        FROM memories
        WHERE {' AND '.join(where_conditions)}
        ORDER BY importance_score DESC, updated_at DESC
        LIMIT ?
        """,
        params,
    )

    results = []
    for row in cursor.fetchall():
        # NOTE(review): row["..."] access assumes sqlite3.Row row_factory
        # on this connection — confirm in init_database.
        tags = row["tags"]
        if tags and isinstance(tags, str):
            try:
                tags = json.loads(tags)
            except json.JSONDecodeError:
                tags = []

        # Crude relevance: number of distinct query words found in the
        # combined content + context text.
        content = (row["content"] + " " + (row["context"] or "")).lower()
        score = sum(1 for word in words if word in content)

        results.append({
            "id": row["id"],
            "content": row["content"],
            "type": row["type"],
            "tags": tags,
            "context": row["context"],
            "importance_score": row["importance_score"],
            "status": row["status"],
            "project_path": row["project_path"],
            "created_at": row["created_at"],
            "updated_at": row["updated_at"],
            "score": float(score),
        })

    return results
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
def get_global_stats() -> dict:
    """Get statistics from the global index.

    Returns:
        Dict with "total_memories", "by_project" and "by_type" counts;
        on failure, a dict with a single "error" key describing it.
    """
    try:
        conn = init_database(is_global=True)
        cur = conn.cursor()

        # Overall memory count across every project.
        cur.execute("SELECT COUNT(*) FROM memories")
        total = cur.fetchone()[0]

        # Per-project breakdown, largest projects first.
        cur.execute("""
            SELECT project_path, COUNT(*) as cnt
            FROM memories
            GROUP BY project_path
            ORDER BY cnt DESC
        """)
        by_project = {rec["project_path"]: rec["cnt"] for rec in cur.fetchall()}

        # Per-type breakdown, most common types first.
        cur.execute("""
            SELECT type, COUNT(*) as cnt
            FROM memories
            GROUP BY type
            ORDER BY cnt DESC
        """)
        by_type = {rec["type"]: rec["cnt"] for rec in cur.fetchall()}

        return {
            "total_memories": total,
            "by_project": by_project,
            "by_type": by_type,
        }

    except Exception as e:
        logger.error(f"Failed to get global stats: {e}")
        return {"error": str(e)}
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
def sync_all_project_memories() -> int:
    """Sync all memories from current project to global index.

    Reads every non-archived memory from the project-local database and
    upserts it into the global index via sync_memory_to_global(). A
    failure reading the project database logs an error and returns 0;
    individual per-memory sync failures just reduce the count.

    Returns:
        Number of memories synced
    """
    # Respect the opt-out once up front; sync_memory_to_global also checks
    # it per call, but this avoids opening the project DB unnecessarily.
    config = load_config()
    if not config.global_sync_enabled:
        return 0

    try:
        # Default (no is_global flag) opens the project-local database.
        project_conn = init_database()
        project_path = str(get_project_path())

        cursor = project_conn.cursor()
        cursor.execute("SELECT * FROM memories WHERE status != 'archived'")

        count = 0
        for row in cursor.fetchall():
            # Decode the JSON-array tags string; any missing/invalid value
            # degrades to an empty list.
            # NOTE(review): row["..."] access assumes sqlite3.Row
            # row_factory — confirm in init_database.
            tags = row["tags"]
            if tags and isinstance(tags, str):
                try:
                    tags = json.loads(tags)
                except json.JSONDecodeError:
                    tags = []
            else:
                tags = []

            synced = sync_memory_to_global(
                memory_id=row["id"],
                content=row["content"],
                memory_type=row["type"],
                tags=tags,
                context=row["context"],
                importance_score=row["importance_score"],
                status=row["status"],
                project_path=project_path,
                created_at=row["created_at"],
                updated_at=row["updated_at"],
            )
            if synced:
                count += 1

        logger.info(f"Synced {count} memories to global index")
        return count

    except Exception as e:
        logger.error(f"Failed to sync project memories: {e}")
        return 0
|