omni-cortex 1.0.1__tar.gz → 1.0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/PKG-INFO +23 -2
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/README.md +22 -1
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/hooks/post_tool_use.py +9 -3
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/hooks/pre_tool_use.py +18 -3
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/config.py +2 -2
- omni_cortex-1.0.2/omni_cortex/database/__init__.py +24 -0
- omni_cortex-1.0.2/omni_cortex/database/sync.py +421 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/embeddings/local.py +160 -80
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/models/activity.py +14 -13
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/server.py +77 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/tools/memories.py +67 -8
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/tools/sessions.py +1 -1
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/tools/utilities.py +191 -1
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/utils/formatting.py +25 -3
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/pyproject.toml +1 -1
- omni_cortex-1.0.2/scripts/import_ken_memories.py +261 -0
- omni_cortex-1.0.1/omni_cortex/database/__init__.py +0 -12
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/.gitignore +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/LICENSE +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/hooks/stop.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/categorization/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/categorization/auto_tags.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/categorization/auto_type.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/database/connection.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/database/migrations.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/database/schema.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/decay/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/decay/importance.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/embeddings/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/models/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/models/agent.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/models/memory.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/models/relationship.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/models/session.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/resources/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/search/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/search/hybrid.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/search/keyword.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/search/ranking.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/search/semantic.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/setup.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/tools/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/tools/activities.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/utils/__init__.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/utils/ids.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/utils/timestamps.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/omni_cortex/utils/truncation.py +0 -0
- {omni_cortex-1.0.1 → omni_cortex-1.0.2}/scripts/setup.py +0 -0
--- omni_cortex-1.0.1/PKG-INFO
+++ omni_cortex-1.0.2/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omni-cortex
-Version: 1.0.1
+Version: 1.0.2
 Summary: Universal Memory MCP for Claude Code - dual-layer activity logging and knowledge storage
 Project-URL: Homepage, https://github.com/AllCytes/Omni-Cortex
 Project-URL: Repository, https://github.com/AllCytes/Omni-Cortex
@@ -43,7 +43,7 @@ A universal memory system for Claude Code that combines activity logging with in
 
 - **Zero Configuration**: Works out of the box - just install and run setup
 - **Dual-Layer Storage**: Activity logging (audit trail) + Knowledge store (memories)
-- **
+- **18 MCP Tools**: Full-featured API for memory management, activity tracking, session continuity, and cross-project search
 - **Semantic Search**: AI-powered search using sentence-transformers (optional)
 - **Hybrid Search**: Combines keyword (FTS5) + semantic search for best results
 - **Full-Text Search**: SQLite FTS5-powered keyword search with smart ranking
@@ -163,6 +163,14 @@ pip uninstall omni-cortex
 | `cortex_review_memories` | Review and update memory freshness |
 | `cortex_export` | Export data to markdown or JSON |
 
+### Global Tools (3)
+
+| Tool | Description |
+|------|-------------|
+| `cortex_global_search` | Search memories across all projects |
+| `cortex_global_stats` | Get global index statistics |
+| `cortex_sync_to_global` | Manually sync to global index |
+
 ## Memory Types
 
 Memories are automatically categorized into:
@@ -197,6 +205,11 @@ auto_provide_context: true
 context_depth: 3
 ```
 
+## Documentation
+
+- [Tool Reference](docs/TOOLS.md) - Complete documentation for all 15 tools with examples
+- [Configuration Guide](docs/CONFIGURATION.md) - Configuration options and troubleshooting
+
 ## Development
 
 ```bash
@@ -211,6 +224,14 @@ black src tests
 ruff check src tests
 ```
 
+## Security
+
+Omni Cortex v1.0.3 has been security reviewed:
+- All SQL queries use parameterized statements
+- Input validation via Pydantic models
+- Model name validation prevents code injection
+- YAML loading uses `safe_load()`
+
 ## License
 
 MIT
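Note: the security bullet "All SQL queries use parameterized statements" describes the style used throughout the new sync module further down in this diff. A minimal, self-contained illustration of that style (in-memory database; table and values are illustrative only):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE memories (id TEXT PRIMARY KEY, content TEXT)")
conn.execute("INSERT INTO memories VALUES (?, ?)", ("mem-1", "hello"))

# placeholders keep user input out of the SQL text entirely
row = conn.execute(
    "SELECT content FROM memories WHERE id = ?", ("mem-1",)
).fetchone()
print(row[0])  # -> hello
```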
--- omni_cortex-1.0.1/README.md
+++ omni_cortex-1.0.2/README.md
@@ -6,7 +6,7 @@ A universal memory system for Claude Code that combines activity logging with in
 
 - **Zero Configuration**: Works out of the box - just install and run setup
 - **Dual-Layer Storage**: Activity logging (audit trail) + Knowledge store (memories)
-- **
+- **18 MCP Tools**: Full-featured API for memory management, activity tracking, session continuity, and cross-project search
 - **Semantic Search**: AI-powered search using sentence-transformers (optional)
 - **Hybrid Search**: Combines keyword (FTS5) + semantic search for best results
 - **Full-Text Search**: SQLite FTS5-powered keyword search with smart ranking
@@ -126,6 +126,14 @@ pip uninstall omni-cortex
 | `cortex_review_memories` | Review and update memory freshness |
 | `cortex_export` | Export data to markdown or JSON |
 
+### Global Tools (3)
+
+| Tool | Description |
+|------|-------------|
+| `cortex_global_search` | Search memories across all projects |
+| `cortex_global_stats` | Get global index statistics |
+| `cortex_sync_to_global` | Manually sync to global index |
+
 ## Memory Types
 
 Memories are automatically categorized into:
@@ -160,6 +168,11 @@ auto_provide_context: true
 context_depth: 3
 ```
 
+## Documentation
+
+- [Tool Reference](docs/TOOLS.md) - Complete documentation for all 15 tools with examples
+- [Configuration Guide](docs/CONFIGURATION.md) - Configuration options and troubleshooting
+
 ## Development
 
 ```bash
@@ -174,6 +187,14 @@ black src tests
 ruff check src tests
 ```
 
+## Security
+
+Omni Cortex v1.0.3 has been security reviewed:
+- All SQL queries use parameterized statements
+- Input validation via Pydantic models
+- Model name validation prevents code injection
+- YAML loading uses `safe_load()`
+
 ## License
 
 MIT
--- omni_cortex-1.0.1/hooks/post_tool_use.py
+++ omni_cortex-1.0.2/hooks/post_tool_use.py
@@ -87,8 +87,13 @@ def truncate(text: str, max_length: int = 10000) -> str:
 def main():
     """Process PostToolUse hook."""
     try:
-        # Read input
-        input_data = json.load(sys.stdin)
+        # Read all input at once (more reliable than json.load on stdin)
+        raw_input = sys.stdin.read()
+        if not raw_input or not raw_input.strip():
+            print(json.dumps({}))
+            return
+
+        input_data = json.loads(raw_input)
 
         # Extract data from hook input
         tool_name = input_data.get("tool_name")
@@ -103,7 +108,8 @@ def main():
         error_message = tool_output.get("error") or tool_output.get("message")
 
         # Skip logging our own tools to prevent recursion
-
+        # MCP tools are named like "mcp__omni-cortex__cortex_remember"
+        if tool_name and ("cortex_" in tool_name or "omni-cortex" in tool_name):
             print(json.dumps({}))
             return
 
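The post_tool_use.py change swaps json.load(sys.stdin) for a read-then-parse approach with an empty-input guard. A quick way to exercise the hook by hand, mirroring how Claude Code feeds it JSON on stdin; the payload shape below is inferred from the fields the hunk reads (tool_name, tool_output) and may be incomplete:

```python
import json
import subprocess

payload = {"tool_name": "Bash", "tool_output": {"message": "ok"}}
result = subprocess.run(
    ["python", "hooks/post_tool_use.py"],
    input=json.dumps(payload),
    capture_output=True,
    text=True,
)
print(result.stdout)  # the hook always answers with a JSON object, e.g. {}
```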
--- omni_cortex-1.0.1/hooks/pre_tool_use.py
+++ omni_cortex-1.0.2/hooks/pre_tool_use.py
@@ -87,8 +87,22 @@ def truncate(text: str, max_length: int = 10000) -> str:
 def main():
     """Process PreToolUse hook."""
     try:
-        # Read input from stdin
-        input_data = json.load(sys.stdin)
+        # Read input from stdin with timeout protection
+        import select
+        if sys.platform != "win32":
+            # Unix: use select for timeout
+            ready, _, _ = select.select([sys.stdin], [], [], 5.0)
+            if not ready:
+                print(json.dumps({}))
+                return
+
+        # Read all input at once
+        raw_input = sys.stdin.read()
+        if not raw_input or not raw_input.strip():
+            print(json.dumps({}))
+            return
+
+        input_data = json.loads(raw_input)
 
         # Extract data from hook input
         tool_name = input_data.get("tool_name")
@@ -96,7 +110,8 @@ def main():
         agent_id = input_data.get("agent_id")
 
         # Skip logging our own tools to prevent recursion
-
+        # MCP tools are named like "mcp__omni-cortex__cortex_remember"
+        if tool_name and ("cortex_" in tool_name or "omni-cortex" in tool_name):
             print(json.dumps({}))
             return
 
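pre_tool_use.py additionally guards the read with a 5-second select() timeout, skipped on Windows where select() only supports sockets. The same pattern as a standalone sketch (the helper name is ours, not the package's):

```python
import json
import select
import sys

def read_stdin_json(timeout: float = 5.0) -> dict:
    """Return parsed JSON from stdin, or {} on timeout/empty input."""
    if sys.platform != "win32":
        # select() works on pipes only on Unix; on Windows we just read
        ready, _, _ = select.select([sys.stdin], [], [], timeout)
        if not ready:
            return {}
    raw = sys.stdin.read()
    return json.loads(raw) if raw.strip() else {}
```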
--- omni_cortex-1.0.1/omni_cortex/config.py
+++ omni_cortex-1.0.2/omni_cortex/config.py
@@ -15,9 +15,9 @@ class CortexConfig:
     # Database
     schema_version: str = "1.0"
 
-    # Embedding
+    # Embedding (disabled by default - model loading can be slow)
     embedding_model: str = "all-MiniLM-L6-v2"
-    embedding_enabled: bool = True
+    embedding_enabled: bool = False
 
     # Decay
     decay_rate_per_day: float = 0.5
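Embeddings now default to off, so semantic search becomes opt-in. A minimal sketch of opting back in, assuming CortexConfig is a plain dataclass that can be constructed directly (the config-file loader is not shown in this hunk):

```python
from omni_cortex.config import CortexConfig

# override the new False default (assumed dataclass constructor)
config = CortexConfig(embedding_enabled=True)
assert config.embedding_model == "all-MiniLM-L6-v2"
```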
--- /dev/null
+++ omni_cortex-1.0.2/omni_cortex/database/__init__.py
@@ -0,0 +1,24 @@
+"""Database layer for Omni Cortex - SQLite with FTS5."""
+
+from .connection import get_connection, init_database, close_connection
+from .schema import SCHEMA_VERSION, get_schema_sql
+from .sync import (
+    sync_memory_to_global,
+    delete_memory_from_global,
+    search_global_memories,
+    get_global_stats,
+    sync_all_project_memories,
+)
+
+__all__ = [
+    "get_connection",
+    "init_database",
+    "close_connection",
+    "SCHEMA_VERSION",
+    "get_schema_sql",
+    "sync_memory_to_global",
+    "delete_memory_from_global",
+    "search_global_memories",
+    "get_global_stats",
+    "sync_all_project_memories",
+]
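With these re-exports, the sync helpers are importable from the package root. A sketch assuming local and global databases are already initialized:

```python
from omni_cortex.database import get_global_stats, search_global_memories

for hit in search_global_memories("fts5", limit=5):
    print(hit["project_path"], hit["id"])
print(get_global_stats())
```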
--- /dev/null
+++ omni_cortex-1.0.2/omni_cortex/database/sync.py
@@ -0,0 +1,421 @@
+"""Global index synchronization for cross-project memory search.
+
+This module handles syncing memories from project-local databases to the
+global database at ~/.omni-cortex/global.db, enabling cross-project search.
+"""
+
+import json
+import logging
+import sqlite3
+from pathlib import Path
+from typing import Optional
+
+from .connection import get_connection, init_database
+from ..config import get_global_db_path, get_project_path, load_config
+from ..utils.timestamps import now_iso
+
+logger = logging.getLogger(__name__)
+
+
+def sync_memory_to_global(
+    memory_id: str,
+    content: str,
+    memory_type: str,
+    tags: list[str],
+    context: Optional[str],
+    importance_score: float,
+    status: str,
+    project_path: str,
+    created_at: str,
+    updated_at: str,
+) -> bool:
+    """Sync a single memory to the global index.
+
+    Args:
+        memory_id: The memory ID
+        content: Memory content
+        memory_type: Memory type
+        tags: List of tags
+        context: Optional context
+        importance_score: Importance score
+        status: Memory status
+        project_path: Source project path
+        created_at: Creation timestamp
+        updated_at: Update timestamp
+
+    Returns:
+        True if synced successfully
+    """
+    config = load_config()
+    if not config.global_sync_enabled:
+        return False
+
+    try:
+        global_conn = init_database(is_global=True)
+
+        cursor = global_conn.cursor()
+
+        # Upsert the memory to global index
+        cursor.execute(
+            """
+            INSERT INTO memories (
+                id, content, type, tags, context,
+                created_at, updated_at, last_accessed,
+                access_count, importance_score, status,
+                project_path, has_embedding
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, 0)
+            ON CONFLICT(id) DO UPDATE SET
+                content = excluded.content,
+                type = excluded.type,
+                tags = excluded.tags,
+                context = excluded.context,
+                updated_at = excluded.updated_at,
+                importance_score = excluded.importance_score,
+                status = excluded.status
+            """,
+            (
+                memory_id,
+                content,
+                memory_type,
+                json.dumps(tags),
+                context,
+                created_at,
+                updated_at,
+                now_iso(),
+                importance_score,
+                status,
+                project_path,
+            ),
+        )
+
+        global_conn.commit()
+        logger.debug(f"Synced memory {memory_id} to global index")
+        return True
+
+    except Exception as e:
+        logger.warning(f"Failed to sync memory {memory_id} to global: {e}")
+        return False
+
+
+def delete_memory_from_global(memory_id: str) -> bool:
+    """Remove a memory from the global index.
+
+    Args:
+        memory_id: The memory ID to remove
+
+    Returns:
+        True if removed successfully
+    """
+    config = load_config()
+    if not config.global_sync_enabled:
+        return False
+
+    try:
+        global_conn = init_database(is_global=True)
+        cursor = global_conn.cursor()
+
+        cursor.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
+        global_conn.commit()
+
+        if cursor.rowcount > 0:
+            logger.debug(f"Removed memory {memory_id} from global index")
+            return True
+        return False
+
+    except Exception as e:
+        logger.warning(f"Failed to remove memory {memory_id} from global: {e}")
+        return False
+
+
+def search_global_memories(
+    query: str,
+    type_filter: Optional[str] = None,
+    tags_filter: Optional[list[str]] = None,
+    project_filter: Optional[str] = None,
+    limit: int = 20,
+) -> list[dict]:
+    """Search memories across all projects via global index.
+
+    Args:
+        query: Search query
+        type_filter: Filter by memory type
+        tags_filter: Filter by tags
+        project_filter: Filter by project path (substring match)
+        limit: Maximum results
+
+    Returns:
+        List of memory dicts with project_path included
+    """
+    try:
+        global_conn = init_database(is_global=True)
+        cursor = global_conn.cursor()
+
+        # Escape FTS5 special characters
+        fts_query = _escape_fts_query(query)
+
+        # Build WHERE conditions
+        where_conditions = []
+        params: list = [fts_query]
+
+        if type_filter:
+            where_conditions.append("m.type = ?")
+            params.append(type_filter)
+
+        if project_filter:
+            where_conditions.append("m.project_path LIKE ?")
+            params.append(f"%{project_filter}%")
+
+        where_conditions.append("m.status != 'archived'")
+
+        if tags_filter:
+            tag_conditions = []
+            for tag in tags_filter:
+                tag_conditions.append("m.tags LIKE ?")
+                params.append(f'%"{tag}"%')
+            where_conditions.append(f"({' OR '.join(tag_conditions)})")
+
+        where_sql = ""
+        if where_conditions:
+            where_sql = "AND " + " AND ".join(where_conditions)
+
+        params.append(limit)
+
+        try:
+            cursor.execute(
+                f"""
+                SELECT m.*, bm25(memories_fts) as score
+                FROM memories_fts fts
+                JOIN memories m ON fts.rowid = m.rowid
+                WHERE memories_fts MATCH ?
+                {where_sql}
+                ORDER BY score
+                LIMIT ?
+                """,
+                params,
+            )
+        except sqlite3.OperationalError:
+            # Fallback to LIKE search if FTS fails
+            return _fallback_global_search(
+                global_conn, query, type_filter, tags_filter, project_filter, limit
+            )
+
+        results = []
+        for row in cursor.fetchall():
+            tags = row["tags"]
+            if tags and isinstance(tags, str):
+                try:
+                    tags = json.loads(tags)
+                except json.JSONDecodeError:
+                    tags = []
+
+            results.append({
+                "id": row["id"],
+                "content": row["content"],
+                "type": row["type"],
+                "tags": tags,
+                "context": row["context"],
+                "importance_score": row["importance_score"],
+                "status": row["status"],
+                "project_path": row["project_path"],
+                "created_at": row["created_at"],
+                "updated_at": row["updated_at"],
+                "score": -row["score"],  # bm25 returns negative scores
+            })
+
+        return results
+
+    except Exception as e:
+        logger.error(f"Global search failed: {e}")
+        return []
+
+
+def _escape_fts_query(query: str) -> str:
+    """Escape special characters for FTS5 query."""
+    special_chars = ['"', "'", "(", ")", "*", ":", "^", "-", "+"]
+    escaped = query
+    for char in special_chars:
+        escaped = escaped.replace(char, " ")
+
+    words = escaped.split()
+    if not words:
+        return '""'
+
+    if len(words) == 1:
+        return f'"{words[0]}"'
+
+    return " OR ".join(f'"{word}"' for word in words)
+
+
+def _fallback_global_search(
+    conn: sqlite3.Connection,
+    query: str,
+    type_filter: Optional[str],
+    tags_filter: Optional[list[str]],
+    project_filter: Optional[str],
+    limit: int,
+) -> list[dict]:
+    """Fallback to LIKE search if FTS5 fails."""
+    words = query.lower().split()
+    if not words:
+        return []
+
+    where_conditions = []
+    params: list = []
+
+    # Match any word in content or context
+    word_conditions = []
+    for word in words:
+        word_conditions.append("(LOWER(content) LIKE ? OR LOWER(context) LIKE ?)")
+        params.extend([f"%{word}%", f"%{word}%"])
+    where_conditions.append(f"({' OR '.join(word_conditions)})")
+
+    if type_filter:
+        where_conditions.append("type = ?")
+        params.append(type_filter)
+
+    if project_filter:
+        where_conditions.append("project_path LIKE ?")
+        params.append(f"%{project_filter}%")
+
+    where_conditions.append("status != 'archived'")
+
+    if tags_filter:
+        tag_conds = []
+        for tag in tags_filter:
+            tag_conds.append("tags LIKE ?")
+            params.append(f'%"{tag}"%')
+        where_conditions.append(f"({' OR '.join(tag_conds)})")
+
+    params.append(limit)
+
+    cursor = conn.cursor()
+    cursor.execute(
+        f"""
+        SELECT *
+        FROM memories
+        WHERE {' AND '.join(where_conditions)}
+        ORDER BY importance_score DESC, updated_at DESC
+        LIMIT ?
+        """,
+        params,
+    )
+
+    results = []
+    for row in cursor.fetchall():
+        tags = row["tags"]
+        if tags and isinstance(tags, str):
+            try:
+                tags = json.loads(tags)
+            except json.JSONDecodeError:
+                tags = []
+
+        content = (row["content"] + " " + (row["context"] or "")).lower()
+        score = sum(1 for word in words if word in content)
+
+        results.append({
+            "id": row["id"],
+            "content": row["content"],
+            "type": row["type"],
+            "tags": tags,
+            "context": row["context"],
+            "importance_score": row["importance_score"],
+            "status": row["status"],
+            "project_path": row["project_path"],
+            "created_at": row["created_at"],
+            "updated_at": row["updated_at"],
+            "score": float(score),
+        })
+
+    return results
+
+
+def get_global_stats() -> dict:
+    """Get statistics from the global index.
+
+    Returns:
+        Dict with counts by project, type, etc.
+    """
+    try:
+        global_conn = init_database(is_global=True)
+        cursor = global_conn.cursor()
+
+        stats = {}
+
+        # Total memories
+        cursor.execute("SELECT COUNT(*) FROM memories")
+        stats["total_memories"] = cursor.fetchone()[0]
+
+        # By project
+        cursor.execute("""
+            SELECT project_path, COUNT(*) as cnt
+            FROM memories
+            GROUP BY project_path
+            ORDER BY cnt DESC
+        """)
+        stats["by_project"] = {row["project_path"]: row["cnt"] for row in cursor.fetchall()}
+
+        # By type
+        cursor.execute("""
+            SELECT type, COUNT(*) as cnt
+            FROM memories
+            GROUP BY type
+            ORDER BY cnt DESC
+        """)
+        stats["by_type"] = {row["type"]: row["cnt"] for row in cursor.fetchall()}
+
+        return stats
+
+    except Exception as e:
+        logger.error(f"Failed to get global stats: {e}")
+        return {"error": str(e)}
+
+
+def sync_all_project_memories() -> int:
+    """Sync all memories from current project to global index.
+
+    Returns:
+        Number of memories synced
+    """
+    config = load_config()
+    if not config.global_sync_enabled:
+        return 0
+
+    try:
+        project_conn = init_database()
+        project_path = str(get_project_path())
+
+        cursor = project_conn.cursor()
+        cursor.execute("SELECT * FROM memories WHERE status != 'archived'")
+
+        count = 0
+        for row in cursor.fetchall():
+            tags = row["tags"]
+            if tags and isinstance(tags, str):
+                try:
+                    tags = json.loads(tags)
+                except json.JSONDecodeError:
+                    tags = []
+            else:
+                tags = []
+
+            synced = sync_memory_to_global(
+                memory_id=row["id"],
+                content=row["content"],
+                memory_type=row["type"],
+                tags=tags,
+                context=row["context"],
+                importance_score=row["importance_score"],
+                status=row["status"],
+                project_path=project_path,
+                created_at=row["created_at"],
+                updated_at=row["updated_at"],
+            )
+            if synced:
+                count += 1
+
+        logger.info(f"Synced {count} memories to global index")
+        return count
+
+    except Exception as e:
+        logger.error(f"Failed to sync project memories: {e}")
+        return 0
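End-to-end, the new module enables push-then-query across projects: sync_all_project_memories() copies the current project's non-archived memories into ~/.omni-cortex/global.db, and search_global_memories() runs a bm25-ranked FTS5 query (falling back to LIKE matching) over the combined index. A usage sketch, assuming global_sync_enabled is on:

```python
from omni_cortex.database.sync import (
    search_global_memories,
    sync_all_project_memories,
)

count = sync_all_project_memories()  # returns the number of memories synced
print(f"synced {count} memories to the global index")

# _escape_fts_query turns special characters into spaces and OR-joins the
# words, so a query like "auth-token" matches "auth" or "token"
for hit in search_global_memories("auth-token", project_filter="Omni-Cortex"):
    print(f'{hit["score"]:.2f}', hit["project_path"], hit["content"][:60])
```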