omni-cortex 1.17.0-py3-none-any.whl → 1.17.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omni_cortex/__init__.py +3 -0
- omni_cortex/categorization/__init__.py +9 -0
- omni_cortex/categorization/auto_tags.py +166 -0
- omni_cortex/categorization/auto_type.py +165 -0
- omni_cortex/config.py +141 -0
- omni_cortex/dashboard.py +232 -0
- omni_cortex/database/__init__.py +24 -0
- omni_cortex/database/connection.py +137 -0
- omni_cortex/database/migrations.py +210 -0
- omni_cortex/database/schema.py +212 -0
- omni_cortex/database/sync.py +421 -0
- omni_cortex/decay/__init__.py +7 -0
- omni_cortex/decay/importance.py +147 -0
- omni_cortex/embeddings/__init__.py +35 -0
- omni_cortex/embeddings/local.py +442 -0
- omni_cortex/models/__init__.py +20 -0
- omni_cortex/models/activity.py +265 -0
- omni_cortex/models/agent.py +144 -0
- omni_cortex/models/memory.py +395 -0
- omni_cortex/models/relationship.py +206 -0
- omni_cortex/models/session.py +290 -0
- omni_cortex/resources/__init__.py +1 -0
- omni_cortex/search/__init__.py +22 -0
- omni_cortex/search/hybrid.py +197 -0
- omni_cortex/search/keyword.py +204 -0
- omni_cortex/search/ranking.py +127 -0
- omni_cortex/search/semantic.py +232 -0
- omni_cortex/server.py +360 -0
- omni_cortex/setup.py +278 -0
- omni_cortex/tools/__init__.py +13 -0
- omni_cortex/tools/activities.py +453 -0
- omni_cortex/tools/memories.py +536 -0
- omni_cortex/tools/sessions.py +311 -0
- omni_cortex/tools/utilities.py +477 -0
- omni_cortex/utils/__init__.py +13 -0
- omni_cortex/utils/formatting.py +282 -0
- omni_cortex/utils/ids.py +72 -0
- omni_cortex/utils/timestamps.py +129 -0
- omni_cortex/utils/truncation.py +111 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/main.py +43 -13
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/METADATA +1 -1
- omni_cortex-1.17.2.dist-info/RECORD +65 -0
- omni_cortex-1.17.0.dist-info/RECORD +0 -26
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/stop.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
- {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/WHEEL +0 -0
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/entry_points.txt +0 -0
- {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/licenses/LICENSE +0 -0
omni_cortex/tools/memories.py (new file)

@@ -0,0 +1,536 @@
"""Memory storage tools for Omni Cortex MCP."""

import json
from typing import Optional
from pydantic import BaseModel, Field, ConfigDict

from mcp.server.fastmcp import FastMCP

from ..database.connection import get_connection, init_database
from ..config import get_project_path, get_session_id
from ..models.memory import (
    MemoryCreate,
    MemoryUpdate,
    Memory,
    create_memory,
    get_memory,
    update_memory,
    delete_memory,
    list_memories,
    touch_memory,
)
from ..models.relationship import create_relationship, get_relationships, VALID_RELATIONSHIP_TYPES
from ..search.hybrid import search
from ..search.ranking import calculate_relevance_score
from ..utils.formatting import format_memory_markdown, format_memories_list_markdown, detect_injection_patterns
from ..embeddings import generate_and_store_embedding, is_model_available
from ..config import load_config
from ..database.sync import sync_memory_to_global, delete_memory_from_global

# === Input Models ===

class RememberInput(BaseModel):
    """Input for storing a new memory."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    content: str = Field(..., description="The information to remember", min_length=1)
    context: Optional[str] = Field(None, description="Additional context about the memory")
    tags: Optional[list[str]] = Field(
        default_factory=list, description="Tags for categorization"
    )
    type: Optional[str] = Field(
        None, description="Memory type (auto-detected if not specified)"
    )
    importance: Optional[int] = Field(
        None, description="Importance score 1-100", ge=1, le=100
    )
    related_activity_id: Optional[str] = Field(
        None, description="ID of related activity"
    )
    related_memory_ids: Optional[list[str]] = Field(
        default_factory=list, description="IDs of related memories"
    )


class RecallInput(BaseModel):
    """Input for searching memories."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    query: str = Field(..., description="Search query", min_length=1)
    search_mode: str = Field(
        "keyword",
        description="Search mode: keyword, semantic, or hybrid",
    )
    type_filter: Optional[str] = Field(None, description="Filter by memory type")
    tags_filter: Optional[list[str]] = Field(None, description="Filter by tags")
    status_filter: Optional[str] = Field(None, description="Filter by status")
    min_importance: Optional[int] = Field(None, description="Minimum importance", ge=0, le=100)
    include_archived: bool = Field(False, description="Include archived memories")
    limit: int = Field(10, description="Maximum results", ge=1, le=50)


class ListMemoriesInput(BaseModel):
    """Input for listing memories."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    type_filter: Optional[str] = Field(None, description="Filter by memory type")
    tags_filter: Optional[list[str]] = Field(None, description="Filter by tags")
    status_filter: Optional[str] = Field(None, description="Filter by status")
    sort_by: str = Field(
        "last_accessed",
        description="Sort by: last_accessed, created_at, importance_score",
    )
    sort_order: str = Field("desc", description="Sort order: asc or desc")
    limit: int = Field(20, description="Maximum results", ge=1, le=100)
    offset: int = Field(0, description="Pagination offset", ge=0)


class UpdateMemoryInput(BaseModel):
    """Input for updating a memory."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    id: str = Field(..., description="Memory ID to update")
    content: Optional[str] = Field(None, description="New content")
    context: Optional[str] = Field(None, description="New context")
    tags: Optional[list[str]] = Field(None, description="Replace all tags")
    add_tags: Optional[list[str]] = Field(None, description="Tags to add")
    remove_tags: Optional[list[str]] = Field(None, description="Tags to remove")
    status: Optional[str] = Field(None, description="New status")
    importance: Optional[int] = Field(None, description="New importance", ge=1, le=100)


class ForgetInput(BaseModel):
    """Input for deleting a memory."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    id: str = Field(..., description="Memory ID to delete")
    confirm: bool = Field(..., description="Must be true to confirm deletion")


class LinkMemoriesInput(BaseModel):
    """Input for linking two memories."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    source_id: str = Field(..., description="Source memory ID")
    target_id: str = Field(..., description="Target memory ID")
    relationship_type: str = Field(
        ..., description="Type: related_to, supersedes, derived_from, contradicts"
    )
    strength: float = Field(1.0, description="Relationship strength 0-1", ge=0.0, le=1.0)

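Aside (illustrative, not part of the diff): the input models above double as the JSON schemas that FastMCP publishes for each tool, so their Field constraints are what accept or reject tool arguments. A minimal sketch of that behavior, assuming the wheel is installed and importable as omni_cortex:

# --- illustrative example, not from memories.py ---
from pydantic import ValidationError
from omni_cortex.tools.memories import RecallInput, RememberInput

# str_strip_whitespace=True trims surrounding whitespace during validation.
ok = RememberInput(content="  Prefer WAL mode for SQLite  ", tags=["sqlite"], importance=80)
print(ok.content)  # "Prefer WAL mode for SQLite"

# limit is constrained to ge=1, le=50, so out-of-range values raise.
try:
    RecallInput(query="wal mode", limit=500)
except ValidationError as exc:
    print(exc.errors()[0]["loc"])  # ('limit',)
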
def register_memory_tools(mcp: FastMCP) -> None:
    """Register all memory tools with the MCP server."""

    @mcp.tool(
        name="cortex_remember",
        annotations={
            "title": "Remember Information",
            "readOnlyHint": False,
            "destructiveHint": False,
            "idempotentHint": False,
            "openWorldHint": False,
        },
    )
    async def cortex_remember(params: RememberInput) -> str:
        """Store important information with auto-categorization and tagging.

        This tool saves knowledge, decisions, solutions, and other important
        information to the Cortex memory system. Content is automatically
        categorized and tagged based on analysis.

        Args:
            params: RememberInput with content, optional context, tags, type, importance

        Returns:
            Confirmation with memory ID and detected type/tags
        """
        try:
            conn = init_database()
            project_path = str(get_project_path())
            session_id = get_session_id()

            # Detect potential injection patterns in content
            injection_warnings = detect_injection_patterns(params.content)
            if injection_warnings:
                import logging
                logging.getLogger(__name__).warning(
                    f"Memory content contains potential injection patterns: {injection_warnings}"
                )

            # Create the memory
            memory_data = MemoryCreate(
                content=params.content,
                context=params.context,
                tags=params.tags or [],
                type=params.type or "general",
                importance=params.importance,
                related_activity_id=params.related_activity_id,
                related_memory_ids=params.related_memory_ids or [],
            )

            memory = create_memory(
                conn,
                memory_data,
                project_path=project_path,
                session_id=session_id,
            )

            # Create relationships if specified
            if params.related_memory_ids:
                for related_id in params.related_memory_ids:
                    create_relationship(
                        conn,
                        source_id=memory.id,
                        target_id=related_id,
                        relationship_type="related_to",
                    )

            # Generate embedding for semantic search (if enabled)
            has_embedding = False
            config = load_config()
            if config.embedding_enabled and is_model_available():
                try:
                    generate_and_store_embedding(
                        conn,
                        memory_id=memory.id,
                        content=memory.content,
                        context=memory.context,
                    )
                    has_embedding = True
                except Exception as e:
                    # Non-fatal: embedding generation is optional
                    # Log timeout errors to help with debugging
                    import logging
                    logging.getLogger(__name__).warning(f"Embedding generation failed: {e}")

            # Sync to global index for cross-project search
            sync_memory_to_global(
                memory_id=memory.id,
                content=memory.content,
                memory_type=memory.type,
                tags=memory.tags or [],
                context=memory.context,
                importance_score=memory.importance_score,
                status=memory.status,
                project_path=project_path,
                created_at=memory.created_at,
                updated_at=memory.updated_at,
            )

            embedding_status = "with embedding" if has_embedding else "no embedding"
            result = (
                f"Remembered: {memory.id}\n"
                f"Type: {memory.type}\n"
                f"Tags: {', '.join(memory.tags) if memory.tags else 'none'}\n"
                f"Importance: {memory.importance_score:.0f}/100\n"
                f"Search: {embedding_status}"
            )
            if injection_warnings:
                result += f"\n[Security Note: Content contains patterns that may be injection attempts: {', '.join(injection_warnings)}]"
            return result

        except Exception as e:
            return f"Error storing memory: {e}"

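Aside (illustrative, not part of the diff): one way to exercise cortex_remember end to end is through the MCP Python client over stdio. The launch command below is an assumption made for the sketch; the real console script is declared in the package's entry_points.txt, which this diff does not expand.

# --- illustrative example, not from memories.py ---
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    # Hypothetical launch command; substitute the actual omni-cortex entry point.
    server = StdioServerParameters(command="python", args=["-m", "omni_cortex.server"])
    async with stdio_client(server) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool(
                "cortex_remember",
                arguments={"params": {"content": "Deploys run from the release branch", "tags": ["process"]}},
            )
            # The tool replies with a plain-text confirmation ("Remembered: <id> ...").
            print(result.content[0].text)


asyncio.run(main())
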
    @mcp.tool(
        name="cortex_recall",
        annotations={
            "title": "Search Memories",
            "readOnlyHint": True,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_recall(params: RecallInput) -> str:
        """Search memories by keyword or semantic similarity.

        This tool searches through stored memories using keyword matching
        (FTS5) or semantic search (embeddings). Results are ranked by
        relevance, access frequency, recency, and importance.

        Args:
            params: RecallInput with query and filters

        Returns:
            Matching memories formatted as markdown
        """
        try:
            conn = init_database()

            # Use unified search function supporting all modes
            results = search(
                conn,
                query=params.query,
                mode=params.search_mode,
                type_filter=params.type_filter,
                tags_filter=params.tags_filter,
                status_filter=params.status_filter,
                min_importance=params.min_importance,
                include_archived=params.include_archived,
                limit=params.limit,
            )

            if not results:
                return f"No memories found matching: {params.query}"

            # Calculate full relevance scores and re-rank
            scored_results = []
            for memory, keyword_score, semantic_score in results:
                # Touch memory to update access count
                touch_memory(conn, memory.id)

                # Normalize scores to 0-1 range for ranking
                # Keyword scores from FTS can vary, normalize them
                kw_normalized = min(1.0, keyword_score / 10.0) if keyword_score > 0 else 0.0

                # Calculate combined score
                final_score = calculate_relevance_score(
                    memory,
                    keyword_score=kw_normalized,
                    semantic_score=semantic_score,
                    query=params.query,
                )
                scored_results.append((memory, final_score))

            # Sort by final score
            scored_results.sort(key=lambda x: x[1], reverse=True)

            # Build related memories map
            related_map: dict[str, list[dict]] = {}
            for memory, _ in scored_results:
                relationships = get_relationships(conn, memory.id)
                if relationships:
                    related_list = []
                    for rel in relationships[:3]:  # Limit to 3 related
                        # Get the related memory ID (could be source or target)
                        related_id = (
                            rel.target_memory_id
                            if rel.source_memory_id == memory.id
                            else rel.source_memory_id
                        )
                        related_mem = get_memory(conn, related_id)
                        if related_mem:
                            related_list.append({
                                "id": related_mem.id,
                                "content": related_mem.content,
                                "relationship_type": rel.relationship_type,
                            })
                    if related_list:
                        related_map[memory.id] = related_list

            # Format output - convert Memory objects to dicts for formatting
            memories = [m.model_dump() for m, _ in scored_results]
            return format_memories_list_markdown(memories, len(memories), related_map=related_map)

        except Exception as e:
            return f"Error searching memories: {e}"

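Aside (illustrative, not part of the diff): before re-ranking, cortex_recall clamps raw FTS5 keyword scores into the 0-1 range that calculate_relevance_score expects. The same expression in isolation:

# --- illustrative example, not from memories.py ---
def normalize_keyword_score(keyword_score: float) -> float:
    # Mirrors the inline expression used by cortex_recall above.
    return min(1.0, keyword_score / 10.0) if keyword_score > 0 else 0.0


assert normalize_keyword_score(0.0) == 0.0    # no keyword match contributes nothing
assert normalize_keyword_score(2.5) == 0.25   # mid-range scores scale linearly
assert normalize_keyword_score(25.0) == 1.0   # anything above 10 is capped at 1.0
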
    @mcp.tool(
        name="cortex_list_memories",
        annotations={
            "title": "List Memories",
            "readOnlyHint": True,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_list_memories(params: ListMemoriesInput) -> str:
        """List memories with filtering and pagination.

        Args:
            params: ListMemoriesInput with filters and pagination

        Returns:
            Memories formatted as markdown
        """
        try:
            conn = init_database()

            memories, total = list_memories(
                conn,
                type_filter=params.type_filter,
                tags_filter=params.tags_filter,
                status_filter=params.status_filter,
                sort_by=params.sort_by,
                sort_order=params.sort_order,
                limit=params.limit,
                offset=params.offset,
            )

            if not memories:
                return "No memories found."

            return format_memories_list_markdown(
                [m.model_dump() for m in memories],
                total,
            )

        except Exception as e:
            return f"Error listing memories: {e}"

    @mcp.tool(
        name="cortex_update_memory",
        annotations={
            "title": "Update Memory",
            "readOnlyHint": False,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_update_memory(params: UpdateMemoryInput) -> str:
        """Update an existing memory.

        Args:
            params: UpdateMemoryInput with ID and fields to update

        Returns:
            Updated memory details or error
        """
        try:
            conn = init_database()

            update_data = MemoryUpdate(
                content=params.content,
                context=params.context,
                tags=params.tags,
                add_tags=params.add_tags,
                remove_tags=params.remove_tags,
                status=params.status,
                importance=params.importance,
            )

            updated = update_memory(conn, params.id, update_data)

            if not updated:
                return f"Memory not found: {params.id}"

            # Regenerate embedding if content or context changed (if enabled)
            config = load_config()
            if (params.content is not None or params.context is not None) and config.embedding_enabled and is_model_available():
                try:
                    generate_and_store_embedding(
                        conn,
                        memory_id=updated.id,
                        content=updated.content,
                        context=updated.context,
                    )
                except Exception:
                    pass  # Non-fatal

            # Sync update to global index
            sync_memory_to_global(
                memory_id=updated.id,
                content=updated.content,
                memory_type=updated.type,
                tags=updated.tags or [],
                context=updated.context,
                importance_score=updated.importance_score,
                status=updated.status,
                project_path=updated.project_path or str(get_project_path()),
                created_at=updated.created_at,
                updated_at=updated.updated_at,
            )

            return format_memory_markdown(updated.model_dump())

        except Exception as e:
            return f"Error updating memory: {e}"

    @mcp.tool(
        name="cortex_forget",
        annotations={
            "title": "Delete Memory",
            "readOnlyHint": False,
            "destructiveHint": True,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_forget(params: ForgetInput) -> str:
        """Permanently delete a memory.

        Args:
            params: ForgetInput with ID and confirmation

        Returns:
            Confirmation or error
        """
        try:
            if not params.confirm:
                return "Deletion not confirmed. Set confirm=true to delete."

            conn = init_database()
            deleted = delete_memory(conn, params.id)

            if deleted:
                # Also remove from global index
                delete_memory_from_global(params.id)
                return f"Memory deleted: {params.id}"
            else:
                return f"Memory not found: {params.id}"

        except Exception as e:
            return f"Error deleting memory: {e}"

    @mcp.tool(
        name="cortex_link_memories",
        annotations={
            "title": "Link Memories",
            "readOnlyHint": False,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_link_memories(params: LinkMemoriesInput) -> str:
        """Create a relationship between two memories.

        Relationship types:
        - related_to: General association
        - supersedes: New memory replaces old
        - derived_from: New memory based on old
        - contradicts: Memories conflict

        Args:
            params: LinkMemoriesInput with source, target, and type

        Returns:
            Confirmation or error
        """
        try:
            if params.relationship_type not in VALID_RELATIONSHIP_TYPES:
                return (
                    f"Invalid relationship type: {params.relationship_type}. "
                    f"Valid types: {', '.join(VALID_RELATIONSHIP_TYPES)}"
                )

            conn = init_database()
            relationship = create_relationship(
                conn,
                source_id=params.source_id,
                target_id=params.target_id,
                relationship_type=params.relationship_type,
                strength=params.strength,
            )

            if relationship:
                return (
                    f"Linked: {params.source_id} --[{params.relationship_type}]--> "
                    f"{params.target_id}"
                )
            else:
                return "Failed to create relationship. Check that both memories exist."

        except Exception as e:
            return f"Error linking memories: {e}"
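Aside (illustrative, not part of the diff): register_memory_tools expects a FastMCP instance; the package's own server.py presumably calls it alongside the activity, session, and utility registrations. A minimal sketch that serves only the memory tools, assuming the wheel is installed:

# --- illustrative example, not from memories.py ---
from mcp.server.fastmcp import FastMCP

from omni_cortex.tools.memories import register_memory_tools

mcp = FastMCP("omni-cortex-memories")
register_memory_tools(mcp)  # adds cortex_remember, cortex_recall, cortex_list_memories, ...

if __name__ == "__main__":
    mcp.run()  # stdio transport by default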