memorygraphMCP 0.11.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memorygraph/__init__.py +50 -0
- memorygraph/__main__.py +12 -0
- memorygraph/advanced_tools.py +509 -0
- memorygraph/analytics/__init__.py +46 -0
- memorygraph/analytics/advanced_queries.py +727 -0
- memorygraph/backends/__init__.py +21 -0
- memorygraph/backends/base.py +179 -0
- memorygraph/backends/cloud.py +75 -0
- memorygraph/backends/cloud_backend.py +858 -0
- memorygraph/backends/factory.py +577 -0
- memorygraph/backends/falkordb_backend.py +749 -0
- memorygraph/backends/falkordblite_backend.py +746 -0
- memorygraph/backends/ladybugdb_backend.py +242 -0
- memorygraph/backends/memgraph_backend.py +327 -0
- memorygraph/backends/neo4j_backend.py +298 -0
- memorygraph/backends/sqlite_fallback.py +463 -0
- memorygraph/backends/turso.py +448 -0
- memorygraph/cli.py +743 -0
- memorygraph/cloud_database.py +297 -0
- memorygraph/config.py +295 -0
- memorygraph/database.py +933 -0
- memorygraph/graph_analytics.py +631 -0
- memorygraph/integration/__init__.py +69 -0
- memorygraph/integration/context_capture.py +426 -0
- memorygraph/integration/project_analysis.py +583 -0
- memorygraph/integration/workflow_tracking.py +492 -0
- memorygraph/intelligence/__init__.py +59 -0
- memorygraph/intelligence/context_retrieval.py +447 -0
- memorygraph/intelligence/entity_extraction.py +386 -0
- memorygraph/intelligence/pattern_recognition.py +420 -0
- memorygraph/intelligence/temporal.py +374 -0
- memorygraph/migration/__init__.py +27 -0
- memorygraph/migration/manager.py +579 -0
- memorygraph/migration/models.py +142 -0
- memorygraph/migration/scripts/__init__.py +17 -0
- memorygraph/migration/scripts/bitemporal_migration.py +595 -0
- memorygraph/migration/scripts/multitenancy_migration.py +452 -0
- memorygraph/migration_tools_module.py +146 -0
- memorygraph/models.py +684 -0
- memorygraph/proactive/__init__.py +46 -0
- memorygraph/proactive/outcome_learning.py +444 -0
- memorygraph/proactive/predictive.py +410 -0
- memorygraph/proactive/session_briefing.py +399 -0
- memorygraph/relationships.py +668 -0
- memorygraph/server.py +883 -0
- memorygraph/sqlite_database.py +1876 -0
- memorygraph/tools/__init__.py +59 -0
- memorygraph/tools/activity_tools.py +262 -0
- memorygraph/tools/memory_tools.py +315 -0
- memorygraph/tools/migration_tools.py +181 -0
- memorygraph/tools/relationship_tools.py +147 -0
- memorygraph/tools/search_tools.py +406 -0
- memorygraph/tools/temporal_tools.py +339 -0
- memorygraph/utils/__init__.py +10 -0
- memorygraph/utils/context_extractor.py +429 -0
- memorygraph/utils/error_handling.py +151 -0
- memorygraph/utils/export_import.py +425 -0
- memorygraph/utils/graph_algorithms.py +200 -0
- memorygraph/utils/pagination.py +149 -0
- memorygraph/utils/project_detection.py +133 -0
- memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
- memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
- memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
- memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
- memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,746 @@
|
|
|
1
|
+
"""
|
|
2
|
+
FalkorDBLite backend implementation for the Claude Code Memory Server.
|
|
3
|
+
|
|
4
|
+
This module provides the FalkorDBLite-specific implementation of the GraphBackend interface.
|
|
5
|
+
FalkorDBLite is an embedded graph database (like SQLite) with native Cypher support and exceptional performance.
|
|
6
|
+
Unlike FalkorDB (client-server), FalkorDBLite uses a file path for embedded local storage.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
import os
|
|
11
|
+
from typing import Any, Optional, List, Tuple, Dict
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
|
|
14
|
+
from .base import GraphBackend
|
|
15
|
+
from ..models import (
|
|
16
|
+
Memory,
|
|
17
|
+
MemoryType,
|
|
18
|
+
Relationship,
|
|
19
|
+
RelationshipType,
|
|
20
|
+
RelationshipProperties,
|
|
21
|
+
SearchQuery,
|
|
22
|
+
MemoryContext,
|
|
23
|
+
MemoryNode,
|
|
24
|
+
DatabaseConnectionError,
|
|
25
|
+
SchemaError,
|
|
26
|
+
ValidationError,
|
|
27
|
+
RelationshipError,
|
|
28
|
+
)
|
|
29
|
+
from ..config import Config
|
|
30
|
+
from datetime import datetime, timezone
|
|
31
|
+
import uuid
|
|
32
|
+
import json
|
|
33
|
+
|
|
34
|
+
# Module-level logger, one per module per the stdlib logging convention.
logger = logging.getLogger(__name__)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class FalkorDBLiteBackend(GraphBackend):
|
|
38
|
+
"""FalkorDBLite implementation of the GraphBackend interface."""
|
|
39
|
+
|
|
40
|
+
def __init__(
    self,
    db_path: Optional[str] = None,
    graph_name: str = "memorygraph"
):
    """
    Create a FalkorDBLite backend instance (no connection is opened yet).

    Args:
        db_path: Database file location. Falls back to the FALKORDBLITE_PATH
            environment variable, then to ~/.memorygraph/falkordblite.db
            (the directory is created if missing).
        graph_name: Name of the graph to operate on (defaults to 'memorygraph')
    """
    # Resolve the path: explicit argument > environment variable > default.
    resolved = db_path if db_path is not None else os.getenv("FALKORDBLITE_PATH")
    if resolved is None:
        default_dir = Path.home() / ".memorygraph"
        default_dir.mkdir(parents=True, exist_ok=True)
        resolved = str(default_dir / "falkordblite.db")

    self.db_path = resolved
    self.graph_name = graph_name
    # Populated by connect(); cleared by disconnect().
    self.client = None
    self.graph = None
    self._connected = False
|
|
66
|
+
|
|
67
|
+
async def connect(self) -> bool:
    """
    Establish connection to FalkorDBLite database.

    Returns:
        True if connection successful

    Raises:
        DatabaseConnectionError: If the client package is missing or the
            connection fails
    """
    try:
        # Lazy import falkordblite only when connecting
        try:
            # NOTE(review): the import path is redislite.falkordb_client but
            # the error message below names the "falkordblite" package —
            # confirm which distribution actually provides this module.
            from redislite.falkordb_client import FalkorDB
        except ImportError as e:
            raise DatabaseConnectionError(
                "falkordblite package is required for FalkorDBLite backend. "
                "Install with: pip install falkordblite"
            ) from e

        # Create FalkorDBLite client with file path (embedded database)
        self.client = FalkorDB(self.db_path)

        # Select the graph
        self.graph = self.client.select_graph(self.graph_name)
        self._connected = True

        logger.info(f"Successfully connected to FalkorDBLite at {self.db_path}")
        return True

    except DatabaseConnectionError:
        # Fix: re-raise the missing-package error as-is instead of letting
        # the generic handler below double-wrap it in a second
        # DatabaseConnectionError with a misleading message.
        raise
    except Exception as e:
        logger.error(f"Failed to connect to FalkorDBLite: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to connect to FalkorDBLite: {e}") from e
|
|
100
|
+
|
|
101
|
+
async def disconnect(self) -> None:
    """Release the client and reset all connection state."""
    if not self.client:
        return
    # FalkorDBLite client doesn't require explicit close in Python SDK;
    # dropping the references is sufficient for the embedded database.
    self.client = None
    self.graph = None
    self._connected = False
    logger.info("FalkorDBLite connection closed")
|
|
109
|
+
|
|
110
|
+
async def execute_query(
    self,
    query: str,
    parameters: Optional[dict[str, Any]] = None,
    write: bool = False
) -> list[dict[str, Any]]:
    """
    Execute a Cypher query and return results.

    Args:
        query: The Cypher query string
        parameters: Query parameters for parameterized queries
        write: Whether this is a write operation (default: False)

    Returns:
        List of result records as dictionaries

    Raises:
        DatabaseConnectionError: If not connected or query fails
    """
    if not self._connected or not self.graph:
        raise DatabaseConnectionError("Not connected to FalkorDBLite. Call connect() first.")

    params = parameters or {}

    try:
        # Execute query on FalkorDBLite
        result = self.graph.query(query, params)

        # Convert result to list of dicts.
        # NOTE(review): result_set rows are returned as-is; callers in this
        # module index them like dicts (row["id"]) — confirm the client
        # yields mapping-style rows rather than plain lists.
        result_list = []
        if hasattr(result, 'result_set') and result.result_set:
            result_list = result.result_set

        return result_list

    except Exception as e:
        logger.error(f"Query execution failed: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Query execution failed: {e}") from e
|
|
149
|
+
|
|
150
|
+
async def initialize_schema(self) -> None:
    """
    Create the constraints and indexes the memory graph relies on.

    Statements the engine rejects are logged at debug level and skipped,
    so partial schema support does not abort startup.

    Raises:
        SchemaError: If schema initialization fails
    """
    logger.info("Initializing FalkorDBLite schema for Claude Memory...")

    # Uniqueness of memory IDs (Cypher syntax shared with FalkorDB).
    constraints = [
        "CREATE CONSTRAINT ON (m:Memory) ASSERT m.id IS UNIQUE",
    ]

    # Secondary indexes on the most frequently filtered fields.
    indexes = [
        "CREATE INDEX ON :Memory(type)",
        "CREATE INDEX ON :Memory(created_at)",
        "CREATE INDEX ON :Memory(importance)",
        "CREATE INDEX ON :Memory(confidence)",
    ]

    # Phase 1: additional indexes only when multi-tenancy is active.
    if Config.is_multi_tenant_mode():
        indexes += [
            "CREATE INDEX ON :Memory(context_tenant_id)",
            "CREATE INDEX ON :Memory(context_team_id)",
            "CREATE INDEX ON :Memory(context_visibility)",
            "CREATE INDEX ON :Memory(context_created_by)",
            "CREATE INDEX ON :Memory(version)",
        ]
        logger.info("Multi-tenant mode enabled, adding tenant indexes")

    for statement in constraints:
        try:
            await self.execute_query(statement, write=True)
            logger.debug(f"Created constraint: {statement}")
        except Exception as e:
            # Not every constraint type may be supported; continue regardless.
            logger.debug(f"Constraint creation note: {e}")

    for statement in indexes:
        try:
            await self.execute_query(statement, write=True)
            logger.debug(f"Created index: {statement}")
        except Exception as e:
            # Not every index type may be supported; continue regardless.
            logger.debug(f"Index creation note: {e}")

    logger.info("Schema initialization completed")
|
|
202
|
+
|
|
203
|
+
async def store_memory(self, memory: Memory) -> str:
    """
    Store (upsert) a memory node and return its ID.

    A UUID is assigned when the memory has no ID yet, and ``updated_at``
    is refreshed before writing.

    Args:
        memory: Memory object to store

    Returns:
        ID of the stored memory

    Raises:
        ValidationError: If memory data is invalid
        DatabaseConnectionError: If storage fails
    """
    try:
        if not memory.id:
            memory.id = str(uuid.uuid4())

        memory.updated_at = datetime.now(timezone.utc)

        # Flatten the memory into node properties (Neo4j-compatible shape).
        memory_node = MemoryNode(memory=memory)
        properties = memory_node.to_neo4j_properties()

        # MERGE keeps the write idempotent on the memory ID.
        query = """
        MERGE (m:Memory {id: $id})
        SET m += $properties
        RETURN m.id as id
        """

        result = await self.execute_query(
            query,
            {"id": memory.id, "properties": properties},
            write=True
        )

        if result:
            logger.info(f"Stored memory: {memory.id} ({memory.type})")
            return result[0]["id"]
        else:
            raise DatabaseConnectionError(f"Failed to store memory: {memory.id}")

    except Exception as e:
        if isinstance(e, (DatabaseConnectionError, ValidationError)):
            raise
        logger.error(f"Failed to store memory: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to store memory: {e}") from e
|
|
250
|
+
|
|
251
|
+
async def get_memory(self, memory_id: str, include_relationships: bool = True) -> Optional[Memory]:
    """
    Retrieve a memory by ID.

    Args:
        memory_id: ID of the memory to retrieve
        include_relationships: Whether to include relationships
            (accepted for interface compatibility; not currently used)

    Returns:
        Memory object if found, None otherwise (also None when the node
        exists but cannot be converted — see _falkordblite_to_memory)

    Raises:
        DatabaseConnectionError: If query fails
    """
    try:
        query = """
        MATCH (m:Memory {id: $memory_id})
        RETURN m
        """

        result = await self.execute_query(query, {"memory_id": memory_id}, write=False)

        if not result:
            return None

        memory_data = result[0]["m"]
        return self._falkordblite_to_memory(memory_data)

    except Exception as e:
        if isinstance(e, DatabaseConnectionError):
            raise
        logger.error(f"Failed to get memory {memory_id}: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to get memory: {e}") from e
|
|
284
|
+
|
|
285
|
+
async def search_memories(self, search_query: SearchQuery) -> List[Memory]:
    """
    Search for memories based on query parameters.

    Builds a parameterized Cypher WHERE clause from the populated fields
    of *search_query* (free text, types, tags, project path, importance
    and confidence thresholds); results are ordered by importance then
    recency and capped at ``search_query.limit``.

    Args:
        search_query: SearchQuery object with filter criteria

    Returns:
        List of Memory objects matching the search criteria

    Raises:
        DatabaseConnectionError: If search fails
    """
    try:
        conditions = []
        parameters = {}

        # Free-text match across title, content and summary.
        if search_query.query:
            conditions.append("(m.title CONTAINS $query OR m.content CONTAINS $query OR m.summary CONTAINS $query)")
            parameters["query"] = search_query.query

        if search_query.memory_types:
            conditions.append("m.type IN $memory_types")
            parameters["memory_types"] = [t.value for t in search_query.memory_types]

        # Match when any requested tag appears on the node.
        if search_query.tags:
            conditions.append("ANY(tag IN $tags WHERE tag IN m.tags)")
            parameters["tags"] = search_query.tags

        if search_query.project_path:
            conditions.append("m.context_project_path = $project_path")
            parameters["project_path"] = search_query.project_path

        if search_query.min_importance is not None:
            conditions.append("m.importance >= $min_importance")
            parameters["min_importance"] = search_query.min_importance

        if search_query.min_confidence is not None:
            conditions.append("m.confidence >= $min_confidence")
            parameters["min_confidence"] = search_query.min_confidence

        # No filters means match everything.
        where_clause = " AND ".join(conditions) if conditions else "true"

        query = f"""
        MATCH (m:Memory)
        WHERE {where_clause}
        RETURN m
        ORDER BY m.importance DESC, m.created_at DESC
        LIMIT $limit
        """

        parameters["limit"] = search_query.limit

        result = await self.execute_query(query, parameters, write=False)

        memories = []
        for record in result:
            memory = self._falkordblite_to_memory(record["m"])
            if memory:
                memories.append(memory)

        logger.info(f"Found {len(memories)} memories for search query")
        return memories

    except Exception as e:
        if isinstance(e, DatabaseConnectionError):
            raise
        logger.error(f"Failed to search memories: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to search memories: {e}") from e
|
|
356
|
+
|
|
357
|
+
async def update_memory(self, memory: Memory) -> bool:
    """
    Update an existing memory.

    ``updated_at`` is refreshed before writing; properties are merged
    onto the existing node with ``SET m += ...``.

    Args:
        memory: Memory object with updated fields (must carry an ID)

    Returns:
        True if a matching node was updated, False otherwise

    Raises:
        ValidationError: If memory ID is missing
        DatabaseConnectionError: If update fails
    """
    try:
        if not memory.id:
            raise ValidationError("Memory must have an ID to update")

        memory.updated_at = datetime.now(timezone.utc)

        # Flatten the memory into node properties (Neo4j-compatible shape).
        memory_node = MemoryNode(memory=memory)
        properties = memory_node.to_neo4j_properties()

        query = """
        MATCH (m:Memory {id: $id})
        SET m += $properties
        RETURN m.id as id
        """

        result = await self.execute_query(
            query,
            {"id": memory.id, "properties": properties},
            write=True
        )

        # A returned row means the MATCH found the node.
        success = len(result) > 0
        if success:
            logger.info(f"Updated memory: {memory.id}")

        return success

    except Exception as e:
        if isinstance(e, (ValidationError, DatabaseConnectionError)):
            raise
        logger.error(f"Failed to update memory {memory.id}: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to update memory: {e}") from e
|
|
404
|
+
|
|
405
|
+
async def delete_memory(self, memory_id: str) -> bool:
    """
    Delete a memory and all its relationships.

    Args:
        memory_id: ID of the memory to delete

    Returns:
        True if a node was deleted, False otherwise

    Raises:
        DatabaseConnectionError: If deletion fails
    """
    try:
        # DETACH DELETE removes the node together with all its edges.
        query = """
        MATCH (m:Memory {id: $memory_id})
        DETACH DELETE m
        RETURN COUNT(m) as deleted_count
        """

        result = await self.execute_query(query, {"memory_id": memory_id}, write=True)

        # Fix: coerce to bool — `result and ...` returned the empty list
        # itself (not False) when no rows came back, violating the
        # declared `-> bool` contract.
        success = bool(result) and result[0]["deleted_count"] > 0
        if success:
            logger.info(f"Deleted memory: {memory_id}")

        return success

    except Exception as e:
        if isinstance(e, DatabaseConnectionError):
            raise
        logger.error(f"Failed to delete memory {memory_id}: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to delete memory: {e}") from e
|
|
438
|
+
|
|
439
|
+
async def create_relationship(
    self,
    from_memory_id: str,
    to_memory_id: str,
    relationship_type: RelationshipType,
    properties: Optional[RelationshipProperties] = None
) -> str:
    """
    Create a relationship between two memories.

    Args:
        from_memory_id: Source memory ID
        to_memory_id: Target memory ID
        relationship_type: Type of relationship
        properties: Relationship properties (a default
            RelationshipProperties is used when omitted)

    Returns:
        ID of the created relationship

    Raises:
        RelationshipError: If relationship creation fails (e.g. an
            endpoint memory does not exist)
        DatabaseConnectionError: If database operation fails
    """
    try:
        relationship_id = str(uuid.uuid4())

        if properties is None:
            properties = RelationshipProperties()

        # Convert properties to a flat dict; datetimes become ISO strings
        # because graph properties must be primitive values.
        props_dict = properties.model_dump()
        props_dict['id'] = relationship_id
        props_dict['created_at'] = props_dict['created_at'].isoformat()
        props_dict['last_validated'] = props_dict['last_validated'].isoformat()

        # The relationship label comes from a closed enum, so the f-string
        # interpolation into the Cypher text is safe (no user input).
        query = f"""
        MATCH (from:Memory {{id: $from_id}})
        MATCH (to:Memory {{id: $to_id}})
        CREATE (from)-[r:{relationship_type.value} $properties]->(to)
        RETURN r.id as id
        """

        result = await self.execute_query(
            query,
            {
                "from_id": from_memory_id,
                "to_id": to_memory_id,
                "properties": props_dict
            },
            write=True
        )

        if result:
            logger.info(f"Created relationship: {relationship_type.value} between {from_memory_id} and {to_memory_id}")
            return result[0]["id"]
        else:
            # No rows back: at least one endpoint MATCH found nothing.
            raise RelationshipError(
                f"Failed to create relationship between {from_memory_id} and {to_memory_id}",
                {"from_id": from_memory_id, "to_id": to_memory_id, "type": relationship_type.value}
            )

    except Exception as e:
        if isinstance(e, (RelationshipError, DatabaseConnectionError)):
            raise
        logger.error(f"Failed to create relationship: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise RelationshipError(f"Failed to create relationship: {e}") from e
|
|
505
|
+
|
|
506
|
+
async def get_related_memories(
    self,
    memory_id: str,
    relationship_types: Optional[List[RelationshipType]] = None,
    max_depth: int = 2
) -> List[Tuple[Memory, Relationship]]:
    """
    Get memories related to a specific memory via graph traversal.

    Args:
        memory_id: ID of the memory to find relations for
        relationship_types: Filter by specific relationship types (optional)
        max_depth: Maximum depth for the variable-length traversal

    Returns:
        List of (Memory, Relationship) tuples, at most 20, ordered by
        relationship strength then memory importance

    Raises:
        DatabaseConnectionError: If query fails
    """
    try:
        # Optional type filter rendered as e.g. ":CAUSED_BY|RELATED_TO".
        rel_filter = ""
        if relationship_types:
            rel_types = "|".join([rt.value for rt in relationship_types])
            rel_filter = f":{rel_types}"

        # r is a variable-length path; r[0] takes its first hop as the
        # representative relationship for the result row.
        query = f"""
        MATCH (start:Memory {{id: $memory_id}})
        MATCH (start)-[r{rel_filter}*1..{max_depth}]-(related:Memory)
        WHERE related.id <> start.id
        WITH DISTINCT related, r[0] as rel
        RETURN related,
               type(rel) as rel_type,
               properties(rel) as rel_props
        ORDER BY rel.strength DESC, related.importance DESC
        LIMIT 20
        """

        result = await self.execute_query(query, {"memory_id": memory_id}, write=False)

        related_memories = []
        for record in result:
            memory = self._falkordblite_to_memory(record["related"])
            if memory:
                rel_type_str = record.get("rel_type", "RELATED_TO")
                rel_props = record.get("rel_props", {})

                # Unknown relationship names degrade to RELATED_TO.
                try:
                    rel_type = RelationshipType(rel_type_str)
                except ValueError:
                    rel_type = RelationshipType.RELATED_TO

                relationship = Relationship(
                    from_memory_id=memory_id,
                    to_memory_id=memory.id,
                    type=rel_type,
                    properties=RelationshipProperties(
                        strength=rel_props.get("strength", 0.5),
                        confidence=rel_props.get("confidence", 0.8),
                        context=rel_props.get("context"),
                        evidence_count=rel_props.get("evidence_count", 1)
                    )
                )
                related_memories.append((memory, relationship))

        logger.info(f"Found {len(related_memories)} related memories for {memory_id}")
        return related_memories

    except Exception as e:
        if isinstance(e, DatabaseConnectionError):
            raise
        logger.error(f"Failed to get related memories for {memory_id}: {e}")
        # Chain the original exception (PEP 3134) so the root cause survives.
        raise DatabaseConnectionError(f"Failed to get related memories: {e}") from e
|
|
580
|
+
|
|
581
|
+
async def get_memory_statistics(self) -> Dict[str, Any]:
    """
    Collect database statistics and metrics.

    Each statistic is computed independently; a failing query yields
    None for that entry rather than aborting the whole collection.

    Returns:
        Dictionary containing various database statistics

    Raises:
        DatabaseConnectionError: If query fails
    """
    stat_queries = {
        "total_memories": "MATCH (m:Memory) RETURN COUNT(m) as count",
        "memories_by_type": """
            MATCH (m:Memory)
            RETURN m.type as type, COUNT(m) as count
            ORDER BY count DESC
        """,
        "total_relationships": "MATCH ()-[r]->() RETURN COUNT(r) as count",
        "avg_importance": "MATCH (m:Memory) RETURN AVG(m.importance) as avg_importance",
        "avg_confidence": "MATCH (m:Memory) RETURN AVG(m.confidence) as avg_confidence",
    }

    stats: Dict[str, Any] = {}
    for name, cypher in stat_queries.items():
        try:
            rows = await self.execute_query(cypher, write=False)
            if name == "memories_by_type":
                # Pivot the per-type rows into a {type: count} mapping.
                stats[name] = {row["type"]: row["count"] for row in rows}
            else:
                stats[name] = rows[0] if rows else None
        except Exception as e:
            logger.error(f"Failed to get statistic {name}: {e}")
            stats[name] = None

    return stats
|
|
616
|
+
|
|
617
|
+
async def health_check(self) -> dict[str, Any]:
    """
    Check backend health and return status information.

    Always reports the static connection details; when connected, also
    attempts a memory count (failures become a "warning" entry instead
    of raising).

    Returns:
        Dictionary with health check results
    """
    info: dict[str, Any] = {
        "connected": self._connected,
        "backend_type": "falkordblite",
        "db_path": self.db_path,
        "graph_name": self.graph_name
    }

    if not self._connected:
        return info

    try:
        # Cheap liveness probe: count the Memory nodes.
        rows = await self.execute_query("MATCH (m:Memory) RETURN count(m) as count", write=False)
        if rows:
            info["statistics"] = {
                "memory_count": rows[0].get("count", 0)
            }
    except Exception as e:
        logger.warning(f"Could not get detailed health info: {e}")
        info["warning"] = str(e)

    return info
|
|
645
|
+
|
|
646
|
+
def backend_name(self) -> str:
    """Identifier reported for this backend implementation."""
    return "falkordblite"
|
|
649
|
+
|
|
650
|
+
def supports_fulltext_search(self) -> bool:
    """Report that this backend supports full-text search."""
    return True
|
|
653
|
+
|
|
654
|
+
def supports_transactions(self) -> bool:
    """Report that this backend supports ACID transactions."""
    return True
|
|
657
|
+
|
|
658
|
+
def _falkordblite_to_memory(self, node_data: Dict[str, Any]) -> Optional[Memory]:
    """
    Convert FalkorDBLite node data to Memory object.

    Any conversion failure (missing required field, bad timestamp,
    unknown type value) is logged and mapped to None rather than raised.

    Args:
        node_data: Dictionary of node properties from FalkorDBLite

    Returns:
        Memory object or None if conversion fails
    """
    try:
        # Extract basic memory fields.
        # NOTE(review): created_at/updated_at are assumed present as ISO
        # strings — fromisoformat(None) raises TypeError, which the
        # blanket except below converts to a logged None result.
        memory_data = {
            "id": node_data.get("id"),
            "type": MemoryType(node_data.get("type")),
            "title": node_data.get("title"),
            "content": node_data.get("content"),
            "summary": node_data.get("summary"),
            "tags": node_data.get("tags", []),
            "importance": node_data.get("importance", 0.5),
            "confidence": node_data.get("confidence", 0.8),
            "effectiveness": node_data.get("effectiveness"),
            "usage_count": node_data.get("usage_count", 0),
            "created_at": datetime.fromisoformat(node_data.get("created_at")),
            "updated_at": datetime.fromisoformat(node_data.get("updated_at")),
        }

        # Handle optional last_accessed field
        if node_data.get("last_accessed"):
            memory_data["last_accessed"] = datetime.fromisoformat(node_data["last_accessed"])

        # Extract context information: context fields were flattened onto
        # the node with a "context_" prefix when stored.
        context_data = {}
        for key, value in node_data.items():
            if key.startswith("context_") and value is not None:
                context_key = key[8:]  # Remove "context_" prefix

                # Deserialize JSON strings back to Python objects
                if isinstance(value, str) and context_key in ["additional_metadata"]:
                    try:
                        context_data[context_key] = json.loads(value)
                    except json.JSONDecodeError:
                        # Keep the raw string if it isn't valid JSON.
                        context_data[context_key] = value
                # Handle JSON-serialized lists/dicts (heuristic: the string
                # looks like a JSON array or object literal)
                elif isinstance(value, str) and value.startswith(('[', '{')):
                    try:
                        context_data[context_key] = json.loads(value)
                    except json.JSONDecodeError:
                        context_data[context_key] = value
                else:
                    context_data[context_key] = value

        if context_data:
            # Handle timestamp fields in context (stored as ISO strings).
            for time_field in ["timestamp"]:
                if time_field in context_data:
                    if isinstance(context_data[time_field], str):
                        context_data[time_field] = datetime.fromisoformat(context_data[time_field])

            memory_data["context"] = MemoryContext(**context_data)

        return Memory(**memory_data)

    except Exception as e:
        # Deliberate best-effort: a malformed node is dropped, not fatal.
        logger.error(f"Failed to convert FalkorDBLite node to Memory: {e}")
        return None
|
|
724
|
+
|
|
725
|
+
@classmethod
async def create(
    cls,
    db_path: Optional[str] = None,
    graph_name: str = "memorygraph"
) -> "FalkorDBLiteBackend":
    """
    Build a backend instance and open its connection in one step.

    Args:
        db_path: Path to database file
        graph_name: Name of the graph database

    Returns:
        Connected FalkorDBLiteBackend instance

    Raises:
        DatabaseConnectionError: If connection fails
    """
    instance = cls(db_path, graph_name)
    await instance.connect()
    return instance
|