memorygraphMCP 0.11.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memorygraph/__init__.py +50 -0
- memorygraph/__main__.py +12 -0
- memorygraph/advanced_tools.py +509 -0
- memorygraph/analytics/__init__.py +46 -0
- memorygraph/analytics/advanced_queries.py +727 -0
- memorygraph/backends/__init__.py +21 -0
- memorygraph/backends/base.py +179 -0
- memorygraph/backends/cloud.py +75 -0
- memorygraph/backends/cloud_backend.py +858 -0
- memorygraph/backends/factory.py +577 -0
- memorygraph/backends/falkordb_backend.py +749 -0
- memorygraph/backends/falkordblite_backend.py +746 -0
- memorygraph/backends/ladybugdb_backend.py +242 -0
- memorygraph/backends/memgraph_backend.py +327 -0
- memorygraph/backends/neo4j_backend.py +298 -0
- memorygraph/backends/sqlite_fallback.py +463 -0
- memorygraph/backends/turso.py +448 -0
- memorygraph/cli.py +743 -0
- memorygraph/cloud_database.py +297 -0
- memorygraph/config.py +295 -0
- memorygraph/database.py +933 -0
- memorygraph/graph_analytics.py +631 -0
- memorygraph/integration/__init__.py +69 -0
- memorygraph/integration/context_capture.py +426 -0
- memorygraph/integration/project_analysis.py +583 -0
- memorygraph/integration/workflow_tracking.py +492 -0
- memorygraph/intelligence/__init__.py +59 -0
- memorygraph/intelligence/context_retrieval.py +447 -0
- memorygraph/intelligence/entity_extraction.py +386 -0
- memorygraph/intelligence/pattern_recognition.py +420 -0
- memorygraph/intelligence/temporal.py +374 -0
- memorygraph/migration/__init__.py +27 -0
- memorygraph/migration/manager.py +579 -0
- memorygraph/migration/models.py +142 -0
- memorygraph/migration/scripts/__init__.py +17 -0
- memorygraph/migration/scripts/bitemporal_migration.py +595 -0
- memorygraph/migration/scripts/multitenancy_migration.py +452 -0
- memorygraph/migration_tools_module.py +146 -0
- memorygraph/models.py +684 -0
- memorygraph/proactive/__init__.py +46 -0
- memorygraph/proactive/outcome_learning.py +444 -0
- memorygraph/proactive/predictive.py +410 -0
- memorygraph/proactive/session_briefing.py +399 -0
- memorygraph/relationships.py +668 -0
- memorygraph/server.py +883 -0
- memorygraph/sqlite_database.py +1876 -0
- memorygraph/tools/__init__.py +59 -0
- memorygraph/tools/activity_tools.py +262 -0
- memorygraph/tools/memory_tools.py +315 -0
- memorygraph/tools/migration_tools.py +181 -0
- memorygraph/tools/relationship_tools.py +147 -0
- memorygraph/tools/search_tools.py +406 -0
- memorygraph/tools/temporal_tools.py +339 -0
- memorygraph/utils/__init__.py +10 -0
- memorygraph/utils/context_extractor.py +429 -0
- memorygraph/utils/error_handling.py +151 -0
- memorygraph/utils/export_import.py +425 -0
- memorygraph/utils/graph_algorithms.py +200 -0
- memorygraph/utils/pagination.py +149 -0
- memorygraph/utils/project_detection.py +133 -0
- memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
- memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
- memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
- memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
- memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
memorygraph/database.py
ADDED
|
@@ -0,0 +1,933 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Neo4j database connection and management for Claude Code Memory Server.
|
|
3
|
+
|
|
4
|
+
This module handles all database operations, connection management, and provides
|
|
5
|
+
a high-level interface for interacting with the Neo4j graph database.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import os
|
|
9
|
+
import logging
|
|
10
|
+
from typing import Dict, List, Optional, Any, Union, Tuple, TYPE_CHECKING
|
|
11
|
+
from contextlib import asynccontextmanager
|
|
12
|
+
import uuid
|
|
13
|
+
from datetime import datetime, timezone
|
|
14
|
+
|
|
15
|
+
# Lazy imports for neo4j - only imported when Neo4jConnection is instantiated
|
|
16
|
+
# This allows the package to work with SQLite backend without neo4j installed
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from neo4j import AsyncDriver
|
|
19
|
+
|
|
20
|
+
from .models import (
|
|
21
|
+
Memory, MemoryType, MemoryNode, Relationship, RelationshipType,
|
|
22
|
+
RelationshipProperties, SearchQuery, MemoryGraph, MemoryContext,
|
|
23
|
+
MemoryError, MemoryNotFoundError, RelationshipError,
|
|
24
|
+
ValidationError, DatabaseConnectionError, SchemaError, PaginatedResult
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Neo4jConnection:
    """Manages Neo4j database connection and async operations.

    The ``neo4j`` driver package is imported lazily (only inside
    ``connect()`` and the query helpers) so this module can be imported
    when only the SQLite backend is installed.
    """

    def __init__(
        self,
        uri: Optional[str] = None,
        user: Optional[str] = None,
        password: Optional[str] = None,
        database: str = "neo4j"
    ):
        """Initialize Neo4j connection settings (no I/O happens here).

        Args:
            uri: Neo4j database URI (defaults to NEO4J_URI env var or bolt://localhost:7687)
            user: Database username (defaults to NEO4J_USER env var or 'neo4j')
            password: Database password (defaults to NEO4J_PASSWORD env var)
            database: Database name (defaults to 'neo4j')

        Raises:
            DatabaseConnectionError: If password is not provided via parameter
                or the NEO4J_PASSWORD environment variable
        """
        self.uri = uri or os.getenv("NEO4J_URI", "bolt://localhost:7687")
        self.user = user or os.getenv("NEO4J_USER", "neo4j")
        self.password = password or os.getenv("NEO4J_PASSWORD")
        self.database = database
        self.driver: Optional["AsyncDriver"] = None

        if not self.password:
            raise DatabaseConnectionError(
                "Neo4j password must be provided via parameter or NEO4J_PASSWORD env var"
            )

    @staticmethod
    def _neo4j_error_class() -> type:
        """Return neo4j's Neo4jError class lazily.

        Falls back to ``Exception`` when the driver is not installed; in
        that case query code should never have been reached anyway.
        """
        try:
            from neo4j.exceptions import Neo4jError
            return Neo4jError
        except ImportError:
            return Exception

    async def connect(self) -> None:
        """Establish async connection to Neo4j and verify connectivity.

        Raises:
            DatabaseConnectionError: If the neo4j package is missing, the
                server is unreachable, authentication fails, or any other
                error occurs while connecting.
        """
        # Lazy import neo4j only when connecting.
        try:
            from neo4j import AsyncGraphDatabase
            from neo4j.exceptions import ServiceUnavailable, AuthError
        except ImportError as e:
            raise DatabaseConnectionError(
                "neo4j package is required for Neo4j backend. "
                "Install with: pip install neo4j"
            ) from e

        try:
            self.driver = AsyncGraphDatabase.driver(
                self.uri,
                auth=(self.user, self.password),
                max_connection_lifetime=30 * 60,  # 30 minutes
                max_connection_pool_size=50,
                connection_acquisition_timeout=30.0
            )

            # Fail fast with a clear error now rather than on the first query.
            await self.driver.verify_connectivity()
            logger.info(f"Successfully connected to Neo4j at {self.uri}")

        except ServiceUnavailable as e:
            logger.error(f"Failed to connect to Neo4j: {e}")
            raise DatabaseConnectionError(f"Failed to connect to Neo4j: {e}") from e
        except AuthError as e:
            logger.error(f"Authentication failed for Neo4j: {e}")
            raise DatabaseConnectionError(f"Authentication failed for Neo4j: {e}") from e
        except Exception as e:
            logger.error(f"Unexpected error connecting to Neo4j: {e}")
            raise DatabaseConnectionError(f"Unexpected error connecting to Neo4j: {e}") from e

    async def close(self) -> None:
        """Close the database connection (safe to call when not connected)."""
        if self.driver:
            await self.driver.close()
            self.driver = None
            logger.info("Neo4j connection closed")

    @asynccontextmanager
    async def session(self, database: Optional[str] = None):
        """Async context manager yielding a Neo4j session.

        Args:
            database: Database name; defaults to the one given at construction.

        Raises:
            DatabaseConnectionError: If not connected
        """
        if not self.driver:
            raise DatabaseConnectionError("Not connected to Neo4j. Call connect() first.")

        session = self.driver.session(database=database or self.database)
        try:
            yield session
        finally:
            await session.close()

    async def execute_write_query(
        self,
        query: str,
        parameters: Optional[Dict[str, Any]] = None,
        database: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """Execute a write query in a managed write transaction.

        Args:
            query: Cypher query string
            parameters: Query parameters
            database: Database name (optional)

        Returns:
            List of result records as dictionaries

        Raises:
            DatabaseConnectionError: If query execution fails
        """
        neo4j_error = self._neo4j_error_class()
        try:
            async with self.session(database) as session:
                return await session.execute_write(
                    self._run_query_async, query, parameters or {}
                )
        except neo4j_error as e:
            logger.error(f"Write query failed: {e}")
            raise DatabaseConnectionError(f"Write query failed: {e}") from e

    async def execute_read_query(
        self,
        query: str,
        parameters: Optional[Dict[str, Any]] = None,
        database: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """Execute a read query in a managed read transaction.

        Args:
            query: Cypher query string
            parameters: Query parameters
            database: Database name (optional)

        Returns:
            List of result records as dictionaries

        Raises:
            DatabaseConnectionError: If query execution fails
        """
        neo4j_error = self._neo4j_error_class()
        try:
            async with self.session(database) as session:
                return await session.execute_read(
                    self._run_query_async, query, parameters or {}
                )
        except neo4j_error as e:
            logger.error(f"Read query failed: {e}")
            raise DatabaseConnectionError(f"Read query failed: {e}") from e

    @staticmethod
    async def _run_query_async(tx, query: str, parameters: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Run a query within an async transaction and materialize its records.

        Args:
            tx: Transaction object
            query: Cypher query string
            parameters: Query parameters

        Returns:
            List of result records as dictionaries
        """
        result = await tx.run(query, parameters)
        records = await result.data()
        return records
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
class MemoryDatabase:
|
|
215
|
+
"""High-level interface for memory database operations."""
|
|
216
|
+
|
|
217
|
+
def __init__(self, connection):
|
|
218
|
+
"""
|
|
219
|
+
Initialize with a database backend connection.
|
|
220
|
+
|
|
221
|
+
Args:
|
|
222
|
+
connection: Database backend connection (Neo4jConnection or GraphBackend).
|
|
223
|
+
Must provide execute_write_query and execute_read_query methods.
|
|
224
|
+
"""
|
|
225
|
+
self.connection = connection
|
|
226
|
+
|
|
227
|
+
async def initialize_schema(self) -> None:
|
|
228
|
+
"""Create database schema, constraints, and indexes.
|
|
229
|
+
|
|
230
|
+
Raises:
|
|
231
|
+
SchemaError: If schema creation fails
|
|
232
|
+
"""
|
|
233
|
+
logger.info("Initializing Neo4j schema for Claude Memory...")
|
|
234
|
+
|
|
235
|
+
# Create constraints
|
|
236
|
+
constraints = [
|
|
237
|
+
"CREATE CONSTRAINT memory_id_unique IF NOT EXISTS FOR (m:Memory) REQUIRE m.id IS UNIQUE",
|
|
238
|
+
"CREATE CONSTRAINT relationship_id_unique IF NOT EXISTS FOR (r:RELATIONSHIP) REQUIRE r.id IS UNIQUE",
|
|
239
|
+
]
|
|
240
|
+
|
|
241
|
+
# Create indexes for performance
|
|
242
|
+
indexes = [
|
|
243
|
+
"CREATE INDEX memory_type_index IF NOT EXISTS FOR (m:Memory) ON (m.type)",
|
|
244
|
+
"CREATE INDEX memory_created_at_index IF NOT EXISTS FOR (m:Memory) ON (m.created_at)",
|
|
245
|
+
"CREATE INDEX memory_tags_index IF NOT EXISTS FOR (m:Memory) ON (m.tags)",
|
|
246
|
+
"CREATE FULLTEXT INDEX memory_content_index IF NOT EXISTS FOR (m:Memory) ON EACH [m.title, m.content, m.summary]",
|
|
247
|
+
"CREATE INDEX memory_importance_index IF NOT EXISTS FOR (m:Memory) ON (m.importance)",
|
|
248
|
+
"CREATE INDEX memory_confidence_index IF NOT EXISTS FOR (m:Memory) ON (m.confidence)",
|
|
249
|
+
"CREATE INDEX memory_project_path_index IF NOT EXISTS FOR (m:Memory) ON (m.context_project_path)",
|
|
250
|
+
]
|
|
251
|
+
|
|
252
|
+
# Execute schema creation
|
|
253
|
+
for constraint in constraints:
|
|
254
|
+
try:
|
|
255
|
+
await self.connection.execute_write_query(constraint)
|
|
256
|
+
logger.debug(f"Created constraint: {constraint}")
|
|
257
|
+
except Exception as e:
|
|
258
|
+
if "already exists" not in str(e).lower():
|
|
259
|
+
logger.warning(f"Failed to create constraint: {e}")
|
|
260
|
+
|
|
261
|
+
for index in indexes:
|
|
262
|
+
try:
|
|
263
|
+
await self.connection.execute_write_query(index)
|
|
264
|
+
logger.debug(f"Created index: {index}")
|
|
265
|
+
except Exception as e:
|
|
266
|
+
if "already exists" not in str(e).lower():
|
|
267
|
+
logger.warning(f"Failed to create index: {e}")
|
|
268
|
+
|
|
269
|
+
logger.info("Schema initialization completed")
|
|
270
|
+
|
|
271
|
+
async def store_memory(self, memory: Memory) -> str:
|
|
272
|
+
"""Store a memory in the database and return its ID.
|
|
273
|
+
|
|
274
|
+
Args:
|
|
275
|
+
memory: Memory object to store
|
|
276
|
+
|
|
277
|
+
Returns:
|
|
278
|
+
ID of the stored memory
|
|
279
|
+
|
|
280
|
+
Raises:
|
|
281
|
+
ValidationError: If memory data is invalid
|
|
282
|
+
DatabaseConnectionError: If storage fails
|
|
283
|
+
"""
|
|
284
|
+
try:
|
|
285
|
+
if not memory.id:
|
|
286
|
+
memory.id = str(uuid.uuid4())
|
|
287
|
+
|
|
288
|
+
memory.updated_at = datetime.now(timezone.utc)
|
|
289
|
+
|
|
290
|
+
# Convert memory to Neo4j properties
|
|
291
|
+
memory_node = MemoryNode(memory=memory)
|
|
292
|
+
properties = memory_node.to_neo4j_properties()
|
|
293
|
+
|
|
294
|
+
query = """
|
|
295
|
+
MERGE (m:Memory {id: $id})
|
|
296
|
+
SET m += $properties
|
|
297
|
+
RETURN m.id as id
|
|
298
|
+
"""
|
|
299
|
+
|
|
300
|
+
result = await self.connection.execute_write_query(
|
|
301
|
+
query,
|
|
302
|
+
{"id": memory.id, "properties": properties}
|
|
303
|
+
)
|
|
304
|
+
|
|
305
|
+
if result:
|
|
306
|
+
logger.info(f"Stored memory: {memory.id} ({memory.type})")
|
|
307
|
+
return result[0]["id"]
|
|
308
|
+
else:
|
|
309
|
+
raise DatabaseConnectionError(f"Failed to store memory: {memory.id}")
|
|
310
|
+
|
|
311
|
+
except Exception as e:
|
|
312
|
+
if isinstance(e, (DatabaseConnectionError, ValidationError)):
|
|
313
|
+
raise
|
|
314
|
+
logger.error(f"Failed to store memory: {e}")
|
|
315
|
+
raise DatabaseConnectionError(f"Failed to store memory: {e}")
|
|
316
|
+
|
|
317
|
+
async def get_memory(self, memory_id: str, include_relationships: bool = True) -> Optional[Memory]:
|
|
318
|
+
"""Retrieve a memory by ID.
|
|
319
|
+
|
|
320
|
+
Args:
|
|
321
|
+
memory_id: ID of the memory to retrieve
|
|
322
|
+
include_relationships: Whether to include relationships (not currently used)
|
|
323
|
+
|
|
324
|
+
Returns:
|
|
325
|
+
Memory object if found, None otherwise
|
|
326
|
+
|
|
327
|
+
Raises:
|
|
328
|
+
DatabaseConnectionError: If query fails
|
|
329
|
+
"""
|
|
330
|
+
try:
|
|
331
|
+
query = """
|
|
332
|
+
MATCH (m:Memory {id: $memory_id})
|
|
333
|
+
RETURN m
|
|
334
|
+
"""
|
|
335
|
+
|
|
336
|
+
result = await self.connection.execute_read_query(query, {"memory_id": memory_id})
|
|
337
|
+
|
|
338
|
+
if not result:
|
|
339
|
+
return None
|
|
340
|
+
|
|
341
|
+
memory_data = result[0]["m"]
|
|
342
|
+
return self._neo4j_to_memory(memory_data)
|
|
343
|
+
|
|
344
|
+
except Exception as e:
|
|
345
|
+
if isinstance(e, DatabaseConnectionError):
|
|
346
|
+
raise
|
|
347
|
+
logger.error(f"Failed to get memory {memory_id}: {e}")
|
|
348
|
+
raise DatabaseConnectionError(f"Failed to get memory: {e}")
|
|
349
|
+
|
|
350
|
+
async def search_memories(self, search_query: SearchQuery) -> List[Memory]:
|
|
351
|
+
"""Search for memories based on query parameters.
|
|
352
|
+
|
|
353
|
+
Args:
|
|
354
|
+
search_query: SearchQuery object with filter criteria
|
|
355
|
+
|
|
356
|
+
Returns:
|
|
357
|
+
List of Memory objects matching the search criteria
|
|
358
|
+
|
|
359
|
+
Raises:
|
|
360
|
+
DatabaseConnectionError: If search fails
|
|
361
|
+
"""
|
|
362
|
+
try:
|
|
363
|
+
conditions = []
|
|
364
|
+
parameters = {}
|
|
365
|
+
|
|
366
|
+
# Build WHERE conditions based on search parameters
|
|
367
|
+
if search_query.query:
|
|
368
|
+
conditions.append("(m.title CONTAINS $query OR m.content CONTAINS $query OR m.summary CONTAINS $query)")
|
|
369
|
+
parameters["query"] = search_query.query
|
|
370
|
+
|
|
371
|
+
if search_query.memory_types:
|
|
372
|
+
conditions.append("m.type IN $memory_types")
|
|
373
|
+
parameters["memory_types"] = [t.value for t in search_query.memory_types]
|
|
374
|
+
|
|
375
|
+
if search_query.tags:
|
|
376
|
+
conditions.append("ANY(tag IN $tags WHERE tag IN m.tags)")
|
|
377
|
+
parameters["tags"] = search_query.tags
|
|
378
|
+
|
|
379
|
+
if search_query.project_path:
|
|
380
|
+
conditions.append("m.context_project_path = $project_path")
|
|
381
|
+
parameters["project_path"] = search_query.project_path
|
|
382
|
+
|
|
383
|
+
if search_query.min_importance is not None:
|
|
384
|
+
conditions.append("m.importance >= $min_importance")
|
|
385
|
+
parameters["min_importance"] = search_query.min_importance
|
|
386
|
+
|
|
387
|
+
if search_query.min_confidence is not None:
|
|
388
|
+
conditions.append("m.confidence >= $min_confidence")
|
|
389
|
+
parameters["min_confidence"] = search_query.min_confidence
|
|
390
|
+
|
|
391
|
+
if search_query.created_after:
|
|
392
|
+
conditions.append("datetime(m.created_at) >= datetime($created_after)")
|
|
393
|
+
parameters["created_after"] = search_query.created_after.isoformat()
|
|
394
|
+
|
|
395
|
+
if search_query.created_before:
|
|
396
|
+
conditions.append("datetime(m.created_at) <= datetime($created_before)")
|
|
397
|
+
parameters["created_before"] = search_query.created_before.isoformat()
|
|
398
|
+
|
|
399
|
+
# Build the complete query
|
|
400
|
+
where_clause = " AND ".join(conditions) if conditions else "true"
|
|
401
|
+
|
|
402
|
+
query = f"""
|
|
403
|
+
MATCH (m:Memory)
|
|
404
|
+
WHERE {where_clause}
|
|
405
|
+
RETURN m
|
|
406
|
+
ORDER BY m.importance DESC, m.created_at DESC
|
|
407
|
+
LIMIT $limit
|
|
408
|
+
"""
|
|
409
|
+
|
|
410
|
+
parameters["limit"] = search_query.limit
|
|
411
|
+
|
|
412
|
+
result = await self.connection.execute_read_query(query, parameters)
|
|
413
|
+
|
|
414
|
+
memories = []
|
|
415
|
+
for record in result:
|
|
416
|
+
memory = self._neo4j_to_memory(record["m"])
|
|
417
|
+
if memory:
|
|
418
|
+
memories.append(memory)
|
|
419
|
+
|
|
420
|
+
logger.info(f"Found {len(memories)} memories for search query")
|
|
421
|
+
return memories
|
|
422
|
+
|
|
423
|
+
except Exception as e:
|
|
424
|
+
if isinstance(e, DatabaseConnectionError):
|
|
425
|
+
raise
|
|
426
|
+
logger.error(f"Failed to search memories: {e}")
|
|
427
|
+
raise DatabaseConnectionError(f"Failed to search memories: {e}")
|
|
428
|
+
|
|
429
|
+
async def search_memories_paginated(self, search_query: SearchQuery) -> PaginatedResult:
|
|
430
|
+
"""Search for memories with pagination support.
|
|
431
|
+
|
|
432
|
+
Args:
|
|
433
|
+
search_query: SearchQuery object with filter criteria, limit, and offset
|
|
434
|
+
|
|
435
|
+
Returns:
|
|
436
|
+
PaginatedResult with memories and pagination metadata
|
|
437
|
+
|
|
438
|
+
Raises:
|
|
439
|
+
DatabaseConnectionError: If search fails
|
|
440
|
+
"""
|
|
441
|
+
try:
|
|
442
|
+
conditions = []
|
|
443
|
+
parameters = {}
|
|
444
|
+
|
|
445
|
+
# Build WHERE conditions based on search parameters (same as search_memories)
|
|
446
|
+
if search_query.query:
|
|
447
|
+
conditions.append("(m.title CONTAINS $query OR m.content CONTAINS $query OR m.summary CONTAINS $query)")
|
|
448
|
+
parameters["query"] = search_query.query
|
|
449
|
+
|
|
450
|
+
if search_query.memory_types:
|
|
451
|
+
conditions.append("m.type IN $memory_types")
|
|
452
|
+
parameters["memory_types"] = [t.value for t in search_query.memory_types]
|
|
453
|
+
|
|
454
|
+
if search_query.tags:
|
|
455
|
+
conditions.append("ANY(tag IN $tags WHERE tag IN m.tags)")
|
|
456
|
+
parameters["tags"] = search_query.tags
|
|
457
|
+
|
|
458
|
+
if search_query.project_path:
|
|
459
|
+
conditions.append("m.context_project_path = $project_path")
|
|
460
|
+
parameters["project_path"] = search_query.project_path
|
|
461
|
+
|
|
462
|
+
if search_query.min_importance is not None:
|
|
463
|
+
conditions.append("m.importance >= $min_importance")
|
|
464
|
+
parameters["min_importance"] = search_query.min_importance
|
|
465
|
+
|
|
466
|
+
if search_query.min_confidence is not None:
|
|
467
|
+
conditions.append("m.confidence >= $min_confidence")
|
|
468
|
+
parameters["min_confidence"] = search_query.min_confidence
|
|
469
|
+
|
|
470
|
+
if search_query.created_after:
|
|
471
|
+
conditions.append("datetime(m.created_at) >= datetime($created_after)")
|
|
472
|
+
parameters["created_after"] = search_query.created_after.isoformat()
|
|
473
|
+
|
|
474
|
+
if search_query.created_before:
|
|
475
|
+
conditions.append("datetime(m.created_at) <= datetime($created_before)")
|
|
476
|
+
parameters["created_before"] = search_query.created_before.isoformat()
|
|
477
|
+
|
|
478
|
+
where_clause = " AND ".join(conditions) if conditions else "true"
|
|
479
|
+
|
|
480
|
+
# First, get the total count
|
|
481
|
+
count_query = f"""
|
|
482
|
+
MATCH (m:Memory)
|
|
483
|
+
WHERE {where_clause}
|
|
484
|
+
RETURN count(m) as total_count
|
|
485
|
+
"""
|
|
486
|
+
|
|
487
|
+
count_result = await self.connection.execute_read_query(count_query, parameters)
|
|
488
|
+
total_count = count_result[0]["total_count"] if count_result else 0
|
|
489
|
+
|
|
490
|
+
# Then get the paginated results
|
|
491
|
+
results_query = f"""
|
|
492
|
+
MATCH (m:Memory)
|
|
493
|
+
WHERE {where_clause}
|
|
494
|
+
RETURN m
|
|
495
|
+
ORDER BY m.importance DESC, m.created_at DESC
|
|
496
|
+
SKIP $offset
|
|
497
|
+
LIMIT $limit
|
|
498
|
+
"""
|
|
499
|
+
|
|
500
|
+
parameters["offset"] = search_query.offset
|
|
501
|
+
parameters["limit"] = search_query.limit
|
|
502
|
+
|
|
503
|
+
result = await self.connection.execute_read_query(results_query, parameters)
|
|
504
|
+
|
|
505
|
+
memories = []
|
|
506
|
+
for record in result:
|
|
507
|
+
memory = self._neo4j_to_memory(record["m"])
|
|
508
|
+
if memory:
|
|
509
|
+
memories.append(memory)
|
|
510
|
+
|
|
511
|
+
# Calculate pagination metadata
|
|
512
|
+
has_more = (search_query.offset + search_query.limit) < total_count
|
|
513
|
+
next_offset = (search_query.offset + search_query.limit) if has_more else None
|
|
514
|
+
|
|
515
|
+
logger.info(f"Found {len(memories)} memories (page {search_query.offset}-{search_query.offset + len(memories)} of {total_count})")
|
|
516
|
+
|
|
517
|
+
return PaginatedResult(
|
|
518
|
+
results=memories,
|
|
519
|
+
total_count=total_count,
|
|
520
|
+
limit=search_query.limit,
|
|
521
|
+
offset=search_query.offset,
|
|
522
|
+
has_more=has_more,
|
|
523
|
+
next_offset=next_offset
|
|
524
|
+
)
|
|
525
|
+
|
|
526
|
+
except Exception as e:
|
|
527
|
+
if isinstance(e, DatabaseConnectionError):
|
|
528
|
+
raise
|
|
529
|
+
logger.error(f"Failed to search memories (paginated): {e}")
|
|
530
|
+
raise DatabaseConnectionError(f"Failed to search memories (paginated): {e}")
|
|
531
|
+
|
|
532
|
+
async def update_memory(self, memory: Memory) -> bool:
|
|
533
|
+
"""Update an existing memory.
|
|
534
|
+
|
|
535
|
+
Args:
|
|
536
|
+
memory: Memory object with updated fields
|
|
537
|
+
|
|
538
|
+
Returns:
|
|
539
|
+
True if update succeeded, False otherwise
|
|
540
|
+
|
|
541
|
+
Raises:
|
|
542
|
+
ValidationError: If memory ID is missing
|
|
543
|
+
DatabaseConnectionError: If update fails
|
|
544
|
+
"""
|
|
545
|
+
try:
|
|
546
|
+
if not memory.id:
|
|
547
|
+
raise ValidationError("Memory must have an ID to update")
|
|
548
|
+
|
|
549
|
+
memory.updated_at = datetime.now(timezone.utc)
|
|
550
|
+
|
|
551
|
+
# Convert memory to Neo4j properties
|
|
552
|
+
memory_node = MemoryNode(memory=memory)
|
|
553
|
+
properties = memory_node.to_neo4j_properties()
|
|
554
|
+
|
|
555
|
+
query = """
|
|
556
|
+
MATCH (m:Memory {id: $id})
|
|
557
|
+
SET m += $properties
|
|
558
|
+
RETURN m.id as id
|
|
559
|
+
"""
|
|
560
|
+
|
|
561
|
+
result = await self.connection.execute_write_query(
|
|
562
|
+
query,
|
|
563
|
+
{"id": memory.id, "properties": properties}
|
|
564
|
+
)
|
|
565
|
+
|
|
566
|
+
success = len(result) > 0
|
|
567
|
+
if success:
|
|
568
|
+
logger.info(f"Updated memory: {memory.id}")
|
|
569
|
+
|
|
570
|
+
return success
|
|
571
|
+
|
|
572
|
+
except Exception as e:
|
|
573
|
+
if isinstance(e, (ValidationError, DatabaseConnectionError)):
|
|
574
|
+
raise
|
|
575
|
+
logger.error(f"Failed to update memory {memory.id}: {e}")
|
|
576
|
+
raise DatabaseConnectionError(f"Failed to update memory: {e}")
|
|
577
|
+
|
|
578
|
+
async def delete_memory(self, memory_id: str) -> bool:
|
|
579
|
+
"""Delete a memory and all its relationships.
|
|
580
|
+
|
|
581
|
+
Args:
|
|
582
|
+
memory_id: ID of the memory to delete
|
|
583
|
+
|
|
584
|
+
Returns:
|
|
585
|
+
True if deletion succeeded, False otherwise
|
|
586
|
+
|
|
587
|
+
Raises:
|
|
588
|
+
DatabaseConnectionError: If deletion fails
|
|
589
|
+
"""
|
|
590
|
+
try:
|
|
591
|
+
query = """
|
|
592
|
+
MATCH (m:Memory {id: $memory_id})
|
|
593
|
+
DETACH DELETE m
|
|
594
|
+
RETURN COUNT(m) as deleted_count
|
|
595
|
+
"""
|
|
596
|
+
|
|
597
|
+
result = await self.connection.execute_write_query(query, {"memory_id": memory_id})
|
|
598
|
+
|
|
599
|
+
success = result and result[0]["deleted_count"] > 0
|
|
600
|
+
if success:
|
|
601
|
+
logger.info(f"Deleted memory: {memory_id}")
|
|
602
|
+
|
|
603
|
+
return success
|
|
604
|
+
|
|
605
|
+
except Exception as e:
|
|
606
|
+
if isinstance(e, DatabaseConnectionError):
|
|
607
|
+
raise
|
|
608
|
+
logger.error(f"Failed to delete memory {memory_id}: {e}")
|
|
609
|
+
raise DatabaseConnectionError(f"Failed to delete memory: {e}")
|
|
610
|
+
|
|
611
|
+
async def create_relationship(
|
|
612
|
+
self,
|
|
613
|
+
from_memory_id: str,
|
|
614
|
+
to_memory_id: str,
|
|
615
|
+
relationship_type: RelationshipType,
|
|
616
|
+
properties: RelationshipProperties = None
|
|
617
|
+
) -> str:
|
|
618
|
+
"""Create a relationship between two memories.
|
|
619
|
+
|
|
620
|
+
Args:
|
|
621
|
+
from_memory_id: Source memory ID
|
|
622
|
+
to_memory_id: Target memory ID
|
|
623
|
+
relationship_type: Type of relationship
|
|
624
|
+
properties: Relationship properties (optional)
|
|
625
|
+
|
|
626
|
+
Returns:
|
|
627
|
+
ID of the created relationship
|
|
628
|
+
|
|
629
|
+
Raises:
|
|
630
|
+
RelationshipError: If relationship creation fails
|
|
631
|
+
DatabaseConnectionError: If database operation fails
|
|
632
|
+
"""
|
|
633
|
+
try:
|
|
634
|
+
relationship_id = str(uuid.uuid4())
|
|
635
|
+
|
|
636
|
+
if properties is None:
|
|
637
|
+
properties = RelationshipProperties()
|
|
638
|
+
|
|
639
|
+
# Convert properties to dict for Neo4j
|
|
640
|
+
props_dict = properties.model_dump()
|
|
641
|
+
props_dict['id'] = relationship_id
|
|
642
|
+
props_dict['created_at'] = props_dict['created_at'].isoformat()
|
|
643
|
+
props_dict['last_validated'] = props_dict['last_validated'].isoformat()
|
|
644
|
+
|
|
645
|
+
query = f"""
|
|
646
|
+
MATCH (from:Memory {{id: $from_id}})
|
|
647
|
+
MATCH (to:Memory {{id: $to_id}})
|
|
648
|
+
CREATE (from)-[r:{relationship_type.value} $properties]->(to)
|
|
649
|
+
RETURN r.id as id
|
|
650
|
+
"""
|
|
651
|
+
|
|
652
|
+
result = await self.connection.execute_write_query(
|
|
653
|
+
query,
|
|
654
|
+
{
|
|
655
|
+
"from_id": from_memory_id,
|
|
656
|
+
"to_id": to_memory_id,
|
|
657
|
+
"properties": props_dict
|
|
658
|
+
}
|
|
659
|
+
)
|
|
660
|
+
|
|
661
|
+
if result:
|
|
662
|
+
logger.info(f"Created relationship: {relationship_type.value} between {from_memory_id} and {to_memory_id}")
|
|
663
|
+
return result[0]["id"]
|
|
664
|
+
else:
|
|
665
|
+
raise RelationshipError(
|
|
666
|
+
f"Failed to create relationship between {from_memory_id} and {to_memory_id}",
|
|
667
|
+
{"from_id": from_memory_id, "to_id": to_memory_id, "type": relationship_type.value}
|
|
668
|
+
)
|
|
669
|
+
|
|
670
|
+
except Exception as e:
|
|
671
|
+
if isinstance(e, (RelationshipError, DatabaseConnectionError)):
|
|
672
|
+
raise
|
|
673
|
+
logger.error(f"Failed to create relationship: {e}")
|
|
674
|
+
raise RelationshipError(f"Failed to create relationship: {e}")
|
|
675
|
+
|
|
676
|
+
    async def get_related_memories(
        self,
        memory_id: str,
        relationship_types: Optional[List[RelationshipType]] = None,
        max_depth: int = 2
    ) -> List[Tuple[Memory, Relationship]]:
        """Get memories related to a specific memory via graph traversal.

        Traverses undirected variable-length paths (1..max_depth hops) from
        the start node, returning up to 20 related memories paired with the
        first-hop relationship of the path that reached them.

        Args:
            memory_id: ID of the memory to find relations for
            relationship_types: Filter by specific relationship types (optional)
            max_depth: Maximum depth for graph traversal

        Returns:
            List of tuples containing (Memory, Relationship)

        Raises:
            DatabaseConnectionError: If query fails
        """
        try:
            # Build relationship type filter, e.g. ":CAUSES|RELATED_TO".
            # Type names cannot be Cypher parameters, so they are interpolated;
            # values come from a closed enum, not user input.
            rel_filter = ""
            if relationship_types:
                rel_types = "|".join([rt.value for rt in relationship_types])
                rel_filter = f":{rel_types}"

            # Query to capture both outgoing and incoming relationships with proper direction.
            # We query in both directions and capture the actual source/target nodes.
            # NOTE: `r` binds the whole relationship list of the variable-length
            # pattern; `r[0]` is the FIRST hop only, so for multi-hop paths the
            # reported relationship is the one adjacent to the start node.
            # NOTE(review): ORDER BY rel.strength assumes every relationship
            # carries a `strength` property — TODO confirm against the writers.
            query = f"""
            MATCH (start:Memory {{id: $memory_id}})
            MATCH path = (start)-[r{rel_filter}*1..{max_depth}]-(related:Memory)
            WHERE related.id <> start.id
            WITH DISTINCT related, r[0] as rel,
                 startNode(rel) as source,
                 endNode(rel) as target
            RETURN related,
                   type(rel) as rel_type,
                   properties(rel) as rel_props,
                   source.id as from_id,
                   target.id as to_id
            ORDER BY rel.strength DESC, related.importance DESC
            LIMIT 20
            """

            result = await self.connection.execute_read_query(query, {"memory_id": memory_id})

            related_memories = []
            for record in result:
                memory = self._neo4j_to_memory(record["related"])
                if memory:
                    # Properly extract relationship type, properties, and direction.
                    rel_type_str = record.get("rel_type", "RELATED_TO")
                    rel_props = record.get("rel_props", {})
                    from_id = record.get("from_id")
                    to_id = record.get("to_id")

                    # Fallback: if from_id/to_id are not provided, infer from query.
                    # This happens in older implementations or mocked tests.
                    if not from_id or not to_id:
                        # We don't know the direction, so skip this relationship
                        # rather than guess and report a wrong edge direction.
                        logger.warning(
                            f"Relationship direction not provided in query result, "
                            f"skipping relationship to {memory.id}"
                        )
                        continue

                    # Unknown type strings degrade gracefully to RELATED_TO.
                    try:
                        rel_type = RelationshipType(rel_type_str)
                    except ValueError:
                        rel_type = RelationshipType.RELATED_TO

                    relationship = Relationship(
                        from_memory_id=from_id,
                        to_memory_id=to_id,
                        type=rel_type,
                        properties=RelationshipProperties(
                            # Defaults mirror the model's fallback values for
                            # edges written without explicit properties.
                            strength=rel_props.get("strength", 0.5),
                            confidence=rel_props.get("confidence", 0.8),
                            context=rel_props.get("context"),
                            evidence_count=rel_props.get("evidence_count", 1)
                        )
                    )
                    related_memories.append((memory, relationship))

            logger.info(f"Found {len(related_memories)} related memories for {memory_id}")
            return related_memories

        except Exception as e:
            if isinstance(e, DatabaseConnectionError):
                raise
            logger.error(f"Failed to get related memories for {memory_id}: {e}")
            raise DatabaseConnectionError(f"Failed to get related memories: {e}")
|
|
769
|
+
|
|
770
|
+
def _neo4j_to_memory(self, node_data: Dict[str, Any]) -> Optional[Memory]:
    """Convert a Neo4j node's property map into a Memory object.

    Returns None (after logging the error) when conversion fails for any
    reason — e.g. a missing/invalid timestamp or an unrecognized type value.
    """
    import json

    def _decode_json(raw: str) -> Any:
        # Best-effort JSON decode; keep the raw string when it is not valid JSON.
        try:
            return json.loads(raw)
        except json.JSONDecodeError:
            return raw

    try:
        # Core memory fields, with the same defaults the writer side uses.
        fields: Dict[str, Any] = {
            "id": node_data.get("id"),
            "type": MemoryType(node_data.get("type")),
            "title": node_data.get("title"),
            "content": node_data.get("content"),
            "summary": node_data.get("summary"),
            "tags": node_data.get("tags", []),
            "importance": node_data.get("importance", 0.5),
            "confidence": node_data.get("confidence", 0.8),
            "effectiveness": node_data.get("effectiveness"),
            "usage_count": node_data.get("usage_count", 0),
            "created_at": datetime.fromisoformat(node_data.get("created_at")),
            "updated_at": datetime.fromisoformat(node_data.get("updated_at")),
        }

        # last_accessed is optional on the node.
        if node_data.get("last_accessed"):
            fields["last_accessed"] = datetime.fromisoformat(node_data["last_accessed"])

        # Context is stored flattened on the node as "context_<key>" properties.
        context: Dict[str, Any] = {}
        prefix = "context_"
        for prop, value in node_data.items():
            if value is None or not prop.startswith(prefix):
                continue
            ctx_key = prop[len(prefix):]
            # Strings that were JSON-serialized on write (additional_metadata,
            # or anything that looks like a list/dict literal) are decoded back.
            serialized = isinstance(value, str) and (
                ctx_key in ["additional_metadata"] or value.startswith(('[', '{'))
            )
            context[ctx_key] = _decode_json(value) if serialized else value

        if context:
            # Revive ISO-8601 timestamp strings stored in the context.
            ts = context.get("timestamp")
            if isinstance(ts, str):
                context["timestamp"] = datetime.fromisoformat(ts)
            fields["context"] = MemoryContext(**context)

        return Memory(**fields)

    except Exception as e:
        logger.error(f"Failed to convert Neo4j node to Memory: {e}")
        return None
|
|
829
|
+
|
|
830
|
+
async def update_relationship_properties(
    self,
    from_memory_id: str,
    to_memory_id: str,
    relationship_type: RelationshipType,
    properties: RelationshipProperties
) -> bool:
    """Update properties of an existing relationship.

    Args:
        from_memory_id: Source memory ID
        to_memory_id: Target memory ID
        relationship_type: Type of relationship to update
        properties: Updated relationship properties

    Returns:
        bool: True if the update succeeded (failures raise rather than
        returning False)

    Raises:
        DatabaseConnectionError: If the query fails
        RelationshipError: If the relationship is not found
    """
    try:
        # Serialize the properties model; Neo4j cannot store datetime
        # objects from this path, so convert them to ISO-8601 strings.
        props_dict = properties.model_dump()
        for key in ['created_at', 'last_validated']:
            if key in props_dict and props_dict[key]:
                props_dict[key] = props_dict[key].isoformat()

        # Cypher does not support parameterized relationship types, so the
        # type must be interpolated into the query text. This is safe from
        # injection because relationship_type is a RelationshipType enum
        # member, never free-form user input.
        query = f"""
        MATCH (from:Memory {{id: $from_id}})-[r:{relationship_type.value}]->(to:Memory {{id: $to_id}})
        SET r += $props
        RETURN r
        """

        result = await self.connection.execute_write_query(
            query,
            {
                "from_id": from_memory_id,
                "to_id": to_memory_id,
                "props": props_dict
            }
        )

        # MATCH returned nothing -> the relationship does not exist.
        if not result:
            raise RelationshipError(
                f"Relationship not found: {from_memory_id} -{relationship_type.value}-> {to_memory_id}"
            )

        logger.info(
            f"Updated relationship {from_memory_id} -{relationship_type.value}-> {to_memory_id}"
        )
        return True

    except Exception as e:
        logger.error(f"Failed to update relationship: {e}")
        # Preserve the specific not-found error; wrap everything else.
        if isinstance(e, RelationshipError):
            raise
        raise DatabaseConnectionError(f"Failed to update relationship: {str(e)}")
|
|
899
|
+
|
|
900
|
+
async def get_memory_statistics(self) -> Dict[str, Any]:
    """Get database statistics and metrics.

    Runs one read query per statistic. A failing statistic is logged and
    recorded as None rather than aborting the whole report.

    Returns:
        Dictionary mapping each statistic name to its query result:
        "memories_by_type" maps type -> count; the other entries hold the
        first result record (or None when the query returned nothing).
    """
    stat_queries = {
        "total_memories": "MATCH (m:Memory) RETURN COUNT(m) as count",
        "memories_by_type": """
            MATCH (m:Memory)
            RETURN m.type as type, COUNT(m) as count
            ORDER BY count DESC
        """,
        "total_relationships": "MATCH ()-[r]->() RETURN COUNT(r) as count",
        "avg_importance": "MATCH (m:Memory) RETURN AVG(m.importance) as avg_importance",
        "avg_confidence": "MATCH (m:Memory) RETURN AVG(m.confidence) as avg_confidence",
    }

    stats: Dict[str, Any] = {}
    for name, cypher in stat_queries.items():
        try:
            rows = await self.connection.execute_read_query(cypher)
            if name == "memories_by_type":
                value = {row["type"]: row["count"] for row in rows}
            else:
                value = rows[0] if rows else None
        except Exception as e:
            # Best-effort: keep collecting the remaining statistics.
            logger.error(f"Failed to get statistic {name}: {e}")
            value = None
        stats[name] = value

    return stats
|