memorygraphmcp-0.11.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. memorygraph/__init__.py +50 -0
  2. memorygraph/__main__.py +12 -0
  3. memorygraph/advanced_tools.py +509 -0
  4. memorygraph/analytics/__init__.py +46 -0
  5. memorygraph/analytics/advanced_queries.py +727 -0
  6. memorygraph/backends/__init__.py +21 -0
  7. memorygraph/backends/base.py +179 -0
  8. memorygraph/backends/cloud.py +75 -0
  9. memorygraph/backends/cloud_backend.py +858 -0
  10. memorygraph/backends/factory.py +577 -0
  11. memorygraph/backends/falkordb_backend.py +749 -0
  12. memorygraph/backends/falkordblite_backend.py +746 -0
  13. memorygraph/backends/ladybugdb_backend.py +242 -0
  14. memorygraph/backends/memgraph_backend.py +327 -0
  15. memorygraph/backends/neo4j_backend.py +298 -0
  16. memorygraph/backends/sqlite_fallback.py +463 -0
  17. memorygraph/backends/turso.py +448 -0
  18. memorygraph/cli.py +743 -0
  19. memorygraph/cloud_database.py +297 -0
  20. memorygraph/config.py +295 -0
  21. memorygraph/database.py +933 -0
  22. memorygraph/graph_analytics.py +631 -0
  23. memorygraph/integration/__init__.py +69 -0
  24. memorygraph/integration/context_capture.py +426 -0
  25. memorygraph/integration/project_analysis.py +583 -0
  26. memorygraph/integration/workflow_tracking.py +492 -0
  27. memorygraph/intelligence/__init__.py +59 -0
  28. memorygraph/intelligence/context_retrieval.py +447 -0
  29. memorygraph/intelligence/entity_extraction.py +386 -0
  30. memorygraph/intelligence/pattern_recognition.py +420 -0
  31. memorygraph/intelligence/temporal.py +374 -0
  32. memorygraph/migration/__init__.py +27 -0
  33. memorygraph/migration/manager.py +579 -0
  34. memorygraph/migration/models.py +142 -0
  35. memorygraph/migration/scripts/__init__.py +17 -0
  36. memorygraph/migration/scripts/bitemporal_migration.py +595 -0
  37. memorygraph/migration/scripts/multitenancy_migration.py +452 -0
  38. memorygraph/migration_tools_module.py +146 -0
  39. memorygraph/models.py +684 -0
  40. memorygraph/proactive/__init__.py +46 -0
  41. memorygraph/proactive/outcome_learning.py +444 -0
  42. memorygraph/proactive/predictive.py +410 -0
  43. memorygraph/proactive/session_briefing.py +399 -0
  44. memorygraph/relationships.py +668 -0
  45. memorygraph/server.py +883 -0
  46. memorygraph/sqlite_database.py +1876 -0
  47. memorygraph/tools/__init__.py +59 -0
  48. memorygraph/tools/activity_tools.py +262 -0
  49. memorygraph/tools/memory_tools.py +315 -0
  50. memorygraph/tools/migration_tools.py +181 -0
  51. memorygraph/tools/relationship_tools.py +147 -0
  52. memorygraph/tools/search_tools.py +406 -0
  53. memorygraph/tools/temporal_tools.py +339 -0
  54. memorygraph/utils/__init__.py +10 -0
  55. memorygraph/utils/context_extractor.py +429 -0
  56. memorygraph/utils/error_handling.py +151 -0
  57. memorygraph/utils/export_import.py +425 -0
  58. memorygraph/utils/graph_algorithms.py +200 -0
  59. memorygraph/utils/pagination.py +149 -0
  60. memorygraph/utils/project_detection.py +133 -0
  61. memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
  62. memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
  63. memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
  64. memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
  65. memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
memorygraph/backends/falkordb_backend.py
@@ -0,0 +1,749 @@
+ """
+ FalkorDB backend implementation for the Claude Code Memory Server.
+
+ This module provides the FalkorDB-specific implementation of the GraphBackend interface.
+ FalkorDB is a Redis-based graph database; the vendor reports up to 500x lower p99 latency than Neo4j.
+ """
+
+ import logging
+ import os
+ from typing import Any, Optional, List, Tuple, Dict
+
+ from .base import GraphBackend
+ from ..models import (
+     Memory,
+     MemoryType,
+     Relationship,
+     RelationshipType,
+     RelationshipProperties,
+     SearchQuery,
+     MemoryContext,
+     MemoryNode,
+     DatabaseConnectionError,
+     SchemaError,
+     ValidationError,
+     RelationshipError,
+ )
+ from ..config import Config
+ from datetime import datetime, timezone
+ import uuid
+ import json
+
+ logger = logging.getLogger(__name__)
+
+
+ class FalkorDBBackend(GraphBackend):
+     """FalkorDB implementation of the GraphBackend interface."""
+
+     def __init__(
+         self,
+         host: Optional[str] = None,
+         port: Optional[int] = None,
+         password: Optional[str] = None,
+         graph_name: str = "memorygraph"
+     ):
+         """
+         Initialize FalkorDB backend.
+
+         Args:
+             host: FalkorDB host (defaults to FALKORDB_HOST env var or localhost)
+             port: FalkorDB port (defaults to FALKORDB_PORT env var or 6379)
+             password: FalkorDB password (defaults to FALKORDB_PASSWORD env var)
+             graph_name: Name of the graph database (defaults to 'memorygraph')
+         """
+         self.host = host or os.getenv("FALKORDB_HOST", "localhost")
+         self.port = port or int(os.getenv("FALKORDB_PORT", "6379"))
+         self.password = password or os.getenv("FALKORDB_PASSWORD")
+         self.graph_name = graph_name
+         self.client = None
+         self.graph = None
+         self._connected = False
+
+     async def connect(self) -> bool:
+         """
+         Establish connection to FalkorDB database.
+
+         Returns:
+             True if connection successful
+
+         Raises:
+             DatabaseConnectionError: If connection fails
+         """
+         try:
+             # Lazy import falkordb only when connecting
+             try:
+                 from falkordb import FalkorDB
+             except ImportError as e:
+                 raise DatabaseConnectionError(
+                     "falkordb package is required for FalkorDB backend. "
+                     "Install with: pip install falkordb"
+                 ) from e
+
+             # Create FalkorDB client
+             if self.password:
+                 self.client = FalkorDB(host=self.host, port=self.port, password=self.password)
+             else:
+                 self.client = FalkorDB(host=self.host, port=self.port)
+
+             # Select the graph
+             self.graph = self.client.select_graph(self.graph_name)
+             self._connected = True
+
+             logger.info(f"Successfully connected to FalkorDB at {self.host}:{self.port}")
+             return True
+
+         except Exception as e:
+             logger.error(f"Failed to connect to FalkorDB: {e}")
+             raise DatabaseConnectionError(f"Failed to connect to FalkorDB: {e}")
+
+     async def disconnect(self) -> None:
+         """Close the database connection."""
+         if self.client:
+             # FalkorDB client doesn't require explicit close in Python SDK
+             self.client = None
+             self.graph = None
+             self._connected = False
+             logger.info("FalkorDB connection closed")
+
+     async def execute_query(
+         self,
+         query: str,
+         parameters: Optional[dict[str, Any]] = None,
+         write: bool = False
+     ) -> list[dict[str, Any]]:
+         """
+         Execute a Cypher query and return results.
+
+         Args:
+             query: The Cypher query string
+             parameters: Query parameters for parameterized queries
+             write: Whether this is a write operation (default: False)
+
+         Returns:
+             List of result records as dictionaries keyed by column name
+
+         Raises:
+             DatabaseConnectionError: If not connected or query fails
+         """
+         if not self._connected or not self.graph:
+             raise DatabaseConnectionError("Not connected to FalkorDB. Call connect() first.")
+
+         params = parameters or {}
+
+         try:
+             # Execute query on FalkorDB
+             result = self.graph.query(query, params)
+
+             # Convert each result row to a dict keyed by column name so callers
+             # can use record["id"], record["m"], etc. If no header is available
+             # or it does not match the row shape, fall back to the raw row.
+             result_list = []
+             if hasattr(result, 'result_set') and result.result_set:
+                 header = getattr(result, 'header', None) or []
+                 columns = [col[-1] if isinstance(col, (list, tuple)) else col for col in header]
+                 for row in result.result_set:
+                     if columns and len(columns) == len(row):
+                         result_list.append(dict(zip(columns, row)))
+                     else:
+                         result_list.append(row)
+
+             return result_list
+
+         except Exception as e:
+             logger.error(f"Query execution failed: {e}")
+             raise DatabaseConnectionError(f"Query execution failed: {e}")
+
+     async def initialize_schema(self) -> None:
+         """
+         Initialize database schema including indexes and constraints.
+
+         Note:
+             Constraint or index types that FalkorDB does not support are
+             logged and skipped rather than raised as errors.
+         """
+         logger.info("Initializing FalkorDB schema for Claude Memory...")
+
+         # Create constraints (FalkorDB uses similar Cypher syntax to Neo4j)
+         constraints = [
+             "CREATE CONSTRAINT ON (m:Memory) ASSERT m.id IS UNIQUE",
+         ]
+
+         # Create indexes for performance
+         indexes = [
+             "CREATE INDEX ON :Memory(type)",
+             "CREATE INDEX ON :Memory(created_at)",
+             "CREATE INDEX ON :Memory(importance)",
+             "CREATE INDEX ON :Memory(confidence)",
+         ]
+
+         # Conditional multi-tenant indexes (Phase 1)
+         if Config.is_multi_tenant_mode():
+             multitenant_indexes = [
+                 "CREATE INDEX ON :Memory(context_tenant_id)",
+                 "CREATE INDEX ON :Memory(context_team_id)",
+                 "CREATE INDEX ON :Memory(context_visibility)",
+                 "CREATE INDEX ON :Memory(context_created_by)",
+                 "CREATE INDEX ON :Memory(version)",
+             ]
+             indexes.extend(multitenant_indexes)
+             logger.info("Multi-tenant mode enabled, adding tenant indexes")
+
+         # Execute schema creation
+         for constraint in constraints:
+             try:
+                 await self.execute_query(constraint, write=True)
+                 logger.debug(f"Created constraint: {constraint}")
+             except Exception as e:
+                 # FalkorDB may not support all constraint types; log and continue
+                 logger.debug(f"Constraint creation note: {e}")
+
+         for index in indexes:
+             try:
+                 await self.execute_query(index, write=True)
+                 logger.debug(f"Created index: {index}")
+             except Exception as e:
+                 # FalkorDB may not support all index types; log and continue
+                 logger.debug(f"Index creation note: {e}")
+
+         logger.info("Schema initialization completed")
+
+     async def store_memory(self, memory: Memory) -> str:
+         """
+         Store a memory in the database and return its ID.
+
+         Args:
+             memory: Memory object to store
+
+         Returns:
+             ID of the stored memory
+
+         Raises:
+             ValidationError: If memory data is invalid
+             DatabaseConnectionError: If storage fails
+         """
+         try:
+             if not memory.id:
+                 memory.id = str(uuid.uuid4())
+
+             memory.updated_at = datetime.now(timezone.utc)
+
+             # Convert memory to properties
+             memory_node = MemoryNode(memory=memory)
+             properties = memory_node.to_neo4j_properties()
+
+             query = """
+             MERGE (m:Memory {id: $id})
+             SET m += $properties
+             RETURN m.id as id
+             """
+
+             result = await self.execute_query(
+                 query,
+                 {"id": memory.id, "properties": properties},
+                 write=True
+             )
+
+             if result:
+                 logger.info(f"Stored memory: {memory.id} ({memory.type})")
+                 return result[0]["id"]
+             else:
+                 raise DatabaseConnectionError(f"Failed to store memory: {memory.id}")
+
+         except Exception as e:
+             if isinstance(e, (DatabaseConnectionError, ValidationError)):
+                 raise
+             logger.error(f"Failed to store memory: {e}")
+             raise DatabaseConnectionError(f"Failed to store memory: {e}")
+
+     async def get_memory(self, memory_id: str, include_relationships: bool = True) -> Optional[Memory]:
+         """
+         Retrieve a memory by ID.
+
+         Args:
+             memory_id: ID of the memory to retrieve
+             include_relationships: Whether to include relationships (not currently used)
+
+         Returns:
+             Memory object if found, None otherwise
+
+         Raises:
+             DatabaseConnectionError: If query fails
+         """
+         try:
+             query = """
+             MATCH (m:Memory {id: $memory_id})
+             RETURN m
+             """
+
+             result = await self.execute_query(query, {"memory_id": memory_id}, write=False)
+
+             if not result:
+                 return None
+
+             memory_data = result[0]["m"]
+             return self._falkordb_to_memory(memory_data)
+
+         except Exception as e:
+             if isinstance(e, DatabaseConnectionError):
+                 raise
+             logger.error(f"Failed to get memory {memory_id}: {e}")
+             raise DatabaseConnectionError(f"Failed to get memory: {e}")
+
+     async def search_memories(self, search_query: SearchQuery) -> List[Memory]:
+         """
+         Search for memories based on query parameters.
+
+         Args:
+             search_query: SearchQuery object with filter criteria
+
+         Returns:
+             List of Memory objects matching the search criteria
+
+         Raises:
+             DatabaseConnectionError: If search fails
+         """
+         try:
+             conditions = []
+             parameters = {}
+
+             # Build WHERE conditions based on search parameters
+             if search_query.query:
+                 conditions.append("(m.title CONTAINS $query OR m.content CONTAINS $query OR m.summary CONTAINS $query)")
+                 parameters["query"] = search_query.query
+
+             if search_query.memory_types:
+                 conditions.append("m.type IN $memory_types")
+                 parameters["memory_types"] = [t.value for t in search_query.memory_types]
+
+             if search_query.tags:
+                 conditions.append("ANY(tag IN $tags WHERE tag IN m.tags)")
+                 parameters["tags"] = search_query.tags
+
+             if search_query.project_path:
+                 conditions.append("m.context_project_path = $project_path")
+                 parameters["project_path"] = search_query.project_path
+
+             if search_query.min_importance is not None:
+                 conditions.append("m.importance >= $min_importance")
+                 parameters["min_importance"] = search_query.min_importance
+
+             if search_query.min_confidence is not None:
+                 conditions.append("m.confidence >= $min_confidence")
+                 parameters["min_confidence"] = search_query.min_confidence
+
+             # Build the complete query
+             where_clause = " AND ".join(conditions) if conditions else "true"
+
+             query = f"""
+             MATCH (m:Memory)
+             WHERE {where_clause}
+             RETURN m
+             ORDER BY m.importance DESC, m.created_at DESC
+             LIMIT $limit
+             """
+
+             parameters["limit"] = search_query.limit
+
+             result = await self.execute_query(query, parameters, write=False)
+
+             memories = []
+             for record in result:
+                 memory = self._falkordb_to_memory(record["m"])
+                 if memory:
+                     memories.append(memory)
+
+             logger.info(f"Found {len(memories)} memories for search query")
+             return memories
+
+         except Exception as e:
+             if isinstance(e, DatabaseConnectionError):
+                 raise
+             logger.error(f"Failed to search memories: {e}")
+             raise DatabaseConnectionError(f"Failed to search memories: {e}")
+
+     async def update_memory(self, memory: Memory) -> bool:
+         """
+         Update an existing memory.
+
+         Args:
+             memory: Memory object with updated fields
+
+         Returns:
+             True if update succeeded, False otherwise
+
+         Raises:
+             ValidationError: If memory ID is missing
+             DatabaseConnectionError: If update fails
+         """
+         try:
+             if not memory.id:
+                 raise ValidationError("Memory must have an ID to update")
+
+             memory.updated_at = datetime.now(timezone.utc)
+
+             # Convert memory to properties
+             memory_node = MemoryNode(memory=memory)
+             properties = memory_node.to_neo4j_properties()
+
+             query = """
+             MATCH (m:Memory {id: $id})
+             SET m += $properties
+             RETURN m.id as id
+             """
+
+             result = await self.execute_query(
+                 query,
+                 {"id": memory.id, "properties": properties},
+                 write=True
+             )
+
+             success = len(result) > 0
+             if success:
+                 logger.info(f"Updated memory: {memory.id}")
+
+             return success
+
+         except Exception as e:
+             if isinstance(e, (ValidationError, DatabaseConnectionError)):
+                 raise
+             logger.error(f"Failed to update memory {memory.id}: {e}")
+             raise DatabaseConnectionError(f"Failed to update memory: {e}")
+
+     async def delete_memory(self, memory_id: str) -> bool:
+         """
+         Delete a memory and all its relationships.
+
+         Args:
+             memory_id: ID of the memory to delete
+
+         Returns:
+             True if deletion succeeded, False otherwise
+
+         Raises:
+             DatabaseConnectionError: If deletion fails
+         """
+         try:
+             query = """
+             MATCH (m:Memory {id: $memory_id})
+             DETACH DELETE m
+             RETURN COUNT(m) as deleted_count
+             """
+
+             result = await self.execute_query(query, {"memory_id": memory_id}, write=True)
+
+             # Coerce to bool so an empty result returns False rather than []
+             success = bool(result and result[0]["deleted_count"] > 0)
+             if success:
+                 logger.info(f"Deleted memory: {memory_id}")
+
+             return success
+
+         except Exception as e:
+             if isinstance(e, DatabaseConnectionError):
+                 raise
+             logger.error(f"Failed to delete memory {memory_id}: {e}")
+             raise DatabaseConnectionError(f"Failed to delete memory: {e}")
+
+     async def create_relationship(
+         self,
+         from_memory_id: str,
+         to_memory_id: str,
+         relationship_type: RelationshipType,
+         properties: Optional[RelationshipProperties] = None
+     ) -> str:
+         """
+         Create a relationship between two memories.
+
+         Args:
+             from_memory_id: Source memory ID
+             to_memory_id: Target memory ID
+             relationship_type: Type of relationship
+             properties: Relationship properties (optional)
+
+         Returns:
+             ID of the created relationship
+
+         Raises:
+             RelationshipError: If relationship creation fails
+             DatabaseConnectionError: If database operation fails
+         """
+         try:
+             relationship_id = str(uuid.uuid4())
+
+             if properties is None:
+                 properties = RelationshipProperties()
+
+             # Convert properties to dict
+             props_dict = properties.model_dump()
+             props_dict['id'] = relationship_id
+             props_dict['created_at'] = props_dict['created_at'].isoformat()
+             props_dict['last_validated'] = props_dict['last_validated'].isoformat()
+
+             query = f"""
+             MATCH (from:Memory {{id: $from_id}})
+             MATCH (to:Memory {{id: $to_id}})
+             CREATE (from)-[r:{relationship_type.value} $properties]->(to)
+             RETURN r.id as id
+             """
+
+             result = await self.execute_query(
+                 query,
+                 {
+                     "from_id": from_memory_id,
+                     "to_id": to_memory_id,
+                     "properties": props_dict
+                 },
+                 write=True
+             )
+
+             if result:
+                 logger.info(f"Created relationship: {relationship_type.value} between {from_memory_id} and {to_memory_id}")
+                 return result[0]["id"]
+             else:
+                 raise RelationshipError(
+                     f"Failed to create relationship between {from_memory_id} and {to_memory_id}",
+                     {"from_id": from_memory_id, "to_id": to_memory_id, "type": relationship_type.value}
+                 )
+
+         except Exception as e:
+             if isinstance(e, (RelationshipError, DatabaseConnectionError)):
+                 raise
+             logger.error(f"Failed to create relationship: {e}")
+             raise RelationshipError(f"Failed to create relationship: {e}")
+
+     async def get_related_memories(
+         self,
+         memory_id: str,
+         relationship_types: Optional[List[RelationshipType]] = None,
+         max_depth: int = 2
+     ) -> List[Tuple[Memory, Relationship]]:
+         """
+         Get memories related to a specific memory.
+
+         Args:
+             memory_id: ID of the memory to find relations for
+             relationship_types: Filter by specific relationship types (optional)
+             max_depth: Maximum depth for graph traversal
+
+         Returns:
+             List of tuples containing (Memory, Relationship)
+
+         Raises:
+             DatabaseConnectionError: If query fails
+         """
+         try:
+             # Build relationship type filter
+             rel_filter = ""
+             if relationship_types:
+                 rel_types = "|".join([rt.value for rt in relationship_types])
+                 rel_filter = f":{rel_types}"
+
+             query = f"""
+             MATCH (start:Memory {{id: $memory_id}})
+             MATCH (start)-[r{rel_filter}*1..{max_depth}]-(related:Memory)
+             WHERE related.id <> start.id
+             WITH DISTINCT related, r[0] as rel
+             RETURN related,
+                    type(rel) as rel_type,
+                    properties(rel) as rel_props
+             ORDER BY rel.strength DESC, related.importance DESC
+             LIMIT 20
+             """
+
+             result = await self.execute_query(query, {"memory_id": memory_id}, write=False)
+
+             related_memories = []
+             for record in result:
+                 memory = self._falkordb_to_memory(record["related"])
+                 if memory:
+                     rel_type_str = record.get("rel_type", "RELATED_TO")
+                     rel_props = record.get("rel_props", {})
+
+                     try:
+                         rel_type = RelationshipType(rel_type_str)
+                     except ValueError:
+                         rel_type = RelationshipType.RELATED_TO
+
+                     relationship = Relationship(
+                         from_memory_id=memory_id,
+                         to_memory_id=memory.id,
+                         type=rel_type,
+                         properties=RelationshipProperties(
+                             strength=rel_props.get("strength", 0.5),
+                             confidence=rel_props.get("confidence", 0.8),
+                             context=rel_props.get("context"),
+                             evidence_count=rel_props.get("evidence_count", 1)
+                         )
+                     )
+                     related_memories.append((memory, relationship))
+
+             logger.info(f"Found {len(related_memories)} related memories for {memory_id}")
+             return related_memories
+
+         except Exception as e:
+             if isinstance(e, DatabaseConnectionError):
+                 raise
+             logger.error(f"Failed to get related memories for {memory_id}: {e}")
+             raise DatabaseConnectionError(f"Failed to get related memories: {e}")
+
+     async def get_memory_statistics(self) -> Dict[str, Any]:
+         """
+         Get database statistics and metrics.
+
+         Returns:
+             Dictionary containing various database statistics
+
+         Raises:
+             DatabaseConnectionError: If query fails
+         """
+         queries = {
+             "total_memories": "MATCH (m:Memory) RETURN COUNT(m) as count",
+             "memories_by_type": """
+                 MATCH (m:Memory)
+                 RETURN m.type as type, COUNT(m) as count
+                 ORDER BY count DESC
+             """,
+             "total_relationships": "MATCH ()-[r]->() RETURN COUNT(r) as count",
+             "avg_importance": "MATCH (m:Memory) RETURN AVG(m.importance) as avg_importance",
+             "avg_confidence": "MATCH (m:Memory) RETURN AVG(m.confidence) as avg_confidence",
+         }
+
+         stats = {}
+         for stat_name, query in queries.items():
+             try:
+                 result = await self.execute_query(query, write=False)
+                 if stat_name == "memories_by_type":
+                     stats[stat_name] = {record["type"]: record["count"] for record in result}
+                 else:
+                     stats[stat_name] = result[0] if result else None
+             except Exception as e:
+                 logger.error(f"Failed to get statistic {stat_name}: {e}")
+                 stats[stat_name] = None
+
+         return stats
+
+     async def health_check(self) -> dict[str, Any]:
+         """
+         Check backend health and return status information.
+
+         Returns:
+             Dictionary with health check results
+         """
+         health_info = {
+             "connected": self._connected,
+             "backend_type": "falkordb",
+             "host": self.host,
+             "port": self.port,
+             "graph_name": self.graph_name
+         }
+
+         if self._connected:
+             try:
+                 # Get basic node count
+                 count_query = "MATCH (m:Memory) RETURN count(m) as count"
+                 count_result = await self.execute_query(count_query, write=False)
+                 if count_result:
+                     health_info["statistics"] = {
+                         "memory_count": count_result[0].get("count", 0)
+                     }
+             except Exception as e:
+                 logger.warning(f"Could not get detailed health info: {e}")
+                 health_info["warning"] = str(e)
+
+         return health_info
+
+     def backend_name(self) -> str:
+         """Return the name of this backend implementation."""
+         return "falkordb"
+
+     def supports_fulltext_search(self) -> bool:
+         """Check if this backend supports full-text search."""
+         return True
+
+     def supports_transactions(self) -> bool:
+         """Check if this backend supports ACID transactions."""
+         return True
+
+     def _falkordb_to_memory(self, node_data: Dict[str, Any]) -> Optional[Memory]:
+         """
+         Convert FalkorDB node data to Memory object.
+
+         Args:
+             node_data: Node properties from FalkorDB (a dict, or a node object
+                 exposing a `properties` dict)
+
+         Returns:
+             Memory object or None if conversion fails
+         """
+         try:
+             # Unwrap FalkorDB node objects to their properties dict when present
+             if hasattr(node_data, "properties"):
+                 node_data = node_data.properties
+
+             # Extract basic memory fields
+             memory_data = {
+                 "id": node_data.get("id"),
+                 "type": MemoryType(node_data.get("type")),
+                 "title": node_data.get("title"),
+                 "content": node_data.get("content"),
+                 "summary": node_data.get("summary"),
+                 "tags": node_data.get("tags", []),
+                 "importance": node_data.get("importance", 0.5),
+                 "confidence": node_data.get("confidence", 0.8),
+                 "effectiveness": node_data.get("effectiveness"),
+                 "usage_count": node_data.get("usage_count", 0),
+                 "created_at": datetime.fromisoformat(node_data.get("created_at")),
+                 "updated_at": datetime.fromisoformat(node_data.get("updated_at")),
+             }
+
+             # Handle optional last_accessed field
+             if node_data.get("last_accessed"):
+                 memory_data["last_accessed"] = datetime.fromisoformat(node_data["last_accessed"])
+
+             # Extract context information
+             context_data = {}
+             for key, value in node_data.items():
+                 if key.startswith("context_") and value is not None:
+                     context_key = key[8:]  # Remove "context_" prefix
+
+                     # Deserialize JSON strings back to Python objects
+                     if isinstance(value, str) and context_key in ["additional_metadata"]:
+                         try:
+                             context_data[context_key] = json.loads(value)
+                         except json.JSONDecodeError:
+                             context_data[context_key] = value
+                     # Handle JSON-serialized lists/dicts
+                     elif isinstance(value, str) and value.startswith(('[', '{')):
+                         try:
+                             context_data[context_key] = json.loads(value)
+                         except json.JSONDecodeError:
+                             context_data[context_key] = value
+                     else:
+                         context_data[context_key] = value
+
+             if context_data:
+                 # Handle timestamp fields in context
+                 for time_field in ["timestamp"]:
+                     if time_field in context_data:
+                         if isinstance(context_data[time_field], str):
+                             context_data[time_field] = datetime.fromisoformat(context_data[time_field])
+
+                 memory_data["context"] = MemoryContext(**context_data)
+
+             return Memory(**memory_data)
+
+         except Exception as e:
+             logger.error(f"Failed to convert FalkorDB node to Memory: {e}")
+             return None
+
+     @classmethod
+     async def create(
+         cls,
+         host: Optional[str] = None,
+         port: Optional[int] = None,
+         password: Optional[str] = None,
+         graph_name: str = "memorygraph"
+     ) -> "FalkorDBBackend":
+         """
+         Factory method to create and connect to a FalkorDB backend.
+
+         Args:
+             host: FalkorDB host
+             port: FalkorDB port
+             password: FalkorDB password
+             graph_name: Name of the graph database
+
+         Returns:
+             Connected FalkorDBBackend instance
+
+         Raises:
+             DatabaseConnectionError: If connection fails
+         """
+         backend = cls(host, port, password, graph_name)
+         await backend.connect()
+         return backend
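
For orientation, here is a minimal, illustrative usage sketch of the backend defined above. It is not part of the package; it only exercises methods visible in this file (create, health_check, execute_query, disconnect) and assumes a FalkorDB instance is reachable at localhost:6379 or via the FALKORDB_HOST / FALKORDB_PORT / FALKORDB_PASSWORD environment variables.

import asyncio

from memorygraph.backends.falkordb_backend import FalkorDBBackend


async def main() -> None:
    # create() instantiates the backend and awaits connect() before returning it
    backend = await FalkorDBBackend.create(graph_name="memorygraph")
    try:
        # Connection status, backend type, and a basic memory count
        print(await backend.health_check())

        # Raw Cypher pass-through; results come back as a list of records
        rows = await backend.execute_query("MATCH (m:Memory) RETURN count(m) AS count")
        print(rows)
    finally:
        await backend.disconnect()


if __name__ == "__main__":
    asyncio.run(main())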