memorygraphMCP 0.11.7 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memorygraph/__init__.py +50 -0
- memorygraph/__main__.py +12 -0
- memorygraph/advanced_tools.py +509 -0
- memorygraph/analytics/__init__.py +46 -0
- memorygraph/analytics/advanced_queries.py +727 -0
- memorygraph/backends/__init__.py +21 -0
- memorygraph/backends/base.py +179 -0
- memorygraph/backends/cloud.py +75 -0
- memorygraph/backends/cloud_backend.py +858 -0
- memorygraph/backends/factory.py +577 -0
- memorygraph/backends/falkordb_backend.py +749 -0
- memorygraph/backends/falkordblite_backend.py +746 -0
- memorygraph/backends/ladybugdb_backend.py +242 -0
- memorygraph/backends/memgraph_backend.py +327 -0
- memorygraph/backends/neo4j_backend.py +298 -0
- memorygraph/backends/sqlite_fallback.py +463 -0
- memorygraph/backends/turso.py +448 -0
- memorygraph/cli.py +743 -0
- memorygraph/cloud_database.py +297 -0
- memorygraph/config.py +295 -0
- memorygraph/database.py +933 -0
- memorygraph/graph_analytics.py +631 -0
- memorygraph/integration/__init__.py +69 -0
- memorygraph/integration/context_capture.py +426 -0
- memorygraph/integration/project_analysis.py +583 -0
- memorygraph/integration/workflow_tracking.py +492 -0
- memorygraph/intelligence/__init__.py +59 -0
- memorygraph/intelligence/context_retrieval.py +447 -0
- memorygraph/intelligence/entity_extraction.py +386 -0
- memorygraph/intelligence/pattern_recognition.py +420 -0
- memorygraph/intelligence/temporal.py +374 -0
- memorygraph/migration/__init__.py +27 -0
- memorygraph/migration/manager.py +579 -0
- memorygraph/migration/models.py +142 -0
- memorygraph/migration/scripts/__init__.py +17 -0
- memorygraph/migration/scripts/bitemporal_migration.py +595 -0
- memorygraph/migration/scripts/multitenancy_migration.py +452 -0
- memorygraph/migration_tools_module.py +146 -0
- memorygraph/models.py +684 -0
- memorygraph/proactive/__init__.py +46 -0
- memorygraph/proactive/outcome_learning.py +444 -0
- memorygraph/proactive/predictive.py +410 -0
- memorygraph/proactive/session_briefing.py +399 -0
- memorygraph/relationships.py +668 -0
- memorygraph/server.py +883 -0
- memorygraph/sqlite_database.py +1876 -0
- memorygraph/tools/__init__.py +59 -0
- memorygraph/tools/activity_tools.py +262 -0
- memorygraph/tools/memory_tools.py +315 -0
- memorygraph/tools/migration_tools.py +181 -0
- memorygraph/tools/relationship_tools.py +147 -0
- memorygraph/tools/search_tools.py +406 -0
- memorygraph/tools/temporal_tools.py +339 -0
- memorygraph/utils/__init__.py +10 -0
- memorygraph/utils/context_extractor.py +429 -0
- memorygraph/utils/error_handling.py +151 -0
- memorygraph/utils/export_import.py +425 -0
- memorygraph/utils/graph_algorithms.py +200 -0
- memorygraph/utils/pagination.py +149 -0
- memorygraph/utils/project_detection.py +133 -0
- memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
- memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
- memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
- memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
- memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
memorygraph/utils/export_import.py
@@ -0,0 +1,425 @@

"""
Export and import utilities for MemoryGraph data.

Supports JSON and Markdown export formats.
Works with all backends (SQLite, Neo4j, Memgraph, FalkorDB, FalkorDBLite).
"""

import json
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, Any, List, Optional, Callable, Set, Tuple, Union

from ..models import (
    Memory, MemoryType, MemoryContext, RelationshipType, RelationshipProperties,
    SearchQuery, Relationship
)
from .pagination import paginate_memories

logger = logging.getLogger(__name__)


async def _export_relationships(
    db,  # MemoryDatabase or SQLiteMemoryDatabase
    memories: List[Memory]
) -> List[Dict[str, Any]]:
    """
    Export all relationships for given memories using backend-agnostic methods.

    Args:
        db: Database instance (any backend)
        memories: List of memories to export relationships for

    Returns:
        List of relationship dictionaries
    """
    relationships_map: Dict[Tuple[str, str, str], Dict[str, Any]] = {}

    for memory in memories:
        try:
            related = await db.get_related_memories(
                memory_id=memory.id,
                max_depth=1
            )

            for related_memory, relationship in related:
                # Use tuple as key for deduplication (from_id, to_id, type)
                key = (relationship.from_memory_id, relationship.to_memory_id, relationship.type.value)

                if key not in relationships_map:
                    rel_dict = {
                        "from_memory_id": relationship.from_memory_id,
                        "to_memory_id": relationship.to_memory_id,
                        "type": relationship.type.value,
                        "properties": {
                            "strength": relationship.properties.strength,
                            "confidence": relationship.properties.confidence,
                            "context": relationship.properties.context,
                            "evidence_count": relationship.properties.evidence_count
                        }
                    }
                    relationships_map[key] = rel_dict
        except Exception as e:
            logger.warning(f"Failed to export relationships for memory {memory.id}: {e}")
            continue

    return list(relationships_map.values())


async def export_to_json(
    db,  # MemoryDatabase or SQLiteMemoryDatabase
    output_path: str,
    progress_callback: Optional[Callable[[int, int], None]] = None
) -> Dict[str, Any]:
    """
    Export all memories and relationships to JSON format.

    Works with ANY backend by using the MemoryDatabase interface.

    Args:
        db: Database instance (works with all backends)
        output_path: Path to output JSON file
        progress_callback: Optional callback(current, total) for progress reporting

    Returns:
        Dictionary with export statistics

    Raises:
        IOError: If file cannot be written
    """
    logger.info("Starting backend-agnostic export...")

    # Export memories in batches using pagination helper
    all_memories = []

    def progress_reporter(count: int):
        if progress_callback:
            # We don't know total in advance without an extra query, so pass current count twice
            progress_callback(count, count)

    async for batch in paginate_memories(db, batch_size=1000, progress_callback=progress_reporter):
        all_memories.extend(batch)

    logger.info(f"Exported {len(all_memories)} memories")

    # Export relationships using backend-agnostic method
    relationships_data = await _export_relationships(db, all_memories)

    logger.info(f"Exported {len(relationships_data)} relationships")

    # Convert memories to dict format
    memories_data = []
    for memory in all_memories:
        memory_dict = {
            "id": memory.id,
            "type": memory.type.value,
            "title": memory.title,
            "content": memory.content,
            "summary": memory.summary,
            "tags": memory.tags,
            "importance": memory.importance,
            "confidence": memory.confidence,
            "created_at": memory.created_at.isoformat(),
            "updated_at": memory.updated_at.isoformat()
        }

        # Add context if present
        if memory.context:
            memory_dict["context"] = {}
            for field in ["project_path", "function_name", "class_name", "files_involved",
                          "languages", "frameworks", "technologies", "environment", "additional_metadata"]:
                value = getattr(memory.context, field, None)
                if value is not None:
                    memory_dict["context"][field] = value

        memories_data.append(memory_dict)

    # Get backend name if available
    backend_type = "unknown"
    if hasattr(db, 'backend') and hasattr(db.backend, 'backend_name'):
        backend_type = db.backend.backend_name()
    elif hasattr(db, 'connection') and hasattr(db.connection, 'backend_name'):
        backend_type = db.connection.backend_name()

    # Create export data structure (format v2.0 for universal export)
    export_data = {
        "format_version": "2.0",
        "export_version": "1.0",  # Keep for backward compatibility
        "export_date": datetime.now(timezone.utc).isoformat(),
        "backend_type": backend_type,
        "memory_count": len(memories_data),
        "relationship_count": len(relationships_data),
        "memories": memories_data,
        "relationships": relationships_data
    }

    # Write to file
    output_file = Path(output_path)
    output_file.parent.mkdir(parents=True, exist_ok=True)

    with open(output_path, 'w') as f:
        json.dump(export_data, f, indent=2)

    logger.info(f"Export complete: {len(memories_data)} memories and {len(relationships_data)} relationships to {output_path}")

    return {
        "memory_count": len(memories_data),
        "relationship_count": len(relationships_data),
        "backend_type": backend_type,
        "output_path": output_path
    }

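
A minimal usage sketch for the exporter (illustrative, not part of the package; it assumes an already-initialized database instance, since this diff does not show the factory API):

import asyncio  # assumed entry point for running the coroutine

async def backup(db) -> None:
    def report(current: int, total: int) -> None:
        # total == current during export (see progress_reporter above)
        print(f"exported {current} memories so far")

    stats = await export_to_json(db, "backups/memories.json", progress_callback=report)
    # stats is e.g. {"memory_count": ..., "relationship_count": ...,
    #                "backend_type": ..., "output_path": "backups/memories.json"}
    print(stats)

# asyncio.run(backup(db)) once `db` has been constructed and connected
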

async def import_from_json(
    db,  # MemoryDatabase or SQLiteMemoryDatabase
    input_path: str,
    skip_duplicates: bool = False,
    progress_callback: Optional[Callable[[int, int], None]] = None
) -> Dict[str, int]:
    """
    Import memories and relationships from JSON format.

    Works with ANY backend by using the MemoryDatabase interface.

    Args:
        db: Database instance (works with all backends)
        input_path: Path to input JSON file
        skip_duplicates: If True, skip memories with existing IDs
        progress_callback: Optional callback(current, total) for progress reporting

    Returns:
        Dictionary with import statistics:
        - imported_memories: Number of memories imported
        - imported_relationships: Number of relationships imported
        - skipped_memories: Number of duplicate memories skipped
        - skipped_relationships: Number of invalid relationships skipped

    Raises:
        IOError: If file cannot be read
        ValueError: If JSON format is invalid
    """
    # Read JSON file
    with open(input_path, 'r') as f:
        data = json.load(f)

    # Validate structure
    if "memories" not in data or "relationships" not in data:
        raise ValueError("Invalid export format: missing 'memories' or 'relationships'")

    # Validate format version (support both v1.0 and v2.0)
    format_version = data.get("format_version") or data.get("export_version")
    if not format_version:
        raise ValueError("Invalid export format: missing version information")

    logger.info(f"Importing from export format {format_version}")

    # Validate required fields in memories
    for mem_data in data["memories"]:
        required_fields = ["id", "type", "title", "content"]
        missing_fields = [field for field in required_fields if field not in mem_data]
        if missing_fields:
            raise ValueError(f"Invalid memory data: missing fields {missing_fields}")

    # Validate memory IDs are unique within the export
    memory_ids = set()
    duplicate_ids = []
    for mem_data in data["memories"]:
        mem_id = mem_data["id"]
        if mem_id in memory_ids:
            duplicate_ids.append(mem_id)
        memory_ids.add(mem_id)

    if duplicate_ids:
        raise ValueError(f"Invalid export: duplicate memory IDs found: {duplicate_ids}")

    # Validate relationship endpoints exist in the export
    for rel_data in data["relationships"]:
        from_id = rel_data.get("from_memory_id")
        to_id = rel_data.get("to_memory_id")
        if from_id not in memory_ids or to_id not in memory_ids:
            logger.warning(f"Relationship references missing memory: {from_id} -> {to_id}")

    imported_memories = 0
    skipped_memories = 0
    total_memories = len(data["memories"])

    # Import memories
    for idx, mem_data in enumerate(data["memories"], 1):
        try:
            # Check if memory already exists
            if skip_duplicates:
                existing = await db.get_memory(mem_data["id"], include_relationships=False)
                if existing:
                    skipped_memories += 1
                    logger.debug(f"Skipping duplicate memory: {mem_data['id']}")
                    if progress_callback:
                        progress_callback(idx, total_memories)
                    continue

            # Create Memory object.
            # Note: exported created_at/updated_at are not restored here;
            # the Memory model's defaults apply.
            memory = Memory(
                id=mem_data["id"],
                type=MemoryType(mem_data["type"]),
                title=mem_data["title"],
                content=mem_data["content"],
                summary=mem_data.get("summary"),
                tags=mem_data.get("tags", []),
                importance=mem_data.get("importance", 0.5),
                confidence=mem_data.get("confidence", 0.8)
            )

            # Add context if present
            if "context" in mem_data:
                ctx_data = mem_data["context"]
                memory.context = MemoryContext(**ctx_data)

            # Store memory
            await db.store_memory(memory)
            imported_memories += 1

            if progress_callback:
                progress_callback(idx, total_memories)

        except Exception as e:
            logger.error(f"Failed to import memory {mem_data.get('id')}: {e}")
            skipped_memories += 1

    # Import relationships
    imported_relationships = 0
    skipped_relationships = 0
    total_relationships = len(data["relationships"])

    for idx, rel_data in enumerate(data["relationships"], 1):
        try:
            # Verify both memories exist
            from_mem = await db.get_memory(rel_data["from_memory_id"], include_relationships=False)
            to_mem = await db.get_memory(rel_data["to_memory_id"], include_relationships=False)

            if not from_mem or not to_mem:
                logger.warning("Skipping relationship: one or both memories not found")
                skipped_relationships += 1
                continue

            # Create relationship
            props_data = rel_data.get("properties", {})
            properties = RelationshipProperties(
                strength=props_data.get("strength", 0.5),
                confidence=props_data.get("confidence", 0.8),
                context=props_data.get("context"),
                evidence_count=props_data.get("evidence_count", 1)
            )

            await db.create_relationship(
                from_memory_id=rel_data["from_memory_id"],
                to_memory_id=rel_data["to_memory_id"],
                relationship_type=RelationshipType(rel_data["type"]),
                properties=properties
            )
            imported_relationships += 1

        except Exception as e:
            logger.error(f"Failed to import relationship: {e}")
            skipped_relationships += 1

    logger.info(
        f"Import complete: {imported_memories} memories, {imported_relationships} relationships "
        f"({skipped_memories} memories skipped, {skipped_relationships} relationships skipped)"
    )

    return {
        "imported_memories": imported_memories,
        "imported_relationships": imported_relationships,
        "skipped_memories": skipped_memories,
        "skipped_relationships": skipped_relationships
    }

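
A matching restore sketch (illustrative; with skip_duplicates=True, re-running the import is a no-op for memories that already exist, while relationships are attempted again regardless; whether the backend deduplicates them is not shown in this diff):

async def restore(db) -> dict:
    stats = await import_from_json(
        db,
        "backups/memories.json",
        skip_duplicates=True,  # existing memory IDs are skipped, not overwritten
    )
    if stats["skipped_relationships"]:
        print(f"{stats['skipped_relationships']} relationships could not be restored")
    return stats
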

async def export_to_markdown(
    db,  # MemoryDatabase or SQLiteMemoryDatabase
    output_dir: str
) -> None:
    """
    Export all memories to Markdown files.

    Creates one .md file per memory with frontmatter and content.
    Works with ANY backend by using the MemoryDatabase interface.

    Args:
        db: Database instance (works with all backends)
        output_dir: Directory to write Markdown files

    Raises:
        IOError: If files cannot be written
    """
    logger.info("Starting backend-agnostic markdown export...")

    # Get all memories using pagination helper
    all_memories = []
    async for batch in paginate_memories(db, batch_size=1000):
        all_memories.extend(batch)

    logger.info(f"Exporting {len(all_memories)} memories to markdown...")

    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)

    for memory in all_memories:
        # Create safe filename from title
        safe_title = "".join(c if c.isalnum() or c in (' ', '-', '_') else '_' for c in memory.title)
        safe_title = safe_title.replace(' ', '_')
        filename = f"{safe_title}_{memory.id[:8]}.md"

        # Get relationships for this memory
        related = await db.get_related_memories(memory.id, max_depth=1)

        # Build Markdown content
        content_lines = [
            "---",
            f"title: {memory.title}",
            f"id: {memory.id}",
            f"type: {memory.type.value}",
            f"importance: {memory.importance}",
            f"confidence: {memory.confidence}",
            f"tags: [{', '.join(memory.tags)}]",
            f"created_at: {memory.created_at.isoformat()}",
            f"updated_at: {memory.updated_at.isoformat()}"
        ]

        # Add context
        if memory.context:
            if memory.context.project_path:
                content_lines.append(f"project: {memory.context.project_path}")
            if memory.context.languages:
                content_lines.append(f"languages: [{', '.join(memory.context.languages)}]")
            if memory.context.technologies:
                content_lines.append(f"technologies: [{', '.join(memory.context.technologies)}]")

        content_lines.append("---")
        content_lines.append("")

        # Add summary if present
        if memory.summary:
            content_lines.append("## Summary\n")
            content_lines.append(memory.summary)
            content_lines.append("")

        # Add main content
        content_lines.append("## Content\n")
        content_lines.append(memory.content)
        content_lines.append("")

        # Add relationships
        if related:
            content_lines.append("## Relationships\n")
            for related_memory, relationship in related:
                content_lines.append(
                    f"- **{relationship.type.value}** → [{related_memory.title}]({related_memory.id})"
                )
            content_lines.append("")

        # Write file
        file_path = output_path / filename
        file_path.write_text('\n'.join(content_lines))

    logger.info(f"Exported {len(all_memories)} memories to {output_dir}")
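
For reference, each file written by export_to_markdown has roughly this shape; angle-bracket values are placeholders, and the project/languages/technologies lines appear only when the corresponding context field is set:

---
title: <memory title>
id: <memory id>
type: <memory type value>
importance: 0.8
confidence: 0.9
tags: [tag1, tag2]
created_at: 2025-01-01T00:00:00+00:00
updated_at: 2025-01-01T00:00:00+00:00
project: <project_path>
languages: [<language>]
technologies: [<technology>]
---

## Summary

<summary, if present>

## Content

<memory content>

## Relationships

- **<relationship type>** → [<related title>](<related id>)
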
memorygraph/utils/graph_algorithms.py
@@ -0,0 +1,200 @@

"""
Graph algorithm utilities for MemoryGraph.

This module provides graph algorithms for cycle detection, path finding,
and other graph operations on memory relationships.
"""

import logging
from typing import Set, Optional
from ..models import RelationshipType

logger = logging.getLogger(__name__)


async def has_cycle(
    memory_db,
    from_memory_id: str,
    to_memory_id: str,
    relationship_type: RelationshipType,
    max_depth: int = 100
) -> bool:
    """
    Check if adding a relationship would create a cycle in the graph.

    Uses depth-first search (DFS) to traverse from to_memory_id and check
    if from_memory_id is reachable. If it is, then adding the edge
    from_memory_id → to_memory_id would create a cycle.

    Args:
        memory_db: Database instance to query relationships
        from_memory_id: Source memory ID for the proposed relationship
        to_memory_id: Target memory ID for the proposed relationship
        relationship_type: Type of relationship to check for cycles
        max_depth: Maximum traversal depth (prevents infinite loops)

    Returns:
        True if adding the relationship would create a cycle, False otherwise

    Examples:
        # Check if B → A would create a cycle when A → B exists
        >>> await has_cycle(db, "B", "A", RelationshipType.FOLLOWS)
        True

        # Check if C → D creates a cycle in the linear chain A → B → C
        >>> await has_cycle(db, "C", "D", RelationshipType.FOLLOWS)
        False
    """
    # Self-loops always create cycles
    if from_memory_id == to_memory_id:
        logger.debug(f"Cycle detected: self-loop {from_memory_id} → {from_memory_id}")
        return True

    # Use DFS to check if from_memory_id is reachable from to_memory_id
    visited: Set[str] = set()

    async def dfs(current_id: str, depth: int = 0) -> bool:
        """
        Depth-first search to find if the target is reachable from the current node.

        Args:
            current_id: Current memory ID in traversal
            depth: Current depth in the traversal

        Returns:
            True if from_memory_id is reachable from current_id
        """
        # Depth limit reached
        if depth > max_depth:
            logger.warning(f"Cycle detection depth limit ({max_depth}) reached")
            return False

        # Already visited this node
        if current_id in visited:
            return False

        # Found the target - a cycle would be created
        if current_id == from_memory_id:
            logger.debug(
                f"Cycle detected: {from_memory_id} is reachable from {to_memory_id} "
                f"via {relationship_type.value} relationships"
            )
            return True

        # Mark as visited
        visited.add(current_id)

        try:
            # Follow only outgoing edges of the specified type from the current
            # node. (get_related_memories is not used here because it returns
            # both incoming and outgoing relationships; cycle detection must
            # respect edge direction.)
            targets = await _get_outgoing_relationships(
                memory_db,
                current_id,
                relationship_type
            )

            for target_id in targets:
                if await dfs(target_id, depth + 1):
                    return True

            return False

        except Exception as e:
            logger.error(f"Error during cycle detection DFS: {e}")
            return False

    # Start DFS from to_memory_id
    result = await dfs(to_memory_id)

    if result:
        logger.info(
            f"Cycle would be created: {from_memory_id} → {to_memory_id} "
            f"(type: {relationship_type.value})"
        )
    else:
        logger.debug(
            f"No cycle: {from_memory_id} → {to_memory_id} "
            f"(type: {relationship_type.value})"
        )

    return result

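
A minimal guard sketch showing how has_cycle is meant to be used before inserting an edge (illustrative only; the create_relationship signature is assumed from export_import.py above, including whether `properties` may be omitted):

async def add_edge_safely(db, from_id: str, to_id: str, rel_type: RelationshipType) -> bool:
    if await has_cycle(db, from_id, to_id, rel_type):
        return False  # from_id -> to_id would make the rel_type graph cyclic
    await db.create_relationship(
        from_memory_id=from_id,
        to_memory_id=to_id,
        relationship_type=rel_type,
    )
    return True
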

async def _get_outgoing_relationships(
    memory_db,
    from_memory_id: str,
    relationship_type: RelationshipType
) -> list[str]:
    """
    Get target memory IDs for all outgoing relationships of a specific type.

    Args:
        memory_db: Database instance
        from_memory_id: Source memory ID
        relationship_type: Type of relationships to find

    Returns:
        List of target memory IDs
    """
    try:
        # Query depends on the database backend.
        # For the SQLite backend, query the relationships table directly.
        if hasattr(memory_db, 'backend'):
            query = """
                SELECT to_id FROM relationships
                WHERE from_id = ? AND rel_type = ?
            """
            result = memory_db.backend.execute_sync(
                query,
                (from_memory_id, relationship_type.value)
            )
            return [row['to_id'] for row in result]
        else:
            # For Neo4j/other backends, use a Cypher query
            query = f"""
                MATCH (m:Memory {{id: $from_id}})-[r:{relationship_type.value}]->(target:Memory)
                RETURN target.id as to_id
            """
            result = await memory_db.connection.execute_read_query(
                query,
                {"from_id": from_memory_id}
            )
            return [row['to_id'] for row in result]
    except Exception as e:
        logger.error(f"Error getting outgoing relationships: {e}")
        return []


async def find_all_cycles(
    memory_db,
    relationship_type: Optional[RelationshipType] = None
) -> list[list[str]]:
    """
    Find all cycles in the memory graph.

    Args:
        memory_db: Database instance
        relationship_type: Optional relationship type to filter by

    Returns:
        List of cycles, where each cycle is a list of memory IDs

    Note:
        This is an expensive operation on large graphs.
        Use sparingly or implement as a background task.
    """
    # TODO: Implement for cycle visualization/reporting
    # This would be useful for a CLI command to detect existing cycles
    raise NotImplementedError("find_all_cycles not yet implemented")
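
The stub above leaves enumeration to a future version. For illustration only, one possible DFS-based shape is sketched below; it reuses _get_outgoing_relationships, assumes the caller supplies the full list of memory IDs (e.g. gathered with the paginate_memories helper seen in export_import.py), and reports cycles found via DFS back edges rather than enumerating every distinct simple cycle (which would need something like Johnson's algorithm):

# Hypothetical sketch, not the package's implementation.
async def find_all_cycles_sketch(
    memory_db,
    all_memory_ids: list[str],
    relationship_type: RelationshipType,
) -> list[list[str]]:
    cycles: list[list[str]] = []
    done: Set[str] = set()  # nodes whose outgoing subtree is fully explored

    async def dfs(node: str, stack: list[str], on_stack: Set[str]) -> None:
        # Note: recursive; very deep graphs could hit Python's recursion limit.
        stack.append(node)
        on_stack.add(node)
        for target in await _get_outgoing_relationships(memory_db, node, relationship_type):
            if target in on_stack:
                # Back edge: the cycle is the stack slice from target onward
                cycles.append(stack[stack.index(target):] + [target])
            elif target not in done:
                await dfs(target, stack, on_stack)
        on_stack.discard(node)
        done.add(stack.pop())

    for memory_id in all_memory_ids:
        if memory_id not in done:
            await dfs(memory_id, [], set())
    return cycles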