memorygraphMCP 0.11.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memorygraph/__init__.py +50 -0
- memorygraph/__main__.py +12 -0
- memorygraph/advanced_tools.py +509 -0
- memorygraph/analytics/__init__.py +46 -0
- memorygraph/analytics/advanced_queries.py +727 -0
- memorygraph/backends/__init__.py +21 -0
- memorygraph/backends/base.py +179 -0
- memorygraph/backends/cloud.py +75 -0
- memorygraph/backends/cloud_backend.py +858 -0
- memorygraph/backends/factory.py +577 -0
- memorygraph/backends/falkordb_backend.py +749 -0
- memorygraph/backends/falkordblite_backend.py +746 -0
- memorygraph/backends/ladybugdb_backend.py +242 -0
- memorygraph/backends/memgraph_backend.py +327 -0
- memorygraph/backends/neo4j_backend.py +298 -0
- memorygraph/backends/sqlite_fallback.py +463 -0
- memorygraph/backends/turso.py +448 -0
- memorygraph/cli.py +743 -0
- memorygraph/cloud_database.py +297 -0
- memorygraph/config.py +295 -0
- memorygraph/database.py +933 -0
- memorygraph/graph_analytics.py +631 -0
- memorygraph/integration/__init__.py +69 -0
- memorygraph/integration/context_capture.py +426 -0
- memorygraph/integration/project_analysis.py +583 -0
- memorygraph/integration/workflow_tracking.py +492 -0
- memorygraph/intelligence/__init__.py +59 -0
- memorygraph/intelligence/context_retrieval.py +447 -0
- memorygraph/intelligence/entity_extraction.py +386 -0
- memorygraph/intelligence/pattern_recognition.py +420 -0
- memorygraph/intelligence/temporal.py +374 -0
- memorygraph/migration/__init__.py +27 -0
- memorygraph/migration/manager.py +579 -0
- memorygraph/migration/models.py +142 -0
- memorygraph/migration/scripts/__init__.py +17 -0
- memorygraph/migration/scripts/bitemporal_migration.py +595 -0
- memorygraph/migration/scripts/multitenancy_migration.py +452 -0
- memorygraph/migration_tools_module.py +146 -0
- memorygraph/models.py +684 -0
- memorygraph/proactive/__init__.py +46 -0
- memorygraph/proactive/outcome_learning.py +444 -0
- memorygraph/proactive/predictive.py +410 -0
- memorygraph/proactive/session_briefing.py +399 -0
- memorygraph/relationships.py +668 -0
- memorygraph/server.py +883 -0
- memorygraph/sqlite_database.py +1876 -0
- memorygraph/tools/__init__.py +59 -0
- memorygraph/tools/activity_tools.py +262 -0
- memorygraph/tools/memory_tools.py +315 -0
- memorygraph/tools/migration_tools.py +181 -0
- memorygraph/tools/relationship_tools.py +147 -0
- memorygraph/tools/search_tools.py +406 -0
- memorygraph/tools/temporal_tools.py +339 -0
- memorygraph/utils/__init__.py +10 -0
- memorygraph/utils/context_extractor.py +429 -0
- memorygraph/utils/error_handling.py +151 -0
- memorygraph/utils/export_import.py +425 -0
- memorygraph/utils/graph_algorithms.py +200 -0
- memorygraph/utils/pagination.py +149 -0
- memorygraph/utils/project_detection.py +133 -0
- memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
- memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
- memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
- memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
- memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,448 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Turso (libSQL) backend implementation for MemoryGraph.
|
|
3
|
+
|
|
4
|
+
Provides cloud-hosted SQLite-compatible storage with embedded replica support.
|
|
5
|
+
Users can create a free Turso database for persistent memory in Claude Code Web
|
|
6
|
+
and other remote environments.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import asyncio
|
|
10
|
+
import logging
|
|
11
|
+
import os
|
|
12
|
+
import json
|
|
13
|
+
from typing import Any, Optional
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
|
|
16
|
+
try:
|
|
17
|
+
import libsql_experimental as libsql
|
|
18
|
+
except ImportError:
|
|
19
|
+
libsql = None # type: ignore
|
|
20
|
+
|
|
21
|
+
try:
|
|
22
|
+
import networkx as nx
|
|
23
|
+
except ImportError:
|
|
24
|
+
nx = None
|
|
25
|
+
|
|
26
|
+
from .base import GraphBackend
|
|
27
|
+
from ..models import DatabaseConnectionError, SchemaError
|
|
28
|
+
|
|
29
|
+
logger = logging.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class TursoBackend(GraphBackend):
    """Turso/libSQL backend using same schema as SQLite."""

    def __init__(
        self,
        db_path: Optional[str] = None,
        sync_url: Optional[str] = None,
        auth_token: Optional[str] = None,
    ):
        """
        Initialize Turso backend.

        Args:
            db_path: Path to local database file (for embedded replicas)
            sync_url: Turso database URL (e.g., libsql://your-db.turso.io)
            auth_token: Turso authentication token

        Raises:
            DatabaseConnectionError: If libsql or NetworkX not installed
        """
        if libsql is None:
            raise DatabaseConnectionError(
                "libsql-experimental is required for Turso backend. "
                "Install with: pip install memorygraphMCP[turso]"
            )

        if nx is None:
            raise DatabaseConnectionError(
                "NetworkX is required for Turso backend. "
                "Install with: pip install networkx"
            )

        # Configuration: explicit arguments take precedence over env vars.
        default_path = os.path.expanduser("~/.memorygraph/memory.db")
        self.db_path = db_path or os.getenv("MEMORY_TURSO_PATH", default_path)
        self.sync_url = sync_url or os.getenv("TURSO_DATABASE_URL")
        self.auth_token = auth_token or os.getenv("TURSO_AUTH_TOKEN")

        # Connection state (populated by connect()).
        self.conn = None
        self.graph: Optional[nx.DiGraph] = None  # type: ignore
        self._connected = False

        # Ensure directory exists for local file
        if self.db_path:
            Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)

    async def connect(self) -> bool:
        """
        Establish connection to Turso database.

        Picks one of three modes based on configuration:
        embedded replica (local file + remote sync), remote-only,
        or plain local file.

        Returns:
            True if connection successful

        Raises:
            DatabaseConnectionError: If connection fails
        """
        try:
            if self.sync_url and self.auth_token:
                # Embedded replica mode (local + sync)
                logger.info("Connecting to Turso with embedded replica...")
                self.conn = libsql.connect(
                    self.db_path,
                    sync_url=self.sync_url,
                    auth_token=self.auth_token,
                )
                # Initial sync (run in a worker thread so the event loop
                # is not blocked by the network round-trip).
                await asyncio.to_thread(self.conn.sync)
                logger.info(f"Connected to Turso (embedded replica at {self.db_path})")
            elif self.sync_url:
                # Remote-only mode
                logger.info("Connecting to Turso (remote-only)...")
                self.conn = libsql.connect(
                    database_url=self.sync_url,
                    auth_token=self.auth_token or "",
                )
                logger.info("Connected to Turso (remote)")
            else:
                # Local-only mode (same as SQLite)
                logger.info("Using Turso in local-only mode...")
                self.conn = libsql.connect(self.db_path)
                logger.info(f"Connected to local libSQL database at {self.db_path}")

            self.graph = nx.DiGraph()
            self._connected = True

            # Load existing graph into memory
            await self._load_graph_to_memory()

            return True

        except Exception as e:
            logger.error(f"Failed to connect to Turso: {e}")
            # Chain the original exception so the root cause is preserved.
            raise DatabaseConnectionError(f"Failed to connect to Turso: {e}") from e

    async def disconnect(self) -> None:
        """Close the database connection and sync if needed."""
        if self.conn:
            # Sync before closing (if embedded replica) - non-blocking.
            # A sync failure here is logged but must not prevent close().
            if self.sync_url and self.auth_token:
                try:
                    await asyncio.to_thread(self.conn.sync)
                    logger.info("Synced to Turso before disconnect")
                except Exception as e:
                    logger.warning(f"Failed to sync before disconnect: {e}")

            # Sync graph to database before closing
            await self._sync_to_database()

            self.conn.close()
            self.conn = None
            self.graph = None
            self._connected = False
            logger.info("Turso connection closed")

    async def sync(self) -> None:
        """
        Manually sync embedded replica with Turso primary.

        Only applicable in embedded replica mode.

        Raises:
            DatabaseConnectionError: If the sync round-trip fails.
        """
        if self.conn and self.sync_url and self.auth_token:
            try:
                await asyncio.to_thread(self.conn.sync)
                logger.info("Synced with Turso primary")
            except Exception as e:
                logger.error(f"Failed to sync with Turso: {e}")
                raise DatabaseConnectionError(f"Sync failed: {e}") from e
        else:
            logger.warning("Sync not available (not in embedded replica mode)")

    async def execute_query(
        self,
        query: str,
        parameters: Optional[dict[str, Any]] = None,
        write: bool = False,
    ) -> list[dict[str, Any]]:
        """
        Execute a SQL query against Turso database.

        Args:
            query: SQL query string
            parameters: Query parameters
            write: Whether this is a write operation

        Returns:
            List of result records as dictionaries

        Raises:
            DatabaseConnectionError: If not connected
        """
        if not self._connected or not self.conn:
            raise DatabaseConnectionError(
                "Not connected to Turso. Call connect() first."
            )

        params = parameters or {}

        try:
            # Wrap all blocking libsql operations in asyncio.to_thread
            def _execute_sync():
                cursor = self.conn.cursor()
                cursor.execute(query, params)

                if write:
                    self.conn.commit()

                # Return results; cursor.description is None for
                # statements that produce no result set (e.g. INSERT).
                if cursor.description:
                    columns = [desc[0] for desc in cursor.description]
                    rows = cursor.fetchall()
                    return [dict(zip(columns, row)) for row in rows]
                return []

            # Execute in thread pool
            result = await asyncio.to_thread(_execute_sync)

            # Sync after writes if embedded replica (non-blocking)
            if write and self.sync_url and self.auth_token:
                await asyncio.to_thread(self.conn.sync)

            return result

        except Exception as e:
            logger.error(f"Query execution failed: {e}")
            raise DatabaseConnectionError(f"Query failed: {e}") from e

    async def initialize_schema(self) -> None:
        """
        Initialize database schema (same as SQLite backend).

        Creates the nodes/relationships tables, supporting indexes,
        an external-content FTS5 table, and the triggers that keep
        the FTS index synchronized with the nodes table.

        Raises:
            SchemaError: If schema initialization fails
        """
        logger.info("Initializing Turso schema...")

        if not self.conn:
            raise DatabaseConnectionError("Not connected to database")

        try:
            # Wrap all blocking operations in asyncio.to_thread
            def _initialize_sync():
                cursor = self.conn.cursor()

                # Create nodes table
                cursor.execute(
                    """
                    CREATE TABLE IF NOT EXISTS nodes (
                        id TEXT PRIMARY KEY,
                        type TEXT NOT NULL,
                        title TEXT NOT NULL,
                        content TEXT,
                        summary TEXT,
                        context TEXT,
                        importance REAL DEFAULT 0.5,
                        created_at TEXT NOT NULL,
                        updated_at TEXT NOT NULL,
                        tags TEXT
                    )
                    """
                )

                # Create relationships table
                cursor.execute(
                    """
                    CREATE TABLE IF NOT EXISTS relationships (
                        id TEXT PRIMARY KEY,
                        from_id TEXT NOT NULL,
                        to_id TEXT NOT NULL,
                        type TEXT NOT NULL,
                        context TEXT,
                        strength REAL DEFAULT 0.5,
                        confidence REAL DEFAULT 0.8,
                        created_at TEXT NOT NULL,
                        FOREIGN KEY (from_id) REFERENCES nodes(id) ON DELETE CASCADE,
                        FOREIGN KEY (to_id) REFERENCES nodes(id) ON DELETE CASCADE
                    )
                    """
                )

                # Create indexes for performance
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS idx_nodes_type ON nodes(type)"
                )
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS idx_nodes_created_at ON nodes(created_at)"
                )
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS idx_relationships_from ON relationships(from_id)"
                )
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS idx_relationships_to ON relationships(to_id)"
                )
                cursor.execute(
                    "CREATE INDEX IF NOT EXISTS idx_relationships_type ON relationships(type)"
                )

                # Create full-text search virtual table (external content:
                # row data lives in `nodes`, FTS stores only the index).
                cursor.execute(
                    """
                    CREATE VIRTUAL TABLE IF NOT EXISTS nodes_fts USING fts5(
                        id UNINDEXED,
                        title,
                        content,
                        summary,
                        content='nodes',
                        content_rowid='rowid'
                    )
                    """
                )

                # Create triggers to keep FTS in sync.
                # NOTE: external-content FTS5 tables must not be modified
                # with direct UPDATE/DELETE statements; rows are removed
                # from the index via the special 'delete' command insert
                # (see SQLite FTS5 docs, "External Content Tables").
                cursor.execute(
                    """
                    CREATE TRIGGER IF NOT EXISTS nodes_fts_insert AFTER INSERT ON nodes BEGIN
                        INSERT INTO nodes_fts(rowid, id, title, content, summary)
                        VALUES (new.rowid, new.id, new.title, new.content, new.summary);
                    END
                    """
                )

                cursor.execute(
                    """
                    CREATE TRIGGER IF NOT EXISTS nodes_fts_update AFTER UPDATE ON nodes BEGIN
                        INSERT INTO nodes_fts(nodes_fts, rowid, id, title, content, summary)
                        VALUES ('delete', old.rowid, old.id, old.title, old.content, old.summary);
                        INSERT INTO nodes_fts(rowid, id, title, content, summary)
                        VALUES (new.rowid, new.id, new.title, new.content, new.summary);
                    END
                    """
                )

                cursor.execute(
                    """
                    CREATE TRIGGER IF NOT EXISTS nodes_fts_delete AFTER DELETE ON nodes BEGIN
                        INSERT INTO nodes_fts(nodes_fts, rowid, id, title, content, summary)
                        VALUES ('delete', old.rowid, old.id, old.title, old.content, old.summary);
                    END
                    """
                )

                self.conn.commit()

            # Execute schema initialization in thread pool
            await asyncio.to_thread(_initialize_sync)

            # Sync after schema initialization if embedded replica
            if self.sync_url and self.auth_token:
                await asyncio.to_thread(self.conn.sync)

            logger.info("Turso schema initialized successfully")

        except Exception as e:
            logger.error(f"Failed to initialize schema: {e}")
            raise SchemaError(f"Schema initialization failed: {e}") from e

    async def health_check(self) -> dict[str, Any]:
        """
        Check backend health and connection status.

        Returns:
            Dictionary with health information
        """
        health_info = {
            "backend": "turso",
            "connected": self._connected,
            "database_path": self.db_path,
            "sync_enabled": bool(self.sync_url and self.auth_token),
            "mode": "embedded_replica"
            if (self.sync_url and self.auth_token)
            else ("remote" if self.sync_url else "local"),
        }

        if self._connected and self.conn:
            try:
                # Wrap blocking operations in asyncio.to_thread
                def _get_counts():
                    cursor = self.conn.cursor()
                    cursor.execute("SELECT COUNT(*) as count FROM nodes")
                    node_result = cursor.fetchone()
                    node_count = node_result[0] if node_result else 0

                    cursor.execute("SELECT COUNT(*) as count FROM relationships")
                    rel_result = cursor.fetchone()
                    rel_count = rel_result[0] if rel_result else 0

                    return node_count, rel_count

                node_count, rel_count = await asyncio.to_thread(_get_counts)

                health_info["node_count"] = node_count
                health_info["relationship_count"] = rel_count
                health_info["status"] = "healthy"
            except Exception as e:
                # Report the failure in the health payload instead of raising:
                # health_check is a diagnostic and should not crash callers.
                health_info["status"] = "error"
                health_info["error"] = str(e)
        else:
            health_info["status"] = "disconnected"

        return health_info

    def backend_name(self) -> str:
        """Return the backend identifier."""
        return "turso"

    def supports_fulltext_search(self) -> bool:
        """Turso supports FTS5 full-text search (SQLite compatible)."""
        return True

    def supports_transactions(self) -> bool:
        """Turso supports ACID transactions."""
        return True

    # Helper methods (same as SQLite backend)

    async def _load_graph_to_memory(self) -> None:
        """Load graph from database into NetworkX graph."""
        # Compare against None explicitly: a freshly created (empty)
        # nx.DiGraph is falsy (its __len__ is the node count), so the
        # previous `not self.graph` guard skipped loading entirely.
        if self.conn is None or self.graph is None:
            return

        try:
            # Wrap blocking operations in asyncio.to_thread
            def _load_sync():
                cursor = self.conn.cursor()

                # Load nodes
                cursor.execute("SELECT id, type, title FROM nodes")
                nodes = cursor.fetchall()

                # Load relationships
                cursor.execute("SELECT from_id, to_id, type FROM relationships")
                relationships = cursor.fetchall()

                return nodes, relationships

            nodes, relationships = await asyncio.to_thread(_load_sync)

            # Add to graph (NetworkX operations are fast, no need to thread)
            for row in nodes:
                self.graph.add_node(row[0], type=row[1], title=row[2])

            for row in relationships:
                self.graph.add_edge(row[0], row[1], type=row[2])

            logger.info(
                f"Loaded graph: {self.graph.number_of_nodes()} nodes, "
                f"{self.graph.number_of_edges()} edges"
            )

        except Exception as e:
            # Best-effort: the in-memory graph is a cache, so a load failure
            # degrades functionality but should not abort the connection.
            logger.warning(f"Failed to load graph to memory: {e}")

    async def _sync_to_database(self) -> None:
        """Sync NetworkX graph to database (for consistency)."""
        # In Turso backend, we primarily use database operations
        # This is here for compatibility with SQLite backend pattern
        pass