memorygraphMCP 0.11.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memorygraph/__init__.py +50 -0
- memorygraph/__main__.py +12 -0
- memorygraph/advanced_tools.py +509 -0
- memorygraph/analytics/__init__.py +46 -0
- memorygraph/analytics/advanced_queries.py +727 -0
- memorygraph/backends/__init__.py +21 -0
- memorygraph/backends/base.py +179 -0
- memorygraph/backends/cloud.py +75 -0
- memorygraph/backends/cloud_backend.py +858 -0
- memorygraph/backends/factory.py +577 -0
- memorygraph/backends/falkordb_backend.py +749 -0
- memorygraph/backends/falkordblite_backend.py +746 -0
- memorygraph/backends/ladybugdb_backend.py +242 -0
- memorygraph/backends/memgraph_backend.py +327 -0
- memorygraph/backends/neo4j_backend.py +298 -0
- memorygraph/backends/sqlite_fallback.py +463 -0
- memorygraph/backends/turso.py +448 -0
- memorygraph/cli.py +743 -0
- memorygraph/cloud_database.py +297 -0
- memorygraph/config.py +295 -0
- memorygraph/database.py +933 -0
- memorygraph/graph_analytics.py +631 -0
- memorygraph/integration/__init__.py +69 -0
- memorygraph/integration/context_capture.py +426 -0
- memorygraph/integration/project_analysis.py +583 -0
- memorygraph/integration/workflow_tracking.py +492 -0
- memorygraph/intelligence/__init__.py +59 -0
- memorygraph/intelligence/context_retrieval.py +447 -0
- memorygraph/intelligence/entity_extraction.py +386 -0
- memorygraph/intelligence/pattern_recognition.py +420 -0
- memorygraph/intelligence/temporal.py +374 -0
- memorygraph/migration/__init__.py +27 -0
- memorygraph/migration/manager.py +579 -0
- memorygraph/migration/models.py +142 -0
- memorygraph/migration/scripts/__init__.py +17 -0
- memorygraph/migration/scripts/bitemporal_migration.py +595 -0
- memorygraph/migration/scripts/multitenancy_migration.py +452 -0
- memorygraph/migration_tools_module.py +146 -0
- memorygraph/models.py +684 -0
- memorygraph/proactive/__init__.py +46 -0
- memorygraph/proactive/outcome_learning.py +444 -0
- memorygraph/proactive/predictive.py +410 -0
- memorygraph/proactive/session_briefing.py +399 -0
- memorygraph/relationships.py +668 -0
- memorygraph/server.py +883 -0
- memorygraph/sqlite_database.py +1876 -0
- memorygraph/tools/__init__.py +59 -0
- memorygraph/tools/activity_tools.py +262 -0
- memorygraph/tools/memory_tools.py +315 -0
- memorygraph/tools/migration_tools.py +181 -0
- memorygraph/tools/relationship_tools.py +147 -0
- memorygraph/tools/search_tools.py +406 -0
- memorygraph/tools/temporal_tools.py +339 -0
- memorygraph/utils/__init__.py +10 -0
- memorygraph/utils/context_extractor.py +429 -0
- memorygraph/utils/error_handling.py +151 -0
- memorygraph/utils/export_import.py +425 -0
- memorygraph/utils/graph_algorithms.py +200 -0
- memorygraph/utils/pagination.py +149 -0
- memorygraph/utils/project_detection.py +133 -0
- memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
- memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
- memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
- memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
- memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,858 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Cloud backend for MemoryGraph MCP Server.
|
|
3
|
+
|
|
4
|
+
This module provides a backend that communicates with the MemoryGraph Cloud API,
|
|
5
|
+
enabling multi-device sync, team collaboration, and cloud-based memory storage.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import asyncio
|
|
9
|
+
import logging
|
|
10
|
+
import os
|
|
11
|
+
import time
|
|
12
|
+
from datetime import datetime, timezone
|
|
13
|
+
from typing import Any, Optional
|
|
14
|
+
|
|
15
|
+
import httpx
|
|
16
|
+
|
|
17
|
+
from .base import GraphBackend
|
|
18
|
+
from ..models import (
|
|
19
|
+
Memory, MemoryType, MemoryContext, Relationship, RelationshipType,
|
|
20
|
+
RelationshipProperties, SearchQuery, DatabaseConnectionError,
|
|
21
|
+
MemoryNotFoundError, ValidationError
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
logger = logging.getLogger(__name__)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class CircuitBreaker:
    """Fail-fast guard against cascading failures (circuit breaker pattern).

    The breaker moves between three states:
    - "closed": normal operation, every request is allowed.
    - "open": too many consecutive failures; requests are rejected until
      the recovery timeout has elapsed.
    - "half_open": probe mode after the timeout; one success closes the
      circuit again, one failure reopens it.
    """

    def __init__(self, failure_threshold: int = 5, recovery_timeout: float = 60.0):
        """Create a breaker.

        Args:
            failure_threshold: Consecutive failures required to open the circuit.
            recovery_timeout: Seconds to wait before probing for recovery.
        """
        self.failure_threshold = failure_threshold
        self.recovery_timeout = recovery_timeout
        self.failure_count = 0
        self.last_failure_time: Optional[float] = None
        self.state = "closed"  # one of: closed, open, half_open

    def can_execute(self) -> bool:
        """Return True when a request may proceed, False to fail fast."""
        if self.state == "open":
            # Only an elapsed recovery timeout lets a probe request through.
            timed_out = bool(
                self.last_failure_time
                and time.time() - self.last_failure_time >= self.recovery_timeout
            )
            if not timed_out:
                return False
            logger.info("Circuit breaker entering half-open state for recovery attempt")
            self.state = "half_open"
            return True
        # "closed" and "half_open" both allow the request.
        return True

    def record_success(self) -> None:
        """Reset the breaker after a successful request."""
        if self.state == "half_open":
            logger.info("Circuit breaker closing after successful recovery")
        self.failure_count = 0
        self.last_failure_time = None
        self.state = "closed"

    def record_failure(self) -> None:
        """Count a failure and open the circuit when warranted."""
        self.failure_count += 1
        self.last_failure_time = time.time()

        if self.state == "half_open":
            # The probe request failed: go straight back to open.
            logger.warning("Circuit breaker reopening after failed recovery attempt")
        elif self.failure_count >= self.failure_threshold:
            logger.warning(
                f"Circuit breaker opening after {self.failure_count} consecutive failures. "
                f"Will retry in {self.recovery_timeout} seconds"
            )
        else:
            # Below threshold and not probing: stay in the current state.
            return
        self.state = "open"
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class CloudBackendError(Exception):
    """Base exception for cloud backend errors."""


class AuthenticationError(CloudBackendError):
    """Raised when API key is invalid or expired."""


class UsageLimitExceeded(CloudBackendError):
    """Raised when usage limits are exceeded."""


class RateLimitExceeded(CloudBackendError):
    """Raised when rate limits are exceeded."""

    def __init__(self, message: str, retry_after: Optional[int] = None):
        """Capture the message plus the server-suggested retry delay.

        Args:
            message: Human-readable error description.
            retry_after: Seconds to wait before retrying, when the server
                supplied a Retry-After header; None otherwise.
        """
        super().__init__(message)
        self.retry_after = retry_after


class CircuitBreakerOpenError(CloudBackendError):
    """Raised when circuit breaker is open (failing fast)."""
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
class CloudBackend(GraphBackend):
    """
    Cloud backend that connects to MemoryGraph Cloud API.

    This backend enables:
    - Multi-device synchronization
    - Team collaboration and shared memories
    - Cloud-based storage with automatic backups
    - Usage tracking and analytics

    Configuration:
        MEMORYGRAPH_API_KEY: API key for authentication (required)
        MEMORYGRAPH_API_URL: API base URL (default: https://graph-api.memorygraph.dev)
        MEMORYGRAPH_TIMEOUT: Request timeout in seconds (default: 30)
    """

    # Production API URL - configurable via MEMORYGRAPH_API_URL environment variable
    DEFAULT_API_URL = "https://graph-api.memorygraph.dev"
    # Per-request timeout in seconds, overridable via MEMORYGRAPH_TIMEOUT.
    DEFAULT_TIMEOUT = 30
    # Max retry attempts for timeouts, connection errors, and 5xx responses.
    MAX_RETRIES = 3
    # Base for exponential backoff between retries (base * 2**attempt).
    RETRY_BACKOFF_BASE = 1.0  # seconds
|
|
147
|
+
|
|
148
|
+
def __init__(
|
|
149
|
+
self,
|
|
150
|
+
api_key: Optional[str] = None,
|
|
151
|
+
api_url: Optional[str] = None,
|
|
152
|
+
timeout: Optional[int] = None
|
|
153
|
+
):
|
|
154
|
+
"""
|
|
155
|
+
Initialize cloud backend.
|
|
156
|
+
|
|
157
|
+
Args:
|
|
158
|
+
api_key: API key for authentication. If not provided, reads from
|
|
159
|
+
MEMORYGRAPH_API_KEY environment variable.
|
|
160
|
+
api_url: Base URL for the Graph API. Defaults to production URL.
|
|
161
|
+
timeout: Request timeout in seconds. Defaults to 30.
|
|
162
|
+
|
|
163
|
+
Raises:
|
|
164
|
+
DatabaseConnectionError: If API key is not provided.
|
|
165
|
+
"""
|
|
166
|
+
self.api_key = api_key or os.getenv("MEMORYGRAPH_API_KEY")
|
|
167
|
+
self.api_url = (
|
|
168
|
+
api_url or
|
|
169
|
+
os.getenv("MEMORYGRAPH_API_URL") or
|
|
170
|
+
self.DEFAULT_API_URL
|
|
171
|
+
).rstrip("/")
|
|
172
|
+
self.timeout = timeout or int(os.getenv("MEMORYGRAPH_TIMEOUT", str(self.DEFAULT_TIMEOUT)))
|
|
173
|
+
|
|
174
|
+
if not self.api_key:
|
|
175
|
+
raise DatabaseConnectionError(
|
|
176
|
+
"MEMORYGRAPH_API_KEY is required for cloud backend. "
|
|
177
|
+
"Get your API key at https://app.memorygraph.dev"
|
|
178
|
+
)
|
|
179
|
+
|
|
180
|
+
if not self.api_key.startswith("mg_"):
|
|
181
|
+
logger.warning(
|
|
182
|
+
"API key does not start with 'mg_' prefix. "
|
|
183
|
+
"Ensure you're using a valid MemoryGraph API key."
|
|
184
|
+
)
|
|
185
|
+
|
|
186
|
+
self._client: Optional[httpx.AsyncClient] = None
|
|
187
|
+
self._connected = False
|
|
188
|
+
self._circuit_breaker = CircuitBreaker(failure_threshold=5, recovery_timeout=60.0)
|
|
189
|
+
|
|
190
|
+
def _get_headers(self) -> dict[str, str]:
|
|
191
|
+
"""Get headers for API requests."""
|
|
192
|
+
return {
|
|
193
|
+
"X-API-Key": self.api_key,
|
|
194
|
+
"Content-Type": "application/json",
|
|
195
|
+
"User-Agent": "memorygraph-mcp/1.0"
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
async def _get_client(self) -> httpx.AsyncClient:
|
|
199
|
+
"""Get or create the HTTP client."""
|
|
200
|
+
if self._client is None or self._client.is_closed:
|
|
201
|
+
self._client = httpx.AsyncClient(
|
|
202
|
+
base_url=self.api_url,
|
|
203
|
+
headers=self._get_headers(),
|
|
204
|
+
timeout=httpx.Timeout(self.timeout),
|
|
205
|
+
follow_redirects=True
|
|
206
|
+
)
|
|
207
|
+
return self._client
|
|
208
|
+
|
|
209
|
+
async def _request(
|
|
210
|
+
self,
|
|
211
|
+
method: str,
|
|
212
|
+
path: str,
|
|
213
|
+
json: Optional[dict] = None,
|
|
214
|
+
params: Optional[dict] = None,
|
|
215
|
+
retry_count: int = 0
|
|
216
|
+
) -> dict[str, Any]:
|
|
217
|
+
"""
|
|
218
|
+
Make an HTTP request with retry logic and circuit breaker.
|
|
219
|
+
|
|
220
|
+
Args:
|
|
221
|
+
method: HTTP method (GET, POST, PUT, DELETE)
|
|
222
|
+
path: API path (e.g., "/memories")
|
|
223
|
+
json: JSON body for POST/PUT requests
|
|
224
|
+
params: Query parameters
|
|
225
|
+
retry_count: Current retry attempt
|
|
226
|
+
|
|
227
|
+
Returns:
|
|
228
|
+
Response data as dictionary
|
|
229
|
+
|
|
230
|
+
Raises:
|
|
231
|
+
AuthenticationError: If API key is invalid
|
|
232
|
+
UsageLimitExceeded: If usage limits exceeded
|
|
233
|
+
RateLimitExceeded: If rate limits exceeded
|
|
234
|
+
CircuitBreakerOpenError: If circuit breaker is open
|
|
235
|
+
DatabaseConnectionError: For network or server errors
|
|
236
|
+
"""
|
|
237
|
+
# Check circuit breaker
|
|
238
|
+
if not self._circuit_breaker.can_execute():
|
|
239
|
+
raise CircuitBreakerOpenError(
|
|
240
|
+
"Circuit breaker is open due to repeated failures. "
|
|
241
|
+
f"Will retry in {self._circuit_breaker.recovery_timeout} seconds."
|
|
242
|
+
)
|
|
243
|
+
|
|
244
|
+
client = await self._get_client()
|
|
245
|
+
|
|
246
|
+
try:
|
|
247
|
+
response = await client.request(
|
|
248
|
+
method=method,
|
|
249
|
+
url=path,
|
|
250
|
+
json=json,
|
|
251
|
+
params=params
|
|
252
|
+
)
|
|
253
|
+
|
|
254
|
+
# Handle specific error codes
|
|
255
|
+
if response.status_code == 401:
|
|
256
|
+
raise AuthenticationError(
|
|
257
|
+
"Invalid API key. Get a valid key at https://app.memorygraph.dev"
|
|
258
|
+
)
|
|
259
|
+
|
|
260
|
+
if response.status_code == 403:
|
|
261
|
+
error_data = response.json() if response.content else {}
|
|
262
|
+
raise UsageLimitExceeded(
|
|
263
|
+
error_data.get("detail", "Usage limit exceeded. Upgrade at https://app.memorygraph.dev/pricing")
|
|
264
|
+
)
|
|
265
|
+
|
|
266
|
+
if response.status_code == 404:
|
|
267
|
+
# Raise consistent exception for not found
|
|
268
|
+
raise MemoryNotFoundError(f"Resource not found: {path}")
|
|
269
|
+
|
|
270
|
+
if response.status_code == 429:
|
|
271
|
+
retry_after = response.headers.get("Retry-After")
|
|
272
|
+
raise RateLimitExceeded(
|
|
273
|
+
"Rate limit exceeded. Please slow down requests.",
|
|
274
|
+
retry_after=int(retry_after) if retry_after else None
|
|
275
|
+
)
|
|
276
|
+
|
|
277
|
+
if response.status_code >= 500:
|
|
278
|
+
# Server error - retry with backoff
|
|
279
|
+
self._circuit_breaker.record_failure()
|
|
280
|
+
if retry_count < self.MAX_RETRIES:
|
|
281
|
+
backoff = self.RETRY_BACKOFF_BASE * (2 ** retry_count)
|
|
282
|
+
logger.warning(
|
|
283
|
+
f"Server error {response.status_code}, "
|
|
284
|
+
f"retrying in {backoff}s (attempt {retry_count + 1}/{self.MAX_RETRIES})"
|
|
285
|
+
)
|
|
286
|
+
await asyncio.sleep(backoff)
|
|
287
|
+
return await self._request(method, path, json, params, retry_count + 1)
|
|
288
|
+
else:
|
|
289
|
+
raise DatabaseConnectionError(
|
|
290
|
+
f"Graph API server error after {self.MAX_RETRIES} retries: {response.status_code}"
|
|
291
|
+
)
|
|
292
|
+
|
|
293
|
+
response.raise_for_status()
|
|
294
|
+
|
|
295
|
+
# Record success with circuit breaker
|
|
296
|
+
self._circuit_breaker.record_success()
|
|
297
|
+
|
|
298
|
+
if response.status_code == 204:
|
|
299
|
+
return {}
|
|
300
|
+
|
|
301
|
+
return response.json()
|
|
302
|
+
|
|
303
|
+
except httpx.TimeoutException:
|
|
304
|
+
self._circuit_breaker.record_failure()
|
|
305
|
+
if retry_count < self.MAX_RETRIES:
|
|
306
|
+
backoff = self.RETRY_BACKOFF_BASE * (2 ** retry_count)
|
|
307
|
+
logger.warning(
|
|
308
|
+
f"Request timeout, retrying in {backoff}s "
|
|
309
|
+
f"(attempt {retry_count + 1}/{self.MAX_RETRIES})"
|
|
310
|
+
)
|
|
311
|
+
await asyncio.sleep(backoff)
|
|
312
|
+
return await self._request(method, path, json, params, retry_count + 1)
|
|
313
|
+
raise DatabaseConnectionError(
|
|
314
|
+
f"Request timeout after {self.MAX_RETRIES} retries"
|
|
315
|
+
)
|
|
316
|
+
|
|
317
|
+
except httpx.ConnectError as e:
|
|
318
|
+
self._circuit_breaker.record_failure()
|
|
319
|
+
if retry_count < self.MAX_RETRIES:
|
|
320
|
+
backoff = self.RETRY_BACKOFF_BASE * (2 ** retry_count)
|
|
321
|
+
logger.warning(
|
|
322
|
+
f"Connection error, retrying in {backoff}s "
|
|
323
|
+
f"(attempt {retry_count + 1}/{self.MAX_RETRIES})"
|
|
324
|
+
)
|
|
325
|
+
await asyncio.sleep(backoff)
|
|
326
|
+
return await self._request(method, path, json, params, retry_count + 1)
|
|
327
|
+
raise DatabaseConnectionError(
|
|
328
|
+
f"Cannot connect to Graph API at {self.api_url}: {e}"
|
|
329
|
+
)
|
|
330
|
+
|
|
331
|
+
except (AuthenticationError, UsageLimitExceeded, RateLimitExceeded, MemoryNotFoundError):
|
|
332
|
+
raise
|
|
333
|
+
|
|
334
|
+
except httpx.HTTPStatusError as e:
|
|
335
|
+
raise DatabaseConnectionError(f"HTTP error: {e}")
|
|
336
|
+
|
|
337
|
+
except Exception as e:
|
|
338
|
+
raise DatabaseConnectionError(f"Unexpected error: {e}")
|
|
339
|
+
|
|
340
|
+
# =========================================================================
|
|
341
|
+
# GraphBackend Interface Implementation
|
|
342
|
+
# =========================================================================
|
|
343
|
+
|
|
344
|
+
async def connect(self) -> bool:
|
|
345
|
+
"""
|
|
346
|
+
Establish connection to the cloud API.
|
|
347
|
+
|
|
348
|
+
Returns:
|
|
349
|
+
True if connection successful
|
|
350
|
+
|
|
351
|
+
Raises:
|
|
352
|
+
DatabaseConnectionError: If connection fails
|
|
353
|
+
AuthenticationError: If API key is invalid
|
|
354
|
+
"""
|
|
355
|
+
try:
|
|
356
|
+
logger.info(f"Connecting to MemoryGraph Cloud at {self.api_url}...")
|
|
357
|
+
|
|
358
|
+
# Verify connection with health check
|
|
359
|
+
result = await self._request("GET", "/health")
|
|
360
|
+
|
|
361
|
+
if result and result.get("status") == "healthy":
|
|
362
|
+
self._connected = True
|
|
363
|
+
logger.info("✓ Successfully connected to MemoryGraph Cloud")
|
|
364
|
+
return True
|
|
365
|
+
else:
|
|
366
|
+
raise DatabaseConnectionError(
|
|
367
|
+
f"Health check failed: {result}"
|
|
368
|
+
)
|
|
369
|
+
|
|
370
|
+
except AuthenticationError:
|
|
371
|
+
raise
|
|
372
|
+
except Exception as e:
|
|
373
|
+
raise DatabaseConnectionError(f"Failed to connect to cloud: {e}")
|
|
374
|
+
|
|
375
|
+
async def disconnect(self) -> None:
|
|
376
|
+
"""Close the connection and clean up resources."""
|
|
377
|
+
if self._client and not self._client.is_closed:
|
|
378
|
+
await self._client.aclose()
|
|
379
|
+
self._client = None
|
|
380
|
+
self._connected = False
|
|
381
|
+
logger.info("Disconnected from MemoryGraph Cloud")
|
|
382
|
+
|
|
383
|
+
async def execute_query(
|
|
384
|
+
self,
|
|
385
|
+
query: str,
|
|
386
|
+
parameters: Optional[dict[str, Any]] = None,
|
|
387
|
+
write: bool = False
|
|
388
|
+
) -> list[dict[str, Any]]:
|
|
389
|
+
"""
|
|
390
|
+
Execute a query (not supported for cloud backend).
|
|
391
|
+
|
|
392
|
+
Cloud backend uses REST API, not Cypher queries.
|
|
393
|
+
Use the specific memory/relationship methods instead.
|
|
394
|
+
|
|
395
|
+
Raises:
|
|
396
|
+
NotImplementedError: Always, as cloud backend doesn't support raw queries
|
|
397
|
+
"""
|
|
398
|
+
raise NotImplementedError(
|
|
399
|
+
"Cloud backend does not support raw Cypher queries. "
|
|
400
|
+
"Use store_memory(), search_memories(), etc. instead."
|
|
401
|
+
)
|
|
402
|
+
|
|
403
|
+
    async def initialize_schema(self) -> None:
        """
        Initialize schema (no-op for cloud backend).

        Schema is managed by the cloud service.
        """
        # Nothing to create locally; the cloud API owns its own schema.
        logger.debug("Schema initialization skipped - managed by cloud service")
|
|
410
|
+
|
|
411
|
+
async def health_check(self) -> dict[str, Any]:
|
|
412
|
+
"""
|
|
413
|
+
Check cloud API health and return status.
|
|
414
|
+
|
|
415
|
+
Returns:
|
|
416
|
+
Dictionary with health check results
|
|
417
|
+
"""
|
|
418
|
+
try:
|
|
419
|
+
result = await self._request("GET", "/health")
|
|
420
|
+
return {
|
|
421
|
+
"connected": True,
|
|
422
|
+
"backend_type": "cloud",
|
|
423
|
+
"api_url": self.api_url,
|
|
424
|
+
"status": result.get("status", "unknown"),
|
|
425
|
+
"version": result.get("version", "unknown")
|
|
426
|
+
}
|
|
427
|
+
except Exception as e:
|
|
428
|
+
return {
|
|
429
|
+
"connected": False,
|
|
430
|
+
"backend_type": "cloud",
|
|
431
|
+
"api_url": self.api_url,
|
|
432
|
+
"error": str(e)
|
|
433
|
+
}
|
|
434
|
+
|
|
435
|
+
    def backend_name(self) -> str:
        """Return backend name."""
        # Identifier used to distinguish this backend from local ones.
        return "cloud"

    def supports_fulltext_search(self) -> bool:
        """Cloud backend supports full-text search."""
        # The /memories/search and /memories/recall endpoints handle text queries.
        return True

    def supports_transactions(self) -> bool:
        """Cloud backend handles transactions server-side."""
        return True
|
|
446
|
+
|
|
447
|
+
# =========================================================================
|
|
448
|
+
# Memory Operations
|
|
449
|
+
# =========================================================================
|
|
450
|
+
|
|
451
|
+
async def store_memory(self, memory: Memory) -> str:
|
|
452
|
+
"""
|
|
453
|
+
Store a memory in the cloud.
|
|
454
|
+
|
|
455
|
+
Args:
|
|
456
|
+
memory: Memory object to store
|
|
457
|
+
|
|
458
|
+
Returns:
|
|
459
|
+
ID of the stored memory
|
|
460
|
+
|
|
461
|
+
Raises:
|
|
462
|
+
UsageLimitExceeded: If storage limits exceeded
|
|
463
|
+
DatabaseConnectionError: If storage fails
|
|
464
|
+
"""
|
|
465
|
+
payload = self._memory_to_api_payload(memory)
|
|
466
|
+
|
|
467
|
+
result = await self._request("POST", "/memories", json=payload)
|
|
468
|
+
|
|
469
|
+
memory_id = result.get("id") or result.get("memory_id")
|
|
470
|
+
logger.info(f"Stored memory in cloud: {memory_id}")
|
|
471
|
+
return memory_id
|
|
472
|
+
|
|
473
|
+
async def get_memory(self, memory_id: str) -> Optional[Memory]:
|
|
474
|
+
"""
|
|
475
|
+
Retrieve a memory by ID.
|
|
476
|
+
|
|
477
|
+
Args:
|
|
478
|
+
memory_id: ID of the memory
|
|
479
|
+
|
|
480
|
+
Returns:
|
|
481
|
+
Memory object if found, None otherwise
|
|
482
|
+
|
|
483
|
+
Raises:
|
|
484
|
+
MemoryNotFoundError: If memory doesn't exist
|
|
485
|
+
"""
|
|
486
|
+
try:
|
|
487
|
+
result = await self._request("GET", f"/memories/{memory_id}")
|
|
488
|
+
return self._api_response_to_memory(result)
|
|
489
|
+
except MemoryNotFoundError:
|
|
490
|
+
# get_memory returns None for not found (API contract)
|
|
491
|
+
return None
|
|
492
|
+
|
|
493
|
+
async def update_memory(self, memory_id: str, updates: dict[str, Any]) -> Optional[Memory]:
|
|
494
|
+
"""
|
|
495
|
+
Update an existing memory.
|
|
496
|
+
|
|
497
|
+
Args:
|
|
498
|
+
memory_id: ID of the memory to update
|
|
499
|
+
updates: Dictionary of fields to update
|
|
500
|
+
|
|
501
|
+
Returns:
|
|
502
|
+
Updated Memory object
|
|
503
|
+
|
|
504
|
+
Raises:
|
|
505
|
+
MemoryNotFoundError: If memory doesn't exist
|
|
506
|
+
"""
|
|
507
|
+
result = await self._request("PUT", f"/memories/{memory_id}", json=updates)
|
|
508
|
+
return self._api_response_to_memory(result)
|
|
509
|
+
|
|
510
|
+
async def delete_memory(self, memory_id: str) -> bool:
|
|
511
|
+
"""
|
|
512
|
+
Delete a memory.
|
|
513
|
+
|
|
514
|
+
Args:
|
|
515
|
+
memory_id: ID of the memory to delete
|
|
516
|
+
|
|
517
|
+
Returns:
|
|
518
|
+
True if deleted successfully
|
|
519
|
+
|
|
520
|
+
Raises:
|
|
521
|
+
MemoryNotFoundError: If memory doesn't exist
|
|
522
|
+
"""
|
|
523
|
+
await self._request("DELETE", f"/memories/{memory_id}")
|
|
524
|
+
logger.info(f"Deleted memory from cloud: {memory_id}")
|
|
525
|
+
return True
|
|
526
|
+
|
|
527
|
+
# =========================================================================
|
|
528
|
+
# Relationship Operations
|
|
529
|
+
# =========================================================================
|
|
530
|
+
|
|
531
|
+
async def create_relationship(
|
|
532
|
+
self,
|
|
533
|
+
from_memory_id: str,
|
|
534
|
+
to_memory_id: str,
|
|
535
|
+
relationship_type: RelationshipType,
|
|
536
|
+
properties: Optional[RelationshipProperties] = None
|
|
537
|
+
) -> str:
|
|
538
|
+
"""
|
|
539
|
+
Create a relationship between two memories.
|
|
540
|
+
|
|
541
|
+
Args:
|
|
542
|
+
from_memory_id: Source memory ID
|
|
543
|
+
to_memory_id: Target memory ID
|
|
544
|
+
relationship_type: Type of relationship
|
|
545
|
+
properties: Optional relationship properties
|
|
546
|
+
|
|
547
|
+
Returns:
|
|
548
|
+
ID of the created relationship
|
|
549
|
+
"""
|
|
550
|
+
payload = {
|
|
551
|
+
"from_memory_id": from_memory_id,
|
|
552
|
+
"to_memory_id": to_memory_id,
|
|
553
|
+
"relationship_type": relationship_type.value,
|
|
554
|
+
}
|
|
555
|
+
|
|
556
|
+
if properties:
|
|
557
|
+
payload["strength"] = properties.strength
|
|
558
|
+
payload["confidence"] = properties.confidence
|
|
559
|
+
if properties.context:
|
|
560
|
+
payload["context"] = properties.context
|
|
561
|
+
|
|
562
|
+
result = await self._request("POST", "/relationships", json=payload)
|
|
563
|
+
|
|
564
|
+
relationship_id = result.get("id") or result.get("relationship_id")
|
|
565
|
+
logger.info(
|
|
566
|
+
f"Created relationship in cloud: {from_memory_id} "
|
|
567
|
+
f"-[{relationship_type.value}]-> {to_memory_id}"
|
|
568
|
+
)
|
|
569
|
+
return relationship_id
|
|
570
|
+
|
|
571
|
+
    async def get_related_memories(
        self,
        memory_id: str,
        relationship_types: Optional[list[RelationshipType]] = None,
        max_depth: int = 1
    ) -> list[tuple[Memory, Relationship]]:
        """
        Get memories related to a specific memory.

        Args:
            memory_id: ID of the memory
            relationship_types: Filter by relationship types
            max_depth: Maximum traversal depth

        Returns:
            List of (Memory, Relationship) tuples, empty if memory not found
        """
        params = {"max_depth": max_depth}

        if relationship_types:
            # The API expects a comma-separated list of type names.
            params["relationship_types"] = ",".join(rt.value for rt in relationship_types)

        try:
            result = await self._request(
                "GET",
                f"/memories/{memory_id}/related",
                params=params
            )
        except MemoryNotFoundError:
            # Memory doesn't exist, return empty list
            return []

        if not result:
            return []

        related = []
        for item in result.get("related_memories", []):
            # Each item may nest the memory under "memory" or be the memory itself.
            memory = self._api_response_to_memory(item.get("memory", item))

            rel_data = item.get("relationship", {})
            try:
                rel_type = RelationshipType(rel_data.get("type", "RELATED_TO"))
            except ValueError:
                # Unknown server-side type: degrade to the generic relation.
                rel_type = RelationshipType.RELATED_TO

            relationship = Relationship(
                from_memory_id=memory_id,
                to_memory_id=memory.id,
                type=rel_type,
                properties=RelationshipProperties(
                    # Defaults mirror those used in _api_response_to_memory.
                    strength=rel_data.get("strength", 0.5),
                    confidence=rel_data.get("confidence", 0.8),
                    context=rel_data.get("context")
                )
            )
            related.append((memory, relationship))

        return related
|
|
629
|
+
|
|
630
|
+
# =========================================================================
|
|
631
|
+
# Search Operations
|
|
632
|
+
# =========================================================================
|
|
633
|
+
|
|
634
|
+
async def search_memories(self, search_query: SearchQuery) -> list[Memory]:
|
|
635
|
+
"""
|
|
636
|
+
Search for memories based on query parameters.
|
|
637
|
+
|
|
638
|
+
Args:
|
|
639
|
+
search_query: SearchQuery object with filter criteria
|
|
640
|
+
|
|
641
|
+
Returns:
|
|
642
|
+
List of matching Memory objects
|
|
643
|
+
"""
|
|
644
|
+
payload = {}
|
|
645
|
+
|
|
646
|
+
if search_query.query:
|
|
647
|
+
payload["query"] = search_query.query
|
|
648
|
+
|
|
649
|
+
if search_query.memory_types:
|
|
650
|
+
payload["memory_types"] = [mt.value for mt in search_query.memory_types]
|
|
651
|
+
|
|
652
|
+
if search_query.tags:
|
|
653
|
+
payload["tags"] = search_query.tags
|
|
654
|
+
|
|
655
|
+
if search_query.project_path:
|
|
656
|
+
payload["project_path"] = search_query.project_path
|
|
657
|
+
|
|
658
|
+
if search_query.min_importance is not None:
|
|
659
|
+
payload["min_importance"] = search_query.min_importance
|
|
660
|
+
|
|
661
|
+
if search_query.limit:
|
|
662
|
+
payload["limit"] = search_query.limit
|
|
663
|
+
|
|
664
|
+
if search_query.offset:
|
|
665
|
+
payload["offset"] = search_query.offset
|
|
666
|
+
|
|
667
|
+
result = await self._request("POST", "/memories/search", json=payload)
|
|
668
|
+
|
|
669
|
+
memories = []
|
|
670
|
+
for item in result.get("memories", result.get("results", [])):
|
|
671
|
+
memory = self._api_response_to_memory(item)
|
|
672
|
+
if memory:
|
|
673
|
+
memories.append(memory)
|
|
674
|
+
|
|
675
|
+
logger.info(f"Cloud search returned {len(memories)} memories")
|
|
676
|
+
return memories
|
|
677
|
+
|
|
678
|
+
async def recall_memories(
|
|
679
|
+
self,
|
|
680
|
+
query: str,
|
|
681
|
+
memory_types: Optional[list[MemoryType]] = None,
|
|
682
|
+
project_path: Optional[str] = None,
|
|
683
|
+
limit: int = 20
|
|
684
|
+
) -> list[Memory]:
|
|
685
|
+
"""
|
|
686
|
+
Recall memories using natural language query (fuzzy search).
|
|
687
|
+
|
|
688
|
+
Args:
|
|
689
|
+
query: Natural language query
|
|
690
|
+
memory_types: Optional filter by memory types
|
|
691
|
+
project_path: Optional filter by project
|
|
692
|
+
limit: Maximum results
|
|
693
|
+
|
|
694
|
+
Returns:
|
|
695
|
+
List of relevant Memory objects
|
|
696
|
+
"""
|
|
697
|
+
payload = {
|
|
698
|
+
"query": query,
|
|
699
|
+
"limit": limit
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
if memory_types:
|
|
703
|
+
payload["memory_types"] = [mt.value for mt in memory_types]
|
|
704
|
+
|
|
705
|
+
if project_path:
|
|
706
|
+
payload["project_path"] = project_path
|
|
707
|
+
|
|
708
|
+
result = await self._request("POST", "/memories/recall", json=payload)
|
|
709
|
+
|
|
710
|
+
memories = []
|
|
711
|
+
for item in result.get("memories", result.get("results", [])):
|
|
712
|
+
memory = self._api_response_to_memory(item)
|
|
713
|
+
if memory:
|
|
714
|
+
memories.append(memory)
|
|
715
|
+
|
|
716
|
+
return memories
|
|
717
|
+
|
|
718
|
+
async def get_recent_activity(
|
|
719
|
+
self,
|
|
720
|
+
days: int = 7,
|
|
721
|
+
project: Optional[str] = None
|
|
722
|
+
) -> dict[str, Any]:
|
|
723
|
+
"""
|
|
724
|
+
Get recent memory activity summary.
|
|
725
|
+
|
|
726
|
+
Args:
|
|
727
|
+
days: Number of days to look back
|
|
728
|
+
project: Optional project filter
|
|
729
|
+
|
|
730
|
+
Returns:
|
|
731
|
+
Activity summary dictionary
|
|
732
|
+
"""
|
|
733
|
+
params = {"days": days}
|
|
734
|
+
if project:
|
|
735
|
+
params["project"] = project
|
|
736
|
+
|
|
737
|
+
result = await self._request("GET", "/memories/recent", params=params)
|
|
738
|
+
return result or {}
|
|
739
|
+
|
|
740
|
+
async def get_statistics(self) -> dict[str, Any]:
|
|
741
|
+
"""
|
|
742
|
+
Get graph statistics.
|
|
743
|
+
|
|
744
|
+
Returns:
|
|
745
|
+
Statistics dictionary
|
|
746
|
+
"""
|
|
747
|
+
result = await self._request("GET", "/graphs/statistics")
|
|
748
|
+
return result or {}
|
|
749
|
+
|
|
750
|
+
# =========================================================================
|
|
751
|
+
# Helper Methods
|
|
752
|
+
# =========================================================================
|
|
753
|
+
|
|
754
|
+
def _memory_to_api_payload(self, memory: Memory) -> dict[str, Any]:
|
|
755
|
+
"""Convert Memory object to API payload."""
|
|
756
|
+
payload = {
|
|
757
|
+
"type": memory.type.value,
|
|
758
|
+
"title": memory.title,
|
|
759
|
+
"content": memory.content,
|
|
760
|
+
}
|
|
761
|
+
|
|
762
|
+
if memory.id:
|
|
763
|
+
payload["id"] = memory.id
|
|
764
|
+
|
|
765
|
+
if memory.summary:
|
|
766
|
+
payload["summary"] = memory.summary
|
|
767
|
+
|
|
768
|
+
if memory.tags:
|
|
769
|
+
payload["tags"] = memory.tags
|
|
770
|
+
|
|
771
|
+
if memory.importance is not None:
|
|
772
|
+
payload["importance"] = memory.importance
|
|
773
|
+
|
|
774
|
+
if memory.confidence is not None:
|
|
775
|
+
payload["confidence"] = memory.confidence
|
|
776
|
+
|
|
777
|
+
if memory.context:
|
|
778
|
+
context_dict = {}
|
|
779
|
+
if memory.context.project_path:
|
|
780
|
+
context_dict["project_path"] = memory.context.project_path
|
|
781
|
+
if memory.context.files_involved:
|
|
782
|
+
context_dict["files_involved"] = memory.context.files_involved
|
|
783
|
+
if memory.context.languages:
|
|
784
|
+
context_dict["languages"] = memory.context.languages
|
|
785
|
+
if memory.context.frameworks:
|
|
786
|
+
context_dict["frameworks"] = memory.context.frameworks
|
|
787
|
+
if memory.context.technologies:
|
|
788
|
+
context_dict["technologies"] = memory.context.technologies
|
|
789
|
+
if memory.context.git_commit:
|
|
790
|
+
context_dict["git_commit"] = memory.context.git_commit
|
|
791
|
+
if memory.context.git_branch:
|
|
792
|
+
context_dict["git_branch"] = memory.context.git_branch
|
|
793
|
+
if memory.context.working_directory:
|
|
794
|
+
context_dict["working_directory"] = memory.context.working_directory
|
|
795
|
+
if memory.context.additional_metadata:
|
|
796
|
+
context_dict["additional_metadata"] = memory.context.additional_metadata
|
|
797
|
+
|
|
798
|
+
if context_dict:
|
|
799
|
+
payload["context"] = context_dict
|
|
800
|
+
|
|
801
|
+
return payload
|
|
802
|
+
|
|
803
|
+
    def _api_response_to_memory(self, data: dict[str, Any]) -> Optional[Memory]:
        """Convert API response to Memory object.

        Returns None (after logging) when the payload cannot be parsed,
        so a single malformed record does not abort a whole result set.
        """
        try:
            # Parse memory type
            type_str = data.get("type", "general")
            try:
                memory_type = MemoryType(type_str)
            except ValueError:
                # Unknown/new server-side type: fall back to the generic bucket.
                memory_type = MemoryType.GENERAL

            # Parse timestamps
            created_at = data.get("created_at")
            if isinstance(created_at, str):
                # The API may emit a trailing "Z"; fromisoformat needs "+00:00".
                created_at = datetime.fromisoformat(created_at.replace("Z", "+00:00"))
            elif created_at is None:
                created_at = datetime.now(timezone.utc)

            updated_at = data.get("updated_at")
            if isinstance(updated_at, str):
                updated_at = datetime.fromisoformat(updated_at.replace("Z", "+00:00"))
            elif updated_at is None:
                # Never-updated records mirror their creation time.
                updated_at = created_at

            # Parse context
            context = None
            context_data = data.get("context")
            if context_data and isinstance(context_data, dict):
                context = MemoryContext(
                    project_path=context_data.get("project_path"),
                    files_involved=context_data.get("files_involved", []),
                    languages=context_data.get("languages", []),
                    frameworks=context_data.get("frameworks", []),
                    technologies=context_data.get("technologies", []),
                    git_commit=context_data.get("git_commit"),
                    git_branch=context_data.get("git_branch"),
                    working_directory=context_data.get("working_directory"),
                    additional_metadata=context_data.get("additional_metadata", {})
                )

            return Memory(
                # The API has used both field names across versions.
                id=data.get("id") or data.get("memory_id"),
                type=memory_type,
                title=data.get("title", ""),
                content=data.get("content", ""),
                summary=data.get("summary"),
                tags=data.get("tags", []),
                importance=data.get("importance", 0.5),
                confidence=data.get("confidence", 0.8),
                created_at=created_at,
                updated_at=updated_at,
                context=context
            )

        except Exception as e:
            # Deliberate best-effort: log and skip rather than propagate.
            logger.error(f"Failed to parse memory from API response: {e}")
            return None
|