memorygraphMCP 0.11.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memorygraph/__init__.py +50 -0
- memorygraph/__main__.py +12 -0
- memorygraph/advanced_tools.py +509 -0
- memorygraph/analytics/__init__.py +46 -0
- memorygraph/analytics/advanced_queries.py +727 -0
- memorygraph/backends/__init__.py +21 -0
- memorygraph/backends/base.py +179 -0
- memorygraph/backends/cloud.py +75 -0
- memorygraph/backends/cloud_backend.py +858 -0
- memorygraph/backends/factory.py +577 -0
- memorygraph/backends/falkordb_backend.py +749 -0
- memorygraph/backends/falkordblite_backend.py +746 -0
- memorygraph/backends/ladybugdb_backend.py +242 -0
- memorygraph/backends/memgraph_backend.py +327 -0
- memorygraph/backends/neo4j_backend.py +298 -0
- memorygraph/backends/sqlite_fallback.py +463 -0
- memorygraph/backends/turso.py +448 -0
- memorygraph/cli.py +743 -0
- memorygraph/cloud_database.py +297 -0
- memorygraph/config.py +295 -0
- memorygraph/database.py +933 -0
- memorygraph/graph_analytics.py +631 -0
- memorygraph/integration/__init__.py +69 -0
- memorygraph/integration/context_capture.py +426 -0
- memorygraph/integration/project_analysis.py +583 -0
- memorygraph/integration/workflow_tracking.py +492 -0
- memorygraph/intelligence/__init__.py +59 -0
- memorygraph/intelligence/context_retrieval.py +447 -0
- memorygraph/intelligence/entity_extraction.py +386 -0
- memorygraph/intelligence/pattern_recognition.py +420 -0
- memorygraph/intelligence/temporal.py +374 -0
- memorygraph/migration/__init__.py +27 -0
- memorygraph/migration/manager.py +579 -0
- memorygraph/migration/models.py +142 -0
- memorygraph/migration/scripts/__init__.py +17 -0
- memorygraph/migration/scripts/bitemporal_migration.py +595 -0
- memorygraph/migration/scripts/multitenancy_migration.py +452 -0
- memorygraph/migration_tools_module.py +146 -0
- memorygraph/models.py +684 -0
- memorygraph/proactive/__init__.py +46 -0
- memorygraph/proactive/outcome_learning.py +444 -0
- memorygraph/proactive/predictive.py +410 -0
- memorygraph/proactive/session_briefing.py +399 -0
- memorygraph/relationships.py +668 -0
- memorygraph/server.py +883 -0
- memorygraph/sqlite_database.py +1876 -0
- memorygraph/tools/__init__.py +59 -0
- memorygraph/tools/activity_tools.py +262 -0
- memorygraph/tools/memory_tools.py +315 -0
- memorygraph/tools/migration_tools.py +181 -0
- memorygraph/tools/relationship_tools.py +147 -0
- memorygraph/tools/search_tools.py +406 -0
- memorygraph/tools/temporal_tools.py +339 -0
- memorygraph/utils/__init__.py +10 -0
- memorygraph/utils/context_extractor.py +429 -0
- memorygraph/utils/error_handling.py +151 -0
- memorygraph/utils/export_import.py +425 -0
- memorygraph/utils/graph_algorithms.py +200 -0
- memorygraph/utils/pagination.py +149 -0
- memorygraph/utils/project_detection.py +133 -0
- memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
- memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
- memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
- memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
- memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,429 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Context extraction utilities for relationship contexts.
|
|
3
|
+
|
|
4
|
+
This module provides pattern-based extraction of structured information
|
|
5
|
+
from natural language relationship context fields.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import re
|
|
10
|
+
from typing import Any, Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def extract_context_structure(text: Optional[str]) -> Dict[str, Any]:
    """
    Derive a structured dictionary from a free-text relationship context.

    The original text is always preserved under the "text" key; every other
    key is filled in by a pattern-based helper extractor.

    Args:
        text: Natural language context string. None yields {}; non-string
            values are coerced with str() before extraction.

    Returns:
        {} when text is None, otherwise a dict with keys:
            "text":       original string (always preserved)
            "scope":      "partial" | "full" | "conditional" | None
            "components": list of mentioned components/modules
            "conditions": list of when/if/requires phrases
            "evidence":   list of verification/testing mentions
            "temporal":   version/date/time info or None
            "exceptions": list of exclusions/limitations

    Examples:
        >>> extract_context_structure("partially implements auth module")["scope"]
        'partial'
        >>> extract_context_structure("verified by integration tests")["evidence"]
        ['integration tests']
    """
    if text is None:
        return {}

    # Coerce non-string inputs so the regex helpers always receive a str.
    normalized = text if isinstance(text, str) else str(text)

    return {
        "text": normalized,
        "scope": _extract_scope(normalized),
        "components": _extract_components(normalized),
        "conditions": _extract_conditions(normalized),
        "evidence": _extract_evidence(normalized),
        "temporal": _extract_temporal(normalized),
        "exceptions": _extract_exceptions(normalized),
    }
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def parse_context(context: Optional[str]) -> Dict[str, Any]:
    """
    Parse context field - handles both JSON and free text.

    This function provides backward compatibility by:
    1. Trying to parse as JSON first (new structured format)
    2. Falling back to pattern extraction for legacy free text

    Note that json.loads() also accepts bare scalars ("123", "true") and
    arrays; those are not structured contexts, so any input that does not
    decode to a JSON object is treated as legacy free text. This keeps the
    declared Dict[str, Any] return type honest.

    Args:
        context: Context string (either JSON or free text)

    Returns:
        Structured dictionary with extracted information; {} for empty/None
        input.

    Examples:
        >>> parse_context('{"text": "test", "scope": "partial"}')
        {'text': 'test', 'scope': 'partial'}

        >>> parse_context("this is free text")  # doctest: +ELLIPSIS
        {'text': 'this is free text', ...}

        >>> parse_context(None)
        {}
    """
    if not context:
        return {}

    # Try parsing as JSON first (new format).
    try:
        parsed = json.loads(context)
    except (json.JSONDecodeError, TypeError):
        # Legacy free-text format - extract structure via patterns.
        return extract_context_structure(context)

    # Bug fix: previously any successfully-parsed JSON value (including
    # scalars like "123"/"true" and arrays) was returned as-is, violating
    # the dict contract. Only accept JSON objects; everything else is
    # treated as free text.
    if isinstance(parsed, dict):
        return parsed
    return extract_context_structure(context)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def _extract_scope(text: str) -> Optional[str]:
|
|
111
|
+
"""
|
|
112
|
+
Extract scope information from text.
|
|
113
|
+
|
|
114
|
+
Patterns:
|
|
115
|
+
- partial: "partially", "limited", "incomplete"
|
|
116
|
+
- full: "fully", "complete", "completely", "entirely"
|
|
117
|
+
- conditional: "conditional", "only", "if", "when"
|
|
118
|
+
|
|
119
|
+
Args:
|
|
120
|
+
text: Context text to analyze
|
|
121
|
+
|
|
122
|
+
Returns:
|
|
123
|
+
Scope type or None if not detected
|
|
124
|
+
"""
|
|
125
|
+
if not text:
|
|
126
|
+
return None
|
|
127
|
+
|
|
128
|
+
text_lower = text.lower()
|
|
129
|
+
|
|
130
|
+
# Partial scope patterns
|
|
131
|
+
partial_patterns = [
|
|
132
|
+
r'\bpartial(ly)?\b',
|
|
133
|
+
r'\blimited\b',
|
|
134
|
+
r'\bincomplete\b',
|
|
135
|
+
]
|
|
136
|
+
|
|
137
|
+
for pattern in partial_patterns:
|
|
138
|
+
if re.search(pattern, text_lower):
|
|
139
|
+
return "partial"
|
|
140
|
+
|
|
141
|
+
# Full scope patterns
|
|
142
|
+
full_patterns = [
|
|
143
|
+
r'\bfull(y)?\b',
|
|
144
|
+
r'\bcomplete(ly)?\b',
|
|
145
|
+
r'\bentirely\b',
|
|
146
|
+
]
|
|
147
|
+
|
|
148
|
+
for pattern in full_patterns:
|
|
149
|
+
if re.search(pattern, text_lower):
|
|
150
|
+
return "full"
|
|
151
|
+
|
|
152
|
+
# Conditional scope patterns
|
|
153
|
+
conditional_patterns = [
|
|
154
|
+
r'\bconditional(ly)?\b',
|
|
155
|
+
r'\bonly\b',
|
|
156
|
+
]
|
|
157
|
+
|
|
158
|
+
for pattern in conditional_patterns:
|
|
159
|
+
if re.search(pattern, text_lower):
|
|
160
|
+
return "conditional"
|
|
161
|
+
|
|
162
|
+
return None
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def _extract_conditions(text: str) -> List[str]:
|
|
166
|
+
"""
|
|
167
|
+
Extract conditional statements from text.
|
|
168
|
+
|
|
169
|
+
Patterns:
|
|
170
|
+
- "when X"
|
|
171
|
+
- "if X"
|
|
172
|
+
- "in X environment"
|
|
173
|
+
- "requires X"
|
|
174
|
+
|
|
175
|
+
Args:
|
|
176
|
+
text: Context text to analyze
|
|
177
|
+
|
|
178
|
+
Returns:
|
|
179
|
+
List of extracted conditions
|
|
180
|
+
"""
|
|
181
|
+
if not text:
|
|
182
|
+
return []
|
|
183
|
+
|
|
184
|
+
conditions = []
|
|
185
|
+
|
|
186
|
+
# Pattern: "when X"
|
|
187
|
+
when_matches = re.finditer(r'\bwhen\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
188
|
+
for match in when_matches:
|
|
189
|
+
conditions.append(match.group(1).strip())
|
|
190
|
+
|
|
191
|
+
# Pattern: "if X"
|
|
192
|
+
if_matches = re.finditer(r'\bif\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
193
|
+
for match in if_matches:
|
|
194
|
+
conditions.append(match.group(1).strip())
|
|
195
|
+
|
|
196
|
+
# Pattern: "in X environment"
|
|
197
|
+
env_matches = re.finditer(
|
|
198
|
+
r'\bin\s+([\w\-]+)\s+environment', text, re.IGNORECASE
|
|
199
|
+
)
|
|
200
|
+
for match in env_matches:
|
|
201
|
+
conditions.append(match.group(1).strip())
|
|
202
|
+
|
|
203
|
+
# Pattern: "requires X"
|
|
204
|
+
requires_matches = re.finditer(r'\brequires\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
205
|
+
for match in requires_matches:
|
|
206
|
+
conditions.append(match.group(1).strip())
|
|
207
|
+
|
|
208
|
+
# Pattern: "only works in X" or "only in X"
|
|
209
|
+
only_in_matches = re.finditer(
|
|
210
|
+
r'\bonly\s+(?:works\s+)?in\s+([^,\.;]+)', text, re.IGNORECASE
|
|
211
|
+
)
|
|
212
|
+
for match in only_in_matches:
|
|
213
|
+
conditions.append(match.group(1).strip())
|
|
214
|
+
|
|
215
|
+
return conditions
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def _extract_evidence(text: str) -> List[str]:
|
|
219
|
+
"""
|
|
220
|
+
Extract evidence/verification mentions from text.
|
|
221
|
+
|
|
222
|
+
Patterns:
|
|
223
|
+
- "verified by X"
|
|
224
|
+
- "tested by X"
|
|
225
|
+
- "proven by X"
|
|
226
|
+
- "observed in X"
|
|
227
|
+
|
|
228
|
+
Args:
|
|
229
|
+
text: Context text to analyze
|
|
230
|
+
|
|
231
|
+
Returns:
|
|
232
|
+
List of extracted evidence
|
|
233
|
+
"""
|
|
234
|
+
if not text:
|
|
235
|
+
return []
|
|
236
|
+
|
|
237
|
+
evidence = []
|
|
238
|
+
|
|
239
|
+
# Pattern: "verified by X"
|
|
240
|
+
verified_matches = re.finditer(
|
|
241
|
+
r'\bverified\s+by\s+([^,\.;]+)', text, re.IGNORECASE
|
|
242
|
+
)
|
|
243
|
+
for match in verified_matches:
|
|
244
|
+
evidence.append(match.group(1).strip())
|
|
245
|
+
|
|
246
|
+
# Pattern: "tested by X"
|
|
247
|
+
tested_matches = re.finditer(r'\btested\s+by\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
248
|
+
for match in tested_matches:
|
|
249
|
+
evidence.append(match.group(1).strip())
|
|
250
|
+
|
|
251
|
+
# Pattern: "proven by X"
|
|
252
|
+
proven_matches = re.finditer(r'\bproven\s+by\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
253
|
+
for match in proven_matches:
|
|
254
|
+
evidence.append(match.group(1).strip())
|
|
255
|
+
|
|
256
|
+
# Pattern: "observed in X"
|
|
257
|
+
observed_matches = re.finditer(
|
|
258
|
+
r'\bobserved\s+in\s+([^,\.;]+)', text, re.IGNORECASE
|
|
259
|
+
)
|
|
260
|
+
for match in observed_matches:
|
|
261
|
+
evidence.append(match.group(1).strip())
|
|
262
|
+
|
|
263
|
+
return evidence
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def _extract_temporal(text: str) -> Optional[str]:
|
|
267
|
+
"""
|
|
268
|
+
Extract temporal information from text.
|
|
269
|
+
|
|
270
|
+
Patterns:
|
|
271
|
+
- Temporal markers: "since X", "after X", "as of X" (checked first for context)
|
|
272
|
+
- Version numbers: "v2.1.0", "version 2.1.0"
|
|
273
|
+
|
|
274
|
+
Args:
|
|
275
|
+
text: Context text to analyze
|
|
276
|
+
|
|
277
|
+
Returns:
|
|
278
|
+
Temporal information or None if not detected
|
|
279
|
+
"""
|
|
280
|
+
if not text:
|
|
281
|
+
return None
|
|
282
|
+
|
|
283
|
+
# Pattern: "since X" - use looser pattern that allows periods (for versions)
|
|
284
|
+
since_match = re.search(r'\bsince\s+([^,;]+?)(?:\s*,|\s*;|$)', text, re.IGNORECASE)
|
|
285
|
+
if since_match:
|
|
286
|
+
return since_match.group(1).strip()
|
|
287
|
+
|
|
288
|
+
# Pattern: "after X"
|
|
289
|
+
after_match = re.search(r'\bafter\s+([^,;]+?)(?:\s*,|\s*;|$)', text, re.IGNORECASE)
|
|
290
|
+
if after_match:
|
|
291
|
+
return after_match.group(1).strip()
|
|
292
|
+
|
|
293
|
+
# Pattern: "as of X"
|
|
294
|
+
as_of_match = re.search(r'\bas\s+of\s+([^,;]+?)(?:\s*,|\s*;|$)', text, re.IGNORECASE)
|
|
295
|
+
if as_of_match:
|
|
296
|
+
return as_of_match.group(1).strip()
|
|
297
|
+
|
|
298
|
+
# Pattern: Version numbers (v2.1.0 or 2.1.0)
|
|
299
|
+
version_match = re.search(r'\bv?\d+\.\d+(?:\.\d+)?', text, re.IGNORECASE)
|
|
300
|
+
if version_match:
|
|
301
|
+
return version_match.group(0)
|
|
302
|
+
|
|
303
|
+
return None
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
def _extract_exceptions(text: str) -> List[str]:
|
|
307
|
+
"""
|
|
308
|
+
Extract exceptions/exclusions from text.
|
|
309
|
+
|
|
310
|
+
Patterns:
|
|
311
|
+
- "except X"
|
|
312
|
+
- "excluding X"
|
|
313
|
+
- "but not X"
|
|
314
|
+
- "without X"
|
|
315
|
+
|
|
316
|
+
Args:
|
|
317
|
+
text: Context text to analyze
|
|
318
|
+
|
|
319
|
+
Returns:
|
|
320
|
+
List of extracted exceptions
|
|
321
|
+
"""
|
|
322
|
+
if not text:
|
|
323
|
+
return []
|
|
324
|
+
|
|
325
|
+
exceptions = []
|
|
326
|
+
|
|
327
|
+
# Pattern: "except X"
|
|
328
|
+
except_matches = re.finditer(r'\bexcept\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
329
|
+
for match in except_matches:
|
|
330
|
+
exceptions.append(match.group(1).strip())
|
|
331
|
+
|
|
332
|
+
# Pattern: "excluding X"
|
|
333
|
+
excluding_matches = re.finditer(r'\bexcluding\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
334
|
+
for match in excluding_matches:
|
|
335
|
+
exceptions.append(match.group(1).strip())
|
|
336
|
+
|
|
337
|
+
# Pattern: "but not X"
|
|
338
|
+
but_not_matches = re.finditer(r'\bbut\s+not\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
339
|
+
for match in but_not_matches:
|
|
340
|
+
exceptions.append(match.group(1).strip())
|
|
341
|
+
|
|
342
|
+
# Pattern: "without X"
|
|
343
|
+
without_matches = re.finditer(r'\bwithout\s+([^,\.;]+)', text, re.IGNORECASE)
|
|
344
|
+
for match in without_matches:
|
|
345
|
+
exceptions.append(match.group(1).strip())
|
|
346
|
+
|
|
347
|
+
return exceptions
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
def _extract_components(text: str) -> List[str]:
|
|
351
|
+
"""
|
|
352
|
+
Extract component/module names from text using simple heuristics.
|
|
353
|
+
|
|
354
|
+
This function uses basic noun phrase patterns to identify technical
|
|
355
|
+
components mentioned in the context.
|
|
356
|
+
|
|
357
|
+
Patterns:
|
|
358
|
+
- "X module"
|
|
359
|
+
- "X service"
|
|
360
|
+
- "X layer"
|
|
361
|
+
- "X system"
|
|
362
|
+
- "X threads/process/flow"
|
|
363
|
+
- Technical terms (capitalized words, hyphenated terms)
|
|
364
|
+
|
|
365
|
+
Args:
|
|
366
|
+
text: Context text to analyze
|
|
367
|
+
|
|
368
|
+
Returns:
|
|
369
|
+
List of extracted component names
|
|
370
|
+
"""
|
|
371
|
+
if not text:
|
|
372
|
+
return []
|
|
373
|
+
|
|
374
|
+
components = []
|
|
375
|
+
|
|
376
|
+
# Pattern: "X module/service/layer/system/component"
|
|
377
|
+
component_patterns = [
|
|
378
|
+
r'([\w\-]+)\s+module',
|
|
379
|
+
r'([\w\-]+)\s+service',
|
|
380
|
+
r'([\w\-]+)\s+layer',
|
|
381
|
+
r'([\w\-]+)\s+system',
|
|
382
|
+
r'([\w\-]+)\s+component',
|
|
383
|
+
r'([\w\-]+)\s+database',
|
|
384
|
+
r'([\w\-]+)\s+API',
|
|
385
|
+
r'([\w\-]+)\s+threads?',
|
|
386
|
+
r'([\w\-]+)\s+process(?:es)?',
|
|
387
|
+
r'([\w\-]+)\s+flow',
|
|
388
|
+
r'([\w\-]+)\s+leak',
|
|
389
|
+
]
|
|
390
|
+
|
|
391
|
+
for pattern in component_patterns:
|
|
392
|
+
matches = re.finditer(pattern, text, re.IGNORECASE)
|
|
393
|
+
for match in matches:
|
|
394
|
+
component = f"{match.group(1)} {match.group(0).split()[-1]}"
|
|
395
|
+
if component not in components:
|
|
396
|
+
components.append(component)
|
|
397
|
+
|
|
398
|
+
# Pattern: "implements/fixes X" where X is a technical noun phrase
|
|
399
|
+
action_patterns = [
|
|
400
|
+
r'\b(?:implements?|fixes?|supports?|handles?)\s+([\w\-]+(?:\s+[\w\-]+)?)',
|
|
401
|
+
]
|
|
402
|
+
|
|
403
|
+
for pattern in action_patterns:
|
|
404
|
+
matches = re.finditer(pattern, text, re.IGNORECASE)
|
|
405
|
+
for match in matches:
|
|
406
|
+
component = match.group(1).strip()
|
|
407
|
+
# Skip if it's just a scope word
|
|
408
|
+
if component.lower() not in ['partially', 'fully', 'feature', 'all']:
|
|
409
|
+
if component not in components:
|
|
410
|
+
components.append(component)
|
|
411
|
+
|
|
412
|
+
# Pattern: Capitalized technical terms (e.g., PostgreSQL, Redis, OAuth)
|
|
413
|
+
# Match words that start with capital letter and are at least 3 chars
|
|
414
|
+
cap_matches = re.finditer(r'\b([A-Z][A-Za-z0-9]{2,})\b', text)
|
|
415
|
+
for match in cap_matches:
|
|
416
|
+
term = match.group(1)
|
|
417
|
+
# Filter out common words that aren't technical terms
|
|
418
|
+
if term not in ['The', 'This', 'That', 'It', 'Testing']:
|
|
419
|
+
if term not in components:
|
|
420
|
+
components.append(term)
|
|
421
|
+
|
|
422
|
+
# Pattern: Hyphenated technical terms (e.g., two-factor, JWT-based)
|
|
423
|
+
hyphen_matches = re.finditer(r'\b([\w]+-[\w]+)\b', text)
|
|
424
|
+
for match in hyphen_matches:
|
|
425
|
+
term = match.group(1)
|
|
426
|
+
if term not in components:
|
|
427
|
+
components.append(term)
|
|
428
|
+
|
|
429
|
+
return components
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Error handling utilities for MemoryGraph.
|
|
3
|
+
|
|
4
|
+
This module provides a decorator for standardized error handling across the codebase,
|
|
5
|
+
ensuring consistent error messages, proper logging, and error type conversion.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import functools
|
|
9
|
+
import logging
|
|
10
|
+
from typing import Any, Callable, TypeVar, cast
|
|
11
|
+
|
|
12
|
+
from ..models import (
|
|
13
|
+
MemoryError,
|
|
14
|
+
ValidationError,
|
|
15
|
+
NotFoundError,
|
|
16
|
+
BackendError,
|
|
17
|
+
ConfigurationError,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
F = TypeVar('F', bound=Callable[..., Any])
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def handle_errors(
    operation_name: str | None = None,
    reraise: bool = True,
    log_level: int = logging.ERROR
) -> Callable[[F], F]:
    """
    Decorator for standardized error handling across backend operations.

    Wraps common exceptions into MemoryGraph-specific exception types, adds
    the operation name to error messages, preserves stack traces via
    exception chaining, and logs errors with full tracebacks. Works on both
    sync and async functions.

    Exception conversion:
        KeyError / ValueError / TypeError -> ValidationError
        ConnectionError / TimeoutError   -> BackendError
        any other Exception              -> MemoryError
        MemoryGraph exceptions           -> re-raised unchanged

    Args:
        operation_name: Human-readable name of the operation (e.g., "store
            memory"). Defaults to the function name with underscores replaced
            by spaces.
        reraise: Whether to re-raise the converted exception after logging.
            When False, the wrapper swallows the error and returns None.
        log_level: Logging level for error records. Default is logging.ERROR.

    Returns:
        Decorator function that wraps the target function with error handling.

    Example:
        >>> @handle_errors(operation_name="store memory")
        ... async def store_memory(self, memory: Memory) -> str:
        ...     # Implementation
        ...     pass
    """
    # Ordered (caught type, message detail template, MemoryGraph type to
    # raise). isinstance checks run in this order, mirroring the except
    # chain the sync/async wrappers previously duplicated verbatim.
    conversion_table = (
        (KeyError, "Missing required key {0}", ValidationError),
        (ValueError, "Invalid value - {0}", ValidationError),
        (TypeError, "Type error - {0}", ValidationError),
        (ConnectionError, "Connection error - {0}", BackendError),
        (TimeoutError, "Operation timed out - {0}", BackendError),
    )
    # MemoryGraph's own exceptions propagate untouched. Bug fix: MemoryError
    # (the package exception imported from ..models, which shadows the
    # builtin and is presumably the base of the others - TODO confirm) was
    # previously absent here, so an already-converted MemoryError got
    # wrapped a second time with a mangled message.
    passthrough = (
        MemoryError, ValidationError, NotFoundError, BackendError, ConfigurationError
    )

    def decorator(func: F) -> F:
        def convert(exc: Exception, op_name: str) -> None:
            """Log *exc* and, when reraise is set, raise its converted form."""
            for caught, detail, target in conversion_table:
                if isinstance(exc, caught):
                    break
            else:
                detail, target = "Unexpected error - {0}", MemoryError
            msg = f"Failed to {op_name}: {detail.format(exc)}"
            # Called from inside the wrappers' except blocks, so
            # exc_info=True still captures the active traceback.
            logger.log(log_level, msg, exc_info=True)
            if reraise:
                raise target(msg) from exc
            return None

        @functools.wraps(func)
        async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
            op_name = operation_name or func.__name__.replace('_', ' ')
            try:
                return await func(*args, **kwargs)
            except passthrough:
                # Re-raise MemoryGraph exceptions as-is
                raise
            except Exception as e:
                return convert(e, op_name)

        @functools.wraps(func)
        def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
            op_name = operation_name or func.__name__.replace('_', ' ')
            try:
                return func(*args, **kwargs)
            except passthrough:
                # Re-raise MemoryGraph exceptions as-is
                raise
            except Exception as e:
                return convert(e, op_name)

        # Return appropriate wrapper based on whether function is async
        if asyncio.iscoroutinefunction(func):
            return cast(F, async_wrapper)
        return cast(F, sync_wrapper)

    return decorator
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
# NOTE(review): asyncio is deliberately imported at module bottom. The name is
# only needed at decoration time (asyncio.iscoroutinefunction inside
# handle_errors), and by the time any decorator is applied this module has
# fully executed, so the binding exists. The original "avoid circular imports"
# rationale is dubious -- asyncio does not import this package; consider
# moving this into the top-of-file import block.
import asyncio
|