memorygraphMCP 0.11.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65):
  1. memorygraph/__init__.py +50 -0
  2. memorygraph/__main__.py +12 -0
  3. memorygraph/advanced_tools.py +509 -0
  4. memorygraph/analytics/__init__.py +46 -0
  5. memorygraph/analytics/advanced_queries.py +727 -0
  6. memorygraph/backends/__init__.py +21 -0
  7. memorygraph/backends/base.py +179 -0
  8. memorygraph/backends/cloud.py +75 -0
  9. memorygraph/backends/cloud_backend.py +858 -0
  10. memorygraph/backends/factory.py +577 -0
  11. memorygraph/backends/falkordb_backend.py +749 -0
  12. memorygraph/backends/falkordblite_backend.py +746 -0
  13. memorygraph/backends/ladybugdb_backend.py +242 -0
  14. memorygraph/backends/memgraph_backend.py +327 -0
  15. memorygraph/backends/neo4j_backend.py +298 -0
  16. memorygraph/backends/sqlite_fallback.py +463 -0
  17. memorygraph/backends/turso.py +448 -0
  18. memorygraph/cli.py +743 -0
  19. memorygraph/cloud_database.py +297 -0
  20. memorygraph/config.py +295 -0
  21. memorygraph/database.py +933 -0
  22. memorygraph/graph_analytics.py +631 -0
  23. memorygraph/integration/__init__.py +69 -0
  24. memorygraph/integration/context_capture.py +426 -0
  25. memorygraph/integration/project_analysis.py +583 -0
  26. memorygraph/integration/workflow_tracking.py +492 -0
  27. memorygraph/intelligence/__init__.py +59 -0
  28. memorygraph/intelligence/context_retrieval.py +447 -0
  29. memorygraph/intelligence/entity_extraction.py +386 -0
  30. memorygraph/intelligence/pattern_recognition.py +420 -0
  31. memorygraph/intelligence/temporal.py +374 -0
  32. memorygraph/migration/__init__.py +27 -0
  33. memorygraph/migration/manager.py +579 -0
  34. memorygraph/migration/models.py +142 -0
  35. memorygraph/migration/scripts/__init__.py +17 -0
  36. memorygraph/migration/scripts/bitemporal_migration.py +595 -0
  37. memorygraph/migration/scripts/multitenancy_migration.py +452 -0
  38. memorygraph/migration_tools_module.py +146 -0
  39. memorygraph/models.py +684 -0
  40. memorygraph/proactive/__init__.py +46 -0
  41. memorygraph/proactive/outcome_learning.py +444 -0
  42. memorygraph/proactive/predictive.py +410 -0
  43. memorygraph/proactive/session_briefing.py +399 -0
  44. memorygraph/relationships.py +668 -0
  45. memorygraph/server.py +883 -0
  46. memorygraph/sqlite_database.py +1876 -0
  47. memorygraph/tools/__init__.py +59 -0
  48. memorygraph/tools/activity_tools.py +262 -0
  49. memorygraph/tools/memory_tools.py +315 -0
  50. memorygraph/tools/migration_tools.py +181 -0
  51. memorygraph/tools/relationship_tools.py +147 -0
  52. memorygraph/tools/search_tools.py +406 -0
  53. memorygraph/tools/temporal_tools.py +339 -0
  54. memorygraph/utils/__init__.py +10 -0
  55. memorygraph/utils/context_extractor.py +429 -0
  56. memorygraph/utils/error_handling.py +151 -0
  57. memorygraph/utils/export_import.py +425 -0
  58. memorygraph/utils/graph_algorithms.py +200 -0
  59. memorygraph/utils/pagination.py +149 -0
  60. memorygraph/utils/project_detection.py +133 -0
  61. memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
  62. memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
  63. memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
  64. memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
  65. memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,420 @@
1
+ """
2
+ Pattern Recognition - Identify reusable patterns from accumulated memories.
3
+
4
+ This module provides pattern matching, similar problem identification,
5
+ and pattern suggestion capabilities.
6
+ """
7
+
8
+ import re
9
+ import logging
10
+ from typing import Optional
11
+ from datetime import datetime, timezone
12
+ from collections import Counter
13
+ from pydantic import BaseModel, Field
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
class Pattern(BaseModel):
    """Represents a recognized pattern."""

    # Unique identifier (the recognizer generates ids from entity text
    # plus a UTC timestamp).
    id: str
    # Human-readable pattern name.
    name: str
    # Short description of what the pattern captures.
    description: str
    # Category of pattern, e.g. the memory type it was derived from
    # (such as "solution" or "solution_combination").
    pattern_type: str
    # Heuristic confidence in [0.0, 1.0]; validated by pydantic.
    confidence: float = Field(ge=0.0, le=1.0)
    # Number of memories in which this pattern was observed.
    occurrences: int = 0
    # IDs of the memories the pattern was extracted from.
    source_memory_ids: list[str] = Field(default_factory=list)
    # Entity texts involved in the pattern.
    entities: list[str] = Field(default_factory=list)
    # Timezone-aware creation timestamp (UTC).
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Optional effectiveness score in [0.0, 1.0]; None when unknown.
    effectiveness: Optional[float] = Field(None, ge=0.0, le=1.0)
    # Optional free-form context metadata.
    context: Optional[dict] = None
32
+
33
+
34
class PatternRecognizer:
    """Recognizes patterns in memories using keyword and entity matching.

    All query methods are best-effort: backend errors are logged and an
    empty result is returned instead of raising.
    """

    def __init__(self, backend):
        """
        Initialize pattern recognizer with database backend.

        Args:
            backend: Database backend instance. Must provide an async
                ``execute_query(query, params)`` method accepting a Cypher
                string and a parameter dict.
        """
        self.backend = backend

    async def find_similar_problems(
        self, problem: str, threshold: float = 0.7, limit: int = 10
    ) -> list[dict]:
        """
        Find similar problems and their solutions using keyword matching.

        Similarity is the fraction of keywords extracted from ``problem``
        that occur (as case-sensitive substrings) in a stored problem
        memory's content.

        Args:
            problem: Problem description to match against
            threshold: Similarity threshold (0.0-1.0)
            limit: Maximum number of results

        Returns:
            List of similar problems with their solutions; empty when no
            keywords can be extracted or the backend query fails.
        """
        # Extract keywords from problem; with no keywords there is nothing
        # to match on, so bail out early.
        keywords = self._extract_keywords(problem)

        if not keywords:
            return []

        # Search for problem-type memories with matching keywords.
        # NOTE(review): LIMIT is applied before the final collect(), so it
        # bounds (problem, solution) rows rather than distinct problems —
        # confirm this is the intended behavior.
        query = """
        MATCH (m:Memory {type: 'problem'})
        WHERE any(keyword IN $keywords WHERE m.content CONTAINS keyword)
        OPTIONAL MATCH (m)-[r:SOLVES|SOLVED_BY]-(solution:Memory)
        WITH m, solution, r,
             size([keyword IN $keywords WHERE m.content CONTAINS keyword]) as match_count
        WITH m, solution, r,
             toFloat(match_count) / toFloat(size($keywords)) as similarity
        WHERE similarity >= $threshold
        ORDER BY similarity DESC, m.created_at DESC
        LIMIT $limit
        RETURN m.id as problem_id,
               m.title as problem_title,
               m.content as problem_content,
               m.created_at as created_at,
               similarity,
               collect({
                   id: solution.id,
                   title: solution.title,
                   content: solution.content,
                   effectiveness: r.effectiveness
               }) as solutions
        """

        params = {
            "keywords": keywords,
            "threshold": threshold,
            "limit": limit,
        }

        try:
            results = await self.backend.execute_query(query, params)
            return [dict(r) for r in results]
        except Exception as e:
            # Best-effort: degrade to "no matches" rather than propagating
            # backend errors to callers.
            logger.error(f"Error finding similar problems: {e}")
            return []

    async def extract_patterns(
        self, memory_type: str = "solution", min_occurrences: int = 3
    ) -> list[Pattern]:
        """
        Extract common patterns from memories of a given type.

        Builds one pattern per frequently-mentioned entity, then (when more
        than one frequent entity exists) adds patterns for entity pairs
        that co-occur in the same memories.

        Args:
            memory_type: Type of memories to analyze
            min_occurrences: Minimum number of occurrences to be considered a pattern

        Returns:
            List of identified patterns; empty on backend error.
        """
        # Find frequently co-occurring entities (capped at the top 50).
        query = """
        MATCH (m:Memory {type: $memory_type})-[:MENTIONS]->(e:Entity)
        WITH e.text as entity, e.type as entity_type,
             collect(m.id) as memory_ids,
             count(m) as occurrence_count
        WHERE occurrence_count >= $min_occurrences
        RETURN entity, entity_type, memory_ids, occurrence_count
        ORDER BY occurrence_count DESC
        LIMIT 50
        """

        params = {
            "memory_type": memory_type,
            "min_occurrences": min_occurrences,
        }

        try:
            entity_results = await self.backend.execute_query(query, params)

            # Build patterns from frequent entities.
            patterns: list[Pattern] = []

            for result in entity_results:
                pattern = Pattern(
                    # Id combines the entity text with the current UTC
                    # timestamp to keep ids distinct across runs.
                    id=f"pattern-{result['entity']}-{datetime.now(timezone.utc).timestamp()}",
                    name=f"{result['entity_type']} Pattern: {result['entity']}",
                    description=f"Common {memory_type} pattern involving {result['entity']}",
                    pattern_type=memory_type,
                    # Confidence scales linearly with occurrences and
                    # saturates at 10 occurrences.
                    confidence=min(result["occurrence_count"] / 10.0, 1.0),
                    occurrences=result["occurrence_count"],
                    source_memory_ids=result["memory_ids"],
                    entities=[result["entity"]],
                )
                patterns.append(pattern)

            # Find co-occurring entity pairs (only worthwhile when at least
            # two frequent entities exist).
            if len(entity_results) > 1:
                co_occurrence_patterns = await self._find_entity_co_occurrences(
                    memory_type, min_occurrences
                )
                patterns.extend(co_occurrence_patterns)

            return patterns

        except Exception as e:
            logger.error(f"Error extracting patterns: {e}")
            return []

    async def _find_entity_co_occurrences(
        self, memory_type: str, min_occurrences: int
    ) -> list[Pattern]:
        """Find patterns from entities that frequently appear together.

        Returns at most 20 pair patterns; empty on backend error.
        """
        # id(e1) < id(e2) deduplicates unordered pairs so each pair is
        # counted once.
        query = """
        MATCH (m:Memory {type: $memory_type})-[:MENTIONS]->(e1:Entity)
        MATCH (m)-[:MENTIONS]->(e2:Entity)
        WHERE id(e1) < id(e2)
        WITH e1.text as entity1, e2.text as entity2,
             collect(m.id) as memory_ids,
             count(m) as occurrence_count
        WHERE occurrence_count >= $min_occurrences
        RETURN entity1, entity2, memory_ids, occurrence_count
        ORDER BY occurrence_count DESC
        LIMIT 20
        """

        params = {
            "memory_type": memory_type,
            "min_occurrences": min_occurrences,
        }

        try:
            results = await self.backend.execute_query(query, params)
            patterns: list[Pattern] = []

            for result in results:
                pattern = Pattern(
                    id=f"pattern-pair-{result['entity1']}-{result['entity2']}-{datetime.now(timezone.utc).timestamp()}",
                    name=f"Co-occurrence: {result['entity1']} + {result['entity2']}",
                    description=f"Frequent {memory_type} pattern combining {result['entity1']} and {result['entity2']}",
                    pattern_type=f"{memory_type}_combination",
                    # Pairs saturate faster than single entities (at 5
                    # occurrences instead of 10).
                    confidence=min(result["occurrence_count"] / 5.0, 1.0),
                    occurrences=result["occurrence_count"],
                    source_memory_ids=result["memory_ids"],
                    entities=[result["entity1"], result["entity2"]],
                )
                patterns.append(pattern)

            return patterns

        except Exception as e:
            logger.error(f"Error finding co-occurrences: {e}")
            return []

    async def suggest_patterns(self, context: str, limit: int = 5) -> list[Pattern]:
        """
        Suggest relevant patterns for given context.

        Entities are extracted from ``context`` and matched against entity
        mentions in stored memories; the best-matching memories are
        returned wrapped as :class:`Pattern` objects.

        Args:
            context: Current context to match patterns against
            limit: Maximum number of suggestions

        Returns:
            List of relevant patterns; empty when no entities are found in
            the context or the backend query fails.
        """
        # Local import avoids a circular dependency at module import time.
        from memorygraph.intelligence.entity_extraction import extract_entities

        # Extract entities from context.
        entities = extract_entities(context)

        if not entities:
            return []

        entity_texts = [e.text for e in entities]

        # Find memories whose mentioned entities overlap the context
        # entities, ranked by how many context entities they match.
        query = """
        UNWIND $entities as entity_text
        MATCH (m:Memory)-[:MENTIONS]->(e:Entity {text: entity_text})
        WITH m, collect(DISTINCT e.text) as matched_entities
        WHERE size(matched_entities) >= 1
        OPTIONAL MATCH (m)-[:MENTIONS]->(all_entities:Entity)
        WITH m, matched_entities,
             collect(DISTINCT all_entities.text) as all_entity_texts,
             size(matched_entities) as match_count
        RETURN m.id as memory_id,
               m.type as memory_type,
               m.title as title,
               m.content as content,
               matched_entities,
               all_entity_texts,
               match_count
        ORDER BY match_count DESC, m.created_at DESC
        LIMIT $limit
        """

        params = {
            "entities": entity_texts,
            "limit": limit * 2,  # Get more to filter
        }

        try:
            results = await self.backend.execute_query(query, params)

            patterns: list[Pattern] = []
            for idx, result in enumerate(results[:limit]):
                # Relevance: share of the memory's entities that also
                # appear in the context (overlap / total mentioned).
                overlap = len(set(result["matched_entities"]) & set(entity_texts))
                total_entities = len(set(result["all_entity_texts"]))
                relevance = overlap / max(total_entities, 1) if total_entities > 0 else 0

                pattern = Pattern(
                    id=result["memory_id"],
                    name=result.get("title", "Untitled Pattern"),
                    # Description is truncated to the first 200 characters
                    # of the memory content.
                    description=result.get("content", "")[:200],
                    pattern_type=result.get("memory_type", "unknown"),
                    confidence=min(relevance, 1.0),
                    occurrences=result["match_count"],
                    source_memory_ids=[result["memory_id"]],
                    entities=result["matched_entities"],
                )
                patterns.append(pattern)

            return patterns

        except Exception as e:
            logger.error(f"Error suggesting patterns: {e}")
            return []

    def _extract_keywords(self, text: str) -> list[str]:
        """
        Extract keywords from text for matching.

        Lowercases the text, keeps alphabetic words of three or more
        letters, drops common English stop words, and deduplicates.
        Note: the returned order is unspecified (set-based dedup).

        Args:
            text: Text to extract keywords from

        Returns:
            List of unique lowercase keywords
        """
        # Remove common stop words.
        stop_words = {
            "the",
            "a",
            "an",
            "and",
            "or",
            "but",
            "in",
            "on",
            "at",
            "to",
            "for",
            "of",
            "with",
            "by",
            "from",
            "is",
            "are",
            "was",
            "were",
            "be",
            "been",
            "being",
            "have",
            "has",
            "had",
            "do",
            "does",
            "did",
            "will",
            "would",
            "should",
            "could",
            "may",
            "might",
            "can",
            "this",
            "that",
            "these",
            "those",
        }

        # Tokenize and clean: alphabetic-only words, minimum length 3
        # (so digits and short tokens are dropped).
        words = re.findall(r"\b[a-z]{3,}\b", text.lower())

        # Filter stop words and keep meaningful keywords.
        keywords = [w for w in words if w not in stop_words]

        # Return unique keywords.
        return list(set(keywords))
347
+
348
+
349
+ # Convenience functions using default backend parameter
350
+
351
+
352
async def find_similar_problems(
    backend, problem: str, threshold: float = 0.7, limit: int = 10
) -> list[dict]:
    """
    Module-level convenience wrapper around
    :meth:`PatternRecognizer.find_similar_problems`.

    Builds a throwaway recognizer for ``backend`` and delegates the
    keyword-based similarity search to it.

    Args:
        backend: Database backend instance
        problem: Problem description
        threshold: Similarity threshold (0.0-1.0)
        limit: Maximum results

    Returns:
        List of similar problems with solutions

    Example:
        >>> problems = await find_similar_problems(
        ...     backend,
        ...     "Authentication timeout in API",
        ...     threshold=0.6
        ... )
    """
    return await PatternRecognizer(backend).find_similar_problems(
        problem, threshold, limit
    )
376
+
377
+
378
async def extract_patterns(
    backend, memory_type: str = "solution", min_occurrences: int = 3
) -> list[Pattern]:
    """
    Module-level convenience wrapper around
    :meth:`PatternRecognizer.extract_patterns`.

    Builds a throwaway recognizer for ``backend`` and delegates pattern
    extraction to it.

    Args:
        backend: Database backend instance
        memory_type: Type of memories to analyze
        min_occurrences: Minimum occurrences

    Returns:
        List of patterns

    Example:
        >>> patterns = await extract_patterns(backend, "solution", min_occurrences=3)
        >>> for pattern in patterns:
        ...     print(f"{pattern.name}: {pattern.confidence}")
    """
    return await PatternRecognizer(backend).extract_patterns(
        memory_type, min_occurrences
    )
399
+
400
+
401
async def suggest_patterns(backend, context: str, limit: int = 5) -> list[Pattern]:
    """
    Module-level convenience wrapper around
    :meth:`PatternRecognizer.suggest_patterns`.

    Builds a throwaway recognizer for ``backend`` and delegates
    context-based pattern suggestion to it.

    Args:
        backend: Database backend instance
        context: Current context
        limit: Maximum suggestions

    Returns:
        List of relevant patterns

    Example:
        >>> patterns = await suggest_patterns(
        ...     backend,
        ...     "Using React hooks with TypeScript"
        ... )
    """
    return await PatternRecognizer(backend).suggest_patterns(context, limit)