memorygraphmcp-0.11.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. memorygraph/__init__.py +50 -0
  2. memorygraph/__main__.py +12 -0
  3. memorygraph/advanced_tools.py +509 -0
  4. memorygraph/analytics/__init__.py +46 -0
  5. memorygraph/analytics/advanced_queries.py +727 -0
  6. memorygraph/backends/__init__.py +21 -0
  7. memorygraph/backends/base.py +179 -0
  8. memorygraph/backends/cloud.py +75 -0
  9. memorygraph/backends/cloud_backend.py +858 -0
  10. memorygraph/backends/factory.py +577 -0
  11. memorygraph/backends/falkordb_backend.py +749 -0
  12. memorygraph/backends/falkordblite_backend.py +746 -0
  13. memorygraph/backends/ladybugdb_backend.py +242 -0
  14. memorygraph/backends/memgraph_backend.py +327 -0
  15. memorygraph/backends/neo4j_backend.py +298 -0
  16. memorygraph/backends/sqlite_fallback.py +463 -0
  17. memorygraph/backends/turso.py +448 -0
  18. memorygraph/cli.py +743 -0
  19. memorygraph/cloud_database.py +297 -0
  20. memorygraph/config.py +295 -0
  21. memorygraph/database.py +933 -0
  22. memorygraph/graph_analytics.py +631 -0
  23. memorygraph/integration/__init__.py +69 -0
  24. memorygraph/integration/context_capture.py +426 -0
  25. memorygraph/integration/project_analysis.py +583 -0
  26. memorygraph/integration/workflow_tracking.py +492 -0
  27. memorygraph/intelligence/__init__.py +59 -0
  28. memorygraph/intelligence/context_retrieval.py +447 -0
  29. memorygraph/intelligence/entity_extraction.py +386 -0
  30. memorygraph/intelligence/pattern_recognition.py +420 -0
  31. memorygraph/intelligence/temporal.py +374 -0
  32. memorygraph/migration/__init__.py +27 -0
  33. memorygraph/migration/manager.py +579 -0
  34. memorygraph/migration/models.py +142 -0
  35. memorygraph/migration/scripts/__init__.py +17 -0
  36. memorygraph/migration/scripts/bitemporal_migration.py +595 -0
  37. memorygraph/migration/scripts/multitenancy_migration.py +452 -0
  38. memorygraph/migration_tools_module.py +146 -0
  39. memorygraph/models.py +684 -0
  40. memorygraph/proactive/__init__.py +46 -0
  41. memorygraph/proactive/outcome_learning.py +444 -0
  42. memorygraph/proactive/predictive.py +410 -0
  43. memorygraph/proactive/session_briefing.py +399 -0
  44. memorygraph/relationships.py +668 -0
  45. memorygraph/server.py +883 -0
  46. memorygraph/sqlite_database.py +1876 -0
  47. memorygraph/tools/__init__.py +59 -0
  48. memorygraph/tools/activity_tools.py +262 -0
  49. memorygraph/tools/memory_tools.py +315 -0
  50. memorygraph/tools/migration_tools.py +181 -0
  51. memorygraph/tools/relationship_tools.py +147 -0
  52. memorygraph/tools/search_tools.py +406 -0
  53. memorygraph/tools/temporal_tools.py +339 -0
  54. memorygraph/utils/__init__.py +10 -0
  55. memorygraph/utils/context_extractor.py +429 -0
  56. memorygraph/utils/error_handling.py +151 -0
  57. memorygraph/utils/export_import.py +425 -0
  58. memorygraph/utils/graph_algorithms.py +200 -0
  59. memorygraph/utils/pagination.py +149 -0
  60. memorygraph/utils/project_detection.py +133 -0
  61. memorygraphmcp-0.11.7.dist-info/METADATA +970 -0
  62. memorygraphmcp-0.11.7.dist-info/RECORD +65 -0
  63. memorygraphmcp-0.11.7.dist-info/WHEEL +4 -0
  64. memorygraphmcp-0.11.7.dist-info/entry_points.txt +2 -0
  65. memorygraphmcp-0.11.7.dist-info/licenses/LICENSE +21 -0
memorygraph/integration/project_analysis.py (new file)
@@ -0,0 +1,583 @@
"""
Project-Aware Memory for Claude Code Integration.

Provides project detection, codebase analysis, and file change tracking:
- Project detection from directory structure
- Codebase analysis (languages, frameworks, structure)
- File change tracking with git integration
- Code pattern identification
"""

import json
import os
import re
import subprocess
from collections import Counter
from datetime import datetime
from pathlib import Path
from typing import Any, Optional
from uuid import uuid4

from pydantic import BaseModel, Field

from ..backends.base import GraphBackend


class ProjectInfo(BaseModel):
    """Project information."""

    project_id: str = Field(default_factory=lambda: str(uuid4()))
    name: str = Field(..., description="Project name")
    path: str = Field(..., description="Project directory path")
    project_type: str = Field(..., description="Project type (e.g., 'python', 'typescript', 'mixed')")
    git_remote: Optional[str] = Field(None, description="Git remote URL if available")
    description: Optional[str] = Field(None, description="Project description")
    technologies: list[str] = Field(default_factory=list, description="Technologies used")


class CodebaseInfo(BaseModel):
    """Codebase analysis results."""

    total_files: int = Field(..., description="Total number of files")
    file_types: dict[str, int] = Field(
        default_factory=dict, description="File count by extension"
    )
    languages: list[str] = Field(default_factory=list, description="Programming languages detected")
    frameworks: list[str] = Field(default_factory=list, description="Frameworks detected")
    structure: dict[str, Any] = Field(default_factory=dict, description="Directory structure")
    config_files: list[str] = Field(default_factory=list, description="Configuration files found")


class FileChange(BaseModel):
    """File change information."""

    file_path: str = Field(..., description="Path to changed file")
    change_type: str = Field(..., description="Type of change (added, modified, deleted)")
    timestamp: datetime = Field(default_factory=datetime.now)
    lines_added: int = Field(default=0, description="Lines added")
    lines_removed: int = Field(default=0, description="Lines removed")


class Pattern(BaseModel):
    """Code pattern identified."""

    pattern_id: str = Field(default_factory=lambda: str(uuid4()))
    pattern_type: str = Field(..., description="Type of pattern")
    description: str = Field(..., description="Pattern description")
    examples: list[str] = Field(default_factory=list, description="Example occurrences")
    frequency: int = Field(default=1, description="Frequency of pattern")
    confidence: float = Field(default=0.5, description="Confidence in pattern")

# File patterns to ignore
IGNORE_PATTERNS = [
    ".git",
    ".svn",
    ".hg",
    "node_modules",
    "__pycache__",
    ".pytest_cache",
    ".mypy_cache",
    ".tox",
    "venv",
    ".venv",
    "env",
    ".env",
    "dist",
    "build",
    "*.pyc",
    "*.pyo",
    "*.egg-info",
    ".DS_Store",
    "thumbs.db",
]

# Config file patterns for project type detection
PROJECT_CONFIGS = {
    "python": ["pyproject.toml", "setup.py", "requirements.txt", "Pipfile", "poetry.lock"],
    "typescript": ["package.json", "tsconfig.json", "yarn.lock", "pnpm-lock.yaml"],
    "javascript": ["package.json", "yarn.lock", "pnpm-lock.yaml"],
    "rust": ["Cargo.toml", "Cargo.lock"],
    "go": ["go.mod", "go.sum"],
    "java": ["pom.xml", "build.gradle", "build.gradle.kts"],
    "ruby": ["Gemfile", "Gemfile.lock"],
    "php": ["composer.json", "composer.lock"],
}

# Framework detection patterns
FRAMEWORK_PATTERNS = {
    "react": ["react", "@types/react"],
    "vue": ["vue", "@vue/"],
    "angular": ["@angular/"],
    "next": ["next", "next.config"],
    "fastapi": ["fastapi"],
    "flask": ["flask"],
    "django": ["django"],
    "express": ["express"],
    "nestjs": ["@nestjs/"],
    "spring": ["spring-boot", "springframework"],
}

async def detect_project(backend: GraphBackend, directory: str) -> Optional[ProjectInfo]:
    """
    Detect project from directory and return project information.

    Args:
        backend: Database backend
        directory: Directory path to analyze

    Returns:
        ProjectInfo if project detected, None otherwise

    Example:
        >>> project = await detect_project(backend, "/Users/me/my-app")
        >>> print(project.name, project.project_type)
    """
    directory = os.path.abspath(os.path.expanduser(directory))

    if not os.path.isdir(directory):
        return None

    # Extract project name from directory
    project_name = os.path.basename(directory)

    # Check for git remote
    git_remote = None
    try:
        result = subprocess.run(
            ["git", "-C", directory, "config", "--get", "remote.origin.url"],
            capture_output=True,
            text=True,
            timeout=5,
        )
        if result.returncode == 0:
            git_remote = result.stdout.strip()
            # Extract repo name from git URL if available
            if git_remote:
                match = re.search(r"[/:]([^/]+?)(?:\.git)?$", git_remote)
                if match:
                    project_name = match.group(1)
    except Exception:
        pass

    # Detect project type from config files
    project_type = "unknown"
    config_files = []
    technologies = []

    for lang, configs in PROJECT_CONFIGS.items():
        for config in configs:
            config_path = os.path.join(directory, config)
            if os.path.isfile(config_path):
                config_files.append(config)
                if project_type == "unknown":
                    project_type = lang

                # Parse config file for more details
                if config == "package.json":
                    try:
                        with open(config_path) as f:
                            package_data = json.load(f)
                        dependencies = {
                            **package_data.get("dependencies", {}),
                            **package_data.get("devDependencies", {}),
                        }

                        # Detect frameworks
                        for framework, patterns in FRAMEWORK_PATTERNS.items():
                            if any(p in dep for dep in dependencies for p in patterns):
                                technologies.append(framework)
                    except Exception:
                        pass

                elif config == "pyproject.toml":
                    try:
                        with open(config_path) as f:
                            content = f.read()
                        # Detect Python frameworks
                        for framework, patterns in FRAMEWORK_PATTERNS.items():
                            if any(p in content for p in patterns):
                                technologies.append(framework)
                    except Exception:
                        pass

    # If multiple config types found, mark as mixed (but handle TypeScript special case)
    detected_types = [
        lang for lang, configs in PROJECT_CONFIGS.items() if any(c in config_files for c in configs)
    ]

    # Special case: if tsconfig.json is present, it's TypeScript (not mixed with JavaScript)
    # because TypeScript projects also use package.json
    if "tsconfig.json" in config_files:
        project_type = "typescript"
        detected_types = ["typescript"]
    elif len(detected_types) > 1:
        project_type = "mixed"
        technologies.extend(detected_types)

    project = ProjectInfo(
        name=project_name,
        path=directory,
        project_type=project_type,
        git_remote=git_remote,
        technologies=list(set(technologies)),  # Remove duplicates
    )

    # Check if project already exists in database
    existing = await backend.search_nodes(
        "Entity", {"type": "project", "name": project_name, "path": directory}
    )

    if existing:
        project.project_id = existing[0]["id"]
        # Update existing project
        await backend.execute_query(
            """
            MATCH (p:Entity {id: $project_id})
            SET p.git_remote = $git_remote,
                p.project_type = $project_type,
                p.technologies = $technologies,
                p.updated_at = datetime()
            """,
            {
                "project_id": project.project_id,
                "git_remote": git_remote,
                "project_type": project_type,
                "technologies": technologies,
            },
        )
    else:
        # Create new project entity
        properties = {
            "id": project.project_id,
            "type": "project",
            "name": project_name,
            "path": directory,
            "project_type": project_type,
            "git_remote": git_remote,
            "technologies": technologies,
            "created_at": datetime.now(),
            "updated_at": datetime.now(),
        }
        await backend.store_node("Entity", properties)

    return project

async def analyze_codebase(backend: GraphBackend, directory: str) -> CodebaseInfo:
    """
    Analyze codebase structure and characteristics.

    Args:
        backend: Database backend
        directory: Directory path to analyze

    Returns:
        CodebaseInfo with analysis results

    Example:
        >>> info = await analyze_codebase(backend, "/Users/me/my-app")
        >>> print(f"Total files: {info.total_files}")
        >>> print(f"Languages: {', '.join(info.languages)}")
    """
    directory = os.path.abspath(os.path.expanduser(directory))

    file_types: Counter = Counter()
    config_files = []
    total_files = 0

    # Walk directory tree
    for root, dirs, files in os.walk(directory):
        # Filter out ignored directories
        dirs[:] = [d for d in dirs if d not in IGNORE_PATTERNS and not d.startswith(".")]

        for file in files:
            # Skip ignored patterns
            if any(file.endswith(pattern.replace("*", "")) for pattern in IGNORE_PATTERNS):
                continue

            total_files += 1
            ext = os.path.splitext(file)[1]
            if ext:
                file_types[ext] += 1

            # Check if it's a config file
            for configs in PROJECT_CONFIGS.values():
                if file in configs:
                    config_files.append(os.path.join(root, file))

    # Map extensions to languages
    ext_to_lang = {
        ".py": "python",
        ".js": "javascript",
        ".ts": "typescript",
        ".tsx": "typescript",
        ".jsx": "javascript",
        ".rs": "rust",
        ".go": "go",
        ".java": "java",
        ".rb": "ruby",
        ".php": "php",
        ".c": "c",
        ".cpp": "cpp",
        ".h": "c",
        ".hpp": "cpp",
        ".cs": "csharp",
        ".swift": "swift",
        ".kt": "kotlin",
    }

    languages = list({ext_to_lang.get(ext, "other") for ext in file_types if ext in ext_to_lang})

    # Detect frameworks from file analysis
    frameworks = []

    codebase_info = CodebaseInfo(
        total_files=total_files,
        file_types=dict(file_types),
        languages=languages,
        frameworks=frameworks,
        config_files=config_files,
    )

    return codebase_info

async def track_file_changes(
    backend: GraphBackend, repo_path: str, project_id: str
) -> list[FileChange]:
    """
    Track file changes using git diff.

    Args:
        backend: Database backend
        repo_path: Path to git repository
        project_id: Project ID

    Returns:
        List of FileChange objects

    Example:
        >>> changes = await track_file_changes(
        ...     backend,
        ...     "/Users/me/my-app",
        ...     "project-123"
        ... )
        >>> for change in changes:
        ...     print(f"{change.change_type}: {change.file_path}")
    """
    repo_path = os.path.abspath(os.path.expanduser(repo_path))

    changes = []

    try:
        # Get git status for changed files
        result = subprocess.run(
            ["git", "-C", repo_path, "status", "--porcelain"],
            capture_output=True,
            text=True,
            timeout=10,
        )

        if result.returncode != 0:
            return changes

        for line in result.stdout.strip().split("\n"):
            if not line:
                continue

            status = line[:2].strip()
            file_path = line[3:].strip()

            # Map git status to change type
            change_type = "modified"
            if status in ["A", "??"]:
                change_type = "added"
            elif status == "D":
                change_type = "deleted"
            elif status in ["M", "MM"]:
                change_type = "modified"

            # Get diff stats for modified files
            lines_added = 0
            lines_removed = 0

            if change_type == "modified" and os.path.isfile(os.path.join(repo_path, file_path)):
                try:
                    diff_result = subprocess.run(
                        ["git", "-C", repo_path, "diff", "--numstat", "HEAD", file_path],
                        capture_output=True,
                        text=True,
                        timeout=5,
                    )
                    if diff_result.returncode == 0 and diff_result.stdout:
                        parts = diff_result.stdout.strip().split("\t")
                        if len(parts) >= 2:
                            lines_added = int(parts[0]) if parts[0].isdigit() else 0
                            lines_removed = int(parts[1]) if parts[1].isdigit() else 0
                except Exception:
                    pass

            file_change = FileChange(
                file_path=file_path,
                change_type=change_type,
                lines_added=lines_added,
                lines_removed=lines_removed,
            )
            changes.append(file_change)

            # Store file change as observation
            properties = {
                "id": str(uuid4()),
                "type": "file_change",
                "title": f"File {change_type}: {file_path}",
                "content": f"File: {file_path}\nChange: {change_type}\n"
                f"Lines added: {lines_added}\nLines removed: {lines_removed}",
                "context": {
                    "file_path": file_path,
                    "change_type": change_type,
                    "lines_added": lines_added,
                    "lines_removed": lines_removed,
                    "project_id": project_id,
                },
                "created_at": datetime.now(),
                "updated_at": datetime.now(),
            }

            memory_id = await backend.store_node("Memory", properties)

            # Link to project
            await backend.store_relationship(
                memory_id,
                project_id,
                "PART_OF",
                {"created_at": datetime.now(), "strength": 1.0},
            )

            # Create or get file entity and link
            file_entity = await backend.execute_query(
                """
                MERGE (f:Entity {name: $file_path, type: 'file'})
                ON CREATE SET f.id = $file_id, f.created_at = datetime()
                RETURN f.id as id
                """,
                {"file_path": file_path, "file_id": str(uuid4())},
            )

            if file_entity:
                file_id = file_entity[0]["id"]
                await backend.store_relationship(
                    memory_id,
                    file_id,
                    "MODIFIES" if change_type == "modified" else "CREATES",
                    {"created_at": datetime.now(), "strength": 1.0},
                )

    except Exception as e:
        # Git not available or not a git repo - return empty list
        pass

    return changes

async def identify_code_patterns(
    backend: GraphBackend, project_id: str, files: list[str]
) -> list[Pattern]:
    """
    Identify code patterns in files.

    Args:
        backend: Database backend
        project_id: Project ID
        files: List of file paths to analyze

    Returns:
        List of identified patterns

    Example:
        >>> patterns = await identify_code_patterns(
        ...     backend,
        ...     "project-123",
        ...     ["src/api/users.py", "src/api/posts.py"]
        ... )
    """
    patterns: list[Pattern] = []

    # Common code patterns to identify
    pattern_regexes = {
        "api_endpoint": r"@(app|router)\.(get|post|put|delete|patch)\(['\"]([^'\"]+)",
        "class_definition": r"class\s+(\w+)(?:\(.*?\))?:",
        "function_definition": r"(?:async\s+)?def\s+(\w+)\s*\(",
        "import_statement": r"(?:from\s+[\w.]+\s+)?import\s+([\w, ]+)",
        "error_handling": r"try:|except\s+(\w+(?:Error|Exception)):",
        "async_await": r"\basync\s+def\b|\bawait\b",
        "type_annotation": r":\s*([A-Z][\w\[\], ]+)(?:\s*=)?",
    }

    pattern_counts: Counter = Counter()
    pattern_examples: dict[str, list[str]] = {}

    for file_path in files:
        if not os.path.isfile(file_path):
            continue

        try:
            with open(file_path, encoding="utf-8", errors="ignore") as f:
                content = f.read()

            for pattern_type, regex in pattern_regexes.items():
                matches = re.findall(regex, content)
                if matches:
                    pattern_counts[pattern_type] += len(matches)
                    if pattern_type not in pattern_examples:
                        pattern_examples[pattern_type] = []
                    # Store first few examples
                    pattern_examples[pattern_type].extend(
                        [str(m)[:100] for m in matches[:3]]
                    )
        except Exception:
            continue

    # Create pattern objects for significant patterns
    for pattern_type, count in pattern_counts.items():
        if count >= 2:  # Only patterns that occur at least twice
            pattern = Pattern(
                pattern_type=pattern_type,
                description=f"Code pattern: {pattern_type}",
                examples=pattern_examples.get(pattern_type, [])[:5],
                frequency=count,
                confidence=min(0.5 + (count * 0.05), 0.95),
            )

            # Store pattern as memory
            properties = {
                "id": pattern.pattern_id,
                "type": "code_pattern",
                "title": f"Pattern: {pattern_type}",
                "content": f"Pattern Type: {pattern_type}\n"
                f"Frequency: {count}\n\n"
                f"Examples:\n" + "\n".join(f"- {ex}" for ex in pattern.examples),
                "context": {
                    "pattern_type": pattern_type,
                    "frequency": count,
                    "confidence": pattern.confidence,
                    "project_id": project_id,
                },
                "created_at": datetime.now(),
                "updated_at": datetime.now(),
            }

            memory_id = await backend.store_node("Memory", properties)

            # Link to project
            await backend.store_relationship(
                memory_id,
                project_id,
                "FOUND_IN",
                {"created_at": datetime.now(), "strength": pattern.confidence},
            )

            patterns.append(pattern)

    return patterns
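
For orientation, the sketch below (not part of the package) shows one way the four public helpers in this module could be chained for a single directory. It assumes an already-constructed backend object implementing memorygraph.backends.base.GraphBackend; how that backend is created is omitted, and the wrapper name snapshot_project is made up for illustration.

    # Illustrative usage sketch only; not shipped in the wheel.
    from memorygraph.backends.base import GraphBackend
    from memorygraph.integration.project_analysis import (
        analyze_codebase,
        detect_project,
        identify_code_patterns,
        track_file_changes,
    )


    async def snapshot_project(backend: GraphBackend, directory: str) -> None:
        # Register (or update) the project entity for this directory.
        project = await detect_project(backend, directory)
        if project is None:
            print(f"No project found at {directory}")
            return

        # Summarize the codebase and record uncommitted git changes.
        info = await analyze_codebase(backend, directory)
        changes = await track_file_changes(backend, directory, project.project_id)

        # Scan the discovered config files for recurring code patterns.
        patterns = await identify_code_patterns(
            backend, project.project_id, info.config_files
        )

        print(f"{project.name} ({project.project_type}): {info.total_files} files")
        print(f"Languages: {', '.join(info.languages)}")
        print(f"Changes: {len(changes)}, patterns: {len(patterns)}")

    # Run with: asyncio.run(snapshot_project(my_backend, "/path/to/repo"))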