claude-mpm 4.3.22__py3-none-any.whl → 4.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/cli/commands/doctor.py +2 -2
  3. claude_mpm/hooks/memory_integration_hook.py +1 -1
  4. claude_mpm/services/agents/memory/content_manager.py +5 -2
  5. claude_mpm/services/agents/memory/memory_file_service.py +1 -0
  6. claude_mpm/services/agents/memory/memory_limits_service.py +1 -0
  7. claude_mpm/services/unified/__init__.py +65 -0
  8. claude_mpm/services/unified/analyzer_strategies/__init__.py +44 -0
  9. claude_mpm/services/unified/analyzer_strategies/code_analyzer.py +473 -0
  10. claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +643 -0
  11. claude_mpm/services/unified/analyzer_strategies/performance_analyzer.py +804 -0
  12. claude_mpm/services/unified/analyzer_strategies/security_analyzer.py +661 -0
  13. claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +696 -0
  14. claude_mpm/services/unified/deployment_strategies/__init__.py +97 -0
  15. claude_mpm/services/unified/deployment_strategies/base.py +557 -0
  16. claude_mpm/services/unified/deployment_strategies/cloud_strategies.py +486 -0
  17. claude_mpm/services/unified/deployment_strategies/local.py +594 -0
  18. claude_mpm/services/unified/deployment_strategies/utils.py +672 -0
  19. claude_mpm/services/unified/deployment_strategies/vercel.py +471 -0
  20. claude_mpm/services/unified/interfaces.py +499 -0
  21. claude_mpm/services/unified/migration.py +532 -0
  22. claude_mpm/services/unified/strategies.py +551 -0
  23. claude_mpm/services/unified/unified_analyzer.py +534 -0
  24. claude_mpm/services/unified/unified_config.py +688 -0
  25. claude_mpm/services/unified/unified_deployment.py +470 -0
  26. {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/METADATA +1 -1
  27. {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/RECORD +31 -12
  28. {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/WHEEL +0 -0
  29. {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/entry_points.txt +0 -0
  30. {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/licenses/LICENSE +0 -0
  31. {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/top_level.txt +0 -0
claude_mpm/services/unified/deployment_strategies/utils.py
@@ -0,0 +1,672 @@
+ """
+ Deployment Utilities
+ ====================
+
+ Common utilities for deployment strategies, consolidating shared patterns
+ from 45+ deployment services to eliminate duplication.
+
+ This module reduces ~5000 LOC of duplicated utility functions across:
+ - Validation routines
+ - Artifact preparation
+ - Health checks
+ - Rollback operations
+ - Environment management
+ - Version control
+ """
+
+ import hashlib
+ import json
+ import os
+ import shutil
+ import subprocess
+ import tempfile
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Tuple, Union
+
+ import yaml
+
+ from claude_mpm.core.logging_utils import get_logger
+
+ logger = get_logger(__name__)
+
+
+ # Validation Utilities
+ # ====================
+
+ def validate_deployment_config(config: Dict[str, Any]) -> List[str]:
+     """
+     Validate deployment configuration.
+
+     Consolidates validation logic from multiple deployment services.
+
+     Args:
+         config: Deployment configuration
+
+     Returns:
+         List of validation errors
+     """
+     errors = []
+
+     # Required fields
+     required_fields = ["type", "source", "target"]
+     for field in required_fields:
+         if field not in config:
+             errors.append(f"Required field missing: {field}")
+
+     # Type validation
+     if "type" in config:
+         valid_types = [
+             "local", "vercel", "railway", "aws", "docker", "git",
+             "agent", "config", "template", "resource"
+         ]
+         if config["type"] not in valid_types:
+             errors.append(f"Invalid deployment type: {config['type']}")
+
+     # Source/target validation
+     if "source" in config:
+         source_path = Path(config["source"])
+         if not source_path.exists():
+             errors.append(f"Source does not exist: {source_path}")
+
+     # Version format validation
+     if "version" in config:
+         if not validate_version_format(config["version"]):
+             errors.append(f"Invalid version format: {config['version']}")
+
+     # Environment variables validation
+     if "env" in config:
+         if not isinstance(config["env"], dict):
+             errors.append("Environment variables must be a dictionary")
+         else:
+             for key, value in config["env"].items():
+                 if not isinstance(key, str):
+                     errors.append(f"Environment variable key must be string: {key}")
+                 if not isinstance(value, (str, int, float, bool)):
+                     errors.append(f"Invalid environment variable type for {key}")
+
+     return errors
+
+
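Editor's note: a minimal sketch of a call site, assuming a config shaped like the checks above (the paths shown are hypothetical):

    config = {
        "type": "local",
        "source": "./build",   # must exist on disk, or an error is reported
        "target": "/srv/app",
        "version": "4.4.0",
        "env": {"PORT": 8080},
    }
    errors = validate_deployment_config(config)
    if errors:
        raise ValueError("; ".join(errors))
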
+ def validate_version_format(version: str) -> bool:
+     """
+     Validate version string format.
+
+     Supports semantic versioning and date-based versions.
+
+     Args:
+         version: Version string
+
+     Returns:
+         True if valid format
+     """
+     import re
+
+     # Semantic version pattern
+     semver_pattern = r"^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$"
+
+     # Date-based version pattern
+     date_pattern = r"^\d{4}\.\d{2}\.\d{2}(\.\d+)?$"
+
+     return bool(re.match(semver_pattern, version) or re.match(date_pattern, version))
+
+
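A few illustrative calls, following directly from the two patterns above:

    validate_version_format("4.4.0")               # True  (semver)
    validate_version_format("1.2.3-rc.1+build.7")  # True  (pre-release and build metadata)
    validate_version_format("2025.01.15.2")        # True  (date-based)
    validate_version_format("v4.4.0")              # False (a leading "v" is not matched)
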
+ def validate_path_security(path: Path, base_path: Path) -> bool:
+     """
+     Validate path doesn't escape base directory (path traversal prevention).
+
+     Args:
+         path: Path to validate
+         base_path: Base directory path
+
+     Returns:
+         True if path is safe
+     """
+     try:
+         resolved_path = path.resolve()
+         resolved_base = base_path.resolve()
+         return resolved_path.is_relative_to(resolved_base)
+     except (ValueError, OSError):
+         return False
+
+
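For example (hypothetical paths; note that Path.is_relative_to requires Python 3.9+):

    base = Path("/srv/deployments")
    validate_path_security(base / "app" / "config.yml", base)     # True
    validate_path_security(base / ".." / "etc" / "passwd", base)  # False once resolve() collapses ".."
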
+ # Artifact Preparation
+ # ====================
+
+ def prepare_deployment_artifact(
+     source: Union[str, Path],
+     artifact_type: str = "auto",
+     config: Optional[Dict[str, Any]] = None
+ ) -> Tuple[Path, Dict[str, Any]]:
+     """
+     Prepare deployment artifact from source.
+
+     Consolidates artifact preparation from multiple services.
+
+     Args:
+         source: Source path
+         artifact_type: Type of artifact (auto, zip, tar, directory)
+         config: Additional configuration
+
+     Returns:
+         Tuple of (artifact_path, metadata)
+     """
+     source_path = Path(source)
+     config = config or {}
+     metadata = {
+         "source": str(source_path),
+         "type": artifact_type,
+         "created_at": datetime.now().isoformat(),
+     }
+
+     # Auto-detect type
+     if artifact_type == "auto":
+         if source_path.is_file():
+             artifact_type = "file"
+         elif source_path.is_dir():
+             artifact_type = "directory"
+         else:
+             raise ValueError(f"Source does not exist: {source_path}")
+
+     # Prepare based on type
+     artifact_dir = Path(tempfile.mkdtemp(prefix="deploy_artifact_"))
+
+     if artifact_type == "zip":
+         artifact_path = create_zip_artifact(source_path, artifact_dir)
+         metadata["format"] = "zip"
+     elif artifact_type == "tar":
+         artifact_path = create_tar_artifact(source_path, artifact_dir)
+         metadata["format"] = "tar.gz"
+     elif artifact_type == "directory":
+         artifact_path = artifact_dir / "content"
+         if source_path.is_dir():
+             shutil.copytree(source_path, artifact_path)
+         else:
+             artifact_path.mkdir(parents=True)
+             shutil.copy2(source_path, artifact_path / source_path.name)
+         metadata["format"] = "directory"
+     else:  # file
+         artifact_path = artifact_dir / source_path.name
+         shutil.copy2(source_path, artifact_path)
+         metadata["format"] = "file"
+
+     # Add checksums
+     metadata["checksum"] = calculate_checksum(artifact_path)
+     metadata["size_bytes"] = get_size(artifact_path)
+
+     return artifact_path, metadata
+
+
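A sketch of the call site (the ./build directory is hypothetical):

    artifact_path, metadata = prepare_deployment_artifact("./build", artifact_type="tar")
    print(metadata["format"])      # "tar.gz"
    print(metadata["checksum"])    # hex digest from calculate_checksum()
    print(metadata["size_bytes"])  # byte count from get_size()
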
+ def create_zip_artifact(source: Path, output_dir: Path) -> Path:
+     """Create ZIP artifact from source."""
+     import zipfile
+
+     zip_path = output_dir / f"{source.name}.zip"
+
+     with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
+         if source.is_file():
+             zipf.write(source, source.name)
+         else:
+             for file_path in source.rglob("*"):
+                 if file_path.is_file():
+                     arcname = file_path.relative_to(source.parent)
+                     zipf.write(file_path, arcname)
+
+     return zip_path
+
+
+ def create_tar_artifact(source: Path, output_dir: Path) -> Path:
+     """Create TAR.GZ artifact from source."""
+     import tarfile
+
+     tar_path = output_dir / f"{source.name}.tar.gz"
+
+     with tarfile.open(tar_path, "w:gz") as tar:
+         tar.add(source, arcname=source.name)
+
+     return tar_path
+
+
+ # Health Check Utilities
+ # ======================
+
+ def verify_deployment_health(
+     deployment_type: str,
+     deployment_info: Dict[str, Any],
+     checks: Optional[List[str]] = None
+ ) -> Dict[str, Any]:
+     """
+     Perform health checks on deployment.
+
+     Consolidates health check patterns from multiple services.
+
+     Args:
+         deployment_type: Type of deployment
+         deployment_info: Deployment information
+         checks: Specific checks to perform
+
+     Returns:
+         Health status dictionary
+     """
+     health = {
+         "status": "unknown",
+         "timestamp": datetime.now().isoformat(),
+         "checks": {},
+         "errors": [],
+     }
+
+     checks = checks or ["existence", "accessibility", "integrity"]
+
+     try:
+         # Existence check
+         if "existence" in checks:
+             if "deployed_path" in deployment_info:
+                 path = Path(deployment_info["deployed_path"])
+                 health["checks"]["exists"] = path.exists()
+
+         # Accessibility check
+         if "accessibility" in checks:
+             if "deployment_url" in deployment_info:
+                 health["checks"]["accessible"] = check_url_accessibility(
+                     deployment_info["deployment_url"]
+                 )
+
+         # Integrity check
+         if "integrity" in checks:
+             if "checksum" in deployment_info:
+                 health["checks"]["integrity"] = verify_checksum(
+                     deployment_info.get("deployed_path"),
+                     deployment_info["checksum"]
+                 )
+
+         # Service-specific checks
+         if deployment_type == "docker":
+             health["checks"]["container_running"] = check_docker_container(
+                 deployment_info.get("container_id")
+             )
+         elif deployment_type == "aws":
+             health["checks"]["aws_status"] = check_aws_deployment(
+                 deployment_info
+             )
+
+         # Determine overall status
+         if all(health["checks"].values()):
+             health["status"] = "healthy"
+         elif any(health["checks"].values()):
+             health["status"] = "degraded"
+         else:
+             health["status"] = "unhealthy"
+
+     except Exception as e:
+         health["status"] = "error"
+         health["errors"].append(str(e))
+
+     return health
+
+
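Assuming a deployment record shaped like the keys read above (deployed_path, deployment_url, checksum), usage might look like:

    info = {"deployed_path": "/srv/app", "deployment_url": "https://example.com/health"}
    health = verify_deployment_health("local", info, checks=["existence", "accessibility"])
    if health["status"] != "healthy":
        logger.warning(f"Deployment degraded: {health['checks']}")
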
+ def check_url_accessibility(url: str, timeout: int = 10) -> bool:
+     """Check if URL is accessible."""
+     try:
+         import urllib.request
+         with urllib.request.urlopen(url, timeout=timeout) as response:
+             return response.status < 400
+     except Exception:
+         return False
+
+
+ def check_docker_container(container_id: Optional[str]) -> bool:
+     """Check if Docker container is running."""
+     if not container_id:
+         return False
+
+     try:
+         result = subprocess.run(
+             ["docker", "inspect", "-f", "{{.State.Running}}", container_id],
+             capture_output=True,
+             text=True,
+             check=True,
+         )
+         return result.stdout.strip().lower() == "true"
+     except Exception:
+         return False
+
+
+ def check_aws_deployment(deployment_info: Dict[str, Any]) -> bool:
+     """Check AWS deployment status."""
+     # Simplified check - would use boto3 in production
+     return deployment_info.get("aws_status") == "deployed"
+
+
+ # Rollback Utilities
+ # ==================
+
+ def rollback_deployment(
+     deployment_type: str,
+     deployment_info: Dict[str, Any],
+     backup_info: Optional[Dict[str, Any]] = None
+ ) -> bool:
+     """
+     Rollback deployment to previous state.
+
+     Consolidates rollback patterns from multiple services.
+
+     Args:
+         deployment_type: Type of deployment
+         deployment_info: Current deployment information
+         backup_info: Backup information for restoration
+
+     Returns:
+         True if rollback successful
+     """
+     try:
+         if deployment_type == "local":
+             return rollback_local_deployment(deployment_info, backup_info)
+         elif deployment_type == "docker":
+             return rollback_docker_deployment(deployment_info)
+         elif deployment_type == "git":
+             return rollback_git_deployment(deployment_info)
+         else:
+             logger.warning(f"No rollback strategy for type: {deployment_type}")
+             return False
+
+     except Exception as e:
+         logger.error(f"Rollback failed: {str(e)}")
+         return False
+
+
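A hedged sketch of the dispatch, with deployment_info keyed as the type-specific helpers below expect (commit hash and path are hypothetical):

    ok = rollback_deployment(
        "git",
        {"repo_path": "/srv/checkout", "previous_commit": "abc1234"},
    )
    if not ok:
        logger.error("Rollback did not complete")
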
+ def rollback_local_deployment(
+     deployment_info: Dict[str, Any],
+     backup_info: Optional[Dict[str, Any]] = None
+ ) -> bool:
+     """Rollback local filesystem deployment."""
+     deployed_path = Path(deployment_info.get("deployed_path", ""))
+
+     if deployed_path.exists():
+         # Remove current deployment
+         if deployed_path.is_file():
+             deployed_path.unlink()
+         else:
+             shutil.rmtree(deployed_path)
+
+     # Restore from backup if available
+     if backup_info and "backup_path" in backup_info:
+         backup_path = Path(backup_info["backup_path"])
+         if backup_path.exists():
+             if backup_path.is_file():
+                 shutil.copy2(backup_path, deployed_path)
+             else:
+                 shutil.copytree(backup_path, deployed_path)
+             return True
+
+     return True
+
+
+ def rollback_docker_deployment(deployment_info: Dict[str, Any]) -> bool:
+     """Rollback Docker deployment."""
+     container_id = deployment_info.get("container_id")
+
+     if container_id:
+         # Stop and remove container
+         subprocess.run(["docker", "stop", container_id], check=False)
+         subprocess.run(["docker", "rm", container_id], check=False)
+
+     # Restore previous container if specified
+     if "previous_container" in deployment_info:
+         subprocess.run(
+             ["docker", "start", deployment_info["previous_container"]],
+             check=True
+         )
+
+     return True
+
+
+ def rollback_git_deployment(deployment_info: Dict[str, Any]) -> bool:
+     """Rollback Git-based deployment."""
+     repo_path = Path(deployment_info.get("repo_path", ""))
+     previous_commit = deployment_info.get("previous_commit")
+
+     if repo_path.exists() and previous_commit:
+         subprocess.run(
+             ["git", "checkout", previous_commit],
+             cwd=repo_path,
+             check=True
+         )
+         return True
+
+     return False
+
+
+ # Version Management
+ # ==================
+
+ def get_version_info(path: Union[str, Path]) -> Dict[str, Any]:
+     """
+     Extract version information from deployment.
+
+     Args:
+         path: Deployment path
+
+     Returns:
+         Version information dictionary
+     """
+     path = Path(path)
+     version_info = {}
+
+     # Check for version files
+     version_files = [
+         "VERSION",
+         ".version",
+         "version.txt",
+         "package.json",
+         "setup.py",
+         "pyproject.toml",
+     ]
+
+     for version_file in version_files:
+         file_path = path / version_file if path.is_dir() else path.parent / version_file
+
+         if file_path.exists():
+             if version_file == "package.json":
+                 with open(file_path) as f:
+                     data = json.load(f)
+                     version_info["version"] = data.get("version")
+                     version_info["source"] = "package.json"
+             elif version_file in ["setup.py", "pyproject.toml"]:
+                 # Simple regex extraction
+                 import re
+                 content = file_path.read_text()
+                 match = re.search(r'version\s*=\s*["\'](.*?)["\']', content)
+                 if match:
+                     version_info["version"] = match.group(1)
+                     version_info["source"] = version_file
+             else:
+                 # Plain text version file
+                 version_info["version"] = file_path.read_text().strip()
+                 version_info["source"] = version_file
+
+         if "version" in version_info:
+             break
+
+     return version_info
+
+
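For instance (hypothetical deployment directory; result shapes follow the branches above):

    info = get_version_info("/srv/app")
    # e.g. {"version": "4.4.0", "source": "VERSION"} if a VERSION file is present,
    # or   {"version": "1.0.3", "source": "package.json"} for a Node project
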
+ def update_version(
+     path: Union[str, Path],
+     new_version: str,
+     create_backup: bool = True
+ ) -> bool:
+     """
+     Update version in deployment.
+
+     Args:
+         path: Deployment path
+         new_version: New version string
+         create_backup: Whether to backup current version
+
+     Returns:
+         True if update successful
+     """
+     path = Path(path)
+     version_file = path / ".version" if path.is_dir() else path.with_suffix(".version")
+
+     try:
+         # Backup current version
+         if create_backup and version_file.exists():
+             backup_file = version_file.with_suffix(".backup")
+             shutil.copy2(version_file, backup_file)
+
+         # Write new version
+         version_file.write_text(f"{new_version}\n{datetime.now().isoformat()}\n")
+         return True
+
+     except Exception as e:
+         logger.error(f"Failed to update version: {str(e)}")
+         return False
+
+
+ # Checksum and Integrity
+ # ======================
+
+ def calculate_checksum(path: Union[str, Path], algorithm: str = "sha256") -> str:
+     """
+     Calculate checksum of file or directory.
+
+     Args:
+         path: Path to calculate checksum for
+         algorithm: Hash algorithm to use
+
+     Returns:
+         Hex digest of checksum
+     """
+     path = Path(path)
+     hasher = hashlib.new(algorithm)
+
+     if path.is_file():
+         with open(path, "rb") as f:
+             for chunk in iter(lambda: f.read(4096), b""):
+                 hasher.update(chunk)
+     elif path.is_dir():
+         # Hash all files in directory
+         for file_path in sorted(path.rglob("*")):
+             if file_path.is_file():
+                 hasher.update(str(file_path.relative_to(path)).encode())
+                 with open(file_path, "rb") as f:
+                     for chunk in iter(lambda: f.read(4096), b""):
+                         hasher.update(chunk)
+
+     return hasher.hexdigest()
+
+
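The directory digest is stable because files are walked in sorted order and each relative path is mixed into the hash; a round trip looks like:

    digest = calculate_checksum("/srv/app")          # sha256 by default
    assert verify_checksum("/srv/app", digest)       # True while content is unchanged
    calculate_checksum("/srv/app", algorithm="md5")  # any hashlib algorithm name works
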
+ def verify_checksum(
+     path: Union[str, Path],
+     expected_checksum: str,
+     algorithm: str = "sha256"
+ ) -> bool:
+     """
+     Verify checksum of file or directory.
+
+     Args:
+         path: Path to verify
+         expected_checksum: Expected checksum value
+         algorithm: Hash algorithm to use
+
+     Returns:
+         True if checksum matches
+     """
+     try:
+         actual_checksum = calculate_checksum(path, algorithm)
+         return actual_checksum == expected_checksum
+     except Exception:
+         return False
+
+
+ def get_size(path: Union[str, Path]) -> int:
+     """
+     Get size of file or directory in bytes.
+
+     Args:
+         path: Path to measure
+
+     Returns:
+         Size in bytes
+     """
+     path = Path(path)
+
+     if path.is_file():
+         return path.stat().st_size
+     elif path.is_dir():
+         total_size = 0
+         for file_path in path.rglob("*"):
+             if file_path.is_file():
+                 total_size += file_path.stat().st_size
+         return total_size
+     else:
+         return 0
+
+
+ # Environment Management
+ # ======================
+
+ def load_env_file(env_file: Union[str, Path]) -> Dict[str, str]:
+     """
+     Load environment variables from file.
+
+     Args:
+         env_file: Path to environment file
+
+     Returns:
+         Dictionary of environment variables
+     """
+     env_vars = {}
+     env_path = Path(env_file)
+
+     if env_path.exists():
+         with open(env_path) as f:
+             for line in f:
+                 line = line.strip()
+                 if line and not line.startswith("#") and "=" in line:
+                     key, value = line.split("=", 1)
+                     env_vars[key.strip()] = value.strip()
+
+     return env_vars
+
+
+ def merge_environments(*env_dicts: Dict[str, str]) -> Dict[str, str]:
+     """
+     Merge multiple environment dictionaries.
+
+     Later dictionaries override earlier ones.
+
+     Args:
+         *env_dicts: Environment dictionaries to merge
+
+     Returns:
+         Merged environment dictionary
+     """
+     merged = {}
+     for env_dict in env_dicts:
+         if env_dict:
+             merged.update(env_dict)
+     return merged
+
+
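Because later dictionaries win, a typical layering is defaults, then per-environment values, then explicit overrides (the .env.production file name is hypothetical):

    merged = merge_environments(
        {"PORT": "8080", "DEBUG": "false"},  # defaults
        load_env_file(".env.production"),    # values from disk, if present
        {"DEBUG": "true"},                   # explicit override wins
    )
    # merged["DEBUG"] == "true"
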
+ def export_env_to_file(
+     env_vars: Dict[str, str],
+     output_file: Union[str, Path]
+ ) -> None:
+     """
+     Export environment variables to file.
+
+     Args:
+         env_vars: Environment variables
+         output_file: Output file path
+     """
+     output_path = Path(output_file)
+     output_path.parent.mkdir(parents=True, exist_ok=True)
+
+     with open(output_path, "w") as f:
+         for key, value in env_vars.items():
+             # Escape special characters in value
+             if " " in value or '"' in value:
+                 value = f'"{value}"'
+             f.write(f"{key}={value}\n")
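Taken together, the environment helpers support a simple load/merge/export round trip (file names hypothetical):

    env = merge_environments(load_env_file(".env"), {"RELEASE": "4.4.0"})
    export_env_to_file(env, "/tmp/deploy/.env")
    # Note: values containing spaces or quotes are wrapped in double quotes on export,
    # while load_env_file does not strip quotes, so such values do not round-trip exactly.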