claude-mpm 4.5.0__py3-none-any.whl → 4.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/PM_INSTRUCTIONS.md +33 -0
  3. claude_mpm/agents/templates/web_qa.json +2 -2
  4. claude_mpm/cli/__init__.py +27 -11
  5. claude_mpm/cli/commands/doctor.py +1 -4
  6. claude_mpm/core/config.py +2 -2
  7. claude_mpm/core/framework/__init__.py +6 -6
  8. claude_mpm/core/unified_paths.py +13 -12
  9. claude_mpm/hooks/kuzu_memory_hook.py +1 -1
  10. claude_mpm/init.py +19 -0
  11. claude_mpm/services/async_session_logger.py +6 -2
  12. claude_mpm/services/claude_session_logger.py +2 -2
  13. claude_mpm/services/diagnostics/checks/mcp_services_check.py +2 -2
  14. claude_mpm/services/diagnostics/doctor_reporter.py +0 -2
  15. claude_mpm/services/mcp_config_manager.py +156 -105
  16. claude_mpm/services/mcp_gateway/core/process_pool.py +258 -36
  17. claude_mpm/services/mcp_gateway/utils/__init__.py +14 -0
  18. claude_mpm/services/mcp_gateway/utils/package_version_checker.py +160 -0
  19. claude_mpm/services/mcp_gateway/utils/update_preferences.py +170 -0
  20. claude_mpm/services/mcp_service_verifier.py +4 -4
  21. claude_mpm/services/monitor/event_emitter.py +6 -2
  22. claude_mpm/services/project/archive_manager.py +7 -9
  23. claude_mpm/services/project/documentation_manager.py +2 -3
  24. claude_mpm/services/project/enhanced_analyzer.py +1 -1
  25. claude_mpm/services/project/project_organizer.py +15 -12
  26. claude_mpm/services/unified/__init__.py +13 -13
  27. claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +4 -8
  28. claude_mpm/services/unified/analyzer_strategies/performance_analyzer.py +0 -1
  29. claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +8 -9
  30. claude_mpm/services/unified/config_strategies/__init__.py +37 -37
  31. claude_mpm/services/unified/config_strategies/config_schema.py +18 -22
  32. claude_mpm/services/unified/config_strategies/context_strategy.py +6 -7
  33. claude_mpm/services/unified/config_strategies/error_handling_strategy.py +6 -10
  34. claude_mpm/services/unified/config_strategies/file_loader_strategy.py +5 -9
  35. claude_mpm/services/unified/config_strategies/unified_config_service.py +2 -4
  36. claude_mpm/services/unified/config_strategies/validation_strategy.py +1 -1
  37. claude_mpm/services/unified/deployment_strategies/__init__.py +8 -8
  38. claude_mpm/services/unified/deployment_strategies/local.py +2 -5
  39. claude_mpm/services/unified/deployment_strategies/utils.py +13 -17
  40. claude_mpm/services/unified/deployment_strategies/vercel.py +5 -6
  41. claude_mpm/services/unified/unified_analyzer.py +1 -1
  42. claude_mpm/utils/common.py +3 -7
  43. claude_mpm/utils/database_connector.py +9 -12
  44. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/METADATA +2 -2
  45. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/RECORD +49 -46
  46. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/WHEEL +0 -0
  47. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/entry_points.txt +0 -0
  48. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/licenses/LICENSE +0 -0
  49. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,14 @@
1
+ """
2
+ MCP Gateway Utilities
3
+ ======================
4
+
5
+ Utility modules for the MCP Gateway service.
6
+ """
7
+
8
+ from .package_version_checker import PackageVersionChecker
9
+ from .update_preferences import UpdatePreferences
10
+
11
+ __all__ = [
12
+ "PackageVersionChecker",
13
+ "UpdatePreferences",
14
+ ]
@@ -0,0 +1,160 @@
1
+ """
2
+ Package Version Checker
3
+ =======================
4
+
5
+ Checks PyPI packages for updates with caching and timeout support.
6
+ Provides non-blocking version checking for MCP tools like kuzu-memory.
7
+ """
8
+
9
+ import asyncio
10
+ import json
11
+ from datetime import datetime, timedelta
12
+ from pathlib import Path
13
+ from typing import Any, Dict, Optional
14
+
15
+ import aiohttp
16
+ from packaging import version
17
+
18
+ from ....core.logger import get_logger
19
+
20
+
21
class PackageVersionChecker:
    """
    Check PyPI packages for updates with caching and timeout.

    WHY: Automatically detect when newer versions of critical packages
    (like kuzu-memory) are available to help users stay up-to-date.

    DESIGN DECISIONS:
    - Cache results to avoid excessive PyPI API calls
    - Non-blocking with timeout to prevent startup delays
    - Graceful failure handling (never block system operation)
    """

    # Per-user cache directory for previous PyPI lookup results.
    CACHE_DIR = Path.home() / ".cache" / "claude-mpm" / "version-checks"
    DEFAULT_CACHE_TTL = 86400  # 24 hours
    PYPI_TIMEOUT = 5  # seconds

    def __init__(self):
        """Initialize the version checker and ensure the cache directory exists."""
        self.logger = get_logger("PackageVersionChecker")
        self.cache_dir = self.CACHE_DIR
        self.cache_dir.mkdir(parents=True, exist_ok=True)

    async def check_for_update(
        self, package_name: str, current_version: str, cache_ttl: Optional[int] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Check if a package has updates available.

        Args:
            package_name: Name of the package on PyPI
            current_version: Currently installed version
            cache_ttl: Cache time-to-live in seconds (defaults to DEFAULT_CACHE_TTL)

        Returns:
            Dict with update information or None if check fails:
            {
                "current": "1.0.0",
                "latest": "1.1.0",
                "update_available": True,
                "checked_at": "2025-01-29T12:00:00"
            }
        """
        cache_ttl = cache_ttl or self.DEFAULT_CACHE_TTL

        # Check cache first to avoid hitting the PyPI API on every call.
        cache_file = self.cache_dir / f"{package_name}.json"
        cached = self._read_cache(cache_file, cache_ttl)
        if cached:
            # FIX: the cached comparison must be guarded. A malformed or
            # truncated cache entry (missing "latest", unparsable version
            # string) previously raised KeyError/InvalidVersion out of this
            # method, violating the "never block system operation" guarantee.
            # On any problem we fall through to a fresh PyPI fetch instead.
            try:
                cached["current"] = current_version
                cached["update_available"] = version.parse(
                    cached["latest"]
                ) > version.parse(current_version)
                return cached
            except Exception as e:
                self.logger.debug(f"Invalid cache entry for {package_name}: {e}")

        # Fetch from PyPI with timeout; failures are logged and swallowed.
        try:
            latest = await self._fetch_pypi_version(package_name)
            if latest:
                result = {
                    "current": current_version,
                    "latest": latest,
                    "update_available": version.parse(latest)
                    > version.parse(current_version),
                    "checked_at": datetime.now().isoformat(),
                }
                self._write_cache(cache_file, result)
                return result
        except Exception as e:
            self.logger.debug(f"Version check failed for {package_name}: {e}")

        return None

    async def _fetch_pypi_version(self, package_name: str) -> Optional[str]:
        """
        Fetch the latest version from PyPI.

        Args:
            package_name: Package name to query

        Returns:
            Latest version string or None if fetch fails (timeout, HTTP
            error, or unexpected payload — all handled gracefully)
        """
        url = f"https://pypi.org/pypi/{package_name}/json"

        try:
            timeout = aiohttp.ClientTimeout(total=self.PYPI_TIMEOUT)
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.get(url) as response:
                    if response.status == 200:
                        data = await response.json()
                        return data["info"]["version"]
        except asyncio.TimeoutError:
            self.logger.debug(f"PyPI request timed out for {package_name}")
        except Exception as e:
            self.logger.debug(f"PyPI request failed: {e}")

        return None

    def _read_cache(self, cache_file: Path, ttl: int) -> Optional[Dict[str, Any]]:
        """
        Read from cache if still within its time-to-live.

        Args:
            cache_file: Path to cache file
            ttl: Time-to-live in seconds

        Returns:
            Cached data if present and fresh, None otherwise (including on
            any read/parse error — cache misses are never fatal)
        """
        if not cache_file.exists():
            return None

        try:
            with open(cache_file) as f:
                data = json.load(f)

            # Entry is valid only while younger than the TTL.
            checked_at = datetime.fromisoformat(data["checked_at"])
            if datetime.now() - checked_at < timedelta(seconds=ttl):
                return data
        except Exception as e:
            self.logger.debug(f"Cache read error: {e}")

        return None

    def _write_cache(self, cache_file: Path, data: Dict[str, Any]) -> None:
        """
        Write data to a cache file; failures are logged and ignored.

        Args:
            cache_file: Path to cache file
            data: Data to cache (must be JSON-serializable)
        """
        try:
            with open(cache_file, "w") as f:
                json.dump(data, f, indent=2)
        except Exception as e:
            self.logger.debug(f"Cache write failed: {e}")
@@ -0,0 +1,170 @@
1
+ """
2
+ Update Preferences Manager
3
+ ===========================
4
+
5
+ Manages user preferences for package update checking.
6
+ Allows users to skip specific versions or disable update checks entirely.
7
+ """
8
+
9
+ import json
10
+ from pathlib import Path
11
+ from typing import Any, Dict
12
+
13
+
14
class UpdatePreferences:
    """
    Manage update checking preferences.

    WHY: Respect user preferences about update notifications to avoid
    annoying users who want to stay on specific versions or disable checks.

    DESIGN DECISIONS:
    - Store preferences in user's home directory
    - Simple JSON format for easy manual editing
    - Per-package preferences for granular control
    """

    # Plain JSON in the user's home directory so it can be edited by hand.
    PREFS_FILE = Path.home() / ".claude-mpm" / "mcp_updates.json"

    @classmethod
    def load(cls) -> Dict[str, Any]:
        """
        Load preferences from disk.

        Returns:
            Dictionary of preferences; empty dict if the file is missing,
            unreadable, or corrupted.
        """
        if cls.PREFS_FILE.exists():
            try:
                with open(cls.PREFS_FILE) as f:
                    return json.load(f)
            except (OSError, json.JSONDecodeError):
                # Corrupted or unreadable file: fall back to defaults.
                pass
        return {}

    @classmethod
    def save(cls, prefs: Dict[str, Any]) -> None:
        """
        Save preferences to disk.

        Args:
            prefs: Preferences dictionary to save
        """
        cls.PREFS_FILE.parent.mkdir(parents=True, exist_ok=True)
        try:
            with open(cls.PREFS_FILE, "w") as f:
                json.dump(prefs, f, indent=2)
        except OSError:
            # Preferences are best-effort; never crash the caller on write.
            pass

    @classmethod
    def _set_package_pref(cls, package: str, key: str, value: Any) -> None:
        """
        Set one per-package preference key and persist the result.

        FIX: this helper replaces the identical "ensure packages dict /
        package entry exists" boilerplate that was triplicated across
        set_skip_version, disable_package_checks, and enable_package_checks.

        Args:
            package: Package name
            key: Preference key within the package entry
            value: Value to store
        """
        prefs = cls.load()
        prefs.setdefault("packages", {}).setdefault(package, {})[key] = value
        cls.save(prefs)

    @classmethod
    def should_check_package(cls, package: str) -> bool:
        """
        Check if updates should be checked for a specific package.

        Args:
            package: Package name to check

        Returns:
            True if updates should be checked, False otherwise
        """
        prefs = cls.load()

        # Global preference takes precedence over per-package settings.
        if not prefs.get("global_check_enabled", True):
            return False

        # Package-specific preference (defaults to enabled).
        pkg_prefs = prefs.get("packages", {}).get(package, {})
        return pkg_prefs.get("check_enabled", True)

    @classmethod
    def should_skip_version(cls, package: str, version: str) -> bool:
        """
        Check if a specific version should be skipped for a package.

        Args:
            package: Package name
            version: Version to check

        Returns:
            True if this version should be skipped, False otherwise
        """
        prefs = cls.load()
        pkg_prefs = prefs.get("packages", {}).get(package, {})
        return pkg_prefs.get("skip_version") == version

    @classmethod
    def set_skip_version(cls, package: str, version: str) -> None:
        """
        Remember to skip a specific version for a package.

        Args:
            package: Package name
            version: Version to skip
        """
        cls._set_package_pref(package, "skip_version", version)

    @classmethod
    def disable_package_checks(cls, package: str) -> None:
        """
        Disable update checks for a specific package.

        Args:
            package: Package name to disable checks for
        """
        cls._set_package_pref(package, "check_enabled", False)

    @classmethod
    def enable_package_checks(cls, package: str) -> None:
        """
        Enable update checks for a specific package.

        Args:
            package: Package name to enable checks for
        """
        cls._set_package_pref(package, "check_enabled", True)

    @classmethod
    def disable_all_checks(cls) -> None:
        """Disable all update checks globally."""
        prefs = cls.load()
        prefs["global_check_enabled"] = False
        cls.save(prefs)

    @classmethod
    def enable_all_checks(cls) -> None:
        """Enable all update checks globally."""
        prefs = cls.load()
        prefs["global_check_enabled"] = True
        cls.save(prefs)
@@ -311,7 +311,7 @@ class MCPServiceVerifier:
311
311
  try:
312
312
  # First try direct execution
313
313
  result = subprocess.run(
314
- [path] + test_args,
314
+ [path, *test_args],
315
315
  capture_output=True,
316
316
  text=True,
317
317
  timeout=10,
@@ -339,7 +339,7 @@ class MCPServiceVerifier:
339
339
  # Try pipx run as fallback
340
340
  if shutil.which("pipx"):
341
341
  result = subprocess.run(
342
- ["pipx", "run", service_name] + test_args,
342
+ ["pipx", "run", service_name, *test_args],
343
343
  capture_output=True,
344
344
  text=True,
345
345
  timeout=10,
@@ -655,7 +655,7 @@ class MCPServiceVerifier:
655
655
  working = []
656
656
  issues = []
657
657
 
658
- for name, diag in diagnostics.items():
658
+ for _name, diag in diagnostics.items():
659
659
  if diag.status == ServiceStatus.WORKING:
660
660
  working.append(diag)
661
661
  else:
@@ -710,7 +710,7 @@ def verify_mcp_services_on_startup() -> Tuple[bool, str]:
710
710
  Tuple of (all_working, summary_message)
711
711
  """
712
712
  verifier = MCPServiceVerifier()
713
- logger = get_logger(__name__)
713
+ get_logger(__name__)
714
714
 
715
715
  # Do quick checks only (don't block startup)
716
716
  issues = []
@@ -26,7 +26,8 @@ class AsyncEventEmitter:
26
26
 
27
27
  def __init__(self):
28
28
  """Initialize the event emitter."""
29
- self.logger = get_logger(__name__)
29
+ # Use a shorter, cleaner logger name instead of full module path
30
+ self.logger = get_logger("event_emitter")
30
31
 
31
32
  # Direct emission targets (in-process)
32
33
  self._socketio_servers: Set[weakref.ref] = set()
@@ -221,7 +222,10 @@ class AsyncEventEmitter:
221
222
  self.logger.warning(f"HTTP emission timeout: {event}")
222
223
  return False
223
224
  except aiohttp.ClientError as e:
224
- self.logger.warning(f"HTTP emission client error: {e}")
225
+ # Use DEBUG since the monitor service is optional and may not be running
226
+ self.logger.debug(
227
+ f"HTTP emission client error (monitor may not be running): {e}"
228
+ )
225
229
  return False
226
230
  except Exception as e:
227
231
  self.logger.error(f"HTTP emission unexpected error: {e}")
@@ -16,6 +16,7 @@ Author: Claude MPM Development Team
16
16
  Created: 2025-01-26
17
17
  """
18
18
 
19
+ import contextlib
19
20
  import difflib
20
21
  import gzip
21
22
  import hashlib
@@ -173,7 +174,7 @@ Generated by Claude MPM Archive Manager
173
174
  # Compress old archives
174
175
  cutoff_compress = datetime.now() - timedelta(days=self.COMPRESS_AFTER_DAYS)
175
176
  for archive in archives:
176
- if not archive.suffix == ".gz":
177
+ if archive.suffix != ".gz":
177
178
  mtime = datetime.fromtimestamp(archive.stat().st_mtime)
178
179
  if mtime < cutoff_compress:
179
180
  self._compress_archive(archive)
@@ -247,7 +248,7 @@ Generated by Claude MPM Archive Manager
247
248
  archive_files = [
248
249
  f
249
250
  for f in self.archive_path.glob("*")
250
- if not f.name.endswith(".meta.json") and not f.name == "README.md"
251
+ if not f.name.endswith(".meta.json") and f.name != "README.md"
251
252
  ]
252
253
 
253
254
  for archive_file in archive_files:
@@ -265,10 +266,8 @@ Generated by Claude MPM Archive Manager
265
266
  if include_metadata:
266
267
  meta_path = self.archive_path / f"{archive_file.name}.meta.json"
267
268
  if meta_path.exists():
268
- try:
269
+ with contextlib.suppress(Exception):
269
270
  info["metadata"] = json.loads(meta_path.read_text())
270
- except Exception:
271
- pass
272
271
 
273
272
  archives.append(info)
274
273
 
@@ -452,7 +451,7 @@ Generated by Claude MPM Archive Manager
452
451
 
453
452
  try:
454
453
  result = subprocess.run(
455
- ["git"] + args,
454
+ ["git", *args],
456
455
  cwd=self.project_path,
457
456
  capture_output=True,
458
457
  text=True,
@@ -528,7 +527,7 @@ Generated by Claude MPM Archive Manager
528
527
  }
529
528
 
530
529
  # Review each key documentation file
531
- for doc_file, description in self.key_docs.items():
530
+ for doc_file, _description in self.key_docs.items():
532
531
  file_path = self.project_path / doc_file
533
532
  if file_path.exists():
534
533
  file_report = self._review_single_doc(file_path, check_git)
@@ -956,13 +955,12 @@ Generated by Claude MPM Archive Manager
956
955
  )
957
956
 
958
957
  # Update explicit version mentions
959
- content = re.sub(
958
+ return re.sub(
960
959
  r"(?:Version|version|v)\s*\d+\.\d+\.\d+",
961
960
  f"v{new_version}",
962
961
  content,
963
962
  )
964
963
 
965
- return content
966
964
 
967
965
  def display_review_summary(self, review: Dict) -> None:
968
966
  """Display a formatted summary of the documentation review."""
@@ -231,9 +231,8 @@ class DocumentationManager:
231
231
  merged = self._reorganize_by_priority(merged)
232
232
 
233
233
  # Add metadata
234
- merged = self._add_metadata(merged)
234
+ return self._add_metadata(merged)
235
235
 
236
- return merged
237
236
 
238
237
  def _parse_into_sections(self, content: str) -> Dict[str, str]:
239
238
  """Parse markdown content into a dictionary of sections."""
@@ -337,7 +336,7 @@ class DocumentationManager:
337
336
 
338
337
  # Reconstruct section
339
338
  if all_items:
340
- return "\n".join([""] + all_items + [""])
339
+ return "\n".join(["", *all_items, ""])
341
340
  return new
342
341
  # For other sections, use new as base and append existing
343
342
  if existing.strip() and existing.strip() != new.strip():
@@ -61,7 +61,7 @@ class EnhancedProjectAnalyzer:
61
61
  """Run a git command and return output."""
62
62
  try:
63
63
  result = subprocess.run(
64
- ["git"] + args,
64
+ ["git", *args],
65
65
  cwd=self.project_path,
66
66
  capture_output=True,
67
67
  text=True,
@@ -54,6 +54,10 @@ class ProjectOrganizer:
54
54
  ".claude-mpm/cache/",
55
55
  ".claude-mpm/logs/",
56
56
  ".claude/cache/",
57
+ # MCP service directories for local data
58
+ ".mcp-vector-search/",
59
+ ".kuzu-memory/",
60
+ "kuzu-memories/", # kuzu-memory database directory
57
61
  # Python artifacts
58
62
  "__pycache__/",
59
63
  "*.py[cod]",
@@ -444,11 +448,11 @@ This directory is used for {description.lower()}.
444
448
  existing_patterns = set()
445
449
  if self.gitignore_path.exists():
446
450
  content = self.gitignore_path.read_text()
447
- existing_patterns = set(
451
+ existing_patterns = {
448
452
  line.strip()
449
453
  for line in content.splitlines()
450
454
  if line.strip() and not line.startswith("#")
451
- )
455
+ }
452
456
  else:
453
457
  content = ""
454
458
 
@@ -973,16 +977,15 @@ This directory is used for {description.lower()}.
973
977
  root_files = list(self.project_path.glob("*"))
974
978
  misplaced_count = 0
975
979
  for file in root_files:
976
- if file.is_file():
977
- if (
978
- ("test" in file.name.lower() and file.suffix == ".py")
979
- or (
980
- file.suffix in [".sh", ".bash"]
981
- and file.name not in ["Makefile"]
982
- )
983
- or file.suffix in [".log", ".tmp", ".cache"]
984
- ):
985
- misplaced_count += 1
980
+ if file.is_file() and (
981
+ ("test" in file.name.lower() and file.suffix == ".py")
982
+ or (
983
+ file.suffix in [".sh", ".bash"]
984
+ and file.name not in ["Makefile"]
985
+ )
986
+ or file.suffix in [".log", ".tmp", ".cache"]
987
+ ):
988
+ misplaced_count += 1
986
989
 
987
990
  if misplaced_count > 0:
988
991
  validation["warnings"].append(
@@ -42,24 +42,24 @@ from .unified_config import UnifiedConfigManager
42
42
  from .unified_deployment import UnifiedDeploymentService
43
43
 
44
44
  __all__ = [
45
- # Interfaces
46
- "IDeploymentService",
45
+ "AnalyzerStrategy",
46
+ "ConfigStrategy",
47
+ # Strategies
48
+ "DeploymentStrategy",
49
+ "FeatureFlag",
47
50
  "IAnalyzerService",
48
51
  "IConfigurationService",
49
- "ServiceMetadata",
52
+ # Interfaces
53
+ "IDeploymentService",
54
+ "MigrationStatus",
50
55
  "ServiceCapability",
51
- # Strategies
52
- "DeploymentStrategy",
53
- "AnalyzerStrategy",
54
- "ConfigStrategy",
55
- "StrategyRegistry",
56
56
  # Migration
57
57
  "ServiceMapper",
58
- "MigrationStatus",
59
- "FeatureFlag",
60
- "create_compatibility_wrapper",
61
- # Unified Services
62
- "UnifiedDeploymentService",
58
+ "ServiceMetadata",
59
+ "StrategyRegistry",
63
60
  "UnifiedAnalyzer",
64
61
  "UnifiedConfigManager",
62
+ # Unified Services
63
+ "UnifiedDeploymentService",
64
+ "create_compatibility_wrapper",
65
65
  ]
@@ -234,10 +234,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
234
234
  # Check for pyproject.toml variants
235
235
  if manifest_path.name == "pyproject.toml":
236
236
  content = manifest_path.read_text()
237
- if "[tool.poetry]" in content:
238
- manager = "poetry"
239
- else:
240
- manager = "pip"
237
+ manager = "poetry" if "[tool.poetry]" in content else "pip"
241
238
 
242
239
  if not manager:
243
240
  return {
@@ -537,7 +534,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
537
534
  """Detect web frameworks from dependencies."""
538
535
  detected = []
539
536
 
540
- for language, frameworks in self.FRAMEWORK_PACKAGES.items():
537
+ for _language, frameworks in self.FRAMEWORK_PACKAGES.items():
541
538
  for framework in frameworks:
542
539
  if framework in dependencies:
543
540
  detected.append(framework)
@@ -561,7 +558,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
561
558
  """Detect testing tools from dependencies."""
562
559
  detected = []
563
560
 
564
- for language, tools in self.TESTING_PACKAGES.items():
561
+ for _language, tools in self.TESTING_PACKAGES.items():
565
562
  for tool in tools:
566
563
  if tool in dependencies:
567
564
  detected.append(tool)
@@ -570,7 +567,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
570
567
 
571
568
  def _check_vulnerabilities(self, project_path: Path) -> Dict[str, Any]:
572
569
  """Check for known security vulnerabilities in dependencies."""
573
- vulnerabilities = {
570
+ return {
574
571
  "total": 0,
575
572
  "critical": 0,
576
573
  "high": 0,
@@ -583,7 +580,6 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
583
580
  # In production, you would integrate with vulnerability databases
584
581
  # like npm audit, pip-audit, or safety
585
582
 
586
- return vulnerabilities
587
583
 
588
584
  def _calculate_statistics(self, results: Dict[str, Any]) -> Dict[str, Any]:
589
585
  """Calculate dependency statistics."""
@@ -276,7 +276,6 @@ class PerformanceAnalyzerStrategy(AnalyzerStrategy):
276
276
  ".sql",
277
277
  }
278
278
 
279
- files_with_issues = []
280
279
  total_score = 0
281
280
 
282
281
  # Analyze each file
@@ -716,14 +716,13 @@ class StructureAnalyzerStrategy(AnalyzerStrategy):
716
716
  }
717
717
 
718
718
  # Compare architecture
719
- if "architecture" in baseline and "architecture" in current:
720
- if (
721
- baseline["architecture"]["pattern"]
722
- != current["architecture"]["pattern"]
723
- ):
724
- comparison["architecture_change"] = {
725
- "baseline": baseline["architecture"]["pattern"],
726
- "current": current["architecture"]["pattern"],
727
- }
719
+ if "architecture" in baseline and "architecture" in current and (
720
+ baseline["architecture"]["pattern"]
721
+ != current["architecture"]["pattern"]
722
+ ):
723
+ comparison["architecture_change"] = {
724
+ "baseline": baseline["architecture"]["pattern"],
725
+ "current": current["architecture"]["pattern"],
726
+ }
728
727
 
729
728
  return comparison