claude-mpm 4.5.0__py3-none-any.whl → 4.5.5__py3-none-any.whl

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/PM_INSTRUCTIONS.md +33 -0
  3. claude_mpm/agents/templates/web_qa.json +2 -2
  4. claude_mpm/cli/__init__.py +27 -11
  5. claude_mpm/cli/commands/doctor.py +1 -4
  6. claude_mpm/core/config.py +2 -2
  7. claude_mpm/core/framework/__init__.py +6 -6
  8. claude_mpm/core/unified_paths.py +13 -12
  9. claude_mpm/hooks/kuzu_memory_hook.py +1 -1
  10. claude_mpm/init.py +19 -0
  11. claude_mpm/services/async_session_logger.py +6 -2
  12. claude_mpm/services/claude_session_logger.py +2 -2
  13. claude_mpm/services/diagnostics/checks/mcp_services_check.py +2 -2
  14. claude_mpm/services/diagnostics/doctor_reporter.py +0 -2
  15. claude_mpm/services/mcp_config_manager.py +156 -105
  16. claude_mpm/services/mcp_gateway/core/process_pool.py +258 -36
  17. claude_mpm/services/mcp_gateway/utils/__init__.py +14 -0
  18. claude_mpm/services/mcp_gateway/utils/package_version_checker.py +160 -0
  19. claude_mpm/services/mcp_gateway/utils/update_preferences.py +170 -0
  20. claude_mpm/services/mcp_service_verifier.py +4 -4
  21. claude_mpm/services/monitor/event_emitter.py +6 -2
  22. claude_mpm/services/project/archive_manager.py +7 -9
  23. claude_mpm/services/project/documentation_manager.py +2 -3
  24. claude_mpm/services/project/enhanced_analyzer.py +1 -1
  25. claude_mpm/services/project/project_organizer.py +15 -12
  26. claude_mpm/services/unified/__init__.py +13 -13
  27. claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +4 -8
  28. claude_mpm/services/unified/analyzer_strategies/performance_analyzer.py +0 -1
  29. claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +8 -9
  30. claude_mpm/services/unified/config_strategies/__init__.py +37 -37
  31. claude_mpm/services/unified/config_strategies/config_schema.py +18 -22
  32. claude_mpm/services/unified/config_strategies/context_strategy.py +6 -7
  33. claude_mpm/services/unified/config_strategies/error_handling_strategy.py +6 -10
  34. claude_mpm/services/unified/config_strategies/file_loader_strategy.py +5 -9
  35. claude_mpm/services/unified/config_strategies/unified_config_service.py +2 -4
  36. claude_mpm/services/unified/config_strategies/validation_strategy.py +1 -1
  37. claude_mpm/services/unified/deployment_strategies/__init__.py +8 -8
  38. claude_mpm/services/unified/deployment_strategies/local.py +2 -5
  39. claude_mpm/services/unified/deployment_strategies/utils.py +13 -17
  40. claude_mpm/services/unified/deployment_strategies/vercel.py +5 -6
  41. claude_mpm/services/unified/unified_analyzer.py +1 -1
  42. claude_mpm/utils/common.py +3 -7
  43. claude_mpm/utils/database_connector.py +9 -12
  44. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/METADATA +2 -2
  45. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/RECORD +49 -46
  46. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/WHEEL +0 -0
  47. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/entry_points.txt +0 -0
  48. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/licenses/LICENSE +0 -0
  49. {claude_mpm-4.5.0.dist-info → claude_mpm-4.5.5.dist-info}/top_level.txt +0 -0
@@ -97,55 +97,55 @@ ConfigLoader = UnifiedConfigService

  # Export all public APIs
  __all__ = [
- # Main service
- "UnifiedConfigService",
- "unified_config",
- # Strategies
- "FileLoaderStrategy",
- "ValidationStrategy",
- "ErrorHandlingStrategy",
- "ContextStrategy",
+ "CompositeFileLoader",
+ "ConfigContext",
  # Core types
  "ConfigFormat",
- "ConfigContext",
+ "ConfigLoader",
+ "ConfigManager",
  "ConfigMetadata",
- "IConfigStrategy",
- # File loading
- "LoaderType",
- "FileLoadContext",
- "StructuredFileLoader",
+ "ConfigMigration",
+ # Schema
+ "ConfigSchema",
+ # Backward compatibility
+ "ConfigService",
+ "ContextLifecycle",
+ # Context management
+ "ContextScope",
+ "ContextStrategy",
  "EnvironmentFileLoader",
- "ProgrammaticFileLoader",
- "LegacyFileLoader",
- "CompositeFileLoader",
- # Validation
- "ValidationRule",
- "ValidationResult",
- "ValidationType",
+ "ErrorCategory",
  # Error handling
  "ErrorContext",
  "ErrorHandlingResult",
- "ErrorCategory",
+ "ErrorHandlingStrategy",
  "ErrorSeverity",
- # Context management
- "ContextScope",
- "ContextLifecycle",
+ "FileLoadContext",
+ # Strategies
+ "FileLoaderStrategy",
  "HierarchicalContextManager",
- "ScopedConfigManager",
- # Schema
- "ConfigSchema",
- "SchemaProperty",
+ "IConfigStrategy",
+ "LegacyFileLoader",
+ # File loading
+ "LoaderType",
+ "ProgrammaticFileLoader",
  "SchemaBuilder",
- "SchemaValidator",
+ "SchemaFormat",
+ "SchemaProperty",
  "SchemaRegistry",
- "ConfigMigration",
- "TypedConfig",
  "SchemaType",
- "SchemaFormat",
- # Backward compatibility
- "ConfigService",
- "ConfigManager",
- "ConfigLoader",
+ "SchemaValidator",
+ "ScopedConfigManager",
+ "StructuredFileLoader",
+ "TypedConfig",
+ # Main service
+ "UnifiedConfigService",
+ "ValidationResult",
+ # Validation
+ "ValidationRule",
+ "ValidationStrategy",
+ "ValidationType",
+ "unified_config",
  ]

@@ -165,9 +165,8 @@ class SchemaBuilder:
  prop = SchemaProperty(type=type, **kwargs)
  self.schema.properties[name] = prop

- if kwargs.get("required", False):
- if name not in self.schema.required:
- self.schema.required.append(name)
+ if kwargs.get("required", False) and name not in self.schema.required:
+ self.schema.required.append(name)

  return self

@@ -387,9 +386,8 @@ class SchemaValidator:
  if not re.match(prop.pattern, value):
  self.errors.append(f"{path}: does not match pattern {prop.pattern}")

- if prop.format:
- if not self._validate_format(value, prop.format):
- self.errors.append(f"{path}: invalid format {prop.format.value}")
+ if prop.format and not self._validate_format(value, prop.format):
+ self.errors.append(f"{path}: invalid format {prop.format.value}")

  def _validate_array(self, value: List, prop: SchemaProperty, path: str):
  """Validate array constraints"""
@@ -432,24 +430,22 @@ class SchemaValidator:
  if req not in value:
  self.errors.append(f"{path}: required property '{req}' missing")

- if not prop.additional_properties:
- if prop.properties:
- extra = set(value.keys()) - set(prop.properties.keys())
- if extra:
- self.errors.append(
- f"{path}: additional properties not allowed: {extra}"
- )
+ if not prop.additional_properties and prop.properties:
+ extra = set(value.keys()) - set(prop.properties.keys())
+ if extra:
+ self.errors.append(
+ f"{path}: additional properties not allowed: {extra}"
+ )

  def _validate_dependencies(self, config: Dict, dependencies: Dict):
  """Validate field dependencies"""
  for field, deps in dependencies.items():
- if field in config:
- if isinstance(deps, list):
- for dep in deps:
- if dep not in config:
- self.errors.append(
- f"Field '{field}' requires '{dep}' to be present"
- )
+ if field in config and isinstance(deps, list):
+ for dep in deps:
+ if dep not in config:
+ self.errors.append(
+ f"Field '{field}' requires '{dep}' to be present"
+ )

  def _validate_format(self, value: str, format: SchemaFormat) -> bool:
  """Validate string format"""
@@ -602,7 +598,7 @@ class ConfigMigration:

  # Build graph of migrations
  graph = {}
- for from_v, to_v in self.migrations.keys():
+ for from_v, to_v in self.migrations:
  if from_v not in graph:
  graph[from_v] = []
  graph[from_v].append(to_v)
@@ -620,7 +616,7 @@
  for next_v in graph.get(current, []):
  if next_v not in visited:
  visited.add(next_v)
- queue.append((next_v, path + [next_v]))
+ queue.append((next_v, [*path, next_v]))

  return None
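
Note: the two ConfigMigration hunks above adjust a breadth-first search over a graph keyed by (from_version, to_version) pairs. A minimal standalone sketch of that idea, using hypothetical migration entries rather than the package's actual data:

```python
from collections import deque
from typing import Dict, List, Optional, Tuple

# Hypothetical migration table: (from_version, to_version) -> migration name.
migrations: Dict[Tuple[str, str], str] = {
    ("1.0", "1.1"): "add_logging_section",
    ("1.1", "2.0"): "rename_db_keys",
}


def find_migration_path(start: str, target: str) -> Optional[List[str]]:
    """Return the version chain from start to target, or None if unreachable."""
    graph: Dict[str, List[str]] = {}
    for from_v, to_v in migrations:  # iterating the dict yields its (from, to) keys
        graph.setdefault(from_v, []).append(to_v)

    queue = deque([(start, [start])])
    visited = {start}
    while queue:
        current, path = queue.popleft()
        if current == target:
            return path
        for next_v in graph.get(current, []):
            if next_v not in visited:
                visited.add(next_v)
                queue.append((next_v, [*path, next_v]))
    return None


print(find_migration_path("1.0", "2.0"))  # ['1.0', '1.1', '2.0']
```
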
@@ -136,12 +136,11 @@ class HierarchicalContextManager(BaseContextManager):
  self.contexts[context_id] = metadata

  # Update hierarchy
- if parent_id:
- if parent_id in self.contexts:
- self.contexts[parent_id].children.append(context_id)
- if parent_id not in self.context_hierarchy:
- self.context_hierarchy[parent_id] = []
- self.context_hierarchy[parent_id].append(context_id)
+ if parent_id and parent_id in self.contexts:
+ self.contexts[parent_id].children.append(context_id)
+ if parent_id not in self.context_hierarchy:
+ self.context_hierarchy[parent_id] = []
+ self.context_hierarchy[parent_id].append(context_id)

  # Initialize context config
  self.configs[context_id] = ContextConfig(context_id=context_id, data={})
@@ -511,7 +510,7 @@ class CachingContextManager:
  """Invalidate all cached values for context"""
  with self._lock:
  keys_to_remove = [
- k for k in self.cache.keys() if k.startswith(f"{context_id}:")
+ k for k in self.cache if k.startswith(f"{context_id}:")
  ]

  for key in keys_to_remove:
@@ -86,7 +86,7 @@ class BaseErrorHandler(ABC):
  def handle(self, context: ErrorContext) -> ErrorHandlingResult:
  """Handle the error"""

- def log_error(self, context: ErrorContext, message: str = None):
+ def log_error(self, context: ErrorContext, message: Optional[str] = None):
  """Log error with appropriate level"""
  log_message = message or str(context.error)

@@ -309,8 +309,7 @@ class ParsingErrorHandler(BaseErrorHandler):
  # Remove single-line comments
  content = re.sub(r"//.*?$", "", content, flags=re.MULTILINE)
  # Remove multi-line comments
- content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)
- return content
+ return re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)

  def _fix_json_quotes(self, content: str) -> str:
  """Fix quote issues in JSON"""
@@ -319,24 +318,21 @@
  # Replace single quotes with double quotes (careful with values)
  # This is a simple approach - more sophisticated parsing might be needed
  content = re.sub(r"'([^']*)':", r'"\1":', content) # Keys
- content = re.sub(r":\s*'([^']*)'", r': "\1"', content) # Values
- return content
+ return re.sub(r":\s*'([^']*)'", r': "\1"', content) # Values

  def _fix_json_trailing_commas(self, content: str) -> str:
  """Remove trailing commas"""
  import re

  content = re.sub(r",\s*}", "}", content)
- content = re.sub(r",\s*]", "]", content)
- return content
+ return re.sub(r",\s*]", "]", content)

  def _fix_json_unquoted_keys(self, content: str) -> str:
  """Add quotes to unquoted keys"""
  import re

  # Match unquoted keys (word characters followed by colon)
- content = re.sub(r"(\w+):", r'"\1":', content)
- return content
+ return re.sub(r"(\w+):", r'"\1":', content)

  def _parse_lenient_json(
  self, content: str, result: ErrorHandlingResult
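
Note: the ParsingErrorHandler helpers above chain simple regex clean-ups (strip comments, quote keys and values, drop trailing commas) before retrying json.loads. A standalone sketch of that lenient-parse approach, with a hypothetical function name and intentionally naive regexes mirroring the ones shown:

```python
import json
import re


def parse_lenient_json(content: str):
    """Best-effort parse of JSON-like text with comments, single quotes,
    unquoted keys, and trailing commas."""
    content = re.sub(r"//.*?$", "", content, flags=re.MULTILINE)  # line comments
    content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)  # block comments
    content = re.sub(r"'([^']*)':", r'"\1":', content)            # single-quoted keys
    content = re.sub(r":\s*'([^']*)'", r': "\1"', content)        # single-quoted values
    content = re.sub(r"(\w+):", r'"\1":', content)                # unquoted keys
    content = re.sub(r",\s*}", "}", content)                      # trailing commas in objects
    content = re.sub(r",\s*]", "]", content)                      # trailing commas in arrays
    return json.loads(content)


print(parse_lenient_json("{name: 'demo', // comment\n 'tags': [1, 2,], }"))
```
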
@@ -875,7 +871,7 @@ class ErrorHandlingStrategy(IConfigStrategy):

  def _categorize_error(self, error: Exception) -> ErrorCategory:
  """Categorize the error type"""
- error_type = type(error)
+ type(error)

  # File I/O errors
  if isinstance(error, (FileNotFoundError, PermissionError, IOError, OSError)):
@@ -198,8 +198,7 @@ class StructuredFileLoader(BaseFileLoader):
  # Remove single-line comments
  content = re.sub(r"//.*?$", "", content, flags=re.MULTILINE)
  # Remove multi-line comments
- content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)
- return content
+ return re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)

  def _recover_json(self, content: str) -> Dict[str, Any]:
  """Attempt to recover from malformed JSON"""
@@ -409,7 +408,7 @@ class EnvironmentFileLoader(BaseFileLoader):
  pass

  # JSON array or object
- if value.startswith("[") or value.startswith("{"):
+ if value.startswith(("[", "{")):
  try:
  return json.loads(value)
  except:
@@ -492,10 +491,7 @@ class ProgrammaticFileLoader(BaseFileLoader):
  config = module.CONFIG
  elif hasattr(module, "config"):
  # config dict or function
- if callable(module.config):
- config = module.config()
- else:
- config = module.config
+ config = module.config() if callable(module.config) else module.config
  elif hasattr(module, "get_config"):
  # get_config function
  config = module.get_config()
@@ -594,7 +590,7 @@ class LegacyFileLoader(BaseFileLoader):
  line = line.strip()

  # Skip comments and empty lines
- if not line or line.startswith("#") or line.startswith("!"):
+ if not line or line.startswith(("#", "!")):
  continue

  # Handle line continuation
@@ -765,7 +761,7 @@ class CompositeFileLoader(BaseFileLoader):
  def _load_single(self, context: FileLoadContext) -> Dict[str, Any]:
  """Load single configuration file"""
  # Find appropriate loader
- for loader_type, loader in self.loaders.items():
+ for _loader_type, loader in self.loaders.items():
  if loader.supports(context.format):
  return loader.load(context)

@@ -273,7 +273,7 @@ class UnifiedConfigService:
  """Get configuration value by key with context awareness"""
  # Check specific context first
  if context:
- for cache_key, config in self._contexts[context].items():
+ for _cache_key, config in self._contexts[context].items():
  if key in config:
  return config[key]

@@ -579,9 +579,7 @@ class UnifiedConfigService:
  return False
  if not self._validate_pattern(config, schema):
  return False
- if not self._validate_enum(config, schema):
- return False
- return True
+ return self._validate_enum(config, schema)

  def _validate_dependency(self, config: Dict[str, Any], schema: Dict) -> bool:
  """Validate field dependencies"""
@@ -73,7 +73,7 @@ class BaseValidator(ABC):
  """Perform validation"""

  def _create_result(
- self, valid: bool, message: str = None, severity: str = "error"
+ self, valid: bool, message: Optional[str] = None, severity: str = "error"
  ) -> ValidationResult:
  """Create validation result"""
  result = ValidationResult(valid=valid)
@@ -46,22 +46,22 @@ from .utils import (
  from .vercel import VercelDeploymentStrategy

  __all__ = [
- # Base classes
- "DeploymentStrategy",
+ "AWSDeploymentStrategy",
  "DeploymentContext",
  "DeploymentResult",
+ # Base classes
+ "DeploymentStrategy",
+ "DockerDeploymentStrategy",
+ "GitDeploymentStrategy",
  # Strategy implementations
  "LocalDeploymentStrategy",
- "VercelDeploymentStrategy",
  "RailwayDeploymentStrategy",
- "AWSDeploymentStrategy",
- "DockerDeploymentStrategy",
- "GitDeploymentStrategy",
+ "VercelDeploymentStrategy",
+ "prepare_deployment_artifact",
+ "rollback_deployment",
  # Utilities
  "validate_deployment_config",
- "prepare_deployment_artifact",
  "verify_deployment_health",
- "rollback_deployment",
  ]

  # Strategy registry for automatic discovery
@@ -466,10 +466,7 @@ class LocalDeploymentStrategy(DeploymentStrategy):
  deployed = []

  for artifact in artifacts:
- if target_path.is_dir():
- dest = target_path / artifact.name
- else:
- dest = target_path
+ dest = target_path / artifact.name if target_path.is_dir() else target_path

  shutil.copy2(artifact, dest)
  deployed.append(dest)
@@ -556,7 +553,7 @@
  ) -> bool:
  """Verify template deployment."""
  # Check that template variables were replaced
- variables = context.config.get("variables", {})
+ context.config.get("variables", {})

  if deployed_path.is_file():
  content = deployed_path.read_text()
@@ -76,9 +76,8 @@ def validate_deployment_config(config: Dict[str, Any]) -> List[str]:
  errors.append(f"Source does not exist: {source_path}")

  # Version format validation
- if "version" in config:
- if not validate_version_format(config["version"]):
- errors.append(f"Invalid version format: {config['version']}")
+ if "version" in config and not validate_version_format(config["version"]):
+ errors.append(f"Invalid version format: {config['version']}")

  # Environment variables validation
  if "env" in config:
@@ -267,24 +266,21 @@ def verify_deployment_health(

  try:
  # Existence check
- if "existence" in checks:
- if "deployed_path" in deployment_info:
- path = Path(deployment_info["deployed_path"])
- health["checks"]["exists"] = path.exists()
+ if "existence" in checks and "deployed_path" in deployment_info:
+ path = Path(deployment_info["deployed_path"])
+ health["checks"]["exists"] = path.exists()

  # Accessibility check
- if "accessibility" in checks:
- if "deployment_url" in deployment_info:
- health["checks"]["accessible"] = check_url_accessibility(
- deployment_info["deployment_url"]
- )
+ if "accessibility" in checks and "deployment_url" in deployment_info:
+ health["checks"]["accessible"] = check_url_accessibility(
+ deployment_info["deployment_url"]
+ )

  # Integrity check
- if "integrity" in checks:
- if "checksum" in deployment_info:
- health["checks"]["integrity"] = verify_checksum(
- deployment_info.get("deployed_path"), deployment_info["checksum"]
- )
+ if "integrity" in checks and "checksum" in deployment_info:
+ health["checks"]["integrity"] = verify_checksum(
+ deployment_info.get("deployed_path"), deployment_info["checksum"]
+ )

  # Service-specific checks
  if deployment_type == "docker":
@@ -88,9 +88,8 @@ class VercelDeploymentStrategy(DeploymentStrategy):
  config = context.config

  # Check project name
- if not config.get("project_name"):
- if not vercel_json.exists():
- errors.append("Project name required when vercel.json is missing")
+ if not config.get("project_name") and not vercel_json.exists():
+ errors.append("Project name required when vercel.json is missing")

  # Validate environment variables
  env_vars = config.get("env", {})
@@ -275,7 +274,7 @@ class VercelDeploymentStrategy(DeploymentStrategy):
  if context.config.get("token"):
  cmd.extend(["--token", context.config["token"]])

- result = subprocess.run(
+ subprocess.run(
  cmd,
  capture_output=True,
  text=True,
@@ -359,7 +358,7 @@ class VercelDeploymentStrategy(DeploymentStrategy):
  def _check_vercel_cli(self) -> bool:
  """Check if Vercel CLI is installed."""
  try:
- result = subprocess.run(
+ subprocess.run(
  ["vercel", "--version"],
  capture_output=True,
  text=True,
@@ -372,7 +371,7 @@ class VercelDeploymentStrategy(DeploymentStrategy):
  def _check_vercel_auth(self) -> bool:
  """Check if authenticated with Vercel."""
  try:
- result = subprocess.run(
+ subprocess.run(
  ["vercel", "whoami"],
  capture_output=True,
  text=True,
@@ -455,7 +455,7 @@ class UnifiedAnalyzer(IAnalyzerService, IUnifiedService):
  return options["type"]

  # Infer from target type
- if isinstance(target, Path) or isinstance(target, str):
+ if isinstance(target, (Path, str)):
  path = Path(target)
  if path.is_file():
  # Determine by file extension
@@ -339,12 +339,9 @@ def run_command_safe(
  CompletedProcess result
  """
  try:
- if isinstance(command, str):
- shell = True
- else:
- shell = False
+ shell = bool(isinstance(command, str))

- result = subprocess.run(
+ return subprocess.run(
  command,
  shell=shell,
  cwd=cwd,
@@ -354,7 +351,6 @@
  timeout=timeout,
  env=env,
  )
- return result
  except subprocess.TimeoutExpired:
  logger.error(f"Command timed out: {command}")
  raise
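
Note: the run_command_safe hunks above return the subprocess result directly and pick shell mode from the command's type. A standalone sketch of that wrapper shape; the parameter defaults and the extra subprocess.run arguments here are assumptions, not the package's exact signature:

```python
import logging
import subprocess
from typing import Dict, List, Optional, Union

logger = logging.getLogger(__name__)


def run_command_safe(
    command: Union[str, List[str]],
    cwd: Optional[str] = None,
    timeout: Optional[float] = 60,
    env: Optional[Dict[str, str]] = None,
) -> subprocess.CompletedProcess:
    """Run a command, using shell mode only when a plain string is given."""
    try:
        shell = isinstance(command, str)  # a string command needs the shell to split it
        return subprocess.run(
            command,
            shell=shell,
            cwd=cwd,
            capture_output=True,
            text=True,
            check=False,
            timeout=timeout,
            env=env,
        )
    except subprocess.TimeoutExpired:
        logger.error(f"Command timed out: {command}")
        raise


print(run_command_safe(["echo", "hello"]).stdout.strip())
```
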
@@ -515,7 +511,7 @@ def import_from_string(import_path: str, fallback: Any = None) -> Any:
  # ==============================================================================


- def deprecated(replacement: str = None):
+ def deprecated(replacement: Optional[str] = None):
  """
  Decorator to mark functions as deprecated.

@@ -58,26 +58,23 @@ class DatabaseConnector:
  # Check MySQL drivers
  for package_name, import_name in self.MYSQL_DRIVERS:
  driver = self._try_import(import_name)
- if driver:
- if "mysql" not in self.available_drivers:
- self.available_drivers["mysql"] = (package_name, driver)
- logger.info(f"MySQL driver available: {package_name}")
+ if driver and "mysql" not in self.available_drivers:
+ self.available_drivers["mysql"] = (package_name, driver)
+ logger.info(f"MySQL driver available: {package_name}")

  # Check PostgreSQL drivers
  for package_name, import_name in self.POSTGRESQL_DRIVERS:
  driver = self._try_import(import_name)
- if driver:
- if "postgresql" not in self.available_drivers:
- self.available_drivers["postgresql"] = (package_name, driver)
- logger.info(f"PostgreSQL driver available: {package_name}")
+ if driver and "postgresql" not in self.available_drivers:
+ self.available_drivers["postgresql"] = (package_name, driver)
+ logger.info(f"PostgreSQL driver available: {package_name}")

  # Check Oracle drivers
  for package_name, import_name in self.ORACLE_DRIVERS:
  driver = self._try_import(import_name)
- if driver:
- if "oracle" not in self.available_drivers:
- self.available_drivers["oracle"] = (package_name, driver)
- logger.info(f"Oracle driver available: {package_name}")
+ if driver and "oracle" not in self.available_drivers:
+ self.available_drivers["oracle"] = (package_name, driver)
+ logger.info(f"Oracle driver available: {package_name}")

  def _try_import(self, module_name: str) -> Optional[Any]:
  """
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: claude-mpm
- Version: 4.5.0
+ Version: 4.5.5
  Summary: Claude Multi-Agent Project Manager - Orchestrate Claude with agent delegation and ticket tracking
  Author-email: Bob Matsuoka <bob@matsuoka.com>
  Maintainer: Claude MPM Team
@@ -54,7 +54,7 @@ Requires-Dist: mcp>=0.1.0; extra == "mcp"
  Requires-Dist: mcp-vector-search>=0.1.0; extra == "mcp"
  Requires-Dist: mcp-browser>=0.1.0; extra == "mcp"
  Requires-Dist: mcp-ticketer>=0.1.0; extra == "mcp"
- Requires-Dist: kuzu-memory>=1.1.1; extra == "mcp"
+ Requires-Dist: kuzu-memory>=1.1.5; extra == "mcp"
  Provides-Extra: dev
  Requires-Dist: pytest>=7.0; extra == "dev"
  Requires-Dist: pytest-asyncio; extra == "dev"