claude-mpm 4.5.11__py3-none-any.whl → 4.5.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of claude-mpm might be problematic.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/frontmatter_validator.py +4 -4
- claude_mpm/cli/commands/agent_manager.py +3 -3
- claude_mpm/cli/commands/agents.py +6 -6
- claude_mpm/cli/commands/aggregate.py +4 -4
- claude_mpm/cli/commands/analyze.py +2 -2
- claude_mpm/cli/commands/analyze_code.py +1 -1
- claude_mpm/cli/commands/cleanup.py +3 -3
- claude_mpm/cli/commands/config.py +2 -2
- claude_mpm/cli/commands/configure.py +14 -14
- claude_mpm/cli/commands/dashboard.py +1 -1
- claude_mpm/cli/commands/debug.py +3 -3
- claude_mpm/cli/commands/doctor.py +1 -1
- claude_mpm/cli/commands/mcp.py +7 -7
- claude_mpm/cli/commands/mcp_command_router.py +1 -1
- claude_mpm/cli/commands/mcp_config.py +2 -2
- claude_mpm/cli/commands/mcp_external_commands.py +2 -2
- claude_mpm/cli/commands/mcp_install_commands.py +3 -3
- claude_mpm/cli/commands/mcp_pipx_config.py +2 -2
- claude_mpm/cli/commands/mcp_setup_external.py +3 -3
- claude_mpm/cli/commands/monitor.py +1 -1
- claude_mpm/cli/commands/mpm_init_handler.py +1 -1
- claude_mpm/cli/interactive/agent_wizard.py +1 -1
- claude_mpm/cli/parsers/search_parser.py +1 -1
- claude_mpm/cli/shared/argument_patterns.py +2 -2
- claude_mpm/cli/shared/base_command.py +1 -1
- claude_mpm/cli/startup_logging.py +4 -4
- claude_mpm/config/experimental_features.py +4 -4
- claude_mpm/config/socketio_config.py +2 -2
- claude_mpm/core/agent_session_manager.py +2 -2
- claude_mpm/core/api_validator.py +3 -3
- claude_mpm/core/base_service.py +10 -1
- claude_mpm/core/cache.py +2 -2
- claude_mpm/core/config.py +4 -4
- claude_mpm/core/config_aliases.py +4 -4
- claude_mpm/core/config_constants.py +1 -1
- claude_mpm/core/error_handler.py +1 -1
- claude_mpm/core/file_utils.py +5 -5
- claude_mpm/core/framework/formatters/capability_generator.py +5 -5
- claude_mpm/core/framework/loaders/agent_loader.py +1 -1
- claude_mpm/core/framework/processors/metadata_processor.py +1 -1
- claude_mpm/core/framework/processors/template_processor.py +3 -3
- claude_mpm/core/framework_loader.py +2 -2
- claude_mpm/core/log_manager.py +4 -4
- claude_mpm/core/logger.py +2 -2
- claude_mpm/core/optimized_startup.py +1 -1
- claude_mpm/core/output_style_manager.py +1 -1
- claude_mpm/core/service_registry.py +2 -2
- claude_mpm/core/session_manager.py +3 -3
- claude_mpm/core/shared/config_loader.py +1 -1
- claude_mpm/core/socketio_pool.py +2 -2
- claude_mpm/core/unified_agent_registry.py +2 -2
- claude_mpm/core/unified_config.py +6 -6
- claude_mpm/core/unified_paths.py +2 -2
- claude_mpm/dashboard/api/simple_directory.py +1 -1
- claude_mpm/generators/agent_profile_generator.py +1 -1
- claude_mpm/hooks/claude_hooks/event_handlers.py +2 -2
- claude_mpm/hooks/claude_hooks/installer.py +9 -9
- claude_mpm/hooks/claude_hooks/services/connection_manager_http.py +7 -2
- claude_mpm/hooks/claude_hooks/tool_analysis.py +2 -2
- claude_mpm/hooks/memory_integration_hook.py +1 -1
- claude_mpm/hooks/validation_hooks.py +1 -1
- claude_mpm/init.py +4 -4
- claude_mpm/models/agent_session.py +1 -1
- claude_mpm/scripts/socketio_daemon.py +5 -5
- claude_mpm/services/__init__.py +2 -2
- claude_mpm/services/agent_capabilities_service.py +1 -1
- claude_mpm/services/agents/agent_builder.py +4 -4
- claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +1 -1
- claude_mpm/services/agents/deployment/agent_metrics_collector.py +1 -1
- claude_mpm/services/agents/deployment/agent_record_service.py +3 -3
- claude_mpm/services/agents/deployment/deployment_wrapper.py +1 -1
- claude_mpm/services/agents/deployment/pipeline/steps/target_directory_step.py +2 -2
- claude_mpm/services/agents/loading/agent_profile_loader.py +2 -2
- claude_mpm/services/agents/local_template_manager.py +5 -5
- claude_mpm/services/agents/registry/deployed_agent_discovery.py +1 -1
- claude_mpm/services/agents/registry/modification_tracker.py +19 -11
- claude_mpm/services/async_session_logger.py +1 -1
- claude_mpm/services/claude_session_logger.py +1 -1
- claude_mpm/services/cli/agent_listing_service.py +3 -3
- claude_mpm/services/cli/agent_validation_service.py +1 -1
- claude_mpm/services/cli/session_manager.py +2 -2
- claude_mpm/services/core/path_resolver.py +1 -1
- claude_mpm/services/diagnostics/checks/agent_check.py +1 -1
- claude_mpm/services/diagnostics/checks/claude_code_check.py +2 -2
- claude_mpm/services/diagnostics/checks/common_issues_check.py +3 -3
- claude_mpm/services/diagnostics/checks/configuration_check.py +2 -2
- claude_mpm/services/diagnostics/checks/installation_check.py +1 -1
- claude_mpm/services/diagnostics/checks/mcp_check.py +1 -1
- claude_mpm/services/diagnostics/checks/mcp_services_check.py +9 -9
- claude_mpm/services/diagnostics/checks/monitor_check.py +1 -1
- claude_mpm/services/diagnostics/doctor_reporter.py +1 -1
- claude_mpm/services/event_aggregator.py +1 -1
- claude_mpm/services/event_bus/event_bus.py +7 -2
- claude_mpm/services/events/consumers/dead_letter.py +2 -2
- claude_mpm/services/framework_claude_md_generator/__init__.py +1 -1
- claude_mpm/services/framework_claude_md_generator/deployment_manager.py +3 -3
- claude_mpm/services/framework_claude_md_generator/version_manager.py +1 -1
- claude_mpm/services/hook_installer_service.py +7 -7
- claude_mpm/services/infrastructure/context_preservation.py +7 -7
- claude_mpm/services/infrastructure/daemon_manager.py +5 -5
- claude_mpm/services/mcp_config_manager.py +10 -10
- claude_mpm/services/mcp_gateway/auto_configure.py +5 -5
- claude_mpm/services/mcp_gateway/config/config_loader.py +2 -2
- claude_mpm/services/mcp_gateway/config/configuration.py +3 -3
- claude_mpm/services/mcp_gateway/core/process_pool.py +3 -3
- claude_mpm/services/mcp_gateway/core/singleton_manager.py +2 -2
- claude_mpm/services/mcp_gateway/core/startup_verification.py +1 -1
- claude_mpm/services/mcp_gateway/main.py +1 -1
- claude_mpm/services/mcp_gateway/registry/service_registry.py +4 -2
- claude_mpm/services/mcp_gateway/registry/tool_registry.py +2 -1
- claude_mpm/services/mcp_gateway/server/stdio_handler.py +1 -1
- claude_mpm/services/mcp_gateway/tools/document_summarizer.py +1 -1
- claude_mpm/services/mcp_gateway/tools/health_check_tool.py +1 -1
- claude_mpm/services/mcp_gateway/tools/hello_world.py +1 -1
- claude_mpm/services/mcp_gateway/utils/package_version_checker.py +5 -5
- claude_mpm/services/mcp_gateway/utils/update_preferences.py +2 -2
- claude_mpm/services/mcp_service_verifier.py +1 -1
- claude_mpm/services/memory/builder.py +1 -1
- claude_mpm/services/memory/cache/shared_prompt_cache.py +2 -1
- claude_mpm/services/memory/indexed_memory.py +3 -3
- claude_mpm/services/monitor/daemon.py +1 -1
- claude_mpm/services/monitor/daemon_manager.py +9 -9
- claude_mpm/services/monitor/handlers/file.py +1 -1
- claude_mpm/services/monitor/handlers/hooks.py +3 -3
- claude_mpm/services/monitor/management/lifecycle.py +7 -7
- claude_mpm/services/monitor/server.py +2 -2
- claude_mpm/services/orphan_detection.py +13 -16
- claude_mpm/services/port_manager.py +2 -2
- claude_mpm/services/project/analyzer.py +3 -3
- claude_mpm/services/project/archive_manager.py +13 -13
- claude_mpm/services/project/dependency_analyzer.py +4 -4
- claude_mpm/services/project/documentation_manager.py +4 -4
- claude_mpm/services/project/enhanced_analyzer.py +8 -8
- claude_mpm/services/project/registry.py +4 -4
- claude_mpm/services/project_port_allocator.py +7 -11
- claude_mpm/services/session_management_service.py +1 -1
- claude_mpm/services/socketio/event_normalizer.py +1 -1
- claude_mpm/services/socketio/handlers/code_analysis.py +14 -12
- claude_mpm/services/socketio/handlers/file.py +1 -1
- claude_mpm/services/socketio/migration_utils.py +1 -1
- claude_mpm/services/socketio/server/core.py +1 -1
- claude_mpm/services/unified/analyzer_strategies/code_analyzer.py +1 -1
- claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +4 -4
- claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +1 -1
- claude_mpm/services/unified/config_strategies/config_schema.py +4 -4
- claude_mpm/services/unified/config_strategies/context_strategy.py +6 -6
- claude_mpm/services/unified/config_strategies/error_handling_strategy.py +10 -10
- claude_mpm/services/unified/config_strategies/file_loader_strategy.py +5 -5
- claude_mpm/services/unified/config_strategies/unified_config_service.py +8 -8
- claude_mpm/services/unified/config_strategies/validation_strategy.py +15 -15
- claude_mpm/services/unified/deployment_strategies/base.py +4 -4
- claude_mpm/services/unified/deployment_strategies/cloud_strategies.py +15 -15
- claude_mpm/services/unified/deployment_strategies/local.py +9 -9
- claude_mpm/services/unified/deployment_strategies/utils.py +9 -9
- claude_mpm/services/unified/deployment_strategies/vercel.py +7 -7
- claude_mpm/services/unified/unified_config.py +5 -5
- claude_mpm/services/unified/unified_deployment.py +2 -2
- claude_mpm/services/utility_service.py +1 -1
- claude_mpm/services/version_control/conflict_resolution.py +2 -2
- claude_mpm/services/version_control/git_operations.py +3 -3
- claude_mpm/services/version_control/semantic_versioning.py +13 -13
- claude_mpm/services/version_control/version_parser.py +1 -1
- claude_mpm/storage/state_storage.py +12 -13
- claude_mpm/tools/code_tree_analyzer.py +5 -5
- claude_mpm/tools/code_tree_builder.py +4 -4
- claude_mpm/tools/socketio_debug.py +1 -1
- claude_mpm/utils/agent_dependency_loader.py +4 -4
- claude_mpm/utils/common.py +2 -2
- claude_mpm/utils/config_manager.py +3 -3
- claude_mpm/utils/dependency_cache.py +2 -2
- claude_mpm/utils/dependency_strategies.py +6 -6
- claude_mpm/utils/file_utils.py +11 -11
- claude_mpm/utils/log_cleanup.py +1 -1
- claude_mpm/utils/path_operations.py +1 -1
- claude_mpm/validation/agent_validator.py +2 -2
- claude_mpm/validation/frontmatter_validator.py +1 -1
- {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/METADATA +1 -1
- {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/RECORD +183 -183
- {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/WHEEL +0 -0
- {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/top_level.txt +0 -0
@@ -279,7 +279,7 @@ class DependencyAnalyzerService:
         package_json_path = self.working_directory / "package.json"
         if package_json_path.exists():
             try:
-                with open(
+                with package_json_path.open() as f:
                     package_data = json.load(f)
             except Exception as e:
                 self.logger.warning(f"Error reading package.json: {e}")
@@ -324,7 +324,7 @@ class DependencyAnalyzerService:
     ) -> None:
         """Parse package.json for dependencies."""
         try:
-            with open(
+            with path.open() as f:
                 data = json.load(f)

                 # Production dependencies
@@ -382,7 +382,7 @@ class DependencyAnalyzerService:
         except ImportError:
             return

-        with open(
+        with path.open("rb") as f:
             data = tomllib.load(f)

             # PEP 621 dependencies
@@ -433,7 +433,7 @@ class DependencyAnalyzerService:
         except ImportError:
             return

-        with open(
+        with path.open("rb") as f:
             data = tomllib.load(f)

             # Production dependencies
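The DependencyAnalyzerService hunks above all make the same substitution: the builtin `open(...)` call (truncated to `with open(` in this view) is replaced by the `.open()` method of the `Path` object already in hand. A minimal sketch of the pattern, with a hypothetical helper name and paths rather than code from the package:

    import json
    from pathlib import Path

    def load_package_json(working_directory: Path) -> dict:
        """Read package.json via Path.open(), mirroring the hunks above."""
        package_json_path = working_directory / "package.json"
        if not package_json_path.exists():
            return {}
        try:
            # Path.open() keeps file access tied to the Path object; pathlib-oriented
            # lint rules (flake8-use-pathlib / Ruff PTH123) flag the builtin open() here.
            with package_json_path.open() as f:
                return json.load(f)
        except Exception:
            return {}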
@@ -19,7 +19,7 @@ Created: 2025-01-26
 import difflib
 import hashlib
 import re
-from datetime import datetime
+from datetime import datetime, timezone
 from pathlib import Path
 from typing import Dict, List, Tuple

@@ -384,7 +384,7 @@ class DocumentationManager:

     def _add_metadata(self, content: str) -> str:
         """Add metadata to the document."""
-        timestamp = datetime.now().isoformat()
+        timestamp = datetime.now(timezone.utc).isoformat()

         # Check if meta section exists
         if "## 📝 Meta:" not in content and "## Meta:" not in content:
@@ -417,7 +417,7 @@ class DocumentationManager:
     def generate_update_report(self, old_content: str, new_content: str) -> Dict:
         """Generate a report of changes between old and new content."""
         report = {
-            "timestamp": datetime.now().isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
             "changes": [],
             "additions": [],
             "deletions": [],
@@ -547,7 +547,7 @@ class DocumentationManager:

 ## 📝 Meta: Maintaining This Document

-- **Last Updated**: {datetime.now().isoformat()}
+- **Last Updated**: {datetime.now(timezone.utc).isoformat()}
 - **Created By**: Claude MPM /mpm-init
 - **Update Frequency**: As needed when requirements change
 """
@@ -17,7 +17,7 @@ Created: 2025-01-26
 """

 import subprocess
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Dict, List, Optional

@@ -74,7 +74,7 @@ class EnhancedProjectAnalyzer:

     def _get_recent_commits(self, days: int) -> List[Dict]:
         """Get recent commits within specified days."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d")

         # Get commit log with structured format
         output = self._run_git_command(
@@ -107,7 +107,7 @@ class EnhancedProjectAnalyzer:

     def _get_changed_files(self, days: int) -> Dict:
         """Get files changed in recent commits."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d")

         output = self._run_git_command(
             [
@@ -138,7 +138,7 @@ class EnhancedProjectAnalyzer:

     def _get_recently_added_files(self, days: int) -> List[str]:
         """Get files added in recent commits."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d")

         output = self._run_git_command(
             [
@@ -162,7 +162,7 @@ class EnhancedProjectAnalyzer:

     def _get_author_stats(self, days: int) -> Dict:
         """Get author contribution statistics."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d")

         output = self._run_git_command(
             [
@@ -228,7 +228,7 @@ class EnhancedProjectAnalyzer:

     def _get_documentation_changes(self, days: int) -> Dict:
         """Track changes to documentation files."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d")

         # Get changes to documentation files
         doc_patterns = ["*.md", "*.rst", "*.txt", "docs/*", "README*", "CLAUDE*"]
@@ -448,7 +448,7 @@ class EnhancedProjectAnalyzer:
             )
             if first_commit:
                 age_days = (
-                    datetime.now() - datetime.fromtimestamp(int(first_commit))
+                    datetime.now(timezone.utc) - datetime.fromtimestamp(int(first_commit))
                 ).days
                 indicators.append(f"{age_days} days old")

@@ -470,7 +470,7 @@ class EnhancedProjectAnalyzer:
         """Generate comprehensive project analysis report."""
         report = {
             "project_path": str(self.project_path),
-            "timestamp": datetime.now().isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
         }

         # Basic project info
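The DocumentationManager and EnhancedProjectAnalyzer hunks swap naive `datetime.now()` calls for timezone-aware `datetime.now(timezone.utc)`. A short standalone illustration of why that matters (example output invented):

    from datetime import datetime, timedelta, timezone

    naive = datetime.now()              # no tzinfo: comparisons and isoformat() are host-dependent
    aware = datetime.now(timezone.utc)  # explicit UTC: unambiguous to store, compare, and serialize

    print(aware.isoformat())            # e.g. 2025-01-26T12:00:00+00:00
    # date string of the form the git helpers above receive
    since_date = (datetime.now(timezone.utc) - timedelta(days=7)).strftime("%Y-%m-%d")

Mixing the two styles raises `TypeError` on comparison, which is presumably why the change is applied across every call site in a file rather than one at a time.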
@@ -69,7 +69,7 @@ class ProjectRegistry:
             self.registry_dir.mkdir(parents=True, exist_ok=True)
         except Exception as e:
             self.logger.error(f"Failed to create registry directory: {e}")
-            raise ProjectRegistryError(f"Cannot create registry directory: {e}")
+            raise ProjectRegistryError(f"Cannot create registry directory: {e}") from e

     def get_or_create_project_entry(self) -> Dict[str, Any]:
         """
@@ -104,7 +104,7 @@ class ProjectRegistry:

         except Exception as e:
             self.logger.error(f"Failed to get or create project entry: {e}")
-            raise ProjectRegistryError(f"Registry operation failed: {e}")
+            raise ProjectRegistryError(f"Registry operation failed: {e}") from e

     def _find_existing_entry(self) -> Optional[Dict[str, Any]]:
         """
@@ -491,7 +491,7 @@ class ProjectRegistry:
             # Remove internal fields before saving
             save_data = {k: v for k, v in data.items() if not k.startswith("_")}

-            with open(
+            with registry_file.open("w", encoding="utf-8") as f:
                 yaml.dump(
                     save_data, f, default_flow_style=False, sort_keys=False, indent=2
                 )
@@ -500,7 +500,7 @@ class ProjectRegistry:

         except Exception as e:
             self.logger.error(f"Failed to save registry data: {e}")
-            raise ProjectRegistryError(f"Failed to save registry: {e}")
+            raise ProjectRegistryError(f"Failed to save registry: {e}") from e

     def list_projects(self) -> List[Dict[str, Any]]:
         """
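The `from e` suffix added in the ProjectRegistry hunks marks the original exception as the direct cause (`__cause__`) rather than leaving Python's implicit "during handling of the above exception" chaining, which is what linters such as flake8-bugbear's B904 ask for. A generic sketch (only the exception class name comes from the diff):

    class ProjectRegistryError(Exception):
        """Raised when the project registry cannot be created, read, or written."""

    def ensure_registry_dir(registry_dir) -> None:
        try:
            registry_dir.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            # raise ... from e sets __cause__, keeping the underlying OSError visible
            # in the traceback of the wrapped error
            raise ProjectRegistryError(f"Cannot create registry directory: {e}") from e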
@@ -143,9 +143,8 @@ class ProjectPortAllocator(SyncBaseService):

         # Map to port range
         port_range = self.port_range_end - self.port_range_start + 1
-
+        return self.port_range_start + (hash_int % port_range)

-        return port

     def _is_port_available(self, port: int) -> bool:
         """
@@ -177,10 +176,7 @@ class ProjectPortAllocator(SyncBaseService):
         Returns:
             True if port is protected
         """
-        for start, end in self.PROTECTED_PORT_RANGES
-            if start <= port <= end:
-                return True
-        return False
+        return any(start <= port <= end for start, end in self.PROTECTED_PORT_RANGES)

     def _load_project_state(self) -> Dict[str, Any]:
         """
@@ -191,7 +187,7 @@ class ProjectPortAllocator(SyncBaseService):
         """
         try:
             if self.state_file.exists():
-                with
+                with self.state_file.open() as f:
                     return json.load(f)
         except Exception as e:
             self.log_warning(f"Failed to load project state: {e}")
@@ -208,7 +204,7 @@ class ProjectPortAllocator(SyncBaseService):
         try:
             # Write to temporary file first
             temp_file = self.state_file.with_suffix(".tmp")
-            with open(
+            with temp_file.open("w") as f:
                 json.dump(state, f, indent=2)

             # Atomic rename
@@ -227,7 +223,7 @@ class ProjectPortAllocator(SyncBaseService):
         """
         try:
             if self.global_registry_file.exists():
-                with
+                with self.global_registry_file.open() as f:
                     return json.load(f)
         except Exception as e:
             self.log_warning(f"Failed to load global registry: {e}")
@@ -247,7 +243,7 @@ class ProjectPortAllocator(SyncBaseService):

             # Write to temporary file first
             temp_file = self.global_registry_file.with_suffix(".tmp")
-            with open(
+            with temp_file.open("w") as f:
                 json.dump(registry, f, indent=2)

             # Atomic rename
@@ -507,7 +503,7 @@ class ProjectPortAllocator(SyncBaseService):
             state_file = project_path / ".claude-mpm" / self.STATE_FILE_NAME
             if state_file.exists():
                 try:
-                    with open(
+                    with state_file.open() as f:
                         project_state = json.load(f)

                         # Check if service still registered
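Two small refactors recur in the ProjectPortAllocator hunks: the protected-port test collapses an explicit loop into `any()`, and JSON state is written to a `.tmp` sibling before an atomic rename. A rough sketch of both, with illustrative range values that are not taken from the package:

    import json
    from pathlib import Path

    PROTECTED_PORT_RANGES = [(1, 1023), (8000, 8099)]  # placeholder ranges for the sketch

    def is_protected_port(port: int) -> bool:
        # any() short-circuits on the first range that contains the port
        return any(start <= port <= end for start, end in PROTECTED_PORT_RANGES)

    def save_state(state_file: Path, state: dict) -> None:
        temp_file = state_file.with_suffix(".tmp")
        with temp_file.open("w") as f:
            json.dump(state, f, indent=2)
        # Path.replace is atomic on the same filesystem, so readers never
        # observe a half-written state file
        temp_file.replace(state_file)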
@@ -187,7 +187,7 @@ class SessionManagementService(BaseService, SessionManagementInterface):
             event_data["timestamp"] = datetime.now(timezone.utc).isoformat()

             # Append to log file as JSONL
-            with open(
+            with log_file.open("a") as f:
                 f.write(json.dumps(event_data) + "\n")

         except Exception as e:
@@ -342,7 +342,7 @@ class EventNormalizer:

         return "unknown"

-    def _map_event_name(self, event_name: str) -> Tuple[str, str]:
+    def _map_event_name(self, event_name: str) -> Tuple[str, str]:
         """Map event name to (type, subtype) tuple.

         WHY: Consistent categorization helps clients filter and handle events.
@@ -41,6 +41,14 @@ class CodeAnalysisEventHandler(BaseEventHandler):
         self.logger = get_logger(__name__)
         self.analysis_runner = None
         self.code_analyzer = None  # For lazy loading operations
+        self._emit_tasks: set = set()  # Track emit tasks to prevent GC
+
+    def _create_emit_task(self, coro):
+        """Create a tracked emit task to prevent garbage collection."""
+        task = asyncio.get_event_loop().create_task(coro)
+        self._emit_tasks.add(task)
+        task.add_done_callback(self._emit_tasks.discard)
+        return task

     def initialize(self):
         """Initialize the analysis runner."""
@@ -246,16 +254,14 @@ class CodeAnalysisEventHandler(BaseEventHandler):
         # Special handling for 'info' events - they should be passed through directly
         if event_type == "info":
             # INFO events for granular tracking
-
-            loop.create_task(
+            self._create_emit_task(
                 self.server.core.sio.emit(
                     "info", {"request_id": request_id, **event_data}
                 )
             )
         else:
             # Regular code analysis events
-
-            loop.create_task(
+            self._create_emit_task(
                 self.server.core.sio.emit(
                     event_type, {"request_id": request_id, **event_data}
                 )
@@ -437,16 +443,14 @@ class CodeAnalysisEventHandler(BaseEventHandler):
         # Special handling for 'info' events - they should be passed through directly
         if event_type == "info":
             # INFO events for granular tracking
-
-            loop.create_task(
+            self._create_emit_task(
                 self.server.core.sio.emit(
                     "info", {"request_id": request_id, **event_data}
                 )
             )
         else:
             # Regular code analysis events
-
-            loop.create_task(
+            self._create_emit_task(
                 self.server.core.sio.emit(
                     event_type, {"request_id": request_id, **event_data}
                 )
@@ -609,16 +613,14 @@ class CodeAnalysisEventHandler(BaseEventHandler):
         # Special handling for 'info' events - they should be passed through directly
         if event_type == "info":
             # INFO events for granular tracking
-
-            loop.create_task(
+            self._create_emit_task(
                 self.server.core.sio.emit(
                     "info", {"request_id": request_id, **event_data}
                 )
             )
         else:
             # Regular code analysis events
-
-            loop.create_task(
+            self._create_emit_task(
                 self.server.core.sio.emit(
                     event_type, {"request_id": request_id, **event_data}
                 )
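The new `_emit_tasks` set and `_create_emit_task` helper address a known asyncio pitfall: the event loop keeps only weak references to tasks, so a bare `loop.create_task(sio.emit(...))` can be garbage-collected before the emit completes. A self-contained sketch of the same bookkeeping (class and coroutine names invented for the example):

    import asyncio

    class EmitTracker:
        def __init__(self) -> None:
            self._emit_tasks: set = set()  # strong references keep pending emits alive

        def create_emit_task(self, coro) -> asyncio.Task:
            task = asyncio.get_event_loop().create_task(coro)
            self._emit_tasks.add(task)
            # drop the reference as soon as the task finishes
            task.add_done_callback(self._emit_tasks.discard)
            return task

    async def main() -> None:
        tracker = EmitTracker()
        tracker.create_emit_task(asyncio.sleep(0.1))  # stand-in for sio.emit(...)
        await asyncio.sleep(0.2)

    asyncio.run(main())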
@@ -224,7 +224,7 @@ class FileEventHandler(BaseEventHandler):
         be displayed as text.
         """
         try:
-            with open(
+            with real_path.open("rb") as f:
                 binary_content = f.read()

             # Check if it's a text file by looking for common text patterns
@@ -223,7 +223,7 @@ class EventTypeMapper:
     }

     @classmethod
-    def map_event_type(cls, old_type: str) -> Tuple[str, str]:
+    def map_event_type(cls, old_type: str) -> Tuple[str, str]:
         """Map an old event type to new type/subtype.

         WHY: Provides consistent categorization for all events.
@@ -428,7 +428,7 @@ class SocketIOServerCore:
         )

         # Add file reading endpoint for source viewer
-        async def file_read_handler(request):
+        async def file_read_handler(request):
             """Handle GET /api/file/read for reading source files."""
             import os

@@ -305,7 +305,7 @@ class CodeAnalyzerStrategy(AnalyzerStrategy):
             tree = ast.parse(content)
             complexity["cyclomatic"] = self._calculate_cyclomatic_complexity(tree)
             complexity["cognitive"] = self._calculate_cognitive_complexity(tree)
-        except:
+        except (SyntaxError, ValueError):
             pass

         return complexity
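This hunk, and many of the ones that follow, replace bare `except:` clauses with explicit exception tuples, so that `KeyboardInterrupt`, `SystemExit`, and genuine programming errors are no longer silently swallowed. A minimal sketch of the pattern (helper name invented):

    import ast
    from typing import Optional

    def parse_or_none(source: str) -> Optional[ast.AST]:
        try:
            return ast.parse(source)
        except (SyntaxError, ValueError):
            # a bare "except:" would also trap KeyboardInterrupt/SystemExit and hide bugs;
            # naming the expected failures keeps everything else loud
            return None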
@@ -372,7 +372,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
     def _parse_package_json(self, path: Path) -> Dict[str, Any]:
         """Parse package.json file."""
         try:
-            with open(
+            with path.open() as f:
                 data = json.load(f)

                 return {
@@ -417,7 +417,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
             return {}

         try:
-            with open(
+            with path.open("rb") as f:
                 data = tomllib.load(f)

                 dependencies = {}
@@ -459,7 +459,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
             return {}

         try:
-            with open(
+            with path.open("rb") as f:
                 data = tomllib.load(f)

                 return {
@@ -481,7 +481,7 @@ class DependencyAnalyzerStrategy(AnalyzerStrategy):
             return {}

         try:
-            with open(
+            with path.open("rb") as f:
                 data = tomllib.load(f)

                 return {
@@ -471,7 +471,7 @@ class SchemaValidator:
         try:
             datetime.strptime(value, format)
             return True
-        except:
+        except (ValueError, TypeError):
             return False

     def _validate_uuid(self, value: str) -> bool:
@@ -481,7 +481,7 @@ class SchemaValidator:
         try:
             uuid.UUID(value)
             return True
-        except:
+        except (ValueError, TypeError, AttributeError):
             return False

     def _validate_ipv4(self, value: str) -> bool:
@@ -491,7 +491,7 @@ class SchemaValidator:
         try:
             ipaddress.IPv4Address(value)
             return True
-        except:
+        except (ValueError, TypeError, ipaddress.AddressValueError):
             return False

     def _validate_ipv6(self, value: str) -> bool:
@@ -501,7 +501,7 @@ class SchemaValidator:
         try:
             ipaddress.IPv6Address(value)
             return True
-        except:
+        except (ValueError, TypeError, ipaddress.AddressValueError):
             return False

     def _validate_semver(self, value: str) -> bool:
@@ -9,7 +9,7 @@ from abc import ABC, abstractmethod
 from collections import OrderedDict
 from contextlib import contextmanager
 from dataclasses import dataclass, field
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from enum import Enum
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Set, Union
@@ -124,11 +124,11 @@ class HierarchicalContextManager(BaseContextManager):
             id=context_id,
             scope=scope,
             lifecycle=ContextLifecycle.CREATED,
-            created_at=datetime.now(),
-            updated_at=datetime.now(),
+            created_at=datetime.now(timezone.utc),
+            updated_at=datetime.now(timezone.utc),
             parent_id=parent_id,
             ttl=ttl,
-            expires_at=datetime.now() + ttl if ttl else None,
+            expires_at=datetime.now(timezone.utc) + ttl if ttl else None,
             attributes=kwargs,
         )

@@ -158,7 +158,7 @@ class HierarchicalContextManager(BaseContextManager):

         # Check expiration
         if context and context.expires_at:
-            if datetime.now() > context.expires_at:
+            if datetime.now(timezone.utc) > context.expires_at:
                 self.close_context(context_id)
                 return None

@@ -299,7 +299,7 @@ class ScopedConfigManager:

         # Update context metadata
         if context_id in self.context_manager.contexts:
-            self.context_manager.contexts[context_id].updated_at = datetime.now()
+            self.context_manager.contexts[context_id].updated_at = datetime.now(timezone.utc)

     def _get_inherited_config(self, context_id: str) -> Dict[str, Any]:
         """Get merged configuration from context hierarchy"""
@@ -206,7 +206,7 @@ class FileIOErrorHandler(BaseErrorHandler):
                 result.fallback_value = str(alt_path)
                 result.actions_taken.append(f"Using alternative location: {alt_path}")

-            except:
+            except (OSError, PermissionError):
                 result.should_escalate = True

         # Use read-only mode if applicable
@@ -293,7 +293,7 @@ class ParsingErrorHandler(BaseErrorHandler):
                 result.actions_taken.append(f"Fixed JSON with {fix_func.__name__}")
                 self.logger.info(f"Recovered from JSON error using {fix_func.__name__}")
                 return result
-            except:
+            except (json.JSONDecodeError, ValueError, TypeError):
                 continue

         # Use lenient parser if available
@@ -346,7 +346,7 @@ class ParsingErrorHandler(BaseErrorHandler):
             result.recovered = True
             result.fallback_value = parsed
             result.actions_taken.append("Parsed as Python literal")
-        except:
+        except (ValueError, SyntaxError, TypeError):
             # Return empty dict as last resort
             result.recovered = True
             result.fallback_value = {}
@@ -370,7 +370,7 @@ class ParsingErrorHandler(BaseErrorHandler):
             result.fallback_value = parsed
             result.actions_taken.append("Parsed with safe YAML loader")

-        except:
+        except (yaml.YAMLError, ValueError, AttributeError):
             # Try to fix tabs
             content = content.replace("\t", " ")
             try:
@@ -378,7 +378,7 @@ class ParsingErrorHandler(BaseErrorHandler):
                 result.recovered = True
                 result.fallback_value = parsed
                 result.actions_taken.append("Fixed YAML tabs")
-            except:
+            except (yaml.YAMLError, ValueError, AttributeError):
                 result.fallback_value = {}
                 result.actions_taken.append("Used empty configuration as fallback")

@@ -406,7 +406,7 @@ class ParsingErrorHandler(BaseErrorHandler):
                 result.fallback_value = parsed
                 result.actions_taken.append(f"Parsed as {format_name}")
                 return result
-            except:
+            except (ValueError, TypeError, AttributeError, ImportError):
                 continue

         # Use default/empty config
@@ -533,7 +533,7 @@ class ValidationErrorHandler(BaseErrorHandler):
             if isinstance(value, str):
                 return json.loads(value)
             return dict(value)
-        except:
+        except (ValueError, TypeError, json.JSONDecodeError):
             return None

     def _handle_generic_validation(
@@ -661,7 +661,7 @@ class TypeConversionErrorHandler(BaseErrorHandler):
         if converter:
             try:
                 return converter(value)
-            except:
+            except (ValueError, TypeError, AttributeError):
                 pass

         return None
@@ -706,7 +706,7 @@ class TypeConversionErrorHandler(BaseErrorHandler):
         if value.startswith("["):
             try:
                 return json.loads(value)
-            except:
+            except (json.JSONDecodeError, ValueError):
                 pass
             # Try comma-separated
             return [v.strip() for v in value.split(",")]
@@ -720,7 +720,7 @@ class TypeConversionErrorHandler(BaseErrorHandler):
         # Try JSON object
         try:
             return json.loads(value)
-        except:
+        except (json.JSONDecodeError, ValueError):
             pass
         # Try key=value pairs
         result = {}
@@ -75,7 +75,7 @@ class BaseFileLoader(ABC):
                         f"Read {path} with fallback encoding: {enc}"
                     )
                     return f.read()
-            except:
+            except (UnicodeDecodeError, OSError):
                 continue
         raise

@@ -185,8 +185,8 @@ class StructuredFileLoader(BaseFileLoader):
                 import tomli

                 return tomli.loads(content)
-            except ImportError:
-                raise ImportError("Neither toml nor tomli package is installed")
+            except ImportError as e:
+                raise ImportError("Neither toml nor tomli package is installed") from e
         except Exception as e:
             if context.strict:
                 raise
@@ -209,7 +209,7 @@ class StructuredFileLoader(BaseFileLoader):

         try:
             return json.loads(content)
-        except:
+        except (json.JSONDecodeError, ValueError):
             return {}

     def _process_includes(
@@ -411,7 +411,7 @@ class EnvironmentFileLoader(BaseFileLoader):
         if value.startswith(("[", "{")):
             try:
                 return json.loads(value)
-            except:
+            except (json.JSONDecodeError, ValueError):
                 pass

         # Comma-separated list
|