claude-mpm 4.5.11__py3-none-any.whl → 4.5.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of claude-mpm might be problematic; see the registry's advisory page for more details.

Files changed (183):
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/frontmatter_validator.py +4 -4
  3. claude_mpm/cli/commands/agent_manager.py +3 -3
  4. claude_mpm/cli/commands/agents.py +6 -6
  5. claude_mpm/cli/commands/aggregate.py +4 -4
  6. claude_mpm/cli/commands/analyze.py +2 -2
  7. claude_mpm/cli/commands/analyze_code.py +1 -1
  8. claude_mpm/cli/commands/cleanup.py +3 -3
  9. claude_mpm/cli/commands/config.py +2 -2
  10. claude_mpm/cli/commands/configure.py +14 -14
  11. claude_mpm/cli/commands/dashboard.py +1 -1
  12. claude_mpm/cli/commands/debug.py +3 -3
  13. claude_mpm/cli/commands/doctor.py +1 -1
  14. claude_mpm/cli/commands/mcp.py +7 -7
  15. claude_mpm/cli/commands/mcp_command_router.py +1 -1
  16. claude_mpm/cli/commands/mcp_config.py +2 -2
  17. claude_mpm/cli/commands/mcp_external_commands.py +2 -2
  18. claude_mpm/cli/commands/mcp_install_commands.py +3 -3
  19. claude_mpm/cli/commands/mcp_pipx_config.py +2 -2
  20. claude_mpm/cli/commands/mcp_setup_external.py +3 -3
  21. claude_mpm/cli/commands/monitor.py +1 -1
  22. claude_mpm/cli/commands/mpm_init_handler.py +1 -1
  23. claude_mpm/cli/interactive/agent_wizard.py +1 -1
  24. claude_mpm/cli/parsers/search_parser.py +1 -1
  25. claude_mpm/cli/shared/argument_patterns.py +2 -2
  26. claude_mpm/cli/shared/base_command.py +1 -1
  27. claude_mpm/cli/startup_logging.py +4 -4
  28. claude_mpm/config/experimental_features.py +4 -4
  29. claude_mpm/config/socketio_config.py +2 -2
  30. claude_mpm/core/agent_session_manager.py +2 -2
  31. claude_mpm/core/api_validator.py +3 -3
  32. claude_mpm/core/base_service.py +10 -1
  33. claude_mpm/core/cache.py +2 -2
  34. claude_mpm/core/config.py +4 -4
  35. claude_mpm/core/config_aliases.py +4 -4
  36. claude_mpm/core/config_constants.py +1 -1
  37. claude_mpm/core/error_handler.py +1 -1
  38. claude_mpm/core/file_utils.py +5 -5
  39. claude_mpm/core/framework/formatters/capability_generator.py +5 -5
  40. claude_mpm/core/framework/loaders/agent_loader.py +1 -1
  41. claude_mpm/core/framework/processors/metadata_processor.py +1 -1
  42. claude_mpm/core/framework/processors/template_processor.py +3 -3
  43. claude_mpm/core/framework_loader.py +2 -2
  44. claude_mpm/core/log_manager.py +4 -4
  45. claude_mpm/core/logger.py +2 -2
  46. claude_mpm/core/optimized_startup.py +1 -1
  47. claude_mpm/core/output_style_manager.py +1 -1
  48. claude_mpm/core/service_registry.py +2 -2
  49. claude_mpm/core/session_manager.py +3 -3
  50. claude_mpm/core/shared/config_loader.py +1 -1
  51. claude_mpm/core/socketio_pool.py +2 -2
  52. claude_mpm/core/unified_agent_registry.py +2 -2
  53. claude_mpm/core/unified_config.py +6 -6
  54. claude_mpm/core/unified_paths.py +2 -2
  55. claude_mpm/dashboard/api/simple_directory.py +1 -1
  56. claude_mpm/generators/agent_profile_generator.py +1 -1
  57. claude_mpm/hooks/claude_hooks/event_handlers.py +2 -2
  58. claude_mpm/hooks/claude_hooks/installer.py +9 -9
  59. claude_mpm/hooks/claude_hooks/services/connection_manager_http.py +7 -2
  60. claude_mpm/hooks/claude_hooks/tool_analysis.py +2 -2
  61. claude_mpm/hooks/memory_integration_hook.py +1 -1
  62. claude_mpm/hooks/validation_hooks.py +1 -1
  63. claude_mpm/init.py +4 -4
  64. claude_mpm/models/agent_session.py +1 -1
  65. claude_mpm/scripts/socketio_daemon.py +5 -5
  66. claude_mpm/services/__init__.py +2 -2
  67. claude_mpm/services/agent_capabilities_service.py +1 -1
  68. claude_mpm/services/agents/agent_builder.py +4 -4
  69. claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +1 -1
  70. claude_mpm/services/agents/deployment/agent_metrics_collector.py +1 -1
  71. claude_mpm/services/agents/deployment/agent_record_service.py +3 -3
  72. claude_mpm/services/agents/deployment/deployment_wrapper.py +1 -1
  73. claude_mpm/services/agents/deployment/pipeline/steps/target_directory_step.py +2 -2
  74. claude_mpm/services/agents/loading/agent_profile_loader.py +2 -2
  75. claude_mpm/services/agents/local_template_manager.py +5 -5
  76. claude_mpm/services/agents/registry/deployed_agent_discovery.py +1 -1
  77. claude_mpm/services/agents/registry/modification_tracker.py +19 -11
  78. claude_mpm/services/async_session_logger.py +1 -1
  79. claude_mpm/services/claude_session_logger.py +1 -1
  80. claude_mpm/services/cli/agent_listing_service.py +3 -3
  81. claude_mpm/services/cli/agent_validation_service.py +1 -1
  82. claude_mpm/services/cli/session_manager.py +2 -2
  83. claude_mpm/services/core/path_resolver.py +1 -1
  84. claude_mpm/services/diagnostics/checks/agent_check.py +1 -1
  85. claude_mpm/services/diagnostics/checks/claude_code_check.py +2 -2
  86. claude_mpm/services/diagnostics/checks/common_issues_check.py +3 -3
  87. claude_mpm/services/diagnostics/checks/configuration_check.py +2 -2
  88. claude_mpm/services/diagnostics/checks/installation_check.py +1 -1
  89. claude_mpm/services/diagnostics/checks/mcp_check.py +1 -1
  90. claude_mpm/services/diagnostics/checks/mcp_services_check.py +9 -9
  91. claude_mpm/services/diagnostics/checks/monitor_check.py +1 -1
  92. claude_mpm/services/diagnostics/doctor_reporter.py +1 -1
  93. claude_mpm/services/event_aggregator.py +1 -1
  94. claude_mpm/services/event_bus/event_bus.py +7 -2
  95. claude_mpm/services/events/consumers/dead_letter.py +2 -2
  96. claude_mpm/services/framework_claude_md_generator/__init__.py +1 -1
  97. claude_mpm/services/framework_claude_md_generator/deployment_manager.py +3 -3
  98. claude_mpm/services/framework_claude_md_generator/version_manager.py +1 -1
  99. claude_mpm/services/hook_installer_service.py +7 -7
  100. claude_mpm/services/infrastructure/context_preservation.py +7 -7
  101. claude_mpm/services/infrastructure/daemon_manager.py +5 -5
  102. claude_mpm/services/mcp_config_manager.py +10 -10
  103. claude_mpm/services/mcp_gateway/auto_configure.py +5 -5
  104. claude_mpm/services/mcp_gateway/config/config_loader.py +2 -2
  105. claude_mpm/services/mcp_gateway/config/configuration.py +3 -3
  106. claude_mpm/services/mcp_gateway/core/process_pool.py +3 -3
  107. claude_mpm/services/mcp_gateway/core/singleton_manager.py +2 -2
  108. claude_mpm/services/mcp_gateway/core/startup_verification.py +1 -1
  109. claude_mpm/services/mcp_gateway/main.py +1 -1
  110. claude_mpm/services/mcp_gateway/registry/service_registry.py +4 -2
  111. claude_mpm/services/mcp_gateway/registry/tool_registry.py +2 -1
  112. claude_mpm/services/mcp_gateway/server/stdio_handler.py +1 -1
  113. claude_mpm/services/mcp_gateway/tools/document_summarizer.py +1 -1
  114. claude_mpm/services/mcp_gateway/tools/health_check_tool.py +1 -1
  115. claude_mpm/services/mcp_gateway/tools/hello_world.py +1 -1
  116. claude_mpm/services/mcp_gateway/utils/package_version_checker.py +5 -5
  117. claude_mpm/services/mcp_gateway/utils/update_preferences.py +2 -2
  118. claude_mpm/services/mcp_service_verifier.py +1 -1
  119. claude_mpm/services/memory/builder.py +1 -1
  120. claude_mpm/services/memory/cache/shared_prompt_cache.py +2 -1
  121. claude_mpm/services/memory/indexed_memory.py +3 -3
  122. claude_mpm/services/monitor/daemon.py +1 -1
  123. claude_mpm/services/monitor/daemon_manager.py +9 -9
  124. claude_mpm/services/monitor/handlers/file.py +1 -1
  125. claude_mpm/services/monitor/handlers/hooks.py +3 -3
  126. claude_mpm/services/monitor/management/lifecycle.py +7 -7
  127. claude_mpm/services/monitor/server.py +2 -2
  128. claude_mpm/services/orphan_detection.py +13 -16
  129. claude_mpm/services/port_manager.py +2 -2
  130. claude_mpm/services/project/analyzer.py +3 -3
  131. claude_mpm/services/project/archive_manager.py +13 -13
  132. claude_mpm/services/project/dependency_analyzer.py +4 -4
  133. claude_mpm/services/project/documentation_manager.py +4 -4
  134. claude_mpm/services/project/enhanced_analyzer.py +8 -8
  135. claude_mpm/services/project/registry.py +4 -4
  136. claude_mpm/services/project_port_allocator.py +7 -11
  137. claude_mpm/services/session_management_service.py +1 -1
  138. claude_mpm/services/socketio/event_normalizer.py +1 -1
  139. claude_mpm/services/socketio/handlers/code_analysis.py +14 -12
  140. claude_mpm/services/socketio/handlers/file.py +1 -1
  141. claude_mpm/services/socketio/migration_utils.py +1 -1
  142. claude_mpm/services/socketio/server/core.py +1 -1
  143. claude_mpm/services/unified/analyzer_strategies/code_analyzer.py +1 -1
  144. claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +4 -4
  145. claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +1 -1
  146. claude_mpm/services/unified/config_strategies/config_schema.py +4 -4
  147. claude_mpm/services/unified/config_strategies/context_strategy.py +6 -6
  148. claude_mpm/services/unified/config_strategies/error_handling_strategy.py +10 -10
  149. claude_mpm/services/unified/config_strategies/file_loader_strategy.py +5 -5
  150. claude_mpm/services/unified/config_strategies/unified_config_service.py +8 -8
  151. claude_mpm/services/unified/config_strategies/validation_strategy.py +15 -15
  152. claude_mpm/services/unified/deployment_strategies/base.py +4 -4
  153. claude_mpm/services/unified/deployment_strategies/cloud_strategies.py +15 -15
  154. claude_mpm/services/unified/deployment_strategies/local.py +9 -9
  155. claude_mpm/services/unified/deployment_strategies/utils.py +9 -9
  156. claude_mpm/services/unified/deployment_strategies/vercel.py +7 -7
  157. claude_mpm/services/unified/unified_config.py +5 -5
  158. claude_mpm/services/unified/unified_deployment.py +2 -2
  159. claude_mpm/services/utility_service.py +1 -1
  160. claude_mpm/services/version_control/conflict_resolution.py +2 -2
  161. claude_mpm/services/version_control/git_operations.py +3 -3
  162. claude_mpm/services/version_control/semantic_versioning.py +13 -13
  163. claude_mpm/services/version_control/version_parser.py +1 -1
  164. claude_mpm/storage/state_storage.py +12 -13
  165. claude_mpm/tools/code_tree_analyzer.py +5 -5
  166. claude_mpm/tools/code_tree_builder.py +4 -4
  167. claude_mpm/tools/socketio_debug.py +1 -1
  168. claude_mpm/utils/agent_dependency_loader.py +4 -4
  169. claude_mpm/utils/common.py +2 -2
  170. claude_mpm/utils/config_manager.py +3 -3
  171. claude_mpm/utils/dependency_cache.py +2 -2
  172. claude_mpm/utils/dependency_strategies.py +6 -6
  173. claude_mpm/utils/file_utils.py +11 -11
  174. claude_mpm/utils/log_cleanup.py +1 -1
  175. claude_mpm/utils/path_operations.py +1 -1
  176. claude_mpm/validation/agent_validator.py +2 -2
  177. claude_mpm/validation/frontmatter_validator.py +1 -1
  178. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/METADATA +1 -1
  179. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/RECORD +183 -183
  180. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/WHEEL +0 -0
  181. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/entry_points.txt +0 -0
  182. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/licenses/LICENSE +0 -0
  183. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.12.dist-info}/top_level.txt +0 -0
@@ -63,7 +63,7 @@ except ImportError:
63
63
  try:
64
64
  from claude_mpm.services.mcp_gateway.server.mcp_gateway import MCPGateway
65
65
  except ImportError as e:
66
- raise ImportError(f"Critical: Cannot import MCPGateway server: {e}")
66
+ raise ImportError(f"Critical: Cannot import MCPGateway server: {e}") from e
67
67
 
68
68
  try:
69
69
  from claude_mpm.services.mcp_gateway.server.stdio_handler import StdioHandler
@@ -224,7 +224,8 @@ class MCPServiceRegistry(ManagerBase):
224
224
  try:
225
225
  import asyncio
226
226
 
227
- asyncio.create_task(instance.stop())
227
+ _task = asyncio.create_task(instance.stop()) # noqa: RUF006
228
+ # Fire-and-forget shutdown during unregister
228
229
  except Exception as e:
229
230
  self.logger.warning(
230
231
  f"Error stopping service {interface.__name__}: {e}"
@@ -335,7 +336,8 @@ class MCPServiceRegistry(ManagerBase):
335
336
  try:
336
337
  import asyncio
337
338
 
338
- asyncio.create_task(instance.stop())
339
+ _task = asyncio.create_task(instance.stop()) # noqa: RUF006
340
+ # Fire-and-forget shutdown during clear
339
341
  except Exception as e:
340
342
  self.logger.warning(f"Error stopping service: {e}")
341
343
 
@@ -216,7 +216,8 @@ class ToolRegistry(BaseMCPService, IMCPToolRegistry):
216
216
 
217
217
  # Shutdown adapter (outside lock to avoid deadlock)
218
218
  try:
219
- asyncio.create_task(adapter.shutdown())
219
+ _task = asyncio.create_task(adapter.shutdown()) # noqa: RUF006
220
+ # Fire-and-forget shutdown during tool unregister
220
221
  except Exception as e:
221
222
  self.log_warning(f"Error shutting down tool adapter {tool_name}: {e}")
222
223
 
@@ -139,7 +139,7 @@ class StdioHandler(BaseMCPService, IMCPCommunication):
139
139
  self._metrics["errors"] += 1
140
140
  raise
141
141
 
142
- async def receive_message(self) -> Optional[Dict[str, Any]]: # noqa: PLR0911
142
+ async def receive_message(self) -> Optional[Dict[str, Any]]:
143
143
  """
144
144
  Receive a message from the MCP client via stdin.
145
145
 
@@ -306,7 +306,7 @@ class DocumentSummarizerTool(BaseToolAdapter):
306
306
  continue
307
307
 
308
308
  # If all fail, read as binary and decode with errors='ignore'
309
- with open(file_path, "rb") as f:
309
+ with file_path.open("rb") as f:
310
310
  content = f.read()
311
311
  return content.decode("utf-8", errors="ignore")
312
312
 
@@ -383,7 +383,7 @@ class HealthCheckTool(BaseToolAdapter):
383
383
  # Try to load configuration
384
384
  import json
385
385
 
386
- with open(config_file) as f:
386
+ with config_file.open() as f:
387
387
  config_data = json.load(f)
388
388
 
389
389
  check_result["checks"]["config_valid"] = True
@@ -204,7 +204,7 @@ class HelloWorldTool(BaseToolAdapter):
204
204
  self.greeting_history: List[Dict[str, Any]] = []
205
205
  self.max_history_size = 100
206
206
 
207
- def validate_parameters(self, parameters: Dict[str, Any]) -> bool: # noqa: PLR0911
207
+ def validate_parameters(self, parameters: Dict[str, Any]) -> bool:
208
208
  """
209
209
  Enhanced parameter validation with detailed error messages.
210
210
 
@@ -8,7 +8,7 @@ Provides non-blocking version checking for MCP tools like kuzu-memory.
8
8
 
9
9
  import asyncio
10
10
  import json
11
- from datetime import datetime, timedelta
11
+ from datetime import datetime, timedelta, timezone
12
12
  from pathlib import Path
13
13
  from typing import Any, Dict, Optional
14
14
 
@@ -83,7 +83,7 @@ class PackageVersionChecker:
83
83
  "latest": latest,
84
84
  "update_available": version.parse(latest)
85
85
  > version.parse(current_version),
86
- "checked_at": datetime.now().isoformat(),
86
+ "checked_at": datetime.now(timezone.utc).isoformat(),
87
87
  }
88
88
  self._write_cache(cache_file, result)
89
89
  return result
@@ -133,12 +133,12 @@ class PackageVersionChecker:
133
133
  return None
134
134
 
135
135
  try:
136
- with open(cache_file) as f:
136
+ with cache_file.open() as f:
137
137
  data = json.load(f)
138
138
 
139
139
  # Check TTL
140
140
  checked_at = datetime.fromisoformat(data["checked_at"])
141
- if datetime.now() - checked_at < timedelta(seconds=ttl):
141
+ if datetime.now(timezone.utc) - checked_at < timedelta(seconds=ttl):
142
142
  return data
143
143
  except Exception as e:
144
144
  self.logger.debug(f"Cache read error: {e}")
@@ -154,7 +154,7 @@ class PackageVersionChecker:
154
154
  data: Data to cache
155
155
  """
156
156
  try:
157
- with open(cache_file, "w") as f:
157
+ with cache_file.open("w") as f:
158
158
  json.dump(data, f, indent=2)
159
159
  except Exception as e:
160
160
  self.logger.debug(f"Cache write failed: {e}")
@@ -36,7 +36,7 @@ class UpdatePreferences:
36
36
  """
37
37
  if cls.PREFS_FILE.exists():
38
38
  try:
39
- with open(cls.PREFS_FILE) as f:
39
+ with cls.PREFS_FILE.open() as f:
40
40
  return json.load(f)
41
41
  except (OSError, json.JSONDecodeError):
42
42
  # Return empty dict if file is corrupted or unreadable
@@ -53,7 +53,7 @@ class UpdatePreferences:
53
53
  """
54
54
  cls.PREFS_FILE.parent.mkdir(parents=True, exist_ok=True)
55
55
  try:
56
- with open(cls.PREFS_FILE, "w") as f:
56
+ with cls.PREFS_FILE.open("w") as f:
57
57
  json.dump(prefs, f, indent=2)
58
58
  except OSError:
59
59
  # Silently fail if we can't write preferences
@@ -429,7 +429,7 @@ class MCPServiceVerifier:
429
429
  return {"configured": False, "correct": False}
430
430
 
431
431
  try:
432
- with open(self.claude_config_path) as f:
432
+ with self.claude_config_path.open() as f:
433
433
  config = json.load(f)
434
434
 
435
435
  # Check if project is configured
@@ -458,7 +458,7 @@ class MemoryBuilder(LoggerMixin):
458
458
  import tomllib
459
459
  except ImportError:
460
460
  import tomli as tomllib
461
- with open(file_path, "rb") as f:
461
+ with file_path.open("rb") as f:
462
462
  config_data = tomllib.load(f)
463
463
  items = self._extract_from_toml_config(config_data, source)
464
464
  extracted_items.extend(items)
@@ -194,7 +194,8 @@ class SharedPromptCache(BaseService):
194
194
  with cls._lock:
195
195
  if cls._instance is not None:
196
196
  if cls._instance.running:
197
- asyncio.create_task(cls._instance.stop())
197
+ _task = asyncio.create_task(cls._instance.stop()) # noqa: RUF006
198
+ # Fire-and-forget cleanup task during test reset
198
199
  cls._instance = None
199
200
 
200
201
  async def _initialize(self) -> None:
@@ -213,7 +213,7 @@ class InvertedIndex:
213
213
  "doc_freqs": dict(self.doc_freqs),
214
214
  "doc_count": self.doc_count,
215
215
  }
216
- with open(path, "wb") as f:
216
+ with path.open("wb") as f:
217
217
  pickle.dump(data, f)
218
218
 
219
219
  def load(self, path: Path):
@@ -221,7 +221,7 @@ class InvertedIndex:
221
221
  if not path.exists():
222
222
  return
223
223
 
224
- with open(path, "rb") as f:
224
+ with path.open("rb") as f:
225
225
  data = pickle.load(f)
226
226
 
227
227
  self.index = defaultdict(set, {k: set(v) for k, v in data["index"].items()})
@@ -597,7 +597,7 @@ class IndexedMemoryService:
597
597
  # Load other indexes
598
598
  indexes_path = self.data_dir / "indexes.pkl"
599
599
  if indexes_path.exists():
600
- with open(indexes_path, "rb") as f:
600
+ with indexes_path.open("rb") as f:
601
601
  data = pickle.load(f)
602
602
 
603
603
  self.memories = data.get("memories", {})
@@ -121,7 +121,7 @@ class UnifiedMonitorDaemon:
121
121
  """
122
122
  return self.daemon_manager.cleanup_port_conflicts()
123
123
 
124
- def _start_daemon(self, force_restart: bool = False) -> bool: # noqa: PLR0911
124
+ def _start_daemon(self, force_restart: bool = False) -> bool:
125
125
  """Start as background daemon process.
126
126
 
127
127
  Args:
@@ -302,7 +302,7 @@ class DaemonManager:
302
302
  if not self.pid_file.exists():
303
303
  return True
304
304
 
305
- with open(self.pid_file) as f:
305
+ with self.pid_file.open() as f:
306
306
  pid = int(f.read().strip())
307
307
 
308
308
  self.logger.info(f"Found PID {pid} in PID file")
@@ -397,7 +397,7 @@ class DaemonManager:
397
397
  # First check PID file
398
398
  if self.pid_file.exists():
399
399
  try:
400
- with open(self.pid_file) as f:
400
+ with self.pid_file.open() as f:
401
401
  pid = int(f.read().strip())
402
402
 
403
403
  # Verify process exists
@@ -604,7 +604,7 @@ class DaemonManager:
604
604
  # Check if PID file was written
605
605
  if self.pid_file.exists():
606
606
  try:
607
- with open(self.pid_file) as f:
607
+ with self.pid_file.open() as f:
608
608
  written_pid = int(f.read().strip())
609
609
  if written_pid == pid:
610
610
  # PID file written correctly, check port
@@ -798,7 +798,7 @@ class DaemonManager:
798
798
  if not self.pid_file.exists():
799
799
  return None
800
800
 
801
- with open(self.pid_file) as f:
801
+ with self.pid_file.open() as f:
802
802
  return int(f.read().strip())
803
803
 
804
804
  except Exception as e:
@@ -809,7 +809,7 @@ class DaemonManager:
809
809
  """Write current PID to PID file."""
810
810
  try:
811
811
  self.pid_file.parent.mkdir(parents=True, exist_ok=True)
812
- with open(self.pid_file, "w") as f:
812
+ with self.pid_file.open("w") as f:
813
813
  f.write(str(os.getpid()))
814
814
  self.logger.debug(f"PID file written: {self.pid_file}")
815
815
  except Exception as e:
@@ -863,7 +863,7 @@ class DaemonManager:
863
863
 
864
864
  # Redirect stdout and stderr to log file
865
865
  self.log_file.parent.mkdir(parents=True, exist_ok=True)
866
- with open(self.log_file, "a") as log_out:
866
+ with self.log_file.open("a") as log_out:
867
867
  os.dup2(log_out.fileno(), sys.stdout.fileno())
868
868
  os.dup2(log_out.fileno(), sys.stderr.fileno())
869
869
 
@@ -903,7 +903,7 @@ class DaemonManager:
903
903
  continue
904
904
 
905
905
  try:
906
- with open(self.startup_status_file) as f:
906
+ with self.startup_status_file.open() as f:
907
907
  status = f.read().strip()
908
908
 
909
909
  if status == "success":
@@ -935,7 +935,7 @@ class DaemonManager:
935
935
  try:
936
936
  # Don't check if file exists - we need to write to it regardless
937
937
  # The parent created it and is waiting for us to update it
938
- with open(self.startup_status_file, "w") as f:
938
+ with self.startup_status_file.open("w") as f:
939
939
  f.write("success")
940
940
  f.flush() # Ensure it's written immediately
941
941
  os.fsync(f.fileno()) # Force write to disk
@@ -948,7 +948,7 @@ class DaemonManager:
948
948
  if self.startup_status_file:
949
949
  try:
950
950
  # Don't check if file exists - we need to write to it regardless
951
- with open(self.startup_status_file, "w") as f:
951
+ with self.startup_status_file.open("w") as f:
952
952
  f.write(f"error:{error}")
953
953
  f.flush() # Ensure it's written immediately
954
954
  os.fsync(f.fileno()) # Force write to disk
@@ -225,7 +225,7 @@ class FileHandler:
225
225
  be displayed as text.
226
226
  """
227
227
  try:
228
- with open(real_path, "rb") as f:
228
+ with real_path.open("rb") as f:
229
229
  binary_content = f.read()
230
230
 
231
231
  # Check if it's a text file by looking for common text patterns
@@ -236,10 +236,10 @@ class HookHandler:
236
236
  },
237
237
  )
238
238
 
239
- # Remove from active sessions after a delay
240
- asyncio.create_task(
239
+ # Remove from active sessions after a delay (5 minutes)
240
+ _task = asyncio.create_task( # noqa: RUF006
241
241
  self._cleanup_session(session_id, delay=300)
242
- ) # 5 minutes
242
+ ) # Fire-and-forget cleanup task
243
243
 
244
244
  self.logger.info(f"Claude Code session ended: {session_id}")
245
245
  else:
@@ -127,14 +127,14 @@ class DaemonLifecycle:
127
127
  # Redirect stdout and stderr
128
128
  if self.log_file:
129
129
  # Redirect to log file
130
- with open(self.log_file, "a") as log_out:
130
+ with self.log_file.open("a") as log_out:
131
131
  os.dup2(log_out.fileno(), sys.stdout.fileno())
132
132
  os.dup2(log_out.fileno(), sys.stderr.fileno())
133
133
  else:
134
134
  # Default to a daemon log file instead of /dev/null for errors
135
135
  default_log = Path.home() / ".claude-mpm" / "monitor-daemon.log"
136
136
  default_log.parent.mkdir(parents=True, exist_ok=True)
137
- with open(default_log, "a") as log_out:
137
+ with default_log.open("a") as log_out:
138
138
  os.dup2(log_out.fileno(), sys.stdout.fileno())
139
139
  os.dup2(log_out.fileno(), sys.stderr.fileno())
140
140
 
@@ -149,7 +149,7 @@ class DaemonLifecycle:
149
149
  self.pid_file.parent.mkdir(parents=True, exist_ok=True)
150
150
 
151
151
  # Write PID
152
- with open(self.pid_file, "w") as f:
152
+ with self.pid_file.open("w") as f:
153
153
  f.write(str(os.getpid()))
154
154
 
155
155
  self.logger.debug(f"PID file written: {self.pid_file}")
@@ -199,7 +199,7 @@ class DaemonLifecycle:
199
199
  if not self.pid_file.exists():
200
200
  return None
201
201
 
202
- with open(self.pid_file) as f:
202
+ with self.pid_file.open() as f:
203
203
  pid_str = f.read().strip()
204
204
  return int(pid_str) if pid_str else None
205
205
 
@@ -385,7 +385,7 @@ class DaemonLifecycle:
385
385
  try:
386
386
  # Check if status file exists and read it
387
387
  if self.startup_status_file and Path(self.startup_status_file).exists():
388
- with open(self.startup_status_file) as f:
388
+ with self.startup_status_file.open() as f:
389
389
  status = f.read().strip()
390
390
 
391
391
  if status == "success":
@@ -437,7 +437,7 @@ class DaemonLifecycle:
437
437
  """Report successful startup to parent process."""
438
438
  if self.startup_status_file:
439
439
  try:
440
- with open(self.startup_status_file, "w") as f:
440
+ with self.startup_status_file.open("w") as f:
441
441
  f.write("success")
442
442
  except Exception as e:
443
443
  self.logger.error(f"Failed to report startup success: {e}")
@@ -450,7 +450,7 @@ class DaemonLifecycle:
450
450
  """
451
451
  if self.startup_status_file:
452
452
  try:
453
- with open(self.startup_status_file, "w") as f:
453
+ with self.startup_status_file.open("w") as f:
454
454
  f.write(f"error:{error_msg}")
455
455
  except Exception:
456
456
  pass # Can't report if file write fails
@@ -313,7 +313,7 @@ class UnifiedMonitorServer:
313
313
  async def dashboard_index(request):
314
314
  template_path = dashboard_dir / "templates" / "index.html"
315
315
  if template_path.exists():
316
- with open(template_path) as f:
316
+ with template_path.open() as f:
317
317
  content = f.read()
318
318
  return web.Response(text=content, content_type="text/html")
319
319
  return web.Response(text="Dashboard not found", status=404)
@@ -365,7 +365,7 @@ class UnifiedMonitorServer:
365
365
  return web.Response(text=f"Error: {e!s}", status=500)
366
366
 
367
367
  # File content endpoint for file viewer
368
- async def api_file_handler(request): # noqa: PLR0911
368
+ async def api_file_handler(request):
369
369
  """Handle file content requests."""
370
370
  import json
371
371
  import os
@@ -199,10 +199,7 @@ class OrphanDetectionService(SyncBaseService):
199
199
  Returns:
200
200
  True if port is protected
201
201
  """
202
- for start, end in self.PROTECTED_PORT_RANGES:
203
- if start <= port <= end:
204
- return True
205
- return False
202
+ return any(start <= port <= end for start, end in self.PROTECTED_PORT_RANGES)
206
203
 
207
204
  def _get_process_age(self, pid: int) -> Optional[float]:
208
205
  """
@@ -262,7 +259,7 @@ class OrphanDetectionService(SyncBaseService):
262
259
  if not self.state_file.exists():
263
260
  return orphans
264
261
 
265
- with open(self.state_file) as f:
262
+ with self.state_file.open() as f:
266
263
  state = json.load(f)
267
264
 
268
265
  deployments = state.get("deployments", {})
@@ -308,7 +305,7 @@ class OrphanDetectionService(SyncBaseService):
308
305
  if not self.global_registry_file.exists():
309
306
  return orphans
310
307
 
311
- with open(self.global_registry_file) as f:
308
+ with self.global_registry_file.open() as f:
312
309
  registry = json.load(f)
313
310
 
314
311
  allocations = registry.get("allocations", {})
@@ -350,11 +347,11 @@ class OrphanDetectionService(SyncBaseService):
350
347
  # Load global registry to know which ports are managed
351
348
  managed_ports = set()
352
349
  if self.global_registry_file.exists():
353
- with open(self.global_registry_file) as f:
350
+ with self.global_registry_file.open() as f:
354
351
  registry = json.load(f)
355
- managed_ports = set(
356
- int(p) for p in registry.get("allocations", {}).keys()
357
- )
352
+ managed_ports = {
353
+ int(p) for p in registry.get("allocations", {})
354
+ }
358
355
 
359
356
  # Scan all network connections
360
357
  for conn in psutil.net_connections(kind="inet"):
@@ -557,7 +554,7 @@ class OrphanDetectionService(SyncBaseService):
557
554
  # Check project state
558
555
  if self.state_file.exists():
559
556
  try:
560
- with open(self.state_file) as f:
557
+ with self.state_file.open() as f:
561
558
  state = json.load(f)
562
559
 
563
560
  for deployment in state.get("deployments", {}).values():
@@ -585,7 +582,7 @@ class OrphanDetectionService(SyncBaseService):
585
582
  # Check project state
586
583
  if self.state_file.exists():
587
584
  try:
588
- with open(self.state_file) as f:
585
+ with self.state_file.open() as f:
589
586
  state = json.load(f)
590
587
 
591
588
  for deployment in state.get("deployments", {}).values():
@@ -667,14 +664,14 @@ class OrphanDetectionService(SyncBaseService):
667
664
  def _cleanup_dead_pid(self, orphan: OrphanInfo) -> Tuple[bool, str]:
668
665
  """Clean up dead PID entry from state file."""
669
666
  try:
670
- with open(self.state_file) as f:
667
+ with self.state_file.open() as f:
671
668
  state = json.load(f)
672
669
 
673
670
  service_name = orphan.details.get("service_name")
674
671
  if service_name in state.get("deployments", {}):
675
672
  del state["deployments"][service_name]
676
673
 
677
- with open(self.state_file, "w") as f:
674
+ with self.state_file.open("w") as f:
678
675
  json.dump(state, f, indent=2)
679
676
 
680
677
  return True, f"Removed dead PID entry for {service_name}"
@@ -687,14 +684,14 @@ class OrphanDetectionService(SyncBaseService):
687
684
  def _cleanup_deleted_project(self, orphan: OrphanInfo) -> Tuple[bool, str]:
688
685
  """Clean up deleted project entry from global registry."""
689
686
  try:
690
- with open(self.global_registry_file) as f:
687
+ with self.global_registry_file.open() as f:
691
688
  registry = json.load(f)
692
689
 
693
690
  port = str(orphan.details.get("port"))
694
691
  if port in registry.get("allocations", {}):
695
692
  del registry["allocations"][port]
696
693
 
697
- with open(self.global_registry_file, "w") as f:
694
+ with self.global_registry_file.open("w") as f:
698
695
  json.dump(registry, f, indent=2)
699
696
 
700
697
  return True, f"Removed deleted project entry for port {port}"
@@ -474,7 +474,7 @@ class PortManager:
474
474
  """Load registered instances from file."""
475
475
  try:
476
476
  if self.instances_file.exists():
477
- with open(self.instances_file) as f:
477
+ with self.instances_file.open() as f:
478
478
  return json.load(f)
479
479
  except Exception as e:
480
480
  self.logger.warning(f"Failed to load instances file: {e}")
@@ -484,7 +484,7 @@ class PortManager:
484
484
  def save_instances(self, instances: Dict) -> None:
485
485
  """Save registered instances to file."""
486
486
  try:
487
- with open(self.instances_file, "w") as f:
487
+ with self.instances_file.open("w") as f:
488
488
  json.dump(instances, f, indent=2)
489
489
  except Exception as e:
490
490
  self.logger.error(f"Failed to save instances file: {e}")
@@ -313,7 +313,7 @@ class ProjectAnalyzer(ProjectAnalyzerInterface):
313
313
  ) -> None:
314
314
  """Parse package.json for Node.js project details."""
315
315
  try:
316
- with open(package_path) as f:
316
+ with package_path.open() as f:
317
317
  package_data = json.load(f)
318
318
 
319
319
  # Extract dependencies
@@ -382,7 +382,7 @@ class ProjectAnalyzer(ProjectAnalyzerInterface):
382
382
  f"TOML parsing not available for {deps_path}"
383
383
  )
384
384
  return
385
- with open(deps_path, "rb") as f:
385
+ with deps_path.open("rb") as f:
386
386
  data = tomllib.load(f)
387
387
  deps = list(data.get("project", {}).get("dependencies", []))
388
388
  deps.extend(
@@ -430,7 +430,7 @@ class ProjectAnalyzer(ProjectAnalyzerInterface):
430
430
  except ImportError:
431
431
  self.logger.warning(f"TOML parsing not available for {cargo_path}")
432
432
  return
433
- with open(cargo_path, "rb") as f:
433
+ with cargo_path.open("rb") as f:
434
434
  cargo_data = tomllib.load(f)
435
435
 
436
436
  deps = cargo_data.get("dependencies", {})
@@ -24,7 +24,7 @@ import json
24
24
  import re
25
25
  import shutil
26
26
  import subprocess
27
- from datetime import datetime, timedelta
27
+ from datetime import datetime, timedelta, timezone
28
28
  from pathlib import Path
29
29
  from typing import Dict, List, Optional, Tuple
30
30
 
@@ -116,7 +116,7 @@ Generated by Claude MPM Archive Manager
116
116
 
117
117
  try:
118
118
  # Generate archive filename
119
- timestamp = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
119
+ timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H-%M-%S")
120
120
  archive_name = f"{file_path.name}.{timestamp}{file_path.suffix}"
121
121
  archive_file_path = self.archive_path / archive_name
122
122
 
@@ -130,7 +130,7 @@ Generated by Claude MPM Archive Manager
130
130
  meta_data.update(
131
131
  {
132
132
  "original_path": str(file_path),
133
- "archived_at": datetime.now().isoformat(),
133
+ "archived_at": datetime.now(timezone.utc).isoformat(),
134
134
  "reason": reason or "Manual archive",
135
135
  "file_size": file_path.stat().st_size,
136
136
  "file_hash": self._calculate_file_hash(file_path),
@@ -152,7 +152,7 @@ Generated by Claude MPM Archive Manager
152
152
  def _calculate_file_hash(self, file_path: Path) -> str:
153
153
  """Calculate MD5 hash of file."""
154
154
  hasher = hashlib.md5()
155
- with open(file_path, "rb") as f:
155
+ with file_path.open("rb") as f:
156
156
  for chunk in iter(lambda: f.read(4096), b""):
157
157
  hasher.update(chunk)
158
158
  return hasher.hexdigest()
@@ -172,7 +172,7 @@ Generated by Claude MPM Archive Manager
172
172
  logger.info(f"Removed old archive: {archive.name}")
173
173
 
174
174
  # Compress old archives
175
- cutoff_compress = datetime.now() - timedelta(days=self.COMPRESS_AFTER_DAYS)
175
+ cutoff_compress = datetime.now(timezone.utc) - timedelta(days=self.COMPRESS_AFTER_DAYS)
176
176
  for archive in archives:
177
177
  if archive.suffix != ".gz":
178
178
  mtime = datetime.fromtimestamp(archive.stat().st_mtime)
@@ -180,7 +180,7 @@ Generated by Claude MPM Archive Manager
180
180
  self._compress_archive(archive)
181
181
 
182
182
  # Delete very old archives
183
- cutoff_delete = datetime.now() - timedelta(days=self.DELETE_AFTER_DAYS)
183
+ cutoff_delete = datetime.now(timezone.utc) - timedelta(days=self.DELETE_AFTER_DAYS)
184
184
  for archive in archives:
185
185
  mtime = datetime.fromtimestamp(archive.stat().st_mtime)
186
186
  if mtime < cutoff_delete:
@@ -204,7 +204,7 @@ Generated by Claude MPM Archive Manager
204
204
  try:
205
205
  compressed_path = archive_path.with_suffix(archive_path.suffix + ".gz")
206
206
 
207
- with open(archive_path, "rb") as f_in:
207
+ with archive_path.open("rb") as f_in:
208
208
  with gzip.open(compressed_path, "wb") as f_out:
209
209
  shutil.copyfileobj(f_in, f_out)
210
210
 
@@ -321,7 +321,7 @@ Generated by Claude MPM Archive Manager
321
321
  if archive_file.suffix == ".gz":
322
322
  # Decompress first
323
323
  with gzip.open(archive_file, "rb") as f_in:
324
- with open(target_path, "wb") as f_out:
324
+ with target_path.open("wb") as f_out:
325
325
  shutil.copyfileobj(f_in, f_out)
326
326
  else:
327
327
  shutil.copy2(archive_file, target_path)
@@ -519,7 +519,7 @@ Generated by Claude MPM Archive Manager
519
519
  def review_documentation(self, check_git: bool = True) -> Dict:
520
520
  """Comprehensive documentation review with outdated detection."""
521
521
  report = {
522
- "timestamp": datetime.now().isoformat(),
522
+ "timestamp": datetime.now(timezone.utc).isoformat(),
523
523
  "files_reviewed": {},
524
524
  "outdated_sections": [],
525
525
  "synchronization_issues": [],
@@ -698,7 +698,7 @@ Generated by Claude MPM Archive Manager
698
698
  if changelog_path.exists():
699
699
  last_modified = self.get_file_last_modified(changelog_path)
700
700
  if last_modified:
701
- days_old = (datetime.now() - last_modified).days
701
+ days_old = (datetime.now(timezone.utc) - last_modified).days
702
702
  if days_old > 30:
703
703
  issues.append(
704
704
  {
@@ -800,7 +800,7 @@ Generated by Claude MPM Archive Manager
800
800
  # Check if file hasn't been updated in git for long time
801
801
  if file_report.get("last_git_update"):
802
802
  last_update = datetime.fromisoformat(file_report["last_git_update"])
803
- days_old = (datetime.now() - last_update).days
803
+ days_old = (datetime.now(timezone.utc) - last_update).days
804
804
  if days_old > 90:
805
805
  should_archive = True
806
806
  archive_reason.append(f"No updates for {days_old} days")
@@ -913,7 +913,7 @@ Generated by Claude MPM Archive Manager
913
913
  changelog_content = changelog_path.read_text()
914
914
  if f"## [{current_version}]" not in changelog_content:
915
915
  # Add new version section
916
- today = datetime.now().strftime("%Y-%m-%d")
916
+ today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
917
917
  new_section = f"\n## [{current_version}] - {today}\n\n### Added\n\n### Changed\n\n### Fixed\n\n"
918
918
 
919
919
  # Insert after header
@@ -981,7 +981,7 @@ Generated by Claude MPM Archive Manager
981
981
  last_updated = "Unknown"
982
982
  if report.get("last_git_update"):
983
983
  last_date = datetime.fromisoformat(report["last_git_update"])
984
- days_ago = (datetime.now() - last_date).days
984
+ days_ago = (datetime.now(timezone.utc) - last_date).days
985
985
  last_updated = f"{days_ago} days ago"
986
986
 
987
987
  table.add_row(filename, status, str(issues), last_updated)