claude-mpm 4.5.11__py3-none-any.whl → 4.5.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/BASE_ENGINEER.md +47 -0
  3. claude_mpm/agents/BASE_QA.md +60 -0
  4. claude_mpm/agents/frontmatter_validator.py +4 -4
  5. claude_mpm/agents/templates/nextjs_engineer.json +2 -2
  6. claude_mpm/agents/templates/qa.json +13 -3
  7. claude_mpm/agents/templates/react_engineer.json +2 -2
  8. claude_mpm/agents/templates/typescript_engineer.json +2 -2
  9. claude_mpm/agents/templates/web_qa.json +14 -3
  10. claude_mpm/cli/commands/agent_manager.py +3 -3
  11. claude_mpm/cli/commands/agents.py +6 -6
  12. claude_mpm/cli/commands/aggregate.py +4 -4
  13. claude_mpm/cli/commands/analyze.py +2 -2
  14. claude_mpm/cli/commands/analyze_code.py +1 -1
  15. claude_mpm/cli/commands/cleanup.py +3 -3
  16. claude_mpm/cli/commands/config.py +2 -2
  17. claude_mpm/cli/commands/configure.py +14 -14
  18. claude_mpm/cli/commands/dashboard.py +1 -1
  19. claude_mpm/cli/commands/debug.py +3 -3
  20. claude_mpm/cli/commands/doctor.py +1 -1
  21. claude_mpm/cli/commands/mcp.py +7 -7
  22. claude_mpm/cli/commands/mcp_command_router.py +1 -1
  23. claude_mpm/cli/commands/mcp_config.py +2 -2
  24. claude_mpm/cli/commands/mcp_external_commands.py +2 -2
  25. claude_mpm/cli/commands/mcp_install_commands.py +3 -3
  26. claude_mpm/cli/commands/mcp_pipx_config.py +2 -2
  27. claude_mpm/cli/commands/mcp_setup_external.py +3 -3
  28. claude_mpm/cli/commands/monitor.py +1 -1
  29. claude_mpm/cli/commands/mpm_init_handler.py +1 -1
  30. claude_mpm/cli/interactive/agent_wizard.py +1 -1
  31. claude_mpm/cli/parsers/search_parser.py +1 -1
  32. claude_mpm/cli/shared/argument_patterns.py +2 -2
  33. claude_mpm/cli/shared/base_command.py +1 -1
  34. claude_mpm/cli/startup_logging.py +6 -4
  35. claude_mpm/config/experimental_features.py +4 -4
  36. claude_mpm/config/socketio_config.py +2 -2
  37. claude_mpm/core/agent_session_manager.py +2 -2
  38. claude_mpm/core/api_validator.py +3 -3
  39. claude_mpm/core/base_service.py +10 -1
  40. claude_mpm/core/cache.py +2 -2
  41. claude_mpm/core/config.py +4 -4
  42. claude_mpm/core/config_aliases.py +4 -4
  43. claude_mpm/core/config_constants.py +1 -1
  44. claude_mpm/core/error_handler.py +1 -1
  45. claude_mpm/core/file_utils.py +5 -5
  46. claude_mpm/core/framework/formatters/capability_generator.py +5 -5
  47. claude_mpm/core/framework/loaders/agent_loader.py +1 -1
  48. claude_mpm/core/framework/processors/metadata_processor.py +1 -1
  49. claude_mpm/core/framework/processors/template_processor.py +3 -3
  50. claude_mpm/core/framework_loader.py +2 -2
  51. claude_mpm/core/log_manager.py +4 -4
  52. claude_mpm/core/logger.py +2 -2
  53. claude_mpm/core/optimized_startup.py +1 -1
  54. claude_mpm/core/output_style_manager.py +1 -1
  55. claude_mpm/core/service_registry.py +2 -2
  56. claude_mpm/core/session_manager.py +3 -3
  57. claude_mpm/core/shared/config_loader.py +1 -1
  58. claude_mpm/core/socketio_pool.py +2 -2
  59. claude_mpm/core/unified_agent_registry.py +2 -2
  60. claude_mpm/core/unified_config.py +6 -6
  61. claude_mpm/core/unified_paths.py +2 -2
  62. claude_mpm/dashboard/api/simple_directory.py +1 -1
  63. claude_mpm/generators/agent_profile_generator.py +1 -1
  64. claude_mpm/hooks/claude_hooks/event_handlers.py +2 -2
  65. claude_mpm/hooks/claude_hooks/installer.py +9 -9
  66. claude_mpm/hooks/claude_hooks/services/connection_manager_http.py +7 -2
  67. claude_mpm/hooks/claude_hooks/tool_analysis.py +2 -2
  68. claude_mpm/hooks/memory_integration_hook.py +1 -1
  69. claude_mpm/hooks/validation_hooks.py +1 -1
  70. claude_mpm/__init__.py +4 -4
  71. claude_mpm/models/agent_session.py +1 -1
  72. claude_mpm/scripts/socketio_daemon.py +5 -5
  73. claude_mpm/services/__init__.py +2 -2
  74. claude_mpm/services/agent_capabilities_service.py +1 -1
  75. claude_mpm/services/agents/agent_builder.py +6 -4
  76. claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +1 -1
  77. claude_mpm/services/agents/deployment/agent_metrics_collector.py +1 -1
  78. claude_mpm/services/agents/deployment/agent_record_service.py +3 -3
  79. claude_mpm/services/agents/deployment/deployment_wrapper.py +1 -1
  80. claude_mpm/services/agents/deployment/pipeline/steps/target_directory_step.py +2 -2
  81. claude_mpm/services/agents/loading/agent_profile_loader.py +2 -2
  82. claude_mpm/services/agents/local_template_manager.py +5 -5
  83. claude_mpm/services/agents/registry/deployed_agent_discovery.py +1 -1
  84. claude_mpm/services/agents/registry/modification_tracker.py +19 -11
  85. claude_mpm/services/async_session_logger.py +1 -1
  86. claude_mpm/services/claude_session_logger.py +1 -1
  87. claude_mpm/services/cli/agent_listing_service.py +3 -3
  88. claude_mpm/services/cli/agent_validation_service.py +1 -1
  89. claude_mpm/services/cli/session_manager.py +2 -2
  90. claude_mpm/services/core/path_resolver.py +1 -1
  91. claude_mpm/services/diagnostics/checks/agent_check.py +1 -1
  92. claude_mpm/services/diagnostics/checks/claude_code_check.py +2 -2
  93. claude_mpm/services/diagnostics/checks/common_issues_check.py +3 -3
  94. claude_mpm/services/diagnostics/checks/configuration_check.py +2 -2
  95. claude_mpm/services/diagnostics/checks/installation_check.py +1 -1
  96. claude_mpm/services/diagnostics/checks/mcp_check.py +1 -1
  97. claude_mpm/services/diagnostics/checks/mcp_services_check.py +9 -9
  98. claude_mpm/services/diagnostics/checks/monitor_check.py +1 -1
  99. claude_mpm/services/diagnostics/doctor_reporter.py +1 -1
  100. claude_mpm/services/event_aggregator.py +1 -1
  101. claude_mpm/services/event_bus/event_bus.py +9 -2
  102. claude_mpm/services/events/consumers/dead_letter.py +2 -2
  103. claude_mpm/services/framework_claude_md_generator/__init__.py +1 -1
  104. claude_mpm/services/framework_claude_md_generator/deployment_manager.py +3 -3
  105. claude_mpm/services/framework_claude_md_generator/version_manager.py +1 -1
  106. claude_mpm/services/hook_installer_service.py +7 -7
  107. claude_mpm/services/infrastructure/context_preservation.py +7 -7
  108. claude_mpm/services/infrastructure/daemon_manager.py +5 -5
  109. claude_mpm/services/mcp_config_manager.py +10 -10
  110. claude_mpm/services/mcp_gateway/auto_configure.py +5 -5
  111. claude_mpm/services/mcp_gateway/config/config_loader.py +2 -2
  112. claude_mpm/services/mcp_gateway/config/configuration.py +5 -3
  113. claude_mpm/services/mcp_gateway/core/process_pool.py +3 -3
  114. claude_mpm/services/mcp_gateway/core/singleton_manager.py +2 -2
  115. claude_mpm/services/mcp_gateway/core/startup_verification.py +1 -1
  116. claude_mpm/services/mcp_gateway/main.py +1 -1
  117. claude_mpm/services/mcp_gateway/registry/service_registry.py +4 -2
  118. claude_mpm/services/mcp_gateway/registry/tool_registry.py +2 -1
  119. claude_mpm/services/mcp_gateway/server/stdio_handler.py +1 -1
  120. claude_mpm/services/mcp_gateway/tools/document_summarizer.py +1 -1
  121. claude_mpm/services/mcp_gateway/tools/health_check_tool.py +1 -1
  122. claude_mpm/services/mcp_gateway/tools/hello_world.py +1 -1
  123. claude_mpm/services/mcp_gateway/utils/package_version_checker.py +5 -5
  124. claude_mpm/services/mcp_gateway/utils/update_preferences.py +2 -2
  125. claude_mpm/services/mcp_service_verifier.py +1 -1
  126. claude_mpm/services/memory/builder.py +1 -1
  127. claude_mpm/services/memory/cache/shared_prompt_cache.py +2 -1
  128. claude_mpm/services/memory/indexed_memory.py +3 -3
  129. claude_mpm/services/monitor/daemon.py +1 -1
  130. claude_mpm/services/monitor/daemon_manager.py +9 -9
  131. claude_mpm/services/monitor/handlers/file.py +1 -1
  132. claude_mpm/services/monitor/handlers/hooks.py +3 -3
  133. claude_mpm/services/monitor/management/lifecycle.py +7 -7
  134. claude_mpm/services/monitor/server.py +2 -2
  135. claude_mpm/services/orphan_detection.py +11 -16
  136. claude_mpm/services/port_manager.py +2 -2
  137. claude_mpm/services/project/analyzer.py +3 -3
  138. claude_mpm/services/project/archive_manager.py +17 -13
  139. claude_mpm/services/project/dependency_analyzer.py +4 -4
  140. claude_mpm/services/project/documentation_manager.py +4 -4
  141. claude_mpm/services/project/enhanced_analyzer.py +19 -8
  142. claude_mpm/services/project/registry.py +4 -4
  143. claude_mpm/services/project_port_allocator.py +7 -12
  144. claude_mpm/services/session_management_service.py +1 -1
  145. claude_mpm/services/socketio/event_normalizer.py +1 -1
  146. claude_mpm/services/socketio/handlers/code_analysis.py +14 -12
  147. claude_mpm/services/socketio/handlers/file.py +1 -1
  148. claude_mpm/services/socketio/migration_utils.py +1 -1
  149. claude_mpm/services/socketio/server/core.py +1 -1
  150. claude_mpm/services/unified/analyzer_strategies/code_analyzer.py +1 -1
  151. claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +4 -4
  152. claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +1 -1
  153. claude_mpm/services/unified/config_strategies/config_schema.py +4 -4
  154. claude_mpm/services/unified/config_strategies/context_strategy.py +8 -6
  155. claude_mpm/services/unified/config_strategies/error_handling_strategy.py +10 -10
  156. claude_mpm/services/unified/config_strategies/file_loader_strategy.py +5 -5
  157. claude_mpm/services/unified/config_strategies/unified_config_service.py +8 -8
  158. claude_mpm/services/unified/config_strategies/validation_strategy.py +15 -15
  159. claude_mpm/services/unified/deployment_strategies/base.py +4 -4
  160. claude_mpm/services/unified/deployment_strategies/cloud_strategies.py +15 -15
  161. claude_mpm/services/unified/deployment_strategies/local.py +11 -11
  162. claude_mpm/services/unified/deployment_strategies/utils.py +11 -9
  163. claude_mpm/services/unified/deployment_strategies/vercel.py +7 -9
  164. claude_mpm/services/unified/unified_config.py +5 -5
  165. claude_mpm/services/unified/unified_deployment.py +2 -2
  166. claude_mpm/services/utility_service.py +1 -1
  167. claude_mpm/services/version_control/conflict_resolution.py +2 -2
  168. claude_mpm/services/version_control/git_operations.py +3 -3
  169. claude_mpm/services/version_control/semantic_versioning.py +13 -13
  170. claude_mpm/services/version_control/version_parser.py +1 -1
  171. claude_mpm/storage/state_storage.py +12 -13
  172. claude_mpm/tools/code_tree_analyzer.py +5 -5
  173. claude_mpm/tools/code_tree_builder.py +4 -4
  174. claude_mpm/tools/socketio_debug.py +1 -1
  175. claude_mpm/utils/agent_dependency_loader.py +4 -4
  176. claude_mpm/utils/common.py +2 -2
  177. claude_mpm/utils/config_manager.py +3 -3
  178. claude_mpm/utils/dependency_cache.py +2 -2
  179. claude_mpm/utils/dependency_strategies.py +6 -6
  180. claude_mpm/utils/file_utils.py +11 -11
  181. claude_mpm/utils/log_cleanup.py +1 -1
  182. claude_mpm/utils/path_operations.py +1 -1
  183. claude_mpm/validation/agent_validator.py +2 -2
  184. claude_mpm/validation/frontmatter_validator.py +1 -1
  185. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.13.dist-info}/METADATA +1 -1
  186. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.13.dist-info}/RECORD +190 -190
  187. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.13.dist-info}/WHEEL +0 -0
  188. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.13.dist-info}/entry_points.txt +0 -0
  189. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.13.dist-info}/licenses/LICENSE +0 -0
  190. {claude_mpm-4.5.11.dist-info → claude_mpm-4.5.13.dist-info}/top_level.txt +0 -0
@@ -302,7 +302,7 @@ class DaemonManager:
         if not self.pid_file.exists():
             return True

-        with open(self.pid_file) as f:
+        with self.pid_file.open() as f:
             pid = int(f.read().strip())

         self.logger.info(f"Found PID {pid} in PID file")
@@ -397,7 +397,7 @@ class DaemonManager:
         # First check PID file
         if self.pid_file.exists():
             try:
-                with open(self.pid_file) as f:
+                with self.pid_file.open() as f:
                     pid = int(f.read().strip())

                 # Verify process exists
@@ -604,7 +604,7 @@ class DaemonManager:
         # Check if PID file was written
         if self.pid_file.exists():
             try:
-                with open(self.pid_file) as f:
+                with self.pid_file.open() as f:
                     written_pid = int(f.read().strip())
                 if written_pid == pid:
                     # PID file written correctly, check port
@@ -798,7 +798,7 @@ class DaemonManager:
             if not self.pid_file.exists():
                 return None

-            with open(self.pid_file) as f:
+            with self.pid_file.open() as f:
                 return int(f.read().strip())

         except Exception as e:
@@ -809,7 +809,7 @@ class DaemonManager:
         """Write current PID to PID file."""
         try:
             self.pid_file.parent.mkdir(parents=True, exist_ok=True)
-            with open(self.pid_file, "w") as f:
+            with self.pid_file.open("w") as f:
                 f.write(str(os.getpid()))
             self.logger.debug(f"PID file written: {self.pid_file}")
         except Exception as e:
@@ -863,7 +863,7 @@ class DaemonManager:

         # Redirect stdout and stderr to log file
         self.log_file.parent.mkdir(parents=True, exist_ok=True)
-        with open(self.log_file, "a") as log_out:
+        with self.log_file.open("a") as log_out:
             os.dup2(log_out.fileno(), sys.stdout.fileno())
             os.dup2(log_out.fileno(), sys.stderr.fileno())

@@ -903,7 +903,7 @@ class DaemonManager:
                 continue

             try:
-                with open(self.startup_status_file) as f:
+                with self.startup_status_file.open() as f:
                     status = f.read().strip()

                 if status == "success":
@@ -935,7 +935,7 @@ class DaemonManager:
             try:
                 # Don't check if file exists - we need to write to it regardless
                 # The parent created it and is waiting for us to update it
-                with open(self.startup_status_file, "w") as f:
+                with self.startup_status_file.open("w") as f:
                     f.write("success")
                     f.flush()  # Ensure it's written immediately
                     os.fsync(f.fileno())  # Force write to disk
@@ -948,7 +948,7 @@ class DaemonManager:
         if self.startup_status_file:
             try:
                 # Don't check if file exists - we need to write to it regardless
-                with open(self.startup_status_file, "w") as f:
+                with self.startup_status_file.open("w") as f:
                     f.write(f"error:{error}")
                     f.flush()  # Ensure it's written immediately
                     os.fsync(f.fileno())  # Force write to disk
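These startup-status hunks keep the existing flush-and-fsync sequence around the rewritten open call. Per the inline comments, the parent process created the status file and is waiting on it, so the daemon flushes Python's buffer and asks the OS to persist the bytes before the parent's next read. A minimal sketch of the pattern (the file path below is hypothetical):

    import os
    from pathlib import Path

    status_file = Path("/tmp/claude-mpm-example-status")  # hypothetical path

    with status_file.open("w") as f:
        f.write("success")
        f.flush()              # push Python's buffer to the OS
        os.fsync(f.fileno())   # ask the OS to persist the bytes before the parent polls again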
@@ -225,7 +225,7 @@ class FileHandler:
         be displayed as text.
         """
         try:
-            with open(real_path, "rb") as f:
+            with real_path.open("rb") as f:
                 binary_content = f.read()

             # Check if it's a text file by looking for common text patterns
@@ -236,10 +236,10 @@ class HookHandler:
                     },
                 )

-                # Remove from active sessions after a delay
-                asyncio.create_task(
+                # Remove from active sessions after a delay (5 minutes)
+                _task = asyncio.create_task(  # noqa: RUF006
                     self._cleanup_session(session_id, delay=300)
-                )  # 5 minutes
+                )  # Fire-and-forget cleanup task

                 self.logger.info(f"Claude Code session ended: {session_id}")
             else:
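The # noqa: RUF006 marker refers to Ruff's asyncio-dangling-task rule: the event loop holds only a weak reference to tasks returned by asyncio.create_task(), so a fire-and-forget task with no other reference can be garbage-collected before it completes; binding the result to a name documents that intent. A minimal sketch of the issue, with a hypothetical cleanup coroutine standing in for _cleanup_session:

    import asyncio


    async def cleanup_session(session_id: str, delay: float) -> None:
        """Hypothetical stand-in for the real _cleanup_session coroutine."""
        await asyncio.sleep(delay)
        print(f"cleaned up {session_id}")


    async def main() -> None:
        # Keep the Task referenced: the loop holds only a weak reference, so an
        # unreferenced fire-and-forget task may be garbage-collected mid-flight.
        task = asyncio.create_task(cleanup_session("abc123", delay=0.1))
        await asyncio.sleep(0.2)
        assert task.done()


    asyncio.run(main())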
@@ -127,14 +127,14 @@ class DaemonLifecycle:
         # Redirect stdout and stderr
         if self.log_file:
             # Redirect to log file
-            with open(self.log_file, "a") as log_out:
+            with self.log_file.open("a") as log_out:
                 os.dup2(log_out.fileno(), sys.stdout.fileno())
                 os.dup2(log_out.fileno(), sys.stderr.fileno())
         else:
             # Default to a daemon log file instead of /dev/null for errors
             default_log = Path.home() / ".claude-mpm" / "monitor-daemon.log"
             default_log.parent.mkdir(parents=True, exist_ok=True)
-            with open(default_log, "a") as log_out:
+            with default_log.open("a") as log_out:
                 os.dup2(log_out.fileno(), sys.stdout.fileno())
                 os.dup2(log_out.fileno(), sys.stderr.fileno())

@@ -149,7 +149,7 @@ class DaemonLifecycle:
         self.pid_file.parent.mkdir(parents=True, exist_ok=True)

         # Write PID
-        with open(self.pid_file, "w") as f:
+        with self.pid_file.open("w") as f:
             f.write(str(os.getpid()))

         self.logger.debug(f"PID file written: {self.pid_file}")
@@ -199,7 +199,7 @@ class DaemonLifecycle:
             if not self.pid_file.exists():
                 return None

-            with open(self.pid_file) as f:
+            with self.pid_file.open() as f:
                 pid_str = f.read().strip()
                 return int(pid_str) if pid_str else None

@@ -385,7 +385,7 @@ class DaemonLifecycle:
             try:
                 # Check if status file exists and read it
                 if self.startup_status_file and Path(self.startup_status_file).exists():
-                    with open(self.startup_status_file) as f:
+                    with self.startup_status_file.open() as f:
                         status = f.read().strip()

                     if status == "success":
@@ -437,7 +437,7 @@ class DaemonLifecycle:
         """Report successful startup to parent process."""
         if self.startup_status_file:
             try:
-                with open(self.startup_status_file, "w") as f:
+                with self.startup_status_file.open("w") as f:
                     f.write("success")
             except Exception as e:
                 self.logger.error(f"Failed to report startup success: {e}")
@@ -450,7 +450,7 @@ class DaemonLifecycle:
         """
         if self.startup_status_file:
             try:
-                with open(self.startup_status_file, "w") as f:
+                with self.startup_status_file.open("w") as f:
                     f.write(f"error:{error_msg}")
             except Exception:
                 pass  # Can't report if file write fails
@@ -313,7 +313,7 @@ class UnifiedMonitorServer:
         async def dashboard_index(request):
             template_path = dashboard_dir / "templates" / "index.html"
             if template_path.exists():
-                with open(template_path) as f:
+                with template_path.open() as f:
                     content = f.read()
                 return web.Response(text=content, content_type="text/html")
             return web.Response(text="Dashboard not found", status=404)
@@ -365,7 +365,7 @@ class UnifiedMonitorServer:
                 return web.Response(text=f"Error: {e!s}", status=500)

         # File content endpoint for file viewer
-        async def api_file_handler(request):  # noqa: PLR0911
+        async def api_file_handler(request):
             """Handle file content requests."""
             import json
             import os
@@ -199,10 +199,7 @@ class OrphanDetectionService(SyncBaseService):
         Returns:
             True if port is protected
         """
-        for start, end in self.PROTECTED_PORT_RANGES:
-            if start <= port <= end:
-                return True
-        return False
+        return any(start <= port <= end for start, end in self.PROTECTED_PORT_RANGES)

     def _get_process_age(self, pid: int) -> Optional[float]:
         """
@@ -262,7 +259,7 @@ class OrphanDetectionService(SyncBaseService):
         if not self.state_file.exists():
             return orphans

-        with open(self.state_file) as f:
+        with self.state_file.open() as f:
             state = json.load(f)

         deployments = state.get("deployments", {})
@@ -308,7 +305,7 @@ class OrphanDetectionService(SyncBaseService):
         if not self.global_registry_file.exists():
             return orphans

-        with open(self.global_registry_file) as f:
+        with self.global_registry_file.open() as f:
             registry = json.load(f)

         allocations = registry.get("allocations", {})
@@ -350,11 +347,9 @@ class OrphanDetectionService(SyncBaseService):
         # Load global registry to know which ports are managed
         managed_ports = set()
         if self.global_registry_file.exists():
-            with open(self.global_registry_file) as f:
+            with self.global_registry_file.open() as f:
                 registry = json.load(f)
-                managed_ports = set(
-                    int(p) for p in registry.get("allocations", {}).keys()
-                )
+                managed_ports = {int(p) for p in registry.get("allocations", {})}

         # Scan all network connections
         for conn in psutil.net_connections(kind="inet"):
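Two small behavior-preserving refactors appear in the hunks above: the explicit loop-and-return over PROTECTED_PORT_RANGES collapses into any() over a generator expression, and set(int(p) for p in ...keys()) becomes a set comprehension that iterates the dict directly. A minimal sketch of both equivalences (the range and allocation values below are illustrative, not taken from the package):

    PROTECTED_PORT_RANGES = [(1, 1023), (8765, 8785)]  # illustrative ranges only


    def is_protected_loop(port: int) -> bool:
        for start, end in PROTECTED_PORT_RANGES:
            if start <= port <= end:
                return True
        return False


    def is_protected_any(port: int) -> bool:
        return any(start <= port <= end for start, end in PROTECTED_PORT_RANGES)


    allocations = {"8765": {}, "8770": {}}  # registry keys are strings in the JSON file
    managed_ports = {int(p) for p in allocations}  # same result as set(int(p) for p in allocations.keys())

    assert is_protected_loop(8770) and is_protected_any(8770)
    assert managed_ports == {8765, 8770}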
@@ -557,7 +552,7 @@ class OrphanDetectionService(SyncBaseService):
         # Check project state
         if self.state_file.exists():
             try:
-                with open(self.state_file) as f:
+                with self.state_file.open() as f:
                     state = json.load(f)

                 for deployment in state.get("deployments", {}).values():
@@ -585,7 +580,7 @@ class OrphanDetectionService(SyncBaseService):
         # Check project state
         if self.state_file.exists():
             try:
-                with open(self.state_file) as f:
+                with self.state_file.open() as f:
                     state = json.load(f)

                 for deployment in state.get("deployments", {}).values():
@@ -667,14 +662,14 @@ class OrphanDetectionService(SyncBaseService):
     def _cleanup_dead_pid(self, orphan: OrphanInfo) -> Tuple[bool, str]:
         """Clean up dead PID entry from state file."""
         try:
-            with open(self.state_file) as f:
+            with self.state_file.open() as f:
                 state = json.load(f)

             service_name = orphan.details.get("service_name")
             if service_name in state.get("deployments", {}):
                 del state["deployments"][service_name]

-            with open(self.state_file, "w") as f:
+            with self.state_file.open("w") as f:
                 json.dump(state, f, indent=2)

             return True, f"Removed dead PID entry for {service_name}"
@@ -687,14 +682,14 @@ class OrphanDetectionService(SyncBaseService):
     def _cleanup_deleted_project(self, orphan: OrphanInfo) -> Tuple[bool, str]:
         """Clean up deleted project entry from global registry."""
         try:
-            with open(self.global_registry_file) as f:
+            with self.global_registry_file.open() as f:
                 registry = json.load(f)

             port = str(orphan.details.get("port"))
             if port in registry.get("allocations", {}):
                 del registry["allocations"][port]

-            with open(self.global_registry_file, "w") as f:
+            with self.global_registry_file.open("w") as f:
                 json.dump(registry, f, indent=2)

             return True, f"Removed deleted project entry for port {port}"
@@ -474,7 +474,7 @@ class PortManager:
         """Load registered instances from file."""
         try:
             if self.instances_file.exists():
-                with open(self.instances_file) as f:
+                with self.instances_file.open() as f:
                     return json.load(f)
         except Exception as e:
             self.logger.warning(f"Failed to load instances file: {e}")
@@ -484,7 +484,7 @@ class PortManager:
     def save_instances(self, instances: Dict) -> None:
         """Save registered instances to file."""
         try:
-            with open(self.instances_file, "w") as f:
+            with self.instances_file.open("w") as f:
                 json.dump(instances, f, indent=2)
         except Exception as e:
             self.logger.error(f"Failed to save instances file: {e}")
@@ -313,7 +313,7 @@ class ProjectAnalyzer(ProjectAnalyzerInterface):
     ) -> None:
         """Parse package.json for Node.js project details."""
         try:
-            with open(package_path) as f:
+            with package_path.open() as f:
                 package_data = json.load(f)

             # Extract dependencies
@@ -382,7 +382,7 @@ class ProjectAnalyzer(ProjectAnalyzerInterface):
                     f"TOML parsing not available for {deps_path}"
                 )
                 return
-            with open(deps_path, "rb") as f:
+            with deps_path.open("rb") as f:
                 data = tomllib.load(f)
             deps = list(data.get("project", {}).get("dependencies", []))
             deps.extend(
@@ -430,7 +430,7 @@ class ProjectAnalyzer(ProjectAnalyzerInterface):
             except ImportError:
                 self.logger.warning(f"TOML parsing not available for {cargo_path}")
                 return
-            with open(cargo_path, "rb") as f:
+            with cargo_path.open("rb") as f:
                 cargo_data = tomllib.load(f)

             deps = cargo_data.get("dependencies", {})
@@ -24,7 +24,7 @@ import json
 import re
 import shutil
 import subprocess
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Dict, List, Optional, Tuple

@@ -116,7 +116,7 @@ Generated by Claude MPM Archive Manager

         try:
             # Generate archive filename
-            timestamp = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
+            timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H-%M-%S")
             archive_name = f"{file_path.name}.{timestamp}{file_path.suffix}"
             archive_file_path = self.archive_path / archive_name

@@ -130,7 +130,7 @@ Generated by Claude MPM Archive Manager
             meta_data.update(
                 {
                     "original_path": str(file_path),
-                    "archived_at": datetime.now().isoformat(),
+                    "archived_at": datetime.now(timezone.utc).isoformat(),
                     "reason": reason or "Manual archive",
                     "file_size": file_path.stat().st_size,
                     "file_hash": self._calculate_file_hash(file_path),
@@ -152,7 +152,7 @@ Generated by Claude MPM Archive Manager
     def _calculate_file_hash(self, file_path: Path) -> str:
         """Calculate MD5 hash of file."""
         hasher = hashlib.md5()
-        with open(file_path, "rb") as f:
+        with file_path.open("rb") as f:
             for chunk in iter(lambda: f.read(4096), b""):
                 hasher.update(chunk)
         return hasher.hexdigest()
@@ -172,7 +172,9 @@ Generated by Claude MPM Archive Manager
                 logger.info(f"Removed old archive: {archive.name}")

         # Compress old archives
-        cutoff_compress = datetime.now() - timedelta(days=self.COMPRESS_AFTER_DAYS)
+        cutoff_compress = datetime.now(timezone.utc) - timedelta(
+            days=self.COMPRESS_AFTER_DAYS
+        )
         for archive in archives:
             if archive.suffix != ".gz":
                 mtime = datetime.fromtimestamp(archive.stat().st_mtime)
@@ -180,7 +182,9 @@ Generated by Claude MPM Archive Manager
                     self._compress_archive(archive)

         # Delete very old archives
-        cutoff_delete = datetime.now() - timedelta(days=self.DELETE_AFTER_DAYS)
+        cutoff_delete = datetime.now(timezone.utc) - timedelta(
+            days=self.DELETE_AFTER_DAYS
+        )
         for archive in archives:
             mtime = datetime.fromtimestamp(archive.stat().st_mtime)
             if mtime < cutoff_delete:
@@ -204,7 +208,7 @@ Generated by Claude MPM Archive Manager
         try:
             compressed_path = archive_path.with_suffix(archive_path.suffix + ".gz")

-            with open(archive_path, "rb") as f_in:
+            with archive_path.open("rb") as f_in:
                 with gzip.open(compressed_path, "wb") as f_out:
                     shutil.copyfileobj(f_in, f_out)

@@ -321,7 +325,7 @@ Generated by Claude MPM Archive Manager
             if archive_file.suffix == ".gz":
                 # Decompress first
                 with gzip.open(archive_file, "rb") as f_in:
-                    with open(target_path, "wb") as f_out:
+                    with target_path.open("wb") as f_out:
                         shutil.copyfileobj(f_in, f_out)
             else:
                 shutil.copy2(archive_file, target_path)
@@ -519,7 +523,7 @@ Generated by Claude MPM Archive Manager
     def review_documentation(self, check_git: bool = True) -> Dict:
         """Comprehensive documentation review with outdated detection."""
         report = {
-            "timestamp": datetime.now().isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
             "files_reviewed": {},
             "outdated_sections": [],
             "synchronization_issues": [],
@@ -698,7 +702,7 @@ Generated by Claude MPM Archive Manager
         if changelog_path.exists():
             last_modified = self.get_file_last_modified(changelog_path)
             if last_modified:
-                days_old = (datetime.now() - last_modified).days
+                days_old = (datetime.now(timezone.utc) - last_modified).days
                 if days_old > 30:
                     issues.append(
                         {
@@ -800,7 +804,7 @@ Generated by Claude MPM Archive Manager
         # Check if file hasn't been updated in git for long time
         if file_report.get("last_git_update"):
             last_update = datetime.fromisoformat(file_report["last_git_update"])
-            days_old = (datetime.now() - last_update).days
+            days_old = (datetime.now(timezone.utc) - last_update).days
             if days_old > 90:
                 should_archive = True
                 archive_reason.append(f"No updates for {days_old} days")
@@ -913,7 +917,7 @@ Generated by Claude MPM Archive Manager
         changelog_content = changelog_path.read_text()
         if f"## [{current_version}]" not in changelog_content:
             # Add new version section
-            today = datetime.now().strftime("%Y-%m-%d")
+            today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
             new_section = f"\n## [{current_version}] - {today}\n\n### Added\n\n### Changed\n\n### Fixed\n\n"

             # Insert after header
@@ -981,7 +985,7 @@ Generated by Claude MPM Archive Manager
             last_updated = "Unknown"
             if report.get("last_git_update"):
                 last_date = datetime.fromisoformat(report["last_git_update"])
-                days_ago = (datetime.now() - last_date).days
+                days_ago = (datetime.now(timezone.utc) - last_date).days
                 last_updated = f"{days_ago} days ago"

             table.add_row(filename, status, str(issues), last_updated)
@@ -279,7 +279,7 @@ class DependencyAnalyzerService:
         package_json_path = self.working_directory / "package.json"
         if package_json_path.exists():
             try:
-                with open(package_json_path) as f:
+                with package_json_path.open() as f:
                     package_data = json.load(f)
             except Exception as e:
                 self.logger.warning(f"Error reading package.json: {e}")
@@ -324,7 +324,7 @@ class DependencyAnalyzerService:
     ) -> None:
         """Parse package.json for dependencies."""
         try:
-            with open(path) as f:
+            with path.open() as f:
                 data = json.load(f)

             # Production dependencies
@@ -382,7 +382,7 @@ class DependencyAnalyzerService:
         except ImportError:
             return

-        with open(path, "rb") as f:
+        with path.open("rb") as f:
            data = tomllib.load(f)

        # PEP 621 dependencies
@@ -433,7 +433,7 @@ class DependencyAnalyzerService:
         except ImportError:
             return

-        with open(path, "rb") as f:
+        with path.open("rb") as f:
             data = tomllib.load(f)

         # Production dependencies
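The pyproject.toml and Cargo.toml hunks keep the binary-mode read because tomllib.load() (standard library since Python 3.11) requires a file object opened in binary mode; only the open call changes to the pathlib form. A minimal sketch with a hypothetical TOML file:

    import tomllib  # standard library in Python 3.11+
    from pathlib import Path

    pyproject = Path("/tmp/example-pyproject.toml")  # hypothetical file for illustration
    pyproject.write_text('[project]\nname = "demo"\ndependencies = ["requests"]\n')

    with pyproject.open("rb") as f:   # tomllib.load() requires a binary file object
        data = tomllib.load(f)

    deps = list(data.get("project", {}).get("dependencies", []))
    assert deps == ["requests"]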
@@ -19,7 +19,7 @@ Created: 2025-01-26
 import difflib
 import hashlib
 import re
-from datetime import datetime
+from datetime import datetime, timezone
 from pathlib import Path
 from typing import Dict, List, Tuple

@@ -384,7 +384,7 @@ class DocumentationManager:

     def _add_metadata(self, content: str) -> str:
         """Add metadata to the document."""
-        timestamp = datetime.now().isoformat()
+        timestamp = datetime.now(timezone.utc).isoformat()

         # Check if meta section exists
         if "## 📝 Meta:" not in content and "## Meta:" not in content:
@@ -417,7 +417,7 @@ class DocumentationManager:
     def generate_update_report(self, old_content: str, new_content: str) -> Dict:
         """Generate a report of changes between old and new content."""
         report = {
-            "timestamp": datetime.now().isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
             "changes": [],
             "additions": [],
             "deletions": [],
@@ -547,7 +547,7 @@ class DocumentationManager:

 ## 📝 Meta: Maintaining This Document

-- **Last Updated**: {datetime.now().isoformat()}
+- **Last Updated**: {datetime.now(timezone.utc).isoformat()}
 - **Created By**: Claude MPM /mpm-init
 - **Update Frequency**: As needed when requirements change
 """
@@ -17,7 +17,7 @@ Created: 2025-01-26
 """

 import subprocess
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Dict, List, Optional

@@ -74,7 +74,9 @@ class EnhancedProjectAnalyzer:

     def _get_recent_commits(self, days: int) -> List[Dict]:
         """Get recent commits within specified days."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime(
+            "%Y-%m-%d"
+        )

         # Get commit log with structured format
         output = self._run_git_command(
@@ -107,7 +109,9 @@ class EnhancedProjectAnalyzer:

     def _get_changed_files(self, days: int) -> Dict:
         """Get files changed in recent commits."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime(
+            "%Y-%m-%d"
+        )

         output = self._run_git_command(
             [
@@ -138,7 +142,9 @@ class EnhancedProjectAnalyzer:

     def _get_recently_added_files(self, days: int) -> List[str]:
         """Get files added in recent commits."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime(
+            "%Y-%m-%d"
+        )

         output = self._run_git_command(
             [
@@ -162,7 +168,9 @@ class EnhancedProjectAnalyzer:

     def _get_author_stats(self, days: int) -> Dict:
         """Get author contribution statistics."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime(
+            "%Y-%m-%d"
+        )

         output = self._run_git_command(
             [
@@ -228,7 +236,9 @@ class EnhancedProjectAnalyzer:

     def _get_documentation_changes(self, days: int) -> Dict:
         """Track changes to documentation files."""
-        since_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
+        since_date = (datetime.now(timezone.utc) - timedelta(days=days)).strftime(
+            "%Y-%m-%d"
+        )

         # Get changes to documentation files
         doc_patterns = ["*.md", "*.rst", "*.txt", "docs/*", "README*", "CLAUDE*"]
@@ -448,7 +458,8 @@ class EnhancedProjectAnalyzer:
             )
             if first_commit:
                 age_days = (
-                    datetime.now() - datetime.fromtimestamp(int(first_commit))
+                    datetime.now(timezone.utc)
+                    - datetime.fromtimestamp(int(first_commit))
                 ).days
                 indicators.append(f"{age_days} days old")

@@ -470,7 +481,7 @@ class EnhancedProjectAnalyzer:
         """Generate comprehensive project analysis report."""
         report = {
             "project_path": str(self.project_path),
-            "timestamp": datetime.now().isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
         }

         # Basic project info
@@ -69,7 +69,7 @@ class ProjectRegistry:
             self.registry_dir.mkdir(parents=True, exist_ok=True)
         except Exception as e:
             self.logger.error(f"Failed to create registry directory: {e}")
-            raise ProjectRegistryError(f"Cannot create registry directory: {e}")
+            raise ProjectRegistryError(f"Cannot create registry directory: {e}") from e

     def get_or_create_project_entry(self) -> Dict[str, Any]:
         """
@@ -104,7 +104,7 @@ class ProjectRegistry:

         except Exception as e:
             self.logger.error(f"Failed to get or create project entry: {e}")
-            raise ProjectRegistryError(f"Registry operation failed: {e}")
+            raise ProjectRegistryError(f"Registry operation failed: {e}") from e

     def _find_existing_entry(self) -> Optional[Dict[str, Any]]:
         """
@@ -491,7 +491,7 @@ class ProjectRegistry:
             # Remove internal fields before saving
             save_data = {k: v for k, v in data.items() if not k.startswith("_")}

-            with open(registry_file, "w", encoding="utf-8") as f:
+            with registry_file.open("w", encoding="utf-8") as f:
                 yaml.dump(
                     save_data, f, default_flow_style=False, sort_keys=False, indent=2
                 )
@@ -500,7 +500,7 @@ class ProjectRegistry:

         except Exception as e:
             self.logger.error(f"Failed to save registry data: {e}")
-            raise ProjectRegistryError(f"Failed to save registry: {e}")
+            raise ProjectRegistryError(f"Failed to save registry: {e}") from e

     def list_projects(self) -> List[Dict[str, Any]]:
         """