claude-mpm 4.2.22__py3-none-any.whl → 4.2.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/cli/commands/monitor.py +6 -3
- claude_mpm/services/cli/unified_dashboard_manager.py +27 -13
- claude_mpm/services/monitor/daemon.py +112 -13
- claude_mpm/services/monitor/management/lifecycle.py +182 -0
- claude_mpm/services/monitor/server.py +14 -2
- {claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/METADATA +1 -1
- {claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/RECORD +12 -12
- {claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/WHEEL +0 -0
- {claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/top_level.txt +0 -0
claude_mpm/VERSION
CHANGED
```diff
@@ -1 +1 @@
-4.2.22
+4.2.24
```
claude_mpm/cli/commands/monitor.py
CHANGED
```diff
@@ -91,8 +91,11 @@ class MonitorCommand(BaseCommand):
             host=host, port=port, daemon_mode=daemon_mode
         )
 
+        # Get force restart flag
+        force_restart = getattr(args, "force", False)
+
         # Check if already running
-        if self.daemon.lifecycle.is_running():
+        if self.daemon.lifecycle.is_running() and not force_restart:
             existing_pid = self.daemon.lifecycle.get_pid()
             return CommandResult.success_result(
                 f"Unified monitor daemon already running with PID {existing_pid}",
@@ -103,8 +106,8 @@ class MonitorCommand(BaseCommand):
                 },
             )
 
-        # Start the daemon
-        if self.daemon.start():
+        # Start the daemon (with force restart if specified)
+        if self.daemon.start(force_restart=force_restart):
            # For daemon mode, verify it actually started
            if daemon_mode:
                # Give it a moment to fully initialize
```
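In effect, `claude-mpm monitor start` now skips its "already running" early return when the force flag is set and hands the flag to the daemon. A minimal sketch of the same call path used programmatically, relying only on the constructor and `start()` signature visible in this diff (the `--force` argparse wiring itself is not shown here and is assumed):

```python
from claude_mpm.services.monitor.daemon import UnifiedMonitorDaemon

# Build the daemon the same way the CLI command does (host/port/daemon_mode
# are the constructor arguments shown in the diff above).
daemon = UnifiedMonitorDaemon(host="localhost", port=8765, daemon_mode=True)

# force_restart=True asks the daemon to stop and replace an existing instance
# when the process already bound to the port is recognized as a claude-mpm monitor.
if daemon.start(force_restart=True):
    print(f"Monitor daemon running, PID: {daemon.lifecycle.get_pid()}")
```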
claude_mpm/services/cli/unified_dashboard_manager.py
CHANGED
```diff
@@ -75,7 +75,8 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
         self._lock = threading.Lock()
 
     def start_dashboard(
-        self, port: int = 8765, background: bool = False, open_browser: bool = True
+        self, port: int = 8765, background: bool = False, open_browser: bool = True,
+        force_restart: bool = False
     ) -> Tuple[bool, bool]:
         """
         Start the dashboard using unified daemon.
@@ -84,30 +85,41 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
             port: Port to run dashboard on
             background: Whether to run in background mode
             open_browser: Whether to open browser automatically
+            force_restart: If True, restart existing service if it's ours
 
         Returns:
             Tuple of (success, browser_opened)
         """
         try:
-            #
-
-
+            # Create daemon instance to check service status
+            daemon = UnifiedMonitorDaemon(
+                host="localhost", port=port, daemon_mode=background
+            )
+
+            # Check if it's our service running
+            is_ours, pid = daemon.lifecycle.is_our_service("localhost")
+
+            if is_ours and not force_restart:
+                # Our service is already running, just open browser if needed
+                self.logger.info(f"Our dashboard already running on port {port} (PID: {pid})")
                 browser_opened = False
                 if open_browser:
                     browser_opened = self.open_browser(self.get_dashboard_url(port))
                 return True, browser_opened
+            elif is_ours and force_restart:
+                self.logger.info(f"Force restarting our dashboard on port {port} (PID: {pid})")
+            elif self.is_dashboard_running(port) and not force_restart:
+                # Different service is using the port
+                self.logger.warning(f"Port {port} is in use by a different service")
+                return False, False
 
             self.logger.info(
-                f"Starting unified dashboard on port {port} (background: {background})"
+                f"Starting unified dashboard on port {port} (background: {background}, force_restart: {force_restart})"
             )
 
             if background:
-                # Start daemon in background mode
-                daemon = UnifiedMonitorDaemon(
-                    host="localhost", port=port, daemon_mode=True
-                )
-
-                success = daemon.start()
+                # Start daemon in background mode with force restart if needed
+                success = daemon.start(force_restart=force_restart)
                 if success:
                     with self._lock:
                         self._background_daemons[port] = daemon
@@ -293,7 +305,7 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
         return self.port_manager.find_available_port(preferred_port)
 
     def start_server(
-        self, port: Optional[int] = None, timeout: int = 30
+        self, port: Optional[int] = None, timeout: int = 30, force_restart: bool = True
     ) -> Tuple[bool, DashboardInfo]:
         """
         Start the server (compatibility method for SocketIOManager interface).
@@ -301,6 +313,7 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
         Args:
             port: Port to use (finds available if None)
             timeout: Timeout for startup
+            force_restart: If True, restart existing service if it's ours
 
         Returns:
             Tuple of (success, DashboardInfo)
@@ -308,8 +321,9 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
         if port is None:
             port = self.find_available_port()
 
+        # Use force_restart to ensure we're using the latest code
         success, browser_opened = self.start_dashboard(
-            port=port, background=True, open_browser=False
+            port=port, background=True, open_browser=False, force_restart=force_restart
        )
 
         if success:
```
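Callers that go through the manager rather than the daemon get the same knob on both entry points. A short usage sketch, assuming an already constructed `UnifiedDashboardManager` instance named `manager` (its construction is unchanged and not part of this diff):

```python
# Reuse a dashboard that is already ours; just report whether the browser opened.
ok, browser_opened = manager.start_dashboard(port=8765, open_browser=True)

# Stop and relaunch our own dashboard, e.g. after upgrading the package.
ok, _ = manager.start_dashboard(port=8765, background=True, force_restart=True)

# start_server() now defaults to force_restart=True, so existing callers pick up
# the restart-on-our-own-service behavior without any code changes.
ok, info = manager.start_server(port=8765)
```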
claude_mpm/services/monitor/daemon.py
CHANGED
```diff
@@ -81,29 +81,80 @@ class UnifiedMonitorDaemon:
         claude_mpm_dir.mkdir(exist_ok=True)
         return str(claude_mpm_dir / "monitor-daemon.pid")
 
-    def start(self) -> bool:
+    def start(self, force_restart: bool = False) -> bool:
         """Start the unified monitor daemon.
 
+        Args:
+            force_restart: If True, restart existing service if it's ours
+
         Returns:
             True if started successfully, False otherwise
         """
         try:
             if self.daemon_mode:
-                return self._start_daemon()
-            return self._start_foreground()
+                return self._start_daemon(force_restart=force_restart)
+            return self._start_foreground(force_restart=force_restart)
         except Exception as e:
             self.logger.error(f"Failed to start unified monitor daemon: {e}")
             return False
 
-    def _start_daemon(self) -> bool:
-        """Start as background daemon process."""
+    def _start_daemon(self, force_restart: bool = False) -> bool:
+        """Start as background daemon process.
+
+        Args:
+            force_restart: If True, restart existing service if it's ours
+        """
         self.logger.info("Starting unified monitor daemon in background mode")
 
         # Check if already running
         if self.lifecycle.is_running():
             existing_pid = self.lifecycle.get_pid()
-
-
+
+            if force_restart:
+                # Check if it's our service
+                self.logger.debug(f"Checking if existing daemon (PID: {existing_pid}) is our service...")
+                is_ours, detected_pid = self.lifecycle.is_our_service(self.host)
+
+                if is_ours:
+                    self.logger.info(f"Force restarting our existing claude-mpm monitor daemon (PID: {detected_pid or existing_pid})")
+                    # Stop the existing daemon
+                    if self.lifecycle.stop_daemon():
+                        # Wait a moment for port to be released
+                        time.sleep(2)
+                    else:
+                        self.logger.error("Failed to stop existing daemon for restart")
+                        return False
+                else:
+                    self.logger.warning(f"Port {self.port} is in use by another service (PID: {existing_pid}). Cannot force restart.")
+                    self.logger.info("To restart the claude-mpm monitor, first stop the other service or use a different port.")
+                    return False
+            else:
+                self.logger.warning(f"Daemon already running with PID {existing_pid}")
+                return False
+
+        # Check for orphaned processes (service running but no PID file)
+        elif force_restart:
+            self.logger.debug("No PID file found, checking for orphaned claude-mpm service...")
+            is_ours, pid = self.lifecycle.is_our_service(self.host)
+            if is_ours and pid:
+                self.logger.info(f"Found orphaned claude-mpm monitor service (PID: {pid}), force restarting")
+                # Try to kill the orphaned process
+                try:
+                    os.kill(pid, signal.SIGTERM)
+                    # Wait for it to exit
+                    for _ in range(10):
+                        try:
+                            os.kill(pid, 0)  # Check if still exists
+                            time.sleep(0.5)
+                        except ProcessLookupError:
+                            break
+                    else:
+                        # Force kill if still running
+                        os.kill(pid, signal.SIGKILL)
+                        time.sleep(1)
+                except Exception as e:
+                    self.logger.error(f"Failed to kill orphaned process: {e}")
+                    return False
 
         # Verify port is available before forking
         port_available, error_msg = self.lifecycle.verify_port_available(self.host)
@@ -133,17 +184,65 @@
             self.lifecycle._report_startup_error(f"Server startup exception: {e}")
             raise
 
-    def _start_foreground(self) -> bool:
-        """Start in foreground mode."""
+    def _start_foreground(self, force_restart: bool = False) -> bool:
+        """Start in foreground mode.
+
+        Args:
+            force_restart: If True, restart existing service if it's ours
+        """
         self.logger.info(f"Starting unified monitor daemon on {self.host}:{self.port}")
 
         # Check if already running (check PID file even in foreground mode)
         if self.lifecycle.is_running():
             existing_pid = self.lifecycle.get_pid()
-
-
-
-
+
+            if force_restart:
+                # Check if it's our service
+                self.logger.debug(f"Checking if existing daemon (PID: {existing_pid}) is our service...")
+                is_ours, detected_pid = self.lifecycle.is_our_service(self.host)
+
+                if is_ours:
+                    self.logger.info(f"Force restarting our existing claude-mpm monitor daemon (PID: {detected_pid or existing_pid})")
+                    # Stop the existing daemon
+                    if self.lifecycle.stop_daemon():
+                        # Wait a moment for port to be released
+                        time.sleep(2)
+                    else:
+                        self.logger.error("Failed to stop existing daemon for restart")
+                        return False
+                else:
+                    self.logger.warning(f"Port {self.port} is in use by another service (PID: {existing_pid}). Cannot force restart.")
+                    self.logger.info("To restart the claude-mpm monitor, first stop the other service or use a different port.")
+                    return False
+            else:
+                self.logger.warning(
+                    f"Monitor daemon already running with PID {existing_pid}"
+                )
+                return False
+
+        # Check for orphaned processes (service running but no PID file)
+        elif force_restart:
+            self.logger.debug("No PID file found, checking for orphaned claude-mpm service...")
+            is_ours, pid = self.lifecycle.is_our_service(self.host)
+            if is_ours and pid:
+                self.logger.info(f"Found orphaned claude-mpm monitor service (PID: {pid}), force restarting")
+                # Try to kill the orphaned process
+                try:
+                    os.kill(pid, signal.SIGTERM)
+                    # Wait for it to exit
+                    for _ in range(10):
+                        try:
+                            os.kill(pid, 0)  # Check if still exists
+                            time.sleep(0.5)
+                        except ProcessLookupError:
+                            break
+                    else:
+                        # Force kill if still running
+                        os.kill(pid, signal.SIGKILL)
+                        time.sleep(1)
+                except Exception as e:
+                    self.logger.error(f"Failed to kill orphaned process: {e}")
+                    return False
 
         # Setup signal handlers for graceful shutdown
         self._setup_signal_handlers()
```
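The orphan cleanup added to both `_start_daemon()` and `_start_foreground()` is the usual SIGTERM-then-SIGKILL pattern: ask the process to exit, poll it with signal 0, and escalate only if it is still alive. A self-contained sketch of that technique (not the shipped code, which inlines it as shown above):

```python
import os
import signal
import time


def terminate_process(pid: int, grace_period: float = 5.0) -> bool:
    """Stop `pid` politely, escalating to SIGKILL if SIGTERM is ignored."""
    try:
        os.kill(pid, signal.SIGTERM)          # ask nicely first
        deadline = time.monotonic() + grace_period
        while time.monotonic() < deadline:
            try:
                os.kill(pid, 0)               # signal 0 only checks existence
            except ProcessLookupError:
                return True                   # it exited on its own
            time.sleep(0.5)
        os.kill(pid, signal.SIGKILL)          # still alive: force it
        return True
    except ProcessLookupError:
        return True                           # already gone
    except PermissionError:
        return False                          # not our process to signal
```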
claude_mpm/services/monitor/management/lifecycle.py
CHANGED
```diff
@@ -21,6 +21,7 @@ import tempfile
 import time
 from pathlib import Path
 from typing import Optional, Tuple
+import json
 
 from ....core.logging_config import get_logger
 
@@ -497,3 +498,184 @@ class DaemonLifecycle:
         except OSError as e:
             error_msg = f"Port {self.port} is already in use or cannot be bound: {e}"
             return False, error_msg
+
+    def is_our_service(self, host: str = "localhost") -> Tuple[bool, Optional[int]]:
+        """Check if the service on the port is our Socket.IO service.
+
+        This uses multiple detection methods:
+        1. Check health endpoint for service signature
+        2. Check Socket.IO namespace availability
+        3. Check process ownership if PID file exists
+
+        Args:
+            host: Host to check
+
+        Returns:
+            Tuple of (is_ours, pid_if_found)
+        """
+        self.logger.debug(f"Checking if service on {host}:{self.port} is ours")
+
+        try:
+            # Method 1: Check health endpoint
+            import urllib.request
+            import urllib.error
+
+            health_url = f"http://{host}:{self.port}/health"
+            self.logger.debug(f"Checking health endpoint: {health_url}")
+
+            try:
+                req = urllib.request.Request(health_url)
+                req.add_header('User-Agent', 'claude-mpm-monitor')
+
+                with urllib.request.urlopen(req, timeout=3) as response:
+                    if response.status == 200:
+                        data = json.loads(response.read().decode())
+                        self.logger.debug(f"Health endpoint response: {data}")
+
+                        # Check for our service signature
+                        service_name = data.get("service")
+                        if service_name == "claude-mpm-monitor":
+                            # Try to get PID from response
+                            pid = data.get("pid")
+                            if pid:
+                                self.logger.info(f"Found our claude-mpm-monitor service via health endpoint, PID: {pid}")
+                                return True, pid
+                            else:
+                                # Service is ours but no PID in response
+                                # Try to get from PID file
+                                file_pid = self.get_pid()
+                                self.logger.info(f"Found our claude-mpm-monitor service via health endpoint, PID from file: {file_pid}")
+                                return True, file_pid
+                        else:
+                            self.logger.debug(f"Service name '{service_name}' does not match 'claude-mpm-monitor'")
+
+            except urllib.error.URLError as e:
+                self.logger.debug(f"Health endpoint not accessible: {e}")
+            except urllib.error.HTTPError as e:
+                self.logger.debug(f"Health endpoint HTTP error: {e}")
+            except json.JSONDecodeError as e:
+                self.logger.debug(f"Health endpoint invalid JSON: {e}")
+            except Exception as e:
+                self.logger.debug(f"Health endpoint check failed: {e}")
+
+            # Method 2: Check if PID file exists and process matches
+            pid = self.get_pid()
+            if pid:
+                self.logger.debug(f"Checking PID from file: {pid}")
+                try:
+                    # Check if process exists
+                    os.kill(pid, 0)
+                    self.logger.debug(f"Process {pid} exists")
+
+                    # Process exists, check if it's using our port
+                    # This requires psutil for accurate port checking
+                    try:
+                        import psutil
+                        process = psutil.Process(pid)
+
+                        # Check process command line for our service
+                        cmdline = ' '.join(process.cmdline())
+                        if 'claude_mpm' in cmdline or 'claude-mpm' in cmdline:
+                            if 'monitor' in cmdline:
+                                self.logger.info(f"Found our claude-mpm monitor process via PID file, PID: {pid}")
+                                return True, pid
+
+                        # Also check if it's listening on our port
+                        connections = process.connections()
+                        for conn in connections:
+                            if conn.laddr.port == self.port and conn.status == 'LISTEN':
+                                self.logger.info(f"Found process {pid} listening on our port {self.port}")
+                                # Double-check it's a Python process (likely ours)
+                                if 'python' in process.name().lower():
+                                    self.logger.info(f"Confirmed as Python process, assuming it's our service")
+                                    return True, pid
+
+                    except ImportError:
+                        # psutil not available, but we have a PID file and process exists
+                        # Assume it's ours since we manage the PID file
+                        self.logger.info(f"Found process with our PID file: {pid}, assuming it's ours (psutil not available)")
+                        return True, pid
+                    except psutil.NoSuchProcess:
+                        self.logger.debug(f"Process {pid} no longer exists")
+                    except psutil.AccessDenied:
+                        # Can't access process info, but it exists - likely ours
+                        self.logger.info(f"Process {pid} exists but access denied, assuming it's ours")
+                        return True, pid
+                    except Exception as e:
+                        self.logger.debug(f"Error checking process {pid}: {e}")
+
+                except (OSError, ProcessLookupError):
+                    # Process doesn't exist
+                    self.logger.debug(f"Process {pid} does not exist")
+                    self._cleanup_stale_pid_file()
+
+            # Method 3: Try Socket.IO connection to check namespace
+            try:
+                import socketio
+                sio_client = socketio.Client()
+
+                # Try to connect with a short timeout
+                connected = False
+                def on_connect():
+                    nonlocal connected
+                    connected = True
+
+                sio_client.on('connect', on_connect)
+
+                try:
+                    sio_client.connect(f'http://{host}:{self.port}', wait_timeout=2)
+                    if connected:
+                        # Successfully connected to Socket.IO
+                        sio_client.disconnect()
+
+                        # Check for orphaned process (no PID file but service running)
+                        try:
+                            # Try to find process using the port
+                            import psutil
+                            for proc in psutil.process_iter(['pid', 'name']):
+                                try:
+                                    for conn in proc.connections():
+                                        if conn.laddr.port == self.port and conn.status == 'LISTEN':
+                                            # Found process listening on our port
+                                            if 'python' in proc.name().lower():
+                                                self.logger.debug(f"Found likely orphaned claude-mpm service on port {self.port}, PID: {proc.pid}")
+                                                return True, proc.pid
+                                except (psutil.NoSuchProcess, psutil.AccessDenied):
+                                    continue
+                        except ImportError:
+                            pass
+
+                        # Socket.IO service exists but can't determine if it's ours
+                        self.logger.debug(f"Found Socket.IO service on port {self.port}, but cannot confirm ownership")
+                        return False, None
+
+                except Exception:
+                    pass
+                finally:
+                    if sio_client.connected:
+                        sio_client.disconnect()
+
+            except ImportError:
+                # socketio not available
+                pass
+            except Exception as e:
+                self.logger.debug(f"Error checking Socket.IO connection: {e}")
+
+            # Method 4: Final fallback - if we have a PID file and can't definitively say it's NOT ours
+            # This handles edge cases where the health endpoint might be temporarily unavailable
+            if pid and self.pid_file.exists():
+                try:
+                    # One more check - see if process exists
+                    os.kill(pid, 0)
+                    self.logger.info(f"PID file exists with valid process {pid}, assuming it's our stale service")
+                    return True, pid
+                except (OSError, ProcessLookupError):
+                    pass
+
+            # No service detected or not ours
+            self.logger.debug("Service not detected as ours")
+            return False, None
+
+        except Exception as e:
+            self.logger.error(f"Error checking if service is ours: {e}", exc_info=True)
+            return False, None
```
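The cheapest of these detection methods is the health-endpoint probe: an HTTP GET against `/health` and a comparison of the returned `service` field with the `claude-mpm-monitor` signature. A stripped-down, standard-library-only sketch of just that first step (mirroring Method 1 above, not a drop-in replacement for `is_our_service()`):

```python
import json
import urllib.error
import urllib.request
from typing import Optional, Tuple


def probe_health(host: str, port: int) -> Tuple[bool, Optional[int]]:
    """Return (is_claude_mpm_monitor, pid) based on the /health endpoint alone."""
    url = f"http://{host}:{port}/health"
    try:
        req = urllib.request.Request(url, headers={"User-Agent": "claude-mpm-monitor"})
        with urllib.request.urlopen(req, timeout=3) as resp:
            if resp.status != 200:
                return False, None
            data = json.loads(resp.read().decode())
    except (urllib.error.URLError, json.JSONDecodeError, OSError):
        return False, None
    if data.get("service") == "claude-mpm-monitor":
        return True, data.get("pid")
    return False, None
```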
claude_mpm/services/monitor/server.py
CHANGED
```diff
@@ -15,6 +15,7 @@ DESIGN DECISIONS:
 """
 
 import asyncio
+import os
 import threading
 import time
 from datetime import datetime
@@ -308,12 +309,23 @@ class UnifiedMonitorServer:
 
         # Health check
         async def health_check(request):
+            # Get version from VERSION file
+            version = "1.0.0"
+            try:
+                version_file = Path(__file__).parent.parent.parent.parent.parent / "VERSION"
+                if version_file.exists():
+                    version = version_file.read_text().strip()
+            except Exception:
+                pass
+
             return web.json_response(
                 {
                     "status": "healthy",
-                    "service": "
-                    "version":
+                    "service": "claude-mpm-monitor",  # Important: must match what is_our_service() checks
+                    "version": version,
                     "port": self.port,
+                    "pid": os.getpid(),
+                    "uptime": int(time.time() - self.server_start_time),
                 }
             )
 
```
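With those fields in place, a running monitor identifies itself unambiguously to `is_our_service()`. An illustrative response body (values are examples, not captured output):

```python
# GET http://localhost:8765/health
{
    "status": "healthy",
    "service": "claude-mpm-monitor",
    "version": "4.2.24",
    "port": 8765,
    "pid": 12345,
    "uptime": 42,
}
```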
{claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/RECORD
CHANGED
```diff
@@ -1,5 +1,5 @@
 claude_mpm/BUILD_NUMBER,sha256=toytnNjkIKPgQaGwDqQdC1rpNTAdSEc6Vja50d7Ovug,4
-claude_mpm/VERSION,sha256=
+claude_mpm/VERSION,sha256=U4vIe-sZ-5Iumq-KW3abswP9PknmnEd-RswDftmED1g,7
 claude_mpm/__init__.py,sha256=lyTZAYGH4DTaFGLRNWJKk5Q5oTjzN5I6AXmfVX-Jff0,1512
 claude_mpm/__main__.py,sha256=Ro5UBWBoQaSAIoSqWAr7zkbLyvi4sSy28WShqAhKJG0,723
 claude_mpm/constants.py,sha256=I946iCQzIIPRZVVJ8aO7lA4euiyDnNw2IX7EelAOkIE,5915
@@ -83,7 +83,7 @@ claude_mpm/cli/commands/mcp_pipx_config.py,sha256=sE62VD6Q1CcO2k1nlbIhHMfAJFQTZf
 claude_mpm/cli/commands/mcp_server_commands.py,sha256=-1G_2Y5ScTvzDd-kY8fTAao2H6FH7DnsLimleF1rVqQ,6197
 claude_mpm/cli/commands/mcp_tool_commands.py,sha256=q17GzlFT3JiLTrDqwPO2tz1-fKmPO5QU449syTnKTz4,1283
 claude_mpm/cli/commands/memory.py,sha256=Yzfs3_oiKciv3sfOoDm2lJL4M9idG7ARV3-sNw1ge_g,26186
-claude_mpm/cli/commands/monitor.py,sha256=
+claude_mpm/cli/commands/monitor.py,sha256=K8TNtOsdsFgzr9VePELxFnNqZOGhL5a7XFbgOpNYq0g,9621
 claude_mpm/cli/commands/mpm_init.py,sha256=lO7N91ZHn_n18XbchUUcYoyme7L5NLcXVnhWm5F_Gq8,22367
 claude_mpm/cli/commands/mpm_init_handler.py,sha256=-pCB0XL3KipqGtnta8CC7Lg5TPMwstEhMFBcgF4aaa4,2919
 claude_mpm/cli/commands/run.py,sha256=qS3eolLiDrE8EXLQJioB6kL1ONr_l0c3OE3qMUJCqbA,43489
@@ -432,7 +432,7 @@ claude_mpm/services/cli/memory_crud_service.py,sha256=ciN9Pl_12iDAqF9zPBWOzu-iXi
 claude_mpm/services/cli/memory_output_formatter.py,sha256=nbf7VsjGvH4e9fLv9c7PzjuO9COZhbK5P2fNZ79055w,24783
 claude_mpm/services/cli/session_manager.py,sha256=rla_Stbcvt93wa9G9MCMu9UqB3FLGqlPt_eN5lQb3Gg,16599
 claude_mpm/services/cli/startup_checker.py,sha256=efhuvu8ns5G16jcQ0nQZKVddmD2AktUEdlvjNcXjAuk,12232
-claude_mpm/services/cli/unified_dashboard_manager.py,sha256=
+claude_mpm/services/cli/unified_dashboard_manager.py,sha256=3IRum9HH6IA4UQtTzD5l6rCngfxfqeqfTGzpKIpmZd8,12607
 claude_mpm/services/communication/__init__.py,sha256=b4qc7_Rqy4DE9q7BAUlfUZjoYG4uimAyUnE0irPcXyU,560
 claude_mpm/services/core/__init__.py,sha256=evEayLlBqJvxMZhrhuK6aagXmNrKGSj8Jm9OOxKzqvU,2195
 claude_mpm/services/core/base.py,sha256=iA-F7DgGp-FJIMvQTiHQ68RkG_k-AtUWlArJPMw6ZPk,7297
@@ -548,9 +548,9 @@ claude_mpm/services/memory/cache/__init__.py,sha256=6M6-P8ParyxX8vOgp_IxHgLMvacr
 claude_mpm/services/memory/cache/shared_prompt_cache.py,sha256=crnYPUT8zcS7TvoE1vW7pyaf4T77N5rJ1wUf_YQ2vvo,28704
 claude_mpm/services/memory/cache/simple_cache.py,sha256=qsTjbcsPxj-kNfaod9VN_uE5NioIwpfkUin_mMVUJCg,10218
 claude_mpm/services/monitor/__init__.py,sha256=X7gxSLUm9Fg_zEsX6LtCHP2ipF0qj6Emkun20h2So7g,745
-claude_mpm/services/monitor/daemon.py,sha256=
+claude_mpm/services/monitor/daemon.py,sha256=9Cllm-jtVA85_qX1Z3TF-OwB5QZDzSfIvAcKhVQ65G8,21829
 claude_mpm/services/monitor/event_emitter.py,sha256=JzRLNg8PUJ5s3ulNnq_D4yqCPItvidJzu8DmFxriieQ,12224
-claude_mpm/services/monitor/server.py,sha256=
+claude_mpm/services/monitor/server.py,sha256=2-xLo14qFBZf5MDYwBxBhTrFCNnbu3tOS3eEu8vyddc,28476
 claude_mpm/services/monitor/handlers/__init__.py,sha256=jgPIf4IJVERm_tAeD9834tfx9IcxtlHj5r9rhEWpkfM,701
 claude_mpm/services/monitor/handlers/code_analysis.py,sha256=mHyI27Wp6WVmUBc0m0i991ogyFZBTvkrfR7Kf3EAk5U,11474
 claude_mpm/services/monitor/handlers/dashboard.py,sha256=uGBhb-6RG6u4WLipUXgdx7RCW-vb_qek5dIfHIwAC7o,9805
@@ -558,7 +558,7 @@ claude_mpm/services/monitor/handlers/file.py,sha256=p3C4wffl0GIcN00b-KkrmZ8F-Amd
 claude_mpm/services/monitor/handlers/hooks.py,sha256=dlrmyFu8WChlvn6-sND9DLjSbm5nrMfNZrAgoWN-2No,17582
 claude_mpm/services/monitor/management/__init__.py,sha256=mxaEFRgvvgV85gUpXu_DsnHtywihdP14EisvISAVZuQ,525
 claude_mpm/services/monitor/management/health.py,sha256=Wm92Cli_4cWD6B89KX_CdpAvvevuEaGB8Ah59ILhFww,3772
-claude_mpm/services/monitor/management/lifecycle.py,sha256=
+claude_mpm/services/monitor/management/lifecycle.py,sha256=EClf8CK-kCLVb2iRo9uiGI2CAHFrii1Qe1i8xuWnhG4,27037
 claude_mpm/services/project/__init__.py,sha256=IUclN1L7ChHCNya7PJiVxu4nttxsrj3WRIpwyA1A_hw,512
 claude_mpm/services/project/analyzer.py,sha256=VHlLrP8-S5gr12w4Yzs7-6d7LWdJKISHPCFSG7SDiQU,38434
 claude_mpm/services/project/analyzer_refactored.py,sha256=USYEdPAhSoGPqZCpaT89Dw6ElFW_L1yXSURheQjAhLA,18243
@@ -639,9 +639,9 @@ claude_mpm/utils/subprocess_utils.py,sha256=zgiwLqh_17WxHpySvUPH65pb4bzIeUGOAYUJ
 claude_mpm/validation/__init__.py,sha256=YZhwE3mhit-lslvRLuwfX82xJ_k4haZeKmh4IWaVwtk,156
 claude_mpm/validation/agent_validator.py,sha256=3Lo6LK-Mw9IdnL_bd3zl_R6FkgSVDYKUUM7EeVVD3jc,20865
 claude_mpm/validation/frontmatter_validator.py,sha256=u8g4Eyd_9O6ugj7Un47oSGh3kqv4wMkuks2i_CtWRvM,7028
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
+claude_mpm-4.2.24.dist-info/licenses/LICENSE,sha256=lpaivOlPuBZW1ds05uQLJJswy8Rp_HMNieJEbFlqvLk,1072
+claude_mpm-4.2.24.dist-info/METADATA,sha256=gBePVzfdnH07tGtxu8dnaQwimTu90EqPI0hcpdIZnjM,14451
+claude_mpm-4.2.24.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+claude_mpm-4.2.24.dist-info/entry_points.txt,sha256=FDPZgz8JOvD-6iuXY2l9Zbo9zYVRuE4uz4Qr0vLeGOk,471
+claude_mpm-4.2.24.dist-info/top_level.txt,sha256=1nUg3FEaBySgm8t-s54jK5zoPnu3_eY6EP6IOlekyHA,11
+claude_mpm-4.2.24.dist-info/RECORD,,
```
{claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/WHEEL
File without changes
{claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/entry_points.txt
File without changes
{claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/licenses/LICENSE
File without changes
{claude_mpm-4.2.22.dist-info → claude_mpm-4.2.24.dist-info}/top_level.txt
File without changes