claude-mpm 4.2.25-py3-none-any.whl → 4.2.27-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/cli/commands/monitor.py +1 -1
- claude_mpm/scripts/claude-hook-handler.sh +33 -7
- claude_mpm/services/cli/unified_dashboard_manager.py +111 -5
- claude_mpm/services/monitor/daemon.py +107 -5
- {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/METADATA +1 -1
- {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/RECORD +11 -11
- {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/WHEEL +0 -0
- {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/top_level.txt +0 -0
claude_mpm/VERSION CHANGED

```diff
@@ -1 +1 @@
-4.2.25
+4.2.27
```
claude_mpm/cli/commands/monitor.py CHANGED

```diff
@@ -151,7 +151,7 @@ class MonitorCommand(BaseCommand):
 
     def _stop_monitor(self, args) -> CommandResult:
         """Stop the unified monitor daemon."""
-
+        # Don't log here - the daemon will log when it stops
 
         # Get parameters from args or use defaults
         port = getattr(args, "port", None)
```
claude_mpm/scripts/claude-hook-handler.sh CHANGED

```diff
@@ -61,9 +61,20 @@ set -e
 # Get the directory where this script is located
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 
-# Determine the claude-mpm root
-#
-
+# Determine the claude-mpm root based on installation type
+# Check if we're in a pipx installation
+if [[ "$SCRIPT_DIR" == *"/.local/pipx/venvs/claude-mpm/"* ]]; then
+    # pipx installation - script is at lib/python*/site-packages/claude_mpm/scripts/
+    # The venv root is what we need for Python detection
+    CLAUDE_MPM_ROOT="$(echo "$SCRIPT_DIR" | sed 's|/lib/python.*/site-packages/.*||')"
+elif [[ "$SCRIPT_DIR" == *"/site-packages/claude_mpm/scripts"* ]]; then
+    # Regular pip installation - script is in site-packages
+    # Use the Python environment root
+    CLAUDE_MPM_ROOT="$(python3 -c 'import sys; print(sys.prefix)')"
+else
+    # Development installation - script is at src/claude_mpm/scripts/, so we go up 3 levels
+    CLAUDE_MPM_ROOT="$(cd "$SCRIPT_DIR/../../.." 2>/dev/null && pwd || echo "$SCRIPT_DIR")"
+fi
 
 # Debug logging (can be enabled via environment variable)
 if [ "${CLAUDE_MPM_HOOK_DEBUG}" = "true" ]; then
@@ -102,7 +113,16 @@ fi
 # Absolute path to Python executable with claude-mpm dependencies
 #
 find_python_command() {
-    # 1. Check
+    # 1. Check if we're in a pipx installation first
+    if [[ "$SCRIPT_DIR" == *"/.local/pipx/venvs/claude-mpm/"* ]]; then
+        # pipx installation - use the pipx venv's Python directly
+        if [ -f "$CLAUDE_MPM_ROOT/bin/python" ]; then
+            echo "$CLAUDE_MPM_ROOT/bin/python"
+            return
+        fi
+    fi
+
+    # 2. Check for project-local virtual environment (common in development)
     if [ -f "$CLAUDE_MPM_ROOT/venv/bin/activate" ]; then
         source "$CLAUDE_MPM_ROOT/venv/bin/activate"
         echo "$CLAUDE_MPM_ROOT/venv/bin/python"
@@ -122,8 +142,14 @@ find_python_command() {
 # Set up Python command
 PYTHON_CMD=$(find_python_command)
 
-# Check
-if [
+# Check installation type and set PYTHONPATH accordingly
+if [[ "$SCRIPT_DIR" == *"/.local/pipx/venvs/claude-mpm/"* ]]; then
+    # pipx installation - claude_mpm is already in the venv's site-packages
+    # No need to modify PYTHONPATH
+    if [ "${CLAUDE_MPM_HOOK_DEBUG}" = "true" ]; then
+        echo "[$(date -u +%Y-%m-%dT%H:%M:%S.%3NZ)] pipx installation detected" >> /tmp/claude-mpm-hook.log
+    fi
+elif [ -d "$CLAUDE_MPM_ROOT/src" ]; then
     # Development install - add src to PYTHONPATH
     export PYTHONPATH="$CLAUDE_MPM_ROOT/src:$PYTHONPATH"
 
@@ -131,7 +157,7 @@ if [ -d "$CLAUDE_MPM_ROOT/src" ]
         echo "[$(date -u +%Y-%m-%dT%H:%M:%S.%3NZ)] Development environment detected" >> /tmp/claude-mpm-hook.log
     fi
 else
-    #
+    # Regular pip install - claude_mpm should be in site-packages
    # No need to modify PYTHONPATH
     if [ "${CLAUDE_MPM_HOOK_DEBUG}" = "true" ]; then
         echo "[$(date -u +%Y-%m-%dT%H:%M:%S.%3NZ)] Pip installation detected" >> /tmp/claude-mpm-hook.log
```
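The three-way branch above (pipx, pip, development checkout) keys entirely off the script's own path. For readers who prefer Python, here is the same heuristic as a minimal sketch; it is an illustration of the shell logic, not code from the package, and `detect_install_root` is a hypothetical name:

```python
import sys
from pathlib import Path

def detect_install_root(script_dir: Path) -> Path:
    """Mirror of claude-hook-handler.sh's root detection (illustrative only)."""
    p = str(script_dir)
    if "/.local/pipx/venvs/claude-mpm/" in p:
        # pipx: strip /lib/python*/site-packages/... to recover the venv root
        return Path(p.split("/lib/python", 1)[0])
    if "/site-packages/claude_mpm/scripts" in p:
        # regular pip install: the interpreter's prefix is the environment root
        return Path(sys.prefix)
    # development checkout: src/claude_mpm/scripts -> repo root is three levels up
    return script_dir.parents[2] if len(script_dir.parents) >= 3 else script_dir
```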
claude_mpm/services/cli/unified_dashboard_manager.py CHANGED

```diff
@@ -115,18 +115,46 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
             self.logger.info(
                 f"Force restarting our dashboard on port {port} (PID: {pid})"
             )
+            # Clean up the existing service before restart
+            self._cleanup_port_conflicts(port)
         elif self.is_dashboard_running(port) and not force_restart:
-            # Different service is using the port
-            self.logger.warning(f"Port {port} is in use by a different service")
-
+            # Different service is using the port - try to clean it up
+            self.logger.warning(f"Port {port} is in use by a different service, attempting cleanup")
+            self._cleanup_port_conflicts(port)
+            # Brief pause to ensure cleanup is complete
+            import time
+            time.sleep(1)
 
         self.logger.info(
             f"Starting unified dashboard on port {port} (background: {background}, force_restart: {force_restart})"
         )
 
         if background:
-            #
-
+            # Try to start daemon with retry on port conflicts
+            max_retries = 3
+            retry_count = 0
+            success = False
+
+            while retry_count < max_retries and not success:
+                if retry_count > 0:
+                    self.logger.info(f"Retry {retry_count}/{max_retries}: Cleaning up port {port}")
+                    self._cleanup_port_conflicts(port)
+                    time.sleep(2)  # Wait for cleanup to complete
+
+                # Start daemon in background mode with force restart if needed
+                success = daemon.start(force_restart=force_restart or retry_count > 0)
+
+                if not success and retry_count < max_retries - 1:
+                    # Check if it's a port conflict
+                    if not self.port_manager.is_port_available(port):
+                        self.logger.warning(f"Port {port} still in use, will retry cleanup")
+                        retry_count += 1
+                    else:
+                        # Different kind of failure, don't retry
+                        break
+                else:
+                    break
+
             if success:
                 with self._lock:
                     self._background_daemons[port] = daemon
@@ -311,6 +339,84 @@ class UnifiedDashboardManager(IUnifiedDashboardManager):
         """
         return self.port_manager.find_available_port(preferred_port)
 
+    def _cleanup_port_conflicts(self, port: int) -> bool:
+        """
+        Try to clean up any processes using our port.
+
+        Args:
+            port: Port to clean up
+
+        Returns:
+            True if cleanup was successful or not needed
+        """
+        try:
+            import subprocess
+            import signal
+            import time
+
+            # Find processes using the port
+            result = subprocess.run(
+                ["lsof", "-ti", f":{port}"],
+                capture_output=True,
+                text=True
+            )
+
+            if result.returncode == 0 and result.stdout.strip():
+                pids = result.stdout.strip().split('\n')
+                self.logger.info(f"Found processes using port {port}: {pids}")
+
+                for pid_str in pids:
+                    try:
+                        pid = int(pid_str.strip())
+                        # Try graceful termination first
+                        import os
+                        os.kill(pid, signal.SIGTERM)
+                        self.logger.info(f"Sent SIGTERM to process {pid}")
+                    except (ValueError, ProcessLookupError) as e:
+                        self.logger.debug(f"Could not terminate process {pid_str}: {e}")
+                        continue
+
+                # Give processes time to shut down gracefully
+                time.sleep(3)
+
+                # Check if port is still in use and force kill if needed
+                result = subprocess.run(
+                    ["lsof", "-ti", f":{port}"],
+                    capture_output=True,
+                    text=True
+                )
+
+                if result.returncode == 0 and result.stdout.strip():
+                    remaining_pids = result.stdout.strip().split('\n')
+                    self.logger.warning(f"Processes still using port {port}: {remaining_pids}, force killing")
+
+                    for pid_str in remaining_pids:
+                        try:
+                            pid = int(pid_str.strip())
+                            os.kill(pid, signal.SIGKILL)
+                            self.logger.info(f"Force killed process {pid}")
+                        except (ValueError, ProcessLookupError) as e:
+                            self.logger.debug(f"Could not force kill process {pid_str}: {e}")
+                            continue
+
+                    # Brief pause after force kill to ensure port is released
+                    time.sleep(2)
+
+                self.logger.info(f"Successfully cleaned up processes on port {port}")
+                return True
+            else:
+                self.logger.debug(f"No processes found using port {port}")
+                return True
+
+        except FileNotFoundError:
+            # lsof not available, try alternative approach
+            self.logger.debug("lsof not available, skipping port cleanup")
+            return True
+        except Exception as e:
+            self.logger.warning(f"Error during port cleanup: {e}")
+            # Continue anyway - the port check will catch actual conflicts
+            return True
+
     def start_server(
         self, port: Optional[int] = None, timeout: int = 30, force_restart: bool = True
     ) -> Tuple[bool, DashboardInfo]:
```
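Both new code paths lean on the same two-phase pattern: ask the processes holding the port to exit with SIGTERM, wait, then SIGKILL whatever remains. A condensed, standalone sketch of that pattern follows (assuming `lsof` is on PATH; `pids_on_port` and `free_port` are illustrative names, not package APIs):

```python
import os
import signal
import subprocess
import time

def pids_on_port(port: int) -> list[int]:
    """PIDs holding a TCP port, via `lsof -ti :PORT` (POSIX only)."""
    result = subprocess.run(["lsof", "-ti", f":{port}"], capture_output=True, text=True)
    return [int(p) for p in result.stdout.split()] if result.returncode == 0 else []

def free_port(port: int, grace: float = 3.0) -> bool:
    """SIGTERM the holders, wait, then SIGKILL any survivors."""
    for pid in pids_on_port(port):
        try:
            os.kill(pid, signal.SIGTERM)  # polite request first
        except ProcessLookupError:
            pass  # already exited
    time.sleep(grace)  # let signal handlers run and sockets close
    for pid in pids_on_port(port):
        try:
            os.kill(pid, signal.SIGKILL)  # escalate for stragglers
        except ProcessLookupError:
            pass
    return not pids_on_port(port)
```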
claude_mpm/services/monitor/daemon.py CHANGED

```diff
@@ -102,6 +102,87 @@ class UnifiedMonitorDaemon:
             self.logger.error(f"Failed to start unified monitor daemon: {e}")
             return False
 
+    def _cleanup_port_conflicts(self) -> bool:
+        """Try to clean up any processes using our port.
+
+        Returns:
+            True if cleanup was successful, False otherwise
+        """
+        try:
+            # Find process using the port
+            import subprocess
+            result = subprocess.run(
+                ["lsof", "-ti", f":{self.port}"],
+                capture_output=True,
+                text=True
+            )
+
+            if result.returncode == 0 and result.stdout.strip():
+                pids = result.stdout.strip().split('\n')
+                for pid_str in pids:
+                    try:
+                        pid = int(pid_str.strip())
+                        self.logger.info(f"Found process {pid} using port {self.port}")
+
+                        # Check if it's a claude-mpm process
+                        process_info = subprocess.run(
+                            ["ps", "-p", str(pid), "-o", "comm="],
+                            capture_output=True,
+                            text=True
+                        )
+
+                        if "python" in process_info.stdout.lower() or "claude" in process_info.stdout.lower():
+                            self.logger.info(f"Killing process {pid} (appears to be Python/Claude related)")
+                            os.kill(pid, signal.SIGTERM)
+                            time.sleep(1)
+
+                            # Check if still alive
+                            try:
+                                os.kill(pid, 0)
+                                # Still alive, force kill
+                                self.logger.warning(f"Process {pid} didn't terminate, force killing")
+                                os.kill(pid, signal.SIGKILL)
+                                time.sleep(1)
+                            except ProcessLookupError:
+                                pass
+                        else:
+                            self.logger.warning(f"Process {pid} is not a Claude MPM process: {process_info.stdout}")
+                            return False
+                    except (ValueError, ProcessLookupError) as e:
+                        self.logger.debug(f"Error handling PID {pid_str}: {e}")
+                        continue
+
+                return True
+
+        except FileNotFoundError:
+            # lsof not available, try alternative method
+            self.logger.debug("lsof not available, using alternative cleanup")
+
+            # Check if there's an orphaned service we can identify
+            is_ours, pid = self.lifecycle.is_our_service(self.host)
+            if is_ours and pid:
+                try:
+                    self.logger.info(f"Killing orphaned Claude MPM service (PID: {pid})")
+                    os.kill(pid, signal.SIGTERM)
+                    time.sleep(1)
+
+                    # Check if still alive
+                    try:
+                        os.kill(pid, 0)
+                        os.kill(pid, signal.SIGKILL)
+                        time.sleep(1)
+                    except ProcessLookupError:
+                        pass
+
+                    return True
+                except Exception as e:
+                    self.logger.error(f"Failed to kill process: {e}")
+
+        except Exception as e:
+            self.logger.error(f"Error during port cleanup: {e}")
+
+        return False
+
     def _start_daemon(self, force_restart: bool = False) -> bool:
         """Start as background daemon process.
 
@@ -172,12 +253,26 @@ class UnifiedMonitorDaemon:
                 self.logger.error(f"Failed to kill orphaned process: {e}")
                 return False
 
-        #
+        # Check port availability and clean up if needed
         port_available, error_msg = self.lifecycle.verify_port_available(self.host)
         if not port_available:
-            self.logger.
-
-
+            self.logger.warning(f"Port {self.port} is not available: {error_msg}")
+
+            # Try to identify and kill any process using the port
+            self.logger.info("Attempting to clean up processes on port...")
+            cleaned = self._cleanup_port_conflicts()
+
+            if cleaned:
+                # Wait longer for port to be released to avoid race conditions
+                time.sleep(3)
+                # Check again
+                port_available, error_msg = self.lifecycle.verify_port_available(self.host)
+
+            if not port_available:
+                self.logger.error(f"Port {self.port} is still not available after cleanup: {error_msg}")
+                print(f"Error: {error_msg}", file=sys.stderr)
+                print(f"Try 'claude-mpm monitor stop' or use --force flag", file=sys.stderr)
+                return False
 
         # Wait for any pre-warming threads to complete before forking
         self._wait_for_prewarm_completion()
@@ -207,6 +302,13 @@ class UnifiedMonitorDaemon:
             force_restart: If True, restart existing service if it's ours
         """
         self.logger.info(f"Starting unified monitor daemon on {self.host}:{self.port}")
+
+        # Clean up any processes on the port before checking service status
+        # This helps with race conditions where old processes haven't fully released the port
+        if force_restart:
+            self.logger.info("Force restart requested, cleaning up port conflicts...")
+            self._cleanup_port_conflicts()
+            time.sleep(1)  # Brief pause to ensure port is released
 
         # Check if already running (check PID file even in foreground mode)
         if self.lifecycle.is_running():
@@ -374,7 +476,7 @@ class UnifiedMonitorDaemon:
         pid = self.lifecycle.get_pid()
         if pid and pid != os.getpid():
             # We're not the daemon process, so stop it via signal
-
+            # Don't log here - lifecycle.stop_daemon will log
             success = self.lifecycle.stop_daemon()
             if success:
                 # Clean up our local state
```
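The daemon's variant adds a safety check the dashboard manager's cleanup does not: it inspects the command name with `ps -p PID -o comm=` and refuses to kill anything that does not look Python- or Claude-related, and it probes liveness with `os.kill(pid, 0)` before escalating to SIGKILL. A standalone sketch of those two idioms (function names are illustrative, not package APIs):

```python
import os
import signal
import subprocess
import time

def looks_like_claude_mpm(pid: int) -> bool:
    """Inspect the command name before touching a foreign process."""
    info = subprocess.run(["ps", "-p", str(pid), "-o", "comm="],
                          capture_output=True, text=True)
    name = info.stdout.strip().lower()
    return "python" in name or "claude" in name

def terminate_with_escalation(pid: int, grace: float = 1.0) -> None:
    """SIGTERM, then use signal 0 as an existence probe before SIGKILL."""
    os.kill(pid, signal.SIGTERM)
    time.sleep(grace)
    try:
        os.kill(pid, 0)  # delivers nothing; raises ProcessLookupError if gone
    except ProcessLookupError:
        return  # exited cleanly within the grace period
    os.kill(pid, signal.SIGKILL)  # still alive, force it
```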
{claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/RECORD CHANGED

```diff
@@ -1,5 +1,5 @@
 claude_mpm/BUILD_NUMBER,sha256=toytnNjkIKPgQaGwDqQdC1rpNTAdSEc6Vja50d7Ovug,4
-claude_mpm/VERSION,sha256=
+claude_mpm/VERSION,sha256=lg0--TYI2YjVY4MKeldwdDKp-mvjjCP5p4dJWnMfDLw,7
 claude_mpm/__init__.py,sha256=lyTZAYGH4DTaFGLRNWJKk5Q5oTjzN5I6AXmfVX-Jff0,1512
 claude_mpm/__main__.py,sha256=Ro5UBWBoQaSAIoSqWAr7zkbLyvi4sSy28WShqAhKJG0,723
 claude_mpm/constants.py,sha256=I946iCQzIIPRZVVJ8aO7lA4euiyDnNw2IX7EelAOkIE,5915
@@ -83,7 +83,7 @@ claude_mpm/cli/commands/mcp_pipx_config.py,sha256=sE62VD6Q1CcO2k1nlbIhHMfAJFQTZf
 claude_mpm/cli/commands/mcp_server_commands.py,sha256=-1G_2Y5ScTvzDd-kY8fTAao2H6FH7DnsLimleF1rVqQ,6197
 claude_mpm/cli/commands/mcp_tool_commands.py,sha256=q17GzlFT3JiLTrDqwPO2tz1-fKmPO5QU449syTnKTz4,1283
 claude_mpm/cli/commands/memory.py,sha256=Yzfs3_oiKciv3sfOoDm2lJL4M9idG7ARV3-sNw1ge_g,26186
-claude_mpm/cli/commands/monitor.py,sha256=
+claude_mpm/cli/commands/monitor.py,sha256=Y4on91KW6Ye5NIglHnMVG8hhzw8ZRslBm1oMm6KC48s,9556
 claude_mpm/cli/commands/mpm_init.py,sha256=lO7N91ZHn_n18XbchUUcYoyme7L5NLcXVnhWm5F_Gq8,22367
 claude_mpm/cli/commands/mpm_init_handler.py,sha256=-pCB0XL3KipqGtnta8CC7Lg5TPMwstEhMFBcgF4aaa4,2919
 claude_mpm/cli/commands/run.py,sha256=qS3eolLiDrE8EXLQJioB6kL1ONr_l0c3OE3qMUJCqbA,43489
@@ -299,7 +299,7 @@ claude_mpm/models/agent_definition.py,sha256=LC7EwihixF2Gw4QqOxiCNchsEzzyQJPR6Ag
 claude_mpm/models/agent_session.py,sha256=7YU9oklwqEDyH3PTKUQ52yh6N9C9eJX8GJKhxDCpDj0,19988
 claude_mpm/schemas/__init__.py,sha256=2SLpkojJq34KnwPkVxrsVmw_cEI66872i75QBT1C2To,446
 claude_mpm/scripts/__init__.py,sha256=IffMdVD99Pxyw85yluRa0VDPi4dRQecIWce764pcfZE,553
-claude_mpm/scripts/claude-hook-handler.sh,sha256=
+claude_mpm/scripts/claude-hook-handler.sh,sha256=xe6dKubrjK1JDO0SJdc1-tA6C7YSb5YazhwKhOYBQvw,7905
 claude_mpm/scripts/launch_monitor.py,sha256=Q7hN4Wurw45veLWPSXk0WfvkKxQ1Snz7TjZsV_pNWQc,2418
 claude_mpm/scripts/mcp_server.py,sha256=_i9ydtI7AcO-Eb7gzbIDbcJY4PKRQRYNobB8eMailI4,2259
 claude_mpm/scripts/mcp_wrapper.py,sha256=PvfHJShcsQHGJZD-RN3RnwLOzemAKYZ2kW_QfTkGzkk,1105
@@ -434,7 +434,7 @@ claude_mpm/services/cli/memory_crud_service.py,sha256=ciN9Pl_12iDAqF9zPBWOzu-iXi
 claude_mpm/services/cli/memory_output_formatter.py,sha256=nbf7VsjGvH4e9fLv9c7PzjuO9COZhbK5P2fNZ79055w,24783
 claude_mpm/services/cli/session_manager.py,sha256=rla_Stbcvt93wa9G9MCMu9UqB3FLGqlPt_eN5lQb3Gg,16599
 claude_mpm/services/cli/startup_checker.py,sha256=efhuvu8ns5G16jcQ0nQZKVddmD2AktUEdlvjNcXjAuk,12232
-claude_mpm/services/cli/unified_dashboard_manager.py,sha256=
+claude_mpm/services/cli/unified_dashboard_manager.py,sha256=OqHMovIDnMJXa4Ys70uULB37kgaoMWhoAiDvAEKHA7U,17374
 claude_mpm/services/communication/__init__.py,sha256=b4qc7_Rqy4DE9q7BAUlfUZjoYG4uimAyUnE0irPcXyU,560
 claude_mpm/services/core/__init__.py,sha256=evEayLlBqJvxMZhrhuK6aagXmNrKGSj8Jm9OOxKzqvU,2195
 claude_mpm/services/core/base.py,sha256=iA-F7DgGp-FJIMvQTiHQ68RkG_k-AtUWlArJPMw6ZPk,7297
@@ -550,7 +550,7 @@ claude_mpm/services/memory/cache/__init__.py,sha256=6M6-P8ParyxX8vOgp_IxHgLMvacr
 claude_mpm/services/memory/cache/shared_prompt_cache.py,sha256=crnYPUT8zcS7TvoE1vW7pyaf4T77N5rJ1wUf_YQ2vvo,28704
 claude_mpm/services/memory/cache/simple_cache.py,sha256=qsTjbcsPxj-kNfaod9VN_uE5NioIwpfkUin_mMVUJCg,10218
 claude_mpm/services/monitor/__init__.py,sha256=X7gxSLUm9Fg_zEsX6LtCHP2ipF0qj6Emkun20h2So7g,745
-claude_mpm/services/monitor/daemon.py,sha256=
+claude_mpm/services/monitor/daemon.py,sha256=LGKn9LG2RKL4Of6c3DWZSxRFWqJJRUZFnxT4ochTPjc,28157
 claude_mpm/services/monitor/event_emitter.py,sha256=JzRLNg8PUJ5s3ulNnq_D4yqCPItvidJzu8DmFxriieQ,12224
 claude_mpm/services/monitor/server.py,sha256=m98Eyv9caxRywJ4JtAdOuv5EB__z7vd2hYRZPwcqFLg,28498
 claude_mpm/services/monitor/handlers/__init__.py,sha256=jgPIf4IJVERm_tAeD9834tfx9IcxtlHj5r9rhEWpkfM,701
@@ -641,9 +641,9 @@ claude_mpm/utils/subprocess_utils.py,sha256=zgiwLqh_17WxHpySvUPH65pb4bzIeUGOAYUJ
 claude_mpm/validation/__init__.py,sha256=YZhwE3mhit-lslvRLuwfX82xJ_k4haZeKmh4IWaVwtk,156
 claude_mpm/validation/agent_validator.py,sha256=3Lo6LK-Mw9IdnL_bd3zl_R6FkgSVDYKUUM7EeVVD3jc,20865
 claude_mpm/validation/frontmatter_validator.py,sha256=u8g4Eyd_9O6ugj7Un47oSGh3kqv4wMkuks2i_CtWRvM,7028
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
-claude_mpm-4.2.
+claude_mpm-4.2.27.dist-info/licenses/LICENSE,sha256=lpaivOlPuBZW1ds05uQLJJswy8Rp_HMNieJEbFlqvLk,1072
+claude_mpm-4.2.27.dist-info/METADATA,sha256=TdXXStRCBxj-tjx9H9gISHrS9luzBtF7Ccj1If2HpT8,14451
+claude_mpm-4.2.27.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+claude_mpm-4.2.27.dist-info/entry_points.txt,sha256=FDPZgz8JOvD-6iuXY2l9Zbo9zYVRuE4uz4Qr0vLeGOk,471
+claude_mpm-4.2.27.dist-info/top_level.txt,sha256=1nUg3FEaBySgm8t-s54jK5zoPnu3_eY6EP6IOlekyHA,11
+claude_mpm-4.2.27.dist-info/RECORD,,
```
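Each RECORD row has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64 SHA-256 of the file with trailing `=` padding stripped, per the wheel spec. A small sketch that rebuilds a row; the `VERSION` entry's `,7` presumably reflects the six characters of `4.2.27` plus a trailing newline:

```python
import base64
import hashlib
from pathlib import Path

def record_row(path: Path) -> str:
    """Build a wheel RECORD row: path,sha256=<urlsafe b64, unpadded>,size."""
    data = path.read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode('ascii')},{len(data)}"

# record_row(Path("claude_mpm/VERSION")) should reproduce the RECORD line above.
```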
Files without changes: {claude_mpm-4.2.25.dist-info → claude_mpm-4.2.27.dist-info}/WHEEL, entry_points.txt, licenses/LICENSE, and top_level.txt are byte-identical between the two versions.