claude-mpm 4.0.32__py3-none-any.whl → 4.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/INSTRUCTIONS.md +70 -2
- claude_mpm/agents/OUTPUT_STYLE.md +0 -11
- claude_mpm/agents/WORKFLOW.md +14 -2
- claude_mpm/agents/templates/documentation.json +51 -34
- claude_mpm/agents/templates/research.json +0 -11
- claude_mpm/cli/__init__.py +111 -33
- claude_mpm/cli/commands/agent_manager.py +10 -8
- claude_mpm/cli/commands/agents.py +82 -0
- claude_mpm/cli/commands/cleanup_orphaned_agents.py +150 -0
- claude_mpm/cli/commands/mcp_pipx_config.py +199 -0
- claude_mpm/cli/parsers/agents_parser.py +27 -0
- claude_mpm/cli/parsers/base_parser.py +6 -0
- claude_mpm/cli/startup_logging.py +75 -0
- claude_mpm/core/framework_loader.py +173 -84
- claude_mpm/dashboard/static/css/dashboard.css +449 -0
- claude_mpm/dashboard/static/dist/components/agent-inference.js +1 -1
- claude_mpm/dashboard/static/dist/components/event-viewer.js +1 -1
- claude_mpm/dashboard/static/dist/components/file-tool-tracker.js +1 -1
- claude_mpm/dashboard/static/dist/components/module-viewer.js +1 -1
- claude_mpm/dashboard/static/dist/components/session-manager.js +1 -1
- claude_mpm/dashboard/static/dist/dashboard.js +1 -1
- claude_mpm/dashboard/static/dist/socket-client.js +1 -1
- claude_mpm/dashboard/static/js/components/agent-hierarchy.js +774 -0
- claude_mpm/dashboard/static/js/components/agent-inference.js +257 -3
- claude_mpm/dashboard/static/js/components/build-tracker.js +323 -0
- claude_mpm/dashboard/static/js/components/event-viewer.js +168 -39
- claude_mpm/dashboard/static/js/components/file-tool-tracker.js +17 -0
- claude_mpm/dashboard/static/js/components/session-manager.js +23 -3
- claude_mpm/dashboard/static/js/components/socket-manager.js +2 -0
- claude_mpm/dashboard/static/js/dashboard.js +207 -31
- claude_mpm/dashboard/static/js/socket-client.js +92 -11
- claude_mpm/dashboard/templates/index.html +1 -0
- claude_mpm/hooks/claude_hooks/connection_pool.py +25 -4
- claude_mpm/hooks/claude_hooks/event_handlers.py +81 -19
- claude_mpm/hooks/claude_hooks/hook_handler.py +125 -163
- claude_mpm/hooks/claude_hooks/hook_handler_eventbus.py +398 -0
- claude_mpm/hooks/claude_hooks/response_tracking.py +10 -0
- claude_mpm/services/agents/deployment/agent_deployment.py +34 -48
- claude_mpm/services/agents/deployment/agent_discovery_service.py +4 -1
- claude_mpm/services/agents/deployment/agent_template_builder.py +20 -11
- claude_mpm/services/agents/deployment/agent_version_manager.py +4 -1
- claude_mpm/services/agents/deployment/agents_directory_resolver.py +10 -25
- claude_mpm/services/agents/deployment/multi_source_deployment_service.py +396 -13
- claude_mpm/services/agents/deployment/pipeline/steps/target_directory_step.py +3 -2
- claude_mpm/services/agents/deployment/strategies/system_strategy.py +10 -3
- claude_mpm/services/agents/deployment/strategies/user_strategy.py +10 -14
- claude_mpm/services/agents/deployment/system_instructions_deployer.py +8 -85
- claude_mpm/services/agents/memory/content_manager.py +98 -105
- claude_mpm/services/event_bus/__init__.py +18 -0
- claude_mpm/services/event_bus/config.py +165 -0
- claude_mpm/services/event_bus/event_bus.py +349 -0
- claude_mpm/services/event_bus/relay.py +297 -0
- claude_mpm/services/events/__init__.py +44 -0
- claude_mpm/services/events/consumers/__init__.py +18 -0
- claude_mpm/services/events/consumers/dead_letter.py +296 -0
- claude_mpm/services/events/consumers/logging.py +183 -0
- claude_mpm/services/events/consumers/metrics.py +242 -0
- claude_mpm/services/events/consumers/socketio.py +376 -0
- claude_mpm/services/events/core.py +470 -0
- claude_mpm/services/events/interfaces.py +230 -0
- claude_mpm/services/events/producers/__init__.py +14 -0
- claude_mpm/services/events/producers/hook.py +269 -0
- claude_mpm/services/events/producers/system.py +327 -0
- claude_mpm/services/mcp_gateway/auto_configure.py +372 -0
- claude_mpm/services/mcp_gateway/core/process_pool.py +411 -0
- claude_mpm/services/mcp_gateway/server/stdio_server.py +13 -0
- claude_mpm/services/monitor_build_service.py +345 -0
- claude_mpm/services/socketio/event_normalizer.py +667 -0
- claude_mpm/services/socketio/handlers/connection.py +81 -23
- claude_mpm/services/socketio/handlers/hook.py +14 -5
- claude_mpm/services/socketio/migration_utils.py +329 -0
- claude_mpm/services/socketio/server/broadcaster.py +26 -33
- claude_mpm/services/socketio/server/core.py +29 -5
- claude_mpm/services/socketio/server/eventbus_integration.py +189 -0
- claude_mpm/services/socketio/server/main.py +25 -0
- {claude_mpm-4.0.32.dist-info → claude_mpm-4.1.0.dist-info}/METADATA +28 -9
- {claude_mpm-4.0.32.dist-info → claude_mpm-4.1.0.dist-info}/RECORD +82 -56
- {claude_mpm-4.0.32.dist-info → claude_mpm-4.1.0.dist-info}/WHEEL +0 -0
- {claude_mpm-4.0.32.dist-info → claude_mpm-4.1.0.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.0.32.dist-info → claude_mpm-4.1.0.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.0.32.dist-info → claude_mpm-4.1.0.dist-info}/top_level.txt +0 -0

claude_mpm/hooks/claude_hooks/hook_handler_eventbus.py (new file)
@@ -0,0 +1,398 @@
+#!/usr/bin/env python3
+"""Optimized Claude Code hook handler with EventBus architecture.
+
+This handler uses the EventBus for decoupled event emission instead of
+direct Socket.IO connections. This provides better separation of concerns
+and improved testability.
+
+WHY EventBus approach:
+- Decouples hook processing from Socket.IO implementation
+- Enables multiple event consumers without code changes
+- Simplifies testing by removing Socket.IO dependencies
+- Provides centralized event routing and filtering
+- Maintains backward compatibility with existing hooks
+"""
+
+import json
+import os
+import select
+import signal
+import subprocess
+import sys
+import threading
+import time
+from collections import deque
+from datetime import datetime
+from pathlib import Path
+
+# Add parent path for imports
+sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
+
+# Import EventBus
+try:
+    from claude_mpm.services.event_bus import EventBus
+    EVENTBUS_AVAILABLE = True
+except ImportError:
+    EVENTBUS_AVAILABLE = False
+    EventBus = None
+
+# Import EventNormalizer for consistent event formatting
+try:
+    from claude_mpm.services.socketio.event_normalizer import EventNormalizer
+except ImportError:
+    # Create a simple fallback EventNormalizer if import fails
+    class EventNormalizer:
+        def normalize(self, event_data, source="hook"):
+            """Simple fallback normalizer that returns event as-is."""
+            return type('NormalizedEvent', (), {
+                'to_dict': lambda: {
+                    'event': 'claude_event',
+                    'type': event_data.get('type', 'unknown'),
+                    'subtype': event_data.get('subtype', 'generic'),
+                    'timestamp': event_data.get('timestamp', datetime.now().isoformat()),
+                    'data': event_data.get('data', event_data),
+                    'source': source
+                }
+            })
+
+# Import constants for configuration
+try:
+    from claude_mpm.core.constants import TimeoutConfig
+except ImportError:
+    # Fallback values if constants module not available
+    class TimeoutConfig:
+        QUICK_TIMEOUT = 2.0
+
+# Import other handler modules
+try:
+    from .memory_integration import MemoryHookManager
+    from .response_tracking import ResponseTrackingManager
+    from .event_handlers import EventHandlers
+except ImportError:
+    # Fallback for direct execution
+    from memory_integration import MemoryHookManager
+    from response_tracking import ResponseTrackingManager
+    from event_handlers import EventHandlers
+
+# Debug mode is enabled by default for better visibility into hook processing
+DEBUG = os.environ.get("CLAUDE_MPM_HOOK_DEBUG", "true").lower() != "false"
+
+# Global singleton handler instance
+_global_handler = None
+_handler_lock = threading.Lock()
+
+# Track recent events to detect duplicates
+_recent_events = deque(maxlen=10)
+_events_lock = threading.Lock()
+
+
+class HookHandler:
+    """Main hook handler class using EventBus for event emission.
+
+    WHY EventBus integration:
+    - Replaces direct Socket.IO connections with EventBus publishing
+    - Events are published once and consumed by multiple listeners
+    - Failures in one consumer don't affect others
+    - Simplified testing without Socket.IO dependencies
+    """
+
+    # Tracking dictionaries with size limits
+    MAX_DELEGATION_TRACKING = 100
+    MAX_PROMPT_TRACKING = 50
+    MAX_CACHE_AGE_SECONDS = 1800  # 30 minutes
+
+    def __init__(self):
+        """Initialize the hook handler with EventBus."""
+        # Initialize EventBus if available
+        self.event_bus = EventBus.get_instance() if EVENTBUS_AVAILABLE else None
+        self.event_normalizer = EventNormalizer()
+
+        # Initialize tracking managers
+        self.memory_manager = MemoryHookManager()
+        self.response_tracker = ResponseTrackingManager()
+        self.event_handlers = EventHandlers(self)
+
+        # Delegation tracking
+        self.active_delegations = {}
+        self.delegation_requests = {}
+        self.delegation_history = deque(maxlen=20)
+
+        # Prompt tracking
+        self.pending_prompts = {}
+
+        # Git branch caching
+        self._git_branch_cache = {}
+        self._git_branch_cache_time = {}
+
+        # Session tracking
+        self.current_session_id = None
+
+        # Cleanup old entries periodically
+        self._last_cleanup = time.time()
+
+        if self.event_bus:
+            logger_msg = "HookHandler initialized with EventBus"
+        else:
+            logger_msg = "HookHandler initialized (EventBus not available)"
+
+        if DEBUG:
+            print(f"🚀 {logger_msg}", file=sys.stderr)
+
+    def _emit_event(self, event_type: str, data: dict):
+        """Emit an event through the EventBus.
+
+        WHY this approach:
+        - Single point of event emission
+        - Consistent event normalization
+        - Graceful fallback if EventBus unavailable
+        - Easy to add metrics and monitoring
+
+        Args:
+            event_type: The event type (e.g., 'pre_tool', 'subagent_stop')
+            data: The event data
+        """
+        if not self.event_bus:
+            if DEBUG:
+                print(f"EventBus not available, cannot emit: hook.{event_type}", file=sys.stderr)
+            return
+
+        try:
+            # Create event data for normalization
+            raw_event = {
+                "type": "hook",
+                "subtype": event_type,
+                "timestamp": datetime.now().isoformat(),
+                "data": data,
+                "source": "claude_hooks",
+                "session_id": data.get("sessionId", self.current_session_id)
+            }
+
+            # Normalize the event
+            normalized_event = self.event_normalizer.normalize(raw_event, source="hook")
+            event_data = normalized_event.to_dict()
+
+            # Publish to EventBus
+            success = self.event_bus.publish(f"hook.{event_type}", event_data)
+
+            if DEBUG:
+                if success:
+                    print(f"✅ Published to EventBus: hook.{event_type}", file=sys.stderr)
+                else:
+                    print(f"⚠️ EventBus rejected event: hook.{event_type}", file=sys.stderr)
+
+            # Log important events
+            if DEBUG and event_type in ["subagent_stop", "pre_tool"]:
+                if event_type == "subagent_stop":
+                    agent_type = data.get("agent_type", "unknown")
+                    print(f"📤 Published SubagentStop for agent '{agent_type}'", file=sys.stderr)
+                elif event_type == "pre_tool" and data.get("tool_name") == "Task":
+                    delegation = data.get("delegation_details", {})
+                    agent_type = delegation.get("agent_type", "unknown")
+                    print(f"📤 Published Task delegation to agent '{agent_type}'", file=sys.stderr)
+
+        except Exception as e:
+            if DEBUG:
+                print(f"❌ Failed to publish event hook.{event_type}: {e}", file=sys.stderr)
+
+    def _get_git_branch(self, working_dir: str = None) -> str:
+        """Get git branch for the given directory with caching."""
+        # Use current working directory if not specified
+        if not working_dir:
+            working_dir = os.getcwd()
+
+        # Check cache first (cache for 30 seconds)
+        current_time = time.time()
+        cache_key = working_dir
+
+        if (
+            cache_key in self._git_branch_cache
+            and cache_key in self._git_branch_cache_time
+            and current_time - self._git_branch_cache_time[cache_key] < 30
+        ):
+            return self._git_branch_cache[cache_key]
+
+        # Try to get git branch
+        try:
+            # Change to the working directory temporarily
+            original_cwd = os.getcwd()
+            os.chdir(working_dir)
+
+            # Run git command to get current branch
+            result = subprocess.run(
+                ["git", "branch", "--show-current"],
+                capture_output=True,
+                text=True,
+                timeout=TimeoutConfig.QUICK_TIMEOUT
+            )
+
+            # Restore original directory
+            os.chdir(original_cwd)
+
+            if result.returncode == 0 and result.stdout.strip():
+                branch = result.stdout.strip()
+                # Cache the result
+                self._git_branch_cache[cache_key] = branch
+                self._git_branch_cache_time[cache_key] = current_time
+                return branch
+            else:
+                return "unknown"
+
+        except Exception:
+            return "unknown"
+
+    def _cleanup_old_entries(self):
+        """Clean up old entries to prevent memory growth."""
+        cutoff_time = time.time() - self.MAX_CACHE_AGE_SECONDS
+
+        # Clean up delegation tracking dictionaries
+        for storage in [self.active_delegations, self.delegation_requests]:
+            if len(storage) > self.MAX_DELEGATION_TRACKING:
+                # Keep only the most recent entries
+                sorted_keys = sorted(storage.keys())
+                excess = len(storage) - self.MAX_DELEGATION_TRACKING
+                for key in sorted_keys[:excess]:
+                    del storage[key]
+
+        # Clean up pending prompts
+        if len(self.pending_prompts) > self.MAX_PROMPT_TRACKING:
+            sorted_keys = sorted(self.pending_prompts.keys())
+            excess = len(self.pending_prompts) - self.MAX_PROMPT_TRACKING
+            for key in sorted_keys[:excess]:
+                del self.pending_prompts[key]
+
+        # Clean up git branch cache
+        expired_keys = [
+            key
+            for key, cache_time in self._git_branch_cache_time.items()
+            if time.time() - cache_time > self.MAX_CACHE_AGE_SECONDS
+        ]
+        for key in expired_keys:
+            self._git_branch_cache.pop(key, None)
+            self._git_branch_cache_time.pop(key, None)
+
+    def handle_event(self, event: dict):
+        """Process an event from Claude Code.
+
+        Args:
+            event: The event dictionary from Claude
+        """
+        # Periodic cleanup
+        current_time = time.time()
+        if current_time - self._last_cleanup > 300:  # Every 5 minutes
+            self._cleanup_old_entries()
+            self._last_cleanup = current_time
+
+        # Extract event details
+        event_type = event.get("type", "")
+        event_name = event.get("name", "")
+
+        # Update session ID if present
+        if "sessionId" in event:
+            self.current_session_id = event["sessionId"]
+
+        # Detect duplicate events
+        event_signature = f"{event_type}:{event_name}:{json.dumps(event.get('data', ''))[:100]}"
+        with _events_lock:
+            if event_signature in _recent_events:
+                if DEBUG:
+                    print(f"Skipping duplicate event: {event_type}", file=sys.stderr)
+                return
+            _recent_events.append(event_signature)
+
+        # Route to appropriate handler
+        if event_type == "Start":
+            self.event_handlers.handle_start(event)
+        elif event_type == "Stop":
+            self.event_handlers.handle_stop(event)
+        elif event_type == "UserPrompt":
+            self.event_handlers.handle_user_prompt(event)
+        elif event_type == "AssistantResponse":
+            self.event_handlers.handle_assistant_response(event)
+        elif event_type == "SubagentStart":
+            self.event_handlers.handle_subagent_start(event)
+        elif event_type == "SubagentStop":
+            self.event_handlers.handle_subagent_stop(event)
+        elif event_type == "PreToolExecution" and event_name == "Task":
+            self.event_handlers.handle_task_delegation(event)
+        elif event_type == "PreToolExecution":
+            self.event_handlers.handle_pre_tool(event)
+        elif event_type == "PostToolExecution":
+            self.event_handlers.handle_post_tool(event)
+        elif event_type == "PromptCachingBetaStats":
+            # Ignore caching stats events
+            pass
+        else:
+            # Log unhandled events in debug mode
+            if DEBUG:
+                print(f"Unhandled event type: {event_type}", file=sys.stderr)
+
+
+def get_handler() -> HookHandler:
+    """Get or create the global hook handler instance.
+
+    Returns:
+        HookHandler: The singleton handler instance
+    """
+    global _global_handler
+    if _global_handler is None:
+        with _handler_lock:
+            if _global_handler is None:
+                _global_handler = HookHandler()
+    return _global_handler
+
+
+def main():
+    """Main entry point for the hook handler."""
+    if DEBUG:
+        print("🎯 EventBus Hook Handler starting...", file=sys.stderr)
+
+    handler = get_handler()
+
+    # Set up signal handling for clean shutdown
+    def signal_handler(signum, frame):
+        if DEBUG:
+            print("\n👋 Hook handler shutting down...", file=sys.stderr)
+        sys.exit(0)
+
+    signal.signal(signal.SIGINT, signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    # Process events from stdin
+    try:
+        while True:
+            # Check if data is available with timeout
+            readable, _, _ = select.select([sys.stdin], [], [], 0.1)
+            if readable:
+                line = sys.stdin.readline()
+                if not line:
+                    break
+
+                try:
+                    event = json.loads(line.strip())
+                    handler.handle_event(event)
+
+                    # Acknowledge event
+                    print(json.dumps({"status": "ok"}))
+                    sys.stdout.flush()
+
+                except json.JSONDecodeError as e:
+                    if DEBUG:
+                        print(f"Invalid JSON: {e}", file=sys.stderr)
+                    print(json.dumps({"status": "error", "message": str(e)}))
+                    sys.stdout.flush()
+                except Exception as e:
+                    if DEBUG:
+                        print(f"Error processing event: {e}", file=sys.stderr)
+                    print(json.dumps({"status": "error", "message": str(e)}))
+                    sys.stdout.flush()
+
+    except KeyboardInterrupt:
+        if DEBUG:
+            print("\n👋 Hook handler interrupted", file=sys.stderr)
+    finally:
+        if DEBUG:
+            print("Hook handler exiting", file=sys.stderr)
+
+
+if __name__ == "__main__":
+    main()
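
Note: the following sketch is illustrative and not part of the diff. It shows the decoupled flow the docstring above describes. EventBus.get_instance() and publish("hook.<type>", payload) are taken from the handler itself; the on() subscription call and the consumer function are assumptions about the EventBus API, made for illustration only.

    # Hypothetical consumer/producer pairing around the EventBus (sketch only).
    from claude_mpm.services.event_bus import EventBus

    def forward_to_dashboard(event: dict) -> None:
        # One of possibly many consumers; a failure here does not affect the hook handler.
        print(f"dashboard received {event.get('type')}.{event.get('subtype')}")

    bus = EventBus.get_instance()
    bus.on("hook.pre_tool", forward_to_dashboard)  # assumed subscription API

    # The hook handler publishes once; every registered consumer receives the event.
    bus.publish("hook.pre_tool", {"type": "hook", "subtype": "pre_tool", "data": {"tool_name": "Task"}})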

claude_mpm/hooks/claude_hooks/response_tracking.py
@@ -203,7 +203,17 @@ class ResponseTrackingManager:
             "files_modified": structured_response.get("files_modified", []),
             "tools_used": structured_response.get("tools_used", []),
             "remember": structured_response.get("remember"),
+            "MEMORIES": structured_response.get("MEMORIES"),  # Complete memory replacement
         }
+
+        # Log if MEMORIES field is present
+        if "MEMORIES" in structured_response and structured_response["MEMORIES"]:
+            if DEBUG:
+                memories_count = len(structured_response["MEMORIES"])
+                print(
+                    f"Agent {agent_type} returned MEMORIES field with {memories_count} items",
+                    file=sys.stderr,
+                )
 
         # Check if task was completed for logging purposes
         if structured_response.get("task_completed"):
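
Note: a hypothetical structured response illustrating the shape the tracker now records. The field names come from the hunk above; the values are invented.

    structured_response = {
        "task_completed": True,
        "files_modified": ["src/app.py"],
        "tools_used": ["Edit", "Bash"],
        "remember": None,
        "MEMORIES": [  # complete replacement of the agent's memory
            "Project uses pytest for all test suites",
            "System instructions deploy to the project's .claude directory",
        ],
    }

    # Mirrors the added logging: only report when MEMORIES is present and non-empty.
    if structured_response.get("MEMORIES"):
        print(f"MEMORIES field with {len(structured_response['MEMORIES'])} items")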

claude_mpm/services/agents/deployment/agent_deployment.py
@@ -385,12 +385,23 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
 
         if use_multi_source:
             # Use multi-source deployment to get highest version agents
-            template_files, agent_sources = self._get_multi_source_templates(
+            template_files, agent_sources, cleanup_results = self._get_multi_source_templates(
                 excluded_agents, config, agents_dir, force_rebuild
             )
             results["total"] = len(template_files)
             results["multi_source"] = True
             results["agent_sources"] = agent_sources
+            results["cleanup"] = cleanup_results
+
+            # Log cleanup results if any agents were removed
+            if cleanup_results.get("removed"):
+                self.logger.info(
+                    f"Cleaned up {len(cleanup_results['removed'])} outdated user agents"
+                )
+                for removed in cleanup_results["removed"]:
+                    self.logger.debug(
+                        f" - Removed: {removed['name']} v{removed['version']} ({removed['reason']})"
+                    )
         else:
             # Get and filter template files from single source
             template_files = self._get_filtered_templates(excluded_agents, config)
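
Note: based on the logging above, each entry in cleanup_results["removed"] carries at least name, version, and reason, and the whole structure is attached to the deployment results under results["cleanup"]. A small consumer sketch with invented sample data:

    results = {
        "cleanup": {
            "removed": [
                {"name": "research", "version": "2.1.0", "reason": "outdated user copy"},
            ]
        }
    }

    for removed in results["cleanup"].get("removed", []):
        print(f"Removed: {removed['name']} v{removed['version']} ({removed['reason']})")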

@@ -516,11 +527,11 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
             return False
 
         # Ensure target directory exists
-
-
+        # target_dir should already be the agents directory
+        target_dir.mkdir(parents=True, exist_ok=True)
 
         # Build and deploy the agent
-        target_file =
+        target_file = target_dir / f"{agent_name}.md"
 
         # Check if update is needed
         if not force_rebuild and target_file.exists():

@@ -618,7 +629,7 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
 
         deployer = SystemInstructionsDeployer(self.logger, self.working_directory)
         deployer.deploy_system_instructions(
-            target_dir, force_rebuild, results
+            target_dir, force_rebuild, results
         )
 
     def deploy_system_instructions_explicit(

@@ -629,11 +640,11 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
 
         This method should ONLY be called when the user explicitly requests
         deployment of system instructions through agent-manager commands.
-        It will deploy INSTRUCTIONS.md, MEMORY.md, and WORKFLOW.md to .claude
-        directory
+        It will deploy INSTRUCTIONS.md, MEMORY.md, and WORKFLOW.md to .claude/
+        directory in the project.
 
         Args:
-            target_dir: Target directory for deployment (
+            target_dir: Target directory for deployment (ignored - always uses .claude/)
             force_rebuild: Force rebuild even if files exist
 
         Returns:

@@ -647,23 +658,19 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
         }
 
         try:
-            #
-
-            if self._is_project_specific_deployment():
-                target_dir = self.working_directory / ".claude-mpm"
-            else:
-                target_dir = Path.home() / ".claude-mpm"
+            # Always use project's .claude directory
+            target_dir = self.working_directory / ".claude"
 
             # Ensure directory exists
             target_dir.mkdir(parents=True, exist_ok=True)
 
-            # Deploy using the
+            # Deploy using the deployer (targeting .claude/)
             from .system_instructions_deployer import SystemInstructionsDeployer
             deployer = SystemInstructionsDeployer(self.logger, self.working_directory)
 
-            #
-            deployer.
-                target_dir, force_rebuild, results
+            # Deploy to .claude directory
+            deployer.deploy_system_instructions(
+                target_dir, force_rebuild, results
             )
 
             self.logger.info(
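
Note: after this change the explicit system-instructions path always resolves to the project's .claude directory, regardless of the target_dir argument. A minimal path-resolution sketch; Path.cwd() stands in for the service's working_directory and nothing is actually deployed.

    from pathlib import Path

    working_directory = Path.cwd()              # stand-in for the service's working_directory
    target_dir = working_directory / ".claude"  # always the project's .claude/, never ~/.claude-mpm
    print(f"INSTRUCTIONS.md, MEMORY.md and WORKFLOW.md would be written under {target_dir}")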

@@ -764,32 +771,9 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
         """Determine the correct agents directory based on input."""
         from .agents_directory_resolver import AgentsDirectoryResolver
 
-        resolver = AgentsDirectoryResolver(
-            self.working_directory,
-            self._is_system_agent_deployment(),
-            self._is_project_specific_deployment(),
-        )
+        resolver = AgentsDirectoryResolver(self.working_directory)
         return resolver.determine_agents_directory(target_dir)
 
-    def _is_system_agent_deployment(self) -> bool:
-        """Check if this is a deployment of system agents."""
-        from .deployment_type_detector import DeploymentTypeDetector
-
-        return DeploymentTypeDetector.is_system_agent_deployment(self.templates_dir)
-
-    def _is_project_specific_deployment(self) -> bool:
-        """Check if deploying project-specific agents."""
-        from .deployment_type_detector import DeploymentTypeDetector
-
-        return DeploymentTypeDetector.is_project_specific_deployment(
-            self.templates_dir, self.working_directory
-        )
-
-    def _is_user_custom_deployment(self) -> bool:
-        """Check if deploying user custom agents."""
-        from .deployment_type_detector import DeploymentTypeDetector
-
-        return DeploymentTypeDetector.is_user_custom_deployment(self.templates_dir)
 
     def _initialize_deployment_results(
         self, agents_dir: Path, deployment_start_time: float

@@ -1125,7 +1109,7 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
     def _get_multi_source_templates(
         self, excluded_agents: List[str], config: Config, agents_dir: Path,
         force_rebuild: bool = False
-    ) -> Tuple[List[Path], Dict[str, str]]:
+    ) -> Tuple[List[Path], Dict[str, str], Dict[str, Any]]:
         """Get agent templates from multiple sources with version comparison.
 
         WHY: This method uses the multi-source service to discover agents
|
@@ -1135,9 +1119,10 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
|
|
|
1135
1119
|
excluded_agents: List of agents to exclude
|
|
1136
1120
|
config: Configuration object
|
|
1137
1121
|
agents_dir: Target deployment directory
|
|
1122
|
+
force_rebuild: Whether to force rebuild
|
|
1138
1123
|
|
|
1139
1124
|
Returns:
|
|
1140
|
-
Tuple of (template_files, agent_sources)
|
|
1125
|
+
Tuple of (template_files, agent_sources, cleanup_results)
|
|
1141
1126
|
"""
|
|
1142
1127
|
# Determine source directories
|
|
1143
1128
|
system_templates_dir = self.templates_dir
|
|

@@ -1158,14 +1143,15 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
             user_agents_dir = potential_user_dir
             self.logger.info(f"Found user agents at: {user_agents_dir}")
 
-        # Get agents with version comparison
-        agents_to_deploy, agent_sources = self.multi_source_service.get_agents_for_deployment(
+        # Get agents with version comparison and cleanup
+        agents_to_deploy, agent_sources, cleanup_results = self.multi_source_service.get_agents_for_deployment(
             system_templates_dir=system_templates_dir,
             project_agents_dir=project_agents_dir,
             user_agents_dir=user_agents_dir,
             working_directory=self.working_directory,
             excluded_agents=excluded_agents,
-            config=config
+            config=config,
+            cleanup_outdated=True  # Enable cleanup by default
         )
 
         # Compare with deployed versions if agents directory exists
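
Note: the multi-source lookup now returns a three-tuple and opts into cleanup, so callers that still unpack two values need updating. A self-contained stub with the new return shape (the stub and its sample data are invented):

    from pathlib import Path
    from typing import Any, Dict, List, Tuple

    def get_multi_source_templates_stub() -> Tuple[List[Path], Dict[str, str], Dict[str, Any]]:
        # Local stand-in mirroring the new (template_files, agent_sources, cleanup_results) shape.
        return [Path("engineer.json")], {"engineer": "system"}, {"removed": []}

    template_files, agent_sources, cleanup_results = get_multi_source_templates_stub()
    print(len(template_files), "templates,", len(cleanup_results["removed"]), "removed")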

@@ -1206,7 +1192,7 @@ class AgentDeploymentService(ConfigServiceBase, AgentDeploymentInterface):
         # Convert to list of Path objects
         template_files = list(agents_to_deploy.values())
 
-        return template_files, agent_sources
+        return template_files, agent_sources, cleanup_results
 
     # ================================================================================
     # Interface Adapter Methods

claude_mpm/services/agents/deployment/agent_discovery_service.py
@@ -198,7 +198,10 @@ class AgentDiscoveryService:
             "name": metadata.get("name", template_file.stem),
             "description": metadata.get("description", "No description available"),
             "version": template_data.get(
-                "agent_version",
+                "agent_version",
+                template_data.get("version",
+                    metadata.get("version", "1.0.0")
+                )
             ),
             "tools": capabilities.get("tools", []),
             "specializations": metadata.get(
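
Note: the widened fallback reads agent_version first, then a top-level version, then metadata.version, and finally defaults to "1.0.0". A self-contained sketch of that lookup order; the sample template dictionary is invented:

    template_data = {"metadata": {"name": "docs", "version": "3.2.0"}}
    metadata = template_data.get("metadata", {})

    version = template_data.get(
        "agent_version",
        template_data.get("version", metadata.get("version", "1.0.0")),
    )
    print(version)  # -> "3.2.0", taken from metadata.version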

claude_mpm/services/agents/deployment/agent_template_builder.py
@@ -136,7 +136,8 @@ class AgentTemplateBuilder:
         )
 
         # Extract custom metadata fields
-
+        metadata = template_data.get("metadata", {})
+        agent_version = template_data.get("agent_version") or template_data.get("version") or metadata.get("version", "1.0.0")
         agent_type = template_data.get("agent_type", "general")
         # Use the capabilities_model we already extracted earlier
         model_type = capabilities_model or "sonnet"

@@ -148,16 +149,24 @@ class AgentTemplateBuilder:
         else:
             claude_model = "inherit"
 
-        # Determine color
-
-
-
-
-
-
-
-
-
+        # Determine color - prefer template's color, fallback to type-based defaults
+        template_metadata = template_data.get("metadata", {})
+        template_color = template_metadata.get("color")
+
+        if template_color:
+            # Use the color specified in the template
+            color = template_color
+        else:
+            # Fallback to default color map based on agent type
+            color_map = {
+                "engineer": "blue",
+                "qa": "green",
+                "security": "red",
+                "research": "purple",
+                "documentation": "cyan",  # Changed default to match template preference
+                "ops": "gray",
+            }
+            color = color_map.get(agent_type, "blue")
 
         # Check if we should include tools field (only if significantly restricting)
         # Claude Code approach: omit tools field unless specifically restricting
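
Note: a small sketch of the color-selection rule introduced above: a color set in the template's metadata wins, otherwise the type-based default applies. The pick_color helper and the sample templates are invented for illustration.

    def pick_color(template_data: dict) -> str:
        template_color = template_data.get("metadata", {}).get("color")
        if template_color:
            return template_color
        color_map = {
            "engineer": "blue", "qa": "green", "security": "red",
            "research": "purple", "documentation": "cyan", "ops": "gray",
        }
        return color_map.get(template_data.get("agent_type", "general"), "blue")

    print(pick_color({"agent_type": "qa", "metadata": {"color": "orange"}}))  # -> "orange"
    print(pick_color({"agent_type": "documentation", "metadata": {}}))        # -> "cyan"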

claude_mpm/services/agents/deployment/agent_version_manager.py
@@ -254,8 +254,11 @@ class AgentVersionManager:
         template_data = json.loads(template_file.read_text())
 
         # Extract agent version from template
+        metadata = template_data.get("metadata", {})
         current_agent_version = self.parse_version(
-            template_data.get("agent_version") or
+            template_data.get("agent_version") or
+            template_data.get("version") or
+            metadata.get("version", 0)
         )
 
         # If old format detected, always trigger update for migration
|