claude-mpm 3.5.6__py3-none-any.whl → 3.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/BASE_AGENT_TEMPLATE.md +96 -23
  3. claude_mpm/agents/BASE_PM.md +273 -0
  4. claude_mpm/agents/INSTRUCTIONS.md +114 -103
  5. claude_mpm/agents/agent_loader.py +36 -1
  6. claude_mpm/agents/async_agent_loader.py +421 -0
  7. claude_mpm/agents/templates/code_analyzer.json +81 -0
  8. claude_mpm/agents/templates/data_engineer.json +18 -3
  9. claude_mpm/agents/templates/documentation.json +18 -3
  10. claude_mpm/agents/templates/engineer.json +19 -4
  11. claude_mpm/agents/templates/ops.json +18 -3
  12. claude_mpm/agents/templates/qa.json +20 -4
  13. claude_mpm/agents/templates/research.json +20 -4
  14. claude_mpm/agents/templates/security.json +18 -3
  15. claude_mpm/agents/templates/version_control.json +16 -3
  16. claude_mpm/cli/__init__.py +5 -1
  17. claude_mpm/cli/commands/__init__.py +5 -1
  18. claude_mpm/cli/commands/agents.py +212 -3
  19. claude_mpm/cli/commands/aggregate.py +462 -0
  20. claude_mpm/cli/commands/config.py +277 -0
  21. claude_mpm/cli/commands/run.py +224 -36
  22. claude_mpm/cli/parser.py +176 -1
  23. claude_mpm/constants.py +19 -0
  24. claude_mpm/core/claude_runner.py +320 -44
  25. claude_mpm/core/config.py +161 -4
  26. claude_mpm/core/framework_loader.py +81 -0
  27. claude_mpm/hooks/claude_hooks/hook_handler.py +391 -9
  28. claude_mpm/init.py +40 -5
  29. claude_mpm/models/agent_session.py +511 -0
  30. claude_mpm/scripts/__init__.py +15 -0
  31. claude_mpm/scripts/start_activity_logging.py +86 -0
  32. claude_mpm/services/agents/deployment/agent_deployment.py +165 -19
  33. claude_mpm/services/agents/deployment/async_agent_deployment.py +461 -0
  34. claude_mpm/services/event_aggregator.py +547 -0
  35. claude_mpm/utils/agent_dependency_loader.py +655 -0
  36. claude_mpm/utils/console.py +11 -0
  37. claude_mpm/utils/dependency_cache.py +376 -0
  38. claude_mpm/utils/dependency_strategies.py +343 -0
  39. claude_mpm/utils/environment_context.py +310 -0
  40. {claude_mpm-3.5.6.dist-info → claude_mpm-3.7.1.dist-info}/METADATA +47 -3
  41. {claude_mpm-3.5.6.dist-info → claude_mpm-3.7.1.dist-info}/RECORD +45 -31
  42. claude_mpm/agents/templates/pm.json +0 -122
  43. {claude_mpm-3.5.6.dist-info → claude_mpm-3.7.1.dist-info}/WHEEL +0 -0
  44. {claude_mpm-3.5.6.dist-info → claude_mpm-3.7.1.dist-info}/entry_points.txt +0 -0
  45. {claude_mpm-3.5.6.dist-info → claude_mpm-3.7.1.dist-info}/licenses/LICENSE +0 -0
  46. {claude_mpm-3.5.6.dist-info → claude_mpm-3.7.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,547 @@
1
+ """Event Aggregator Service for Claude MPM.
2
+
3
+ WHY: This service connects to the Socket.IO dashboard server as a client and
4
+ captures all events emitted during Claude MPM sessions. It builds complete
5
+ session representations that can be saved as JSON documents for analysis.
6
+
7
+ DESIGN DECISION: We run as a Socket.IO client rather than modifying the server
8
+ to avoid interfering with the existing dashboard functionality. This allows the
9
+ aggregator to run alongside the dashboard without any conflicts.
10
+ """
11
+
12
import asyncio
import json
import logging
import os
import signal
import sys
import threading
import time
from collections import defaultdict
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

try:
    import socketio
    SOCKETIO_AVAILABLE = True
except ImportError:
    SOCKETIO_AVAILABLE = False
    socketio = None

from ..core.logger import get_logger
from ..models.agent_session import AgentSession, EventCategory
34
+
35
+
36
class EventAggregator:
    """Aggregates Socket.IO events into complete agent sessions.

    WHY: The dashboard emits events in real-time but doesn't persist complete
    sessions. This service captures those events and builds structured session
    documents for analysis and debugging.

    DESIGN DECISION: We maintain active sessions in memory and save them when
    they complete or after a timeout. This balances memory usage with the need
    to capture all events even if a session doesn't complete cleanly.
    """
47
+
48
+ def __init__(self, host: str = "localhost", port: int = 8765, save_dir: Optional[str] = None):
49
+ """Initialize the event aggregator.
50
+
51
+ Args:
52
+ host: Socket.IO server host
53
+ port: Socket.IO server port
54
+ save_dir: Directory to save session files (defaults to .claude-mpm/sessions/)
55
+ """
56
+ self.host = host
57
+ self.port = port
58
+ self.logger = get_logger("event_aggregator")
59
+
60
+ # Load configuration
61
+ from claude_mpm.core.config import Config
62
+ self.config = Config()
63
+
64
+ # Session storage
65
+ self.active_sessions: Dict[str, AgentSession] = {}
66
+ self.session_timeout = self.config.get('event_aggregator.session_timeout_minutes', 60) * 60
67
+ self.last_activity: Dict[str, float] = {}
68
+
69
+ # Save directory - use config or provided dir or default to .claude-mpm/activity
70
+ if save_dir is None:
71
+ activity_dir = self.config.get('event_aggregator.activity_directory', '.claude-mpm/activity')
72
+ self.save_dir = Path.cwd() / activity_dir if not Path(activity_dir).is_absolute() else Path(activity_dir)
73
+ else:
74
+ self.save_dir = Path(save_dir)
75
+ self.save_dir.mkdir(parents=True, exist_ok=True)
76
+
77
+ # Socket.IO client
78
+ self.sio_client = None
79
+ self.connected = False
80
+ self.running = False
81
+ self.client_thread = None
82
+ self.client_loop = None
83
+
84
+ # Event statistics
85
+ self.total_events_captured = 0
86
+ self.events_by_type = defaultdict(int)
87
+ self.sessions_completed = 0
88
+
89
+ # Cleanup task
90
+ self.cleanup_task = None
91
+
92
+ self.logger.info(f"Event Aggregator initialized - will connect to {host}:{port}")
93
+ self.logger.info(f"Sessions will be saved to: {self.save_dir}")
94
+
95
+ def start(self) -> bool:
96
+ """Start the aggregator service.
97
+
98
+ Returns:
99
+ True if started successfully, False otherwise
100
+ """
101
+ if not SOCKETIO_AVAILABLE:
102
+ self.logger.error("Socket.IO client not available. Install python-socketio package.")
103
+ return False
104
+
105
+ if self.running:
106
+ self.logger.warning("Aggregator already running")
107
+ return True
108
+
109
+ self.running = True
110
+
111
+ # Start the Socket.IO client in a background thread
112
+ self.client_thread = threading.Thread(target=self._run_client, daemon=True)
113
+ self.client_thread.start()
114
+
115
+ # Wait a moment for connection
116
+ time.sleep(1)
117
+
118
+ if self.connected:
119
+ self.logger.info("Event Aggregator started successfully")
120
+ return True
121
+ else:
122
+ self.logger.error("Failed to connect to Socket.IO server")
123
+ self.running = False
124
+ return False
125
+
126
+ def stop(self):
127
+ """Stop the aggregator service."""
128
+ self.logger.info("Stopping Event Aggregator...")
129
+ self.running = False
130
+
131
+ # Save all active sessions
132
+ self._save_all_sessions()
133
+
134
+ # Disconnect Socket.IO client
135
+ if self.sio_client and self.connected:
136
+ try:
137
+ asyncio.run_coroutine_threadsafe(
138
+ self.sio_client.disconnect(),
139
+ self.client_loop
140
+ ).result(timeout=2)
141
+ except:
142
+ pass
143
+
144
+ # Stop the client thread
145
+ if self.client_thread and self.client_thread.is_alive():
146
+ self.client_thread.join(timeout=3)
147
+
148
+ self.logger.info(f"Event Aggregator stopped - captured {self.total_events_captured} events")
149
+ self.logger.info(f"Completed sessions: {self.sessions_completed}")
150
+
151
+ def _run_client(self):
152
+ """Run the Socket.IO client in a background thread."""
153
+ self.client_loop = asyncio.new_event_loop()
154
+ asyncio.set_event_loop(self.client_loop)
155
+
156
+ try:
157
+ self.client_loop.run_until_complete(self._connect_and_listen())
158
+ except Exception as e:
159
+ self.logger.error(f"Client thread error: {e}")
160
+ finally:
161
+ self.client_loop.close()
162
+
163
+ async def _connect_and_listen(self):
164
+ """Connect to Socket.IO server and listen for events."""
165
+ try:
166
+ self.sio_client = socketio.AsyncClient(
167
+ reconnection=True,
168
+ reconnection_attempts=0, # Infinite retries
169
+ reconnection_delay=1,
170
+ reconnection_delay_max=5
171
+ )
172
+
173
+ # Register event handlers
174
+ self._register_handlers()
175
+
176
+ # Connect to server
177
+ url = f'http://{self.host}:{self.port}'
178
+ self.logger.info(f"Connecting to Socket.IO server at {url}")
179
+ await self.sio_client.connect(url)
180
+
181
+ # Start cleanup task
182
+ self.cleanup_task = asyncio.create_task(self._periodic_cleanup())
183
+
184
+ # Keep running until stopped
185
+ while self.running:
186
+ await asyncio.sleep(0.5)
187
+
188
+ # Cancel cleanup task
189
+ if self.cleanup_task:
190
+ self.cleanup_task.cancel()
191
+ try:
192
+ await self.cleanup_task
193
+ except asyncio.CancelledError:
194
+ pass
195
+
196
+ except Exception as e:
197
+ self.logger.error(f"Connection error: {e}")
198
+ self.connected = False
199
+
200
+ def _register_handlers(self):
201
+ """Register Socket.IO event handlers."""
202
+
203
+ @self.sio_client.event
204
+ async def connect():
205
+ """Handle connection to server."""
206
+ self.connected = True
207
+ self.logger.info("Connected to Socket.IO server")
208
+
209
+ # Request event history to catch up on any missed events
210
+ await self.sio_client.emit('get_history', {
211
+ 'limit': 100
212
+ })
213
+
214
+ @self.sio_client.event
215
+ async def disconnect():
216
+ """Handle disconnection from server."""
217
+ self.connected = False
218
+ self.logger.warning("Disconnected from Socket.IO server")
219
+
220
+ @self.sio_client.event
221
+ async def claude_event(data):
222
+ """Handle Claude events from the server.
223
+
224
+ WHY: This is the main event handler that captures all events
225
+ emitted by the dashboard and processes them into sessions.
226
+ """
227
+ try:
228
+ await self._process_event(data)
229
+ except Exception as e:
230
+ self.logger.error(f"Error processing event: {e}")
231
+
232
+ @self.sio_client.event
233
+ async def history(data):
234
+ """Handle historical events from the server.
235
+
236
+ WHY: When we connect, we request recent history to ensure we
237
+ don't miss events from sessions that started before we connected.
238
+ """
239
+ try:
240
+ events = data.get('events', [])
241
+ self.logger.info(f"Received {len(events)} historical events")
242
+
243
+ for event in events:
244
+ await self._process_event(event)
245
+
246
+ except Exception as e:
247
+ self.logger.error(f"Error processing history: {e}")
248
+
249
+ async def _process_event(self, event_data: Dict[str, Any]):
250
+ """Process a single event and add it to the appropriate session.
251
+
252
+ WHY: Each event needs to be routed to the correct session and
253
+ processed according to its type.
254
+ """
255
+ try:
256
+ # Extract event metadata
257
+ event_type = event_data.get('type', 'unknown')
258
+ timestamp = event_data.get('timestamp', datetime.utcnow().isoformat() + 'Z')
259
+ data = event_data.get('data', {})
260
+
261
+ # Update statistics
262
+ self.total_events_captured += 1
263
+ self.events_by_type[event_type] += 1
264
+
265
+ # Determine session ID
266
+ session_id = self._extract_session_id(event_type, data)
267
+
268
+ if not session_id:
269
+ # Some events don't belong to a specific session
270
+ self.logger.debug(f"Event {event_type} has no session ID, skipping")
271
+ return
272
+
273
+ # Get or create session
274
+ session = self._get_or_create_session(session_id, event_type, data, timestamp)
275
+
276
+ # Add event to session
277
+ session.add_event(event_type, data, timestamp)
278
+
279
+ # Update last activity time
280
+ self.last_activity[session_id] = time.time()
281
+
282
+ # Check if session ended
283
+ if event_type in ['session.end', 'Stop']:
284
+ await self._finalize_session(session_id)
285
+
286
+ # Log progress periodically
287
+ if self.total_events_captured % 100 == 0:
288
+ self.logger.info(f"Processed {self.total_events_captured} events, "
289
+ f"active sessions: {len(self.active_sessions)}")
290
+
291
+ except Exception as e:
292
+ self.logger.error(f"Error processing event {event_data.get('type', 'unknown')}: {e}")
293
+
294
+ def _extract_session_id(self, event_type: str, data: Dict[str, Any]) -> Optional[str]:
295
+ """Extract session ID from event data.
296
+
297
+ WHY: Events use different field names for session ID depending on
298
+ their source and type.
299
+ """
300
+ # Try common session ID fields
301
+ session_id = (
302
+ data.get('session_id') or
303
+ data.get('sessionId') or
304
+ data.get('session') or
305
+ data.get('sid')
306
+ )
307
+
308
+ # For session.start events, the session_id is the key piece of data
309
+ if event_type == 'session.start' and 'session_id' in data:
310
+ return data['session_id']
311
+
312
+ # For hook events, check nested data
313
+ if not session_id and isinstance(data, dict):
314
+ for key in ['hook_data', 'event_data', 'context']:
315
+ if key in data and isinstance(data[key], dict):
316
+ nested_id = data[key].get('session_id') or data[key].get('sessionId')
317
+ if nested_id:
318
+ return nested_id
319
+
320
+ return session_id
321
+
322
+ def _get_or_create_session(self, session_id: str, event_type: str,
323
+ data: Dict[str, Any], timestamp: str) -> AgentSession:
324
+ """Get existing session or create a new one.
325
+
326
+ WHY: Sessions are created on demand when we see the first event
327
+ for a new session ID.
328
+ """
329
+ if session_id not in self.active_sessions:
330
+ # Create new session
331
+ session = AgentSession(
332
+ session_id=session_id,
333
+ start_time=timestamp
334
+ )
335
+
336
+ # Extract initial metadata if this is a session.start event
337
+ if event_type == 'session.start':
338
+ session.working_directory = data.get('working_directory', '')
339
+ session.launch_method = data.get('launch_method', '')
340
+ session.claude_pid = data.get('pid')
341
+
342
+ # Try to get git branch and project info
343
+ instance_info = data.get('instance_info', {})
344
+ session.git_branch = instance_info.get('git_branch')
345
+ session.project_root = instance_info.get('working_dir')
346
+
347
+ self.active_sessions[session_id] = session
348
+ self.last_activity[session_id] = time.time()
349
+
350
+ self.logger.info(f"Created new session: {session_id[:8]}...")
351
+
352
+ return self.active_sessions[session_id]
353
+
354
+ async def _finalize_session(self, session_id: str):
355
+ """Finalize and save a completed session.
356
+
357
+ WHY: When a session ends, we need to calculate final metrics
358
+ and persist it to disk.
359
+ """
360
+ if session_id not in self.active_sessions:
361
+ return
362
+
363
+ session = self.active_sessions[session_id]
364
+
365
+ # Finalize the session
366
+ session.finalize()
367
+
368
+ # Save to file
369
+ try:
370
+ filepath = session.save_to_file(self.save_dir)
371
+ self.logger.info(f"Saved session {session_id[:8]}... to {filepath}")
372
+ self.logger.info(f" - Events: {session.metrics.total_events}")
373
+ self.logger.info(f" - Delegations: {session.metrics.total_delegations}")
374
+ self.logger.info(f" - Tools used: {len(session.metrics.tools_used)}")
375
+ self.logger.info(f" - Files modified: {len(session.metrics.files_modified)}")
376
+
377
+ self.sessions_completed += 1
378
+ except Exception as e:
379
+ self.logger.error(f"Failed to save session {session_id}: {e}")
380
+
381
+ # Remove from active sessions
382
+ del self.active_sessions[session_id]
383
+ if session_id in self.last_activity:
384
+ del self.last_activity[session_id]
385
+
386
+ async def _periodic_cleanup(self):
387
+ """Periodically clean up inactive sessions.
388
+
389
+ WHY: Some sessions may not complete cleanly, so we need to
390
+ periodically save and remove inactive sessions to prevent
391
+ memory leaks.
392
+ """
393
+ while self.running:
394
+ try:
395
+ await asyncio.sleep(60) # Check every minute
396
+
397
+ current_time = time.time()
398
+ sessions_to_finalize = []
399
+
400
+ for session_id, last_time in list(self.last_activity.items()):
401
+ if current_time - last_time > self.session_timeout:
402
+ sessions_to_finalize.append(session_id)
403
+
404
+ for session_id in sessions_to_finalize:
405
+ self.logger.info(f"Finalizing inactive session: {session_id[:8]}...")
406
+ await self._finalize_session(session_id)
407
+
408
+ except asyncio.CancelledError:
409
+ break
410
+ except Exception as e:
411
+ self.logger.error(f"Error in cleanup task: {e}")
412
+
413
+ def _save_all_sessions(self):
414
+ """Save all active sessions to disk.
415
+
416
+ WHY: Called on shutdown to ensure we don't lose any data.
417
+ """
418
+ for session_id in list(self.active_sessions.keys()):
419
+ try:
420
+ session = self.active_sessions[session_id]
421
+ session.finalize()
422
+ filepath = session.save_to_file(self.save_dir)
423
+ self.logger.info(f"Saved active session {session_id[:8]}... to {filepath}")
424
+ except Exception as e:
425
+ self.logger.error(f"Failed to save session {session_id}: {e}")
426
+
427
+ def get_status(self) -> Dict[str, Any]:
428
+ """Get current status of the aggregator.
429
+
430
+ Returns:
431
+ Status dictionary with metrics and state
432
+ """
433
+ return {
434
+ 'running': self.running,
435
+ 'connected': self.connected,
436
+ 'server': f"{self.host}:{self.port}",
437
+ 'save_directory': str(self.save_dir),
438
+ 'active_sessions': len(self.active_sessions),
439
+ 'sessions_completed': self.sessions_completed,
440
+ 'total_events': self.total_events_captured,
441
+ 'events_by_type': dict(self.events_by_type),
442
+ 'active_session_ids': [sid[:8] + '...' for sid in self.active_sessions.keys()]
443
+ }
444
+
445
+ def list_sessions(self, limit: int = 10) -> List[Dict[str, Any]]:
446
+ """List captured sessions.
447
+
448
+ Args:
449
+ limit: Maximum number of sessions to return
450
+
451
+ Returns:
452
+ List of session summaries
453
+ """
454
+ sessions = []
455
+
456
+ # Get saved session files
457
+ session_files = sorted(
458
+ self.save_dir.glob('session_*.json'),
459
+ key=lambda p: p.stat().st_mtime,
460
+ reverse=True
461
+ )[:limit]
462
+
463
+ for filepath in session_files:
464
+ try:
465
+ # Load just the metadata, not the full session
466
+ with open(filepath, 'r') as f:
467
+ data = json.load(f)
468
+
469
+ sessions.append({
470
+ 'file': filepath.name,
471
+ 'session_id': data.get('session_id', 'unknown')[:8] + '...',
472
+ 'start_time': data.get('start_time', 'unknown'),
473
+ 'end_time': data.get('end_time', 'unknown'),
474
+ 'events': data.get('metrics', {}).get('total_events', 0),
475
+ 'delegations': data.get('metrics', {}).get('total_delegations', 0),
476
+ 'initial_prompt': (data.get('initial_prompt', '')[:50] + '...')
477
+ if data.get('initial_prompt') else 'N/A'
478
+ })
479
+ except Exception as e:
480
+ self.logger.error(f"Error reading session file {filepath}: {e}")
481
+
482
+ return sessions
483
+
484
+ def load_session(self, session_id_prefix: str) -> Optional[AgentSession]:
485
+ """Load a session by ID prefix.
486
+
487
+ Args:
488
+ session_id_prefix: First few characters of session ID
489
+
490
+ Returns:
491
+ AgentSession if found, None otherwise
492
+ """
493
+ # Search for matching session file
494
+ for filepath in self.save_dir.glob('session_*.json'):
495
+ if session_id_prefix in filepath.name:
496
+ try:
497
+ return AgentSession.load_from_file(str(filepath))
498
+ except Exception as e:
499
+ self.logger.error(f"Error loading session from {filepath}: {e}")
500
+
501
+ return None
502
+
503
+
504
# Global aggregator instance, managed by get_aggregator()/stop_aggregator().
_aggregator: Optional[EventAggregator] = None
506
+
507
+
508
def get_aggregator() -> EventAggregator:
    """Return the process-wide aggregator, creating it lazily on first use."""
    global _aggregator
    if _aggregator is None:
        # Defaults: localhost:8765, configured activity directory.
        _aggregator = EventAggregator()
    return _aggregator
514
+
515
+
516
def start_aggregator() -> bool:
    """Start the global aggregator service.

    Returns:
        True when the aggregator connected successfully.
    """
    return get_aggregator().start()
520
+
521
+
522
def stop_aggregator():
    """Stop the global aggregator service and discard the singleton."""
    global _aggregator
    if _aggregator is not None:
        _aggregator.stop()
        _aggregator = None
528
+
529
+
530
def aggregator_status() -> Dict[str, Any]:
    """Get status of the aggregator service."""
    return get_aggregator().get_status()
534
+
535
+
536
+ # Signal handlers for graceful shutdown
537
def _signal_handler(signum, frame):
    """Shutdown-signal hook: flush sessions via stop_aggregator(), then exit."""
    get_logger("event_aggregator").info(f"Received signal {signum}, shutting down...")
    stop_aggregator()
    sys.exit(0)
543
+
544
+
545
# Register signal handlers for graceful shutdown.
# FIX: signal.signal() raises ValueError when called outside the main thread,
# so this import-time side effect would crash any program that imports this
# module from a worker thread. Guarding keeps main-thread behavior identical.
if threading.current_thread() is threading.main_thread():
    signal.signal(signal.SIGINT, _signal_handler)
    signal.signal(signal.SIGTERM, _signal_handler)