claude-mpm 4.0.31__py3-none-any.whl → 4.0.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/BASE_AGENT_TEMPLATE.md +33 -25
  3. claude_mpm/agents/INSTRUCTIONS.md +14 -10
  4. claude_mpm/agents/templates/documentation.json +51 -34
  5. claude_mpm/agents/templates/research.json +0 -11
  6. claude_mpm/cli/__init__.py +63 -26
  7. claude_mpm/cli/commands/agent_manager.py +10 -8
  8. claude_mpm/core/framework_loader.py +272 -113
  9. claude_mpm/dashboard/static/css/dashboard.css +449 -0
  10. claude_mpm/dashboard/static/dist/components/agent-inference.js +1 -1
  11. claude_mpm/dashboard/static/dist/components/event-viewer.js +1 -1
  12. claude_mpm/dashboard/static/dist/components/file-tool-tracker.js +1 -1
  13. claude_mpm/dashboard/static/dist/components/module-viewer.js +1 -1
  14. claude_mpm/dashboard/static/dist/components/session-manager.js +1 -1
  15. claude_mpm/dashboard/static/dist/dashboard.js +1 -1
  16. claude_mpm/dashboard/static/dist/socket-client.js +1 -1
  17. claude_mpm/dashboard/static/js/components/agent-hierarchy.js +774 -0
  18. claude_mpm/dashboard/static/js/components/agent-inference.js +257 -3
  19. claude_mpm/dashboard/static/js/components/build-tracker.js +289 -0
  20. claude_mpm/dashboard/static/js/components/event-viewer.js +168 -39
  21. claude_mpm/dashboard/static/js/components/file-tool-tracker.js +17 -0
  22. claude_mpm/dashboard/static/js/components/session-manager.js +23 -3
  23. claude_mpm/dashboard/static/js/components/socket-manager.js +2 -0
  24. claude_mpm/dashboard/static/js/dashboard.js +207 -31
  25. claude_mpm/dashboard/static/js/socket-client.js +85 -6
  26. claude_mpm/dashboard/templates/index.html +1 -0
  27. claude_mpm/hooks/claude_hooks/connection_pool.py +12 -2
  28. claude_mpm/hooks/claude_hooks/event_handlers.py +81 -19
  29. claude_mpm/hooks/claude_hooks/hook_handler.py +72 -10
  30. claude_mpm/hooks/claude_hooks/hook_handler_eventbus.py +398 -0
  31. claude_mpm/hooks/claude_hooks/response_tracking.py +10 -0
  32. claude_mpm/services/agents/deployment/agent_deployment.py +86 -37
  33. claude_mpm/services/agents/deployment/agent_template_builder.py +18 -10
  34. claude_mpm/services/agents/deployment/agents_directory_resolver.py +10 -25
  35. claude_mpm/services/agents/deployment/multi_source_deployment_service.py +189 -3
  36. claude_mpm/services/agents/deployment/pipeline/steps/target_directory_step.py +3 -2
  37. claude_mpm/services/agents/deployment/strategies/system_strategy.py +10 -3
  38. claude_mpm/services/agents/deployment/strategies/user_strategy.py +10 -14
  39. claude_mpm/services/agents/deployment/system_instructions_deployer.py +8 -13
  40. claude_mpm/services/agents/memory/agent_memory_manager.py +141 -184
  41. claude_mpm/services/agents/memory/content_manager.py +182 -232
  42. claude_mpm/services/agents/memory/template_generator.py +4 -40
  43. claude_mpm/services/event_bus/__init__.py +18 -0
  44. claude_mpm/services/event_bus/event_bus.py +334 -0
  45. claude_mpm/services/event_bus/relay.py +301 -0
  46. claude_mpm/services/events/__init__.py +44 -0
  47. claude_mpm/services/events/consumers/__init__.py +18 -0
  48. claude_mpm/services/events/consumers/dead_letter.py +296 -0
  49. claude_mpm/services/events/consumers/logging.py +183 -0
  50. claude_mpm/services/events/consumers/metrics.py +242 -0
  51. claude_mpm/services/events/consumers/socketio.py +376 -0
  52. claude_mpm/services/events/core.py +470 -0
  53. claude_mpm/services/events/interfaces.py +230 -0
  54. claude_mpm/services/events/producers/__init__.py +14 -0
  55. claude_mpm/services/events/producers/hook.py +269 -0
  56. claude_mpm/services/events/producers/system.py +327 -0
  57. claude_mpm/services/mcp_gateway/core/process_pool.py +411 -0
  58. claude_mpm/services/mcp_gateway/server/stdio_server.py +13 -0
  59. claude_mpm/services/monitor_build_service.py +345 -0
  60. claude_mpm/services/socketio/event_normalizer.py +667 -0
  61. claude_mpm/services/socketio/handlers/connection.py +78 -20
  62. claude_mpm/services/socketio/handlers/hook.py +14 -5
  63. claude_mpm/services/socketio/migration_utils.py +329 -0
  64. claude_mpm/services/socketio/server/broadcaster.py +26 -33
  65. claude_mpm/services/socketio/server/core.py +4 -3
  66. {claude_mpm-4.0.31.dist-info → claude_mpm-4.0.34.dist-info}/METADATA +4 -3
  67. {claude_mpm-4.0.31.dist-info → claude_mpm-4.0.34.dist-info}/RECORD +71 -50
  68. {claude_mpm-4.0.31.dist-info → claude_mpm-4.0.34.dist-info}/WHEEL +0 -0
  69. {claude_mpm-4.0.31.dist-info → claude_mpm-4.0.34.dist-info}/entry_points.txt +0 -0
  70. {claude_mpm-4.0.31.dist-info → claude_mpm-4.0.34.dist-info}/licenses/LICENSE +0 -0
  71. {claude_mpm-4.0.31.dist-info → claude_mpm-4.0.34.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,470 @@
1
+ """
2
+ Event Bus Core Implementation
3
+ ============================
4
+
5
+ The central event bus that manages event flow from producers to consumers.
6
+ """
7
+
8
+ import asyncio
9
+ import re
10
+ import time
11
+ import uuid
12
+ from collections import defaultdict, deque
13
+ from dataclasses import dataclass, field
14
+ from datetime import datetime
15
+ from enum import Enum
16
+ from typing import Any, Deque, Dict, List, Optional, Set
17
+
18
+ from claude_mpm.core.logging_config import get_logger
19
+
20
+ from .interfaces import ConsumerPriority, IEventBus, IEventConsumer
21
+
22
+
23
class EventPriority(Enum):
    """Priority levels for events.

    Lower numeric value means higher urgency. EventBus._process_events
    iterates this enum in definition order, so CRITICAL queues are always
    drained before HIGH, NORMAL, and LOW.
    """

    CRITICAL = 1
    HIGH = 2
    NORMAL = 3
    LOW = 4
29
+
30
+
31
@dataclass
class EventMetadata:
    """Metadata associated with an event.

    Mutated in place by the bus during routing: delivery outcomes are
    recorded in consumers_processed / consumers_failed / error_messages.
    """

    retry_count: int = 0  # Retry counter (NOTE(review): never incremented in core.py — confirm intent)
    max_retries: int = 3  # Retry budget before the event is dead-lettered
    published_at: Optional[datetime] = None  # Set by EventBus.publish()
    consumed_at: Optional[datetime] = None  # NOTE(review): never assigned in core.py — confirm writer
    consumers_processed: Set[str] = field(default_factory=set)  # Names of consumers that succeeded
    consumers_failed: Set[str] = field(default_factory=set)  # Names of consumers that failed
    error_messages: List[str] = field(default_factory=list)  # Exception strings from failed deliveries
41
+
42
+
43
@dataclass
class Event:
    """
    Standard event format for the event bus.

    All events flowing through the system use this format.
    """

    id: str  # Unique event ID (auto-generated in __post_init__ when falsy)
    topic: str  # Event topic (e.g., "hook.response")
    type: str  # Event type (e.g., "AssistantResponse")
    timestamp: datetime  # When event was created (ISO-8601 strings accepted)
    source: str  # Who created the event
    data: Dict[str, Any]  # Event payload
    metadata: Optional[EventMetadata] = None  # Event metadata
    correlation_id: Optional[str] = None  # For tracking related events
    priority: EventPriority = EventPriority.NORMAL

    def __post_init__(self):
        """Normalize defaults: metadata object, generated id, parsed timestamp."""
        if self.metadata is None:
            self.metadata = EventMetadata()
        if not self.id:
            self.id = str(uuid.uuid4())
        if isinstance(self.timestamp, str):
            # Producers may hand us ISO-8601 strings; convert at the edge.
            self.timestamp = datetime.fromisoformat(self.timestamp)

    def matches_topic(self, pattern: str) -> bool:
        """
        Check if event matches a topic pattern.

        Supports wildcards:
        - * matches any single segment
        - ** matches any number of segments

        Examples:
        - "hook.*" matches "hook.response" but not "hook.tool.usage"
        - "hook.**" matches both "hook.response" and "hook.tool.usage"
        """
        if pattern == "**" or pattern == "*":
            return True

        # Convert wildcard pattern to regex.
        # BUG FIX: "**" must be shielded with a sentinel before the single-"*"
        # substitution runs. Previously "**" expanded to ".*" and the later
        # "*" -> "[^.]+" pass rewrote that inserted "*" into "[^.]+", producing
        # ".[^.]+" — so "hook.**" failed to match "hook.tool.usage", directly
        # contradicting the documented example above.
        sentinel = "\x00"
        regex_pattern = pattern.replace("**", sentinel)
        regex_pattern = regex_pattern.replace(".", r"\.")
        regex_pattern = regex_pattern.replace("*", "[^.]+")
        regex_pattern = regex_pattern.replace(sentinel, ".*")
        regex_pattern = f"^{regex_pattern}$"

        return bool(re.match(regex_pattern, self.topic))
90
+
91
+
92
class EventBus(IEventBus):
    """
    Central event bus implementation.

    Features:
    - Async event processing
    - Topic-based routing
    - Consumer priority
    - Error isolation
    - Metrics tracking
    - Optional persistence
    """

    def __init__(
        self,
        max_queue_size: int = 10000,
        process_interval: float = 0.01,
        batch_timeout: float = 0.1,
        enable_metrics: bool = True,
        enable_persistence: bool = False,
    ):
        """
        Initialize the event bus.

        Args:
            max_queue_size: Maximum events in queue
            process_interval: How often to process events (seconds)
            batch_timeout: Max time to wait for batch (seconds)
            enable_metrics: Track metrics
            enable_persistence: Persist events to disk
        """
        self.logger = get_logger("EventBus")

        # Configuration
        # NOTE(review): batch_timeout, enable_metrics and enable_persistence
        # are stored but never consulted anywhere else in this class (the
        # processing loop uses a hard-coded batch size of 10) — confirm
        # whether they were meant to be wired up.
        self.max_queue_size = max_queue_size
        self.process_interval = process_interval
        self.batch_timeout = batch_timeout
        self.enable_metrics = enable_metrics
        self.enable_persistence = enable_persistence

        # State
        self._running = False
        self._processing_task: Optional[asyncio.Task] = None

        # Event queue (priority-based)
        # NOTE(review): each per-priority deque is capped at max_queue_size // 4.
        # Once an individual deque is full, deque(maxlen=...) silently evicts
        # the *oldest* event on append — a drop path separate from (and not
        # counted by) the explicit "queue full" check in publish().
        self._event_queues: Dict[EventPriority, Deque[Event]] = {
            priority: deque(maxlen=max_queue_size // 4)
            for priority in EventPriority
        }

        # Consumers
        self._consumers: Dict[str, IEventConsumer] = {}  # consumer name -> consumer
        self._consumer_topics: Dict[str, List[str]] = {}  # consumer name -> topic patterns
        self._topic_consumers: Dict[str, Set[str]] = defaultdict(set)  # topic pattern -> consumer names

        # Metrics
        # NOTE(review): "processing_time_ms" is initialized here but never
        # updated (delivery timing is only emitted to the debug log).
        self._metrics = {
            "events_published": 0,
            "events_processed": 0,
            "events_failed": 0,
            "events_dropped": 0,
            "consumers_active": 0,
            "queue_size": 0,
            "processing_time_ms": 0,
            "last_event_time": None,
        }

        # Dead letter queue for failed events
        self._dead_letter_queue: Deque[Event] = deque(maxlen=1000)

    async def start(self) -> None:
        """Start the event bus and spawn the background processing loop."""
        if self._running:
            self.logger.warning("Event bus already running")
            return

        self.logger.info("Starting event bus")
        self._running = True

        # Start processing task
        self._processing_task = asyncio.create_task(self._process_events())

        self.logger.info("Event bus started")

    async def stop(self) -> None:
        """Stop the event bus gracefully.

        Cancels the processing loop, flushes whatever is still queued, then
        shuts down every registered consumer (errors are logged, not raised).
        """
        if not self._running:
            return

        self.logger.info("Stopping event bus")
        self._running = False

        # Wait for processing to complete
        # (cancel() interrupts _process_events at its next await; any events
        # still queued are handled by _flush_events below)
        if self._processing_task:
            self._processing_task.cancel()
            try:
                await self._processing_task
            except asyncio.CancelledError:
                pass

        # Process remaining events
        await self._flush_events()

        # Shutdown consumers
        for consumer in self._consumers.values():
            try:
                await consumer.shutdown()
            except Exception as e:
                self.logger.error(f"Error shutting down consumer {consumer.config.name}: {e}")

        self.logger.info("Event bus stopped")

    async def publish(self, event: Event) -> bool:
        """
        Publish an event to the bus.

        Events are queued based on priority and processed asynchronously.

        Returns:
            True if the event was queued; False if the bus is stopped or the
            aggregate queue is full (the event is then counted as dropped).
        """
        if not self._running:
            self.logger.warning("Cannot publish event - bus not running")
            return False

        # Check queue size (aggregate across all priority queues)
        total_size = sum(len(q) for q in self._event_queues.values())
        if total_size >= self.max_queue_size:
            self.logger.error(f"Event queue full ({total_size}/{self.max_queue_size}), dropping event")
            self._metrics["events_dropped"] += 1
            return False

        # Add metadata
        if event.metadata:
            event.metadata.published_at = datetime.now()

        # Queue event
        self._event_queues[event.priority].append(event)
        self._metrics["events_published"] += 1
        # queue_size here is a pre-append estimate; the processing loop
        # recomputes it after each batch.
        self._metrics["queue_size"] = total_size + 1

        self.logger.debug(f"Published event: {event.topic}/{event.type} (priority={event.priority.name})")
        return True

    async def subscribe(self, consumer: IEventConsumer) -> bool:
        """Subscribe a consumer to the bus.

        Initializes the consumer first; a consumer with no configured topics
        is registered under the catch-all "**" pattern and receives everything.
        Duplicate names are rejected.
        """
        config = consumer.config

        if config.name in self._consumers:
            self.logger.warning(f"Consumer {config.name} already subscribed")
            return False

        try:
            # Initialize consumer
            if not await consumer.initialize():
                self.logger.error(f"Failed to initialize consumer {config.name}")
                return False

            # Register consumer
            self._consumers[config.name] = consumer

            # Register topics
            if config.topics:
                self._consumer_topics[config.name] = config.topics
                for topic in config.topics:
                    self._topic_consumers[topic].add(config.name)
            else:
                # Consumer receives all events
                self._consumer_topics[config.name] = ["**"]
                self._topic_consumers["**"].add(config.name)

            self._metrics["consumers_active"] = len(self._consumers)

            self.logger.info(
                f"Subscribed consumer {config.name} to topics: "
                f"{self._consumer_topics[config.name]}"
            )
            return True

        except Exception as e:
            self.logger.error(f"Error subscribing consumer {config.name}: {e}")
            return False

    async def unsubscribe(self, consumer_name: str) -> bool:
        """Unsubscribe a consumer from the bus.

        Shuts the consumer down and removes it from all topic registries.
        """
        if consumer_name not in self._consumers:
            self.logger.warning(f"Consumer {consumer_name} not found")
            return False

        try:
            consumer = self._consumers[consumer_name]

            # Shutdown consumer
            await consumer.shutdown()

            # Remove from registries
            del self._consumers[consumer_name]

            # Remove topic subscriptions
            if consumer_name in self._consumer_topics:
                for topic in self._consumer_topics[consumer_name]:
                    self._topic_consumers[topic].discard(consumer_name)
                del self._consumer_topics[consumer_name]

            self._metrics["consumers_active"] = len(self._consumers)

            self.logger.info(f"Unsubscribed consumer {consumer_name}")
            return True

        except Exception as e:
            self.logger.error(f"Error unsubscribing consumer {consumer_name}: {e}")
            return False

    def get_consumers(self) -> List[IEventConsumer]:
        """Get list of active consumers."""
        return list(self._consumers.values())

    def get_metrics(self) -> Dict[str, Any]:
        """Get event bus metrics, merged with per-consumer metrics."""
        return {
            **self._metrics,
            "dead_letter_queue_size": len(self._dead_letter_queue),
            "consumers": {
                name: consumer.get_metrics()
                for name, consumer in self._consumers.items()
            }
        }

    @property
    def is_running(self) -> bool:
        """Check if event bus is running."""
        return self._running

    async def _process_events(self) -> None:
        """
        Main event processing loop.

        Continuously processes events from the queue and routes them
        to appropriate consumers. Drains queues highest-priority-first,
        up to 10 events per priority per pass, and sleeps for
        process_interval when idle.
        """
        while self._running:
            try:
                # Process events by priority
                events_processed = 0

                for priority in EventPriority:
                    queue = self._event_queues[priority]

                    # Process up to batch_size events
                    # (batch size is hard-coded to 10 here)
                    batch = []
                    while queue and len(batch) < 10:
                        batch.append(queue.popleft())

                    if batch:
                        await self._route_events(batch)
                        events_processed += len(batch)

                # Update metrics
                if events_processed > 0:
                    self._metrics["events_processed"] += events_processed
                    self._metrics["last_event_time"] = datetime.now()
                    self._metrics["queue_size"] = sum(len(q) for q in self._event_queues.values())

                # Sleep if no events
                if events_processed == 0:
                    await asyncio.sleep(self.process_interval)

            except Exception as e:
                self.logger.error(f"Error in event processing loop: {e}")
                await asyncio.sleep(1)  # Back off on error

    async def _route_events(self, events: List[Event]) -> None:
        """
        Route events to appropriate consumers.

        Events are routed based on topic subscriptions.
        Consumers are called in priority order.
        """
        for event in events:
            # Find matching consumers (a set, so a consumer matched by more
            # than one rule below is still delivered to exactly once)
            matching_consumers = set()

            # Check exact topic matches
            if event.topic in self._topic_consumers:
                matching_consumers.update(self._topic_consumers[event.topic])

            # Check wildcard subscriptions
            for pattern, consumers in self._topic_consumers.items():
                if "*" in pattern and event.matches_topic(pattern):
                    matching_consumers.update(consumers)

            # Check consumers with no specific topics (receive all)
            # (redundant with the wildcard scan above, since "**" contains "*"
            # and matches every topic — the set makes it harmless)
            if "**" in self._topic_consumers:
                matching_consumers.update(self._topic_consumers["**"])

            # Sort consumers by priority
            consumers_by_priority = defaultdict(list)
            for consumer_name in matching_consumers:
                if consumer_name in self._consumers:
                    consumer = self._consumers[consumer_name]
                    consumers_by_priority[consumer.config.priority].append(consumer)

            # Process event with each consumer, highest priority first
            for priority in ConsumerPriority:
                for consumer in consumers_by_priority[priority]:
                    await self._deliver_to_consumer(event, consumer)

    async def _deliver_to_consumer(self, event: Event, consumer: IEventConsumer) -> None:
        """
        Deliver an event to a specific consumer.

        Handles errors gracefully without affecting other consumers:
        consumer exceptions are logged and recorded on the event's metadata,
        never propagated to the processing loop.
        """
        try:
            # Apply filter if configured (truthy return keeps the event)
            if consumer.config.filter_func:
                if not consumer.config.filter_func(event):
                    return

            # Apply transformation if configured
            if consumer.config.transform_func:
                event = consumer.config.transform_func(event)

            # Process event
            start_time = time.time()
            success = await consumer.consume(event)
            elapsed_ms = (time.time() - start_time) * 1000

            # Update metrics
            if success:
                event.metadata.consumers_processed.add(consumer.config.name)
                self.logger.debug(
                    f"Delivered event {event.id} to {consumer.config.name} "
                    f"({elapsed_ms:.1f}ms)"
                )
            else:
                event.metadata.consumers_failed.add(consumer.config.name)
                self.logger.warning(
                    f"Consumer {consumer.config.name} failed to process event {event.id}"
                )

                # Add to dead letter queue if all retries exhausted
                # NOTE(review): retry_count is never incremented anywhere in
                # this class, so with the default max_retries=3 this branch is
                # unreachable unless an event arrives with metadata already
                # populated — confirm whether retry logic was intended here.
                if event.metadata.retry_count >= event.metadata.max_retries:
                    self._dead_letter_queue.append(event)
                    self._metrics["events_failed"] += 1

        except Exception as e:
            self.logger.error(
                f"Error delivering event {event.id} to consumer "
                f"{consumer.config.name}: {e}"
            )
            event.metadata.consumers_failed.add(consumer.config.name)
            event.metadata.error_messages.append(str(e))

            # Use custom error handler if provided (awaited — must be async)
            if consumer.config.error_handler:
                try:
                    await consumer.config.error_handler(event, e)
                except Exception as handler_error:
                    self.logger.error(
                        f"Error in custom error handler for {consumer.config.name}: "
                        f"{handler_error}"
                    )

    async def _flush_events(self) -> None:
        """Process all remaining events in the queue (called during stop())."""
        total_events = sum(len(q) for q in self._event_queues.values())

        if total_events > 0:
            self.logger.info(f"Flushing {total_events} remaining events")

            for priority in EventPriority:
                queue = self._event_queues[priority]
                while queue:
                    batch = []
                    for _ in range(min(10, len(queue))):
                        batch.append(queue.popleft())

                    if batch:
                        await self._route_events(batch)

            self.logger.info("Event flush complete")
@@ -0,0 +1,230 @@
1
+ """
2
+ Event Bus Interfaces
3
+ ===================
4
+
5
+ Defines the contracts for event producers and consumers in the event bus system.
6
+ """
7
+
8
+ from abc import ABC, abstractmethod
9
+ from dataclasses import dataclass
10
+ from enum import Enum
11
+ from typing import Any, Callable, Dict, List, Optional, Pattern, Set
12
+
13
+ from .core import Event
14
+
15
+
16
class ConsumerPriority(Enum):
    """Priority levels for event consumers.

    Lower value runs earlier: EventBus._route_events iterates this enum in
    definition order when delivering an event to its matching consumers.
    """

    CRITICAL = 1  # Process first (e.g., error handlers)
    HIGH = 2  # Important consumers (e.g., Socket.IO)
    NORMAL = 3  # Default priority
    LOW = 4  # Background processing
    DEFERRED = 5  # Process last (e.g., metrics, logging)
23
+
24
+
25
@dataclass
class ConsumerConfig:
    """Configuration for an event consumer.

    NOTE(review): the EventBus in core.py currently reads only name, topics,
    priority, filter_func, transform_func and error_handler; topic_pattern,
    batch_size, batch_timeout, max_retries and retry_backoff are not consulted
    there — confirm individual consumers honor them.
    """

    name: str  # Consumer identifier (must be unique per bus; duplicates are rejected)
    topics: Optional[List[str]] = None  # Topics to subscribe to (None = all)
    topic_pattern: Optional[Pattern] = None  # Regex pattern for topics
    priority: ConsumerPriority = ConsumerPriority.NORMAL
    batch_size: int = 1  # Process events in batches
    batch_timeout: float = 0.0  # Max time to wait for batch
    max_retries: int = 3  # Retry failed events
    retry_backoff: float = 1.0  # Backoff multiplier
    error_handler: Optional[Callable] = None  # Custom error handler (awaited by EventBus — must be async)
    filter_func: Optional[Callable] = None  # Event filter function (sync; truthy return keeps the event)
    transform_func: Optional[Callable] = None  # Event transformation (sync; returns the event to deliver)
39
+
40
+
41
class IEventProducer(ABC):
    """
    Interface for event producers.

    Producers create and publish events to the event bus without
    knowing about consumers or handling errors.

    NOTE(review): this module imports Event from .core while core.py imports
    ConsumerPriority/IEventBus/IEventConsumer from this module — a circular
    import. Event is only used in annotations here, so consider guarding the
    import with typing.TYPE_CHECKING; confirm current import order works.
    """

    @abstractmethod
    async def publish(self, event: Event) -> bool:
        """
        Publish an event to the bus.

        Args:
            event: The event to publish

        Returns:
            True if event was accepted, False otherwise
        """
        pass

    @abstractmethod
    async def publish_batch(self, events: List[Event]) -> int:
        """
        Publish multiple events efficiently.

        Args:
            events: List of events to publish

        Returns:
            Number of events successfully published
        """
        pass

    @property
    @abstractmethod
    def source_name(self) -> str:
        """Get the name of this event source."""
        pass
80
+
81
+
82
class IEventConsumer(ABC):
    """
    Interface for event consumers.

    Consumers subscribe to events and process them asynchronously.
    Each consumer is responsible for its own error handling; the bus
    logs consumer failures but never re-raises them.
    """

    @abstractmethod
    async def initialize(self) -> bool:
        """
        Initialize the consumer.

        Called by the bus during subscribe(); a False return aborts the
        subscription.

        Returns:
            True if initialization successful
        """
        pass

    @abstractmethod
    async def consume(self, event: Event) -> bool:
        """
        Process a single event.

        Args:
            event: The event to process

        Returns:
            True if event processed successfully
        """
        pass

    @abstractmethod
    async def consume_batch(self, events: List[Event]) -> int:
        """
        Process multiple events in a batch.

        Args:
            events: List of events to process

        Returns:
            Number of events successfully processed
        """
        pass

    @abstractmethod
    async def shutdown(self) -> None:
        """Shutdown the consumer gracefully.

        Called by the bus during unsubscribe() and bus stop().
        """
        pass

    @property
    @abstractmethod
    def config(self) -> ConsumerConfig:
        """Get consumer configuration."""
        pass

    @property
    @abstractmethod
    def is_healthy(self) -> bool:
        """Check if consumer is healthy."""
        pass

    @abstractmethod
    def get_metrics(self) -> Dict[str, Any]:
        """
        Get consumer metrics.

        Returns:
            Dictionary of metrics (events processed, errors, etc.)
        """
        pass
152
+
153
+
154
class IEventBus(ABC):
    """
    Interface for the event bus.

    The event bus manages subscriptions and routes events from
    producers to consumers.
    """

    @abstractmethod
    async def start(self) -> None:
        """Start the event bus."""
        pass

    @abstractmethod
    async def stop(self) -> None:
        """Stop the event bus gracefully."""
        pass

    @abstractmethod
    async def publish(self, event: Event) -> bool:
        """
        Publish an event to the bus.

        Args:
            event: The event to publish

        Returns:
            True if event was queued successfully
        """
        pass

    @abstractmethod
    async def subscribe(self, consumer: IEventConsumer) -> bool:
        """
        Subscribe a consumer to the bus.

        Args:
            consumer: The consumer to subscribe

        Returns:
            True if subscription successful
        """
        pass

    @abstractmethod
    async def unsubscribe(self, consumer_name: str) -> bool:
        """
        Unsubscribe a consumer from the bus.

        Args:
            consumer_name: Name of the consumer to unsubscribe

        Returns:
            True if unsubscription successful
        """
        pass

    @abstractmethod
    def get_consumers(self) -> List[IEventConsumer]:
        """Get list of active consumers."""
        pass

    @abstractmethod
    def get_metrics(self) -> Dict[str, Any]:
        """
        Get event bus metrics.

        Returns:
            Dictionary of metrics (queue size, throughput, etc.)
        """
        pass

    @property
    @abstractmethod
    def is_running(self) -> bool:
        """Check if event bus is running."""
        pass