pactown 0.1.4__py3-none-any.whl → 0.1.47__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pactown/events.py ADDED
@@ -0,0 +1,1066 @@
1
+ """
2
+ CQRS/Event Sourcing infrastructure for Pactown.
3
+
4
+ Provides a complete event sourcing foundation:
5
+ - Event: Immutable event records with versioning
6
+ - EventStore: Append-only event storage with subscriptions
7
+ - Aggregate: Base class for event-sourced aggregates
8
+ - Projections: Materialized views built from events
9
+ - Commands/Queries: CQRS pattern implementation
10
+
11
+ Usage:
12
+ from pactown.events import (
13
+ Event, EventType, EventStore, get_event_store,
14
+ ServiceAggregate, ServiceCommands, ServiceQueries,
15
+ )
16
+
17
+ # Record events
18
+ commands = ServiceCommands(get_event_store())
19
+ await commands.create_service(service_id=1, user_id=1, name="api", port=8000)
20
+ await commands.start_service(service_id=1, pid=12345)
21
+
22
+ # Query events
23
+ queries = ServiceQueries(get_event_store())
24
+ history = queries.get_service_history(service_id=1)
25
+ stats = queries.get_stats()
26
+ """
27
from abc import ABC, abstractmethod
from collections import defaultdict
from dataclasses import dataclass, field, replace
from datetime import datetime, UTC
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import asyncio
import json
import logging
import uuid
37
+
38
+
39
+ class EventType(str, Enum):
40
+ """Standard event types for service lifecycle."""
41
+ # Service lifecycle
42
+ SERVICE_CREATED = "service.created"
43
+ SERVICE_STARTED = "service.started"
44
+ SERVICE_STOPPED = "service.stopped"
45
+ SERVICE_DELETED = "service.deleted"
46
+ SERVICE_HEALTH_CHECK = "service.health_check"
47
+ SERVICE_ERROR = "service.error"
48
+ SERVICE_RESTARTED = "service.restarted"
49
+
50
+ # Sandbox lifecycle
51
+ SANDBOX_CREATED = "sandbox.created"
52
+ SANDBOX_DESTROYED = "sandbox.destroyed"
53
+ SANDBOX_FILES_WRITTEN = "sandbox.files_written"
54
+ SANDBOX_DEPS_INSTALLED = "sandbox.deps_installed"
55
+
56
+ # Project lifecycle
57
+ PROJECT_CREATED = "project.created"
58
+ PROJECT_UPDATED = "project.updated"
59
+ PROJECT_DELETED = "project.deleted"
60
+ PROJECT_VALIDATED = "project.validated"
61
+
62
+ # User actions
63
+ USER_LOGIN = "user.login"
64
+ USER_LOGOUT = "user.logout"
65
+ USER_CREATED = "user.created"
66
+
67
+ # Security events
68
+ SECURITY_CHECK_PASSED = "security.check_passed"
69
+ SECURITY_CHECK_FAILED = "security.check_failed"
70
+ RATE_LIMIT_HIT = "security.rate_limit"
71
+ ANOMALY_DETECTED = "security.anomaly"
72
+
73
+ # Deployment events
74
+ DEPLOYMENT_STARTED = "deployment.started"
75
+ DEPLOYMENT_COMPLETED = "deployment.completed"
76
+ DEPLOYMENT_FAILED = "deployment.failed"
77
+
78
+ # Custom events
79
+ CUSTOM = "custom"
80
+
81
+
82
@dataclass(frozen=True)
class Event:
    """
    Immutable event record.

    Events are the source of truth in event sourcing; each one captures a
    fact that occurred in the system at a specific point in time.

    Attributes:
        event_type: The type of event (EventType member or custom string)
        aggregate_id: ID of the owning aggregate (e.g. "service:123")
        aggregate_type: Kind of aggregate ("service", "project", "user", ...)
        data: Domain-specific event payload
        metadata: Extra context (user_id, correlation_id, etc.)
        timestamp: When the event occurred (UTC)
        event_id: Unique identifier for this event
        version: Event schema version for migrations
        sequence: Position in the event stream (assigned by EventStore)
    """
    event_type: EventType | str
    aggregate_id: str
    aggregate_type: str
    data: Dict[str, Any]
    metadata: Dict[str, Any] = field(default_factory=dict)
    timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
    event_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    version: int = 1
    sequence: int = 0

    def to_dict(self) -> Dict[str, Any]:
        """Serialize event to a JSON-compatible dictionary."""
        raw_type = self.event_type
        if isinstance(raw_type, EventType):
            raw_type = raw_type.value
        return {
            "event_id": self.event_id,
            "event_type": raw_type,
            "aggregate_id": self.aggregate_id,
            "aggregate_type": self.aggregate_type,
            "data": self.data,
            "metadata": self.metadata,
            "timestamp": self.timestamp.isoformat(),
            "version": self.version,
            "sequence": self.sequence,
        }

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Event":
        """Deserialize event from a dictionary produced by to_dict()."""
        raw_type = d["event_type"]
        try:
            parsed_type: EventType | str = EventType(raw_type)
        except ValueError:
            # Unknown types stay as plain strings (custom events).
            parsed_type = raw_type

        stamp = d["timestamp"]
        if isinstance(stamp, str):
            stamp = datetime.fromisoformat(stamp)

        return cls(
            event_id=d["event_id"],
            event_type=parsed_type,
            aggregate_id=d["aggregate_id"],
            aggregate_type=d["aggregate_type"],
            data=d["data"],
            metadata=d.get("metadata", {}),
            timestamp=stamp,
            version=d.get("version", 1),
            sequence=d.get("sequence", 0),
        )
145
+
146
+
147
class EventStore:
    """
    Append-only event store with subscription support.

    Provides:
    - Append-only event storage
    - Event subscriptions for reactive updates
    - Querying by aggregate, type, or time range
    - Optional persistence to JSON file

    Appends are serialized through an asyncio.Lock; query methods are
    synchronous scans over the in-memory event list.
    """

    def __init__(self, persistence_path: Optional[Path] = None):
        """
        Initialize event store.

        Args:
            persistence_path: Optional path to persist events to JSON file.
                If the file already exists, events are loaded from it.
        """
        self._events: List[Event] = []
        # Per-event-type handlers, plus handlers that receive every event.
        self._subscribers: Dict[EventType | str, List[Callable]] = defaultdict(list)
        self._global_subscribers: List[Callable] = []
        self._lock = asyncio.Lock()
        self._sequence = 0
        self._persistence_path = persistence_path

        if persistence_path and persistence_path.exists():
            self._load_from_file()

    def _load_from_file(self) -> None:
        """Best-effort load from the persistence file.

        An unreadable or malformed file leaves the store empty instead of
        failing construction.
        """
        try:
            with open(self._persistence_path, 'r') as f:
                data = json.load(f)
            self._events = [Event.from_dict(e) for e in data.get("events", [])]
            self._sequence = data.get("sequence", len(self._events))
        except (OSError, ValueError, KeyError):
            # OSError: unreadable file. ValueError covers json.JSONDecodeError
            # and bad timestamps raised by Event.from_dict. KeyError: missing
            # required event fields.
            pass

    def _save_to_file(self) -> None:
        """Persist all events to the configured file (no-op if unset)."""
        if not self._persistence_path:
            return

        self._persistence_path.parent.mkdir(parents=True, exist_ok=True)
        with open(self._persistence_path, 'w') as f:
            json.dump({
                "events": [e.to_dict() for e in self._events],
                "sequence": self._sequence,
            }, f, indent=2, default=str)

    async def append(self, event: Event) -> Event:
        """
        Append event to store and notify subscribers.

        Args:
            event: Event to append

        Returns:
            A copy of the event with its sequence number assigned
        """
        async with self._lock:
            self._sequence += 1
            # Event is a frozen dataclass; derive a sequenced copy rather
            # than mutating in place.
            sequenced_event = replace(event, sequence=self._sequence)
            self._events.append(sequenced_event)

            if self._persistence_path:
                self._save_to_file()

            # Notify while holding the lock so handlers observe events
            # in append order.
            await self._notify_subscribers(sequenced_event)

            return sequenced_event

    async def _notify_subscribers(self, event: Event) -> None:
        """Invoke type-specific and global handlers for *event*.

        Handler failures are logged and swallowed: a broken subscriber must
        not break the append path or starve other subscribers.
        """
        handlers = (
            self._subscribers.get(event.event_type, []) +
            self._global_subscribers
        )

        for handler in handlers:
            try:
                if asyncio.iscoroutinefunction(handler):
                    await handler(event)
                else:
                    handler(event)
            except Exception:
                # Logged with traceback instead of print() so failures reach
                # the application's logging configuration.
                logging.getLogger(__name__).exception("Event handler error")

    def subscribe(self, event_type: EventType | str, handler: Callable) -> Callable[[], None]:
        """
        Subscribe to events of a specific type.

        Args:
            event_type: Type of events to subscribe to
            handler: Callback function (sync or async)

        Returns:
            Unsubscribe function
        """
        self._subscribers[event_type].append(handler)

        def unsubscribe():
            self._subscribers[event_type].remove(handler)

        return unsubscribe

    def subscribe_all(self, handler: Callable) -> Callable[[], None]:
        """
        Subscribe to all events.

        Args:
            handler: Callback function (sync or async)

        Returns:
            Unsubscribe function
        """
        self._global_subscribers.append(handler)

        def unsubscribe():
            self._global_subscribers.remove(handler)

        return unsubscribe

    def get_events(
        self,
        aggregate_id: Optional[str] = None,
        aggregate_type: Optional[str] = None,
        event_type: Optional[EventType | str] = None,
        since: Optional[datetime] = None,
        until: Optional[datetime] = None,
        since_sequence: Optional[int] = None,
        limit: int = 100,
    ) -> List[Event]:
        """
        Query events with filters.

        Args:
            aggregate_id: Filter by aggregate ID
            aggregate_type: Filter by aggregate type
            event_type: Filter by event type
            since: Filter events at or after this timestamp
            until: Filter events at or before this timestamp
            since_sequence: Filter events after this sequence number
            limit: Maximum number of events to return

        Returns:
            The most recent `limit` matching events, in stream order
        """
        events = self._events

        if aggregate_id:
            events = [e for e in events if e.aggregate_id == aggregate_id]
        if aggregate_type:
            events = [e for e in events if e.aggregate_type == aggregate_type]
        if event_type:
            events = [e for e in events if e.event_type == event_type]
        if since:
            events = [e for e in events if e.timestamp >= since]
        if until:
            events = [e for e in events if e.timestamp <= until]
        if since_sequence is not None:
            events = [e for e in events if e.sequence > since_sequence]

        # Tail slice: keeps the newest `limit` matches.
        return events[-limit:]

    def get_aggregate_history(self, aggregate_id: str) -> List[Event]:
        """Get all events for a specific aggregate in sequence order."""
        return sorted(
            [e for e in self._events if e.aggregate_id == aggregate_id],
            key=lambda e: e.sequence
        )

    def count(self, event_type: Optional[EventType | str] = None) -> int:
        """Count events, optionally filtered by type."""
        if event_type:
            return len([e for e in self._events if e.event_type == event_type])
        return len(self._events)

    def get_current_sequence(self) -> int:
        """Get current sequence number."""
        return self._sequence

    def clear(self) -> None:
        """Clear all events and delete the persistence file (use with caution)."""
        self._events.clear()
        self._sequence = 0
        if self._persistence_path and self._persistence_path.exists():
            self._persistence_path.unlink()
349
+
350
+
351
# Global event store instance.
# Module-level singleton; read/written only via get_event_store()/set_event_store().
_event_store: Optional[EventStore] = None
353
+
354
+
355
def get_event_store(persistence_path: Optional[Path] = None) -> EventStore:
    """Get or create the global event store.

    NOTE(review): ``persistence_path`` is only honored by the first call,
    which creates the store; subsequent calls return the existing instance
    and silently ignore a different path.
    """
    global _event_store
    if _event_store is None:
        _event_store = EventStore(persistence_path=persistence_path)
    return _event_store
361
+
362
+
363
def set_event_store(store: EventStore) -> None:
    """Set the global event store instance, replacing any existing one."""
    global _event_store
    _event_store = store
367
+
368
+
369
# Aggregate base class
# Bound TypeVar so Aggregate.load() is typed to return the concrete subclass.
T = TypeVar('T', bound='Aggregate')
371
+
372
+
373
class Aggregate(ABC):
    """
    Base class for event-sourced aggregates.

    An aggregate is a consistency boundary whose state is never stored
    directly: it is reconstructed by replaying its event stream, and new
    facts are buffered as pending events until persisted.

    Usage:
        class ServiceAggregate(Aggregate):
            def __init__(self, aggregate_id: str):
                super().__init__(aggregate_id, "service")
                self.name = ""
                self.status = "pending"

            def apply_event(self, event: Event) -> None:
                if event.event_type == EventType.SERVICE_CREATED:
                    self.name = event.data["name"]
                    self.status = "created"
                elif event.event_type == EventType.SERVICE_STARTED:
                    self.status = "running"
    """

    def __init__(self, aggregate_id: str, aggregate_type: str):
        self.aggregate_id = aggregate_id
        self.aggregate_type = aggregate_type
        self.version = 0  # sequence number of the last applied event
        self._pending_events: List[Event] = []

    @abstractmethod
    def apply_event(self, event: Event) -> None:
        """Apply an event to update aggregate state."""

    def load_from_history(self, events: List[Event]) -> None:
        """Rebuild state by replaying *events* in order."""
        for historical in events:
            self.apply_event(historical)
            self.version = historical.sequence

    def raise_event(self, event_type: EventType | str, data: Dict[str, Any], metadata: Optional[Dict[str, Any]] = None) -> Event:
        """
        Raise a new event from this aggregate.

        The event is applied to local state immediately and buffered as
        pending until persisted via an event store.

        Args:
            event_type: Type of event
            data: Event payload
            metadata: Optional metadata

        Returns:
            The raised event (not yet persisted)
        """
        raised = Event(
            event_type=event_type,
            aggregate_id=self.aggregate_id,
            aggregate_type=self.aggregate_type,
            data=data,
            metadata=metadata or {},
        )
        self._pending_events.append(raised)
        self.apply_event(raised)
        return raised

    def get_pending_events(self) -> List[Event]:
        """Return a copy of events raised but not yet persisted."""
        return list(self._pending_events)

    def clear_pending_events(self) -> None:
        """Drop buffered events once they have been persisted."""
        self._pending_events.clear()

    @classmethod
    async def load(cls: type[T], aggregate_id: str, event_store: EventStore) -> T:
        """Instantiate the aggregate and replay its history from *event_store*."""
        loaded = cls(aggregate_id)
        loaded.load_from_history(event_store.get_aggregate_history(aggregate_id))
        return loaded
450
+
451
+
452
+ class ServiceAggregate(Aggregate):
453
+ """
454
+ Event-sourced aggregate for service lifecycle.
455
+
456
+ Tracks service state through events, enabling:
457
+ - Full audit trail of service changes
458
+ - State reconstruction at any point in time
459
+ - Eventual consistency with read models
460
+ """
461
+
462
+ def __init__(self, aggregate_id: str):
463
+ super().__init__(aggregate_id, "service")
464
+ self.service_id: Optional[int] = None
465
+ self.user_id: Optional[int] = None
466
+ self.name: str = ""
467
+ self.port: int = 0
468
+ self.status: str = "pending"
469
+ self.pid: Optional[int] = None
470
+ self.started_at: Optional[datetime] = None
471
+ self.stopped_at: Optional[datetime] = None
472
+ self.error_count: int = 0
473
+ self.last_error: Optional[str] = None
474
+
475
+ def apply_event(self, event: Event) -> None:
476
+ """Apply event to update service state."""
477
+ if event.event_type == EventType.SERVICE_CREATED:
478
+ self.service_id = event.data.get("service_id")
479
+ self.user_id = event.data.get("user_id")
480
+ self.name = event.data.get("name", "")
481
+ self.port = event.data.get("port", 0)
482
+ self.status = "created"
483
+
484
+ elif event.event_type == EventType.SERVICE_STARTED:
485
+ self.status = "running"
486
+ self.pid = event.data.get("pid")
487
+ self.started_at = event.timestamp
488
+
489
+ elif event.event_type == EventType.SERVICE_STOPPED:
490
+ self.status = "stopped"
491
+ self.pid = None
492
+ self.stopped_at = event.timestamp
493
+
494
+ elif event.event_type == EventType.SERVICE_ERROR:
495
+ self.error_count += 1
496
+ self.last_error = event.data.get("error")
497
+ if event.data.get("fatal", False):
498
+ self.status = "error"
499
+
500
+ elif event.event_type == EventType.SERVICE_DELETED:
501
+ self.status = "deleted"
502
+
503
+ def to_dict(self) -> Dict[str, Any]:
504
+ """Serialize aggregate state."""
505
+ return {
506
+ "aggregate_id": self.aggregate_id,
507
+ "service_id": self.service_id,
508
+ "user_id": self.user_id,
509
+ "name": self.name,
510
+ "port": self.port,
511
+ "status": self.status,
512
+ "pid": self.pid,
513
+ "started_at": self.started_at.isoformat() if self.started_at else None,
514
+ "stopped_at": self.stopped_at.isoformat() if self.stopped_at else None,
515
+ "error_count": self.error_count,
516
+ "last_error": self.last_error,
517
+ "version": self.version,
518
+ }
519
+
520
+
521
# Command handlers (Write side of CQRS)
class ServiceCommands:
    """
    Command handlers for service operations.

    Commands represent intentions to change state. Each command results
    in one event being appended to the store.
    """

    def __init__(self, event_store: EventStore):
        self.event_store = event_store

    async def create_service(
        self,
        service_id: int,
        user_id: int,
        name: str,
        port: int,
        **kwargs
    ) -> Event:
        """Record service creation.

        Extra keyword arguments are merged into the event payload.
        """
        event = Event(
            event_type=EventType.SERVICE_CREATED,
            aggregate_id=f"service:{service_id}",
            aggregate_type="service",
            data={
                "service_id": service_id,
                "user_id": user_id,
                "name": name,
                "port": port,
                **kwargs,
            },
            metadata={"user_id": user_id},
        )
        return await self.event_store.append(event)

    async def start_service(
        self,
        service_id: int,
        pid: Optional[int] = None,
        startup_time_ms: Optional[float] = None,
        cached: bool = False,
    ) -> Event:
        """Record service start.

        Args:
            service_id: ID of the started service
            pid: OS process ID, if known
            startup_time_ms: Measured startup duration, if known
            cached: Whether the start was served from a cache
        """
        event = Event(
            event_type=EventType.SERVICE_STARTED,
            aggregate_id=f"service:{service_id}",
            aggregate_type="service",
            data={
                "service_id": service_id,
                "pid": pid,
                "startup_time_ms": startup_time_ms,
                "cached": cached,
            },
        )
        return await self.event_store.append(event)

    async def stop_service(
        self,
        service_id: int,
        reason: str = "user_request",
    ) -> Event:
        """Record service stop with the reason it was stopped."""
        event = Event(
            event_type=EventType.SERVICE_STOPPED,
            aggregate_id=f"service:{service_id}",
            aggregate_type="service",
            data={
                "service_id": service_id,
                "reason": reason,
            },
        )
        return await self.event_store.append(event)

    async def record_error(
        self,
        service_id: int,
        error: str,
        details: Optional[Dict] = None,
        fatal: bool = False,
    ) -> Event:
        """Record a service error.

        Args:
            service_id: ID of the failing service
            error: Error message
            details: Optional structured error context
            fatal: Whether the error terminated the service
        """
        event = Event(
            event_type=EventType.SERVICE_ERROR,
            aggregate_id=f"service:{service_id}",
            aggregate_type="service",
            data={
                "service_id": service_id,
                "error": error,
                "details": details or {},
                "fatal": fatal,
            },
        )
        return await self.event_store.append(event)

    async def record_health_check(
        self,
        service_id: int,
        healthy: bool,
        response_time_ms: Optional[float] = None,
        status_code: Optional[int] = None,
    ) -> Event:
        """Record a health check result."""
        event = Event(
            event_type=EventType.SERVICE_HEALTH_CHECK,
            aggregate_id=f"service:{service_id}",
            aggregate_type="service",
            data={
                "service_id": service_id,
                "healthy": healthy,
                "response_time_ms": response_time_ms,
                "status_code": status_code,
            },
        )
        return await self.event_store.append(event)

    async def delete_service(
        self,
        service_id: int,
        user_id: Optional[int] = None,
    ) -> Event:
        """Record service deletion.

        Args:
            service_id: ID of the deleted service
            user_id: Acting user, recorded in metadata when provided
        """
        event = Event(
            event_type=EventType.SERVICE_DELETED,
            aggregate_id=f"service:{service_id}",
            aggregate_type="service",
            data={"service_id": service_id},
            # `is not None` so a falsy-but-valid user_id (0) is still recorded.
            metadata={"user_id": user_id} if user_id is not None else {},
        )
        return await self.event_store.append(event)
651
+
652
+
653
class ProjectCommands:
    """Command handlers for project operations (write side of CQRS)."""

    def __init__(self, event_store: EventStore):
        self.event_store = event_store

    async def create_project(
        self,
        project_id: int,
        user_id: int,
        name: str,
        **kwargs
    ) -> Event:
        """Record project creation.

        Extra keyword arguments are merged into the event payload.
        """
        event = Event(
            event_type=EventType.PROJECT_CREATED,
            aggregate_id=f"project:{project_id}",
            aggregate_type="project",
            data={
                "project_id": project_id,
                "user_id": user_id,
                "name": name,
                **kwargs,
            },
            metadata={"user_id": user_id},
        )
        return await self.event_store.append(event)

    async def update_project(
        self,
        project_id: int,
        changes: Dict[str, Any],
        user_id: Optional[int] = None,
    ) -> Event:
        """Record a project update with the dict of applied changes."""
        event = Event(
            event_type=EventType.PROJECT_UPDATED,
            aggregate_id=f"project:{project_id}",
            aggregate_type="project",
            data={
                "project_id": project_id,
                "changes": changes,
            },
            # `is not None` so a falsy-but-valid user_id (0) is still recorded.
            metadata={"user_id": user_id} if user_id is not None else {},
        )
        return await self.event_store.append(event)

    async def delete_project(
        self,
        project_id: int,
        user_id: Optional[int] = None,
    ) -> Event:
        """Record project deletion, attributing it to *user_id* when given."""
        event = Event(
            event_type=EventType.PROJECT_DELETED,
            aggregate_id=f"project:{project_id}",
            aggregate_type="project",
            data={"project_id": project_id},
            # `is not None` so a falsy-but-valid user_id (0) is still recorded.
            metadata={"user_id": user_id} if user_id is not None else {},
        )
        return await self.event_store.append(event)
714
+
715
+
716
class SecurityCommands:
    """Command handlers that append security-related events to the store."""

    def __init__(self, event_store: EventStore):
        self.event_store = event_store

    async def record_security_check(
        self,
        user_id: str,
        service_id: str,
        passed: bool,
        reason: Optional[str] = None,
        details: Optional[Dict] = None,
    ) -> Event:
        """Record a security check result as a passed or failed event."""
        payload = {
            "user_id": user_id,
            "service_id": service_id,
            "passed": passed,
            "reason": reason,
            "details": details or {},
        }
        outcome = EventType.SECURITY_CHECK_PASSED if passed else EventType.SECURITY_CHECK_FAILED
        return await self.event_store.append(Event(
            event_type=outcome,
            aggregate_id=f"user:{user_id}",
            aggregate_type="security",
            data=payload,
        ))

    async def record_rate_limit(
        self,
        user_id: str,
        endpoint: str,
        limit: int,
    ) -> Event:
        """Record that *user_id* hit the rate limit on *endpoint*."""
        payload = {
            "user_id": user_id,
            "endpoint": endpoint,
            "limit": limit,
        }
        return await self.event_store.append(Event(
            event_type=EventType.RATE_LIMIT_HIT,
            aggregate_id=f"user:{user_id}",
            aggregate_type="security",
            data=payload,
        ))

    async def record_anomaly(
        self,
        user_id: str,
        anomaly_type: str,
        severity: str,
        details: Optional[Dict] = None,
    ) -> Event:
        """Record a detected security anomaly for *user_id*."""
        payload = {
            "user_id": user_id,
            "anomaly_type": anomaly_type,
            "severity": severity,
            "details": details or {},
        }
        return await self.event_store.append(Event(
            event_type=EventType.ANOMALY_DETECTED,
            aggregate_id=f"user:{user_id}",
            aggregate_type="security",
            data=payload,
        ))
785
+
786
+
787
# Query handlers (Read side of CQRS)
class ServiceQueries:
    """
    Query handlers for service read operations.

    Queries don't modify state - they only read from the event store
    or materialized projections.
    """

    def __init__(self, event_store: EventStore):
        self.event_store = event_store

    def get_service_history(self, service_id: int) -> List[Dict]:
        """Get the ordered event history for a service, serialized."""
        events = self.event_store.get_aggregate_history(f"service:{service_id}")
        return [e.to_dict() for e in events]

    def get_recent_starts(self, limit: int = 10) -> List[Dict]:
        """Get the most recent service start events."""
        events = self.event_store.get_events(
            event_type=EventType.SERVICE_STARTED,
            limit=limit,
        )
        return [e.to_dict() for e in events]

    def get_recent_errors(self, limit: int = 10) -> List[Dict]:
        """Get the most recent service error events."""
        events = self.event_store.get_events(
            event_type=EventType.SERVICE_ERROR,
            limit=limit,
        )
        return [e.to_dict() for e in events]

    def get_recent_health_checks(self, service_id: Optional[int] = None, limit: int = 10) -> List[Dict]:
        """Get recent health check results, optionally for one service.

        Args:
            service_id: Restrict to this service when given
            limit: Maximum number of results
        """
        # `is not None` so service_id=0 still filters (0 is falsy).
        aggregate_id = f"service:{service_id}" if service_id is not None else None
        events = self.event_store.get_events(
            aggregate_id=aggregate_id,
            event_type=EventType.SERVICE_HEALTH_CHECK,
            limit=limit,
        )
        return [e.to_dict() for e in events]

    def get_stats(self) -> Dict[str, int]:
        """Get per-type event counts for service lifecycle events."""
        return {
            "total_events": self.event_store.count(),
            "services_created": self.event_store.count(EventType.SERVICE_CREATED),
            "services_started": self.event_store.count(EventType.SERVICE_STARTED),
            "services_stopped": self.event_store.count(EventType.SERVICE_STOPPED),
            "services_deleted": self.event_store.count(EventType.SERVICE_DELETED),
            "errors": self.event_store.count(EventType.SERVICE_ERROR),
            "health_checks": self.event_store.count(EventType.SERVICE_HEALTH_CHECK),
        }

    async def get_service_state(self, service_id: int) -> Dict[str, Any]:
        """Rebuild the current service state by replaying its events."""
        aggregate = await ServiceAggregate.load(
            f"service:{service_id}",
            self.event_store
        )
        return aggregate.to_dict()

    def get_user_services(self, user_id: int) -> List[Dict]:
        """Get all service-creation events recorded for a user."""
        events = self.event_store.get_events(
            event_type=EventType.SERVICE_CREATED,
        )
        user_events = [e for e in events if e.data.get("user_id") == user_id]
        return [e.to_dict() for e in user_events]
857
+
858
+
859
class ProjectQueries:
    """Query handlers for project read operations."""

    def __init__(self, event_store: EventStore):
        self.event_store = event_store

    def get_project_history(self, project_id: int) -> List[Dict]:
        """Get the ordered event history for a project, serialized."""
        events = self.event_store.get_aggregate_history(f"project:{project_id}")
        return [e.to_dict() for e in events]

    def get_recent_projects(self, user_id: Optional[int] = None, limit: int = 10) -> List[Dict]:
        """Get recently created projects, optionally filtered by user.

        Args:
            user_id: Restrict to this user's projects when given
            limit: Maximum number of creation events to scan
        """
        events = self.event_store.get_events(
            event_type=EventType.PROJECT_CREATED,
            limit=limit,
        )
        # `is not None` so user_id=0 still filters (0 is falsy).
        if user_id is not None:
            events = [e for e in events if e.data.get("user_id") == user_id]
        return [e.to_dict() for e in events]

    def get_stats(self) -> Dict[str, int]:
        """Get per-type event counts for project lifecycle events."""
        return {
            "projects_created": self.event_store.count(EventType.PROJECT_CREATED),
            "projects_updated": self.event_store.count(EventType.PROJECT_UPDATED),
            "projects_deleted": self.event_store.count(EventType.PROJECT_DELETED),
        }
887
+
888
+
889
class SecurityQueries:
    """Read-side handlers for security events; never mutates state."""

    def __init__(self, event_store: EventStore):
        self.event_store = event_store

    @staticmethod
    def _dump(events: List[Event]) -> List[Dict]:
        """Serialize a batch of events for API responses."""
        return [evt.to_dict() for evt in events]

    def get_recent_security_failures(self, limit: int = 10) -> List[Dict]:
        """Return the most recent failed security checks."""
        return self._dump(self.event_store.get_events(
            event_type=EventType.SECURITY_CHECK_FAILED,
            limit=limit,
        ))

    def get_user_security_history(self, user_id: str, limit: int = 50) -> List[Dict]:
        """Return the security event history for one user."""
        return self._dump(self.event_store.get_events(
            aggregate_id=f"user:{user_id}",
            aggregate_type="security",
            limit=limit,
        ))

    def get_rate_limit_hits(self, since: Optional[datetime] = None, limit: int = 100) -> List[Dict]:
        """Return recent rate limit hits, optionally from *since* onward."""
        return self._dump(self.event_store.get_events(
            event_type=EventType.RATE_LIMIT_HIT,
            since=since,
            limit=limit,
        ))

    def get_anomalies(self, severity: Optional[str] = None, limit: int = 50) -> List[Dict]:
        """Return detected anomalies, optionally filtered by severity."""
        anomalies = self.event_store.get_events(
            event_type=EventType.ANOMALY_DETECTED,
            limit=limit,
        )
        if severity:
            anomalies = [evt for evt in anomalies if evt.data.get("severity") == severity]
        return self._dump(anomalies)

    def get_stats(self) -> Dict[str, int]:
        """Return per-type counts of security events."""
        tally = self.event_store.count
        return {
            "security_checks_passed": tally(EventType.SECURITY_CHECK_PASSED),
            "security_checks_failed": tally(EventType.SECURITY_CHECK_FAILED),
            "rate_limit_hits": tally(EventType.RATE_LIMIT_HIT),
            "anomalies_detected": tally(EventType.ANOMALY_DETECTED),
        }
939
+
940
+
941
# Projections - Materialized views built from events
class Projection(ABC):
    """
    Base class for event projections.

    Projections maintain materialized views that are optimized for
    specific query patterns. They're rebuilt by replaying events.
    """

    def __init__(self, event_store: EventStore):
        self.event_store = event_store
        # High-water mark: sequence number of the last event applied.
        self._last_sequence = 0

    @abstractmethod
    def apply(self, event: Event) -> None:
        """Apply an event to update the projection."""
        pass

    def rebuild(self) -> None:
        """Rebuild projection from all events.

        NOTE(review): EventStore.get_events returns only the most recent
        `limit` matches, so a store holding more than 10000 events has its
        oldest events silently skipped here — confirm acceptable.
        """
        self._last_sequence = 0
        for event in self.event_store.get_events(limit=10000):
            self.apply(event)
            self._last_sequence = event.sequence

    def catch_up(self) -> None:
        """Apply new events since last update.

        NOTE(review): get_events keeps the tail slice of matches, so if more
        than 1000 events arrived since the last catch-up only the newest 1000
        are applied and the gap is never revisited — a fix would need a
        first-N query mode on EventStore.
        """
        events = self.event_store.get_events(
            since_sequence=self._last_sequence,
            limit=1000,
        )
        for event in events:
            self.apply(event)
            self._last_sequence = event.sequence
975
+
976
+
977
+ class ServiceStatusProjection(Projection):
978
+ """
979
+ Projection maintaining current status of all services.
980
+
981
+ Optimized for queries like "list all running services".
982
+ """
983
+
984
+ def __init__(self, event_store: EventStore):
985
+ super().__init__(event_store)
986
+ self._services: Dict[str, Dict[str, Any]] = {}
987
+
988
+ def apply(self, event: Event) -> None:
989
+ """Update service status based on event."""
990
+ if event.aggregate_type != "service":
991
+ return
992
+
993
+ service_id = event.aggregate_id
994
+
995
+ if event.event_type == EventType.SERVICE_CREATED:
996
+ self._services[service_id] = {
997
+ "service_id": event.data.get("service_id"),
998
+ "user_id": event.data.get("user_id"),
999
+ "name": event.data.get("name"),
1000
+ "port": event.data.get("port"),
1001
+ "status": "created",
1002
+ "created_at": event.timestamp.isoformat(),
1003
+ }
1004
+ elif event.event_type == EventType.SERVICE_STARTED:
1005
+ if service_id in self._services:
1006
+ self._services[service_id]["status"] = "running"
1007
+ self._services[service_id]["pid"] = event.data.get("pid")
1008
+ self._services[service_id]["started_at"] = event.timestamp.isoformat()
1009
+ elif event.event_type == EventType.SERVICE_STOPPED:
1010
+ if service_id in self._services:
1011
+ self._services[service_id]["status"] = "stopped"
1012
+ self._services[service_id]["pid"] = None
1013
+ elif event.event_type == EventType.SERVICE_DELETED:
1014
+ self._services.pop(service_id, None)
1015
+ elif event.event_type == EventType.SERVICE_ERROR:
1016
+ if service_id in self._services:
1017
+ self._services[service_id]["last_error"] = event.data.get("error")
1018
+ if event.data.get("fatal"):
1019
+ self._services[service_id]["status"] = "error"
1020
+
1021
+ def get_all(self) -> List[Dict[str, Any]]:
1022
+ """Get all services."""
1023
+ return list(self._services.values())
1024
+
1025
+ def get_running(self) -> List[Dict[str, Any]]:
1026
+ """Get only running services."""
1027
+ return [s for s in self._services.values() if s.get("status") == "running"]
1028
+
1029
+ def get_by_user(self, user_id: int) -> List[Dict[str, Any]]:
1030
+ """Get services for a specific user."""
1031
+ return [s for s in self._services.values() if s.get("user_id") == user_id]
1032
+
1033
+ def get(self, service_id: str) -> Optional[Dict[str, Any]]:
1034
+ """Get a specific service."""
1035
+ return self._services.get(service_id)
1036
+
1037
+
1038
# Convenience functions for common patterns
def get_service_commands(event_store: Optional[EventStore] = None) -> ServiceCommands:
    """Return service command handlers bound to *event_store*, or the global store."""
    store = event_store or get_event_store()
    return ServiceCommands(store)
1042
+
1043
+
1044
def get_service_queries(event_store: Optional[EventStore] = None) -> ServiceQueries:
    """Return service query handlers bound to *event_store*, or the global store."""
    store = event_store or get_event_store()
    return ServiceQueries(store)
1047
+
1048
+
1049
def get_project_commands(event_store: Optional[EventStore] = None) -> ProjectCommands:
    """Return project command handlers bound to *event_store*, or the global store."""
    store = event_store or get_event_store()
    return ProjectCommands(store)
1052
+
1053
+
1054
def get_project_queries(event_store: Optional[EventStore] = None) -> ProjectQueries:
    """Return project query handlers bound to *event_store*, or the global store."""
    store = event_store or get_event_store()
    return ProjectQueries(store)
1057
+
1058
+
1059
def get_security_commands(event_store: Optional[EventStore] = None) -> SecurityCommands:
    """Return security command handlers bound to *event_store*, or the global store."""
    store = event_store or get_event_store()
    return SecurityCommands(store)
1062
+
1063
+
1064
def get_security_queries(event_store: Optional[EventStore] = None) -> SecurityQueries:
    """Return security query handlers bound to *event_store*, or the global store."""
    store = event_store or get_event_store()
    return SecurityQueries(store)