python-cqrs 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. cqrs/__init__.py +28 -0
  2. cqrs/adapters/__init__.py +0 -0
  3. cqrs/adapters/amqp.py +63 -0
  4. cqrs/adapters/kafka.py +102 -0
  5. cqrs/compressors/__init__.py +0 -0
  6. cqrs/compressors/protocol.py +9 -0
  7. cqrs/compressors/zlib.py +9 -0
  8. cqrs/container/__init__.py +3 -0
  9. cqrs/container/di.py +19 -0
  10. cqrs/container/protocol.py +20 -0
  11. cqrs/dispatcher/__init__.py +7 -0
  12. cqrs/dispatcher/dispatcher.py +64 -0
  13. cqrs/events/__init__.py +14 -0
  14. cqrs/events/bootstrap.py +55 -0
  15. cqrs/events/event.py +86 -0
  16. cqrs/events/event_emitter.py +86 -0
  17. cqrs/events/event_handler.py +24 -0
  18. cqrs/events/map.py +27 -0
  19. cqrs/mediator.py +103 -0
  20. cqrs/message_brokers/__init__.py +6 -0
  21. cqrs/message_brokers/amqp.py +23 -0
  22. cqrs/message_brokers/devnull.py +10 -0
  23. cqrs/message_brokers/kafka.py +14 -0
  24. cqrs/message_brokers/protocol.py +22 -0
  25. cqrs/middlewares/__init__.py +3 -0
  26. cqrs/middlewares/base.py +30 -0
  27. cqrs/middlewares/logging.py +37 -0
  28. cqrs/outbox/__init__.py +0 -0
  29. cqrs/outbox/producer.py +65 -0
  30. cqrs/outbox/protocol.py +44 -0
  31. cqrs/outbox/repository.py +54 -0
  32. cqrs/outbox/sqlalchemy.py +223 -0
  33. cqrs/registry.py +29 -0
  34. cqrs/requests/__init__.py +9 -0
  35. cqrs/requests/bootstrap.py +94 -0
  36. cqrs/requests/map.py +30 -0
  37. cqrs/requests/request.py +10 -0
  38. cqrs/requests/request_handler.py +45 -0
  39. cqrs/response.py +12 -0
  40. python_cqrs-0.0.2.dist-info/LICENSE +201 -0
  41. python_cqrs-0.0.2.dist-info/METADATA +233 -0
  42. python_cqrs-0.0.2.dist-info/RECORD +44 -0
  43. python_cqrs-0.0.2.dist-info/WHEEL +5 -0
  44. python_cqrs-0.0.2.dist-info/top_level.txt +1 -0
cqrs/__init__.py ADDED
@@ -0,0 +1,28 @@
1
"""Top-level package interface: re-exports the public CQRS building blocks."""
from cqrs.compressors.protocol import Compressor
from cqrs.compressors.zlib import ZlibCompressor
from cqrs.events.event import DomainEvent, ECSTEvent, NotificationEvent
from cqrs.events.event_emitter import EventEmitter
from cqrs.events.event_handler import EventHandler
from cqrs.mediator import EventMediator, RequestMediator
from cqrs.outbox.producer import EventProducer
from cqrs.outbox.repository import OutboxedEventRepository
from cqrs.outbox.sqlalchemy import SqlAlchemyOutboxedEventRepository
from cqrs.requests.request import Request
from cqrs.requests.request_handler import RequestHandler

# Names forming the supported public API of the package.
__all__ = (
    "RequestMediator",
    "EventMediator",
    "DomainEvent",
    "NotificationEvent",
    "ECSTEvent",
    "EventEmitter",
    "EventHandler",
    "RequestHandler",
    "Request",
    "OutboxedEventRepository",
    "SqlAlchemyOutboxedEventRepository",
    "EventProducer",
    "Compressor",
    "ZlibCompressor",
)
File without changes
cqrs/adapters/amqp.py ADDED
@@ -0,0 +1,63 @@
1
+ import asyncio
2
+ import typing
3
+ from functools import partial
4
+
5
+ import aio_pika
6
+ from aio_pika import abc, pool
7
+
8
+
9
async def connection_pool_factory(url: str) -> abc.AbstractRobustConnection:
    """Open a robust (auto-reconnecting) AMQP connection; used as a pool factory."""
    return await aio_pika.connect_robust(url=url)
11
+
12
+
13
async def channel_pool_factory(connection_pool: pool.Pool) -> aio_pika.Channel:
    """Open a channel on a pooled connection; used as a channel-pool factory.

    NOTE(review): the connection is released back to the pool when the
    ``async with`` exits while the returned channel stays bound to it —
    presumably the pool keeps connections open; confirm against aio_pika's
    pool semantics.
    """
    async with connection_pool.acquire() as connection:
        return await connection.channel()
16
+
17
+
18
class AMQPPublisher:
    """Publishes AMQP messages through pooled robust connections and channels.

    Args:
        url: AMQP broker DSN.
        max_connection_pool_size: Upper bound on open connections.
        max_channel_pool_size: Upper bound on open channels.
    """

    def __init__(self, url: str, max_connection_pool_size=2, max_channel_pool_size=10):
        self.url = url
        self.max_connection_pool_size = max_connection_pool_size
        self.max_channel_pool_size = max_channel_pool_size
        # A pool of connections, and on top of it a pool of channels.
        self.connection_pool: pool.Pool = pool.Pool(
            partial(connection_pool_factory, url=url),
            max_size=self.max_connection_pool_size,
        )
        self.channel_pool: pool.Pool = pool.Pool(
            partial(channel_pool_factory, connection_pool=self.connection_pool),
            max_size=self.max_channel_pool_size,
        )

    async def publish(self, message: abc.AbstractMessage, queue_name: str, exchange_name: str) -> None:
        """Declare queue and direct exchange, bind them, then publish *message*."""
        async with self.channel_pool.acquire() as acquired_channel:
            declared_queue: aio_pika.Queue = await acquired_channel.declare_queue(queue_name)
            declared_exchange: aio_pika.Exchange = await acquired_channel.declare_exchange(
                exchange_name,
                type="direct",
                auto_delete=True,
            )
            # Route by queue name so the direct exchange delivers to the queue.
            await declared_queue.bind(exchange=declared_exchange, routing_key=queue_name)
            await declared_exchange.publish(message=message, routing_key=queue_name)
38
+
39
+
40
class AMQPConsumer:
    """Consumes messages from an AMQP queue through pooled connections/channels.

    Args:
        url: AMQP broker DSN.
        max_connection_pool_size: Upper bound on open connections.
        max_channel_pool_size: Upper bound on open channels.
    """

    def __init__(self, url: str, max_connection_pool_size=2, max_channel_pool_size=10):
        self.url = url
        self.max_connection_pool_size = max_connection_pool_size
        self.max_channel_pool_size = max_channel_pool_size
        self.connection_pool: pool.Pool = pool.Pool(
            partial(connection_pool_factory, url=url),
            max_size=self.max_connection_pool_size,
        )
        self.channel_pool: pool.Pool = pool.Pool(
            partial(channel_pool_factory, connection_pool=self.connection_pool),
            max_size=self.max_channel_pool_size,
        )

    async def consume(
        self,
        handler: typing.Callable[[abc.AbstractIncomingMessage], typing.Awaitable[None]],
        queue_name: str,
    ) -> None:
        """Subscribe *handler* to *queue_name* and block until cancelled.

        One message at a time is prefetched (``prefetch_count=1``).
        """
        async with self.channel_pool.acquire() as channel:
            await channel.set_qos(prefetch_count=1)
            queue = await channel.declare_queue(queue_name)
            # Use the public Queue.consume() API instead of the private
            # _consume() helper: same behavior, but stable across aio_pika
            # upgrades.
            await queue.consume(handler)
            # Keep this coroutine alive forever so the subscription stays
            # active; callers cancel the task to stop consuming.
            await asyncio.Future()
cqrs/adapters/kafka.py ADDED
@@ -0,0 +1,102 @@
1
+ import asyncio
2
+ import functools
3
+ import logging
4
+ import typing
5
+
6
+ import aiokafka
7
+ import orjson
8
+ import retry_async
9
+ from aiokafka import errors
10
+
11
+ from cqrs.message_brokers import protocol
12
+
13
__all__ = (
    "KafkaProducer",
    "kafka_producer_factory",
)

# Retry decorator pre-configured for transient Kafka connectivity failures;
# `tries`/`delay` are supplied at call time by KafkaProducer.produce().
_retry = functools.partial(
    retry_async.retry,
    exceptions=(
        errors.KafkaConnectionError,
        errors.NodeNotReadyError,
        errors.RequestTimedOutError,
    ),
    is_async=True,
)

# Allowed values for the corresponding AIOKafkaProducer constructor arguments.
SecurityProtocol: typing.TypeAlias = typing.Literal["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"]
SaslMechanism: typing.TypeAlias = typing.Literal["PLAIN", "GSSAPI", "SCRAM-SHA-256", "SCRAM-SHA-512", "OAUTHBEARER"]

logger = logging.getLogger("cqrs")
# NOTE(review): a library module forcing DEBUG on its logger overrides the
# application's logging configuration — consider leaving the level to the app.
logger.setLevel(logging.DEBUG)
33
+
34
+
35
def _serializer(message: protocol.Message) -> typing.ByteString:
    """Serialize a broker message to JSON bytes for use as the Kafka value."""
    return orjson.dumps(message.model_dump(mode="json"))
37
+
38
+
39
+ class _Singleton(type):
40
+ _instances = {}
41
+
42
+ def __call__(cls, *args, **kwargs):
43
+ if cls not in cls._instances:
44
+ cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs)
45
+ return cls._instances[cls]
46
+
47
+
48
class KafkaProducer(metaclass=_Singleton):
    """Process-wide Kafka producer (one instance via ``_Singleton``).

    Sends each message to every topic in ``topics`` and retries transient
    connection errors through the module-level ``_retry`` decorator.
    """

    def __init__(
        self,
        producer: aiokafka.AIOKafkaProducer,
        topics: typing.List[typing.Text],
        retry_count: int,
        retry_delay: int,
    ):
        self._topics = topics
        self._producer = producer
        self._retry_count = retry_count
        self._retry_delay = retry_delay

    async def _check_connection(self):
        # NOTE(review): probes an arbitrary broker node through the underlying
        # client and (re)starts the producer when it is not ready — this leans
        # on aiokafka's client internals; re-verify on aiokafka upgrades.
        node_id = self._producer.client.get_random_node()
        if not await self._producer.client.ready(node_id=node_id):
            await self._producer.start()

    async def _produce(self, message: protocol.Message):
        # The producer's value_serializer (see kafka_producer_factory) turns
        # the Message into JSON bytes.
        await self._check_connection()
        for topic in self._topics:
            logger.debug(f"produce message {message} to topic {topic}")
            await self._producer.send_and_wait(topic, value=message)

    async def produce(self, message: protocol.Message):
        """
        Produces event to kafka broker.
        Tries to reconnect if connect has been lost or has not been opened.
        """
        await _retry(tries=self._retry_count, delay=self._retry_delay)(self._produce)(message)
78
+
79
+
80
def kafka_producer_factory(
    dsn: typing.Text,
    topics: typing.List[typing.Text],
    security_protocol: SecurityProtocol = "PLAINTEXT",
    sasl_mechanism: SaslMechanism = "PLAIN",
    retry_count: int = 3,
    retry_delay: int = 1,
    user: typing.Text | None = None,
    password: typing.Text | None = None,
) -> KafkaProducer:
    """Build (or return the cached) :class:`KafkaProducer`.

    Args:
        dsn: Kafka bootstrap servers string.
        topics: Topics every produced message is sent to.
        security_protocol: Transport security mode.
        sasl_mechanism: SASL mechanism when SASL is enabled.
        retry_count: Send retries on transient connection errors.
        retry_delay: Delay between retries, in seconds.
        user: SASL plain username, if any.
        password: SASL plain password, if any.

    Returns:
        The process-wide producer. NOTE: ``KafkaProducer`` is a singleton, so
        later calls with different arguments still return the first instance.
    """
    # asyncio.get_event_loop() is deprecated outside a running loop since
    # Python 3.10: reuse the running loop when one exists, otherwise create
    # and install a fresh one — matching the old implicit behavior.
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

    producer = aiokafka.AIOKafkaProducer(
        bootstrap_servers=dsn,
        value_serializer=_serializer,
        security_protocol=security_protocol,
        sasl_mechanism=sasl_mechanism,
        sasl_plain_username=user,
        sasl_plain_password=password,
        loop=loop,
    )
    return KafkaProducer(producer=producer, topics=topics, retry_count=retry_count, retry_delay=retry_delay)
File without changes
@@ -0,0 +1,9 @@
1
+ import typing
2
+
3
+
4
class Compressor(typing.Protocol):
    """Structural interface for symmetric byte-compression codecs."""

    def compress(self, value: bytes) -> bytes:
        """Compress value"""
        ...

    def decompress(self, value: bytes) -> bytes:
        """Decompress compressed value"""
        ...
@@ -0,0 +1,9 @@
1
+ import zlib
2
+
3
+
4
class ZlibCompressor:
    """Compressor implementation backed by the stdlib ``zlib`` (DEFLATE) codec."""

    def compress(self, value: bytes) -> bytes:
        """Return the zlib-compressed form of *value*."""
        compressed = zlib.compress(value)
        return compressed

    def decompress(self, value: bytes) -> bytes:
        """Return the original bytes for zlib-compressed *value*."""
        restored = zlib.decompress(value)
        return restored
@@ -0,0 +1,3 @@
1
"""Container subpackage: exposes the dependency-injection container protocol."""
from cqrs.container.protocol import Container

__all__ = ("Container",)
cqrs/container/di.py ADDED
@@ -0,0 +1,19 @@
1
+ import typing
2
+
3
+ import di
4
+ from di import dependent, executors
5
+
6
+ from cqrs import container
7
+
8
+ T = typing.TypeVar("T")
9
+
10
+
11
class DIContainer(container.Container[di.Container]):
    """``cqrs.container.Container`` implementation backed by the ``di`` library.

    Implements the full protocol: the previous version lacked
    ``external_container``/``attach_external_container`` and raised an opaque
    ``AttributeError`` from ``resolve`` when no container was attached.
    """

    def __init__(self, external_container: di.Container | None = None) -> None:
        self._external_container = external_container

    @property
    def external_container(self) -> di.Container:
        """The attached ``di`` container; raises if none has been attached."""
        if self._external_container is None:
            raise RuntimeError("No external container attached to DIContainer.")
        return self._external_container

    def attach_external_container(self, container: di.Container) -> None:
        """Attach the ``di`` container used to resolve dependencies."""
        self._external_container = container

    async def resolve(self, type_: typing.Type[T]) -> T:
        """Resolve an instance of *type_* inside a fresh "request" scope."""
        executor = executors.AsyncExecutor()
        solved = self.external_container.solve(dependent.Dependent(type_, scope="request"), scopes=["request"])
        with self.external_container.enter_scope("request") as state:
            return await solved.execute_async(executor=executor, state=state)
@@ -0,0 +1,20 @@
1
+ import typing
2
+
3
T = typing.TypeVar("T")
C = typing.TypeVar("C")


class Container(typing.Protocol[C]):
    """
    The container interface.
    """

    @property
    def external_container(self) -> C:
        """The wrapped third-party container instance."""
        ...

    def attach_external_container(self, container: C) -> None:
        """Bind a third-party container for later resolution."""
        ...

    async def resolve(self, type_: typing.Type[T]) -> T:
        """Resolve and return an instance of *type_*."""
        ...
@@ -0,0 +1,7 @@
1
"""Dispatcher subpackage: request/event dispatching entry points."""
from cqrs.dispatcher.dispatcher import EventDispatcher, RequestDispatcher, RequestDispatchResult

__all__ = (
    "RequestDispatchResult",
    "RequestDispatcher",
    "EventDispatcher",
)
@@ -0,0 +1,64 @@
1
+ import logging
2
+ import typing
3
+
4
+ import pydantic
5
+
6
+ from cqrs import container as di_container
7
+ from cqrs import events as cqrs_events
8
+ from cqrs import middlewares, requests
9
+ from cqrs import response as res
10
+
11
+ logger = logging.getLogger("cqrs")
12
+
13
+
14
class RequestDispatchResult(pydantic.BaseModel):
    """Outcome of dispatching a request: the handler response plus emitted events."""

    # Response returned by the request handler, if any.
    response: res.Response | None = pydantic.Field(default=None)
    # Events collected on the handler while it processed the request.
    events: typing.List[cqrs_events.Event] = pydantic.Field(default_factory=list)
17
+
18
+
19
class RequestDispatcher:
    """Routes a request to its registered handler through the middleware chain."""

    def __init__(
        self,
        request_map: requests.RequestMap,
        container: di_container.Container,
        middleware_chain: middlewares.MiddlewareChain | None = None,
    ) -> None:
        self._request_map = request_map
        self._container = container
        self._middleware_chain = middleware_chain or middlewares.MiddlewareChain()

    async def dispatch(self, request: requests.Request) -> RequestDispatchResult:
        """Resolve the handler for *request*, run it, and collect its events."""
        handler_cls = self._request_map.get(type(request))
        resolved_handler = await self._container.resolve(handler_cls)
        pipeline = self._middleware_chain.wrap(resolved_handler.handle)
        handler_response = await pipeline(request)
        return RequestDispatchResult(
            response=handler_response,
            events=resolved_handler.events,
        )
36
+
37
+
38
E = typing.TypeVar("E", bound=cqrs_events.Event, contravariant=True)


class EventDispatcher:
    """Fans an event out to every handler registered for its type."""

    def __init__(
        self,
        event_map: cqrs_events.EventMap,
        container: di_container.Container,
        middleware_chain: middlewares.MiddlewareChain | None = None,
    ):
        self._event_map = event_map
        self._container = container
        self._middleware_chain = middleware_chain or middlewares.MiddlewareChain()

    async def _handle_event(self, event: E, handle_type: typing.Type[cqrs_events.EventHandler[E]]):
        # Resolve a handler instance from the container and let it process
        # the event.
        resolved = await self._container.resolve(handle_type)
        await resolved.handle(event)

    async def dispatch(self, event: E) -> None:
        """Invoke every handler bound to ``type(event)``, in registration order."""
        registered = self._event_map.get(type(event))
        if not registered:
            logger.warning(
                "Handlers for event %s not found",
                type(event).__name__,
            )
        for handler_type in registered:
            await self._handle_event(event, handler_type)
@@ -0,0 +1,14 @@
1
"""Events subpackage: event models, the emitter, handler protocol, and map."""
from cqrs.events.event import DomainEvent, ECSTEvent, Event, NotificationEvent
from cqrs.events.event_emitter import EventEmitter
from cqrs.events.event_handler import EventHandler
from cqrs.events.map import EventMap

__all__ = (
    "Event",
    "DomainEvent",
    "ECSTEvent",
    "NotificationEvent",
    "EventEmitter",
    "EventHandler",
    "EventMap",
)
@@ -0,0 +1,55 @@
1
+ import typing
2
+
3
+ import di
4
+
5
+ import cqrs
6
+ from cqrs import events
7
+ from cqrs.container import di as ed_di_container
8
+ from cqrs.middlewares import base as mediator_middlewares
9
+ from cqrs.middlewares import logging as logging_middleware
10
+
11
+
12
def setup_mediator(
    container: ed_di_container.DIContainer | None,
    middlewares: typing.Iterable[mediator_middlewares.Middleware] | None = None,
    events_mapper: typing.Callable[[events.EventMap], None] | None = None,
) -> cqrs.EventMediator:
    """Assemble an :class:`cqrs.EventMediator`.

    Args:
        container: DI container used to resolve event handlers.
        middlewares: Optional middlewares, added in iteration order.
        events_mapper: Optional callback that registers handlers on the
            freshly created event map. (Annotation fixed: the default is
            ``None``, so the parameter is Optional.)

    Returns:
        A configured event mediator.
    """
    # Build the event map and let the caller populate it.
    event_map = events.EventMap()
    if events_mapper:
        events_mapper(event_map)

    middleware_chain = mediator_middlewares.MiddlewareChain()
    if middlewares is None:
        middlewares = []

    for middleware in middlewares:
        middleware_chain.add(middleware)

    return cqrs.EventMediator(
        event_map=event_map,
        container=container,
        middleware_chain=middleware_chain,
    )
34
+
35
+
36
def bootstrap(
    di_container: di.Container | None = None,
    middlewares: typing.Iterable[mediator_middlewares.Middleware] | None = None,
    events_mapper: typing.Callable[[events.EventMap], None] | None = None,
    on_startup: typing.List[typing.Callable[[], None]] | None = None,
) -> cqrs.EventMediator:
    """Bootstrap an :class:`cqrs.EventMediator`.

    Runs the ``on_startup`` hooks, wraps *di_container* in a
    :class:`DIContainer`, and wires the mediator with the given middlewares
    plus a trailing :class:`LoggingMiddleware`.

    Args:
        di_container: Optional ``di`` container with handler bindings.
        middlewares: Optional middlewares, applied before the logging one.
        events_mapper: Optional callback that registers event handlers.
        on_startup: Zero-argument callables invoked before wiring.
    """
    if on_startup is None:
        on_startup = []

    for fun in on_startup:
        fun()

    if middlewares is None:
        middlewares = []
    container = ed_di_container.DIContainer(di_container)
    return setup_mediator(
        container,
        events_mapper=events_mapper,
        # Unpack instead of list concatenation: `middlewares` is only
        # guaranteed to be an Iterable, and e.g. a tuple would make `+` raise
        # TypeError.
        middlewares=[*middlewares, logging_middleware.LoggingMiddleware()],
    )
cqrs/events/event.py ADDED
@@ -0,0 +1,86 @@
1
+ import datetime
2
+ import typing
3
+ import uuid
4
+
5
+ import pydantic
6
+
7
+
8
class Event(pydantic.BaseModel, frozen=True):
    """The base class for events; instances are immutable (``frozen=True``)."""
10
+
11
+
12
class DomainEvent(Event):
    """
    The base class for domain events.

    Domain events are dispatched to in-process event handlers (see
    ``EventEmitter``) rather than being sent to a message broker.
    """
16
+
17
+
18
# Payload type parameter shared with ECSTEvent below.
_P = typing.TypeVar("_P")


class NotificationEvent(Event):
    """
    The base class for notification events.

    Contains only identification information about state change.

    Example plain structure::

        {
            "event_id": "82a0b10e-1b3d-4c3c-9bdd-3934f8f824c2",
            "event_timestamp": "2023-03-06 12:11:35.103792",
            "event_topic": "user_notification_events",
            "payload": {
                "changed_user_id": 987
            }
        }

    """

    # Unique identifier of this event instance.
    event_id: uuid.UUID = pydantic.Field(default_factory=uuid.uuid4)
    # NOTE(review): naive local-time timestamp — presumably UTC is intended;
    # confirm before relying on it across services.
    event_timestamp: datetime.datetime = pydantic.Field(default_factory=datetime.datetime.now)
    event_name: typing.Text
    # Discriminator used when the event is turned into a broker Message.
    event_type: typing.ClassVar[typing.Text] = "notification_event"

    payload: typing.Dict = pydantic.Field(default_factory=dict)

    model_config = pydantic.ConfigDict(from_attributes=True)

    def __hash__(self):
        # Identity is the event id alone, not the full field set.
        return hash(self.event_id)
51
+
52
+
53
class ECSTEvent(Event, typing.Generic[_P]):
    """
    Base class for ECST events.

    ECST means event-carried state transfer.

    Contains full information about state change.

    Example plain structure::

        {
            "event_id": "82a0b10e-1b3d-4c3c-9bdd-3934f8f824c2",
            "event_timestamp": "2023-03-06 12:11:35.103792",
            "event_topic": "user_ecst_events",
            "payload": {
                "user_id": 987,
                "new_user_last_name": "Doe",
                "new_user_nickname": "kend"
            }
        }

    """

    # Unique identifier of this event instance.
    event_id: uuid.UUID = pydantic.Field(default_factory=uuid.uuid4)
    # NOTE(review): naive local-time timestamp — presumably UTC is intended;
    # confirm before relying on it across services.
    event_timestamp: datetime.datetime = pydantic.Field(default_factory=datetime.datetime.now)
    event_name: typing.Text
    # Annotated as ClassVar[Text] for consistency with NotificationEvent.
    event_type: typing.ClassVar[typing.Text] = "ecst_event"

    # Full state-change payload; typed by the generic parameter.
    payload: _P | None = pydantic.Field(default=None)

    model_config = pydantic.ConfigDict(from_attributes=True)

    def __hash__(self):
        # Identity is the event id alone, not the full field set.
        return hash(self.event_id)
@@ -0,0 +1,86 @@
1
+ import functools
2
+ import logging
3
+
4
+ from cqrs import container, message_brokers
5
+ from cqrs.events import event, map
6
+
7
+ logger = logging.getLogger("cqrs")
8
+
9
+
10
class EventEmitter:
    """
    The event emitter is responsible for sending events to the according handlers or
    to the message broker abstraction.

    Dispatch is by runtime event type via ``functools.singledispatchmethod``:
    DomainEvent -> in-process handlers; NotificationEvent/ECSTEvent -> the
    message broker. Other Event subtypes fall through to the no-op base case.
    """

    def __init__(
        self,
        event_map: map.EventMap,
        container: container.Container,
        message_broker: message_brokers.MessageBroker | None = None,
    ) -> None:
        self._event_map = event_map
        self._container = container
        self._message_broker = message_broker

    @functools.singledispatchmethod
    async def emit(self, event: event.Event) -> None:
        # Base case: unknown event subtype — intentionally does nothing.
        ...

    @emit.register
    async def _(self, event: event.DomainEvent) -> None:
        # Domain events: resolve each registered handler and run it in-process.
        handlers_types = self._event_map.get(type(event))
        if not handlers_types:
            logger.warning(
                "Handlers for domain event %s not found",
                type(event).__name__,
            )
        for handler_type in handlers_types:
            handler = await self._container.resolve(handler_type)
            logger.debug(
                "Handling Event(%s) via event handler(%s)",
                type(event).__name__,
                handler_type.__name__,
            )
            await handler.handle(event)

    @emit.register
    async def _(self, event: event.NotificationEvent) -> None:
        # Notification events are forwarded to the broker; a broker is mandatory.
        if not self._message_broker:
            raise RuntimeError("To use NotificationEvent, message_broker argument must be specified.")

        message = _build_message(event)

        logger.debug(
            "Sending Notification Event(%s) to message broker %s",
            event.event_id,
            type(self._message_broker).__name__,
        )

        await self._message_broker.send_message(message)

    @emit.register
    async def _(self, event: event.ECSTEvent) -> None:
        # ECST events are forwarded to the broker; a broker is mandatory.
        if not self._message_broker:
            raise RuntimeError("To use ECSTEvent, message_broker argument must be specified.")

        message = _build_message(event)

        logger.debug(
            "Sending ECST event(%s) to message broker %s",
            event.event_id,
            type(self._message_broker).__name__,
        )

        await self._message_broker.send_message(message)
76
+
77
+
78
def _build_message(event: event.NotificationEvent | event.ECSTEvent) -> message_brokers.Message:
    """Translate an outbound event into a broker-agnostic Message envelope."""
    return message_brokers.Message(
        message_type=event.event_type,
        message_name=type(event).__name__,
        message_id=event.event_id,
        payload=event.model_dump(mode="json"),
    )
@@ -0,0 +1,24 @@
1
+ import typing
2
+
3
+ from cqrs.events import event as event_models
4
+
5
+ E = typing.TypeVar("E", bound=event_models.Event, contravariant=True)
6
+
7
+
8
class EventHandler(typing.Protocol[E]):
    """
    The event handler interface.

    Usage::

        class UserJoinedEventHandler(EventHandler[UserJoinedEvent]):
            def __init__(self, meetings_api: MeetingAPIProtocol) -> None:
                self._meetings_api = meetings_api

            async def handle(self, event: UserJoinedEvent) -> None:
                await self._meetings_api.notify_room(event.meeting_id, "New user joined!")

    """

    async def handle(self, event: E) -> None:
        raise NotImplementedError
cqrs/events/map.py ADDED
@@ -0,0 +1,27 @@
1
+ import collections
2
+ import typing
3
+
4
+ from cqrs import registry
5
+ from cqrs.events import event, event_handler
6
+
7
+ E = typing.TypeVar("E", bound=event.Event, contravariant=True)
8
+
9
+
10
class EventMap(registry.InMemoryRegistry[typing.Type[E]], typing.List[typing.Type[event_handler.EventHandler]]):
    """Registry mapping an event type to the list of its handler types."""

    # Backing mapping: event type -> list of handler types.
    _registry: collections.defaultdict

    def __init__(self) -> None:
        super().__init__()
        self._registry = collections.defaultdict(list)

    def bind(self, event_type: typing.Type[E], handler_type: typing.Type[event_handler.EventHandler[E]]) -> None:
        # Register *handler_type* for *event_type*.
        # NOTE(review): indexes `self` here but `self._registry` in get() —
        # presumably InMemoryRegistry.__getitem__ proxies `_registry`; confirm
        # against cqrs.registry (not in view).
        self[event_type].append(handler_type)

    def get(self, event_type: typing.Type[E]) -> typing.List[typing.Type[event_handler.EventHandler[E]]]:
        # defaultdict: unknown event types yield (and record) an empty list.
        return self._registry[event_type]

    def get_events(self) -> list[typing.Type[E]]:
        # NOTE(review): `keys()` is presumably provided by InMemoryRegistry
        # over `_registry` — confirm.
        return list(self.keys())

    def __str__(self) -> str:
        return str(self._registry)