python-cqrs 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. cqrs/__init__.py +28 -0
  2. cqrs/adapters/__init__.py +0 -0
  3. cqrs/adapters/amqp.py +63 -0
  4. cqrs/adapters/kafka.py +102 -0
  5. cqrs/compressors/__init__.py +0 -0
  6. cqrs/compressors/protocol.py +9 -0
  7. cqrs/compressors/zlib.py +9 -0
  8. cqrs/container/__init__.py +3 -0
  9. cqrs/container/di.py +19 -0
  10. cqrs/container/protocol.py +20 -0
  11. cqrs/dispatcher/__init__.py +7 -0
  12. cqrs/dispatcher/dispatcher.py +64 -0
  13. cqrs/events/__init__.py +14 -0
  14. cqrs/events/bootstrap.py +55 -0
  15. cqrs/events/event.py +86 -0
  16. cqrs/events/event_emitter.py +86 -0
  17. cqrs/events/event_handler.py +24 -0
  18. cqrs/events/map.py +27 -0
  19. cqrs/mediator.py +103 -0
  20. cqrs/message_brokers/__init__.py +6 -0
  21. cqrs/message_brokers/amqp.py +23 -0
  22. cqrs/message_brokers/devnull.py +10 -0
  23. cqrs/message_brokers/kafka.py +14 -0
  24. cqrs/message_brokers/protocol.py +22 -0
  25. cqrs/middlewares/__init__.py +3 -0
  26. cqrs/middlewares/base.py +30 -0
  27. cqrs/middlewares/logging.py +37 -0
  28. cqrs/outbox/__init__.py +0 -0
  29. cqrs/outbox/producer.py +65 -0
  30. cqrs/outbox/protocol.py +44 -0
  31. cqrs/outbox/repository.py +54 -0
  32. cqrs/outbox/sqlalchemy.py +223 -0
  33. cqrs/registry.py +29 -0
  34. cqrs/requests/__init__.py +9 -0
  35. cqrs/requests/bootstrap.py +94 -0
  36. cqrs/requests/map.py +30 -0
  37. cqrs/requests/request.py +10 -0
  38. cqrs/requests/request_handler.py +45 -0
  39. cqrs/response.py +12 -0
  40. python_cqrs-0.0.2.dist-info/LICENSE +201 -0
  41. python_cqrs-0.0.2.dist-info/METADATA +233 -0
  42. python_cqrs-0.0.2.dist-info/RECORD +44 -0
  43. python_cqrs-0.0.2.dist-info/WHEEL +5 -0
  44. python_cqrs-0.0.2.dist-info/top_level.txt +1 -0
cqrs/mediator.py ADDED
@@ -0,0 +1,103 @@
1
+ import typing
2
+
3
+ from cqrs import container as di_container
4
+ from cqrs import dispatcher, events, middlewares, requests, response
5
+
6
+ Req = typing.TypeVar("Req", bound=requests.Request, contravariant=True)
7
+ Resp = typing.TypeVar("Resp", bound=response.Response, covariant=True)
8
+ E = typing.TypeVar("E", bound=events.Event, contravariant=True)
9
+
10
+
11
class RequestMediator:
    """
    The request mediator object.

    Dispatches a request to its registered handler and afterwards emits
    every event produced by that handler through the event emitter.

    Usage::

        message_broker = AMQPMessageBroker(
            dsn=f"amqp://{LOGIN}:{PASSWORD}@{HOSTNAME}/",
            queue_name="user_joined_domain",
            exchange_name="user_joined",
        )
        event_map = EventMap()
        event_map.bind(UserJoinedDomainEvent, UserJoinedDomainEventHandler)
        request_map = RequestMap()
        request_map.bind(JoinUserCommand, JoinUserCommandHandler)
        event_emitter = EventEmitter(event_map, container, message_broker)

        mediator = RequestMediator(
            request_map=request_map,
            container=container,
            event_emitter=event_emitter,
        )

        # Handles the command and publishes events produced by the command handler.
        await mediator.send(join_user_command)

    """

    def __init__(
        self,
        request_map: requests.RequestMap,
        container: di_container.Container,
        event_emitter: events.EventEmitter | None = None,
        middleware_chain: middlewares.MiddlewareChain | None = None,
        *,
        dispatcher_type: typing.Type[dispatcher.RequestDispatcher] = dispatcher.RequestDispatcher,
    ) -> None:
        self._event_emitter = event_emitter
        self._dispatcher = dispatcher_type(
            request_map=request_map,  # type: ignore
            container=container,  # type: ignore
            middleware_chain=middleware_chain,  # type: ignore
        )

    async def send(self, request: Req) -> Resp | None:
        """Dispatch ``request`` to its handler, emit produced events, return the response."""
        dispatch_result = await self._dispatcher.dispatch(request)

        if dispatch_result.events:
            await self._send_events(dispatch_result.events.copy())

        return dispatch_result.response

    async def _send_events(self, events: typing.List[E]) -> None:
        # Without an emitter configured, events are intentionally discarded.
        if not self._event_emitter:
            return

        # BUGFIX: the previous implementation drained the list with
        # ``events.pop()``, which emitted events in reverse (LIFO) order.
        # Emit them in the order the handler produced them.
        for event in events:
            await self._event_emitter.emit(event)
70
+
71
+
72
class EventMediator:
    """
    The event mediator object.

    Usage::
        event_map = EventMap()
        event_map.bind(UserJoinedECSTEvent, UserJoinedECSTEventHandler)
        mediator = EventMediator(
            event_map=event_map,
            container=container
        )

        # Handles ecst and notification events.
        await mediator.send(user_joined_event)
    """

    def __init__(
        self,
        event_map: events.EventMap,
        container: di_container.Container,
        middleware_chain: middlewares.MiddlewareChain | None = None,
        *,
        dispatcher_type: typing.Type[dispatcher.EventDispatcher] = dispatcher.EventDispatcher,
    ):
        # Build the dispatcher that resolves handlers for incoming events.
        dispatcher_kwargs = {
            "event_map": event_map,
            "container": container,
            "middleware_chain": middleware_chain,
        }
        self._dispatcher = dispatcher_type(**dispatcher_kwargs)  # type: ignore

    async def send(self, event: E) -> None:
        """Dispatch ``event`` to every handler bound in the event map."""
        await self._dispatcher.dispatch(event)
@@ -0,0 +1,6 @@
1
+ from cqrs.message_brokers.protocol import Message, MessageBroker
2
+
3
+ __all__ = (
4
+ "Message",
5
+ "MessageBroker",
6
+ )
@@ -0,0 +1,23 @@
1
+ import logging
2
+
3
+ import aio_pika
4
+ import orjson
5
+
6
+ from cqrs.adapters import amqp
7
+ from cqrs.message_brokers import protocol
8
+
9
+
10
class AMQPMessageBroker:
    """Message broker implementation that publishes messages over AMQP."""

    def __init__(self, dsn: str, queue_name: str, exchange_name: str, pika_log_level: str = "ERROR"):
        self.publisher = amqp.AMQPPublisher(url=dsn)
        self.queue_name = queue_name
        self.exchange_name = exchange_name
        # Quiet down the noisy AMQP client loggers by default.
        for noisy_logger in ("aiormq", "aio_pika"):
            logging.getLogger(noisy_logger).setLevel(pika_log_level)

    async def send_message(self, message: protocol.Message) -> None:
        """Serialize ``message`` to JSON and publish it to the configured exchange/queue."""
        body = orjson.dumps(message.model_dump(mode="json"))
        amqp_message = aio_pika.Message(body=body)
        await self.publisher.publish(
            message=amqp_message,
            exchange_name=self.exchange_name,
            queue_name=self.queue_name,
        )
@@ -0,0 +1,10 @@
1
+ import logging
2
+
3
+ from cqrs.message_brokers import protocol
4
+
5
+ logger = logging.getLogger("cqrs")
6
+
7
+
8
class DevnullMessageBroker:
    """No-op broker: drops every message, logging a warning instead of sending."""

    async def send_message(self, message: protocol.Message) -> None:
        # Lazy %-style arguments avoid formatting when WARNING is disabled;
        # also fixes the grammar of the original message ("will be skip").
        logger.warning("Event %s will be skipped", message)
@@ -0,0 +1,14 @@
1
+ import logging
2
+ import typing
3
+
4
+ from cqrs.adapters import kafka
5
+ from cqrs.message_brokers import protocol
6
+
7
+
8
class KafkaMessageBroker:
    """Message broker implementation that produces messages to Kafka."""

    def __init__(self, producer: kafka.KafkaProducer, aiokafka_log_level: typing.Text = "ERROR"):
        self._producer = producer
        # Quiet down the noisy aiokafka client logger by default.
        logging.getLogger("aiokafka").setLevel(aiokafka_log_level)

    async def send_message(self, message: protocol.Message) -> None:
        # Delivery is delegated entirely to the injected producer adapter.
        await self._producer.produce(message)
@@ -0,0 +1,22 @@
1
+ import typing
2
+ import uuid
3
+
4
+ import pydantic
5
+
6
+
7
class Message(pydantic.BaseModel):
    # Broker-agnostic envelope for events published to a message broker.
    message_type: str = pydantic.Field()
    message_name: str = pydantic.Field()
    # Idempotency key; generated automatically when not supplied.
    message_id: uuid.UUID = pydantic.Field(default_factory=uuid.uuid4)
    payload: dict = pydantic.Field()


class MessageBroker(typing.Protocol):
    """
    The interface over a message broker.

    Used for sending messages to message brokers
    (e.g. the AMQP and Kafka implementations in ``cqrs.message_brokers``).
    """

    async def send_message(self, message: Message) -> None:
        ...
@@ -0,0 +1,3 @@
1
+ from cqrs.middlewares.base import Middleware, MiddlewareChain
2
+
3
+ __all__ = ("Middleware", "MiddlewareChain")
@@ -0,0 +1,30 @@
1
+ import functools
2
+ import typing
3
+
4
+ from cqrs import requests, response
5
+
6
+ Req = typing.TypeVar("Req", bound=requests.Request, contravariant=True)
7
+ Res = typing.TypeVar("Res", response.Response, None, covariant=True)
8
+ HandleType = typing.Callable[[Req], typing.Awaitable[Res]]
9
+
10
+
11
class Middleware(typing.Protocol):
    """Structural interface for request middlewares.

    A middleware receives the request plus the next ``handle`` callable;
    it may run logic before/after awaiting ``handle`` and must return the response.
    """

    async def __call__(self, request: requests.Request, handle: HandleType) -> Res:
        ...
14
+
15
+
16
class MiddlewareChain:
    """Ordered collection of middlewares that can wrap a request handler."""

    def __init__(self) -> None:
        self._chain: list[Middleware] = []

    def set(self, chain: list[Middleware]) -> None:
        """Replace the whole chain at once."""
        self._chain = chain

    def add(self, middleware: Middleware) -> None:
        """Append a middleware to the end of the chain."""
        self._chain.append(middleware)

    def wrap(self, handle: HandleType) -> HandleType:
        """Wrap ``handle`` so the first added middleware runs outermost."""
        wrapped = handle
        # Iterate in reverse so middlewares fire in registration order.
        for middleware in reversed(self._chain):
            wrapped = functools.partial(middleware.__call__, handle=wrapped)
        return wrapped
@@ -0,0 +1,37 @@
1
+ import logging
2
+ import typing
3
+
4
+ from cqrs import requests, response
5
+
6
+ Req = typing.TypeVar("Req", bound=requests.Request, contravariant=True)
7
+ Res = typing.TypeVar("Res", response.Response, None, covariant=True)
8
+ HandleType = typing.Callable[[Req], typing.Awaitable[Res]]
9
+
10
+
11
class LoggingMiddleware:
    """Middleware that logs every request before and after it is handled."""

    def __init__(
        self,
        logger: logging.Logger | None = None,
    ) -> None:
        # Fall back to the package-wide "cqrs" logger when none is injected.
        self._logger = logger or logging.getLogger("cqrs")

    async def __call__(self, request: Req, handle: HandleType) -> Res:
        request_name = type(request).__name__
        self._logger.debug(
            "Handle %s request",
            request_name,
            extra={
                "request_json_fields": {"request": request.model_dump(mode="json")},
                "to_mask": True,
            },
        )
        resp = await handle(request)
        response_fields = {"response": resp.model_dump(mode="json") if resp else {}}
        self._logger.debug(
            "Request %s handled",
            request_name,
            extra={
                "request_json_fields": response_fields,
                "to_mask": True,
            },
        )
        return resp
File without changes
@@ -0,0 +1,65 @@
1
+ import asyncio
2
+ import logging
3
+ import typing
4
+ import uuid
5
+
6
+ from sqlalchemy.ext.asyncio import session as sql_session
7
+
8
+ import cqrs
9
+ from cqrs.message_brokers import protocol as broker_protocol
10
+ from cqrs.outbox import repository as repository_protocol
11
+
12
+ logger = logging.getLogger("cqrs")
13
+ logger.setLevel(logging.DEBUG)
14
+
15
+ SessionFactory: typing.TypeAlias = typing.Callable[[], sql_session.AsyncSession]
16
+ Serializer: typing.TypeAlias = typing.Callable[[repository_protocol.Event], typing.Awaitable[typing.Dict]]
17
+
18
+
19
class EventProducer:
    """Reads outboxed events from the repository and pushes them to the broker.

    Implements the producing side of the transactional-outbox pattern:
    events are loaded inside a repository session, sent to the message
    broker, and their status (PRODUCED / NOT_PRODUCED) is updated in the
    same session before it is committed.
    """

    def __init__(
        self,
        message_broker: broker_protocol.MessageBroker,
        repository: repository_protocol.OutboxedEventRepository,
        serializer: Serializer | None = None,
    ):
        self.message_broker = message_broker
        self.repository = repository
        # Optional async payload serializer; defaults to pydantic's model_dump.
        self.serializer = serializer

    async def periodically_task(self, batch_size: int = 100, wait_ms: int = 500) -> None:
        """Calls produce periodically with specified delay"""
        # Runs forever; cancel the surrounding task to stop the loop.
        while True:
            await asyncio.sleep(float(wait_ms) / 1000.0)
            await self.produce_batch(batch_size)

    async def send_message(self, session: repository_protocol.Session, event: cqrs.ECSTEvent | cqrs.NotificationEvent):
        """Send one event to the broker and record the produce outcome."""
        try:
            serialized = (await self.serializer(event)) if self.serializer else event.model_dump(mode="json")
            await self.message_broker.send_message(
                broker_protocol.Message(
                    message_type=event.event_type,
                    message_name=event.event_name,
                    message_id=event.event_id,
                    payload=serialized,
                ),
            )
        except Exception as e:
            # BUGFIX: the message claimed "kafka broker" although any broker
            # implementation may be injected; also use lazy %-style args.
            logger.error("Error while producing event %s to message broker: %s", event.event_id, e)
            await self.repository.update_status(session, event.event_id, repository_protocol.EventStatus.NOT_PRODUCED)
        else:
            await self.repository.update_status(session, event.event_id, repository_protocol.EventStatus.PRODUCED)

    async def produce_one(self, event_id: uuid.UUID) -> None:
        """Produce a single outboxed event by id, then commit the status change."""
        async with self.repository as session:
            event = await self.repository.get_one(session, event_id)
            if event:
                await self.send_message(session, event)
            await self.repository.commit(session)

    async def produce_batch(self, batch_size: int = 100) -> None:
        """Produce up to ``batch_size`` pending events, then commit statuses."""
        async with self.repository as session:
            events = await self.repository.get_many(session, batch_size)
            for event in events:
                await self.send_message(session, event)
            await self.repository.commit(session)
@@ -0,0 +1,44 @@
1
+ import abc
2
+ import asyncio
3
+ import typing
4
+ import uuid
5
+
6
+ from cqrs.events import event as ev
7
+
8
+ Event: typing.TypeAlias = ev.NotificationEvent | ev.ECSTEvent
9
+
10
+
11
class Outbox(typing.Protocol):
    """Structural interface for an event outbox (transactional-outbox pattern)."""

    def add(self, event: Event):
        """Adds event to outbox"""

    async def save(self):
        """Commits events to the storage"""

    async def get_events(self, batch_size: int = 100) -> typing.List[Event]:
        """Returns not produced events"""

    async def get_event(self, event_id: uuid.UUID) -> Event | None:
        """Returns event by id"""

    async def mark_as_produced(self, event_id: uuid.UUID) -> None:
        """Marks event as produced"""

    async def mark_as_failure(self, event_id: uuid.UUID) -> None:
        """Marks event as not produced with failure"""
29
+
30
+
31
class EventProducer(abc.ABC):
    """Abstract producer that flushes outboxed events to a message broker."""

    @abc.abstractmethod
    async def produce_one(self, event_id: uuid.UUID) -> None:
        """Produces event to broker"""

    @abc.abstractmethod
    async def produce_batch(self, batch_size: int = 100) -> None:
        """Produces events to broker"""

    async def periodically_task(self, batch_size: int = 100, wait_ms: int = 500) -> None:
        """Calls produce periodically with specified delay"""
        # Runs forever; cancel the surrounding task to stop the loop.
        while True:
            await asyncio.sleep(float(wait_ms) / 1000.0)
            await self.produce_batch(batch_size)
@@ -0,0 +1,54 @@
1
+ import abc
2
+ import enum
3
+ import typing
4
+ import uuid
5
+
6
+ import cqrs
7
+ from cqrs.events import event as ev
8
+
9
+ Event: typing.TypeAlias = ev.NotificationEvent | ev.ECSTEvent
10
+ Session = typing.TypeVar("Session")
11
+
12
+
13
class EventStatus(enum.StrEnum):
    # Lifecycle of an outboxed event: NEW -> PRODUCED | NOT_PRODUCED.
    NEW = "new"
    PRODUCED = "produced"
    NOT_PRODUCED = "not_produced"
17
+
18
+
19
class OutboxedEventRepository(abc.ABC, typing.Generic[Session]):
    """Abstract storage for outboxed events, generic over the session type."""

    def __init__(self, session_factory: typing.Callable[[], Session], compressor: cqrs.Compressor | None = None):
        # Factory invoked to open a new session per transaction scope.
        self._session_factory = session_factory
        # Optional payload compressor applied by concrete implementations.
        self._compressor = compressor

    @abc.abstractmethod
    async def __aenter__(self) -> Session:
        """start transaction"""

    @abc.abstractmethod
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """end transaction"""

    @abc.abstractmethod
    def add(self, session: Session, event: Event) -> None:
        """Add an event to the repository."""

    @abc.abstractmethod
    async def get_one(self, session: Session, event_id: uuid.UUID) -> Event | None:
        """Get one event from the repository."""

    @abc.abstractmethod
    async def get_many(self, session: Session, batch_size: int = 100) -> typing.List[Event]:
        """Get many events from the repository."""

    @abc.abstractmethod
    async def update_status(self, session: Session, event_id: uuid.UUID, new_status: EventStatus):
        """Update the event status"""

    @abc.abstractmethod
    async def commit(self, session: Session):
        """Commit the changes to the repository."""

    @abc.abstractmethod
    async def rollback(self, session: Session):
        """Rollback the changes to the repository."""
@@ -0,0 +1,223 @@
1
+ import asyncio
2
+ import enum
3
+ import logging
4
+ import typing
5
+ import uuid
6
+
7
+ import orjson
8
+ import sqlalchemy
9
+ from sqlalchemy import func
10
+ from sqlalchemy.dialects import mysql
11
+ from sqlalchemy.ext.asyncio import session as sql_session
12
+ from sqlalchemy.orm import registry
13
+
14
+ from cqrs.events import event as ev
15
+ from cqrs.outbox import repository
16
+
17
+ mapper_registry = registry()
18
+ Base = mapper_registry.generate_base()
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+
23
class EventType(enum.StrEnum):
    # Discriminator stored in the outbox table; mapped to concrete event
    # classes by the repository implementation below.
    ECST_EVENT = "ecst_event"
    NOTIFICATION_EVENT = "notification_event"


# Events that failed to produce this many times are no longer retried.
MAX_FLUSH_COUNTER_VALUE = 5
29
+
30
+
31
class OutboxModel(Base):
    """SQLAlchemy model backing the transactional outbox table."""

    __tablename__ = "outbox"

    # One row per (event id, event name) pair.
    __table_args__ = (
        sqlalchemy.UniqueConstraint(
            "event_id_bin",
            "event_name",
            name="event_id_unique_index",
        ),
    )
    id = sqlalchemy.Column(
        sqlalchemy.BigInteger(),
        sqlalchemy.Identity(),
        primary_key=True,
        nullable=False,
        autoincrement=True,
        comment="Identity",
    )
    event_id = sqlalchemy.Column(sqlalchemy.Uuid, nullable=False, comment="Event idempotency id")
    event_id_bin = sqlalchemy.Column(
        sqlalchemy.BINARY(16),
        nullable=False,
        comment="Event idempotency id in 16 bit presentation",
    )
    event_type = sqlalchemy.Column(sqlalchemy.Enum(EventType), nullable=False, comment="Event type")
    event_status = sqlalchemy.Column(
        sqlalchemy.Enum(repository.EventStatus),
        nullable=False,
        default=repository.EventStatus.NEW,
        comment="Event producing status",
    )
    flush_counter = sqlalchemy.Column(
        sqlalchemy.SmallInteger(),
        nullable=False,
        default=0,
        comment="Event producing flush counter",
    )
    event_name = sqlalchemy.Column(sqlalchemy.String(255), nullable=False, comment="Event name")

    created_at = sqlalchemy.Column(
        sqlalchemy.DateTime,
        nullable=False,
        server_default=func.now(),
        comment="Event creation timestamp",
    )
    payload = sqlalchemy.Column(mysql.BLOB, nullable=False, default={}, comment="Event payload")

    def row_to_dict(self) -> typing.Dict[typing.Text, typing.Any]:
        """Return a column-name -> value mapping for this row."""
        return {column.name: getattr(self, column.name) for column in self.__table__.columns}

    @classmethod
    def get_batch_query(cls, size: int) -> sqlalchemy.Select:
        # Selects up to `size` retryable events (NEW / NOT_PRODUCED, below the
        # retry limit), locking the rows FOR UPDATE so concurrent producers
        # do not pick the same rows.
        return (
            sqlalchemy.select(cls)
            .select_from(cls)
            .where(
                sqlalchemy.and_(
                    cls.event_status.in_([repository.EventStatus.NEW, repository.EventStatus.NOT_PRODUCED]),
                    cls.flush_counter < MAX_FLUSH_COUNTER_VALUE,
                ),
            )
            .order_by(cls.status_sorting_case().asc())
            .order_by(cls.id.asc())
            .limit(size)
            .with_for_update()
        )

    @classmethod
    def get_event_query(cls, event_id: uuid.UUID) -> sqlalchemy.Select:
        # Same filters as get_batch_query, narrowed to one event id.
        # NOTE(review): func.UUID_TO_BIN is a MySQL function — this query
        # presumably targets MySQL only; confirm before using another dialect.
        return (
            sqlalchemy.select(cls)
            .select_from(cls)
            .where(
                sqlalchemy.and_(
                    cls.event_id_bin == func.UUID_TO_BIN(event_id),
                    cls.event_status.in_([repository.EventStatus.NEW, repository.EventStatus.NOT_PRODUCED]),
                    cls.flush_counter < MAX_FLUSH_COUNTER_VALUE,
                ),
            )
            .order_by(cls.status_sorting_case().asc())
            .order_by(cls.id.asc())
            .with_for_update()
        )

    @classmethod
    def update_status_query(
        cls,
        event_id: uuid.UUID,
        status: typing.Literal[
            repository.EventStatus.PRODUCED,
            repository.EventStatus.NOT_PRODUCED,
        ],
    ) -> sqlalchemy.Update:
        values = {"event_status": status}
        # A failed produce attempt also bumps the retry counter.
        if status == repository.EventStatus.NOT_PRODUCED:
            values["flush_counter"] = cls.flush_counter + 1

        return sqlalchemy.update(cls).where(cls.event_id_bin == func.UUID_TO_BIN(event_id)).values(**values)

    @classmethod
    def status_sorting_case(cls) -> sqlalchemy.case:
        # Orders rows NEW first, then NOT_PRODUCED retries, then PRODUCED.
        return sqlalchemy.case(
            {
                repository.EventStatus.NEW: 1,
                repository.EventStatus.NOT_PRODUCED: 2,
                repository.EventStatus.PRODUCED: 3,
            },
            value=cls.event_status,
            else_=4,
        )
141
+
142
+
143
class SqlAlchemyOutboxedEventRepository(repository.OutboxedEventRepository[sql_session.AsyncSession]):
    """SQLAlchemy (async) implementation of the outboxed-event repository."""

    # Maps the stored event_type discriminator to the pydantic event class.
    EVENT_CLASS_MAPPING: typing.ClassVar[typing.Dict[EventType, typing.Type[repository.Event]]] = {
        EventType.NOTIFICATION_EVENT: ev.NotificationEvent,
        EventType.ECST_EVENT: ev.ECSTEvent,
    }

    def add(self, session: sql_session.AsyncSession, event: repository.Event) -> None:
        """Stage ``event`` for insertion into the outbox table."""
        bytes_payload = orjson.dumps(event.payload)
        if self._compressor:
            bytes_payload = self._compressor.compress(bytes_payload)
        session.add(
            OutboxModel(
                event_id=event.event_id,
                event_id_bin=func.UUID_TO_BIN(event.event_id),
                event_type=EventType(event.event_type),
                event_name=event.event_name,
                created_at=event.event_timestamp,
                payload=bytes_payload,
            ),
        )

    def _process_events(self, model: OutboxModel) -> repository.Event:
        """Decompress/deserialize a row and validate it into an event model."""
        event_dict = model.row_to_dict()
        event_dict["payload"] = orjson.loads(
            self._compressor.decompress(event_dict["payload"]) if self._compressor else event_dict["payload"],
        )
        return self.EVENT_CLASS_MAPPING[event_dict["event_type"]].model_validate(event_dict)

    async def get_many(self, session: sql_session.AsyncSession, batch_size: int = 100) -> typing.List[repository.Event]:
        """Fetch and deserialize up to ``batch_size`` pending events."""
        events: typing.Sequence[OutboxModel] = (
            (await session.execute(OutboxModel.get_batch_query(batch_size))).scalars().all()
        )

        tasks = []
        for event in events:
            # Skip rows whose discriminator has no registered event class.
            if not self.EVENT_CLASS_MAPPING.get(event.event_type):
                logger.warning(f"Unknown event type for {event}")
                continue
            tasks.append(asyncio.to_thread(self._process_events, event))

        return await asyncio.gather(*tasks)  # noqa

    async def get_one(self, session: sql_session.AsyncSession, event_id: uuid.UUID) -> repository.Event | None:
        """Fetch and deserialize a single pending event, or ``None``."""
        event: OutboxModel | None = (await session.execute(OutboxModel.get_event_query(event_id))).scalar()

        if event is None:
            return None

        if not self.EVENT_CLASS_MAPPING.get(event.event_type):
            logger.warning(f"Unknown event type for {event}")
            return None

        # FIX: reuse the shared row -> event conversion (_process_events)
        # instead of duplicating the decompress/deserialize logic inline.
        return self._process_events(event)

    async def update_status(
        self,
        session: sql_session.AsyncSession,
        event_id: uuid.UUID,
        new_status: repository.EventStatus,
    ) -> None:
        await session.execute(OutboxModel.update_status_query(event_id, new_status))

    async def commit(self, session: sql_session.AsyncSession):
        await session.commit()

    async def rollback(self, session: sql_session.AsyncSession):
        await session.rollback()

    async def __aenter__(self):
        self.session = self._session_factory()
        return self.session

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Any uncommitted changes are discarded when the scope exits.
        await self.session.rollback()
        await self.session.close()
        self.session = None
cqrs/registry.py ADDED
@@ -0,0 +1,29 @@
1
import typing
from collections import abc

_VT = typing.TypeVar("_VT")
_KT = typing.TypeVar("_KT")


class InMemoryRegistry(abc.MutableMapping[_KT, _VT]):
    """Write-once in-memory mapping: keys may be set once and never deleted."""

    _registry: typing.Dict[_KT, _VT]

    def __init__(self):
        self._registry = {}

    def __setitem__(self, __key: _KT, __value: _VT) -> None:
        # Registration is write-once: re-binding an existing key is an error.
        if __key in self._registry:
            raise KeyError(f"{__key} already exists in registry")
        self._registry[__key] = __value

    def __delitem__(self, __key):
        # Deletion is deliberately unsupported for a registry.
        raise TypeError(f"{self.__class__.__name__} has no delete method")

    def __getitem__(self, __key: _KT) -> _VT:
        return self._registry[__key]

    def __len__(self):
        return len(self._registry)

    def __iter__(self):
        return iter(self._registry)
@@ -0,0 +1,9 @@
1
+ from cqrs.requests.map import RequestMap
2
+ from cqrs.requests.request import Request
3
+ from cqrs.requests.request_handler import RequestHandler
4
+
5
+ __all__ = (
6
+ "RequestMap",
7
+ "Request",
8
+ "RequestHandler",
9
+ )