eventsourcing-9.3.5-py3-none-any.whl → eventsourcing-9.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of eventsourcing might be problematic.

@@ -4,27 +4,18 @@ import json
 import uuid
 from abc import ABC, abstractmethod
 from collections import deque
+from collections.abc import Iterator, Mapping, Sequence
 from dataclasses import dataclass
 from datetime import datetime
 from decimal import Decimal
-from threading import Condition, Event, Lock, Semaphore, Timer
-from time import time
-from types import ModuleType
-from typing import (
-    Any,
-    Dict,
-    Generic,
-    Iterator,
-    List,
-    Mapping,
-    Sequence,
-    Type,
-    TypeVar,
-    Union,
-    cast,
-)
+from queue import Queue
+from threading import Condition, Event, Lock, Semaphore, Thread, Timer
+from time import monotonic, sleep, time
+from types import GenericAlias, ModuleType
+from typing import TYPE_CHECKING, Any, Generic, Union, cast
 from uuid import UUID
-from warnings import warn
+
+from typing_extensions import TypeVar

 from eventsourcing.domain import DomainEventProtocol, EventSourcingError
 from eventsourcing.utils import (
@@ -35,11 +26,12 @@ from eventsourcing.utils import (
     strtobool,
 )

+if TYPE_CHECKING:
+    from typing_extensions import Self
+

 class Transcoding(ABC):
-    """
-    Abstract base class for custom transcodings.
-    """
+    """Abstract base class for custom transcodings."""

     type: type
     name: str
@@ -54,20 +46,7 @@ class Transcoding(ABC):


 class Transcoder(ABC):
-    """
-    Abstract base class for transcoders.
-    """
-
-    def __init__(self) -> None:
-        self.types: Dict[type, Transcoding] = {}
-        self.names: Dict[str, Transcoding] = {}
-
-    def register(self, transcoding: Transcoding) -> None:
-        """
-        Registers given transcoding with the transcoder.
-        """
-        self.types[transcoding.type] = transcoding
-        self.names[transcoding.name] = transcoding
+    """Abstract base class for transcoders."""

     @abstractmethod
     def encode(self, obj: Any) -> bytes:
@@ -78,13 +57,16 @@ class Transcoder(ABC):
         """Decodes obj from bytes."""


+class TranscodingNotRegisteredError(EventSourcingError, TypeError):
+    """Raised when a transcoding isn't registered with JSONTranscoder."""
+
+
 class JSONTranscoder(Transcoder):
-    """
-    Extensible transcoder that uses the Python :mod:`json` module.
-    """
+    """Extensible transcoder that uses the Python :mod:`json` module."""

     def __init__(self) -> None:
-        super().__init__()
+        self.types: dict[type, Transcoding] = {}
+        self.names: dict[str, Transcoding] = {}
         self.encoder = json.JSONEncoder(
             default=self._encode_obj,
             separators=(",", ":"),
@@ -92,19 +74,20 @@ class JSONTranscoder(Transcoder):
         )
         self.decoder = json.JSONDecoder(object_hook=self._decode_obj)

+    def register(self, transcoding: Transcoding) -> None:
+        """Registers given transcoding with the transcoder."""
+        self.types[transcoding.type] = transcoding
+        self.names[transcoding.name] = transcoding
+
     def encode(self, obj: Any) -> bytes:
-        """
-        Encodes given object as a bytes array.
-        """
+        """Encodes given object as a bytes array."""
         return self.encoder.encode(obj).encode("utf8")

     def decode(self, data: bytes) -> Any:
-        """
-        Decodes bytes array as previously encoded object.
-        """
+        """Decodes bytes array as previously encoded object."""
         return self.decoder.decode(data.decode("utf8"))

-    def _encode_obj(self, o: Any) -> Dict[str, Any]:
+    def _encode_obj(self, o: Any) -> dict[str, Any]:
         try:
             transcoding = self.types[type(o)]
         except KeyError:
@@ -113,14 +96,14 @@ class JSONTranscoder(Transcoder):
                 "serializable. Please define and register "
                 "a custom transcoding for this type."
             )
-            raise TypeError(msg) from None
+            raise TranscodingNotRegisteredError(msg) from None
         else:
             return {
                 "_type_": transcoding.name,
                 "_data_": transcoding.encode(o),
             }

-    def _decode_obj(self, d: Dict[str, Any]) -> Any:
+    def _decode_obj(self, d: dict[str, Any]) -> Any:
         if len(d) == 2:
             try:
                 _type_ = d["_type_"]
@@ -133,14 +116,14 @@ class JSONTranscoder(Transcoder):
                 return d
             else:
                 try:
-                    transcoding = self.names[cast(str, _type_)]
+                    transcoding = self.names[cast("str", _type_)]
                 except KeyError as e:
                     msg = (
-                        f"Data serialized with name '{cast(str, _type_)}' is not "
+                        f"Data serialized with name '{cast('str', _type_)}' is not "
                         "deserializable. Please register a "
                         "custom transcoding for this type."
                     )
-                    raise TypeError(msg) from e
+                    raise TranscodingNotRegisteredError(msg) from e
                 else:
                     return transcoding.decode(_data_)
         else:
@@ -148,9 +131,7 @@ class JSONTranscoder(Transcoder):


 class UUIDAsHex(Transcoding):
-    """
-    Transcoding that represents :class:`UUID` objects as hex values.
-    """
+    """Transcoding that represents :class:`UUID` objects as hex values."""

     type = UUID
     name = "uuid_hex"
@@ -164,9 +145,7 @@ class UUIDAsHex(Transcoding):


 class DecimalAsStr(Transcoding):
-    """
-    Transcoding that represents :class:`Decimal` objects as strings.
-    """
+    """Transcoding that represents :class:`Decimal` objects as strings."""

     type = Decimal
     name = "decimal_str"
@@ -179,9 +158,7 @@ class DecimalAsStr(Transcoding):


 class DatetimeAsISO(Transcoding):
-    """
-    Transcoding that represents :class:`datetime` objects as ISO strings.
-    """
+    """Transcoding that represents :class:`datetime` objects as ISO strings."""

     type = datetime
     name = "datetime_iso"
@@ -196,70 +173,55 @@ class DatetimeAsISO(Transcoding):

 @dataclass(frozen=True)
 class StoredEvent:
-    """
-    Frozen dataclass that represents :class:`~eventsourcing.domain.DomainEvent`
+    """Frozen dataclass that represents :class:`~eventsourcing.domain.DomainEvent`
     objects, such as aggregate :class:`~eventsourcing.domain.Aggregate.Event`
     objects and :class:`~eventsourcing.domain.Snapshot` objects.
-
-    Constructor parameters:
-
-    :param UUID originator_id: ID of the originating aggregate
-    :param int originator_version: version of the originating aggregate
-    :param str topic: topic of the domain event object class
-    :param bytes state: serialised state of the domain event object
     """

     originator_id: uuid.UUID
+    """ID of the originating aggregate."""
     originator_version: int
+    """Position in an aggregate sequence."""
     topic: str
+    """Topic of a domain event object class."""
     state: bytes
+    """Serialised state of a domain event object."""


 class Compressor(ABC):
-    """
-    Base class for compressors.
-    """
+    """Base class for compressors."""

     @abstractmethod
     def compress(self, data: bytes) -> bytes:
-        """
-        Compress bytes.
-        """
+        """Compress bytes."""

     @abstractmethod
     def decompress(self, data: bytes) -> bytes:
-        """
-        Decompress bytes.
-        """
+        """Decompress bytes."""


 class Cipher(ABC):
-    """
-    Base class for ciphers.
-    """
+    """Base class for ciphers."""

     @abstractmethod
     def __init__(self, environment: Environment):
-        """
-        Initialises cipher with given environment.
-        """
+        """Initialises cipher with given environment."""

     @abstractmethod
     def encrypt(self, plaintext: bytes) -> bytes:
-        """
-        Return ciphertext for given plaintext.
-        """
+        """Return ciphertext for given plaintext."""

     @abstractmethod
     def decrypt(self, ciphertext: bytes) -> bytes:
-        """
-        Return plaintext for given ciphertext.
-        """
+        """Return plaintext for given ciphertext."""
+
+
+class MapperDeserialisationError(EventSourcingError, ValueError):
+    """Raised when deserialization fails in a Mapper."""


 class Mapper:
-    """
-    Converts between domain event objects and :class:`StoredEvent` objects.
+    """Converts between domain event objects and :class:`StoredEvent` objects.

     Uses a :class:`Transcoder`, and optionally a cryptographic cipher and compressor.
     """
@@ -275,9 +237,7 @@ class Mapper:
         self.cipher = cipher

     def to_stored_event(self, domain_event: DomainEventProtocol) -> StoredEvent:
-        """
-        Converts the given domain event to a :class:`StoredEvent` object.
-        """
+        """Converts the given domain event to a :class:`StoredEvent` object."""
         topic = get_topic(domain_event.__class__)
         event_state = domain_event.__dict__.copy()
         originator_id = event_state.pop("originator_id")
@@ -297,25 +257,24 @@ class Mapper:
             state=stored_state,
         )

-    def from_domain_event(self, domain_event: DomainEventProtocol) -> StoredEvent:
-        warn(
-            "'from_domain_event()' is deprecated, use 'to_stored_event()' instead",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-
-        return self.to_stored_event(domain_event)
-
     def to_domain_event(self, stored_event: StoredEvent) -> DomainEventProtocol:
-        """
-        Converts the given :class:`StoredEvent` to a domain event object.
-        """
+        """Converts the given :class:`StoredEvent` to a domain event object."""
         stored_state = stored_event.state
-        if self.cipher:
-            stored_state = self.cipher.decrypt(stored_state)
-        if self.compressor:
-            stored_state = self.compressor.decompress(stored_state)
-        event_state: Dict[str, Any] = self.transcoder.decode(stored_state)
+        try:
+            if self.cipher:
+                stored_state = self.cipher.decrypt(stored_state)
+            if self.compressor:
+                stored_state = self.compressor.decompress(stored_state)
+            event_state: dict[str, Any] = self.transcoder.decode(stored_state)
+        except Exception as e:
+            msg = (
+                f"Failed to deserialise state of stored event with "
+                f"topic '{stored_event.topic}', "
+                f"originator_id '{stored_event.originator_id}' and "
+                f"originator_version {stored_event.originator_version}: {e}"
+            )
+            raise MapperDeserialisationError(msg) from e
+
         event_state["originator_id"] = stored_event.originator_id
         event_state["originator_version"] = stored_event.originator_version
         cls = resolve_topic(stored_event.topic)
@@ -331,42 +290,34 @@ class Mapper:


 class RecordConflictError(EventSourcingError):
-    """
-    Legacy exception, replaced with IntegrityError.
-    """
+    """Legacy exception, replaced with IntegrityError."""


 class PersistenceError(EventSourcingError):
-    """
-    The base class of the other exceptions in this module.
+    """The base class of the other exceptions in this module.

     Exception class names follow https://www.python.org/dev/peps/pep-0249/#exceptions
     """


 class InterfaceError(PersistenceError):
-    """
-    Exception raised for errors that are related to the database
+    """Exception raised for errors that are related to the database
     interface rather than the database itself.
     """


 class DatabaseError(PersistenceError):
-    """
-    Exception raised for errors that are related to the database.
-    """
+    """Exception raised for errors that are related to the database."""


 class DataError(DatabaseError):
-    """
-    Exception raised for errors that are due to problems with the
+    """Exception raised for errors that are due to problems with the
     processed data like division by zero, numeric value out of range, etc.
     """


 class OperationalError(DatabaseError):
-    """
-    Exception raised for errors that are related to the database's
+    """Exception raised for errors that are related to the database's
     operation and not necessarily under the control of the programmer,
     e.g. an unexpected disconnect occurs, the data source name is not
     found, a transaction could not be processed, a memory allocation
@@ -375,50 +326,49 @@ class OperationalError(DatabaseError):


 class IntegrityError(DatabaseError, RecordConflictError):
-    """
-    Exception raised when the relational integrity of the
+    """Exception raised when the relational integrity of the
     database is affected, e.g. a foreign key check fails.
     """


 class InternalError(DatabaseError):
-    """
-    Exception raised when the database encounters an internal
+    """Exception raised when the database encounters an internal
     error, e.g. the cursor is not valid anymore, the transaction
     is out of sync, etc.
     """


 class ProgrammingError(DatabaseError):
-    """
-    Exception raised for database programming errors, e.g. table
+    """Exception raised for database programming errors, e.g. table
     not found or already exists, syntax error in the SQL statement,
     wrong number of parameters specified, etc.
     """


 class NotSupportedError(DatabaseError):
-    """
-    Exception raised in case a method or database API was used
+    """Exception raised in case a method or database API was used
     which is not supported by the database, e.g. calling the
     rollback() method on a connection that does not support
     transaction or has transactions turned off.
     """


-class AggregateRecorder(ABC):
-    """
-    Abstract base class for recorders that record and
-    retrieve stored events for domain model aggregates.
-    """
+class WaitInterruptedError(PersistenceError):
+    """Raised when waiting for a tracking record is interrupted."""
+
+
+class Recorder:
+    pass
+
+
+class AggregateRecorder(Recorder, ABC):
+    """Abstract base class for inserting and selecting stored events."""

     @abstractmethod
     def insert_events(
-        self, stored_events: List[StoredEvent], **kwargs: Any
+        self, stored_events: list[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
-        """
-        Writes stored events into database.
-        """
+        """Writes stored events into database."""

     @abstractmethod
     def select_events(
@@ -429,90 +379,148 @@ class AggregateRecorder(ABC):
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> List[StoredEvent]:
-        """
-        Reads stored events from database.
-        """
+    ) -> list[StoredEvent]:
+        """Reads stored events from database."""


 @dataclass(frozen=True)
 class Notification(StoredEvent):
-    """
-    Frozen dataclass that represents domain event notifications.
-    """
+    """Frozen dataclass that represents domain event notifications."""

     id: int
+    """Position in an application sequence."""


 class ApplicationRecorder(AggregateRecorder):
-    """
-    Abstract base class for recorders that record and
-    retrieve stored events for domain model aggregates.
-
-    Extends the behaviour of aggregate recorders by
-    recording aggregate events in a total order that
-    allows the stored events also to be retrieved
-    as event notifications.
+    """Abstract base class for recording events in both aggregate
+    and application sequences.
     """

     @abstractmethod
     def select_notifications(
         self,
-        start: int,
+        start: int | None,
         limit: int,
         stop: int | None = None,
         topics: Sequence[str] = (),
-    ) -> List[Notification]:
-        """
-        Returns a list of event notifications
-        from 'start', limited by 'limit' and
-        optionally by 'stop'.
+        *,
+        inclusive_of_start: bool = True,
+    ) -> list[Notification]:
+        """Returns a list of Notification objects representing events from an
+        application sequence. If `inclusive_of_start` is True (the default),
+        the returned Notification objects will have IDs greater than or equal
+        to `start` and less than or equal to `stop`. If `inclusive_of_start`
+        is False, the Notification objects will have IDs greater than `start`
+        and less than or equal to `stop`.
         """

     @abstractmethod
-    def max_notification_id(self) -> int:
+    def max_notification_id(self) -> int | None:
+        """Returns the largest notification ID in an application sequence,
+        or None if no stored events have been recorded.
         """
-        Returns the maximum notification ID.
+
+    @abstractmethod
+    def subscribe(
+        self, gt: int | None = None, topics: Sequence[str] = ()
+    ) -> Subscription[ApplicationRecorder]:
+        """Returns an iterator of Notification objects representing events from an
+        application sequence.
+
+        The iterator will block after the last recorded event has been yielded, but
+        will then continue yielding newly recorded events when they are recorded.
+
+        Notifications will have IDs greater than the optional `gt` argument.
         """


-class ProcessRecorder(ApplicationRecorder):
+class TrackingRecorder(Recorder, ABC):
+    """Abstract base class for recorders that record tracking
+    objects atomically with other state.
     """
-    Abstract base class for recorders that record and
-    retrieve stored events for domain model aggregates.

-    Extends the behaviour of applications recorders by
-    recording aggregate events with tracking information
-    that records the position of a processed event
-    notification in a notification log.
-    """
+    @abstractmethod
+    def insert_tracking(self, tracking: Tracking) -> None:
+        """Records a tracking object."""

     @abstractmethod
-    def max_tracking_id(self, application_name: str) -> int:
-        """
-        Returns the largest notification ID across all tracking records
-        for the named application. Returns zero if there are no tracking
-        records.
+    def max_tracking_id(self, application_name: str) -> int | None:
+        """Returns the largest notification ID across all recorded tracking objects
+        for the named application, or None if no tracking objects have been recorded.
         """

     @abstractmethod
-    def has_tracking_id(self, application_name: str, notification_id: int) -> bool:
-        """
-        Returns true if a tracking record with the given application name
-        and notification ID exists, otherwise returns false.
+    def has_tracking_id(
+        self, application_name: str, notification_id: int | None
+    ) -> bool:
+        """Returns True if a tracking object with the given application name
+        and notification ID has been recorded, and True if given notification_id is
+        None, otherwise returns False.
         """

+    def wait(
+        self,
+        application_name: str,
+        notification_id: int | None,
+        timeout: float = 1.0,
+        interrupt: Event | None = None,
+    ) -> None:
+        """Block until a tracking object with the given application name and a
+        notification ID greater than equal to the given value has been recorded.
+
+        Polls max_tracking_id() with exponential backoff until the timeout
+        is reached, or until the optional interrupt event is set.
+
+        The timeout argument should be a floating point number specifying a
+        timeout for the operation in seconds (or fractions thereof). The default
+        is 1.0 seconds.
+
+        Raises TimeoutError if the timeout is reached.
+
+        Raises WaitInterruptError if the `interrupt` is set before `timeout` is reached.
+        """
+        deadline = monotonic() + timeout
+        sleep_interval_ms = 100.0
+        max_sleep_interval_ms = 800.0
+        while True:
+            max_tracking_id = self.max_tracking_id(application_name)
+            if notification_id is None or (
+                max_tracking_id is not None and max_tracking_id >= notification_id
+            ):
+                break
+            if interrupt:
+                if interrupt.wait(timeout=sleep_interval_ms / 1000):
+                    raise WaitInterruptedError
+            else:
+                sleep(sleep_interval_ms / 1000)
+            remaining = deadline - monotonic()
+            if remaining < 0:
+                msg = (
+                    f"Timed out waiting for notification {notification_id} "
+                    f"from application '{application_name}' to be processed"
+                )
+                raise TimeoutError(msg)
+            sleep_interval_ms = min(
+                sleep_interval_ms * 2, remaining * 1000, max_sleep_interval_ms
+            )
+
+
+class ProcessRecorder(TrackingRecorder, ApplicationRecorder, ABC):
+    pass
+

 @dataclass(frozen=True)
 class Recording:
+    """Represents the recording of a domain event."""
+
     domain_event: DomainEventProtocol
+    """The domain event that has been recorded."""
     notification: Notification
+    """A Notification that represents the domain event in the application sequence."""


 class EventStore:
-    """
-    Stores and retrieves domain events.
-    """
+    """Stores and retrieves domain events."""

     def __init__(
         self,
@@ -524,10 +532,8 @@ class EventStore:

     def put(
         self, domain_events: Sequence[DomainEventProtocol], **kwargs: Any
-    ) -> List[Recording]:
-        """
-        Stores domain events in aggregate sequence.
-        """
+    ) -> list[Recording]:
+        """Stores domain events in aggregate sequence."""
         stored_events = list(map(self.mapper.to_stored_event, domain_events))
         recordings = []
         notification_ids = self.recorder.insert_events(stored_events, **kwargs)
@@ -557,9 +563,7 @@ class EventStore:
         desc: bool = False,
         limit: int | None = None,
     ) -> Iterator[DomainEventProtocol]:
-        """
-        Retrieves domain events from aggregate sequence.
-        """
+        """Retrieves domain events from aggregate sequence."""
         return map(
             self.mapper.to_domain_event,
             self.recorder.select_events(
@@ -572,32 +576,40 @@ class EventStore:
         )


-TInfrastructureFactory = TypeVar(
-    "TInfrastructureFactory", bound="InfrastructureFactory"
+TTrackingRecorder = TypeVar(
+    "TTrackingRecorder", bound=TrackingRecorder, default=TrackingRecorder
 )


-class InfrastructureFactory(ABC):
-    """
-    Abstract base class for infrastructure factories.
-    """
+class InfrastructureFactoryError(EventSourcingError):
+    """Raised when an infrastructure factory cannot be created."""
+
+
+class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
+    """Abstract base class for infrastructure factories."""

     PERSISTENCE_MODULE = "PERSISTENCE_MODULE"
+    TRANSCODER_TOPIC = "TRANSCODER_TOPIC"
     MAPPER_TOPIC = "MAPPER_TOPIC"
     CIPHER_TOPIC = "CIPHER_TOPIC"
     COMPRESSOR_TOPIC = "COMPRESSOR_TOPIC"
     IS_SNAPSHOTTING_ENABLED = "IS_SNAPSHOTTING_ENABLED"
+    APPLICATION_RECORDER_TOPIC = "APPLICATION_RECORDER_TOPIC"
+    TRACKING_RECORDER_TOPIC = "TRACKING_RECORDER_TOPIC"
+    PROCESS_RECORDER_TOPIC = "PROCESS_RECORDER_TOPIC"

     @classmethod
     def construct(
-        cls: Type[TInfrastructureFactory], env: Environment
-    ) -> TInfrastructureFactory:
-        """
-        Constructs concrete infrastructure factory for given
+        cls: type[InfrastructureFactory[TTrackingRecorder]],
+        env: Environment | None = None,
+    ) -> InfrastructureFactory[TTrackingRecorder]:
+        """Constructs concrete infrastructure factory for given
         named application. Reads and resolves persistence
         topic from environment variable 'PERSISTENCE_MODULE'.
         """
-        factory_cls: Type[InfrastructureFactory]
+        factory_cls: type[InfrastructureFactory[TTrackingRecorder]]
+        if env is None:
+            env = Environment()
         topic = (
             env.get(
                 cls.PERSISTENCE_MODULE,
@@ -614,26 +626,33 @@ class InfrastructureFactory(ABC):
             or "eventsourcing.popo"
         )
         try:
-            obj: Type[InfrastructureFactory] | ModuleType = resolve_topic(topic)
+            obj: type[InfrastructureFactory[TTrackingRecorder]] | ModuleType = (
+                resolve_topic(topic)
+            )
         except TopicError as e:
             msg = (
                 "Failed to resolve persistence module topic: "
                 f"'{topic}' from environment "
                 f"variable '{cls.PERSISTENCE_MODULE}'"
             )
-            raise OSError(msg) from e
+            raise InfrastructureFactoryError(msg) from e

         if isinstance(obj, ModuleType):
             # Find the factory in the module.
-            factory_classes: List[Type[InfrastructureFactory]] = [
-                member
-                for member in obj.__dict__.values()
+            factory_classes: list[type[InfrastructureFactory[TTrackingRecorder]]] = []
+            for member in obj.__dict__.values():
                 if (
                     member is not InfrastructureFactory
-                    and isinstance(member, type)
-                    and issubclass(member, InfrastructureFactory)
-                )
-            ]
+                    and isinstance(member, type)  # Look for classes...
+                    and isinstance(member, type)  # Look for classes...
+                    and not isinstance(
+                        member, GenericAlias
+                    )  # Issue with Python 3.9 and 3.10.
+                    and issubclass(member, InfrastructureFactory)  # Ignore base class.
+                    and member not in factory_classes  # Forgive aliases.
+                ):
+                    factory_classes.append(member)
+
             if len(factory_classes) == 1:
                 factory_cls = factory_classes[0]
             else:
@@ -641,45 +660,51 @@ class InfrastructureFactory(ABC):
                 f"Found {len(factory_classes)} infrastructure factory classes in"
                 f" '{topic}', expected 1."
             )
-            raise AssertionError(msg)
+            raise InfrastructureFactoryError(msg)
         elif isinstance(obj, type) and issubclass(obj, InfrastructureFactory):
             factory_cls = obj
         else:
-            msg = f"Not an infrastructure factory class or module: {topic}"
-            raise AssertionError(msg)
-        return cast(TInfrastructureFactory, factory_cls(env=env))
+            msg = (
+                f"Topic '{topic}' didn't resolve to a persistence module "
+                f"or infrastructure factory class: {obj}"
+            )
+            raise InfrastructureFactoryError(msg)
+        return factory_cls(env=env)

     def __init__(self, env: Environment):
-        """
-        Initialises infrastructure factory object with given application name.
-        """
+        """Initialises infrastructure factory object with given application name."""
         self.env = env

     def transcoder(
         self,
     ) -> Transcoder:
-        """
-        Constructs a transcoder.
-        """
-        # TODO: Implement support for TRANSCODER_TOPIC.
-        return JSONTranscoder()
+        """Constructs a transcoder."""
+        transcoder_topic = self.env.get(self.TRANSCODER_TOPIC)
+        if transcoder_topic:
+            transcoder_class: type[Transcoder] = resolve_topic(transcoder_topic)
+        else:
+            transcoder_class = JSONTranscoder
+        return transcoder_class()

     def mapper(
-        self, transcoder: Transcoder, mapper_class: Type[Mapper] = Mapper
+        self,
+        transcoder: Transcoder | None = None,
+        mapper_class: type[Mapper] | None = None,
     ) -> Mapper:
-        """
-        Constructs a mapper.
-        """
-        # TODO: Implement support for MAPPER_TOPIC.
+        """Constructs a mapper."""
+        if mapper_class is None:
+            mapper_topic = self.env.get(self.MAPPER_TOPIC)
+            mapper_class = resolve_topic(mapper_topic) if mapper_topic else Mapper
+
+        assert isinstance(mapper_class, type) and issubclass(mapper_class, Mapper)
         return mapper_class(
-            transcoder=transcoder,
+            transcoder=transcoder or self.transcoder(),
             cipher=self.cipher(),
             compressor=self.compressor(),
         )

     def cipher(self) -> Cipher | None:
-        """
-        Reads environment variables 'CIPHER_TOPIC'
+        """Reads environment variables 'CIPHER_TOPIC'
         and 'CIPHER_KEY' to decide whether or not
         to construct a cipher.
         """
@@ -690,20 +715,19 @@ class InfrastructureFactory(ABC):
             cipher_topic = default_cipher_topic

         if cipher_topic:
-            cipher_cls: Type[Cipher] = resolve_topic(cipher_topic)
+            cipher_cls: type[Cipher] = resolve_topic(cipher_topic)
             cipher = cipher_cls(self.env)

         return cipher

     def compressor(self) -> Compressor | None:
-        """
-        Reads environment variable 'COMPRESSOR_TOPIC' to
+        """Reads environment variable 'COMPRESSOR_TOPIC' to
         decide whether or not to construct a compressor.
         """
         compressor: Compressor | None = None
         compressor_topic = self.env.get(self.COMPRESSOR_TOPIC)
         if compressor_topic:
-            compressor_cls: Type[Compressor] | Compressor = resolve_topic(
+            compressor_cls: type[Compressor] | Compressor = resolve_topic(
                 compressor_topic
             )
             if isinstance(compressor_cls, type):
@@ -712,49 +736,49 @@ class InfrastructureFactory(ABC):
                 compressor = compressor_cls
         return compressor

-    @staticmethod
-    def event_store(**kwargs: Any) -> EventStore:
-        """
-        Constructs an event store.
-        """
-        return EventStore(**kwargs)
+    def event_store(
+        self,
+        mapper: Mapper | None = None,
+        recorder: AggregateRecorder | None = None,
+    ) -> EventStore:
+        """Constructs an event store."""
+        return EventStore(
+            mapper=mapper or self.mapper(),
+            recorder=recorder or self.application_recorder(),
+        )

     @abstractmethod
     def aggregate_recorder(self, purpose: str = "events") -> AggregateRecorder:
-        """
-        Constructs an aggregate recorder.
-        """
+        """Constructs an aggregate recorder."""

     @abstractmethod
     def application_recorder(self) -> ApplicationRecorder:
-        """
-        Constructs an application recorder.
-        """
+        """Constructs an application recorder."""
+
+    @abstractmethod
+    def tracking_recorder(
+        self, tracking_recorder_class: type[TTrackingRecorder] | None = None
+    ) -> TTrackingRecorder:
+        """Constructs a tracking recorder."""

     @abstractmethod
     def process_recorder(self) -> ProcessRecorder:
-        """
-        Constructs a process recorder.
-        """
+        """Constructs a process recorder."""

     def is_snapshotting_enabled(self) -> bool:
-        """
-        Decides whether or not snapshotting is enabled by
+        """Decides whether or not snapshotting is enabled by
         reading environment variable 'IS_SNAPSHOTTING_ENABLED'.
         Snapshotting is not enabled by default.
         """
         return strtobool(self.env.get(self.IS_SNAPSHOTTING_ENABLED, "no"))

     def close(self) -> None:
-        """
-        Closes any database connections, or anything else that needs closing.
-        """
+        """Closes any database connections, and anything else that needs closing."""


 @dataclass(frozen=True)
 class Tracking:
-    """
-    Frozen dataclass representing the position of a domain
+    """Frozen dataclass representing the position of a domain
     event :class:`Notification` in an application's notification log.
     """

@@ -841,20 +865,15 @@ TConnection = TypeVar("TConnection", bound=Connection[Any])


 class ConnectionPoolClosedError(EventSourcingError):
-    """
-    Raised when using a connection pool that is already closed.
-    """
+    """Raised when using a connection pool that is already closed."""


 class ConnectionNotFromPoolError(EventSourcingError):
-    """
-    Raised when putting a connection in the wrong pool.
-    """
+    """Raised when putting a connection in the wrong pool."""


 class ConnectionUnavailableError(OperationalError, TimeoutError):
-    """
-    Raised when a request to get a connection from a
+    """Raised when a request to get a connection from a
     connection pool times out.
     """

@@ -870,8 +889,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
         pre_ping: bool = False,
         mutually_exclusive_read_write: bool = False,
     ) -> None:
-        """
-        Initialises a new connection pool.
+        """Initialises a new connection pool.

         The 'pool_size' argument specifies the maximum number of connections
         that will be put into the pool when connections are returned. The
@@ -906,7 +924,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
         self.max_age = max_age
         self.pre_ping = pre_ping
         self._pool: deque[TConnection] = deque()
-        self._in_use: Dict[int, TConnection] = {}
+        self._in_use: dict[int, TConnection] = {}
         self._get_semaphore = Semaphore()
         self._put_condition = Condition()
         self._no_readers = Condition()
@@ -922,9 +940,7 @@ class ConnectionPool(ABC, Generic[TConnection]):

     @property
     def num_in_use(self) -> int:
-        """
-        Indicates the total number of connections currently in use.
-        """
+        """Indicates the total number of connections currently in use."""
         with self._put_condition:
             return self._num_in_use

@@ -934,9 +950,7 @@ class ConnectionPool(ABC, Generic[TConnection]):

     @property
     def num_in_pool(self) -> int:
-        """
-        Indicates the number of connections currently in the pool.
-        """
+        """Indicates the number of connections currently in the pool."""
        with self._put_condition:
             return self._num_in_pool

@@ -955,8 +969,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
     def get_connection(
         self, timeout: float | None = None, is_writer: bool | None = None
     ) -> TConnection:
-        """
-        Issues connections, or raises ConnectionPoolExhausted error.
+        """Issues connections, or raises ConnectionPoolExhausted error.
         Provides "fairness" on attempts to get connections, meaning that
         connections are issued in the same order as they are requested.

@@ -1040,8 +1053,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
             raise ConnectionUnavailableError(msg)

     def _get_connection(self, timeout: float = 0.0) -> TConnection:
-        """
-        Gets or creates connections from pool within given
+        """Gets or creates connections from pool within given
         time, otherwise raises a "pool exhausted" error.

         Waits for connections to be returned if the pool
@@ -1108,8 +1120,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
         return conn

     def put_connection(self, conn: TConnection) -> None:
-        """
-        Returns connections to the pool, or closes connection
+        """Returns connections to the pool, or closes connection
         if the pool is full.

         Unlocks write lock after writer has returned, and
@@ -1118,7 +1129,6 @@ class ConnectionPool(ABC, Generic[TConnection]):
         Notifies waiters when connections have been returned,
         and when there are no longer any readers.
         """
-
         # Start forgetting if this connection was for reading or writing.
         is_writer, conn.is_writer = conn.is_writer, None

@@ -1165,8 +1175,7 @@ class ConnectionPool(ABC, Generic[TConnection]):

     @abstractmethod
     def _create_connection(self) -> TConnection:
-        """
-        Create a new connection.
+        """Create a new connection.

         Subclasses should implement this method by
         creating a database connection of the type
@@ -1174,9 +1183,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
         """

     def close(self) -> None:
-        """
-        Close the connection pool.
-        """
+        """Close the connection pool."""
         with self._put_condition:
             if self._closed:
                 return
@@ -1197,3 +1204,121 @@ class ConnectionPool(ABC, Generic[TConnection]):

     def __del__(self) -> None:
         self.close()
+
+
+TApplicationRecorder_co = TypeVar(
+    "TApplicationRecorder_co", bound=ApplicationRecorder, covariant=True
+)
+
+
+class Subscription(Iterator[Notification], Generic[TApplicationRecorder_co]):
+    def __init__(
+        self,
+        recorder: TApplicationRecorder_co,
+        gt: int | None = None,
+        topics: Sequence[str] = (),
+    ) -> None:
+        self._recorder = recorder
+        self._last_notification_id = gt
+        self._topics = topics
+        self._has_been_entered = False
+        self._has_been_stopped = False
+
+    def __enter__(self) -> Self:
+        if self._has_been_entered:
+            msg = "Already entered subscription context manager"
+            raise ProgrammingError(msg)
+        self._has_been_entered = True
+        return self
+
+    def __exit__(self, *args: object, **kwargs: Any) -> None:
+        if not self._has_been_entered:
+            msg = "Not already entered subscription context manager"
+            raise ProgrammingError(msg)
+        self.stop()
+
+    def stop(self) -> None:
+        """Stops the subscription."""
+        self._has_been_stopped = True
+
+    def __iter__(self) -> Self:
+        return self
+
+    @abstractmethod
+    def __next__(self) -> Notification:
+        """Returns the next Notification object in the application sequence."""
+
+
+class ListenNotifySubscription(Subscription[TApplicationRecorder_co]):
+    def __init__(
+        self,
+        recorder: TApplicationRecorder_co,
+        gt: int | None = None,
+        topics: Sequence[str] = (),
+    ) -> None:
+        super().__init__(recorder=recorder, gt=gt, topics=topics)
+        self._select_limit = 500
+        self._notifications: list[Notification] = []
+        self._notifications_index: int = 0
+        self._notifications_queue: Queue[list[Notification]] = Queue(maxsize=10)
+        self._has_been_notified = Event()
+        self._thread_error: BaseException | None = None
+        self._pull_thread = Thread(target=self._loop_on_pull)
+        self._pull_thread.start()
+
+    def __exit__(self, *args: object, **kwargs: Any) -> None:
+        super().__exit__(*args, **kwargs)
+        self._pull_thread.join()
+
+    def stop(self) -> None:
+        """Stops the subscription."""
+        super().stop()
+        self._notifications_queue.put([])
+        self._has_been_notified.set()
+
+    def __next__(self) -> Notification:
+        # If necessary, get a new list of notifications from the recorder.
+        if (
+            self._notifications_index == len(self._notifications)
+            and not self._has_been_stopped
+        ):
+            self._notifications = self._notifications_queue.get()
+            self._notifications_index = 0
+
+        # Stop the iteration if necessary, maybe raise thread error.
+        if self._has_been_stopped or not self._notifications:
+            if self._thread_error is not None:
+                raise self._thread_error
+            raise StopIteration
+
+        # Return a notification from previously obtained list.
+        notification = self._notifications[self._notifications_index]
+        self._notifications_index += 1
+        return notification
+
+    def _loop_on_pull(self) -> None:
+        try:
+            self._pull()  # Already recorded events.
+            while not self._has_been_stopped:
+                self._has_been_notified.wait()
+                self._pull()  # Newly recorded events.
+        except BaseException as e:
+            if self._thread_error is None:
+                self._thread_error = e
+            self.stop()
+
+    def _pull(self) -> None:
+        while not self._has_been_stopped:
+            self._has_been_notified.clear()
+            notifications = self._recorder.select_notifications(
+                start=self._last_notification_id or 0,
+                limit=self._select_limit,
+                topics=self._topics,
+                inclusive_of_start=False,
+            )
+            if len(notifications) > 0:
+                # print("Putting", len(notifications), "notifications into queue")
+                self._notifications_queue.put(notifications)
+                self._last_notification_id = notifications[-1].id
+            if len(notifications) < self._select_limit:
+                break
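
The last hunk above introduces a blocking subscription API on ApplicationRecorder. A minimal sketch of how it might be consumed, assuming `recorder` is some concrete ApplicationRecorder from this package and `handle_notification` is a caller-defined callback (neither appears in the diff itself):

    def follow(recorder, handle_notification, gt=None):
        # subscribe() returns a Subscription; entering it as a context manager
        # and iterating yields Notification objects, blocking after the last
        # recorded event until new events are recorded.
        with recorder.subscribe(gt=gt) as subscription:
            for notification in subscription:
                handle_notification(notification)  # e.g. update a read model
                gt = notification.id  # remember the last position seen

Calling `subscription.stop()` from another thread, or leaving the `with` block, ends the iteration. Similarly, the new TrackingRecorder.wait() shown above can be used to block until a downstream recorder has tracked a given notification: per its docstring it polls max_tracking_id() with exponential backoff, raising TimeoutError when the timeout is reached or WaitInterruptedError if the optional interrupt Event is set.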