eventsourcing 9.2.22__py3-none-any.whl → 9.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (144)
  1. eventsourcing/__init__.py +1 -1
  2. eventsourcing/application.py +116 -135
  3. eventsourcing/cipher.py +15 -12
  4. eventsourcing/dispatch.py +31 -91
  5. eventsourcing/domain.py +220 -226
  6. eventsourcing/examples/__init__.py +0 -0
  7. eventsourcing/examples/aggregate1/__init__.py +0 -0
  8. eventsourcing/examples/aggregate1/application.py +27 -0
  9. eventsourcing/examples/aggregate1/domainmodel.py +16 -0
  10. eventsourcing/examples/aggregate1/test_application.py +37 -0
  11. eventsourcing/examples/aggregate2/__init__.py +0 -0
  12. eventsourcing/examples/aggregate2/application.py +27 -0
  13. eventsourcing/examples/aggregate2/domainmodel.py +22 -0
  14. eventsourcing/examples/aggregate2/test_application.py +37 -0
  15. eventsourcing/examples/aggregate3/__init__.py +0 -0
  16. eventsourcing/examples/aggregate3/application.py +27 -0
  17. eventsourcing/examples/aggregate3/domainmodel.py +38 -0
  18. eventsourcing/examples/aggregate3/test_application.py +37 -0
  19. eventsourcing/examples/aggregate4/__init__.py +0 -0
  20. eventsourcing/examples/aggregate4/application.py +27 -0
  21. eventsourcing/examples/aggregate4/domainmodel.py +114 -0
  22. eventsourcing/examples/aggregate4/test_application.py +38 -0
  23. eventsourcing/examples/aggregate5/__init__.py +0 -0
  24. eventsourcing/examples/aggregate5/application.py +27 -0
  25. eventsourcing/examples/aggregate5/domainmodel.py +131 -0
  26. eventsourcing/examples/aggregate5/test_application.py +38 -0
  27. eventsourcing/examples/aggregate6/__init__.py +0 -0
  28. eventsourcing/examples/aggregate6/application.py +30 -0
  29. eventsourcing/examples/aggregate6/domainmodel.py +123 -0
  30. eventsourcing/examples/aggregate6/test_application.py +38 -0
  31. eventsourcing/examples/aggregate6a/__init__.py +0 -0
  32. eventsourcing/examples/aggregate6a/application.py +40 -0
  33. eventsourcing/examples/aggregate6a/domainmodel.py +149 -0
  34. eventsourcing/examples/aggregate6a/test_application.py +45 -0
  35. eventsourcing/examples/aggregate7/__init__.py +0 -0
  36. eventsourcing/examples/aggregate7/application.py +48 -0
  37. eventsourcing/examples/aggregate7/domainmodel.py +144 -0
  38. eventsourcing/examples/aggregate7/persistence.py +57 -0
  39. eventsourcing/examples/aggregate7/test_application.py +38 -0
  40. eventsourcing/examples/aggregate7/test_compression_and_encryption.py +45 -0
  41. eventsourcing/examples/aggregate7/test_snapshotting_intervals.py +67 -0
  42. eventsourcing/examples/aggregate7a/__init__.py +0 -0
  43. eventsourcing/examples/aggregate7a/application.py +56 -0
  44. eventsourcing/examples/aggregate7a/domainmodel.py +170 -0
  45. eventsourcing/examples/aggregate7a/test_application.py +46 -0
  46. eventsourcing/examples/aggregate7a/test_compression_and_encryption.py +45 -0
  47. eventsourcing/examples/aggregate8/__init__.py +0 -0
  48. eventsourcing/examples/aggregate8/application.py +47 -0
  49. eventsourcing/examples/aggregate8/domainmodel.py +65 -0
  50. eventsourcing/examples/aggregate8/persistence.py +57 -0
  51. eventsourcing/examples/aggregate8/test_application.py +37 -0
  52. eventsourcing/examples/aggregate8/test_compression_and_encryption.py +44 -0
  53. eventsourcing/examples/aggregate8/test_snapshotting_intervals.py +38 -0
  54. eventsourcing/examples/bankaccounts/__init__.py +0 -0
  55. eventsourcing/examples/bankaccounts/application.py +70 -0
  56. eventsourcing/examples/bankaccounts/domainmodel.py +56 -0
  57. eventsourcing/examples/bankaccounts/test.py +173 -0
  58. eventsourcing/examples/cargoshipping/__init__.py +0 -0
  59. eventsourcing/examples/cargoshipping/application.py +126 -0
  60. eventsourcing/examples/cargoshipping/domainmodel.py +330 -0
  61. eventsourcing/examples/cargoshipping/interface.py +143 -0
  62. eventsourcing/examples/cargoshipping/test.py +231 -0
  63. eventsourcing/examples/contentmanagement/__init__.py +0 -0
  64. eventsourcing/examples/contentmanagement/application.py +118 -0
  65. eventsourcing/examples/contentmanagement/domainmodel.py +69 -0
  66. eventsourcing/examples/contentmanagement/test.py +180 -0
  67. eventsourcing/examples/contentmanagement/utils.py +26 -0
  68. eventsourcing/examples/contentmanagementsystem/__init__.py +0 -0
  69. eventsourcing/examples/contentmanagementsystem/application.py +54 -0
  70. eventsourcing/examples/contentmanagementsystem/postgres.py +17 -0
  71. eventsourcing/examples/contentmanagementsystem/sqlite.py +17 -0
  72. eventsourcing/examples/contentmanagementsystem/system.py +14 -0
  73. eventsourcing/examples/contentmanagementsystem/test_system.py +180 -0
  74. eventsourcing/examples/searchablecontent/__init__.py +0 -0
  75. eventsourcing/examples/searchablecontent/application.py +45 -0
  76. eventsourcing/examples/searchablecontent/persistence.py +23 -0
  77. eventsourcing/examples/searchablecontent/postgres.py +118 -0
  78. eventsourcing/examples/searchablecontent/sqlite.py +136 -0
  79. eventsourcing/examples/searchablecontent/test_application.py +110 -0
  80. eventsourcing/examples/searchablecontent/test_recorder.py +68 -0
  81. eventsourcing/examples/searchabletimestamps/__init__.py +0 -0
  82. eventsourcing/examples/searchabletimestamps/application.py +32 -0
  83. eventsourcing/examples/searchabletimestamps/persistence.py +20 -0
  84. eventsourcing/examples/searchabletimestamps/postgres.py +110 -0
  85. eventsourcing/examples/searchabletimestamps/sqlite.py +99 -0
  86. eventsourcing/examples/searchabletimestamps/test_searchabletimestamps.py +94 -0
  87. eventsourcing/examples/test_invoice.py +176 -0
  88. eventsourcing/examples/test_parking_lot.py +206 -0
  89. eventsourcing/interface.py +2 -2
  90. eventsourcing/persistence.py +85 -81
  91. eventsourcing/popo.py +30 -31
  92. eventsourcing/postgres.py +379 -590
  93. eventsourcing/sqlite.py +91 -99
  94. eventsourcing/system.py +52 -57
  95. eventsourcing/tests/application.py +20 -32
  96. eventsourcing/tests/application_tests/__init__.py +0 -0
  97. eventsourcing/tests/application_tests/test_application_with_automatic_snapshotting.py +55 -0
  98. eventsourcing/tests/application_tests/test_application_with_popo.py +22 -0
  99. eventsourcing/tests/application_tests/test_application_with_postgres.py +75 -0
  100. eventsourcing/tests/application_tests/test_application_with_sqlite.py +72 -0
  101. eventsourcing/tests/application_tests/test_cache.py +134 -0
  102. eventsourcing/tests/application_tests/test_event_sourced_log.py +162 -0
  103. eventsourcing/tests/application_tests/test_notificationlog.py +232 -0
  104. eventsourcing/tests/application_tests/test_notificationlogreader.py +126 -0
  105. eventsourcing/tests/application_tests/test_processapplication.py +110 -0
  106. eventsourcing/tests/application_tests/test_processingpolicy.py +109 -0
  107. eventsourcing/tests/application_tests/test_repository.py +504 -0
  108. eventsourcing/tests/application_tests/test_snapshotting.py +68 -0
  109. eventsourcing/tests/application_tests/test_upcasting.py +459 -0
  110. eventsourcing/tests/docs_tests/__init__.py +0 -0
  111. eventsourcing/tests/docs_tests/test_docs.py +293 -0
  112. eventsourcing/tests/domain.py +1 -1
  113. eventsourcing/tests/domain_tests/__init__.py +0 -0
  114. eventsourcing/tests/domain_tests/test_aggregate.py +1180 -0
  115. eventsourcing/tests/domain_tests/test_aggregate_decorators.py +1604 -0
  116. eventsourcing/tests/domain_tests/test_domainevent.py +80 -0
  117. eventsourcing/tests/interface_tests/__init__.py +0 -0
  118. eventsourcing/tests/interface_tests/test_remotenotificationlog.py +258 -0
  119. eventsourcing/tests/persistence.py +52 -50
  120. eventsourcing/tests/persistence_tests/__init__.py +0 -0
  121. eventsourcing/tests/persistence_tests/test_aes.py +93 -0
  122. eventsourcing/tests/persistence_tests/test_connection_pool.py +722 -0
  123. eventsourcing/tests/persistence_tests/test_eventstore.py +72 -0
  124. eventsourcing/tests/persistence_tests/test_infrastructure_factory.py +21 -0
  125. eventsourcing/tests/persistence_tests/test_mapper.py +113 -0
  126. eventsourcing/tests/persistence_tests/test_noninterleaving_notification_ids.py +69 -0
  127. eventsourcing/tests/persistence_tests/test_popo.py +124 -0
  128. eventsourcing/tests/persistence_tests/test_postgres.py +1119 -0
  129. eventsourcing/tests/persistence_tests/test_sqlite.py +348 -0
  130. eventsourcing/tests/persistence_tests/test_transcoder.py +44 -0
  131. eventsourcing/tests/postgres_utils.py +7 -7
  132. eventsourcing/tests/system_tests/__init__.py +0 -0
  133. eventsourcing/tests/system_tests/test_runner.py +935 -0
  134. eventsourcing/tests/system_tests/test_system.py +284 -0
  135. eventsourcing/tests/utils_tests/__init__.py +0 -0
  136. eventsourcing/tests/utils_tests/test_utils.py +226 -0
  137. eventsourcing/utils.py +47 -50
  138. {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0.dist-info}/METADATA +29 -79
  139. eventsourcing-9.3.0.dist-info/RECORD +145 -0
  140. {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0.dist-info}/WHEEL +1 -2
  141. eventsourcing-9.2.22.dist-info/RECORD +0 -25
  142. eventsourcing-9.2.22.dist-info/top_level.txt +0 -1
  143. {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0.dist-info}/AUTHORS +0 -0
  144. {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0.dist-info}/LICENSE +0 -0
@@ -12,12 +12,11 @@ from time import time
  from types import ModuleType
  from typing import (
  Any,
- Deque,
  Dict,
  Generic,
  Iterator,
  List,
- Optional,
+ Mapping,
  Sequence,
  Type,
  TypeVar,
@@ -109,11 +108,12 @@ class JSONTranscoder(Transcoder):
  try:
  transcoding = self.types[type(o)]
  except KeyError:
- raise TypeError(
+ msg = (
  f"Object of type {type(o)} is not "
  "serializable. Please define and register "
  "a custom transcoding for this type."
- ) from None
+ )
+ raise TypeError(msg) from None
  else:
  return {
  "_type_": transcoding.name,
@@ -134,12 +134,13 @@ class JSONTranscoder(Transcoder):
  else:
  try:
  transcoding = self.names[cast(str, _type_)]
- except KeyError:
- raise TypeError(
+ except KeyError as e:
+ msg = (
  f"Data serialized with name '{cast(str, _type_)}' is not "
  "deserializable. Please register a "
  "custom transcoding for this type."
  )
+ raise TypeError(msg) from e
  else:
  return transcoding.decode(_data_)
  else:
@@ -266,8 +267,8 @@ class Mapper:
  def __init__(
  self,
  transcoder: Transcoder,
- compressor: Optional[Compressor] = None,
- cipher: Optional[Cipher] = None,
+ compressor: Compressor | None = None,
+ cipher: Cipher | None = None,
  ):
  self.transcoder = transcoder
  self.compressor = compressor
@@ -365,7 +366,7 @@ class DataError(DatabaseError):

  class OperationalError(DatabaseError):
  """
- Exception raised for errors that are related to the databases
+ Exception raised for errors that are related to the database's
  operation and not necessarily under the control of the programmer,
  e.g. an unexpected disconnect occurs, the data source name is not
  found, a transaction could not be processed, a memory allocation
@@ -414,7 +415,7 @@ class AggregateRecorder(ABC):
  @abstractmethod
  def insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  """
  Writes stored events into database.
  """
@@ -423,10 +424,11 @@ class AggregateRecorder(ABC):
  def select_events(
  self,
  originator_id: UUID,
- gt: Optional[int] = None,
- lte: Optional[int] = None,
+ *,
+ gt: int | None = None,
+ lte: int | None = None,
  desc: bool = False,
- limit: Optional[int] = None,
+ limit: int | None = None,
  ) -> List[StoredEvent]:
  """
  Reads stored events from database.
@@ -458,7 +460,7 @@ class ApplicationRecorder(AggregateRecorder):
  self,
  start: int,
  limit: int,
- stop: Optional[int] = None,
+ stop: int | None = None,
  topics: Sequence[str] = (),
  ) -> List[Notification]:
  """
@@ -549,10 +551,11 @@ class EventStore:
  def get(
  self,
  originator_id: UUID,
- gt: Optional[int] = None,
- lte: Optional[int] = None,
+ *,
+ gt: int | None = None,
+ lte: int | None = None,
  desc: bool = False,
- limit: Optional[int] = None,
+ limit: int | None = None,
  ) -> Iterator[DomainEventProtocol]:
  """
  Retrieves domain events from aggregate sequence.
@@ -611,37 +614,39 @@ class InfrastructureFactory(ABC):
  or "eventsourcing.popo"
  )
  try:
- obj: Union[Type[InfrastructureFactory], ModuleType] = resolve_topic(topic)
+ obj: Type[InfrastructureFactory] | ModuleType = resolve_topic(topic)
  except TopicError as e:
- raise EnvironmentError(
+ msg = (
  "Failed to resolve persistence module topic: "
  f"'{topic}' from environment "
  f"variable '{cls.PERSISTENCE_MODULE}'"
- ) from e
+ )
+ raise OSError(msg) from e

  if isinstance(obj, ModuleType):
  # Find the factory in the module.
- factory_classes: List[Type[InfrastructureFactory]] = []
- for member in obj.__dict__.values():
+ factory_classes: List[Type[InfrastructureFactory]] = [
+ member
+ for member in obj.__dict__.values()
  if (
  member is not InfrastructureFactory
  and isinstance(member, type)
  and issubclass(member, InfrastructureFactory)
- ):
- factory_classes.append(member)
+ )
+ ]
  if len(factory_classes) == 1:
  factory_cls = factory_classes[0]
  else:
- raise AssertionError(
+ msg = (
  f"Found {len(factory_classes)} infrastructure factory classes in"
  f" '{topic}', expected 1."
  )
+ raise AssertionError(msg)
  elif isinstance(obj, type) and issubclass(obj, InfrastructureFactory):
  factory_cls = obj
  else:
- raise AssertionError(
- f"Not an infrastructure factory class or module: {topic}"
- )
+ msg = f"Not an infrastructure factory class or module: {topic}"
+ raise AssertionError(msg)
  return cast(TInfrastructureFactory, factory_cls(env=env))

  def __init__(self, env: Environment):
@@ -656,7 +661,7 @@ class InfrastructureFactory(ABC):
  """
  Constructs a transcoder.
  """
- # Todo: Implement support for TRANSCODER_TOPIC.
+ # TODO: Implement support for TRANSCODER_TOPIC.
  return JSONTranscoder()

  def mapper(
@@ -665,21 +670,21 @@ class InfrastructureFactory(ABC):
  """
  Constructs a mapper.
  """
- # Todo: Implement support for MAPPER_TOPIC.
+ # TODO: Implement support for MAPPER_TOPIC.
  return mapper_class(
  transcoder=transcoder,
  cipher=self.cipher(),
  compressor=self.compressor(),
  )

- def cipher(self) -> Optional[Cipher]:
+ def cipher(self) -> Cipher | None:
  """
  Reads environment variables 'CIPHER_TOPIC'
  and 'CIPHER_KEY' to decide whether or not
  to construct a cipher.
  """
  cipher_topic = self.env.get(self.CIPHER_TOPIC)
- cipher: Optional[Cipher] = None
+ cipher: Cipher | None = None
  default_cipher_topic = "eventsourcing.cipher:AESCipher"
  if self.env.get("CIPHER_KEY") and not cipher_topic:
  cipher_topic = default_cipher_topic
@@ -690,15 +695,15 @@ class InfrastructureFactory(ABC):

  return cipher

- def compressor(self) -> Optional[Compressor]:
+ def compressor(self) -> Compressor | None:
  """
  Reads environment variable 'COMPRESSOR_TOPIC' to
  decide whether or not to construct a compressor.
  """
- compressor: Optional[Compressor] = None
+ compressor: Compressor | None = None
  compressor_topic = self.env.get(self.COMPRESSOR_TOPIC)
  if compressor_topic:
- compressor_cls: Union[Type[Compressor], Compressor] = resolve_topic(
+ compressor_cls: Type[Compressor] | Compressor = resolve_topic(
  compressor_topic
  )
  if isinstance(compressor_cls, type):
@@ -757,9 +762,12 @@ class Tracking:
  notification_id: int


+ Params = Union[Sequence[Any], Mapping[str, Any]]
+
+
  class Cursor(ABC):
  @abstractmethod
- def execute(self, statement: Union[str, bytes], params: Any = None) -> None:
+ def execute(self, statement: str | bytes, params: Params | None = None) -> None:
  """Executes given statement."""

  @abstractmethod
@@ -775,14 +783,14 @@ TCursor = TypeVar("TCursor", bound=Cursor)


  class Connection(ABC, Generic[TCursor]):
- def __init__(self, max_age: Optional[float] = None) -> None:
+ def __init__(self, max_age: float | None = None) -> None:
  self._closed = False
  self._closing = Event()
  self._close_lock = Lock()
  self.in_use = Lock()
  self.in_use.acquire()
  if max_age is not None:
- self._max_age_timer: Optional[Timer] = Timer(
+ self._max_age_timer: Timer | None = Timer(
  interval=max_age,
  function=self._close_when_not_in_use,
  )
@@ -790,7 +798,7 @@ class Connection(ABC, Generic[TCursor]):
  self._max_age_timer.start()
  else:
  self._max_age_timer = None
- self.is_writer: Optional[bool] = None
+ self.is_writer: bool | None = None

  @property
  def closed(self) -> bool:
@@ -832,19 +840,19 @@ class Connection(ABC, Generic[TCursor]):
  TConnection = TypeVar("TConnection", bound=Connection[Any])


- class ConnectionPoolClosed(EventSourcingError):
+ class ConnectionPoolClosedError(EventSourcingError):
  """
  Raised when using a connection pool that is already closed.
  """


- class ConnectionNotFromPool(EventSourcingError):
+ class ConnectionNotFromPoolError(EventSourcingError):
  """
  Raised when putting a connection in the wrong pool.
  """


- class ConnectionUnavailable(OperationalError, TimeoutError):
+ class ConnectionUnavailableError(OperationalError, TimeoutError):
  """
  Raised when a request to get a connection from a
  connection pool times out.
@@ -854,10 +862,11 @@ class ConnectionUnavailable(OperationalError, TimeoutError):
  class ConnectionPool(ABC, Generic[TConnection]):
  def __init__(
  self,
+ *,
  pool_size: int = 5,
  max_overflow: int = 10,
  pool_timeout: float = 30.0,
- max_age: Optional[float] = None,
+ max_age: float | None = None,
  pre_ping: bool = False,
  mutually_exclusive_read_write: bool = False,
  ) -> None:
@@ -896,8 +905,8 @@ class ConnectionPool(ABC, Generic[TConnection]):
  self.pool_timeout = pool_timeout
  self.max_age = max_age
  self.pre_ping = pre_ping
- self._pool: Deque[TConnection] = deque()
- self._in_use: Dict[int, TConnection] = dict()
+ self._pool: deque[TConnection] = deque()
+ self._in_use: Dict[int, TConnection] = {}
  self._get_semaphore = Semaphore()
  self._put_condition = Condition()
  self._no_readers = Condition()
@@ -907,6 +916,10 @@ class ConnectionPool(ABC, Generic[TConnection]):
  self._mutually_exclusive_read_write = mutually_exclusive_read_write
  self._closed = False

+ @property
+ def closed(self) -> bool:
+ return self._closed
+
  @property
  def num_in_use(self) -> int:
  """
@@ -940,7 +953,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  return self._num_in_use >= self.pool_size + self.max_overflow

  def get_connection(
- self, timeout: Optional[float] = None, is_writer: Optional[bool] = None
+ self, timeout: float | None = None, is_writer: bool | None = None
  ) -> TConnection:
  """
  Issues connections, or raises ConnectionPoolExhausted error.
@@ -968,7 +981,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  """
  # Make sure we aren't dealing with a closed pool.
  if self._closed:
- raise ConnectionPoolClosed
+ raise ConnectionPoolClosedError

  # Decide the timeout for getting a connection.
  timeout = self.pool_timeout if timeout is None else timeout
@@ -984,20 +997,16 @@ class ConnectionPool(ABC, Generic[TConnection]):
  if not self._writer_lock.acquire(
  timeout=self._time_remaining(timeout, started)
  ):
- raise ConnectionUnavailable(
- "Timed out waiting for return of writer"
- )
+ msg = "Timed out waiting for return of writer"
+ raise ConnectionUnavailableError(msg)
  if self._mutually_exclusive_read_write:
  with self._no_readers:
- if self._num_readers > 0:
- # print("writer waiting")
- if not self._no_readers.wait(
- timeout=self._time_remaining(timeout, started)
- ):
- self._writer_lock.release()
- raise ConnectionUnavailable(
- "Timed out waiting for return of reader"
- )
+ if self._num_readers > 0 and not self._no_readers.wait(
+ timeout=self._time_remaining(timeout, started)
+ ):
+ self._writer_lock.release()
+ msg = "Timed out waiting for return of reader"
+ raise ConnectionUnavailableError(msg)
  self._num_writers += 1

  # If connection is for reading, and writing excludes reading,
@@ -1007,9 +1016,8 @@ class ConnectionPool(ABC, Generic[TConnection]):
  if not self._writer_lock.acquire(
  timeout=self._time_remaining(timeout, started)
  ):
- raise ConnectionUnavailable(
- "Timed out waiting for return of writer"
- )
+ msg = "Timed out waiting for return of writer"
+ raise ConnectionUnavailableError(msg)
  self._writer_lock.release()
  with self._no_readers:
  self._num_readers += 1
@@ -1028,9 +1036,8 @@ class ConnectionPool(ABC, Generic[TConnection]):
  self._get_semaphore.release()
  else:
  # Timed out waiting for semaphore.
- raise ConnectionUnavailable(
- "Timed out waiting for connection pool semaphore"
- )
+ msg = "Timed out waiting for connection pool semaphore"
+ raise ConnectionUnavailableError(msg)

  def _get_connection(self, timeout: float = 0.0) -> TConnection:
  """
@@ -1060,18 +1067,15 @@ class ConnectionPool(ABC, Generic[TConnection]):
  return self._get_connection(
  timeout=self._time_remaining(timeout, started)
  )
- else:
- # Timed out waiting for a connection to be returned.
- raise ConnectionUnavailable(
- "Timed out waiting for return of connection"
- ) from None
- else:
- # Not fully used, so create a new connection.
- conn = self._create_connection()
- # print("created another connection")
+ # Timed out waiting for a connection to be returned.
+ msg = "Timed out waiting for return of connection"
+ raise ConnectionUnavailableError(msg) from None
+ # Not fully used, so create a new connection.
+ conn = self._create_connection()
+ # print("created another connection")

- # Connection should be pre-locked for use (avoids timer race).
- assert conn.in_use.locked()
+ # Connection should be pre-locked for use (avoids timer race).
+ assert conn.in_use.locked()

  else:
  # Got unused connection from pool, so lock for use.
@@ -1122,15 +1126,15 @@ class ConnectionPool(ABC, Generic[TConnection]):
  with self._put_condition:
  # Make sure we aren't dealing with a closed pool
  if self._closed:
- raise ConnectionPoolClosed("Pool is closed")
+ msg = "Pool is closed"
+ raise ConnectionPoolClosedError(msg)

  # Make sure we are dealing with a connection from this pool.
  try:
  del self._in_use[id(conn)]
  except KeyError:
- raise ConnectionNotFromPool(
- "Connection not in use in this pool"
- ) from None
+ msg = "Connection not in use in this pool"
+ raise ConnectionNotFromPoolError(msg) from None

  if not conn.closed:
  # Put open connection in pool if not full.
@@ -1181,7 +1185,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  while True:
  try:
  conn = self._pool.popleft()
- except IndexError:
+ except IndexError:  # noqa: PERF203
  break
  else:
  conn.close()
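
The hunks above rename the connection pool exceptions (`ConnectionPoolClosed` → `ConnectionPoolClosedError`, `ConnectionNotFromPool` → `ConnectionNotFromPoolError`, `ConnectionUnavailable` → `ConnectionUnavailableError`) and make the optional arguments of `AggregateRecorder.select_events` and `EventStore.get` keyword-only (note the added `*,`). A minimal sketch of how code catching the pool exceptions might adapt; the `get_connection_or_none` helper is illustrative only, and only the names shown in this diff are assumed:

```python
from eventsourcing.persistence import (
    ConnectionPool,
    ConnectionPoolClosedError,   # was ConnectionPoolClosed
    ConnectionUnavailableError,  # was ConnectionUnavailable
)


def get_connection_or_none(pool: ConnectionPool, timeout: float = 5.0):
    """Return a pooled connection, or None if the pool times out."""
    try:
        # get_connection() still accepts an optional timeout, per the hunk above.
        return pool.get_connection(timeout=timeout)
    except ConnectionUnavailableError:
        # 9.3.0 name; code catching ConnectionUnavailable needs updating.
        return None
    except ConnectionPoolClosedError:
        # Raised when the pool has already been closed.
        raise
```

The exceptions keep their base classes (`ConnectionUnavailableError` is still an `OperationalError` and a `TimeoutError`), so only the class names change for callers.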
eventsourcing/popo.py CHANGED
@@ -2,8 +2,7 @@ from __future__ import annotations

  from collections import defaultdict
  from threading import Lock
- from typing import Any, Dict, Iterable, List, Optional, Sequence, Set
- from uuid import UUID
+ from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Sequence

  from eventsourcing.persistence import (
  AggregateRecorder,
@@ -17,6 +16,9 @@ from eventsourcing.persistence import (
  )
  from eventsourcing.utils import reversed_keys

+ if TYPE_CHECKING: # pragma: nocover
+ from uuid import UUID
+

  class POPOAggregateRecorder(AggregateRecorder):
  def __init__(self) -> None:
@@ -26,33 +28,33 @@ class POPOAggregateRecorder(AggregateRecorder):

  def insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  self._insert_events(stored_events, **kwargs)
  return None

  def _insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  with self._database_lock:
  self._assert_uniqueness(stored_events, **kwargs)
  return self._update_table(stored_events, **kwargs)

- def _assert_uniqueness(
- self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> None:
+ def _assert_uniqueness(self, stored_events: List[StoredEvent], **_: Any) -> None:
  new = set()
  for s in stored_events:
  # Check events don't already exist.
  if s.originator_version in self._stored_events_index[s.originator_id]:
- raise IntegrityError(f"Stored event already recorded: {s}")
+ msg = f"Stored event already recorded: {s}"
+ raise IntegrityError(msg)
  new.add((s.originator_id, s.originator_version))
  # Check new events are unique.
  if len(new) < len(stored_events):
- raise IntegrityError(f"Stored events are not unique: {stored_events}")
+ msg = f"Stored events are not unique: {stored_events}"
+ raise IntegrityError(msg)

  def _update_table(
- self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ self, stored_events: List[StoredEvent], **_: Any
+ ) -> Sequence[int] | None:
  notification_ids = []
  for s in stored_events:
  self._stored_events.append(s)
@@ -65,27 +67,23 @@ class POPOAggregateRecorder(AggregateRecorder):
  def select_events(
  self,
  originator_id: UUID,
- gt: Optional[int] = None,
- lte: Optional[int] = None,
+ *,
+ gt: int | None = None,
+ lte: int | None = None,
  desc: bool = False,
- limit: Optional[int] = None,
+ limit: int | None = None,
  ) -> List[StoredEvent]:
  with self._database_lock:
  results = []

  index = self._stored_events_index[originator_id]
  positions: Iterable[int]
- if desc:
- positions = reversed_keys(index)
- else:
- positions = index.keys()
+ positions = reversed_keys(index) if desc else index.keys()
  for p in positions:
- if gt is not None:
- if not p > gt:
- continue
- if lte is not None:
- if not p <= lte:
- continue
+ if gt is not None and not p > gt:
+ continue
+ if lte is not None and not p <= lte:
+ continue
  s = self._stored_events[index[p]]
  results.append(s)
  if len(results) == limit:
@@ -96,14 +94,14 @@ class POPOAggregateRecorder(AggregateRecorder):
  class POPOApplicationRecorder(ApplicationRecorder, POPOAggregateRecorder):
  def insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  return self._insert_events(stored_events, **kwargs)

  def select_notifications(
  self,
  start: int,
  limit: int,
- stop: Optional[int] = None,
+ stop: int | None = None,
  topics: Sequence[str] = (),
  ) -> List[Notification]:
  with self._database_lock:
@@ -140,25 +138,26 @@ class POPOApplicationRecorder(ApplicationRecorder, POPOAggregateRecorder):
  class POPOProcessRecorder(ProcessRecorder, POPOApplicationRecorder):
  def __init__(self) -> None:
  super().__init__()
- self._tracking_table: Dict[str, Set[int]] = defaultdict(set)
+ self._tracking_table: Dict[str, set[int]] = defaultdict(set)
  self._max_tracking_ids: Dict[str, int] = defaultdict(lambda: 0)

  def _assert_uniqueness(
  self, stored_events: List[StoredEvent], **kwargs: Any
  ) -> None:
  super()._assert_uniqueness(stored_events, **kwargs)
- t: Optional[Tracking] = kwargs.get("tracking", None)
+ t: Tracking | None = kwargs.get("tracking", None)
  if t and t.notification_id in self._tracking_table[t.application_name]:
- raise IntegrityError(
+ msg = (
  f"Already recorded notification ID {t.notification_id} "
  f"for application {t.application_name}"
  )
+ raise IntegrityError(msg)

  def _update_table(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  notification_ids = super()._update_table(stored_events, **kwargs)
- t: Optional[Tracking] = kwargs.get("tracking", None)
+ t: Tracking | None = kwargs.get("tracking", None)
  if t:
  self._tracking_table[t.application_name].add(t.notification_id)
  if self._max_tracking_ids[t.application_name] < t.notification_id:
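
For the plain-old-Python-objects recorder changed above, the caller-facing effect is the same keyword-only calling convention as in persistence.py. A minimal sketch, assuming only the classes shown in this diff plus `StoredEvent` and `IntegrityError` from `eventsourcing.persistence`; the topic string and state bytes are placeholder values:

```python
from uuid import uuid4

from eventsourcing.persistence import IntegrityError, StoredEvent
from eventsourcing.popo import POPOAggregateRecorder

recorder = POPOAggregateRecorder()
originator_id = uuid4()

# Record three stored events for one aggregate sequence.
events = [
    StoredEvent(
        originator_id=originator_id,
        originator_version=version,
        topic="example:SomethingHappened",  # placeholder topic
        state=b"{}",  # placeholder state
    )
    for version in (1, 2, 3)
]
recorder.insert_events(events)

# gt, lte, desc and limit are now keyword-only (note the added `*,` above).
latest = recorder.select_events(originator_id, desc=True, limit=1)
assert latest[0].originator_version == 3

# Re-recording an already stored version is still rejected.
try:
    recorder.insert_events(events[:1])
except IntegrityError:
    pass
```

Positional calls such as `recorder.select_events(originator_id, None, None, True, 1)`, which worked against the 9.2.x signature, would now raise a `TypeError`.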