eventsourcing 9.2.21__py3-none-any.whl → 9.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eventsourcing might be problematic.

Files changed (145)
  1. eventsourcing/__init__.py +1 -1
  2. eventsourcing/application.py +137 -132
  3. eventsourcing/cipher.py +17 -12
  4. eventsourcing/compressor.py +2 -0
  5. eventsourcing/dispatch.py +30 -56
  6. eventsourcing/domain.py +221 -227
  7. eventsourcing/examples/__init__.py +0 -0
  8. eventsourcing/examples/aggregate1/__init__.py +0 -0
  9. eventsourcing/examples/aggregate1/application.py +27 -0
  10. eventsourcing/examples/aggregate1/domainmodel.py +16 -0
  11. eventsourcing/examples/aggregate1/test_application.py +37 -0
  12. eventsourcing/examples/aggregate2/__init__.py +0 -0
  13. eventsourcing/examples/aggregate2/application.py +27 -0
  14. eventsourcing/examples/aggregate2/domainmodel.py +22 -0
  15. eventsourcing/examples/aggregate2/test_application.py +37 -0
  16. eventsourcing/examples/aggregate3/__init__.py +0 -0
  17. eventsourcing/examples/aggregate3/application.py +27 -0
  18. eventsourcing/examples/aggregate3/domainmodel.py +38 -0
  19. eventsourcing/examples/aggregate3/test_application.py +37 -0
  20. eventsourcing/examples/aggregate4/__init__.py +0 -0
  21. eventsourcing/examples/aggregate4/application.py +27 -0
  22. eventsourcing/examples/aggregate4/domainmodel.py +114 -0
  23. eventsourcing/examples/aggregate4/test_application.py +38 -0
  24. eventsourcing/examples/aggregate5/__init__.py +0 -0
  25. eventsourcing/examples/aggregate5/application.py +27 -0
  26. eventsourcing/examples/aggregate5/domainmodel.py +131 -0
  27. eventsourcing/examples/aggregate5/test_application.py +38 -0
  28. eventsourcing/examples/aggregate6/__init__.py +0 -0
  29. eventsourcing/examples/aggregate6/application.py +30 -0
  30. eventsourcing/examples/aggregate6/domainmodel.py +123 -0
  31. eventsourcing/examples/aggregate6/test_application.py +38 -0
  32. eventsourcing/examples/aggregate6a/__init__.py +0 -0
  33. eventsourcing/examples/aggregate6a/application.py +40 -0
  34. eventsourcing/examples/aggregate6a/domainmodel.py +149 -0
  35. eventsourcing/examples/aggregate6a/test_application.py +45 -0
  36. eventsourcing/examples/aggregate7/__init__.py +0 -0
  37. eventsourcing/examples/aggregate7/application.py +48 -0
  38. eventsourcing/examples/aggregate7/domainmodel.py +144 -0
  39. eventsourcing/examples/aggregate7/persistence.py +57 -0
  40. eventsourcing/examples/aggregate7/test_application.py +38 -0
  41. eventsourcing/examples/aggregate7/test_compression_and_encryption.py +45 -0
  42. eventsourcing/examples/aggregate7/test_snapshotting_intervals.py +67 -0
  43. eventsourcing/examples/aggregate7a/__init__.py +0 -0
  44. eventsourcing/examples/aggregate7a/application.py +56 -0
  45. eventsourcing/examples/aggregate7a/domainmodel.py +170 -0
  46. eventsourcing/examples/aggregate7a/test_application.py +46 -0
  47. eventsourcing/examples/aggregate7a/test_compression_and_encryption.py +45 -0
  48. eventsourcing/examples/aggregate8/__init__.py +0 -0
  49. eventsourcing/examples/aggregate8/application.py +47 -0
  50. eventsourcing/examples/aggregate8/domainmodel.py +65 -0
  51. eventsourcing/examples/aggregate8/persistence.py +57 -0
  52. eventsourcing/examples/aggregate8/test_application.py +37 -0
  53. eventsourcing/examples/aggregate8/test_compression_and_encryption.py +44 -0
  54. eventsourcing/examples/aggregate8/test_snapshotting_intervals.py +38 -0
  55. eventsourcing/examples/bankaccounts/__init__.py +0 -0
  56. eventsourcing/examples/bankaccounts/application.py +70 -0
  57. eventsourcing/examples/bankaccounts/domainmodel.py +56 -0
  58. eventsourcing/examples/bankaccounts/test.py +173 -0
  59. eventsourcing/examples/cargoshipping/__init__.py +0 -0
  60. eventsourcing/examples/cargoshipping/application.py +126 -0
  61. eventsourcing/examples/cargoshipping/domainmodel.py +330 -0
  62. eventsourcing/examples/cargoshipping/interface.py +143 -0
  63. eventsourcing/examples/cargoshipping/test.py +231 -0
  64. eventsourcing/examples/contentmanagement/__init__.py +0 -0
  65. eventsourcing/examples/contentmanagement/application.py +118 -0
  66. eventsourcing/examples/contentmanagement/domainmodel.py +69 -0
  67. eventsourcing/examples/contentmanagement/test.py +180 -0
  68. eventsourcing/examples/contentmanagement/utils.py +26 -0
  69. eventsourcing/examples/contentmanagementsystem/__init__.py +0 -0
  70. eventsourcing/examples/contentmanagementsystem/application.py +54 -0
  71. eventsourcing/examples/contentmanagementsystem/postgres.py +17 -0
  72. eventsourcing/examples/contentmanagementsystem/sqlite.py +17 -0
  73. eventsourcing/examples/contentmanagementsystem/system.py +14 -0
  74. eventsourcing/examples/contentmanagementsystem/test_system.py +180 -0
  75. eventsourcing/examples/searchablecontent/__init__.py +0 -0
  76. eventsourcing/examples/searchablecontent/application.py +45 -0
  77. eventsourcing/examples/searchablecontent/persistence.py +23 -0
  78. eventsourcing/examples/searchablecontent/postgres.py +118 -0
  79. eventsourcing/examples/searchablecontent/sqlite.py +136 -0
  80. eventsourcing/examples/searchablecontent/test_application.py +110 -0
  81. eventsourcing/examples/searchablecontent/test_recorder.py +68 -0
  82. eventsourcing/examples/searchabletimestamps/__init__.py +0 -0
  83. eventsourcing/examples/searchabletimestamps/application.py +32 -0
  84. eventsourcing/examples/searchabletimestamps/persistence.py +20 -0
  85. eventsourcing/examples/searchabletimestamps/postgres.py +110 -0
  86. eventsourcing/examples/searchabletimestamps/sqlite.py +99 -0
  87. eventsourcing/examples/searchabletimestamps/test_searchabletimestamps.py +94 -0
  88. eventsourcing/examples/test_invoice.py +176 -0
  89. eventsourcing/examples/test_parking_lot.py +206 -0
  90. eventsourcing/interface.py +4 -2
  91. eventsourcing/persistence.py +88 -82
  92. eventsourcing/popo.py +32 -31
  93. eventsourcing/postgres.py +388 -593
  94. eventsourcing/sqlite.py +100 -102
  95. eventsourcing/system.py +66 -71
  96. eventsourcing/tests/application.py +20 -32
  97. eventsourcing/tests/application_tests/__init__.py +0 -0
  98. eventsourcing/tests/application_tests/test_application_with_automatic_snapshotting.py +55 -0
  99. eventsourcing/tests/application_tests/test_application_with_popo.py +22 -0
  100. eventsourcing/tests/application_tests/test_application_with_postgres.py +75 -0
  101. eventsourcing/tests/application_tests/test_application_with_sqlite.py +72 -0
  102. eventsourcing/tests/application_tests/test_cache.py +134 -0
  103. eventsourcing/tests/application_tests/test_event_sourced_log.py +162 -0
  104. eventsourcing/tests/application_tests/test_notificationlog.py +232 -0
  105. eventsourcing/tests/application_tests/test_notificationlogreader.py +126 -0
  106. eventsourcing/tests/application_tests/test_processapplication.py +110 -0
  107. eventsourcing/tests/application_tests/test_processingpolicy.py +109 -0
  108. eventsourcing/tests/application_tests/test_repository.py +504 -0
  109. eventsourcing/tests/application_tests/test_snapshotting.py +68 -0
  110. eventsourcing/tests/application_tests/test_upcasting.py +459 -0
  111. eventsourcing/tests/docs_tests/__init__.py +0 -0
  112. eventsourcing/tests/docs_tests/test_docs.py +293 -0
  113. eventsourcing/tests/domain.py +1 -1
  114. eventsourcing/tests/domain_tests/__init__.py +0 -0
  115. eventsourcing/tests/domain_tests/test_aggregate.py +1180 -0
  116. eventsourcing/tests/domain_tests/test_aggregate_decorators.py +1604 -0
  117. eventsourcing/tests/domain_tests/test_domainevent.py +80 -0
  118. eventsourcing/tests/interface_tests/__init__.py +0 -0
  119. eventsourcing/tests/interface_tests/test_remotenotificationlog.py +258 -0
  120. eventsourcing/tests/persistence.py +52 -50
  121. eventsourcing/tests/persistence_tests/__init__.py +0 -0
  122. eventsourcing/tests/persistence_tests/test_aes.py +93 -0
  123. eventsourcing/tests/persistence_tests/test_connection_pool.py +722 -0
  124. eventsourcing/tests/persistence_tests/test_eventstore.py +72 -0
  125. eventsourcing/tests/persistence_tests/test_infrastructure_factory.py +21 -0
  126. eventsourcing/tests/persistence_tests/test_mapper.py +113 -0
  127. eventsourcing/tests/persistence_tests/test_noninterleaving_notification_ids.py +69 -0
  128. eventsourcing/tests/persistence_tests/test_popo.py +124 -0
  129. eventsourcing/tests/persistence_tests/test_postgres.py +1119 -0
  130. eventsourcing/tests/persistence_tests/test_sqlite.py +348 -0
  131. eventsourcing/tests/persistence_tests/test_transcoder.py +44 -0
  132. eventsourcing/tests/postgres_utils.py +7 -7
  133. eventsourcing/tests/system_tests/__init__.py +0 -0
  134. eventsourcing/tests/system_tests/test_runner.py +935 -0
  135. eventsourcing/tests/system_tests/test_system.py +284 -0
  136. eventsourcing/tests/utils_tests/__init__.py +0 -0
  137. eventsourcing/tests/utils_tests/test_utils.py +226 -0
  138. eventsourcing/utils.py +49 -50
  139. {eventsourcing-9.2.21.dist-info → eventsourcing-9.3.0.dist-info}/METADATA +30 -33
  140. eventsourcing-9.3.0.dist-info/RECORD +145 -0
  141. {eventsourcing-9.2.21.dist-info → eventsourcing-9.3.0.dist-info}/WHEEL +1 -2
  142. eventsourcing-9.2.21.dist-info/RECORD +0 -25
  143. eventsourcing-9.2.21.dist-info/top_level.txt +0 -1
  144. {eventsourcing-9.2.21.dist-info → eventsourcing-9.3.0.dist-info}/AUTHORS +0 -0
  145. {eventsourcing-9.2.21.dist-info → eventsourcing-9.3.0.dist-info}/LICENSE +0 -0
eventsourcing/persistence.py CHANGED
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import json
  import uuid
  from abc import ABC, abstractmethod
@@ -10,12 +12,11 @@ from time import time
  from types import ModuleType
  from typing import (
  Any,
- Deque,
  Dict,
  Generic,
  Iterator,
  List,
- Optional,
+ Mapping,
  Sequence,
  Type,
  TypeVar,
@@ -107,11 +108,12 @@ class JSONTranscoder(Transcoder):
  try:
  transcoding = self.types[type(o)]
  except KeyError:
- raise TypeError(
+ msg = (
  f"Object of type {type(o)} is not "
  "serializable. Please define and register "
  "a custom transcoding for this type."
- ) from None
+ )
+ raise TypeError(msg) from None
  else:
  return {
  "_type_": transcoding.name,
@@ -132,12 +134,13 @@ class JSONTranscoder(Transcoder):
  else:
  try:
  transcoding = self.names[cast(str, _type_)]
- except KeyError:
- raise TypeError(
+ except KeyError as e:
+ msg = (
  f"Data serialized with name '{cast(str, _type_)}' is not "
  "deserializable. Please register a "
  "custom transcoding for this type."
  )
+ raise TypeError(msg) from e
  else:
  return transcoding.decode(_data_)
  else:
@@ -264,8 +267,8 @@ class Mapper:
  def __init__(
  self,
  transcoder: Transcoder,
- compressor: Optional[Compressor] = None,
- cipher: Optional[Cipher] = None,
+ compressor: Compressor | None = None,
+ cipher: Cipher | None = None,
  ):
  self.transcoder = transcoder
  self.compressor = compressor
@@ -363,7 +366,7 @@ class DataError(DatabaseError):

  class OperationalError(DatabaseError):
  """
- Exception raised for errors that are related to the databases
+ Exception raised for errors that are related to the database's
  operation and not necessarily under the control of the programmer,
  e.g. an unexpected disconnect occurs, the data source name is not
  found, a transaction could not be processed, a memory allocation
@@ -412,7 +415,7 @@ class AggregateRecorder(ABC):
  @abstractmethod
  def insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  """
  Writes stored events into database.
  """
@@ -421,10 +424,11 @@ class AggregateRecorder(ABC):
  def select_events(
  self,
  originator_id: UUID,
- gt: Optional[int] = None,
- lte: Optional[int] = None,
+ *,
+ gt: int | None = None,
+ lte: int | None = None,
  desc: bool = False,
- limit: Optional[int] = None,
+ limit: int | None = None,
  ) -> List[StoredEvent]:
  """
  Reads stored events from database.
@@ -456,7 +460,7 @@ class ApplicationRecorder(AggregateRecorder):
  self,
  start: int,
  limit: int,
- stop: Optional[int] = None,
+ stop: int | None = None,
  topics: Sequence[str] = (),
  ) -> List[Notification]:
  """
@@ -547,10 +551,11 @@ class EventStore:
  def get(
  self,
  originator_id: UUID,
- gt: Optional[int] = None,
- lte: Optional[int] = None,
+ *,
+ gt: int | None = None,
+ lte: int | None = None,
  desc: bool = False,
- limit: Optional[int] = None,
+ limit: int | None = None,
  ) -> Iterator[DomainEventProtocol]:
  """
  Retrieves domain events from aggregate sequence.
@@ -609,37 +614,39 @@ class InfrastructureFactory(ABC):
  or "eventsourcing.popo"
  )
  try:
- obj: Union[Type[InfrastructureFactory], ModuleType] = resolve_topic(topic)
+ obj: Type[InfrastructureFactory] | ModuleType = resolve_topic(topic)
  except TopicError as e:
- raise EnvironmentError(
+ msg = (
  "Failed to resolve persistence module topic: "
  f"'{topic}' from environment "
  f"variable '{cls.PERSISTENCE_MODULE}'"
- ) from e
+ )
+ raise OSError(msg) from e

  if isinstance(obj, ModuleType):
  # Find the factory in the module.
- factory_classes: List[Type[InfrastructureFactory]] = []
- for member in obj.__dict__.values():
+ factory_classes: List[Type[InfrastructureFactory]] = [
+ member
+ for member in obj.__dict__.values()
  if (
  member is not InfrastructureFactory
  and isinstance(member, type)
  and issubclass(member, InfrastructureFactory)
- ):
- factory_classes.append(member)
+ )
+ ]
  if len(factory_classes) == 1:
  factory_cls = factory_classes[0]
  else:
- raise AssertionError(
+ msg = (
  f"Found {len(factory_classes)} infrastructure factory classes in"
  f" '{topic}', expected 1."
  )
+ raise AssertionError(msg)
  elif isinstance(obj, type) and issubclass(obj, InfrastructureFactory):
  factory_cls = obj
  else:
- raise AssertionError(
- f"Not an infrastructure factory class or module: {topic}"
- )
+ msg = f"Not an infrastructure factory class or module: {topic}"
+ raise AssertionError(msg)
  return cast(TInfrastructureFactory, factory_cls(env=env))

  def __init__(self, env: Environment):
@@ -654,7 +661,7 @@ class InfrastructureFactory(ABC):
  """
  Constructs a transcoder.
  """
- # Todo: Implement support for TRANSCODER_TOPIC.
+ # TODO: Implement support for TRANSCODER_TOPIC.
  return JSONTranscoder()

  def mapper(
@@ -663,21 +670,21 @@ class InfrastructureFactory(ABC):
  """
  Constructs a mapper.
  """
- # Todo: Implement support for MAPPER_TOPIC.
+ # TODO: Implement support for MAPPER_TOPIC.
  return mapper_class(
  transcoder=transcoder,
  cipher=self.cipher(),
  compressor=self.compressor(),
  )

- def cipher(self) -> Optional[Cipher]:
+ def cipher(self) -> Cipher | None:
  """
  Reads environment variables 'CIPHER_TOPIC'
  and 'CIPHER_KEY' to decide whether or not
  to construct a cipher.
  """
  cipher_topic = self.env.get(self.CIPHER_TOPIC)
- cipher: Optional[Cipher] = None
+ cipher: Cipher | None = None
  default_cipher_topic = "eventsourcing.cipher:AESCipher"
  if self.env.get("CIPHER_KEY") and not cipher_topic:
  cipher_topic = default_cipher_topic
@@ -688,15 +695,15 @@ class InfrastructureFactory(ABC):

  return cipher

- def compressor(self) -> Optional[Compressor]:
+ def compressor(self) -> Compressor | None:
  """
  Reads environment variable 'COMPRESSOR_TOPIC' to
  decide whether or not to construct a compressor.
  """
- compressor: Optional[Compressor] = None
+ compressor: Compressor | None = None
  compressor_topic = self.env.get(self.COMPRESSOR_TOPIC)
  if compressor_topic:
- compressor_cls: Union[Type[Compressor], Compressor] = resolve_topic(
+ compressor_cls: Type[Compressor] | Compressor = resolve_topic(
  compressor_topic
  )
  if isinstance(compressor_cls, type):
@@ -755,9 +762,12 @@ class Tracking:
  notification_id: int


+ Params = Union[Sequence[Any], Mapping[str, Any]]
+
+
  class Cursor(ABC):
  @abstractmethod
- def execute(self, statement: Union[str, bytes], params: Any = None) -> None:
+ def execute(self, statement: str | bytes, params: Params | None = None) -> None:
  """Executes given statement."""

  @abstractmethod
@@ -773,14 +783,14 @@ TCursor = TypeVar("TCursor", bound=Cursor)


  class Connection(ABC, Generic[TCursor]):
- def __init__(self, max_age: Optional[float] = None) -> None:
+ def __init__(self, max_age: float | None = None) -> None:
  self._closed = False
  self._closing = Event()
  self._close_lock = Lock()
  self.in_use = Lock()
  self.in_use.acquire()
  if max_age is not None:
- self._max_age_timer: Optional[Timer] = Timer(
+ self._max_age_timer: Timer | None = Timer(
  interval=max_age,
  function=self._close_when_not_in_use,
  )
@@ -788,7 +798,7 @@ class Connection(ABC, Generic[TCursor]):
  self._max_age_timer.start()
  else:
  self._max_age_timer = None
- self.is_writer: Optional[bool] = None
+ self.is_writer: bool | None = None

  @property
  def closed(self) -> bool:
@@ -830,19 +840,19 @@ class Connection(ABC, Generic[TCursor]):
  TConnection = TypeVar("TConnection", bound=Connection[Any])


- class ConnectionPoolClosed(EventSourcingError):
+ class ConnectionPoolClosedError(EventSourcingError):
  """
  Raised when using a connection pool that is already closed.
  """


- class ConnectionNotFromPool(EventSourcingError):
+ class ConnectionNotFromPoolError(EventSourcingError):
  """
  Raised when putting a connection in the wrong pool.
  """


- class ConnectionUnavailable(OperationalError, TimeoutError):
+ class ConnectionUnavailableError(OperationalError, TimeoutError):
  """
  Raised when a request to get a connection from a
  connection pool times out.
@@ -852,10 +862,11 @@ class ConnectionUnavailable(OperationalError, TimeoutError):
  class ConnectionPool(ABC, Generic[TConnection]):
  def __init__(
  self,
+ *,
  pool_size: int = 5,
  max_overflow: int = 10,
  pool_timeout: float = 30.0,
- max_age: Optional[float] = None,
+ max_age: float | None = None,
  pre_ping: bool = False,
  mutually_exclusive_read_write: bool = False,
  ) -> None:
@@ -894,8 +905,8 @@ class ConnectionPool(ABC, Generic[TConnection]):
  self.pool_timeout = pool_timeout
  self.max_age = max_age
  self.pre_ping = pre_ping
- self._pool: Deque[TConnection] = deque()
- self._in_use: Dict[int, TConnection] = dict()
+ self._pool: deque[TConnection] = deque()
+ self._in_use: Dict[int, TConnection] = {}
  self._get_semaphore = Semaphore()
  self._put_condition = Condition()
  self._no_readers = Condition()
@@ -905,6 +916,10 @@ class ConnectionPool(ABC, Generic[TConnection]):
  self._mutually_exclusive_read_write = mutually_exclusive_read_write
  self._closed = False

+ @property
+ def closed(self) -> bool:
+ return self._closed
+
  @property
  def num_in_use(self) -> int:
  """
@@ -938,7 +953,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  return self._num_in_use >= self.pool_size + self.max_overflow

  def get_connection(
- self, timeout: Optional[float] = None, is_writer: Optional[bool] = None
+ self, timeout: float | None = None, is_writer: bool | None = None
  ) -> TConnection:
  """
  Issues connections, or raises ConnectionPoolExhausted error.
@@ -966,7 +981,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  """
  # Make sure we aren't dealing with a closed pool.
  if self._closed:
- raise ConnectionPoolClosed
+ raise ConnectionPoolClosedError

  # Decide the timeout for getting a connection.
  timeout = self.pool_timeout if timeout is None else timeout
@@ -982,20 +997,16 @@ class ConnectionPool(ABC, Generic[TConnection]):
  if not self._writer_lock.acquire(
  timeout=self._time_remaining(timeout, started)
  ):
- raise ConnectionUnavailable(
- "Timed out waiting for return of writer"
- )
+ msg = "Timed out waiting for return of writer"
+ raise ConnectionUnavailableError(msg)
  if self._mutually_exclusive_read_write:
  with self._no_readers:
- if self._num_readers > 0:
- # print("writer waiting")
- if not self._no_readers.wait(
- timeout=self._time_remaining(timeout, started)
- ):
- self._writer_lock.release()
- raise ConnectionUnavailable(
- "Timed out waiting for return of reader"
- )
+ if self._num_readers > 0 and not self._no_readers.wait(
+ timeout=self._time_remaining(timeout, started)
+ ):
+ self._writer_lock.release()
+ msg = "Timed out waiting for return of reader"
+ raise ConnectionUnavailableError(msg)
  self._num_writers += 1

  # If connection is for reading, and writing excludes reading,
@@ -1005,9 +1016,8 @@ class ConnectionPool(ABC, Generic[TConnection]):
  if not self._writer_lock.acquire(
  timeout=self._time_remaining(timeout, started)
  ):
- raise ConnectionUnavailable(
- "Timed out waiting for return of writer"
- )
+ msg = "Timed out waiting for return of writer"
+ raise ConnectionUnavailableError(msg)
  self._writer_lock.release()
  with self._no_readers:
  self._num_readers += 1
@@ -1026,9 +1036,8 @@ class ConnectionPool(ABC, Generic[TConnection]):
  self._get_semaphore.release()
  else:
  # Timed out waiting for semaphore.
- raise ConnectionUnavailable(
- "Timed out waiting for connection pool semaphore"
- )
+ msg = "Timed out waiting for connection pool semaphore"
+ raise ConnectionUnavailableError(msg)

  def _get_connection(self, timeout: float = 0.0) -> TConnection:
  """
@@ -1058,18 +1067,15 @@ class ConnectionPool(ABC, Generic[TConnection]):
  return self._get_connection(
  timeout=self._time_remaining(timeout, started)
  )
- else:
- # Timed out waiting for a connection to be returned.
- raise ConnectionUnavailable(
- "Timed out waiting for return of connection"
- ) from None
- else:
- # Not fully used, so create a new connection.
- conn = self._create_connection()
- # print("created another connection")
+ # Timed out waiting for a connection to be returned.
+ msg = "Timed out waiting for return of connection"
+ raise ConnectionUnavailableError(msg) from None
+ # Not fully used, so create a new connection.
+ conn = self._create_connection()
+ # print("created another connection")

- # Connection should be pre-locked for use (avoids timer race).
- assert conn.in_use.locked()
+ # Connection should be pre-locked for use (avoids timer race).
+ assert conn.in_use.locked()

  else:
  # Got unused connection from pool, so lock for use.
@@ -1106,7 +1112,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  Returns connections to the pool, or closes connection
  if the pool is full.

- Unlocks write lock after writer has returned., and
+ Unlocks write lock after writer has returned, and
  updates count of readers when readers are returned.

  Notifies waiters when connections have been returned,
@@ -1120,15 +1126,15 @@ class ConnectionPool(ABC, Generic[TConnection]):
  with self._put_condition:
  # Make sure we aren't dealing with a closed pool
  if self._closed:
- raise ConnectionPoolClosed("Pool is closed")
+ msg = "Pool is closed"
+ raise ConnectionPoolClosedError(msg)

  # Make sure we are dealing with a connection from this pool.
  try:
  del self._in_use[id(conn)]
  except KeyError:
- raise ConnectionNotFromPool(
- "Connection not in use in this pool"
- ) from None
+ msg = "Connection not in use in this pool"
+ raise ConnectionNotFromPoolError(msg) from None

  if not conn.closed:
  # Put open connection in pool if not full.
@@ -1179,7 +1185,7 @@ class ConnectionPool(ABC, Generic[TConnection]):
  while True:
  try:
  conn = self._pool.popleft()
- except IndexError:
+ except IndexError: # noqa: PERF203
  break
  else:
  conn.close()
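
In the AggregateRecorder.select_events and EventStore.get hunks above, the gt, lte, desc and limit parameters become keyword-only (note the inserted "*,"). Below is a minimal sketch of the calling convention this implies, using the plain-Python POPOAggregateRecorder purely for illustration; an empty recorder is assumed, so the query simply returns an empty list:

    from uuid import uuid4

    from eventsourcing.popo import POPOAggregateRecorder

    recorder = POPOAggregateRecorder()
    originator_id = uuid4()

    # With the keyword-only signature shown above, the selection bounds
    # must be passed by name; positional gt/lte arguments would now raise
    # a TypeError.
    events = recorder.select_events(originator_id, gt=1, lte=10, desc=False, limit=None)
    assert events == []
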
eventsourcing/popo.py CHANGED
@@ -1,7 +1,8 @@
+ from __future__ import annotations
+
  from collections import defaultdict
  from threading import Lock
- from typing import Any, Dict, Iterable, List, Optional, Sequence, Set
- from uuid import UUID
+ from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Sequence

  from eventsourcing.persistence import (
  AggregateRecorder,
@@ -15,6 +16,9 @@ from eventsourcing.persistence import (
  )
  from eventsourcing.utils import reversed_keys

+ if TYPE_CHECKING: # pragma: nocover
+ from uuid import UUID
+

  class POPOAggregateRecorder(AggregateRecorder):
  def __init__(self) -> None:
@@ -24,33 +28,33 @@ class POPOAggregateRecorder(AggregateRecorder):

  def insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  self._insert_events(stored_events, **kwargs)
  return None

  def _insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  with self._database_lock:
  self._assert_uniqueness(stored_events, **kwargs)
  return self._update_table(stored_events, **kwargs)

- def _assert_uniqueness(
- self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> None:
+ def _assert_uniqueness(self, stored_events: List[StoredEvent], **_: Any) -> None:
  new = set()
  for s in stored_events:
  # Check events don't already exist.
  if s.originator_version in self._stored_events_index[s.originator_id]:
- raise IntegrityError(f"Stored event already recorded: {s}")
+ msg = f"Stored event already recorded: {s}"
+ raise IntegrityError(msg)
  new.add((s.originator_id, s.originator_version))
  # Check new events are unique.
  if len(new) < len(stored_events):
- raise IntegrityError(f"Stored events are not unique: {stored_events}")
+ msg = f"Stored events are not unique: {stored_events}"
+ raise IntegrityError(msg)

  def _update_table(
- self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ self, stored_events: List[StoredEvent], **_: Any
+ ) -> Sequence[int] | None:
  notification_ids = []
  for s in stored_events:
  self._stored_events.append(s)
@@ -63,27 +67,23 @@ class POPOAggregateRecorder(AggregateRecorder):
  def select_events(
  self,
  originator_id: UUID,
- gt: Optional[int] = None,
- lte: Optional[int] = None,
+ *,
+ gt: int | None = None,
+ lte: int | None = None,
  desc: bool = False,
- limit: Optional[int] = None,
+ limit: int | None = None,
  ) -> List[StoredEvent]:
  with self._database_lock:
  results = []

  index = self._stored_events_index[originator_id]
  positions: Iterable[int]
- if desc:
- positions = reversed_keys(index)
- else:
- positions = index.keys()
+ positions = reversed_keys(index) if desc else index.keys()
  for p in positions:
- if gt is not None:
- if not p > gt:
- continue
- if lte is not None:
- if not p <= lte:
- continue
+ if gt is not None and not p > gt:
+ continue
+ if lte is not None and not p <= lte:
+ continue
  s = self._stored_events[index[p]]
  results.append(s)
  if len(results) == limit:
@@ -94,14 +94,14 @@ class POPOAggregateRecorder(AggregateRecorder):
  class POPOApplicationRecorder(ApplicationRecorder, POPOAggregateRecorder):
  def insert_events(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  return self._insert_events(stored_events, **kwargs)

  def select_notifications(
  self,
  start: int,
  limit: int,
- stop: Optional[int] = None,
+ stop: int | None = None,
  topics: Sequence[str] = (),
  ) -> List[Notification]:
  with self._database_lock:
@@ -138,25 +138,26 @@ class POPOApplicationRecorder(ApplicationRecorder, POPOAggregateRecorder):
  class POPOProcessRecorder(ProcessRecorder, POPOApplicationRecorder):
  def __init__(self) -> None:
  super().__init__()
- self._tracking_table: Dict[str, Set[int]] = defaultdict(set)
+ self._tracking_table: Dict[str, set[int]] = defaultdict(set)
  self._max_tracking_ids: Dict[str, int] = defaultdict(lambda: 0)

  def _assert_uniqueness(
  self, stored_events: List[StoredEvent], **kwargs: Any
  ) -> None:
  super()._assert_uniqueness(stored_events, **kwargs)
- t: Optional[Tracking] = kwargs.get("tracking", None)
+ t: Tracking | None = kwargs.get("tracking", None)
  if t and t.notification_id in self._tracking_table[t.application_name]:
- raise IntegrityError(
+ msg = (
  f"Already recorded notification ID {t.notification_id} "
  f"for application {t.application_name}"
  )
+ raise IntegrityError(msg)

  def _update_table(
  self, stored_events: List[StoredEvent], **kwargs: Any
- ) -> Optional[Sequence[int]]:
+ ) -> Sequence[int] | None:
  notification_ids = super()._update_table(stored_events, **kwargs)
- t: Optional[Tracking] = kwargs.get("tracking", None)
+ t: Tracking | None = kwargs.get("tracking", None)
  if t:
  self._tracking_table[t.application_name].add(t.notification_id)
  if self._max_tracking_ids[t.application_name] < t.notification_id:
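
The persistence.py hunks above also rename the connection pool exceptions: ConnectionPoolClosed, ConnectionNotFromPool and ConnectionUnavailable become ConnectionPoolClosedError, ConnectionNotFromPoolError and ConnectionUnavailableError. A hedged sketch of how calling code might adapt follows; the helper function is illustrative rather than part of the library, and the key point from the diff is that ConnectionUnavailableError still subclasses OperationalError and TimeoutError, so handlers that catch those base classes keep working:

    from eventsourcing.persistence import (
        ConnectionPool,
        ConnectionUnavailableError,
    )

    def get_connection_or_none(pool: ConnectionPool, timeout: float = 1.0):
        # Returns None instead of propagating the renamed timeout error.
        try:
            return pool.get_connection(timeout=timeout)
        except ConnectionUnavailableError:
            return None
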