eventsourcing 9.3.0-py3-none-any.whl → 9.3.0a1-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.

Potentially problematic release.



@@ -76,19 +76,16 @@ class WithPostgreSQL(SearchableTimestampsTestCase):
         super().tearDown()
 
     def drop_tables(self) -> None:
-        with PostgresDatastore(
+        db = PostgresDatastore(
             os.environ["POSTGRES_DBNAME"],
             os.environ["POSTGRES_HOST"],
             os.environ["POSTGRES_PORT"],
             os.environ["POSTGRES_USER"],
             os.environ["POSTGRES_PASSWORD"],
-        ) as datastore:
-            drop_postgres_table(
-                datastore, "public.searchabletimestampsapplication_events"
-            )
-            drop_postgres_table(
-                datastore, "public.searchabletimestampsapplication_timestamps"
-            )
+        )
+        drop_postgres_table(db, "public.searchabletimestampsapplication_events")
+        drop_postgres_table(db, "public.searchabletimestampsapplication_timestamps")
+        db.close()
 
 
 del SearchableTimestampsTestCase
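
The hunk above replaces the `with PostgresDatastore(...) as datastore:` block with direct construction and an explicit `db.close()`, matching the removal of the context-manager methods shown further down in `eventsourcing/postgres.py`. A minimal sketch of the 9.3.0a1-side teardown pattern, assuming the `drop_postgres_table` helper lives in `eventsourcing.tests.postgres_utils` and the usual `POSTGRES_*` environment variables are set:

```python
import os

from eventsourcing.postgres import PostgresDatastore
# Helper used by the test above; this import path is an assumption.
from eventsourcing.tests.postgres_utils import drop_postgres_table

# Construct the datastore directly and close it explicitly, because the
# 9.3.0a1 side removes __enter__/__exit__ from PostgresDatastore.
db = PostgresDatastore(
    os.environ["POSTGRES_DBNAME"],
    os.environ["POSTGRES_HOST"],
    os.environ["POSTGRES_PORT"],
    os.environ["POSTGRES_USER"],
    os.environ["POSTGRES_PASSWORD"],
)
try:
    drop_postgres_table(db, "public.searchabletimestampsapplication_events")
    drop_postgres_table(db, "public.searchabletimestampsapplication_timestamps")
finally:
    # The diffed test calls db.close() unconditionally; the try/finally is an
    # extra precaution added in this sketch, not part of the package code.
    db.close()
```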
eventsourcing/postgres.py CHANGED
@@ -2,14 +2,13 @@ from __future__ import annotations
 
 import logging
 from contextlib import contextmanager
-from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Sequence
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Sequence
 
 import psycopg
 import psycopg.errors
 import psycopg_pool
 from psycopg import Connection, Cursor
 from psycopg.rows import DictRow, dict_row
-from typing_extensions import Self
 
 from eventsourcing.persistence import (
     AggregateRecorder,
@@ -63,11 +62,11 @@ class PostgresDatastore:
         user: str,
         password: str,
         *,
-        connect_timeout: int = 30,
+        connect_timeout: int = 5,
         idle_in_transaction_session_timeout: int = 0,
         pool_size: int = 2,
         max_overflow: int = 2,
-        max_waiting: int = 0,
+        pool_timeout: float = 5.0,
         conn_max_age: float = 60 * 60.0,
         pre_ping: bool = False,
         lock_timeout: int = 0,
@@ -80,6 +79,7 @@ class PostgresDatastore:
         self.pool_open_timeout = pool_open_timeout
 
         check = ConnectionPool.check_connection if pre_ping else None
+        kwargs: Dict[str, Any] = {"check": check}
         self.pool = ConnectionPool(
             get_password_func=get_password_func,
             connection_class=Connection[DictRow],
@@ -96,9 +96,9 @@
             open=False,
             configure=self.after_connect,
             timeout=connect_timeout,
-            max_waiting=max_waiting,
+            max_waiting=round(pool_timeout),
             max_lifetime=conn_max_age,
-            check=check,
+            **kwargs,  # use the 'check' argument when no longer supporting Python 3.7
         )
         self.lock_timeout = lock_timeout
         self.schema = schema.strip()
@@ -156,12 +156,6 @@ class PostgresDatastore:
     def __del__(self) -> None:
         self.close()
 
-    def __enter__(self) -> Self:
-        return self
-
-    def __exit__(self, *args: object, **kwargs: Any) -> None:
-        self.close()
-
 
 class PostgresAggregateRecorder(AggregateRecorder):
     def __init__(
@@ -564,10 +558,10 @@ class Factory(InfrastructureFactory):
     POSTGRES_CONNECT_TIMEOUT = "POSTGRES_CONNECT_TIMEOUT"
     POSTGRES_CONN_MAX_AGE = "POSTGRES_CONN_MAX_AGE"
     POSTGRES_PRE_PING = "POSTGRES_PRE_PING"
-    POSTGRES_MAX_WAITING = "POSTGRES_MAX_WAITING"
+    POSTGRES_POOL_TIMEOUT = "POSTGRES_POOL_TIMEOUT"
     POSTGRES_LOCK_TIMEOUT = "POSTGRES_LOCK_TIMEOUT"
     POSTGRES_POOL_SIZE = "POSTGRES_POOL_SIZE"
-    POSTGRES_MAX_OVERFLOW = "POSTGRES_MAX_OVERFLOW"
+    POSTGRES_POOL_MAX_OVERFLOW = "POSTGRES_POOL_MAX_OVERFLOW"
     POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT = (
         "POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT"
     )
@@ -624,7 +618,7 @@ class Factory(InfrastructureFactory):
             get_password_func = resolve_topic(get_password_topic)
             password = ""
 
-        connect_timeout = 30
+        connect_timeout = 5
         connect_timeout_str = self.env.get(self.POSTGRES_CONNECT_TIMEOUT)
         if connect_timeout_str:
             try:
@@ -670,30 +664,30 @@ class Factory(InfrastructureFactory):
                 raise OSError(msg) from None
 
         pool_max_overflow = 10
-        pool_max_overflow_str = self.env.get(self.POSTGRES_MAX_OVERFLOW)
+        pool_max_overflow_str = self.env.get(self.POSTGRES_POOL_MAX_OVERFLOW)
         if pool_max_overflow_str:
             try:
                 pool_max_overflow = int(pool_max_overflow_str)
             except ValueError:
                 msg = (
                     "Postgres environment value for key "
-                    f"'{self.POSTGRES_MAX_OVERFLOW}' is invalid. "
+                    f"'{self.POSTGRES_POOL_MAX_OVERFLOW}' is invalid. "
                     "If set, an integer or empty string is expected: "
                     f"'{pool_max_overflow_str}'"
                 )
                 raise OSError(msg) from None
 
-        max_waiting = 0
-        max_waiting_str = self.env.get(self.POSTGRES_MAX_WAITING)
-        if max_waiting_str:
+        pool_timeout = 30.0
+        pool_timeout_str = self.env.get(self.POSTGRES_POOL_TIMEOUT)
+        if pool_timeout_str:
             try:
-                max_waiting = int(max_waiting_str)
+                pool_timeout = float(pool_timeout_str)
             except ValueError:
                 msg = (
                     "Postgres environment value for key "
-                    f"'{self.POSTGRES_MAX_WAITING}' is invalid. "
-                    "If set, an integer or empty string is expected: "
-                    f"'{max_waiting_str}'"
+                    f"'{self.POSTGRES_POOL_TIMEOUT}' is invalid. "
+                    "If set, a float or empty string is expected: "
+                    f"'{pool_timeout_str}'"
                 )
                 raise OSError(msg) from None
 
@@ -739,16 +733,13 @@ class Factory(InfrastructureFactory):
             idle_in_transaction_session_timeout=idle_in_transaction_session_timeout,
             pool_size=pool_size,
             max_overflow=pool_max_overflow,
-            max_waiting=max_waiting,
+            pool_timeout=pool_timeout,
             conn_max_age=conn_max_age,
             pre_ping=pre_ping,
             lock_timeout=lock_timeout,
             schema=schema,
         )
 
-    def env_create_table(self) -> bool:
-        return strtobool(self.env.get(self.CREATE_TABLE) or "yes")
-
     def aggregate_recorder(self, purpose: str = "events") -> AggregateRecorder:
         prefix = self.env.name.lower() or "stored"
         events_table_name = prefix + "_" + purpose
@@ -792,6 +783,9 @@ class Factory(InfrastructureFactory):
             recorder.create_table()
         return recorder
 
+    def env_create_table(self) -> bool:
+        return strtobool(self.env.get(self.CREATE_TABLE) or "yes")
+
     def close(self) -> None:
         if hasattr(self, "datastore"):
             self.datastore.close()
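
Taken together, the `eventsourcing/postgres.py` hunks show the 9.3.0a1 side renaming the `max_waiting` constructor argument to `pool_timeout` (a float that is rounded and passed to the pool as `max_waiting`), lowering the default `connect_timeout` from 30 to 5 seconds, renaming the `POSTGRES_MAX_WAITING` and `POSTGRES_MAX_OVERFLOW` environment keys, and dropping context-manager support from `PostgresDatastore`. A hedged sketch of how that surface is used on the 9.3.0a1 side, with connection details borrowed from the test code in this diff and values that are illustrative rather than package defaults:

```python
import os

from eventsourcing.postgres import PostgresDatastore

# Direct construction: `pool_timeout` replaces `max_waiting`, and the default
# connect timeout is 5 seconds instead of 30 on this side of the diff.
datastore = PostgresDatastore(
    "eventsourcing",   # dbname
    "127.0.0.1",       # host
    "5432",            # port
    "eventsourcing",   # user
    "eventsourcing",   # password
    connect_timeout=5,
    pool_timeout=5.0,  # rounded and handed to the connection pool as max_waiting
)
datastore.close()  # no "with" support on this side; close explicitly

# When configuring through the Factory's environment keys, the renamed
# variables are read as follows (example values, not defaults):
os.environ["POSTGRES_POOL_TIMEOUT"] = "30.0"     # parsed with float(); was POSTGRES_MAX_WAITING
os.environ["POSTGRES_POOL_MAX_OVERFLOW"] = "10"  # parsed with int(); was POSTGRES_MAX_OVERFLOW
```

As the hunks above show, an unparseable value for either key makes the Factory raise `OSError`.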
eventsourcing/system.py CHANGED
@@ -8,7 +8,6 @@ from queue import Full, Queue
 from threading import Event, Lock, RLock, Thread
 from types import FrameType, ModuleType
 from typing import (
-    Any,
     ClassVar,
     Dict,
     Iterable,
@@ -22,8 +21,6 @@ from typing import (
     cast,
 )
 
-from typing_extensions import Self
-
 from eventsourcing.application import (
     Application,
     NotificationLog,
@@ -528,13 +525,6 @@ class SingleThreadedRunner(Runner, RecordingEventReceiver):
         assert isinstance(app, cls)
         return app
 
-    def __enter__(self) -> Self:
-        self.start()
-        return self
-
-    def __exit__(self, *args: object, **kwargs: Any) -> None:
-        self.stop()
-
 
 class NewSingleThreadedRunner(Runner, RecordingEventReceiver):
     """
@@ -20,32 +20,32 @@ class TestDocs(TestCase):
         super().setUp()
         self.uris = tmpfile_uris()
 
-        with PostgresDatastore(
+        db = PostgresDatastore(
             "eventsourcing",
             "127.0.0.1",
             "5432",
             "eventsourcing",
             "eventsourcing",
-        ) as datastore:
-            drop_postgres_table(datastore, "dogschool_events")
-            drop_postgres_table(datastore, "counters_events")
-            drop_postgres_table(datastore, "counters_tracking")
+        )
+        drop_postgres_table(db, "dogschool_events")
+        drop_postgres_table(db, "counters_events")
+        drop_postgres_table(db, "counters_tracking")
 
     def tearDown(self) -> None:
         self.clean_env()
 
     def clean_env(self):
         clear_topic_cache()
-        with PostgresDatastore(
+        db = PostgresDatastore(
             "eventsourcing",
             "127.0.0.1",
             "5432",
             "eventsourcing",
             "eventsourcing",
-        ) as datastore:
-            drop_postgres_table(datastore, "dogschool_events")
-            drop_postgres_table(datastore, "counters_events")
-            drop_postgres_table(datastore, "counters_tracking")
+        )
+        drop_postgres_table(db, "dogschool_events")
+        drop_postgres_table(db, "counters_events")
+        drop_postgres_table(db, "counters_tracking")
 
         keys = [
             "PERSISTENCE_MODULE",
@@ -5,7 +5,6 @@ from decimal import Decimal
 from unittest.case import TestCase
 from uuid import NAMESPACE_URL, UUID, uuid4, uuid5
 
-from eventsourcing.application import AggregateNotFound, AggregateNotFoundError
 from eventsourcing.domain import (
     Aggregate,
     AggregateCreated,
@@ -731,7 +730,6 @@ class TestAggregateCreation(TestCase):
         order = Order("name")
         pending = order.collect_events()
         self.assertEqual(type(pending[0]).__name__, "Started")
-        self.assertTrue(isinstance(pending[0], Order.Created))
 
     def test_raises_when_given_created_event_name_conflicts_with_created_event_class(
         self,
@@ -1100,19 +1098,6 @@ class TestAggregateEventsAreSubclassed(TestCase):
             MySubclass.Ended.__qualname__,
         )
 
-        self.assertTrue(
-            MySubclass._created_event_class.__qualname__.endswith("MySubclass.Opened")
-        )
-
-        class MySubSubClass(MySubclass):
-            pass
-
-        self.assertTrue(
-            MySubSubClass._created_event_class.__qualname__.endswith(
-                "MySubSubClass.Opened"
-            )
-        )
-
 
 class TestBankAccount(TestCase):
     def test_subclass_bank_account(self):
@@ -1172,9 +1157,3 @@ class TestBankAccount(TestCase):
         # Collect pending events.
         pending = account.collect_events()
         self.assertEqual(len(pending), 7)
-
-
-class TestAggregateNotFound(TestCase):
-    def test(self):
-        e = AggregateNotFound()
-        self.assertIsInstance(e, AggregateNotFoundError)
@@ -839,9 +839,6 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
         self.transcoder.register(DecimalAsStr())
         self.transcoder.register(DatetimeAsISO())
 
-    def tearDown(self):
-        self.factory.close()
-
     def test_createmapper(self):
         # Want to construct:
         # - application recorder