eventsourcing-9.3.0a1-py3-none-any.whl → eventsourcing-9.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eventsourcing/application.py +8 -1
- eventsourcing/domain.py +87 -88
- eventsourcing/examples/aggregate4/domainmodel.py +14 -28
- eventsourcing/examples/contentmanagementsystem/test_system.py +119 -113
- eventsourcing/examples/searchablecontent/test_application.py +4 -5
- eventsourcing/examples/searchablecontent/test_recorder.py +4 -5
- eventsourcing/examples/searchabletimestamps/test_searchabletimestamps.py +8 -5
- eventsourcing/postgres.py +28 -22
- eventsourcing/system.py +10 -0
- eventsourcing/tests/docs_tests/test_docs.py +10 -10
- eventsourcing/tests/domain_tests/test_aggregate.py +41 -0
- eventsourcing/tests/persistence.py +3 -0
- eventsourcing/tests/persistence_tests/test_postgres.py +104 -106
- eventsourcing/tests/system_tests/test_runner.py +17 -17
- eventsourcing/tests/system_tests/test_system.py +1 -4
- eventsourcing-9.3.1.dist-info/AUTHORS +10 -0
- {eventsourcing-9.3.0a1.dist-info → eventsourcing-9.3.1.dist-info}/METADATA +6 -5
- {eventsourcing-9.3.0a1.dist-info → eventsourcing-9.3.1.dist-info}/RECORD +20 -19
- {eventsourcing-9.3.0a1.dist-info → eventsourcing-9.3.1.dist-info}/WHEEL +1 -1
- {eventsourcing-9.3.0a1.dist-info → eventsourcing-9.3.1.dist-info}/LICENSE +0 -0
@@ -54,16 +54,15 @@ class TestWithPostgres(SearchableContentRecorderTestCase):
         super().tearDown()

     def drop_tables(self) -> None:
-
+        with PostgresDatastore(
             os.environ["POSTGRES_DBNAME"],
             os.environ["POSTGRES_HOST"],
             os.environ["POSTGRES_PORT"],
             os.environ["POSTGRES_USER"],
             os.environ["POSTGRES_PASSWORD"],
-        )
-
-
-        db.close()
+        ) as datastore:
+            drop_postgres_table(datastore, "public.searchablecontentapplication_events")
+            drop_postgres_table(datastore, "public.pages_projection_example")


 del SearchableContentRecorderTestCase
eventsourcing/examples/searchabletimestamps/test_searchabletimestamps.py CHANGED

@@ -76,16 +76,19 @@ class WithPostgreSQL(SearchableTimestampsTestCase):
         super().tearDown()

     def drop_tables(self) -> None:
-
+        with PostgresDatastore(
             os.environ["POSTGRES_DBNAME"],
             os.environ["POSTGRES_HOST"],
             os.environ["POSTGRES_PORT"],
             os.environ["POSTGRES_USER"],
             os.environ["POSTGRES_PASSWORD"],
-        )
-
-
-
+        ) as datastore:
+            drop_postgres_table(
+                datastore, "public.searchabletimestampsapplication_events"
+            )
+            drop_postgres_table(
+                datastore, "public.searchabletimestampsapplication_timestamps"
+            )


 del SearchableTimestampsTestCase
eventsourcing/postgres.py CHANGED
@@ -2,13 +2,14 @@ from __future__ import annotations

 import logging
 from contextlib import contextmanager
-from typing import TYPE_CHECKING, Any, Callable,
+from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Sequence

 import psycopg
 import psycopg.errors
 import psycopg_pool
 from psycopg import Connection, Cursor
 from psycopg.rows import DictRow, dict_row
+from typing_extensions import Self

 from eventsourcing.persistence import (
     AggregateRecorder,
@@ -62,11 +63,11 @@ class PostgresDatastore:
         user: str,
         password: str,
         *,
-        connect_timeout: int =
+        connect_timeout: int = 30,
         idle_in_transaction_session_timeout: int = 0,
         pool_size: int = 2,
         max_overflow: int = 2,
-
+        max_waiting: int = 0,
         conn_max_age: float = 60 * 60.0,
         pre_ping: bool = False,
         lock_timeout: int = 0,
@@ -79,7 +80,6 @@ class PostgresDatastore:
         self.pool_open_timeout = pool_open_timeout

         check = ConnectionPool.check_connection if pre_ping else None
-        kwargs: Dict[str, Any] = {"check": check}
         self.pool = ConnectionPool(
             get_password_func=get_password_func,
             connection_class=Connection[DictRow],
@@ -96,9 +96,9 @@ class PostgresDatastore:
             open=False,
             configure=self.after_connect,
             timeout=connect_timeout,
-            max_waiting=
+            max_waiting=max_waiting,
             max_lifetime=conn_max_age,
-
+            check=check,
         )
         self.lock_timeout = lock_timeout
         self.schema = schema.strip()
@@ -156,6 +156,12 @@ class PostgresDatastore:
     def __del__(self) -> None:
         self.close()

+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(self, *args: object, **kwargs: Any) -> None:
+        self.close()
+

 class PostgresAggregateRecorder(AggregateRecorder):
     def __init__(
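The new __enter__ and __exit__ methods make PostgresDatastore usable as a context manager that closes its connection pool on exit, which is what the test changes above rely on. A minimal usage sketch; the connection values are placeholders:

import os

from eventsourcing.postgres import PostgresDatastore

# Entering the block returns the datastore itself; leaving it calls
# close(), which shuts down the underlying psycopg connection pool.
with PostgresDatastore(
    os.environ.get("POSTGRES_DBNAME", "eventsourcing"),
    os.environ.get("POSTGRES_HOST", "127.0.0.1"),
    os.environ.get("POSTGRES_PORT", "5432"),
    os.environ.get("POSTGRES_USER", "eventsourcing"),
    os.environ.get("POSTGRES_PASSWORD", "eventsourcing"),
) as datastore:
    # Use the datastore here, e.g. for test cleanup; the pool is
    # released when the block exits, even on error.
    pass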
@@ -558,10 +564,10 @@ class Factory(InfrastructureFactory):
     POSTGRES_CONNECT_TIMEOUT = "POSTGRES_CONNECT_TIMEOUT"
     POSTGRES_CONN_MAX_AGE = "POSTGRES_CONN_MAX_AGE"
     POSTGRES_PRE_PING = "POSTGRES_PRE_PING"
-
+    POSTGRES_MAX_WAITING = "POSTGRES_MAX_WAITING"
     POSTGRES_LOCK_TIMEOUT = "POSTGRES_LOCK_TIMEOUT"
     POSTGRES_POOL_SIZE = "POSTGRES_POOL_SIZE"
-
+    POSTGRES_MAX_OVERFLOW = "POSTGRES_MAX_OVERFLOW"
     POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT = (
         "POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT"
     )
@@ -618,7 +624,7 @@ class Factory(InfrastructureFactory):
             get_password_func = resolve_topic(get_password_topic)
             password = ""

-        connect_timeout =
+        connect_timeout = 30
        connect_timeout_str = self.env.get(self.POSTGRES_CONNECT_TIMEOUT)
        if connect_timeout_str:
            try:
@@ -664,30 +670,30 @@ class Factory(InfrastructureFactory):
                 raise OSError(msg) from None

         pool_max_overflow = 10
-        pool_max_overflow_str = self.env.get(self.
+        pool_max_overflow_str = self.env.get(self.POSTGRES_MAX_OVERFLOW)
         if pool_max_overflow_str:
             try:
                 pool_max_overflow = int(pool_max_overflow_str)
             except ValueError:
                 msg = (
                     "Postgres environment value for key "
-                    f"'{self.
+                    f"'{self.POSTGRES_MAX_OVERFLOW}' is invalid. "
                     "If set, an integer or empty string is expected: "
                     f"'{pool_max_overflow_str}'"
                 )
                 raise OSError(msg) from None

-
-
-        if
+        max_waiting = 0
+        max_waiting_str = self.env.get(self.POSTGRES_MAX_WAITING)
+        if max_waiting_str:
             try:
-
+                max_waiting = int(max_waiting_str)
             except ValueError:
                 msg = (
                     "Postgres environment value for key "
-                    f"'{self.
-                    "If set,
-                    f"'{
+                    f"'{self.POSTGRES_MAX_WAITING}' is invalid. "
+                    "If set, an integer or empty string is expected: "
+                    f"'{max_waiting_str}'"
                 )
                 raise OSError(msg) from None

@@ -733,13 +739,16 @@ class Factory(InfrastructureFactory):
             idle_in_transaction_session_timeout=idle_in_transaction_session_timeout,
             pool_size=pool_size,
             max_overflow=pool_max_overflow,
-
+            max_waiting=max_waiting,
             conn_max_age=conn_max_age,
             pre_ping=pre_ping,
             lock_timeout=lock_timeout,
             schema=schema,
         )

+    def env_create_table(self) -> bool:
+        return strtobool(self.env.get(self.CREATE_TABLE) or "yes")
+
     def aggregate_recorder(self, purpose: str = "events") -> AggregateRecorder:
         prefix = self.env.name.lower() or "stored"
         events_table_name = prefix + "_" + purpose
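Together with the new POSTGRES_MAX_WAITING key above, the factory now forwards a max_waiting value into the PostgresDatastore pool. A sketch of configuring it through environment variables; the connection values are placeholders, and the comment on max_waiting reflects psycopg_pool's documented meaning (how many requests may queue for a pooled connection, 0 for unlimited):

import os

from eventsourcing.application import Application

# Select the Postgres persistence module and size its connection pool.
os.environ["PERSISTENCE_MODULE"] = "eventsourcing.postgres"
os.environ["POSTGRES_DBNAME"] = "eventsourcing"
os.environ["POSTGRES_HOST"] = "127.0.0.1"
os.environ["POSTGRES_PORT"] = "5432"
os.environ["POSTGRES_USER"] = "eventsourcing"
os.environ["POSTGRES_PASSWORD"] = "eventsourcing"
os.environ["POSTGRES_POOL_SIZE"] = "2"
os.environ["POSTGRES_MAX_OVERFLOW"] = "2"
os.environ["POSTGRES_MAX_WAITING"] = "4"  # new key in 9.3.1

# The application's infrastructure factory parses these keys and
# constructs PostgresDatastore(..., max_waiting=4, ...).
app = Application()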
@@ -783,9 +792,6 @@ class Factory(InfrastructureFactory):
             recorder.create_table()
         return recorder

-    def env_create_table(self) -> bool:
-        return strtobool(self.env.get(self.CREATE_TABLE) or "yes")
-
     def close(self) -> None:
         if hasattr(self, "datastore"):
             self.datastore.close()
eventsourcing/system.py CHANGED
@@ -8,6 +8,7 @@ from queue import Full, Queue
 from threading import Event, Lock, RLock, Thread
 from types import FrameType, ModuleType
 from typing import (
+    Any,
     ClassVar,
     Dict,
     Iterable,
@@ -21,6 +22,8 @@ from typing import (
     cast,
 )

+from typing_extensions import Self
+
 from eventsourcing.application import (
     Application,
     NotificationLog,
@@ -525,6 +528,13 @@ class SingleThreadedRunner(Runner, RecordingEventReceiver):
         assert isinstance(app, cls)
         return app

+    def __enter__(self) -> Self:
+        self.start()
+        return self
+
+    def __exit__(self, *args: object, **kwargs: Any) -> None:
+        self.stop()
+

 class NewSingleThreadedRunner(Runner, RecordingEventReceiver):
     """
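system.py gains a context-manager form of SingleThreadedRunner: __enter__ starts it and __exit__ stops it. A minimal sketch with placeholder application classes (Upstream and Downstream are illustrative, and Downstream's policy is a no-op):

from eventsourcing.application import Application
from eventsourcing.system import ProcessApplication, SingleThreadedRunner, System


class Upstream(Application):
    pass


class Downstream(ProcessApplication):
    def policy(self, domain_event, processing_event):
        # A real policy would record follow-on events here.
        pass


# One pipe: events recorded by Upstream are processed by Downstream.
system = System(pipes=[[Upstream, Downstream]])

# __enter__ calls start() and __exit__ calls stop(), so the runner is
# stopped even if the block raises.
with SingleThreadedRunner(system) as runner:
    upstream = runner.get(Upstream)
    # ... save aggregates with 'upstream'; recorded events are pushed
    # to Downstream's policy before the block exits.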
eventsourcing/tests/docs_tests/test_docs.py CHANGED

@@ -20,32 +20,32 @@ class TestDocs(TestCase):
         super().setUp()
         self.uris = tmpfile_uris()

-
+        with PostgresDatastore(
             "eventsourcing",
             "127.0.0.1",
             "5432",
             "eventsourcing",
             "eventsourcing",
-        )
-
-
-
+        ) as datastore:
+            drop_postgres_table(datastore, "dogschool_events")
+            drop_postgres_table(datastore, "counters_events")
+            drop_postgres_table(datastore, "counters_tracking")

     def tearDown(self) -> None:
         self.clean_env()

     def clean_env(self):
         clear_topic_cache()
-
+        with PostgresDatastore(
             "eventsourcing",
             "127.0.0.1",
             "5432",
             "eventsourcing",
             "eventsourcing",
-        )
-
-
-
+        ) as datastore:
+            drop_postgres_table(datastore, "dogschool_events")
+            drop_postgres_table(datastore, "counters_events")
+            drop_postgres_table(datastore, "counters_tracking")

         keys = [
             "PERSISTENCE_MODULE",
eventsourcing/tests/domain_tests/test_aggregate.py CHANGED

@@ -1,10 +1,12 @@
 import inspect
+import warnings
 from dataclasses import _DataclassParams, dataclass
 from datetime import datetime
 from decimal import Decimal
 from unittest.case import TestCase
 from uuid import NAMESPACE_URL, UUID, uuid4, uuid5

+from eventsourcing.application import AggregateNotFound, AggregateNotFoundError
 from eventsourcing.domain import (
     Aggregate,
     AggregateCreated,
@@ -730,6 +732,7 @@ class TestAggregateCreation(TestCase):
         order = Order("name")
         pending = order.collect_events()
         self.assertEqual(type(pending[0]).__name__, "Started")
+        self.assertTrue(isinstance(pending[0], Order.Created))

     def test_raises_when_given_created_event_name_conflicts_with_created_event_class(
         self,
@@ -1098,6 +1101,19 @@ class TestAggregateEventsAreSubclassed(TestCase):
             MySubclass.Ended.__qualname__,
         )

+        self.assertTrue(
+            MySubclass._created_event_class.__qualname__.endswith("MySubclass.Opened")
+        )
+
+        class MySubSubClass(MySubclass):
+            pass
+
+        self.assertTrue(
+            MySubSubClass._created_event_class.__qualname__.endswith(
+                "MySubSubClass.Opened"
+            )
+        )
+

 class TestBankAccount(TestCase):
     def test_subclass_bank_account(self):
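The added assertions pin down that a custom created-event class is re-derived for each aggregate subclass, so every class in the hierarchy gets its own event class qualified under its own name. A small sketch of that behaviour, assuming the library's created_event_name class argument and illustrative class names:

from eventsourcing.domain import Aggregate


class MyAggregate(Aggregate, created_event_name="Opened"):
    pass


class MySubclass(MyAggregate):
    pass


# Instantiating the subclass triggers its own "Opened" event class,
# whose qualified name is nested under the subclass.
obj = MySubclass()
created = obj.collect_events()[0]
print(type(created).__qualname__)  # ends with "MySubclass.Opened"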
@@ -1157,3 +1173,28 @@
         # Collect pending events.
         pending = account.collect_events()
         self.assertEqual(len(pending), 7)
+
+
+class TestAggregateNotFound(TestCase):
+    def test(self):
+        # Verify deprecation warning.
+        with warnings.catch_warnings(record=True) as w:
+            AggregateNotFound()
+
+        self.assertEqual(len(w), 1)
+        self.assertIs(w[-1].category, DeprecationWarning)
+        self.assertEqual(
+            "AggregateNotFound is deprecated, use AggregateNotFoundError instead",
+            w[-1].message.args[0],
+        )
+
+        # Verify no deprecation warning.
+        with warnings.catch_warnings(record=True) as w:
+            AggregateNotFoundError()
+        self.assertEqual(len(w), 0)
+
+        # Check we didn't break any code.
+        try:
+            raise AggregateNotFoundError
+        except AggregateNotFound:
+            pass
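The new test fixes the renaming: AggregateNotFoundError is the exception to catch, while AggregateNotFound remains as a deprecated alias so existing except clauses keep working. A brief sketch of the intended call site, assuming the default in-memory persistence module and an arbitrary UUID with no stored aggregate:

from uuid import uuid4

from eventsourcing.application import AggregateNotFoundError, Application

app = Application()  # defaults to the in-memory persistence module

try:
    # Nothing has been saved under this id, so the repository raises.
    app.repository.get(uuid4())
except AggregateNotFoundError:
    print("aggregate not found")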
eventsourcing/tests/persistence.py CHANGED

@@ -839,6 +839,9 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
         self.transcoder.register(DecimalAsStr())
         self.transcoder.register(DatetimeAsISO())

+    def tearDown(self):
+        self.factory.close()
+
     def test_createmapper(self):
         # Want to construct:
         # - application recorder