eventsourcing 9.3.5__py3-none-any.whl → 9.4.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eventsourcing/__init__.py +0 -1
- eventsourcing/application.py +26 -10
- eventsourcing/cipher.py +4 -2
- eventsourcing/cryptography.py +96 -0
- eventsourcing/domain.py +29 -9
- eventsourcing/interface.py +23 -5
- eventsourcing/persistence.py +292 -71
- eventsourcing/popo.py +113 -32
- eventsourcing/postgres.py +265 -103
- eventsourcing/projection.py +200 -0
- eventsourcing/sqlite.py +143 -36
- eventsourcing/system.py +64 -42
- eventsourcing/tests/application.py +7 -12
- eventsourcing/tests/persistence.py +304 -75
- eventsourcing/utils.py +1 -1
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0a2.dist-info}/LICENSE +1 -1
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0a2.dist-info}/METADATA +9 -7
- eventsourcing-9.4.0a2.dist-info/RECORD +26 -0
- eventsourcing-9.3.5.dist-info/RECORD +0 -24
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0a2.dist-info}/AUTHORS +0 -0
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0a2.dist-info}/WHEEL +0 -0
eventsourcing/system.py
CHANGED
@@ -1,11 +1,11 @@
 from __future__ import annotations

 import inspect
+import threading
 import traceback
 from abc import ABC, abstractmethod
 from collections import defaultdict
 from queue import Full, Queue
-from threading import Event, Lock, RLock, Thread
 from types import FrameType, ModuleType
 from typing import (
     TYPE_CHECKING,
@@ -23,7 +23,7 @@ from typing import (
     cast,
 )

-if TYPE_CHECKING:
+if TYPE_CHECKING:
     from typing_extensions import Self

     from eventsourcing.application import (
@@ -79,7 +79,7 @@ class Follower(Application):
         self.mappers: Dict[str, Mapper] = {}
         self.recorder: ProcessRecorder
         self.is_threading_enabled = False
-        self.processing_lock = Lock()
+        self.processing_lock = threading.Lock()

     def construct_recorder(self) -> ProcessRecorder:
         """
@@ -112,9 +112,9 @@ class Follower(Application):
         Pull and process new domain event notifications.
         """
         if start is None:
-            start = self.recorder.max_tracking_id(leader_name)
+            start = self.recorder.max_tracking_id(leader_name)
         for notifications in self.pull_notifications(
-            leader_name, start=start, stop=stop
+            leader_name, start=start, stop=stop, inclusive_of_start=False
         ):
             notifications_iter = self.filter_received_notifications(notifications)
             for domain_event, tracking in self.convert_notifications(
@@ -123,14 +123,22 @@ class Follower(Application):
             self.process_event(domain_event, tracking)

     def pull_notifications(
-        self,
+        self,
+        leader_name: str,
+        start: int | None,
+        stop: int | None = None,
+        *,
+        inclusive_of_start: bool = True,
     ) -> Iterator[List[Notification]]:
         """
         Pulls batches of unseen :class:`~eventsourcing.persistence.Notification`
         objects from the notification log reader of the named application.
         """
         return self.readers[leader_name].select(
-            start=start,
+            start=start,
+            stop=stop,
+            topics=self.follow_topics,
+            inclusive_of_start=inclusive_of_start,
         )

     def filter_received_notifications(
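Taken together, the two hunks above change how a follower resumes pulling: instead of asking for notifications from `max_tracking_id(...) + 1` with an inclusive start, it now passes the last processed tracking id directly and sets `inclusive_of_start=False`. A minimal, self-contained sketch of the two resume styles, using plain Python stand-ins rather than the library's own classes:

from typing import List, Optional

def select(ids: List[int], start: Optional[int], *, inclusive_of_start: bool = True) -> List[int]:
    # Toy stand-in for a notification log: notifications are just integer ids.
    if start is None:
        return list(ids)
    if inclusive_of_start:
        return [i for i in ids if i >= start]
    return [i for i in ids if i > start]

log = [1, 2, 3, 4, 5]
max_tracking_id = 3  # id of the last notification this follower has processed

# 9.3.x style: resume from the next id, with the start position included.
assert select(log, max_tracking_id + 1) == [4, 5]

# 9.4 style: pass the last processed id and exclude it from the results.
assert select(log, max_tracking_id, inclusive_of_start=False) == [4, 5]

The exclusive start also appears to sidestep the question of what "plus one" should mean when the tracking position is unknown, which matters in the later hunks where that position can be None.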
@@ -465,9 +473,9 @@ class SingleThreadedRunner(Runner, RecordingEventReceiver):
         super().__init__(system=system, env=env)
         self.apps: Dict[str, Application] = {}
         self._recording_events_received: List[RecordingEvent] = []
-        self._prompted_names_lock = Lock()
+        self._prompted_names_lock = threading.Lock()
         self._prompted_names: set[str] = set()
-        self._processing_lock = Lock()
+        self._processing_lock = threading.Lock()

         # Construct followers.
         for name in self.system.followers:
@@ -573,8 +581,8 @@ class NewSingleThreadedRunner(Runner, RecordingEventReceiver):
         super().__init__(system=system, env=env)
         self.apps: Dict[str, Application] = {}
         self._recording_events_received: List[RecordingEvent] = []
-        self._recording_events_received_lock = Lock()
-        self._processing_lock = Lock()
+        self._recording_events_received_lock = threading.Lock()
+        self._processing_lock = threading.Lock()
         self._previous_max_notification_ids: Dict[str, int] = {}

         # Construct followers.
@@ -665,9 +673,7 @@ class NewSingleThreadedRunner(Runner, RecordingEventReceiver):
             for follower_name in self.system.leads[leader_name]:
                 follower = self.apps[follower_name]
                 assert isinstance(follower, Follower)
-                start = (
-                    follower.recorder.max_tracking_id(leader_name) + 1
-                )
+                start = follower.recorder.max_tracking_id(leader_name)
                 stop = recording_event.recordings[0].notification.id - 1
                 follower.pull_and_process(
                     leader_name=leader_name,
@@ -723,7 +729,7 @@ class MultiThreadedRunner(Runner):
         super().__init__(system=system, env=env)
         self.apps: Dict[str, Application] = {}
         self.threads: Dict[str, MultiThreadedRunnerThread] = {}
-        self.has_errored = Event()
+        self.has_errored = threading.Event()

         # Construct followers.
         for follower_name in self.system.followers:
@@ -807,7 +813,7 @@ class MultiThreadedRunner(Runner):
         return app


-class MultiThreadedRunnerThread(RecordingEventReceiver, Thread):
+class MultiThreadedRunnerThread(RecordingEventReceiver, threading.Thread):
     """
     Runs one :class:`~eventsourcing.system.Follower` application in
     a :class:`~eventsourcing.system.MultiThreadedRunner`.
@@ -816,18 +822,18 @@ class MultiThreadedRunnerThread(RecordingEventReceiver, Thread):
     def __init__(
         self,
         follower: Follower,
-        has_errored: Event,
+        has_errored: threading.Event,
     ):
         super().__init__(daemon=True)
         self.follower = follower
         self.has_errored = has_errored
         self.error: Exception | None = None
-        self.is_stopping = Event()
-        self.has_started = Event()
-        self.is_prompted = Event()
+        self.is_stopping = threading.Event()
+        self.has_started = threading.Event()
+        self.is_prompted = threading.Event()
         self.prompted_names: List[str] = []
-        self.prompted_names_lock = Lock()
-        self.is_running = Event()
+        self.prompted_names_lock = threading.Lock()
+        self.is_running = threading.Event()

     def run(self) -> None:
         """
@@ -889,7 +895,7 @@ class NewMultiThreadedRunner(Runner, RecordingEventReceiver):
         self.pulling_threads: Dict[str, List[PullingThread]] = {}
         self.processing_queues: Dict[str, Queue[List[ProcessingJob] | None]] = {}
         self.all_threads: List[PullingThread | ConvertingThread | ProcessingThread] = []
-        self.has_errored = Event()
+        self.has_errored = threading.Event()

         # Construct followers.
         for follower_name in self.system.followers:
@@ -1014,7 +1020,7 @@ class NewMultiThreadedRunner(Runner, RecordingEventReceiver):
            pulling_thread.receive_recording_event(recording_event)


-class PullingThread(Thread):
+class PullingThread(threading.Thread):
     """
     Receives or pulls notifications from the given leader, and
     puts them on a queue for conversion into processing jobs.
@@ -1025,19 +1031,19 @@ class PullingThread(Thread):
         converting_queue: Queue[ConvertingJob],
         follower: Follower,
         leader_name: str,
-        has_errored: Event,
+        has_errored: threading.Event,
     ):
         super().__init__(daemon=True)
-        self.overflow_event = Event()
+        self.overflow_event = threading.Event()
         self.recording_event_queue: Queue[RecordingEvent | None] = Queue(maxsize=100)
         self.converting_queue = converting_queue
-        self.receive_lock = Lock()
+        self.receive_lock = threading.Lock()
         self.follower = follower
         self.leader_name = leader_name
         self.error: Exception | None = None
         self.has_errored = has_errored
-        self.is_stopping = Event()
-        self.has_started = Event()
+        self.is_stopping = threading.Event()
+        self.has_started = threading.Event()
         self.mapper = self.follower.mappers[self.leader_name]
         self.previous_max_notification_id = self.follower.recorder.max_tracking_id(
             application_name=self.leader_name
@@ -1054,6 +1060,7 @@ class PullingThread(Thread):
         # Ignore recording event if already seen a subsequent.
         if (
             recording_event.previous_max_notification_id is not None
+            and self.previous_max_notification_id is not None
             and recording_event.previous_max_notification_id
             < self.previous_max_notification_id
         ):
@@ -1062,13 +1069,17 @@ class PullingThread(Thread):
         # Catch up if there is a gap in sequence of recording events.
         if (
             recording_event.previous_max_notification_id is None
+            or self.previous_max_notification_id is None
             or recording_event.previous_max_notification_id
             > self.previous_max_notification_id
         ):
-            start = self.previous_max_notification_id
+            start = self.previous_max_notification_id
             stop = recording_event.recordings[0].notification.id - 1
             for notifications in self.follower.pull_notifications(
-                self.leader_name,
+                self.leader_name,
+                start=start,
+                stop=stop,
+                inclusive_of_start=False,
             ):
                 self.converting_queue.put(notifications)
                 self.previous_max_notification_id = notifications[-1].id
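The added None guards suggest that the follower's tracking position (and a recording event's previous maximum) can now be None when nothing has been processed yet, so the catch-up test has to treat a missing position like a gap. A simplified sketch of that decision, with hypothetical helper names:

from typing import Optional

def needs_catch_up(event_previous_max: Optional[int], thread_previous_max: Optional[int]) -> bool:
    # Pull from the notification log when either position is unknown,
    # or when the recording event starts beyond what this thread has seen.
    return (
        event_previous_max is None
        or thread_previous_max is None
        or event_previous_max > thread_previous_max
    )

assert needs_catch_up(None, 5)       # recording event carries no previous position
assert needs_catch_up(7, None)       # follower has processed nothing yet
assert needs_catch_up(7, 5)          # gap between 5 and 7
assert not needs_catch_up(5, 5)      # contiguous, process the recording event directly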
@@ -1092,7 +1103,7 @@ class PullingThread(Thread):
         self.recording_event_queue.put(None)


-class ConvertingThread(Thread):
+class ConvertingThread(threading.Thread):
     """
     Converts notifications into processing jobs.
     """
@@ -1103,7 +1114,7 @@ class ConvertingThread(Thread):
         processing_queue: Queue[List[ProcessingJob] | None],
         follower: Follower,
         leader_name: str,
-        has_errored: Event,
+        has_errored: threading.Event,
     ):
         super().__init__(daemon=True)
         self.converting_queue = converting_queue
@@ -1112,8 +1123,8 @@ class ConvertingThread(Thread):
         self.leader_name = leader_name
         self.error: Exception | None = None
         self.has_errored = has_errored
-        self.is_stopping = Event()
-        self.has_started = Event()
+        self.is_stopping = threading.Event()
+        self.has_started = threading.Event()
         self.mapper = self.follower.mappers[self.leader_name]

     def run(self) -> None:
@@ -1162,7 +1173,7 @@ class ConvertingThread(Thread):
         self.converting_queue.put(None)


-class ProcessingThread(Thread):
+class ProcessingThread(threading.Thread):
     """
     A processing thread gets events from a processing queue, and
     calls the application's process_event() method.
@@ -1172,15 +1183,15 @@ class ProcessingThread(Thread):
         self,
         processing_queue: Queue[List[ProcessingJob] | None],
         follower: Follower,
-        has_errored: Event,
+        has_errored: threading.Event,
     ):
         super().__init__(daemon=True)
         self.processing_queue = processing_queue
         self.follower = follower
         self.error: Exception | None = None
         self.has_errored = has_errored
-        self.is_stopping = Event()
-        self.has_started = Event()
+        self.is_stopping = threading.Event()
+        self.has_started = threading.Event()

     def run(self) -> None:
         self.has_started.set()
@@ -1247,7 +1258,12 @@ class NotificationLogReader:
             section_id = section.next_id

     def select(
-        self,
+        self,
+        *,
+        start: int | None,
+        stop: int | None = None,
+        topics: Sequence[str] = (),
+        inclusive_of_start: bool = True,
     ) -> Iterator[List[Notification]]:
         """
         Returns a generator that yields lists of event notifications
@@ -1263,12 +1279,18 @@ class NotificationLogReader:
         """
         while True:
             notifications = self.notification_log.select(
-                start=start,
+                start=start,
+                stop=stop,
+                limit=self.section_size,
+                topics=topics,
+                inclusive_of_start=inclusive_of_start,
             )
             # Stop if zero notifications.
             if len(notifications) == 0:
                 break

             # Otherwise, yield and continue.
+            start = notifications[-1].id
+            if inclusive_of_start:
+                start += 1
             yield notifications
-            start = notifications[-1].id + 1
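NotificationLogReader.select() is now keyword-only and pages with the same exclusive/inclusive start logic as the follower. A toy reimplementation of the paging loop, using integer ids in place of Notification objects, to show how start advances between batches:

from typing import Iterator, List, Optional

def select_batches(
    log: List[int],
    *,
    start: Optional[int],
    stop: Optional[int] = None,
    limit: int = 2,
    inclusive_of_start: bool = True,
) -> Iterator[List[int]]:
    # Toy notification log: ids are plain integers, and `limit` plays
    # the role of the reader's section size.
    while True:
        if start is None:
            candidates = list(log)
        elif inclusive_of_start:
            candidates = [i for i in log if i >= start]
        else:
            candidates = [i for i in log if i > start]
        if stop is not None:
            candidates = [i for i in candidates if i <= stop]
        batch = candidates[:limit]
        if not batch:
            break
        # Mirror the new reader loop: remember the last id, and only
        # step past it when the start position is inclusive.
        start = batch[-1]
        if inclusive_of_start:
            start += 1
        yield batch

assert list(select_batches([1, 2, 3, 4, 5], start=2)) == [[2, 3], [4, 5]]
assert list(select_batches([1, 2, 3, 4, 5], start=2, inclusive_of_start=False)) == [[3, 4], [5]]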
eventsourcing/tests/application.py
CHANGED
@@ -19,7 +19,7 @@ from eventsourcing.domain import Aggregate
 from eventsourcing.persistence import (
     InfrastructureFactory,
     IntegrityError,
-    Transcoder,
+    JSONTranscoder,
     Transcoding,
 )
 from eventsourcing.tests.domain import BankAccount, EmailAddress
@@ -36,7 +36,6 @@ class ExampleApplicationTestCase(TestCase):

     def test_example_application(self):
         app = BankAccounts(env={"IS_SNAPSHOTTING_ENABLED": "y"})
-        max_notification_id = app.recorder.max_notification_id()

         self.assertEqual(get_topic(type(app.factory)), self.expected_factory_topic)
@@ -75,9 +74,7 @@
         )

         sleep(1)  # Added to make eventsourcing-axon tests work, perhaps not necessary.
-        section = app.notification_log[
-            f"{max_notification_id + 1},{max_notification_id + 10}"
-        ]
+        section = app.notification_log["1,10"]
         self.assertEqual(len(section.items), 4)

         # Take snapshot (specify version).
@@ -198,7 +195,7 @@ class EmailAddressAsStr(Transcoding):
 class BankAccounts(Application):
     is_snapshotting_enabled = True

-    def register_transcodings(self, transcoder: Transcoder) -> None:
+    def register_transcodings(self, transcoder: JSONTranscoder) -> None:
         super().register_transcodings(transcoder)
         transcoder.register(EmailAddressAsStr())
@@ -291,20 +288,18 @@ class ApplicationTestCase(TestCase):
         recordings = app.save(None)
         self.assertEqual(recordings, [])

-        max_id = app.recorder.max_notification_id()
-
         recordings = app.save(Aggregate())
         self.assertEqual(len(recordings), 1)
-        self.assertEqual(recordings[0].notification.id, 1
+        self.assertEqual(recordings[0].notification.id, 1)

         recordings = app.save(Aggregate())
         self.assertEqual(len(recordings), 1)
-        self.assertEqual(recordings[0].notification.id, 2
+        self.assertEqual(recordings[0].notification.id, 2)

         recordings = app.save(Aggregate(), Aggregate())
         self.assertEqual(len(recordings), 2)
-        self.assertEqual(recordings[0].notification.id, 3
-        self.assertEqual(recordings[1].notification.id, 4
+        self.assertEqual(recordings[0].notification.id, 3)
+        self.assertEqual(recordings[1].notification.id, 4)

     def test_take_snapshot_raises_assertion_error_if_snapshotting_not_enabled(self):
         app = Application()