eventsourcing 9.5.0b3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eventsourcing/__init__.py +0 -0
- eventsourcing/application.py +998 -0
- eventsourcing/cipher.py +107 -0
- eventsourcing/compressor.py +15 -0
- eventsourcing/cryptography.py +91 -0
- eventsourcing/dcb/__init__.py +0 -0
- eventsourcing/dcb/api.py +144 -0
- eventsourcing/dcb/application.py +159 -0
- eventsourcing/dcb/domain.py +369 -0
- eventsourcing/dcb/msgpack.py +38 -0
- eventsourcing/dcb/persistence.py +193 -0
- eventsourcing/dcb/popo.py +178 -0
- eventsourcing/dcb/postgres_tt.py +704 -0
- eventsourcing/dcb/tests.py +608 -0
- eventsourcing/dispatch.py +80 -0
- eventsourcing/domain.py +1964 -0
- eventsourcing/interface.py +164 -0
- eventsourcing/persistence.py +1429 -0
- eventsourcing/popo.py +267 -0
- eventsourcing/postgres.py +1441 -0
- eventsourcing/projection.py +502 -0
- eventsourcing/py.typed +0 -0
- eventsourcing/sqlite.py +816 -0
- eventsourcing/system.py +1203 -0
- eventsourcing/tests/__init__.py +3 -0
- eventsourcing/tests/application.py +483 -0
- eventsourcing/tests/domain.py +105 -0
- eventsourcing/tests/persistence.py +1744 -0
- eventsourcing/tests/postgres_utils.py +131 -0
- eventsourcing/utils.py +257 -0
- eventsourcing-9.5.0b3.dist-info/METADATA +253 -0
- eventsourcing-9.5.0b3.dist-info/RECORD +35 -0
- eventsourcing-9.5.0b3.dist-info/WHEEL +4 -0
- eventsourcing-9.5.0b3.dist-info/licenses/AUTHORS +10 -0
- eventsourcing-9.5.0b3.dist-info/licenses/LICENSE +29 -0
eventsourcing/tests/application.py
@@ -0,0 +1,483 @@
from __future__ import annotations

import traceback
import warnings
from concurrent.futures import ThreadPoolExecutor
from decimal import Decimal
from threading import Event, get_ident
from time import sleep
from typing import TYPE_CHECKING, Any, ClassVar
from unittest import TestCase
from uuid import UUID, uuid4

from eventsourcing.application import AggregateNotFoundError, Application
from eventsourcing.domain import Aggregate
from eventsourcing.persistence import (
    InfrastructureFactory,
    InfrastructureFactoryError,
    IntegrityError,
    JSONTranscoder,
    Transcoding,
)
from eventsourcing.tests.domain import BankAccount, EmailAddress
from eventsourcing.utils import EnvType, get_topic

if TYPE_CHECKING:
    from datetime import datetime


class ExampleApplicationTestCase(TestCase):
    started_ats: ClassVar[dict[type[TestCase], datetime]] = {}
    counts: ClassVar[dict[type[TestCase], int]] = {}
    expected_factory_topic: str

    def test_example_application(self) -> None:
        app = BankAccounts(env={"IS_SNAPSHOTTING_ENABLED": "y"})

        self.assertEqual(get_topic(type(app.factory)), self.expected_factory_topic)

        # Check AccountNotFound exception.
        with self.assertRaises(BankAccounts.AccountNotFoundError):
            app.get_account(uuid4())

        # Open an account.
        account_id = app.open_account(
            full_name="Alice",
            email_address="alice@example.com",
        )

        # Check balance.
        self.assertEqual(
            app.get_balance(account_id),
            Decimal("0.00"),
        )

        # Credit the account.
        app.credit_account(account_id, Decimal("10.00"))

        # Check balance.
        self.assertEqual(
            app.get_balance(account_id),
            Decimal("10.00"),
        )

        app.credit_account(account_id, Decimal("25.00"))
        app.credit_account(account_id, Decimal("30.00"))

        # Check balance.
        self.assertEqual(
            app.get_balance(account_id),
            Decimal("65.00"),
        )

        # sleep(1)  # Added to make eventsourcing-axon tests work.
        section = app.notification_log["1,10"]
        self.assertEqual(len(section.items), 4)

        # Take snapshot (specify version).
        app.take_snapshot(account_id, version=Aggregate.INITIAL_VERSION + 1)

        assert app.snapshots is not None  # for mypy
        snapshots = list(app.snapshots.get(account_id))
        self.assertEqual(len(snapshots), 1)
        self.assertEqual(snapshots[0].originator_version, Aggregate.INITIAL_VERSION + 1)

        from_snapshot1: BankAccount = app.repository.get(
            account_id, version=Aggregate.INITIAL_VERSION + 2
        )
        self.assertIsInstance(from_snapshot1, BankAccount)
        self.assertEqual(from_snapshot1.version, Aggregate.INITIAL_VERSION + 2)
        self.assertEqual(from_snapshot1.balance, Decimal("35.00"))

        # Take snapshot (don't specify version).
        app.take_snapshot(account_id)
        assert app.snapshots is not None  # for mypy
        snapshots = list(app.snapshots.get(account_id))
        self.assertEqual(len(snapshots), 2)
        self.assertEqual(snapshots[0].originator_version, Aggregate.INITIAL_VERSION + 1)
        self.assertEqual(snapshots[1].originator_version, Aggregate.INITIAL_VERSION + 3)

        from_snapshot2: BankAccount = app.repository.get(account_id)
        self.assertIsInstance(from_snapshot2, BankAccount)
        self.assertEqual(from_snapshot2.version, Aggregate.INITIAL_VERSION + 3)
        self.assertEqual(from_snapshot2.balance, Decimal("65.00"))


class EmailAddressAsStr(Transcoding):
    type = EmailAddress
    name = "email_address_as_str"

    def encode(self, obj: EmailAddress) -> str:
        return obj.address

    def decode(self, data: str) -> EmailAddress:
        return EmailAddress(data)


class BankAccounts(Application[UUID]):
    is_snapshotting_enabled = True

    def register_transcodings(self, transcoder: JSONTranscoder) -> None:
        super().register_transcodings(transcoder)
        transcoder.register(EmailAddressAsStr())

    def open_account(self, full_name: str, email_address: str) -> UUID:
        account = BankAccount.open(
            full_name=full_name,
            email_address=email_address,
        )
        self.save(account)
        return account.id

    def credit_account(self, account_id: UUID, amount: Decimal) -> None:
        account = self.get_account(account_id)
        account.append_transaction(amount)
        self.save(account)

    def get_balance(self, account_id: UUID) -> Decimal:
        account = self.get_account(account_id)
        return account.balance

    def get_account(self, account_id: UUID) -> BankAccount:
        try:
            aggregate: BankAccount = self.repository.get(account_id)
        except AggregateNotFoundError:
            raise self.AccountNotFoundError(account_id) from None
        else:
            assert isinstance(aggregate, BankAccount)
            return aggregate

    class AccountNotFoundError(Exception):
        pass


class ApplicationTestCase(TestCase):
    def test_name(self) -> None:
        self.assertEqual(Application.name, "Application")

        class MyApplication1(Application[UUID]):
            pass

        self.assertEqual(MyApplication1.name, "MyApplication1")

        class MyApplication2(Application[UUID]):
            name = "MyBoundedContext"

        self.assertEqual(MyApplication2.name, "MyBoundedContext")

    def test_as_context_manager(self) -> None:
        with Application[UUID]():
            pass

    def test_resolve_persistence_topics(self) -> None:
        # None specified.
        app = Application[UUID]()
        self.assertIsInstance(app.factory, InfrastructureFactory)

        # Legacy 'INFRASTRUCTURE_FACTORY'.
        app = Application(env={"INFRASTRUCTURE_FACTORY": "eventsourcing.popo:Factory"})
        self.assertIsInstance(app.factory, InfrastructureFactory)

        # Legacy 'FACTORY_TOPIC'.
        app = Application(env={"FACTORY_TOPIC": "eventsourcing.popo:Factory"})
        self.assertIsInstance(app.factory, InfrastructureFactory)

        # Check 'PERSISTENCE_MODULE' resolves to a class.
        app = Application(env={"PERSISTENCE_MODULE": "eventsourcing.popo"})
        self.assertIsInstance(app.factory, InfrastructureFactory)

        # Check exceptions.
        with self.assertRaises(InfrastructureFactoryError) as cm:
            Application(env={"PERSISTENCE_MODULE": "eventsourcing.application"})
        self.assertEqual(
            cm.exception.args[0],
            "Found 0 infrastructure factory classes in "
            "'eventsourcing.application', expected 1.",
        )

        with self.assertRaises(InfrastructureFactoryError) as cm:
            Application(
                env={"PERSISTENCE_MODULE": "eventsourcing.application:Application"}
            )
        self.assertEqual(
            "Topic 'eventsourcing.application:Application' didn't "
            "resolve to a persistence module or infrastructure factory class: "
            "<class 'eventsourcing.application.Application'>",
            cm.exception.args[0],
        )

    def test_save_returns_recording_event(self) -> None:
        app = Application[UUID]()

        recordings = app.save()
        self.assertEqual(recordings, [])

        recordings = app.save(None)
        self.assertEqual(recordings, [])

        recordings = app.save(Aggregate())
        self.assertEqual(len(recordings), 1)
        self.assertEqual(recordings[0].notification.id, 1)

        recordings = app.save(Aggregate())
        self.assertEqual(len(recordings), 1)
        self.assertEqual(recordings[0].notification.id, 2)

        recordings = app.save(Aggregate(), Aggregate())
        self.assertEqual(len(recordings), 2)
        self.assertEqual(recordings[0].notification.id, 3)
        self.assertEqual(recordings[1].notification.id, 4)

    def test_take_snapshot_raises_assertion_error_if_snapshotting_not_enabled(
        self,
    ) -> None:
        app = Application[UUID]()
        with self.assertRaises(AssertionError) as cm:
            app.take_snapshot(uuid4())
        self.assertEqual(
            cm.exception.args[0],
            "Can't take snapshot without snapshots store. Please "
            "set environment variable IS_SNAPSHOTTING_ENABLED to "
            "a true value (e.g. 'y'), or set 'is_snapshotting_enabled' "
            "on application class, or set 'snapshotting_intervals' on "
            "application class.",
        )

    def test_application_with_cached_aggregates_and_fastforward(self) -> None:
        app = Application[UUID](env={"AGGREGATE_CACHE_MAXSIZE": "10"})

        aggregate = Aggregate()
        app.save(aggregate)
        # Should not put the aggregate in the cache.
        assert app.repository.cache is not None  # for mypy
        with self.assertRaises(KeyError):
            self.assertEqual(aggregate, app.repository.cache.get(aggregate.id))

        # Getting the aggregate should put aggregate in the cache.
        app.repository.get(aggregate.id)
        self.assertEqual(aggregate, app.repository.cache.get(aggregate.id))

        # Triggering a subsequent event shouldn't update the cache.
        aggregate.trigger_event(Aggregate.Event)
        app.save(aggregate)
        self.assertNotEqual(aggregate, app.repository.cache.get(aggregate.id))
        self.assertEqual(
            aggregate.version, app.repository.cache.get(aggregate.id).version + 1
        )

        # Getting the aggregate should fastforward the aggregate in the cache.
        app.repository.get(aggregate.id)
        self.assertEqual(aggregate, app.repository.cache.get(aggregate.id))

    def test_check_aggregate_fastforwarding_nonblocking(self) -> None:
        self._check_aggregate_fastforwarding_during_contention(
            env={
                "AGGREGATE_CACHE_MAXSIZE": "10",
                "AGGREGATE_CACHE_FASTFORWARD_SKIPPING": "y",
            }
        )

    def test_check_aggregate_fastforwarding_blocking(self) -> None:
        self._check_aggregate_fastforwarding_during_contention(
            env={"AGGREGATE_CACHE_MAXSIZE": "10"}
        )

    def _check_aggregate_fastforwarding_during_contention(self, env: EnvType) -> None:
        app = Application[UUID](env=env)

        self.assertEqual(len(app.repository._fastforward_locks_inuse), 0)

        # Create one aggregate.
        original_aggregate = Aggregate()
        app.save(original_aggregate)
        obj_ids = set()

        # Prime the cache.
        app.repository.get(original_aggregate.id)

        # Remember the aggregate ID.
        aggregate_id = original_aggregate.id

        stopped = Event()
        errors: list[BaseException] = []
        successful_thread_ids = set()

        def trigger_save_get_check() -> None:
            while not stopped.is_set():
                try:
                    # Get the aggregate.
                    aggregate: Aggregate = app.repository.get(aggregate_id)
                    original_version = aggregate.version

                    # Try to record a new event.
                    aggregate.trigger_event(Aggregate.Event)
                    # Give other threads a chance.
                    try:
                        app.save(aggregate)
                    except IntegrityError:
                        # Start again if we didn't record a new event.
                        # print("Got integrity error")
                        sleep(0.001)
                        continue

                    # Get the aggregate from the cache.
                    assert app.repository.cache is not None
                    cached: Any = app.repository.cache.get(aggregate_id)
                    obj_ids.add(id(cached))

                    if len(obj_ids) > 1:
                        stopped.set()
                        continue

                    # Fast-forward the cached aggregate.
                    fastforwarded: Aggregate = app.repository.get(aggregate_id)

                    # Check cached aggregate was fast-forwarded with recorded event.
                    if fastforwarded.version < original_version:
                        try:
                            self.fail(
                                f"Failed to fast-forward at version {original_version}"
                            )
                        except AssertionError as e:
                            errors.append(e)
                            stopped.set()
                            continue

                    # Monitor number of threads getting involved.
                    thread_id = get_ident()
                    successful_thread_ids.add(thread_id)

                    # print("Version:", aggregate.version, thread_id)

                    # See if we have done enough.
                    if len(successful_thread_ids) > 10 and aggregate.version >= 25:
                        stopped.set()
                        continue

                    sleep(0.0001)
                    # sleep(0.001)
                except BaseException as e:
                    errors.append(e)
                    stopped.set()
                    print(traceback.format_exc())
                    raise

        executor = ThreadPoolExecutor(max_workers=100)
        futures = []
        for _ in range(100):
            f = executor.submit(trigger_save_get_check)
            futures.append(f)

        # Wait for the threads to stop (at most ten seconds).
        stopped.wait(timeout=10)
        for f in futures:
            f.result()
        # print("Got all results, shutting down executor")
        executor.shutdown()

        try:
            if errors:
                raise errors[0]
            if len(obj_ids) > 1:
                self.fail(f"More than one instance used in the cache: {len(obj_ids)}")
            if len(successful_thread_ids) < 3:
                self.fail("Insufficient sharing across contentious threads")

            final_aggregate: Aggregate = app.repository.get(aggregate_id)
            # print("Final aggregate version:", final_aggregate.version)
            if final_aggregate.version < 25:
                self.fail(f"Insufficient version increment: {final_aggregate.version}")

            self.assertEqual(len(app.repository._fastforward_locks_inuse), 0)

        finally:
            # print("Closing application")
            app.close()

    def test_application_with_cached_aggregates_not_fastforward(self) -> None:
        app = Application[UUID](
            env={
                "AGGREGATE_CACHE_MAXSIZE": "10",
                "AGGREGATE_CACHE_FASTFORWARD": "f",
            }
        )
        aggregate1 = Aggregate()
        app.save(aggregate1)
        aggregate_id = aggregate1.id

        # Should put the aggregate in the cache.
        assert app.repository.cache is not None  # for mypy
        self.assertEqual(aggregate1, app.repository.cache.get(aggregate_id))
        app.repository.get(aggregate_id)
        self.assertEqual(aggregate1, app.repository.cache.get(aggregate_id))

        aggregate2 = Aggregate()
        aggregate2._id = aggregate_id
        aggregate2.trigger_event(Aggregate.Event)

        # This will replace object in cache.
        app.save(aggregate2)

        self.assertEqual(aggregate2.version, aggregate1.version + 1)
        aggregate3: Aggregate = app.repository.get(aggregate_id)
        self.assertEqual(aggregate3.version, aggregate3.version)
        self.assertEqual(id(aggregate3.version), id(aggregate3.version))

        # This will mess things up because the cache has a stale aggregate.
        aggregate3.trigger_event(Aggregate.Event)
        app.events.put(aggregate3.collect_events())

        # And so using the aggregate to record new events will cause an IntegrityError.
        aggregate4: Aggregate = app.repository.get(aggregate_id)
        aggregate4.trigger_event(Aggregate.Event)
        with self.assertRaises(IntegrityError):
            app.save(aggregate4)

    def test_application_with_deepcopy_from_cache_arg(self) -> None:
        app = Application[UUID](
            env={
                "AGGREGATE_CACHE_MAXSIZE": "10",
            }
        )
        aggregate = Aggregate()
        app.save(aggregate)
        self.assertEqual(aggregate.version, 1)
        reconstructed: Aggregate = app.repository.get(aggregate.id)
        reconstructed.version = 101
        assert app.repository.cache is not None  # for mypy
        self.assertEqual(app.repository.cache.get(aggregate.id).version, 1)
        cached: Aggregate = app.repository.get(aggregate.id, deepcopy_from_cache=False)
        cached.version = 101
        self.assertEqual(app.repository.cache.get(aggregate.id).version, 101)

    def test_application_with_deepcopy_from_cache_attribute(self) -> None:
        app = Application[UUID](
            env={
                "AGGREGATE_CACHE_MAXSIZE": "10",
            }
        )
        aggregate = Aggregate()
        app.save(aggregate)
        self.assertEqual(aggregate.version, 1)
        reconstructed: Aggregate = app.repository.get(aggregate.id)
        reconstructed.version = 101
        assert app.repository.cache is not None  # for mypy
        self.assertEqual(app.repository.cache.get(aggregate.id).version, 1)
        app.repository.deepcopy_from_cache = False
        cached: Aggregate = app.repository.get(aggregate.id)
        cached.version = 101
        self.assertEqual(app.repository.cache.get(aggregate.id).version, 101)

    def test_application_log(self) -> None:
        # Check the old 'log' attribute presents the 'notification log' object.
        app = Application[UUID]()

        # Verify deprecation warning.
        with warnings.catch_warnings(record=True) as w:
            self.assertIs(app.log, app.notification_log)

        self.assertEqual(1, len(w))
        self.assertIs(w[-1].category, DeprecationWarning)
        self.assertIn(
            "'log' is deprecated, use 'notification_log' instead", str(w[-1].message)
        )

eventsourcing/tests/domain.py
@@ -0,0 +1,105 @@
from __future__ import annotations

from dataclasses import dataclass
from decimal import Decimal
from typing import cast
from uuid import uuid4

from eventsourcing.domain import Aggregate, AggregateCreated, AggregateEvent


@dataclass(frozen=True)
class EmailAddress:
    address: str


class BankAccount(Aggregate):
    """Aggregate root for bank accounts."""

    def __init__(self, full_name: str, email_address: EmailAddress):
        self.full_name = full_name
        self.email_address = email_address
        self.balance = Decimal("0.00")
        self.overdraft_limit = Decimal("0.00")
        self.is_closed = False

    @classmethod
    def open(cls, full_name: str, email_address: str) -> BankAccount:
        """Creates new bank account object."""
        return cls._create(
            cls.Opened,
            id=uuid4(),
            full_name=full_name,
            email_address=EmailAddress(email_address),
        )

    class Opened(AggregateCreated):
        full_name: str
        email_address: str

    def append_transaction(self, amount: Decimal) -> None:
        """Appends given amount as transaction on account."""
        self.check_account_is_not_closed()
        self.check_has_sufficient_funds(amount)
        self.trigger_event(
            self.TransactionAppended,
            amount=amount,
        )

    def check_account_is_not_closed(self) -> None:
        if self.is_closed:
            raise AccountClosedError({"account_id": self.id})

    def check_has_sufficient_funds(self, amount: Decimal) -> None:
        if self.balance + amount < -self.overdraft_limit:
            raise InsufficientFundsError({"account_id": self.id})

    @dataclass(frozen=True)
    class TransactionAppended(AggregateEvent):
        """Domain event for when transaction
        is appended to bank account.
        """

        amount: Decimal

        def apply(self, aggregate: Aggregate) -> None:
            """Increments the account balance."""
            cast("BankAccount", aggregate).balance += self.amount

    def set_overdraft_limit(self, overdraft_limit: Decimal) -> None:
        """Sets the overdraft limit."""
        # Check the limit is not a negative value.
        assert overdraft_limit >= Decimal("0.00")
        self.check_account_is_not_closed()
        self.trigger_event(
            self.OverdraftLimitSet,
            overdraft_limit=overdraft_limit,
        )

    class OverdraftLimitSet(AggregateEvent):
        """Domain event for when overdraft
        limit is set.
        """

        overdraft_limit: Decimal

        def apply(self, aggregate: Aggregate) -> None:
            cast("BankAccount", aggregate).overdraft_limit = self.overdraft_limit

    def close(self) -> None:
        """Closes the bank account."""
        self.trigger_event(self.Closed)

    class Closed(AggregateEvent):
        """Domain event for when account is closed."""

        def apply(self, aggregate: Aggregate) -> None:
            cast("BankAccount", aggregate).is_closed = True


class AccountClosedError(Exception):
    """Raised when attempting to operate a closed account."""


class InsufficientFundsError(Exception):
    """Raised when attempting to go past overdraft limit."""
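
For orientation, the following is a minimal sketch (not part of the wheel contents above) of how the BankAccounts example application added in eventsourcing/tests/application.py might be exercised once the package is installed. It only uses names defined in the diff: the BankAccounts application, its open_account, credit_account, and get_balance methods, and the IS_SNAPSHOTTING_ENABLED environment setting shown in the test case.

    from decimal import Decimal

    from eventsourcing.tests.application import BankAccounts

    # Construct the example application with snapshotting enabled,
    # as the test case above does.
    app = BankAccounts(env={"IS_SNAPSHOTTING_ENABLED": "y"})

    # Open an account, credit it, and read the balance back.
    account_id = app.open_account(full_name="Alice", email_address="alice@example.com")
    app.credit_account(account_id, Decimal("10.00"))
    assert app.get_balance(account_id) == Decimal("10.00")

By default the application persists events with the plain Python ("popo") infrastructure; the PERSISTENCE_MODULE environment variable shown in test_resolve_persistence_topics selects another persistence module such as eventsourcing.postgres or eventsourcing.sqlite.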