eventsourcing 9.2.22__py3-none-any.whl → 9.3.0a1__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Potentially problematic release. This version of eventsourcing might be problematic.
- eventsourcing/__init__.py +1 -1
- eventsourcing/application.py +106 -135
- eventsourcing/cipher.py +15 -12
- eventsourcing/dispatch.py +31 -91
- eventsourcing/domain.py +138 -143
- eventsourcing/examples/__init__.py +0 -0
- eventsourcing/examples/aggregate1/__init__.py +0 -0
- eventsourcing/examples/aggregate1/application.py +27 -0
- eventsourcing/examples/aggregate1/domainmodel.py +16 -0
- eventsourcing/examples/aggregate1/test_application.py +37 -0
- eventsourcing/examples/aggregate2/__init__.py +0 -0
- eventsourcing/examples/aggregate2/application.py +27 -0
- eventsourcing/examples/aggregate2/domainmodel.py +22 -0
- eventsourcing/examples/aggregate2/test_application.py +37 -0
- eventsourcing/examples/aggregate3/__init__.py +0 -0
- eventsourcing/examples/aggregate3/application.py +27 -0
- eventsourcing/examples/aggregate3/domainmodel.py +38 -0
- eventsourcing/examples/aggregate3/test_application.py +37 -0
- eventsourcing/examples/aggregate4/__init__.py +0 -0
- eventsourcing/examples/aggregate4/application.py +27 -0
- eventsourcing/examples/aggregate4/domainmodel.py +128 -0
- eventsourcing/examples/aggregate4/test_application.py +38 -0
- eventsourcing/examples/aggregate5/__init__.py +0 -0
- eventsourcing/examples/aggregate5/application.py +27 -0
- eventsourcing/examples/aggregate5/domainmodel.py +131 -0
- eventsourcing/examples/aggregate5/test_application.py +38 -0
- eventsourcing/examples/aggregate6/__init__.py +0 -0
- eventsourcing/examples/aggregate6/application.py +30 -0
- eventsourcing/examples/aggregate6/domainmodel.py +123 -0
- eventsourcing/examples/aggregate6/test_application.py +38 -0
- eventsourcing/examples/aggregate6a/__init__.py +0 -0
- eventsourcing/examples/aggregate6a/application.py +40 -0
- eventsourcing/examples/aggregate6a/domainmodel.py +149 -0
- eventsourcing/examples/aggregate6a/test_application.py +45 -0
- eventsourcing/examples/aggregate7/__init__.py +0 -0
- eventsourcing/examples/aggregate7/application.py +48 -0
- eventsourcing/examples/aggregate7/domainmodel.py +144 -0
- eventsourcing/examples/aggregate7/persistence.py +57 -0
- eventsourcing/examples/aggregate7/test_application.py +38 -0
- eventsourcing/examples/aggregate7/test_compression_and_encryption.py +45 -0
- eventsourcing/examples/aggregate7/test_snapshotting_intervals.py +67 -0
- eventsourcing/examples/aggregate7a/__init__.py +0 -0
- eventsourcing/examples/aggregate7a/application.py +56 -0
- eventsourcing/examples/aggregate7a/domainmodel.py +170 -0
- eventsourcing/examples/aggregate7a/test_application.py +46 -0
- eventsourcing/examples/aggregate7a/test_compression_and_encryption.py +45 -0
- eventsourcing/examples/aggregate8/__init__.py +0 -0
- eventsourcing/examples/aggregate8/application.py +47 -0
- eventsourcing/examples/aggregate8/domainmodel.py +65 -0
- eventsourcing/examples/aggregate8/persistence.py +57 -0
- eventsourcing/examples/aggregate8/test_application.py +37 -0
- eventsourcing/examples/aggregate8/test_compression_and_encryption.py +44 -0
- eventsourcing/examples/aggregate8/test_snapshotting_intervals.py +38 -0
- eventsourcing/examples/bankaccounts/__init__.py +0 -0
- eventsourcing/examples/bankaccounts/application.py +70 -0
- eventsourcing/examples/bankaccounts/domainmodel.py +56 -0
- eventsourcing/examples/bankaccounts/test.py +173 -0
- eventsourcing/examples/cargoshipping/__init__.py +0 -0
- eventsourcing/examples/cargoshipping/application.py +126 -0
- eventsourcing/examples/cargoshipping/domainmodel.py +330 -0
- eventsourcing/examples/cargoshipping/interface.py +143 -0
- eventsourcing/examples/cargoshipping/test.py +231 -0
- eventsourcing/examples/contentmanagement/__init__.py +0 -0
- eventsourcing/examples/contentmanagement/application.py +118 -0
- eventsourcing/examples/contentmanagement/domainmodel.py +69 -0
- eventsourcing/examples/contentmanagement/test.py +180 -0
- eventsourcing/examples/contentmanagement/utils.py +26 -0
- eventsourcing/examples/contentmanagementsystem/__init__.py +0 -0
- eventsourcing/examples/contentmanagementsystem/application.py +54 -0
- eventsourcing/examples/contentmanagementsystem/postgres.py +17 -0
- eventsourcing/examples/contentmanagementsystem/sqlite.py +17 -0
- eventsourcing/examples/contentmanagementsystem/system.py +14 -0
- eventsourcing/examples/contentmanagementsystem/test_system.py +174 -0
- eventsourcing/examples/searchablecontent/__init__.py +0 -0
- eventsourcing/examples/searchablecontent/application.py +45 -0
- eventsourcing/examples/searchablecontent/persistence.py +23 -0
- eventsourcing/examples/searchablecontent/postgres.py +118 -0
- eventsourcing/examples/searchablecontent/sqlite.py +136 -0
- eventsourcing/examples/searchablecontent/test_application.py +111 -0
- eventsourcing/examples/searchablecontent/test_recorder.py +69 -0
- eventsourcing/examples/searchabletimestamps/__init__.py +0 -0
- eventsourcing/examples/searchabletimestamps/application.py +32 -0
- eventsourcing/examples/searchabletimestamps/persistence.py +20 -0
- eventsourcing/examples/searchabletimestamps/postgres.py +110 -0
- eventsourcing/examples/searchabletimestamps/sqlite.py +99 -0
- eventsourcing/examples/searchabletimestamps/test_searchabletimestamps.py +91 -0
- eventsourcing/examples/test_invoice.py +176 -0
- eventsourcing/examples/test_parking_lot.py +206 -0
- eventsourcing/interface.py +2 -2
- eventsourcing/persistence.py +85 -81
- eventsourcing/popo.py +30 -31
- eventsourcing/postgres.py +361 -578
- eventsourcing/sqlite.py +91 -99
- eventsourcing/system.py +42 -57
- eventsourcing/tests/application.py +20 -32
- eventsourcing/tests/application_tests/__init__.py +0 -0
- eventsourcing/tests/application_tests/test_application_with_automatic_snapshotting.py +55 -0
- eventsourcing/tests/application_tests/test_application_with_popo.py +22 -0
- eventsourcing/tests/application_tests/test_application_with_postgres.py +75 -0
- eventsourcing/tests/application_tests/test_application_with_sqlite.py +72 -0
- eventsourcing/tests/application_tests/test_cache.py +134 -0
- eventsourcing/tests/application_tests/test_event_sourced_log.py +162 -0
- eventsourcing/tests/application_tests/test_notificationlog.py +232 -0
- eventsourcing/tests/application_tests/test_notificationlogreader.py +126 -0
- eventsourcing/tests/application_tests/test_processapplication.py +110 -0
- eventsourcing/tests/application_tests/test_processingpolicy.py +109 -0
- eventsourcing/tests/application_tests/test_repository.py +504 -0
- eventsourcing/tests/application_tests/test_snapshotting.py +68 -0
- eventsourcing/tests/application_tests/test_upcasting.py +459 -0
- eventsourcing/tests/docs_tests/__init__.py +0 -0
- eventsourcing/tests/docs_tests/test_docs.py +293 -0
- eventsourcing/tests/domain.py +1 -1
- eventsourcing/tests/domain_tests/__init__.py +0 -0
- eventsourcing/tests/domain_tests/test_aggregate.py +1159 -0
- eventsourcing/tests/domain_tests/test_aggregate_decorators.py +1604 -0
- eventsourcing/tests/domain_tests/test_domainevent.py +80 -0
- eventsourcing/tests/interface_tests/__init__.py +0 -0
- eventsourcing/tests/interface_tests/test_remotenotificationlog.py +258 -0
- eventsourcing/tests/persistence.py +49 -50
- eventsourcing/tests/persistence_tests/__init__.py +0 -0
- eventsourcing/tests/persistence_tests/test_aes.py +93 -0
- eventsourcing/tests/persistence_tests/test_connection_pool.py +722 -0
- eventsourcing/tests/persistence_tests/test_eventstore.py +72 -0
- eventsourcing/tests/persistence_tests/test_infrastructure_factory.py +21 -0
- eventsourcing/tests/persistence_tests/test_mapper.py +113 -0
- eventsourcing/tests/persistence_tests/test_noninterleaving_notification_ids.py +69 -0
- eventsourcing/tests/persistence_tests/test_popo.py +124 -0
- eventsourcing/tests/persistence_tests/test_postgres.py +1121 -0
- eventsourcing/tests/persistence_tests/test_sqlite.py +348 -0
- eventsourcing/tests/persistence_tests/test_transcoder.py +44 -0
- eventsourcing/tests/postgres_utils.py +7 -7
- eventsourcing/tests/system_tests/__init__.py +0 -0
- eventsourcing/tests/system_tests/test_runner.py +935 -0
- eventsourcing/tests/system_tests/test_system.py +287 -0
- eventsourcing/tests/utils_tests/__init__.py +0 -0
- eventsourcing/tests/utils_tests/test_utils.py +226 -0
- eventsourcing/utils.py +47 -50
- {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0a1.dist-info}/METADATA +28 -80
- eventsourcing-9.3.0a1.dist-info/RECORD +144 -0
- {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0a1.dist-info}/WHEEL +1 -2
- eventsourcing-9.2.22.dist-info/AUTHORS +0 -10
- eventsourcing-9.2.22.dist-info/RECORD +0 -25
- eventsourcing-9.2.22.dist-info/top_level.txt +0 -1
- {eventsourcing-9.2.22.dist-info → eventsourcing-9.3.0a1.dist-info}/LICENSE +0 -0
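Most of the additions are the new `eventsourcing/examples` packages (aggregate1 through aggregate8, bankaccounts, cargoshipping, contentmanagement, searchablecontent, searchabletimestamps) and a test suite reorganised into sub-packages. Those examples exercise the library's documented `Aggregate`/`Application` style; as a quick orientation, a minimal sketch of that style is shown below. The class and method names here (`Dog`, `DogSchool`, `register_dog`, `add_trick`) are illustrative only and are not copied from the packaged example modules. The diffs that follow show a representative subset of the new and changed test modules.

```python
# Illustrative sketch of the documented Aggregate/Application style
# exercised by the new example packages (names are illustrative).
from eventsourcing.application import Application
from eventsourcing.domain import Aggregate, event


class Dog(Aggregate):
    @event("Registered")
    def __init__(self, name):
        self.name = name
        self.tricks = []

    @event("TrickAdded")
    def add_trick(self, trick):
        self.tricks.append(trick)


class DogSchool(Application):
    def register_dog(self, name):
        dog = Dog(name)
        self.save(dog)  # store the aggregate's new events
        return dog.id

    def add_trick(self, dog_id, trick):
        dog = self.repository.get(dog_id)  # reconstruct from stored events
        dog.add_trick(trick)
        self.save(dog)
```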
eventsourcing/tests/domain_tests/test_domainevent.py (new file):

```diff
@@ -0,0 +1,80 @@
+from dataclasses import _DataclassParams
+from datetime import datetime, timezone
+from time import sleep
+from unittest.case import TestCase
+from uuid import UUID, uuid4
+
+from eventsourcing.domain import DomainEvent, MetaDomainEvent
+
+
+class TestMetaDomainEvent(TestCase):
+    def test_class_instance_defined_as_frozen_dataclass(self):
+        class A(metaclass=MetaDomainEvent):
+            pass
+
+        self.assertIsInstance(A, type)
+        self.assertTrue("__dataclass_params__" in A.__dict__)
+        self.assertIsInstance(A.__dataclass_params__, _DataclassParams)
+        self.assertTrue(A.__dataclass_params__.frozen)
+
+
+class TestDomainEvent(TestCase):
+    def test_domain_event_class_is_a_meta_domain_event(self):
+        self.assertIsInstance(DomainEvent, MetaDomainEvent)
+
+    def test_create_timestamp(self):
+        before = datetime.now(tz=timezone.utc)
+        sleep(1e-5)
+        timestamp = DomainEvent.create_timestamp()
+        sleep(1e-5)
+        after = datetime.now(tz=timezone.utc)
+        self.assertGreater(timestamp, before)
+        self.assertGreater(after, timestamp)
+
+    def test_domain_event_instance(self):
+        originator_id = uuid4()
+        originator_version = 101
+        timestamp = DomainEvent.create_timestamp()
+        a = DomainEvent(
+            originator_id=originator_id,
+            originator_version=originator_version,
+            timestamp=timestamp,
+        )
+        self.assertEqual(a.originator_id, originator_id)
+        self.assertEqual(a.originator_version, originator_version)
+        self.assertEqual(a.timestamp, timestamp)
+
+    def test_examples(self):
+        # Define an 'account opened' domain event.
+        class AccountOpened(DomainEvent):
+            full_name: str
+
+        # Create an 'account opened' event.
+        event3 = AccountOpened(
+            originator_id=uuid4(),
+            originator_version=0,
+            timestamp=AccountOpened.create_timestamp(),
+            full_name="Alice",
+        )
+
+        self.assertEqual(event3.full_name, "Alice")
+        assert isinstance(event3.originator_id, UUID)
+        self.assertEqual(event3.originator_version, 0)
+
+        # Define a 'full name updated' domain event.
+        class FullNameUpdated(DomainEvent):
+            full_name: str
+            timestamp: datetime
+
+        # Create a 'full name updated' domain event.
+        event4 = FullNameUpdated(
+            originator_id=event3.originator_id,
+            originator_version=1,
+            timestamp=FullNameUpdated.create_timestamp(),
+            full_name="Bob",
+        )
+
+        # Check the attribute values of the domain event.
+        self.assertEqual(event4.full_name, "Bob")
+        assert isinstance(event4.originator_id, UUID)
+        self.assertEqual(event4.originator_version, 1)
```
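The first test above asserts that `MetaDomainEvent` defines event classes as frozen dataclasses. A minimal sketch of what that means in practice, assuming 9.3.0a1 keeps the frozen-dataclass behaviour the test asserts (the `AccountOpened` class mirrors the one defined in the test):

```python
# Sketch: DomainEvent subclasses behave as frozen dataclasses, so instances are immutable.
import dataclasses
from uuid import uuid4

from eventsourcing.domain import DomainEvent


class AccountOpened(DomainEvent):
    full_name: str


event = AccountOpened(
    originator_id=uuid4(),
    originator_version=0,
    timestamp=AccountOpened.create_timestamp(),
    full_name="Alice",
)

try:
    event.full_name = "Bob"  # attribute assignment is rejected
except dataclasses.FrozenInstanceError:
    print("DomainEvent instances are immutable")
```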
eventsourcing/tests/interface_tests/__init__.py: file without changes.
eventsourcing/tests/interface_tests/test_remotenotificationlog.py (new file):

```diff
@@ -0,0 +1,258 @@
+from __future__ import annotations
+
+import json
+import threading
+from abc import abstractmethod
+from http.client import HTTPConnection
+from http.server import BaseHTTPRequestHandler, HTTPServer
+from threading import Event, Thread
+from typing import Callable, ClassVar, List, Sequence
+from unittest.case import TestCase
+from uuid import UUID
+
+from eventsourcing.interface import (
+    NotificationLogInterface,
+    NotificationLogJSONClient,
+    NotificationLogJSONService,
+)
+from eventsourcing.tests.application import BankAccounts
+
+
+class TestRemoteNotificationLog(TestCase):
+    def test_directly(self):
+        client = BankAccountsJSONClient(BankAccountsJSONService(BankAccounts()))
+        account_id1 = client.open_account("Alice", "alice@example.com")
+        account_id2 = client.open_account("Bob", "bob@example.com")
+
+        # Get the "first" section of log.
+        section = client.log["1,10"]
+        self.assertEqual(len(section.items), 2)
+        self.assertEqual(section.items[0].originator_id, account_id1)
+        self.assertEqual(section.items[1].originator_id, account_id2)
+
+        # Get notifications start 1, limit 10.
+        notifications = client.log.select(start=1, limit=10)
+        self.assertEqual(len(notifications), 2)
+        self.assertEqual(notifications[0].originator_id, account_id1)
+        self.assertEqual(notifications[1].originator_id, account_id2)
+
+    def test_with_http(self):
+        server_address = ("127.0.0.1", 8080)
+
+        server = HTTPApplicationServer(
+            address=server_address,
+            handler=BankAccountsHTTPHandler,
+        )
+        server.start()
+        if not server.is_running.wait(timeout=5):
+            server.stop()
+            self.fail("Unable to start HTTPApplicationServer")
+
+        try:
+            client = BankAccountsJSONClient(
+                BankAccountsHTTPClient(server_address=server_address)
+            )
+
+            account_id1 = client.open_account("Alice", "alice@example.com")
+            account_id2 = client.open_account("Bob", "bob@example.com")
+
+            # Get the "first" section of log.
+            section = client.log["1,10"]
+            self.assertEqual(len(section.items), 2)
+            self.assertEqual(section.items[0].originator_id, account_id1)
+            self.assertEqual(section.items[1].originator_id, account_id2)
+
+            # Get notifications start 1, limit 10.
+            notifications = client.log.select(1, 10)
+            self.assertEqual(len(notifications), 2)
+            self.assertEqual(notifications[0].originator_id, account_id1)
+            self.assertEqual(notifications[1].originator_id, account_id2)
+        finally:
+            server.stop()
+
+    def test_with_http_and_threads(self):
+        server_address = ("127.0.0.1", 8081)
+
+        server = HTTPApplicationServer(
+            address=server_address,
+            handler=BankAccountsHTTPHandler,
+        )
+        server.start()
+        if not server.is_running.wait(timeout=5):
+            server.stop()
+            self.fail("Unable to start HTTPApplicationServer")
+
+        try:
+            self.has_errors = False
+
+            def open_account():
+                client = BankAccountsJSONClient(
+                    BankAccountsHTTPClient(server_address=server_address)
+                )
+                try:
+                    for _ in range(30):
+                        client.open_account("Alice", "alice@example.com")
+                        # print(threading.get_ident(), account_id1)
+                except Exception as e:
+                    print(threading.get_ident(), "error:", e)
+                    self.has_errors = True
+                    raise
+
+            thread1 = Thread(target=open_account)
+            thread1.start()
+            thread2 = Thread(target=open_account)
+            thread2.start()
+
+            thread1.join()
+            thread2.join()
+
+            self.assertFalse(self.has_errors)
+
+            # Check the notification log.
+            client = BankAccountsJSONClient(
+                BankAccountsHTTPClient(server_address=server_address)
+            )
+            self.assertEqual(len(client.log["1,10"].items), 10)
+            self.assertEqual(len(client.log["11,20"].items), 10)
+            self.assertEqual(len(client.log["21,30"].items), 10)
+            self.assertEqual(len(client.log["31,40"].items), 10)
+            self.assertEqual(len(client.log["41,50"].items), 10)
+            self.assertEqual(len(client.log["51,60"].items), 10)
+            self.assertEqual(len(client.log["61,70"].items), 0)
+
+            self.assertEqual(len(client.log.select(start=1, limit=10)), 10)
+            self.assertEqual(len(client.log.select(start=11, limit=10)), 10)
+            self.assertEqual(len(client.log.select(start=21, limit=10)), 10)
+            self.assertEqual(len(client.log.select(start=31, limit=10)), 10)
+            self.assertEqual(len(client.log.select(start=41, limit=10)), 10)
+            self.assertEqual(len(client.log.select(start=51, limit=10)), 10)
+            self.assertEqual(len(client.log.select(start=61, limit=10)), 0)
+
+        finally:
+            server.stop()
+
+
+class BankAccountsInterface(NotificationLogInterface):
+    @abstractmethod
+    def open_account(self, body: str) -> str:
+        pass
+
+
+class BankAccountsJSONService(
+    BankAccountsInterface,
+    NotificationLogJSONService[BankAccounts],
+):
+    def open_account(self, request: str) -> str:
+        kwargs = json.loads(request)
+        account_id = self.app.open_account(**kwargs)
+        return json.dumps({"account_id": account_id.hex})
+
+
+class BankAccountsJSONClient:
+    def __init__(self, interface: BankAccountsInterface):
+        self.interface = interface
+        self.log = NotificationLogJSONClient(interface)
+
+    def open_account(self, full_name, email_address) -> UUID:
+        body = json.dumps(
+            {
+                "full_name": full_name,
+                "email_address": email_address,
+            }
+        )
+        body = self.interface.open_account(body)
+        return UUID(json.loads(body)["account_id"])
+
+
+class HTTPApplicationServer(Thread):
+    prepare: ClassVar[List[Callable]] = []
+
+    def __init__(self, address, handler):
+        super().__init__(daemon=True)
+        self.server = HTTPServer(
+            server_address=address,
+            RequestHandlerClass=handler,
+        )
+        self.is_running = Event()
+
+    def run(self):
+        [f() for f in self.prepare]
+        self.is_running.set()
+        self.server.serve_forever()
+
+    def stop(self):
+        self.server.shutdown()
+        self.join()
+
+    @classmethod
+    def before_first_request(cls, f):
+        HTTPApplicationServer.prepare.append(f)
+        return f
+
+
+class BankAccountsHTTPHandler(BaseHTTPRequestHandler):
+    def do_PUT(self):  # noqa: N802
+        if self.path.startswith("/accounts/"):
+            length = int(self.headers["Content-Length"])
+            request_msg = self.rfile.read(length).decode("utf8")
+            body = bank_accounts_service.open_account(request_msg)
+            status = 201
+        else:
+            body = "Not found: " + self.path
+            status = 404
+        self.send(body, status)
+
+    def do_GET(self):  # noqa: N802
+        if self.path.startswith("/notifications/"):
+            section_id = self.path.split("/")[-1]
+            body = bank_accounts_service.get_log_section(section_id)
+            status = 200
+        elif self.path.startswith("/notifications"):
+            args = self.path.split("?")[-1].split("&")
+            args = [p.split("=") for p in args]
+            args = {p[0]: p[1] for p in args}
+            start = int(args["start"])
+            limit = int(args["limit"])
+
+            body = bank_accounts_service.get_notifications(start=start, limit=limit)
+            status = 200
+        else:
+            body = "Not found: " + self.path
+            status = 404
+        self.send(body, status)
+
+    def send(self, body: str, status: int):
+        self.send_response(status)
+        self.send_header("Content-type", "text/html")
+        self.end_headers()
+        self.wfile.write(body.encode("utf8"))
+
+
+class BankAccountsHTTPClient(BankAccountsInterface):
+    def __init__(self, server_address):
+        self.connection = HTTPConnection(*server_address)
+
+    def get_log_section(self, section_id: str) -> str:
+        return self._request("GET", f"/notifications/{section_id}")
+
+    def get_notifications(
+        self, start: int, limit: int, topics: Sequence[str] = ()
+    ) -> str:
+        return self._request("GET", f"/notifications?start={start}&limit={limit}")
+
+    def open_account(self, body: str) -> str:
+        return self._request("PUT", "/accounts/", body.encode("utf8"))
+
+    def _request(self, method, url, body=None):
+        self.connection.request(method, url, body)
+        response = self.connection.getresponse()
+        return response.read().decode()
+
+
+bank_accounts_service: BankAccountsInterface
+
+
+@HTTPApplicationServer.before_first_request
+def init_bank_accounts() -> None:
+    global bank_accounts_service  # noqa: PLW0603
+    bank_accounts_service = BankAccountsJSONService(BankAccounts())
```
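This new test module exercises the same `BankAccountsJSONService`/`BankAccountsJSONClient` pair both in-process and over HTTP. For orientation, the in-process round trip mirrors `test_directly` above (values are illustrative, and the sketch assumes it runs in that module's namespace where the two classes are defined):

```python
# Orientation sketch, mirroring test_directly() in the module above.
from eventsourcing.tests.application import BankAccounts

service = BankAccountsJSONService(BankAccounts())  # JSON service wrapping the application
client = BankAccountsJSONClient(service)           # client speaking JSON to the interface

account_id = client.open_account("Alice", "alice@example.com")
section = client.log["1,10"]                       # first section of the notification log
assert section.items[0].originator_id == account_id
```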
eventsourcing/tests/persistence.py (modified):

```diff
@@ -10,7 +10,7 @@ from tempfile import NamedTemporaryFile
 from threading import Event, Thread, get_ident
 from time import sleep
 from timeit import timeit
-from typing import Any, Dict, List
+from typing import Any, Dict, List
 from unittest import TestCase
 from uuid import UUID, uuid4
 
@@ -70,12 +70,13 @@ class AggregateRecorderTestCase(TestCase, ABC):
         # Select stored events, expect list of one.
         stored_events = recorder.select_events(originator_id1)
         self.assertEqual(len(stored_events), 1)
-
-
-
+        self.assertEqual(stored_events[0].originator_id, originator_id1)
+        self.assertEqual(stored_events[0].originator_version, self.INITIAL_VERSION)
+        self.assertEqual(stored_events[0].topic, "topic1")
+        self.assertEqual(stored_events[0].state, b"state1")
+        self.assertIsInstance(stored_events[0].state, bytes)
 
         # Check get record conflict error if attempt to store it again.
-        stored_events = recorder.select_events(originator_id1)
         with self.assertRaises(IntegrityError):
             recorder.insert_events([stored_event1])
 
@@ -95,9 +96,11 @@ class AggregateRecorderTestCase(TestCase, ABC):
         # Check still only have one record.
         stored_events = recorder.select_events(originator_id1)
         self.assertEqual(len(stored_events), 1)
-
-
-
+        self.assertEqual(stored_events[0].originator_id, stored_event1.originator_id)
+        self.assertEqual(
+            stored_events[0].originator_version, stored_event1.originator_version
+        )
+        self.assertEqual(stored_events[0].topic, stored_event1.topic)
 
         # Check can write two events together.
         stored_event3 = StoredEvent(
@@ -112,18 +115,18 @@
         # Check we got what was written.
         stored_events = recorder.select_events(originator_id1)
         self.assertEqual(len(stored_events), 3)
-
-
-
+        self.assertEqual(stored_events[0].originator_id, originator_id1)
+        self.assertEqual(stored_events[0].originator_version, self.INITIAL_VERSION)
+        self.assertEqual(stored_events[0].topic, "topic1")
         self.assertEqual(stored_events[0].state, b"state1")
-
-
-
-
-
-
-
-
+        self.assertEqual(stored_events[1].originator_id, originator_id1)
+        self.assertEqual(stored_events[1].originator_version, self.INITIAL_VERSION + 1)
+        self.assertEqual(stored_events[1].topic, "topic2")
+        self.assertEqual(stored_events[1].state, b"state2")
+        self.assertEqual(stored_events[2].originator_id, originator_id1)
+        self.assertEqual(stored_events[2].originator_version, self.INITIAL_VERSION + 2)
+        self.assertEqual(stored_events[2].topic, "topic3")
+        self.assertEqual(stored_events[2].state, b"state3")
 
         # Check we can get the last one recorded (used to get last snapshot).
         stored_events = recorder.select_events(originator_id1, desc=True, limit=1)
@@ -248,21 +251,19 @@ class ApplicationRecorderTestCase(TestCase, ABC):
             topic="topic2",
             state=b"state2",
         )
-        stored_event3 = StoredEvent(
-            originator_id=originator_id2,
-            originator_version=self.INITIAL_VERSION,
-            topic="topic3",
-            state=b"state3",
-        )
-
-        notification_ids = recorder.insert_events([])
-        self.assertEqual(notification_ids, [])
 
         notification_ids = recorder.insert_events([stored_event1, stored_event2])
         self.assertEqual(
             notification_ids, [max_notification_id + 1, max_notification_id + 2]
         )
 
+        # Store a third event.
+        stored_event3 = StoredEvent(
+            originator_id=originator_id2,
+            originator_version=self.INITIAL_VERSION,
+            topic="topic3",
+            state=b"state3",
+        )
         notification_ids = recorder.insert_events([stored_event3])
         self.assertEqual(notification_ids, [max_notification_id + 3])
 
@@ -273,6 +274,10 @@ class ApplicationRecorderTestCase(TestCase, ABC):
         self.assertEqual(len(stored_events1), 2)
         self.assertEqual(len(stored_events2), 1)
 
+        # Check get record conflict error if attempt to store it again.
+        with self.assertRaises(IntegrityError):
+            recorder.insert_events([stored_event3])
+
         sleep(1)  # Added to make eventsourcing-axon tests work, perhaps not necessary.
         notifications = recorder.select_notifications(max_notification_id + 1, 3)
         self.assertEqual(len(notifications), 3)
@@ -497,7 +502,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
         durations: Dict[int, float] = {}
 
         # Match this to the batch page size in postgres insert for max throughput.
-
+        num_events = 500
 
         started = datetime.now()
 
@@ -518,7 +523,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
                 topic="topic",
                 state=b"state",
             )
-            for i in range(
+            for i in range(num_events)
         ]
 
         try:
@@ -534,11 +539,11 @@ class ApplicationRecorderTestCase(TestCase, ABC):
                 counts[thread_id] += 1
                 durations[thread_id] = duration
 
-
+        num_jobs = 60
 
         with ThreadPoolExecutor(max_workers=4) as executor:
             futures = []
-            for _ in range(
+            for _ in range(num_jobs):
                 future = executor.submit(insert_events)
                 # future.add_done_callback(self.close_db_connection)
                 futures.append(future)
@@ -547,7 +552,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
 
         self.assertFalse(errors_happened.is_set(), "There were errors (see above)")
         ended = datetime.now()
-        rate =
+        rate = num_jobs * num_events / (ended - started).total_seconds()
         print(f"Rate: {rate:.0f} inserts per second")
 
     def close_db_connection(self, *args: Any) -> None:
@@ -808,7 +813,7 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
 
 
 class InfrastructureFactoryTestCase(ABC, TestCase):
-    env:
+    env: Environment | None = None
 
     @abstractmethod
     def expected_factory_class(self):
@@ -1074,7 +1079,7 @@ class TranscoderTestCase(TestCase):
         self.transcoder = self.construct_transcoder()
 
     def construct_transcoder(self):
-        raise NotImplementedError
+        raise NotImplementedError
 
     def test_str(self):
         obj = "a"
@@ -1151,7 +1156,7 @@ class TranscoderTestCase(TestCase):
         self.assertEqual(data, b'{"a":{"b":{"c":1}}}')
         self.assertEqual(obj, self.transcoder.decode(data))
 
-        #
+        # TODO: Int keys?
         # obj = {1: "a"}
         # data = self.transcoder.encode(obj)
        # self.assertEqual(data, b'{1:{"a"}')
@@ -1288,11 +1293,9 @@ class TranscoderTestCase(TestCase):
 
         self.assertEqual(
             cm.exception.args[0],
-
-
-
-            "and register a custom transcoding for this type."
-            ),
+            "Object of type <class 'eventsourcing.tests.persistence."
+            "MyClass'> is not serializable. Please define "
+            "and register a custom transcoding for this type.",
         )
 
         # Expect a TypeError when encoding because transcoding not registered (nested).
@@ -1301,11 +1304,9 @@ class TranscoderTestCase(TestCase):
 
         self.assertEqual(
             cm.exception.args[0],
-
-
-
-            "and register a custom transcoding for this type."
-            ),
+            "Object of type <class 'eventsourcing.tests.persistence."
+            "MyClass'> is not serializable. Please define "
+            "and register a custom transcoding for this type.",
         )
 
         # Check we get a TypeError when decoding because transcodings aren't registered.
@@ -1316,8 +1317,6 @@ class TranscoderTestCase(TestCase):
 
         self.assertEqual(
             cm.exception.args[0],
-
-
-            "deserializable. Please register a custom transcoding for this type."
-            ),
+            "Data serialized with name 'custom_type3_as_dict' is not "
+            "deserializable. Please register a custom transcoding for this type.",
         )
```
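For reference, the restored throughput calculation multiplies jobs by events per job: with the values above, `num_jobs = 60` and `num_events = 500` give 30,000 inserts in total, so a run that took, say, 10 seconds would print a rate of 3,000 inserts per second (the 10-second figure is only an illustration, not a measured result).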
eventsourcing/tests/persistence_tests/__init__.py: file without changes.
eventsourcing/tests/persistence_tests/test_aes.py (new file):

```diff
@@ -0,0 +1,93 @@
+from base64 import b64encode
+from unittest.case import TestCase
+
+from eventsourcing.cipher import AESCipher
+from eventsourcing.utils import Environment
+
+
+class TestAESCipher(TestCase):
+    def test_createkey(self):
+        environment = Environment()
+
+        # Valid key lengths.
+        key = AESCipher.create_key(16)
+        environment["CIPHER_KEY"] = key
+        AESCipher(environment)
+
+        key = AESCipher.create_key(24)
+        environment["CIPHER_KEY"] = key
+        AESCipher(environment)
+
+        key = AESCipher.create_key(32)
+        environment["CIPHER_KEY"] = key
+        AESCipher(environment)
+
+        # Non-valid key lengths (on generate key).
+        with self.assertRaises(ValueError):
+            AESCipher.create_key(12)
+
+        with self.assertRaises(ValueError):
+            AESCipher.create_key(20)
+
+        with self.assertRaises(ValueError):
+            AESCipher.create_key(28)
+
+        with self.assertRaises(ValueError):
+            AESCipher.create_key(36)
+
+        # Non-valid key lengths (on construction).
+        def create_key(num_bytes):
+            return b64encode(AESCipher.random_bytes(num_bytes)).decode("utf8")
+
+        key = create_key(12)
+        environment["CIPHER_KEY"] = key
+        with self.assertRaises(ValueError):
+            AESCipher(environment)
+
+        key = create_key(20)
+        environment["CIPHER_KEY"] = key
+        with self.assertRaises(ValueError):
+            AESCipher(environment)
+
+        key = create_key(28)
+        environment["CIPHER_KEY"] = key
+        with self.assertRaises(ValueError):
+            AESCipher(environment)
+
+        key = create_key(36)
+        environment["CIPHER_KEY"] = key
+        with self.assertRaises(ValueError):
+            AESCipher(environment)
+
+    def test_encrypt_and_decrypt(self):
+        environment = Environment()
+
+        key = AESCipher.create_key(16)
+        environment["CIPHER_KEY"] = key
+
+        # Check plain text can be encrypted and recovered.
+        plain_text = b"some text"
+        cipher = AESCipher(environment)
+        cipher_text = cipher.encrypt(plain_text)
+        cipher = AESCipher(environment)
+        recovered_text = cipher.decrypt(cipher_text)
+        self.assertEqual(recovered_text, plain_text)
+
+        # Check raises on invalid nonce.
+        with self.assertRaises(ValueError):
+            cipher.decrypt(cipher_text[:10])
+
+        # Check raises on invalid tag.
+        with self.assertRaises(ValueError):
+            cipher.decrypt(cipher_text[:20])
+
+        # Check raises on invalid data.
+        with self.assertRaises(ValueError):
+            cipher.decrypt(cipher_text[:30])
+
+        # Check raises on invalid key.
+        key = AESCipher.create_key(16)
+        environment["CIPHER_KEY"] = key
+        cipher = AESCipher(environment)
+        with self.assertRaises(ValueError):
+            cipher.decrypt(cipher_text)
```