eventsourcing 9.4.0a7__py3-none-any.whl → 9.4.0b1__py3-none-any.whl
This diff compares the contents of two package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- eventsourcing/application.py +22 -30
- eventsourcing/cipher.py +3 -1
- eventsourcing/dispatch.py +52 -11
- eventsourcing/domain.py +373 -360
- eventsourcing/interface.py +1 -1
- eventsourcing/persistence.py +26 -28
- eventsourcing/popo.py +5 -1
- eventsourcing/postgres.py +174 -127
- eventsourcing/projection.py +82 -26
- eventsourcing/sqlite.py +5 -1
- eventsourcing/system.py +14 -9
- eventsourcing/tests/application.py +57 -49
- eventsourcing/tests/domain.py +8 -6
- eventsourcing/tests/persistence.py +170 -143
- eventsourcing/tests/postgres_utils.py +12 -9
- eventsourcing/utils.py +27 -17
- {eventsourcing-9.4.0a7.dist-info → eventsourcing-9.4.0b1.dist-info}/METADATA +2 -2
- eventsourcing-9.4.0b1.dist-info/RECORD +26 -0
- eventsourcing-9.4.0a7.dist-info/RECORD +0 -26
- {eventsourcing-9.4.0a7.dist-info → eventsourcing-9.4.0b1.dist-info}/AUTHORS +0 -0
- {eventsourcing-9.4.0a7.dist-info → eventsourcing-9.4.0b1.dist-info}/LICENSE +0 -0
- {eventsourcing-9.4.0a7.dist-info → eventsourcing-9.4.0b1.dist-info}/WHEEL +0 -0
eventsourcing/tests/persistence.py:

@@ -10,10 +10,12 @@ from tempfile import NamedTemporaryFile
 from threading import Event, Thread, get_ident
 from time import sleep
 from timeit import timeit
-from typing import Any
+from typing import TYPE_CHECKING, Any, Generic, cast
 from unittest import TestCase
 from uuid import UUID, uuid4

+from typing_extensions import TypeVar
+
 from eventsourcing.cipher import AESCipher
 from eventsourcing.compressor import ZlibCompressor
 from eventsourcing.domain import DomainEvent
@@ -31,12 +33,18 @@ from eventsourcing.persistence import (
     StoredEvent,
     Tracking,
     TrackingRecorder,
+    Transcoder,
     Transcoding,
     UUIDAsHex,
     WaitInterruptedError,
 )
 from eventsourcing.utils import Environment, get_topic

+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from typing_extensions import Never
+

 class AggregateRecorderTestCase(TestCase, ABC):
     INITIAL_VERSION = 1
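A note on the import hunk above (editor's sketch, not part of the diff): the new `if TYPE_CHECKING:` block keeps typing-only names such as Iterator and Never out of the runtime import graph, since they are only needed for annotations. A minimal illustration of the pattern, with hypothetical names:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only imported by type checkers, never at runtime.
    from collections.abc import Iterator


def count_up(limit: int) -> Iterator[int]:
    # Postponed evaluation means the annotation is never resolved at runtime.
    yield from range(limit)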
@@ -207,12 +215,17 @@ class AggregateRecorderTestCase(TestCase, ABC):
         )


-class ApplicationRecorderTestCase(TestCase, ABC):
+_TApplicationRecorder = TypeVar(
+    "_TApplicationRecorder", bound=ApplicationRecorder, default=ApplicationRecorder
+)
+
+
+class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder]):
     INITIAL_VERSION = 1
     EXPECT_CONTIGUOUS_NOTIFICATION_IDS = True

     @abstractmethod
-    def create_recorder(self) ->
+    def create_recorder(self) -> _TApplicationRecorder:
         """"""

     def test_insert_select(self) -> None:
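The hunk above makes the recorder test base generic, using typing_extensions.TypeVar with a PEP 696 style default so unparameterized subclasses behave as before. A rough sketch of the same pattern, with made-up names (not the library's classes):

from typing import Generic

from typing_extensions import TypeVar


class Recorder:
    pass


class PostgresRecorder(Recorder):
    pass


# default= means bare RecorderTestCase behaves like RecorderTestCase[Recorder].
_TRecorder = TypeVar("_TRecorder", bound=Recorder, default=Recorder)


class RecorderTestCase(Generic[_TRecorder]):
    def create_recorder(self) -> _TRecorder:
        raise NotImplementedError


class PostgresRecorderTestCase(RecorderTestCase[PostgresRecorder]):
    def create_recorder(self) -> PostgresRecorder:
        # Type checkers now see create_recorder() as returning PostgresRecorder.
        return PostgresRecorder()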
@@ -535,24 +548,12 @@ class ApplicationRecorderTestCase(TestCase, ABC):

         errors_happened = Event()

-        counts = {}
-        threads: dict[int, int] = {}
-        durations: dict[int, float] = {}
-
         # Match this to the batch page size in postgres insert for max throughput.
-
-
-
+        num_events_per_job = 500
+        num_jobs = 60
+        num_workers = 4

         def insert_events() -> None:
-            thread_id = get_ident()
-            if thread_id not in threads:
-                threads[thread_id] = len(threads)
-            if thread_id not in counts:
-                counts[thread_id] = 0
-            if thread_id not in durations:
-                durations[thread_id] = 0
-
             originator_id = uuid4()
             stored_events = [
                 StoredEvent(
@@ -561,7 +562,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
                     topic="topic",
                     state=b"state",
                 )
-                for i in range(
+                for i in range(num_events_per_job)
             ]

             try:
@@ -571,26 +572,29 @@ class ApplicationRecorderTestCase(TestCase, ABC):
                 errors_happened.set()
                 tb = traceback.format_exc()
                 print(tb)
-            finally:
-                ended = datetime.now()
-                duration = (ended - started).total_seconds()
-                counts[thread_id] += 1
-                durations[thread_id] = duration

-
+        # Warm up.
+        with ThreadPoolExecutor(max_workers=num_workers) as executor:
+            futures = []
+            for _ in range(num_workers):
+                future = executor.submit(insert_events)
+                futures.append(future)
+            for future in futures:
+                future.result()

-
+        # Run.
+        with ThreadPoolExecutor(max_workers=num_workers) as executor:
+            started = datetime.now()
             futures = []
             for _ in range(num_jobs):
                 future = executor.submit(insert_events)
-                # future.add_done_callback(self.close_db_connection)
                 futures.append(future)
             for future in futures:
                 future.result()

         self.assertFalse(errors_happened.is_set(), "There were errors (see above)")
         ended = datetime.now()
-        rate = num_jobs *
+        rate = num_jobs * num_events_per_job / (ended - started).total_seconds()
         print(f"Rate: {rate:.0f} inserts per second")

     def optional_test_insert_subscribe(self) -> None:
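The reworked throughput test above replaces the per-thread counters with a two-phase shape: an untimed warm-up pass, then a timed run, so connection and pool setup is excluded from the measured rate. A simplified, self-contained sketch of that shape (the insert job is a placeholder, not the recorder code):

from concurrent.futures import ThreadPoolExecutor
from datetime import datetime

num_events_per_job = 500
num_jobs = 60
num_workers = 4


def insert_events() -> None:
    pass  # stand-in for inserting num_events_per_job stored events


# Warm up: one job per worker, not timed.
with ThreadPoolExecutor(max_workers=num_workers) as executor:
    for future in [executor.submit(insert_events) for _ in range(num_workers)]:
        future.result()

# Timed run.
with ThreadPoolExecutor(max_workers=num_workers) as executor:
    started = datetime.now()
    for future in [executor.submit(insert_events) for _ in range(num_jobs)]:
        future.result()

rate = num_jobs * num_events_per_job / (datetime.now() - started).total_seconds()
print(f"Rate: {rate:.0f} inserts per second")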
@@ -696,7 +700,7 @@
         with recorder.subscribe(gt=max_notification_id2) as subscription:

             # Receive events from the subscription.
-            notifications
+            notifications = []
             for notification in subscription:
                 notifications.append(notification)
                 if len(notifications) == 1:
@@ -752,10 +756,10 @@

 class TrackingRecorderTestCase(TestCase, ABC):
     @abstractmethod
-    def create_recorder(self) ->
+    def create_recorder(self) -> TrackingRecorder:
         """"""

-    def test_insert_tracking(self):
+    def test_insert_tracking(self) -> None:
         tracking_recorder = self.create_recorder()

         # Construct tracking objects.
@@ -787,10 +791,18 @@
         assert tracking_recorder.has_tracking_id("upstream2", 21)
         assert not tracking_recorder.has_tracking_id("upstream2", 22)

-    def test_wait(self):
+    def test_wait(self) -> None:
         tracking_recorder = self.create_recorder()
+
+        tracking_recorder.wait("upstream1", None)
+
+        with self.assertRaises(TimeoutError):
+            tracking_recorder.wait("upstream1", 21, timeout=0.1)
+
         tracking1 = Tracking(notification_id=21, application_name="upstream1")
         tracking_recorder.insert_tracking(tracking=tracking1)
+        tracking_recorder.wait("upstream1", None)
+        tracking_recorder.wait("upstream1", 10)
         tracking_recorder.wait("upstream1", 21)
         with self.assertRaises(TimeoutError):
             tracking_recorder.wait("upstream1", 22, timeout=0.1)
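The extended test_wait above pins down the waiting semantics: waiting for None returns immediately even before any tracking has been recorded, waiting for an id at or below the last recorded notification returns, and waiting for a higher id raises TimeoutError. A generic sketch of such a bounded wait built on a predicate (an illustration only, not the library's implementation):

import time
from typing import Callable


def wait_until(predicate: Callable[[], bool], timeout: float = 1.0, interval: float = 0.05) -> None:
    # Poll the predicate until it holds or the timeout elapses.
    deadline = time.monotonic() + timeout
    while not predicate():
        if time.monotonic() >= deadline:
            raise TimeoutError
        time.sleep(interval)


# Succeeds immediately because the predicate is already true.
wait_until(lambda: True, timeout=0.1)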
@@ -900,10 +912,11 @@ class ProcessRecorderTestCase(TestCase, ABC):
             2,
         )

-    def test_has_tracking_id(self):
+    def test_has_tracking_id(self) -> None:
         # Construct the recorder.
         recorder = self.create_recorder()

+        self.assertTrue(recorder.has_tracking_id("upstream_app", None))
         self.assertFalse(recorder.has_tracking_id("upstream_app", 1))
         self.assertFalse(recorder.has_tracking_id("upstream_app", 2))
         self.assertFalse(recorder.has_tracking_id("upstream_app", 3))
@@ -975,7 +988,7 @@
 class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
     insert_num = 1000

-    def test(self):
+    def test(self) -> None:
         recorder = self.create_recorder()

         max_notification_id = recorder.max_notification_id()
@@ -990,7 +1003,7 @@

         errors = []

-        def insert_stack(stack):
+        def insert_stack(stack: list[StoredEvent]) -> None:
             try:
                 race_started.wait()
                 recorder.insert_events(stack)
@@ -1036,7 +1049,7 @@
         else:
             self.assertGreater(min_id_for_sequence2, max_id_for_sequence1)

-    def create_stack(self, originator_id):
+    def create_stack(self, originator_id: UUID) -> list[StoredEvent]:
         return [
             StoredEvent(
                 originator_id=originator_id,
@@ -1052,45 +1065,52 @@
         pass


-class InfrastructureFactoryTestCase(ABC, TestCase):
-
+_TInfrastrutureFactory = TypeVar(
+    "_TInfrastrutureFactory", bound=InfrastructureFactory[Any]
+)
+
+
+class InfrastructureFactoryTestCase(ABC, TestCase, Generic[_TInfrastrutureFactory]):
+    env: Environment

     @abstractmethod
-    def expected_factory_class(self):
+    def expected_factory_class(self) -> type[_TInfrastrutureFactory]:
         pass

     @abstractmethod
-    def expected_aggregate_recorder_class(self):
+    def expected_aggregate_recorder_class(self) -> type[AggregateRecorder]:
         pass

     @abstractmethod
-    def expected_application_recorder_class(self):
+    def expected_application_recorder_class(self) -> type[ApplicationRecorder]:
         pass

     @abstractmethod
-    def expected_tracking_recorder_class(self):
+    def expected_tracking_recorder_class(self) -> type[TrackingRecorder]:
         pass

     @abstractmethod
-    def tracking_recorder_subclass(self):
+    def tracking_recorder_subclass(self) -> type[TrackingRecorder]:
         pass

     @abstractmethod
-    def expected_process_recorder_class(self):
+    def expected_process_recorder_class(self) -> type[ProcessRecorder]:
         pass

     def setUp(self) -> None:
-        self.factory =
+        self.factory = cast(
+            _TInfrastrutureFactory, InfrastructureFactory.construct(self.env)
+        )
         self.assertIsInstance(self.factory, self.expected_factory_class())
         self.transcoder = JSONTranscoder()
         self.transcoder.register(UUIDAsHex())
         self.transcoder.register(DecimalAsStr())
         self.transcoder.register(DatetimeAsISO())

-    def tearDown(self):
+    def tearDown(self) -> None:
         self.factory.close()

-    def test_createmapper(self):
+    def test_createmapper(self) -> None:
         # Want to construct:
         # - application recorder
         # - snapshot recorder
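In the setUp change above, the factory constructed from the environment is cast to the test case's type parameter. cast() does nothing at runtime; it only tells the type checker to treat the value as the narrower type, as this small illustration (with invented names) shows:

from typing import cast


def construct() -> object:
    # Imagine a registry that can only promise "some object".
    return {"a": 1}


# No runtime conversion or check happens here; only the static type changes.
mapping = cast(dict[str, int], construct())
print(mapping["a"] + 1)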
@@ -1126,7 +1146,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
         self.assertIsNone(mapper.cipher)
         self.assertIsNone(mapper.compressor)

-    def test_createmapper_with_compressor(self):
+    def test_createmapper_with_compressor(self) -> None:
         # Create mapper with compressor class as topic.
         self.env[self.factory.COMPRESSOR_TOPIC] = get_topic(ZlibCompressor)
         mapper = self.factory.mapper(transcoder=self.transcoder)
@@ -1141,7 +1161,7 @@
         self.assertEqual(mapper.compressor, zlib)
         self.assertIsNone(mapper.cipher)

-    def test_createmapper_with_cipher(self):
+    def test_createmapper_with_cipher(self) -> None:
         # Check cipher needs a key.
         self.env[self.factory.CIPHER_TOPIC] = get_topic(AESCipher)

@@ -1162,7 +1182,7 @@

     def test_createmapper_with_cipher_and_compressor(
         self,
-    ):
+    ) -> None:
         # Create mapper with cipher and compressor.
         self.env[self.factory.COMPRESSOR_TOPIC] = get_topic(ZlibCompressor)

@@ -1175,7 +1195,7 @@
         self.assertIsNotNone(mapper.cipher)
         self.assertIsNotNone(mapper.compressor)

-    def test_mapper_with_wrong_cipher_key(self):
+    def test_mapper_with_wrong_cipher_key(self) -> None:
         self.env.name = "App1"
         self.env[self.factory.CIPHER_TOPIC] = get_topic(AESCipher)
         cipher_key1 = AESCipher.create_key(16)
@@ -1205,7 +1225,7 @@
         with self.assertRaises(ValueError):
             mapper2.to_domain_event(stored_event)

-    def test_create_aggregate_recorder(self):
+    def test_create_aggregate_recorder(self) -> None:
         recorder = self.factory.aggregate_recorder()
         self.assertEqual(type(recorder), self.expected_aggregate_recorder_class())

@@ -1216,7 +1236,7 @@
         recorder = self.factory.aggregate_recorder()
         self.assertEqual(type(recorder), self.expected_aggregate_recorder_class())

-    def test_create_application_recorder(self):
+    def test_create_application_recorder(self) -> None:
         recorder = self.factory.application_recorder()
         self.assertEqual(type(recorder), self.expected_application_recorder_class())
         self.assertIsInstance(recorder, ApplicationRecorder)
@@ -1226,7 +1246,7 @@
         recorder = self.factory.application_recorder()
         self.assertEqual(type(recorder), self.expected_application_recorder_class())

-    def test_create_tracking_recorder(self):
+    def test_create_tracking_recorder(self) -> None:
         recorder = self.factory.tracking_recorder()
         self.assertEqual(type(recorder), self.expected_tracking_recorder_class())
         self.assertIsInstance(recorder, TrackingRecorder)
@@ -1246,7 +1266,7 @@
         recorder = self.factory.tracking_recorder()
         self.assertEqual(type(recorder), subclass)

-    def test_create_process_recorder(self):
+    def test_create_process_recorder(self) -> None:
         recorder = self.factory.process_recorder()
         self.assertEqual(type(recorder), self.expected_process_recorder_class())
         self.assertIsInstance(recorder, ProcessRecorder)
@@ -1257,7 +1277,7 @@
         self.assertEqual(type(recorder), self.expected_process_recorder_class())


-def tmpfile_uris():
+def tmpfile_uris() -> Iterator[str]:
     tmp_files = []
     ram_disk_path = "/Volumes/RAM DISK/"
     prefix = None
@@ -1273,7 +1293,7 @@ class CustomType1:
     def __init__(self, value: UUID):
         self.value = value

-    def __eq__(self, other:
+    def __eq__(self, other: object) -> bool:
         return type(self) is type(other) and self.__dict__ == other.__dict__


@@ -1281,39 +1301,46 @@ class CustomType2:
     def __init__(self, value: CustomType1):
         self.value = value

-    def __eq__(self, other:
+    def __eq__(self, other: object) -> bool:
         return type(self) is type(other) and self.__dict__ == other.__dict__


-
-
+_KT = TypeVar("_KT")
+_VT = TypeVar("_VT")
+
+
+class Mydict(dict[_KT, _VT]):
+    def __repr__(self) -> str:
         return f"{type(self).__name__}({super().__repr__()})"

-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return type(self) is type(other) and super().__eq__(other)


-
-
+_T = TypeVar("_T")
+
+
+class MyList(list[_T]):
+    def __repr__(self) -> str:
         return f"{type(self).__name__}({super().__repr__()})"

-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return type(self) is type(other) and super().__eq__(other)


 class MyStr(str):
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"{type(self).__name__}({super().__repr__()})"

-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return type(self) is type(other) and super().__eq__(other)


 class MyInt(int):
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"{type(self).__name__}({super().__repr__()})"

-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         return type(self) is type(other) and super().__eq__(other)


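The helper classes above now subclass the parameterized built-ins (dict[_KT, _VT], list[_T]) so the instances used in the transcoder tests can carry precise element types. The same pattern in isolation:

from typing import TypeVar

_KT = TypeVar("_KT")
_VT = TypeVar("_VT")


class Mydict(dict[_KT, _VT]):
    def __repr__(self) -> str:
        return f"{type(self).__name__}({super().__repr__()})"


# The subclass can now be annotated with concrete key and value types.
d: Mydict[str, int] = Mydict({"a": 1})
print(repr(d))  # Mydict({'a': 1})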
@@ -1349,10 +1376,10 @@ class TranscoderTestCase(TestCase):
     def setUp(self) -> None:
         self.transcoder = self.construct_transcoder()

-    def construct_transcoder(self):
+    def construct_transcoder(self) -> Transcoder:
         raise NotImplementedError

-    def test_str(self):
+    def test_str(self) -> None:
         obj = "a"
         data = self.transcoder.encode(obj)
         self.assertEqual(data, b'"a"')
@@ -1384,48 +1411,48 @@
             obj, self.transcoder.decode(legacy_encoding_with_ensure_ascii_true)
         )

-    def test_dict(self):
+    def test_dict(self) -> None:
         # Empty dict.
-
-        data = self.transcoder.encode(
+        obj1: dict[Never, Never] = {}
+        data = self.transcoder.encode(obj1)
         self.assertEqual(data, b"{}")
-        self.assertEqual(
+        self.assertEqual(obj1, self.transcoder.decode(data))

         # dict with single key.
-
-        data = self.transcoder.encode(
+        obj2 = {"a": 1}
+        data = self.transcoder.encode(obj2)
         self.assertEqual(data, b'{"a":1}')
-        self.assertEqual(
+        self.assertEqual(obj2, self.transcoder.decode(data))

         # dict with many keys.
-
-        data = self.transcoder.encode(
+        obj3 = {"a": 1, "b": 2}
+        data = self.transcoder.encode(obj3)
         self.assertEqual(data, b'{"a":1,"b":2}')
-        self.assertEqual(
+        self.assertEqual(obj3, self.transcoder.decode(data))

         # Empty dict in dict.
-
-        data = self.transcoder.encode(
+        obj4: dict[str, dict[Never, Never]] = {"a": {}}
+        data = self.transcoder.encode(obj4)
         self.assertEqual(data, b'{"a":{}}')
-        self.assertEqual(
+        self.assertEqual(obj4, self.transcoder.decode(data))

         # Empty dicts in dict.
-
-        data = self.transcoder.encode(
+        obj5: dict[str, dict[Never, Never]] = {"a": {}, "b": {}}
+        data = self.transcoder.encode(obj5)
         self.assertEqual(data, b'{"a":{},"b":{}}')
-        self.assertEqual(
+        self.assertEqual(obj5, self.transcoder.decode(data))

         # Empty dict in dict in dict.
-
-        data = self.transcoder.encode(
+        obj6: dict[str, dict[str, dict[Never, Never]]] = {"a": {"b": {}}}
+        data = self.transcoder.encode(obj6)
         self.assertEqual(data, b'{"a":{"b":{}}}')
-        self.assertEqual(
+        self.assertEqual(obj6, self.transcoder.decode(data))

         # Int in dict in dict in dict.
-
-        data = self.transcoder.encode(
+        obj7 = {"a": {"b": {"c": 1}}}
+        data = self.transcoder.encode(obj7)
         self.assertEqual(data, b'{"a":{"b":{"c":1}}}')
-        self.assertEqual(
+        self.assertEqual(obj7, self.transcoder.decode(data))

         # TODO: Int keys?
         # obj = {1: "a"}
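In the rewritten test_dict above, empty literals are bound to named variables annotated with Never, the empty type, which is the natural parameter for a container that provably holds nothing; binding them first also lets the decoded value be compared against the original object. A short illustration (Never is taken from typing_extensions here for compatibility with older Pythons):

from typing_extensions import Never

# An empty dict has no keys and no values, so Never fits both parameters.
empty: dict[Never, Never] = {}
nested: dict[str, dict[Never, Never]] = {"a": {}}
no_items: list[Never] = []
print(empty, nested, no_items)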
@@ -1433,115 +1460,115 @@ class TranscoderTestCase(TestCase):
         # self.assertEqual(data, b'{1:{"a"}')
         # self.assertEqual(obj, self.transcoder.decode(data))

-    def test_dict_with_len_2_and__data_(self):
+    def test_dict_with_len_2_and__data_(self) -> None:
         obj = {"_data_": 1, "something_else": 2}
         data = self.transcoder.encode(obj)
         self.assertEqual(obj, self.transcoder.decode(data))

-    def test_dict_with_len_2_and__type_(self):
+    def test_dict_with_len_2_and__type_(self) -> None:
         obj = {"_type_": 1, "something_else": 2}
         data = self.transcoder.encode(obj)
         self.assertEqual(obj, self.transcoder.decode(data))

-    def test_dict_subclass(self):
+    def test_dict_subclass(self) -> None:
         my_dict = Mydict({"a": 1})
         data = self.transcoder.encode(my_dict)
         self.assertEqual(b'{"_type_":"mydict","_data_":{"a":1}}', data)
         copy = self.transcoder.decode(data)
         self.assertEqual(my_dict, copy)

-    def test_list_subclass(self):
+    def test_list_subclass(self) -> None:
         my_list = MyList((("a", 1),))
         data = self.transcoder.encode(my_list)
         copy = self.transcoder.decode(data)
         self.assertEqual(my_list, copy)

-    def test_str_subclass(self):
+    def test_str_subclass(self) -> None:
         my_str = MyStr("a")
         data = self.transcoder.encode(my_str)
         copy = self.transcoder.decode(data)
         self.assertEqual(my_str, copy)

-    def test_int_subclass(self):
+    def test_int_subclass(self) -> None:
         my_int = MyInt(3)
         data = self.transcoder.encode(my_int)
         copy = self.transcoder.decode(data)
         self.assertEqual(my_int, copy)

-    def test_tuple(self):
+    def test_tuple(self) -> None:
         # Empty tuple.
-
-        data = self.transcoder.encode(
+        obj1 = ()
+        data = self.transcoder.encode(obj1)
         self.assertEqual(data, b'{"_type_":"tuple_as_list","_data_":[]}')
-        self.assertEqual(
+        self.assertEqual(obj1, self.transcoder.decode(data))

         # Empty tuple in a tuple.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj2 = ((),)
+        data = self.transcoder.encode(obj2)
+        self.assertEqual(obj2, self.transcoder.decode(data))

         # Int in tuple in a tuple.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj3 = ((1, 2),)
+        data = self.transcoder.encode(obj3)
+        self.assertEqual(obj3, self.transcoder.decode(data))

         # Str in tuple in a tuple.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj4 = (("a", "b"),)
+        data = self.transcoder.encode(obj4)
+        self.assertEqual(obj4, self.transcoder.decode(data))

         # Int and str in tuple in a tuple.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj5 = ((1, "a"),)
+        data = self.transcoder.encode(obj5)
+        self.assertEqual(obj5, self.transcoder.decode(data))

-    def test_list(self):
+    def test_list(self) -> None:
         # Empty list.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj1: list[Never] = []
+        data = self.transcoder.encode(obj1)
+        self.assertEqual(obj1, self.transcoder.decode(data))

         # Empty list in a list.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj2: list[list[Never]] = [[]]
+        data = self.transcoder.encode(obj2)
+        self.assertEqual(obj2, self.transcoder.decode(data))

         # Int in list in a list.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj3 = [[1, 2]]
+        data = self.transcoder.encode(obj3)
+        self.assertEqual(obj3, self.transcoder.decode(data))

         # Str in list in a list.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj4 = [["a", "b"]]
+        data = self.transcoder.encode(obj4)
+        self.assertEqual(obj4, self.transcoder.decode(data))

         # Int and str in list in a list.
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj5 = [[1, "a"]]
+        data = self.transcoder.encode(obj5)
+        self.assertEqual(obj5, self.transcoder.decode(data))

-    def test_mixed(self):
-
-        data = self.transcoder.encode(
-        self.assertEqual(
+    def test_mixed(self) -> None:
+        obj1 = [(1, "a"), {"b": 2}]
+        data = self.transcoder.encode(obj1)
+        self.assertEqual(obj1, self.transcoder.decode(data))

-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj2 = ([1, "a"], {"b": 2})
+        data = self.transcoder.encode(obj2)
+        self.assertEqual(obj2, self.transcoder.decode(data))

-
-        data = self.transcoder.encode(
-        self.assertEqual(
+        obj3 = {"a": (1, 2), "b": [3, 4]}
+        data = self.transcoder.encode(obj3)
+        self.assertEqual(obj3, self.transcoder.decode(data))

-    def test_custom_type_in_dict(self):
+    def test_custom_type_in_dict(self) -> None:
         # Int in dict in dict in dict.
         obj = {"a": CustomType2(CustomType1(UUID("b2723fe2c01a40d2875ea3aac6a09ff5")))}
         data = self.transcoder.encode(obj)
         decoded_obj = self.transcoder.decode(data)
         self.assertEqual(obj, decoded_obj)

-    def test_nested_custom_type(self):
+    def test_nested_custom_type(self) -> None:
         obj = CustomType2(CustomType1(UUID("b2723fe2c01a40d2875ea3aac6a09ff5")))
         data = self.transcoder.encode(obj)
         expect = (
@@ -1557,7 +1584,7 @@ class TranscoderTestCase(TestCase):
         self.assertIsInstance(copy.value.value, UUID)
         self.assertEqual(copy.value.value, obj.value.value)

-    def test_custom_type_error(self):
+    def test_custom_type_error(self) -> None:
         # Expect a TypeError when encoding because transcoding not registered.
         with self.assertRaises(TypeError) as cm:
             self.transcoder.encode(MyClass())