eventsourcing-9.3.5-py3-none-any.whl → eventsourcing-9.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eventsourcing/__init__.py +0 -1
- eventsourcing/application.py +115 -173
- eventsourcing/cipher.py +9 -10
- eventsourcing/compressor.py +2 -6
- eventsourcing/cryptography.py +91 -0
- eventsourcing/dispatch.py +52 -11
- eventsourcing/domain.py +733 -690
- eventsourcing/interface.py +39 -32
- eventsourcing/persistence.py +412 -287
- eventsourcing/popo.py +136 -44
- eventsourcing/postgres.py +404 -187
- eventsourcing/projection.py +428 -0
- eventsourcing/sqlite.py +167 -55
- eventsourcing/system.py +230 -341
- eventsourcing/tests/__init__.py +3 -0
- eventsourcing/tests/application.py +195 -129
- eventsourcing/tests/domain.py +19 -37
- eventsourcing/tests/persistence.py +533 -235
- eventsourcing/tests/postgres_utils.py +12 -9
- eventsourcing/utils.py +39 -47
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0.dist-info}/LICENSE +1 -1
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0.dist-info}/METADATA +14 -13
- eventsourcing-9.4.0.dist-info/RECORD +26 -0
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0.dist-info}/WHEEL +1 -1
- eventsourcing-9.3.5.dist-info/RECORD +0 -24
- {eventsourcing-9.3.5.dist-info → eventsourcing-9.4.0.dist-info}/AUTHORS +0 -0
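The diff below is for eventsourcing/tests/persistence.py, which is updated for the 9.4.0 recorder API: select_notifications() is now called with keyword arguments (start, stop, limit, topics, inclusive_of_start), max_notification_id() and max_tracking_id() return None when nothing has been recorded, and new test cases cover the TrackingRecorder and ApplicationRecorder.subscribe(). The following is a minimal sketch (not part of the diff) of those call shapes; using POPOApplicationRecorder as the concrete in-memory recorder is an assumption based on the test expectations visible in the diff, not on 9.4.0 documentation.

```python
# Hedged sketch of the 9.4.0 ApplicationRecorder call shapes exercised by the
# updated test case below. Assumption: the in-memory POPOApplicationRecorder
# satisfies the generic ApplicationRecorderTestCase and accepts these arguments.
from uuid import uuid4

from eventsourcing.persistence import StoredEvent
from eventsourcing.popo import POPOApplicationRecorder

recorder = POPOApplicationRecorder()

# max_notification_id() returns None while nothing has been recorded.
assert recorder.max_notification_id() is None

stored_event = StoredEvent(
    originator_id=uuid4(),
    originator_version=1,
    topic="topic1",
    state=b"state1",
)
notification_ids = recorder.insert_events([stored_event])
assert list(notification_ids) == [1]

# select_notifications() now takes keyword arguments, accepts start=None,
# an optional (inclusive) stop bound, and an exclusive-start flag.
assert len(recorder.select_notifications(start=None, limit=10)) == 1
assert len(recorder.select_notifications(start=None, stop=1, limit=10)) == 1
assert (
    len(recorder.select_notifications(start=1, limit=10, inclusive_of_start=False))
    == 0
)
```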
|
@@ -1,22 +1,23 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
-
import os
|
|
4
3
|
import traceback
|
|
5
4
|
import zlib
|
|
6
5
|
from abc import ABC, abstractmethod
|
|
7
6
|
from concurrent.futures import ThreadPoolExecutor
|
|
8
|
-
from
|
|
7
|
+
from pathlib import Path
|
|
9
8
|
from tempfile import NamedTemporaryFile
|
|
10
9
|
from threading import Event, Thread, get_ident
|
|
11
10
|
from time import sleep
|
|
12
11
|
from timeit import timeit
|
|
13
|
-
from typing import Any,
|
|
12
|
+
from typing import TYPE_CHECKING, Any, Generic, cast
|
|
14
13
|
from unittest import TestCase
|
|
15
14
|
from uuid import UUID, uuid4
|
|
16
15
|
|
|
16
|
+
from typing_extensions import TypeVar
|
|
17
|
+
|
|
17
18
|
from eventsourcing.cipher import AESCipher
|
|
18
19
|
from eventsourcing.compressor import ZlibCompressor
|
|
19
|
-
from eventsourcing.domain import DomainEvent
|
|
20
|
+
from eventsourcing.domain import DomainEvent, datetime_now_with_tzinfo
|
|
20
21
|
from eventsourcing.persistence import (
|
|
21
22
|
AggregateRecorder,
|
|
22
23
|
ApplicationRecorder,
|
|
@@ -26,14 +27,23 @@ from eventsourcing.persistence import (
|
|
|
26
27
|
IntegrityError,
|
|
27
28
|
JSONTranscoder,
|
|
28
29
|
Mapper,
|
|
30
|
+
Notification,
|
|
29
31
|
ProcessRecorder,
|
|
30
32
|
StoredEvent,
|
|
31
33
|
Tracking,
|
|
34
|
+
TrackingRecorder,
|
|
35
|
+
Transcoder,
|
|
32
36
|
Transcoding,
|
|
33
37
|
UUIDAsHex,
|
|
38
|
+
WaitInterruptedError,
|
|
34
39
|
)
|
|
35
40
|
from eventsourcing.utils import Environment, get_topic
|
|
36
41
|
|
|
42
|
+
if TYPE_CHECKING:
|
|
43
|
+
from collections.abc import Iterator
|
|
44
|
+
|
|
45
|
+
from typing_extensions import Never
|
|
46
|
+
|
|
37
47
|
|
|
38
48
|
class AggregateRecorderTestCase(TestCase, ABC):
|
|
39
49
|
INITIAL_VERSION = 1
|
|
@@ -204,37 +214,32 @@ class AggregateRecorderTestCase(TestCase, ABC):
|
|
|
204
214
|
)
|
|
205
215
|
|
|
206
216
|
|
|
207
|
-
|
|
217
|
+
_TApplicationRecorder = TypeVar(
|
|
218
|
+
"_TApplicationRecorder", bound=ApplicationRecorder, default=ApplicationRecorder
|
|
219
|
+
)
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder]):
|
|
208
223
|
INITIAL_VERSION = 1
|
|
224
|
+
EXPECT_CONTIGUOUS_NOTIFICATION_IDS = True
|
|
209
225
|
|
|
210
226
|
@abstractmethod
|
|
211
|
-
def create_recorder(self) ->
|
|
227
|
+
def create_recorder(self) -> _TApplicationRecorder:
|
|
212
228
|
""""""
|
|
213
229
|
|
|
214
230
|
def test_insert_select(self) -> None:
|
|
215
231
|
# Construct the recorder.
|
|
216
232
|
recorder = self.create_recorder()
|
|
217
233
|
|
|
218
|
-
max_notification_id = recorder.max_notification_id()
|
|
219
|
-
|
|
220
234
|
# Check notifications methods work when there aren't any.
|
|
235
|
+
self.assertEqual(len(recorder.select_notifications(start=None, limit=3)), 0)
|
|
221
236
|
self.assertEqual(
|
|
222
|
-
recorder.
|
|
223
|
-
max_notification_id,
|
|
224
|
-
)
|
|
225
|
-
self.assertEqual(
|
|
226
|
-
len(recorder.select_notifications(max_notification_id + 1, 3)),
|
|
227
|
-
0,
|
|
228
|
-
)
|
|
229
|
-
self.assertEqual(
|
|
230
|
-
len(
|
|
231
|
-
recorder.select_notifications(
|
|
232
|
-
max_notification_id + 1, 3, topics=["topic1"]
|
|
233
|
-
)
|
|
234
|
-
),
|
|
237
|
+
len(recorder.select_notifications(start=None, limit=3, topics=["topic1"])),
|
|
235
238
|
0,
|
|
236
239
|
)
|
|
237
240
|
|
|
241
|
+
self.assertIsNone(recorder.max_notification_id())
|
|
242
|
+
|
|
238
243
|
# Write two stored events.
|
|
239
244
|
originator_id1 = uuid4()
|
|
240
245
|
originator_id2 = uuid4()
|
|
@@ -253,9 +258,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
253
258
|
)
|
|
254
259
|
|
|
255
260
|
notification_ids = recorder.insert_events([stored_event1, stored_event2])
|
|
256
|
-
self.assertEqual(
|
|
257
|
-
notification_ids, [max_notification_id + 1, max_notification_id + 2]
|
|
258
|
-
)
|
|
261
|
+
self.assertEqual(notification_ids, [1, 2])
|
|
259
262
|
|
|
260
263
|
# Store a third event.
|
|
261
264
|
stored_event3 = StoredEvent(
|
|
@@ -265,7 +268,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
265
268
|
state=b"state3",
|
|
266
269
|
)
|
|
267
270
|
notification_ids = recorder.insert_events([stored_event3])
|
|
268
|
-
self.assertEqual(notification_ids, [
|
|
271
|
+
self.assertEqual(notification_ids, [3])
|
|
269
272
|
|
|
270
273
|
stored_events1 = recorder.select_events(originator_id1)
|
|
271
274
|
stored_events2 = recorder.select_events(originator_id2)
|
|
@@ -279,111 +282,158 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
279
282
|
recorder.insert_events([stored_event3])
|
|
280
283
|
|
|
281
284
|
sleep(1) # Added to make eventsourcing-axon tests work, perhaps not necessary.
|
|
282
|
-
notifications = recorder.select_notifications(
|
|
285
|
+
notifications = recorder.select_notifications(start=None, limit=10)
|
|
283
286
|
self.assertEqual(len(notifications), 3)
|
|
284
|
-
self.assertEqual(notifications[0].id,
|
|
287
|
+
self.assertEqual(notifications[0].id, 1)
|
|
285
288
|
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
286
289
|
self.assertEqual(notifications[0].topic, "topic1")
|
|
287
290
|
self.assertEqual(notifications[0].state, b"state1")
|
|
288
|
-
self.assertEqual(notifications[1].id,
|
|
291
|
+
self.assertEqual(notifications[1].id, 2)
|
|
289
292
|
self.assertEqual(notifications[1].originator_id, originator_id1)
|
|
290
293
|
self.assertEqual(notifications[1].topic, "topic2")
|
|
291
294
|
self.assertEqual(notifications[1].state, b"state2")
|
|
292
|
-
self.assertEqual(notifications[2].id,
|
|
295
|
+
self.assertEqual(notifications[2].id, 3)
|
|
293
296
|
self.assertEqual(notifications[2].originator_id, originator_id2)
|
|
294
297
|
self.assertEqual(notifications[2].topic, "topic3")
|
|
295
298
|
self.assertEqual(notifications[2].state, b"state3")
|
|
296
299
|
|
|
297
|
-
notifications = recorder.select_notifications(
|
|
298
|
-
max_notification_id + 1, 3, topics=["topic1", "topic2", "topic3"]
|
|
299
|
-
)
|
|
300
|
+
notifications = recorder.select_notifications(start=1, limit=10)
|
|
300
301
|
self.assertEqual(len(notifications), 3)
|
|
301
|
-
self.assertEqual(notifications[0].id,
|
|
302
|
+
self.assertEqual(notifications[0].id, 1)
|
|
302
303
|
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
303
304
|
self.assertEqual(notifications[0].topic, "topic1")
|
|
304
305
|
self.assertEqual(notifications[0].state, b"state1")
|
|
305
|
-
self.assertEqual(notifications[1].id,
|
|
306
|
+
self.assertEqual(notifications[1].id, 2)
|
|
306
307
|
self.assertEqual(notifications[1].originator_id, originator_id1)
|
|
307
308
|
self.assertEqual(notifications[1].topic, "topic2")
|
|
308
309
|
self.assertEqual(notifications[1].state, b"state2")
|
|
309
|
-
self.assertEqual(notifications[2].id,
|
|
310
|
+
self.assertEqual(notifications[2].id, 3)
|
|
310
311
|
self.assertEqual(notifications[2].originator_id, originator_id2)
|
|
311
312
|
self.assertEqual(notifications[2].topic, "topic3")
|
|
312
313
|
self.assertEqual(notifications[2].state, b"state3")
|
|
313
314
|
|
|
314
|
-
notifications = recorder.select_notifications(
|
|
315
|
-
|
|
316
|
-
)
|
|
317
|
-
self.assertEqual(len(notifications), 1)
|
|
318
|
-
self.assertEqual(notifications[0].id, max_notification_id + 1)
|
|
315
|
+
notifications = recorder.select_notifications(start=None, stop=2, limit=10)
|
|
316
|
+
self.assertEqual(len(notifications), 2)
|
|
317
|
+
self.assertEqual(notifications[0].id, 1)
|
|
319
318
|
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
320
319
|
self.assertEqual(notifications[0].topic, "topic1")
|
|
321
320
|
self.assertEqual(notifications[0].state, b"state1")
|
|
321
|
+
self.assertEqual(notifications[1].id, 2)
|
|
322
|
+
self.assertEqual(notifications[1].originator_id, originator_id1)
|
|
323
|
+
self.assertEqual(notifications[1].topic, "topic2")
|
|
324
|
+
self.assertEqual(notifications[1].state, b"state2")
|
|
322
325
|
|
|
323
326
|
notifications = recorder.select_notifications(
|
|
324
|
-
|
|
327
|
+
start=1, limit=10, inclusive_of_start=False
|
|
325
328
|
)
|
|
326
|
-
self.assertEqual(len(notifications),
|
|
327
|
-
self.assertEqual(notifications[0].id,
|
|
329
|
+
self.assertEqual(len(notifications), 2)
|
|
330
|
+
self.assertEqual(notifications[0].id, 2)
|
|
328
331
|
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
329
332
|
self.assertEqual(notifications[0].topic, "topic2")
|
|
330
333
|
self.assertEqual(notifications[0].state, b"state2")
|
|
334
|
+
self.assertEqual(notifications[1].id, 3)
|
|
335
|
+
self.assertEqual(notifications[1].originator_id, originator_id2)
|
|
336
|
+
self.assertEqual(notifications[1].topic, "topic3")
|
|
337
|
+
self.assertEqual(notifications[1].state, b"state3")
|
|
331
338
|
|
|
332
339
|
notifications = recorder.select_notifications(
|
|
333
|
-
|
|
340
|
+
start=2, limit=10, inclusive_of_start=False
|
|
334
341
|
)
|
|
335
342
|
self.assertEqual(len(notifications), 1)
|
|
336
|
-
self.assertEqual(notifications[0].id,
|
|
343
|
+
self.assertEqual(notifications[0].id, 3)
|
|
337
344
|
self.assertEqual(notifications[0].originator_id, originator_id2)
|
|
338
345
|
self.assertEqual(notifications[0].topic, "topic3")
|
|
339
346
|
self.assertEqual(notifications[0].state, b"state3")
|
|
340
347
|
|
|
341
348
|
notifications = recorder.select_notifications(
|
|
342
|
-
|
|
349
|
+
start=None, limit=10, topics=["topic1", "topic2", "topic3"]
|
|
343
350
|
)
|
|
351
|
+
self.assertEqual(len(notifications), 3)
|
|
352
|
+
self.assertEqual(notifications[0].id, 1)
|
|
353
|
+
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
354
|
+
self.assertEqual(notifications[0].topic, "topic1")
|
|
355
|
+
self.assertEqual(notifications[0].state, b"state1")
|
|
356
|
+
self.assertEqual(notifications[1].id, 2)
|
|
357
|
+
self.assertEqual(notifications[1].originator_id, originator_id1)
|
|
358
|
+
self.assertEqual(notifications[1].topic, "topic2")
|
|
359
|
+
self.assertEqual(notifications[1].state, b"state2")
|
|
360
|
+
self.assertEqual(notifications[2].id, 3)
|
|
361
|
+
self.assertEqual(notifications[2].originator_id, originator_id2)
|
|
362
|
+
self.assertEqual(notifications[2].topic, "topic3")
|
|
363
|
+
self.assertEqual(notifications[2].state, b"state3")
|
|
364
|
+
|
|
365
|
+
notifications = recorder.select_notifications(1, 10, topics=["topic1"])
|
|
366
|
+
self.assertEqual(len(notifications), 1)
|
|
367
|
+
self.assertEqual(notifications[0].id, 1)
|
|
368
|
+
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
369
|
+
self.assertEqual(notifications[0].topic, "topic1")
|
|
370
|
+
self.assertEqual(notifications[0].state, b"state1")
|
|
371
|
+
|
|
372
|
+
notifications = recorder.select_notifications(1, 3, topics=["topic2"])
|
|
373
|
+
self.assertEqual(len(notifications), 1)
|
|
374
|
+
self.assertEqual(notifications[0].id, 2)
|
|
375
|
+
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
376
|
+
self.assertEqual(notifications[0].topic, "topic2")
|
|
377
|
+
self.assertEqual(notifications[0].state, b"state2")
|
|
378
|
+
|
|
379
|
+
notifications = recorder.select_notifications(1, 3, topics=["topic3"])
|
|
380
|
+
self.assertEqual(len(notifications), 1)
|
|
381
|
+
self.assertEqual(notifications[0].id, 3)
|
|
382
|
+
self.assertEqual(notifications[0].originator_id, originator_id2)
|
|
383
|
+
self.assertEqual(notifications[0].topic, "topic3")
|
|
384
|
+
self.assertEqual(notifications[0].state, b"state3")
|
|
385
|
+
|
|
386
|
+
notifications = recorder.select_notifications(1, 3, topics=["topic1", "topic3"])
|
|
344
387
|
self.assertEqual(len(notifications), 2)
|
|
345
|
-
self.assertEqual(notifications[0].id,
|
|
388
|
+
self.assertEqual(notifications[0].id, 1)
|
|
346
389
|
self.assertEqual(notifications[0].originator_id, originator_id1)
|
|
347
390
|
self.assertEqual(notifications[0].topic, "topic1")
|
|
348
391
|
self.assertEqual(notifications[0].state, b"state1")
|
|
349
|
-
self.assertEqual(notifications[1].id,
|
|
392
|
+
self.assertEqual(notifications[1].id, 3)
|
|
350
393
|
self.assertEqual(notifications[1].topic, "topic3")
|
|
351
394
|
self.assertEqual(notifications[1].state, b"state3")
|
|
352
395
|
|
|
353
|
-
self.assertEqual(
|
|
354
|
-
recorder.max_notification_id(),
|
|
355
|
-
max_notification_id + 3,
|
|
356
|
-
)
|
|
396
|
+
self.assertEqual(recorder.max_notification_id(), 3)
|
|
357
397
|
|
|
358
|
-
|
|
398
|
+
# Check limit is working
|
|
399
|
+
notifications = recorder.select_notifications(None, 1)
|
|
359
400
|
self.assertEqual(len(notifications), 1)
|
|
360
|
-
self.assertEqual(notifications[0].id,
|
|
401
|
+
self.assertEqual(notifications[0].id, 1)
|
|
361
402
|
|
|
362
|
-
notifications = recorder.select_notifications(
|
|
403
|
+
notifications = recorder.select_notifications(2, 1)
|
|
363
404
|
self.assertEqual(len(notifications), 1)
|
|
364
|
-
self.assertEqual(notifications[0].id,
|
|
405
|
+
self.assertEqual(notifications[0].id, 2)
|
|
365
406
|
|
|
366
|
-
notifications = recorder.select_notifications(
|
|
407
|
+
notifications = recorder.select_notifications(1, 1, inclusive_of_start=False)
|
|
408
|
+
self.assertEqual(len(notifications), 1)
|
|
409
|
+
self.assertEqual(notifications[0].id, 2)
|
|
410
|
+
|
|
411
|
+
notifications = recorder.select_notifications(2, 2)
|
|
367
412
|
self.assertEqual(len(notifications), 2)
|
|
368
|
-
self.assertEqual(notifications[0].id,
|
|
369
|
-
self.assertEqual(notifications[1].id,
|
|
413
|
+
self.assertEqual(notifications[0].id, 2)
|
|
414
|
+
self.assertEqual(notifications[1].id, 3)
|
|
370
415
|
|
|
371
|
-
notifications = recorder.select_notifications(
|
|
416
|
+
notifications = recorder.select_notifications(3, 1)
|
|
372
417
|
self.assertEqual(len(notifications), 1)
|
|
373
|
-
self.assertEqual(notifications[0].id,
|
|
418
|
+
self.assertEqual(notifications[0].id, 3)
|
|
374
419
|
|
|
375
|
-
notifications = recorder.select_notifications(
|
|
376
|
-
|
|
377
|
-
|
|
420
|
+
notifications = recorder.select_notifications(3, 1, inclusive_of_start=False)
|
|
421
|
+
self.assertEqual(len(notifications), 0)
|
|
422
|
+
|
|
423
|
+
notifications = recorder.select_notifications(start=2, limit=10, stop=2)
|
|
378
424
|
self.assertEqual(len(notifications), 1)
|
|
379
|
-
self.assertEqual(notifications[0].id,
|
|
425
|
+
self.assertEqual(notifications[0].id, 2)
|
|
426
|
+
|
|
427
|
+
notifications = recorder.select_notifications(start=1, limit=10, stop=2)
|
|
428
|
+
self.assertEqual(len(notifications), 2, len(notifications))
|
|
429
|
+
self.assertEqual(notifications[0].id, 1)
|
|
430
|
+
self.assertEqual(notifications[1].id, 2)
|
|
380
431
|
|
|
381
432
|
notifications = recorder.select_notifications(
|
|
382
|
-
start=
|
|
433
|
+
start=1, limit=10, stop=2, inclusive_of_start=False
|
|
383
434
|
)
|
|
384
|
-
self.assertEqual(len(notifications),
|
|
385
|
-
self.assertEqual(notifications[0].id,
|
|
386
|
-
self.assertEqual(notifications[1].id, max_notification_id + 2)
|
|
435
|
+
self.assertEqual(len(notifications), 1, len(notifications))
|
|
436
|
+
self.assertEqual(notifications[0].id, 2)
|
|
387
437
|
|
|
388
438
|
def test_concurrent_no_conflicts(self) -> None:
|
|
389
439
|
print(self)
|
|
@@ -391,17 +441,17 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
391
441
|
recorder = self.create_recorder()
|
|
392
442
|
|
|
393
443
|
errors_happened = Event()
|
|
394
|
-
errors:
|
|
444
|
+
errors: list[Exception] = []
|
|
395
445
|
|
|
396
446
|
counts = {}
|
|
397
|
-
threads:
|
|
398
|
-
durations:
|
|
447
|
+
threads: dict[int, int] = {}
|
|
448
|
+
durations: dict[int, float] = {}
|
|
399
449
|
|
|
400
450
|
num_writers = 10
|
|
401
451
|
num_writes_per_writer = 100
|
|
402
452
|
num_events_per_write = 100
|
|
403
|
-
reader_sleep = 0.
|
|
404
|
-
writer_sleep = 0.
|
|
453
|
+
reader_sleep = 0.0001
|
|
454
|
+
writer_sleep = 0.0001
|
|
405
455
|
|
|
406
456
|
def insert_events() -> None:
|
|
407
457
|
thread_id = get_ident()
|
|
@@ -425,24 +475,23 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
425
475
|
)
|
|
426
476
|
for i in range(num_events_per_write)
|
|
427
477
|
]
|
|
428
|
-
started =
|
|
478
|
+
started = datetime_now_with_tzinfo()
|
|
429
479
|
# print(f"Thread {thread_num} write beginning #{count + 1}")
|
|
430
480
|
try:
|
|
431
481
|
recorder.insert_events(stored_events)
|
|
432
482
|
|
|
433
|
-
except Exception as e: # pragma:
|
|
483
|
+
except Exception as e: # pragma: no cover
|
|
434
484
|
if errors:
|
|
435
485
|
return
|
|
436
|
-
ended =
|
|
486
|
+
ended = datetime_now_with_tzinfo()
|
|
437
487
|
duration = (ended - started).total_seconds()
|
|
438
488
|
print(f"Error after starting {duration}", e)
|
|
439
489
|
errors.append(e)
|
|
440
490
|
else:
|
|
441
|
-
ended =
|
|
491
|
+
ended = datetime_now_with_tzinfo()
|
|
442
492
|
duration = (ended - started).total_seconds()
|
|
443
493
|
counts[thread_id] += 1
|
|
444
|
-
|
|
445
|
-
durations[thread_id] = duration
|
|
494
|
+
durations[thread_id] = max(durations[thread_id], duration)
|
|
446
495
|
sleep(writer_sleep)
|
|
447
496
|
|
|
448
497
|
stop_reading = Event()
|
|
@@ -451,7 +500,7 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
451
500
|
while not stop_reading.is_set():
|
|
452
501
|
try:
|
|
453
502
|
recorder.select_notifications(0, 10)
|
|
454
|
-
except Exception as e: # pragma:
|
|
503
|
+
except Exception as e: # pragma: no cover
|
|
455
504
|
errors.append(e)
|
|
456
505
|
return
|
|
457
506
|
# else:
|
|
@@ -466,22 +515,22 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
466
515
|
with ThreadPoolExecutor(max_workers=num_writers) as executor:
|
|
467
516
|
futures = []
|
|
468
517
|
for _ in range(num_writes_per_writer):
|
|
469
|
-
if errors: # pragma:
|
|
518
|
+
if errors: # pragma: no cover
|
|
470
519
|
break
|
|
471
520
|
future = executor.submit(insert_events)
|
|
472
521
|
futures.append(future)
|
|
473
522
|
for future in futures:
|
|
474
|
-
if errors: # pragma:
|
|
523
|
+
if errors: # pragma: no cover
|
|
475
524
|
break
|
|
476
525
|
try:
|
|
477
526
|
future.result()
|
|
478
|
-
except Exception as e: # pragma:
|
|
527
|
+
except Exception as e: # pragma: no cover
|
|
479
528
|
errors.append(e)
|
|
480
529
|
break
|
|
481
530
|
|
|
482
531
|
stop_reading.set()
|
|
483
532
|
|
|
484
|
-
if errors: # pragma:
|
|
533
|
+
if errors: # pragma: no cover
|
|
485
534
|
raise errors[0]
|
|
486
535
|
|
|
487
536
|
for thread_id, thread_num in threads.items():
|
|
@@ -497,24 +546,12 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
497
546
|
|
|
498
547
|
errors_happened = Event()
|
|
499
548
|
|
|
500
|
-
counts = {}
|
|
501
|
-
threads: Dict[int, int] = {}
|
|
502
|
-
durations: Dict[int, float] = {}
|
|
503
|
-
|
|
504
549
|
# Match this to the batch page size in postgres insert for max throughput.
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
550
|
+
num_events_per_job = 500
|
|
551
|
+
num_jobs = 60
|
|
552
|
+
num_workers = 4
|
|
508
553
|
|
|
509
554
|
def insert_events() -> None:
|
|
510
|
-
thread_id = get_ident()
|
|
511
|
-
if thread_id not in threads:
|
|
512
|
-
threads[thread_id] = len(threads)
|
|
513
|
-
if thread_id not in counts:
|
|
514
|
-
counts[thread_id] = 0
|
|
515
|
-
if thread_id not in durations:
|
|
516
|
-
durations[thread_id] = 0
|
|
517
|
-
|
|
518
555
|
originator_id = uuid4()
|
|
519
556
|
stored_events = [
|
|
520
557
|
StoredEvent(
|
|
@@ -523,42 +560,256 @@ class ApplicationRecorderTestCase(TestCase, ABC):
|
|
|
523
560
|
topic="topic",
|
|
524
561
|
state=b"state",
|
|
525
562
|
)
|
|
526
|
-
for i in range(
|
|
563
|
+
for i in range(num_events_per_job)
|
|
527
564
|
]
|
|
528
565
|
|
|
529
566
|
try:
|
|
530
567
|
recorder.insert_events(stored_events)
|
|
531
568
|
|
|
532
|
-
except Exception: # pragma:
|
|
569
|
+
except Exception: # pragma: no cover
|
|
533
570
|
errors_happened.set()
|
|
534
571
|
tb = traceback.format_exc()
|
|
535
572
|
print(tb)
|
|
536
|
-
finally:
|
|
537
|
-
ended = datetime.now()
|
|
538
|
-
duration = (ended - started).total_seconds()
|
|
539
|
-
counts[thread_id] += 1
|
|
540
|
-
durations[thread_id] = duration
|
|
541
573
|
|
|
542
|
-
|
|
574
|
+
# Warm up.
|
|
575
|
+
with ThreadPoolExecutor(max_workers=num_workers) as executor:
|
|
576
|
+
futures = []
|
|
577
|
+
for _ in range(num_workers):
|
|
578
|
+
future = executor.submit(insert_events)
|
|
579
|
+
futures.append(future)
|
|
580
|
+
for future in futures:
|
|
581
|
+
future.result()
|
|
543
582
|
|
|
544
|
-
|
|
583
|
+
# Run.
|
|
584
|
+
with ThreadPoolExecutor(max_workers=num_workers) as executor:
|
|
585
|
+
started = datetime_now_with_tzinfo()
|
|
545
586
|
futures = []
|
|
546
587
|
for _ in range(num_jobs):
|
|
547
588
|
future = executor.submit(insert_events)
|
|
548
|
-
# future.add_done_callback(self.close_db_connection)
|
|
549
589
|
futures.append(future)
|
|
550
590
|
for future in futures:
|
|
551
591
|
future.result()
|
|
552
592
|
|
|
553
593
|
self.assertFalse(errors_happened.is_set(), "There were errors (see above)")
|
|
554
|
-
ended =
|
|
555
|
-
rate = num_jobs *
|
|
594
|
+
ended = datetime_now_with_tzinfo()
|
|
595
|
+
rate = num_jobs * num_events_per_job / (ended - started).total_seconds()
|
|
556
596
|
print(f"Rate: {rate:.0f} inserts per second")
|
|
557
597
|
|
|
598
|
+
def optional_test_insert_subscribe(self) -> None:
|
|
599
|
+
|
|
600
|
+
recorder = self.create_recorder()
|
|
601
|
+
|
|
602
|
+
# Get the max notification ID.
|
|
603
|
+
max_notification_id1 = recorder.max_notification_id()
|
|
604
|
+
|
|
605
|
+
# Write two stored events.
|
|
606
|
+
originator_id1 = uuid4()
|
|
607
|
+
originator_id2 = uuid4()
|
|
608
|
+
|
|
609
|
+
stored_event1 = StoredEvent(
|
|
610
|
+
originator_id=originator_id1,
|
|
611
|
+
originator_version=self.INITIAL_VERSION,
|
|
612
|
+
topic="topic1",
|
|
613
|
+
state=b"state1",
|
|
614
|
+
)
|
|
615
|
+
stored_event2 = StoredEvent(
|
|
616
|
+
originator_id=originator_id1,
|
|
617
|
+
originator_version=self.INITIAL_VERSION + 1,
|
|
618
|
+
topic="topic2",
|
|
619
|
+
state=b"state2",
|
|
620
|
+
)
|
|
621
|
+
|
|
622
|
+
notification_ids = recorder.insert_events([stored_event1, stored_event2])
|
|
623
|
+
if self.EXPECT_CONTIGUOUS_NOTIFICATION_IDS:
|
|
624
|
+
self.assertEqual(notification_ids, [1, 2])
|
|
625
|
+
|
|
626
|
+
# Get the max notification ID.
|
|
627
|
+
max_notification_id2 = recorder.max_notification_id()
|
|
628
|
+
|
|
629
|
+
# Start a subscription with default value for 'start'.
|
|
630
|
+
with recorder.subscribe() as subscription:
|
|
631
|
+
|
|
632
|
+
# Receive events from the subscription.
|
|
633
|
+
for _ in subscription:
|
|
634
|
+
break
|
|
635
|
+
|
|
636
|
+
# Start a subscription with None value for 'start'.
|
|
637
|
+
with recorder.subscribe(gt=None) as subscription:
|
|
638
|
+
|
|
639
|
+
# Receive events from the subscription.
|
|
640
|
+
for _ in subscription:
|
|
641
|
+
break
|
|
642
|
+
|
|
643
|
+
# Start a subscription with int value for 'start'.
|
|
644
|
+
with recorder.subscribe(gt=max_notification_id1) as subscription:
|
|
645
|
+
|
|
646
|
+
# Receive events from the subscription.
|
|
647
|
+
notifications: list[Notification] = []
|
|
648
|
+
for notification in subscription:
|
|
649
|
+
notifications.append(notification)
|
|
650
|
+
if len(notifications) == 2:
|
|
651
|
+
break
|
|
652
|
+
|
|
653
|
+
# Check the events we received are the ones that were written.
|
|
654
|
+
self.assertEqual(
|
|
655
|
+
stored_event1.originator_id, notifications[0].originator_id
|
|
656
|
+
)
|
|
657
|
+
self.assertEqual(
|
|
658
|
+
stored_event1.originator_version, notifications[0].originator_version
|
|
659
|
+
)
|
|
660
|
+
self.assertEqual(
|
|
661
|
+
stored_event2.originator_id, notifications[1].originator_id
|
|
662
|
+
)
|
|
663
|
+
self.assertEqual(
|
|
664
|
+
stored_event2.originator_version, notifications[1].originator_version
|
|
665
|
+
)
|
|
666
|
+
if self.EXPECT_CONTIGUOUS_NOTIFICATION_IDS:
|
|
667
|
+
self.assertEqual(1, notifications[0].id)
|
|
668
|
+
self.assertEqual(2, notifications[1].id)
|
|
669
|
+
|
|
670
|
+
# Store a third event.
|
|
671
|
+
stored_event3 = StoredEvent(
|
|
672
|
+
originator_id=originator_id2,
|
|
673
|
+
originator_version=self.INITIAL_VERSION,
|
|
674
|
+
topic="topic3",
|
|
675
|
+
state=b"state3",
|
|
676
|
+
)
|
|
677
|
+
notification_ids = recorder.insert_events([stored_event3])
|
|
678
|
+
if self.EXPECT_CONTIGUOUS_NOTIFICATION_IDS:
|
|
679
|
+
self.assertEqual(notification_ids, [3])
|
|
680
|
+
|
|
681
|
+
# Receive events from the subscription.
|
|
682
|
+
for notification in subscription:
|
|
683
|
+
notifications.append(notification)
|
|
684
|
+
if len(notifications) == 3:
|
|
685
|
+
break
|
|
686
|
+
|
|
687
|
+
# Check the events we received are the ones that were written.
|
|
688
|
+
self.assertEqual(
|
|
689
|
+
stored_event3.originator_id, notifications[2].originator_id
|
|
690
|
+
)
|
|
691
|
+
self.assertEqual(
|
|
692
|
+
stored_event3.originator_version, notifications[2].originator_version
|
|
693
|
+
)
|
|
694
|
+
if self.EXPECT_CONTIGUOUS_NOTIFICATION_IDS:
|
|
695
|
+
self.assertEqual(3, notifications[2].id)
|
|
696
|
+
|
|
697
|
+
# Start a subscription with int value for 'start'.
|
|
698
|
+
with recorder.subscribe(gt=max_notification_id2) as subscription:
|
|
699
|
+
|
|
700
|
+
# Receive events from the subscription.
|
|
701
|
+
notifications = []
|
|
702
|
+
for notification in subscription:
|
|
703
|
+
notifications.append(notification)
|
|
704
|
+
if len(notifications) == 1:
|
|
705
|
+
break
|
|
706
|
+
|
|
707
|
+
# Check the events we received are the ones that were written.
|
|
708
|
+
self.assertEqual(
|
|
709
|
+
stored_event3.originator_id, notifications[0].originator_id
|
|
710
|
+
)
|
|
711
|
+
|
|
712
|
+
# Start a subscription, call stop() during iteration.
|
|
713
|
+
with recorder.subscribe(gt=None) as subscription:
|
|
714
|
+
|
|
715
|
+
# Receive events from the subscription.
|
|
716
|
+
for i, _ in enumerate(subscription):
|
|
717
|
+
subscription.stop()
|
|
718
|
+
# Shouldn't get here twice...
|
|
719
|
+
self.assertLess(i, 1, "Got here twice")
|
|
720
|
+
|
|
721
|
+
# Start a subscription, call stop() before iteration.
|
|
722
|
+
subscription = recorder.subscribe(gt=None)
|
|
723
|
+
with subscription:
|
|
724
|
+
subscription.stop()
|
|
725
|
+
# Receive events from the subscription.
|
|
726
|
+
for _ in subscription:
|
|
727
|
+
# Shouldn't get here...
|
|
728
|
+
self.fail("Got here")
|
|
729
|
+
|
|
730
|
+
# Start a subscription, call stop() before entering context manager.
|
|
731
|
+
subscription = recorder.subscribe(gt=None)
|
|
732
|
+
subscription.stop()
|
|
733
|
+
with subscription:
|
|
734
|
+
# Receive events from the subscription.
|
|
735
|
+
for _ in subscription:
|
|
736
|
+
# Shouldn't get here...
|
|
737
|
+
self.fail("Got here")
|
|
738
|
+
|
|
739
|
+
# Start a subscription with topics.
|
|
740
|
+
subscription = recorder.subscribe(gt=None, topics=["topic3"])
|
|
741
|
+
with subscription:
|
|
742
|
+
for notification in subscription:
|
|
743
|
+
self.assertEqual(notification.topic, "topic3")
|
|
744
|
+
if (
|
|
745
|
+
notification.originator_id == stored_event3.originator_id
|
|
746
|
+
and notification.originator_version
|
|
747
|
+
== stored_event3.originator_version
|
|
748
|
+
):
|
|
749
|
+
break
|
|
750
|
+
|
|
558
751
|
def close_db_connection(self, *args: Any) -> None:
|
|
559
752
|
""""""
|
|
560
753
|
|
|
561
754
|
|
|
755
|
+
class TrackingRecorderTestCase(TestCase, ABC):
|
|
756
|
+
@abstractmethod
|
|
757
|
+
def create_recorder(self) -> TrackingRecorder:
|
|
758
|
+
""""""
|
|
759
|
+
|
|
760
|
+
def test_insert_tracking(self) -> None:
|
|
761
|
+
tracking_recorder = self.create_recorder()
|
|
762
|
+
|
|
763
|
+
# Construct tracking objects.
|
|
764
|
+
tracking1 = Tracking(notification_id=21, application_name="upstream1")
|
|
765
|
+
tracking2 = Tracking(notification_id=22, application_name="upstream1")
|
|
766
|
+
tracking3 = Tracking(notification_id=21, application_name="upstream2")
|
|
767
|
+
|
|
768
|
+
# Insert tracking objects.
|
|
769
|
+
tracking_recorder.insert_tracking(tracking=tracking1)
|
|
770
|
+
tracking_recorder.insert_tracking(tracking=tracking2)
|
|
771
|
+
tracking_recorder.insert_tracking(tracking=tracking3)
|
|
772
|
+
|
|
773
|
+
# Fail to insert same tracking object twice.
|
|
774
|
+
with self.assertRaises(IntegrityError):
|
|
775
|
+
tracking_recorder.insert_tracking(tracking=tracking1)
|
|
776
|
+
with self.assertRaises(IntegrityError):
|
|
777
|
+
tracking_recorder.insert_tracking(tracking=tracking2)
|
|
778
|
+
with self.assertRaises(IntegrityError):
|
|
779
|
+
tracking_recorder.insert_tracking(tracking=tracking3)
|
|
780
|
+
|
|
781
|
+
# Get latest tracked position.
|
|
782
|
+
self.assertEqual(tracking_recorder.max_tracking_id("upstream1"), 22)
|
|
783
|
+
self.assertEqual(tracking_recorder.max_tracking_id("upstream2"), 21)
|
|
784
|
+
self.assertIsNone(tracking_recorder.max_tracking_id("upstream3"))
|
|
785
|
+
|
|
786
|
+
# Check if an event notification has been processed.
|
|
787
|
+
assert tracking_recorder.has_tracking_id("upstream1", 21)
|
|
788
|
+
assert tracking_recorder.has_tracking_id("upstream1", 22)
|
|
789
|
+
assert tracking_recorder.has_tracking_id("upstream2", 21)
|
|
790
|
+
assert not tracking_recorder.has_tracking_id("upstream2", 22)
|
|
791
|
+
|
|
792
|
+
def test_wait(self) -> None:
|
|
793
|
+
tracking_recorder = self.create_recorder()
|
|
794
|
+
|
|
795
|
+
tracking_recorder.wait("upstream1", None)
|
|
796
|
+
|
|
797
|
+
with self.assertRaises(TimeoutError):
|
|
798
|
+
tracking_recorder.wait("upstream1", 21, timeout=0.1)
|
|
799
|
+
|
|
800
|
+
tracking1 = Tracking(notification_id=21, application_name="upstream1")
|
|
801
|
+
tracking_recorder.insert_tracking(tracking=tracking1)
|
|
802
|
+
tracking_recorder.wait("upstream1", None)
|
|
803
|
+
tracking_recorder.wait("upstream1", 10)
|
|
804
|
+
tracking_recorder.wait("upstream1", 21)
|
|
805
|
+
with self.assertRaises(TimeoutError):
|
|
806
|
+
tracking_recorder.wait("upstream1", 22, timeout=0.1)
|
|
807
|
+
with self.assertRaises(WaitInterruptedError):
|
|
808
|
+
interrupt = Event()
|
|
809
|
+
interrupt.set()
|
|
810
|
+
tracking_recorder.wait("upstream1", 22, interrupt=interrupt)
|
|
811
|
+
|
|
812
|
+
|
|
562
813
|
class ProcessRecorderTestCase(TestCase, ABC):
|
|
563
814
|
@abstractmethod
|
|
564
815
|
def create_recorder(self) -> ProcessRecorder:
|
|
@@ -569,10 +820,7 @@ class ProcessRecorderTestCase(TestCase, ABC):
|
|
|
569
820
|
recorder = self.create_recorder()
|
|
570
821
|
|
|
571
822
|
# Get current position.
|
|
572
|
-
self.
|
|
573
|
-
recorder.max_tracking_id("upstream_app"),
|
|
574
|
-
0,
|
|
575
|
-
)
|
|
823
|
+
self.assertIsNone(recorder.max_tracking_id("upstream_app"))
|
|
576
824
|
|
|
577
825
|
# Write two stored events.
|
|
578
826
|
originator_id1 = uuid4()
|
|
@@ -662,10 +910,11 @@ class ProcessRecorderTestCase(TestCase, ABC):
|
|
|
662
910
|
2,
|
|
663
911
|
)
|
|
664
912
|
|
|
665
|
-
def test_has_tracking_id(self):
|
|
913
|
+
def test_has_tracking_id(self) -> None:
|
|
666
914
|
# Construct the recorder.
|
|
667
915
|
recorder = self.create_recorder()
|
|
668
916
|
|
|
917
|
+
self.assertTrue(recorder.has_tracking_id("upstream_app", None))
|
|
669
918
|
self.assertFalse(recorder.has_tracking_id("upstream_app", 1))
|
|
670
919
|
self.assertFalse(recorder.has_tracking_id("upstream_app", 2))
|
|
671
920
|
self.assertFalse(recorder.has_tracking_id("upstream_app", 3))
|
|
@@ -737,7 +986,7 @@ class ProcessRecorderTestCase(TestCase, ABC):
|
|
|
737
986
|
class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
|
|
738
987
|
insert_num = 1000
|
|
739
988
|
|
|
740
|
-
def test(self):
|
|
989
|
+
def test(self) -> None:
|
|
741
990
|
recorder = self.create_recorder()
|
|
742
991
|
|
|
743
992
|
max_notification_id = recorder.max_notification_id()
|
|
@@ -752,7 +1001,7 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
|
|
|
752
1001
|
|
|
753
1002
|
errors = []
|
|
754
1003
|
|
|
755
|
-
def insert_stack(stack):
|
|
1004
|
+
def insert_stack(stack: list[StoredEvent]) -> None:
|
|
756
1005
|
try:
|
|
757
1006
|
race_started.wait()
|
|
758
1007
|
recorder.insert_events(stack)
|
|
@@ -775,7 +1024,9 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
|
|
|
775
1024
|
|
|
776
1025
|
sleep(1) # Added to make eventsourcing-axon tests work, perhaps not necessary.
|
|
777
1026
|
notifications = recorder.select_notifications(
|
|
778
|
-
start=max_notification_id
|
|
1027
|
+
start=max_notification_id,
|
|
1028
|
+
limit=2 * self.insert_num,
|
|
1029
|
+
inclusive_of_start=False,
|
|
779
1030
|
)
|
|
780
1031
|
ids_for_sequence1 = [
|
|
781
1032
|
e.id for e in notifications if e.originator_id == originator1_id
|
|
@@ -796,7 +1047,7 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
|
|
|
796
1047
|
else:
|
|
797
1048
|
self.assertGreater(min_id_for_sequence2, max_id_for_sequence1)
|
|
798
1049
|
|
|
799
|
-
def create_stack(self, originator_id):
|
|
1050
|
+
def create_stack(self, originator_id: UUID) -> list[StoredEvent]:
|
|
800
1051
|
return [
|
|
801
1052
|
StoredEvent(
|
|
802
1053
|
originator_id=originator_id,
|
|
@@ -812,37 +1063,52 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
|
|
|
812
1063
|
pass
|
|
813
1064
|
|
|
814
1065
|
|
|
815
|
-
|
|
816
|
-
|
|
1066
|
+
_TInfrastrutureFactory = TypeVar(
|
|
1067
|
+
"_TInfrastrutureFactory", bound=InfrastructureFactory[Any]
|
|
1068
|
+
)
|
|
1069
|
+
|
|
1070
|
+
|
|
1071
|
+
class InfrastructureFactoryTestCase(ABC, TestCase, Generic[_TInfrastrutureFactory]):
|
|
1072
|
+
env: Environment
|
|
1073
|
+
|
|
1074
|
+
@abstractmethod
|
|
1075
|
+
def expected_factory_class(self) -> type[_TInfrastrutureFactory]:
|
|
1076
|
+
pass
|
|
1077
|
+
|
|
1078
|
+
@abstractmethod
|
|
1079
|
+
def expected_aggregate_recorder_class(self) -> type[AggregateRecorder]:
|
|
1080
|
+
pass
|
|
817
1081
|
|
|
818
1082
|
@abstractmethod
|
|
819
|
-
def
|
|
1083
|
+
def expected_application_recorder_class(self) -> type[ApplicationRecorder]:
|
|
820
1084
|
pass
|
|
821
1085
|
|
|
822
1086
|
@abstractmethod
|
|
823
|
-
def
|
|
1087
|
+
def expected_tracking_recorder_class(self) -> type[TrackingRecorder]:
|
|
824
1088
|
pass
|
|
825
1089
|
|
|
826
1090
|
@abstractmethod
|
|
827
|
-
def
|
|
1091
|
+
def tracking_recorder_subclass(self) -> type[TrackingRecorder]:
|
|
828
1092
|
pass
|
|
829
1093
|
|
|
830
1094
|
@abstractmethod
|
|
831
|
-
def expected_process_recorder_class(self):
|
|
1095
|
+
def expected_process_recorder_class(self) -> type[ProcessRecorder]:
|
|
832
1096
|
pass
|
|
833
1097
|
|
|
834
1098
|
def setUp(self) -> None:
|
|
835
|
-
self.factory =
|
|
1099
|
+
self.factory = cast(
|
|
1100
|
+
"_TInfrastrutureFactory", InfrastructureFactory.construct(self.env)
|
|
1101
|
+
)
|
|
836
1102
|
self.assertIsInstance(self.factory, self.expected_factory_class())
|
|
837
1103
|
self.transcoder = JSONTranscoder()
|
|
838
1104
|
self.transcoder.register(UUIDAsHex())
|
|
839
1105
|
self.transcoder.register(DecimalAsStr())
|
|
840
1106
|
self.transcoder.register(DatetimeAsISO())
|
|
841
1107
|
|
|
842
|
-
def tearDown(self):
|
|
1108
|
+
def tearDown(self) -> None:
|
|
843
1109
|
self.factory.close()
|
|
844
1110
|
|
|
845
|
-
def test_createmapper(self):
|
|
1111
|
+
def test_createmapper(self) -> None:
|
|
846
1112
|
# Want to construct:
|
|
847
1113
|
# - application recorder
|
|
848
1114
|
# - snapshot recorder
|
|
@@ -878,7 +1144,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
878
1144
|
self.assertIsNone(mapper.cipher)
|
|
879
1145
|
self.assertIsNone(mapper.compressor)
|
|
880
1146
|
|
|
881
|
-
def test_createmapper_with_compressor(self):
|
|
1147
|
+
def test_createmapper_with_compressor(self) -> None:
|
|
882
1148
|
# Create mapper with compressor class as topic.
|
|
883
1149
|
self.env[self.factory.COMPRESSOR_TOPIC] = get_topic(ZlibCompressor)
|
|
884
1150
|
mapper = self.factory.mapper(transcoder=self.transcoder)
|
|
@@ -893,7 +1159,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
893
1159
|
self.assertEqual(mapper.compressor, zlib)
|
|
894
1160
|
self.assertIsNone(mapper.cipher)
|
|
895
1161
|
|
|
896
|
-
def test_createmapper_with_cipher(self):
|
|
1162
|
+
def test_createmapper_with_cipher(self) -> None:
|
|
897
1163
|
# Check cipher needs a key.
|
|
898
1164
|
self.env[self.factory.CIPHER_TOPIC] = get_topic(AESCipher)
|
|
899
1165
|
|
|
@@ -914,7 +1180,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
914
1180
|
|
|
915
1181
|
def test_createmapper_with_cipher_and_compressor(
|
|
916
1182
|
self,
|
|
917
|
-
):
|
|
1183
|
+
) -> None:
|
|
918
1184
|
# Create mapper with cipher and compressor.
|
|
919
1185
|
self.env[self.factory.COMPRESSOR_TOPIC] = get_topic(ZlibCompressor)
|
|
920
1186
|
|
|
@@ -927,7 +1193,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
927
1193
|
self.assertIsNotNone(mapper.cipher)
|
|
928
1194
|
self.assertIsNotNone(mapper.compressor)
|
|
929
1195
|
|
|
930
|
-
def test_mapper_with_wrong_cipher_key(self):
|
|
1196
|
+
def test_mapper_with_wrong_cipher_key(self) -> None:
|
|
931
1197
|
self.env.name = "App1"
|
|
932
1198
|
self.env[self.factory.CIPHER_TOPIC] = get_topic(AESCipher)
|
|
933
1199
|
cipher_key1 = AESCipher.create_key(16)
|
|
@@ -957,7 +1223,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
957
1223
|
with self.assertRaises(ValueError):
|
|
958
1224
|
mapper2.to_domain_event(stored_event)
|
|
959
1225
|
|
|
960
|
-
def test_create_aggregate_recorder(self):
|
|
1226
|
+
def test_create_aggregate_recorder(self) -> None:
|
|
961
1227
|
recorder = self.factory.aggregate_recorder()
|
|
962
1228
|
self.assertEqual(type(recorder), self.expected_aggregate_recorder_class())
|
|
963
1229
|
|
|
@@ -968,7 +1234,7 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
968
1234
|
recorder = self.factory.aggregate_recorder()
|
|
969
1235
|
self.assertEqual(type(recorder), self.expected_aggregate_recorder_class())
|
|
970
1236
|
|
|
971
|
-
def test_create_application_recorder(self):
|
|
1237
|
+
def test_create_application_recorder(self) -> None:
|
|
972
1238
|
recorder = self.factory.application_recorder()
|
|
973
1239
|
self.assertEqual(type(recorder), self.expected_application_recorder_class())
|
|
974
1240
|
self.assertIsInstance(recorder, ApplicationRecorder)
|
|
@@ -978,7 +1244,27 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
978
1244
|
recorder = self.factory.application_recorder()
|
|
979
1245
|
self.assertEqual(type(recorder), self.expected_application_recorder_class())
|
|
980
1246
|
|
|
981
|
-
def
|
|
1247
|
+
def test_create_tracking_recorder(self) -> None:
|
|
1248
|
+
recorder = self.factory.tracking_recorder()
|
|
1249
|
+
self.assertEqual(type(recorder), self.expected_tracking_recorder_class())
|
|
1250
|
+
self.assertIsInstance(recorder, TrackingRecorder)
|
|
1251
|
+
|
|
1252
|
+
# Exercise code path where table is not created.
|
|
1253
|
+
self.env["CREATE_TABLE"] = "f"
|
|
1254
|
+
recorder = self.factory.tracking_recorder()
|
|
1255
|
+
self.assertEqual(type(recorder), self.expected_tracking_recorder_class())
|
|
1256
|
+
|
|
1257
|
+
# Exercise code path where tracking recorder class is specified as arg.
|
|
1258
|
+
subclass = self.tracking_recorder_subclass()
|
|
1259
|
+
recorder = self.factory.tracking_recorder(subclass)
|
|
1260
|
+
self.assertEqual(type(recorder), subclass)
|
|
1261
|
+
|
|
1262
|
+
# Exercise code path where tracking recorder class is specified as topic.
|
|
1263
|
+
self.factory.env[self.factory.TRACKING_RECORDER_TOPIC] = get_topic(subclass)
|
|
1264
|
+
recorder = self.factory.tracking_recorder()
|
|
1265
|
+
self.assertEqual(type(recorder), subclass)
|
|
1266
|
+
|
|
1267
|
+
def test_create_process_recorder(self) -> None:
|
|
982
1268
|
recorder = self.factory.process_recorder()
|
|
983
1269
|
self.assertEqual(type(recorder), self.expected_process_recorder_class())
|
|
984
1270
|
self.assertIsInstance(recorder, ProcessRecorder)
|
|
@@ -989,23 +1275,26 @@ class InfrastructureFactoryTestCase(ABC, TestCase):
|
|
|
989
1275
|
self.assertEqual(type(recorder), self.expected_process_recorder_class())
|
|
990
1276
|
|
|
991
1277
|
|
|
992
|
-
def tmpfile_uris():
|
|
1278
|
+
def tmpfile_uris() -> Iterator[str]:
|
|
993
1279
|
tmp_files = []
|
|
994
|
-
ram_disk_path = "/Volumes/RAM DISK/"
|
|
995
|
-
prefix = None
|
|
996
|
-
if
|
|
997
|
-
prefix = ram_disk_path
|
|
1280
|
+
ram_disk_path = Path("/Volumes/RAM DISK/")
|
|
1281
|
+
prefix: str | None = None
|
|
1282
|
+
if ram_disk_path.exists():
|
|
1283
|
+
prefix = str(ram_disk_path)
|
|
998
1284
|
while True:
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1285
|
+
with NamedTemporaryFile(
|
|
1286
|
+
prefix=prefix,
|
|
1287
|
+
suffix="_eventsourcing_test.db",
|
|
1288
|
+
) as tmp_file:
|
|
1289
|
+
tmp_files.append(tmp_file)
|
|
1290
|
+
yield "file:" + tmp_file.name
|
|
1002
1291
|
|
|
1003
1292
|
|
|
1004
1293
|
class CustomType1:
|
|
1005
1294
|
def __init__(self, value: UUID):
|
|
1006
1295
|
self.value = value
|
|
1007
1296
|
|
|
1008
|
-
def __eq__(self, other:
|
|
1297
|
+
def __eq__(self, other: object) -> bool:
|
|
1009
1298
|
return type(self) is type(other) and self.__dict__ == other.__dict__
|
|
1010
1299
|
|
|
1011
1300
|
|
|
@@ -1013,39 +1302,48 @@ class CustomType2:
|
|
|
1013
1302
|
def __init__(self, value: CustomType1):
|
|
1014
1303
|
self.value = value
|
|
1015
1304
|
|
|
1016
|
-
def __eq__(self, other:
|
|
1305
|
+
def __eq__(self, other: object) -> bool:
|
|
1017
1306
|
return type(self) is type(other) and self.__dict__ == other.__dict__
|
|
1018
1307
|
|
|
1019
1308
|
|
|
1020
|
-
|
|
1021
|
-
|
|
1309
|
+
_KT = TypeVar("_KT")
|
|
1310
|
+
_VT = TypeVar("_VT")
|
|
1311
|
+
|
|
1312
|
+
|
|
1313
|
+
class Mydict(dict[_KT, _VT]):
|
|
1314
|
+
def __repr__(self) -> str:
|
|
1022
1315
|
return f"{type(self).__name__}({super().__repr__()})"
|
|
1023
1316
|
|
|
1024
|
-
def __eq__(self, other):
|
|
1317
|
+
def __eq__(self, other: object) -> bool:
|
|
1025
1318
|
return type(self) is type(other) and super().__eq__(other)
|
|
1026
1319
|
|
|
1027
1320
|
|
|
1028
|
-
|
|
1029
|
-
|
|
1321
|
+
_T = TypeVar("_T")
|
|
1322
|
+
|
|
1323
|
+
|
|
1324
|
+
class MyList(list[_T]):
|
|
1325
|
+
def __repr__(self) -> str:
|
|
1030
1326
|
return f"{type(self).__name__}({super().__repr__()})"
|
|
1031
1327
|
|
|
1032
|
-
def __eq__(self, other):
|
|
1328
|
+
def __eq__(self, other: object) -> bool:
|
|
1033
1329
|
return type(self) is type(other) and super().__eq__(other)
|
|
1034
1330
|
|
|
1035
1331
|
|
|
1036
1332
|
class MyStr(str):
|
|
1037
|
-
|
|
1333
|
+
__slots__ = ()
|
|
1334
|
+
|
|
1335
|
+
def __repr__(self) -> str:
|
|
1038
1336
|
return f"{type(self).__name__}({super().__repr__()})"
|
|
1039
1337
|
|
|
1040
|
-
def __eq__(self, other):
|
|
1338
|
+
def __eq__(self, other: object) -> bool:
|
|
1041
1339
|
return type(self) is type(other) and super().__eq__(other)
|
|
1042
1340
|
|
|
1043
1341
|
|
|
1044
1342
|
class MyInt(int):
|
|
1045
|
-
def __repr__(self):
|
|
1343
|
+
def __repr__(self) -> str:
|
|
1046
1344
|
return f"{type(self).__name__}({super().__repr__()})"
|
|
1047
1345
|
|
|
1048
|
-
def __eq__(self, other):
|
|
1346
|
+
def __eq__(self, other: object) -> bool:
|
|
1049
1347
|
return type(self) is type(other) and super().__eq__(other)
|
|
1050
1348
|
|
|
1051
1349
|
|
|
@@ -1081,10 +1379,10 @@ class TranscoderTestCase(TestCase):
|
|
|
1081
1379
|
def setUp(self) -> None:
|
|
1082
1380
|
self.transcoder = self.construct_transcoder()
|
|
1083
1381
|
|
|
1084
|
-
def construct_transcoder(self):
|
|
1382
|
+
def construct_transcoder(self) -> Transcoder:
|
|
1085
1383
|
raise NotImplementedError
|
|
1086
1384
|
|
|
1087
|
-
def test_str(self):
|
|
1385
|
+
def test_str(self) -> None:
|
|
1088
1386
|
obj = "a"
|
|
1089
1387
|
data = self.transcoder.encode(obj)
|
|
1090
1388
|
self.assertEqual(data, b'"a"')
|
|
@@ -1116,48 +1414,48 @@ class TranscoderTestCase(TestCase):
|
|
|
1116
1414
|
obj, self.transcoder.decode(legacy_encoding_with_ensure_ascii_true)
|
|
1117
1415
|
)
|
|
1118
1416
|
|
|
1119
|
-
def test_dict(self):
|
|
1417
|
+
def test_dict(self) -> None:
|
|
1120
1418
|
# Empty dict.
|
|
1121
|
-
|
|
1122
|
-
data = self.transcoder.encode(
|
|
1419
|
+
obj1: dict[Never, Never] = {}
|
|
1420
|
+
data = self.transcoder.encode(obj1)
|
|
1123
1421
|
self.assertEqual(data, b"{}")
|
|
1124
|
-
self.assertEqual(
|
|
1422
|
+
self.assertEqual(obj1, self.transcoder.decode(data))
|
|
1125
1423
|
|
|
1126
|
-
#
|
|
1127
|
-
|
|
1128
|
-
data = self.transcoder.encode(
|
|
1424
|
+
# dict with single key.
|
|
1425
|
+
obj2 = {"a": 1}
|
|
1426
|
+
data = self.transcoder.encode(obj2)
|
|
1129
1427
|
self.assertEqual(data, b'{"a":1}')
|
|
1130
|
-
self.assertEqual(
|
|
1428
|
+
self.assertEqual(obj2, self.transcoder.decode(data))
|
|
1131
1429
|
|
|
1132
|
-
#
|
|
1133
|
-
|
|
1134
|
-
data = self.transcoder.encode(
|
|
1430
|
+
# dict with many keys.
|
|
1431
|
+
obj3 = {"a": 1, "b": 2}
|
|
1432
|
+
data = self.transcoder.encode(obj3)
|
|
1135
1433
|
self.assertEqual(data, b'{"a":1,"b":2}')
|
|
1136
|
-
self.assertEqual(
|
|
1434
|
+
self.assertEqual(obj3, self.transcoder.decode(data))
|
|
1137
1435
|
|
|
1138
1436
|
# Empty dict in dict.
|
|
1139
|
-
|
|
1140
|
-
data = self.transcoder.encode(
|
|
1437
|
+
obj4: dict[str, dict[Never, Never]] = {"a": {}}
|
|
1438
|
+
data = self.transcoder.encode(obj4)
|
|
1141
1439
|
self.assertEqual(data, b'{"a":{}}')
|
|
1142
|
-
self.assertEqual(
|
|
1440
|
+
self.assertEqual(obj4, self.transcoder.decode(data))
|
|
1143
1441
|
|
|
1144
1442
|
# Empty dicts in dict.
|
|
1145
|
-
|
|
1146
|
-
data = self.transcoder.encode(
|
|
1443
|
+
obj5: dict[str, dict[Never, Never]] = {"a": {}, "b": {}}
|
|
1444
|
+
data = self.transcoder.encode(obj5)
|
|
1147
1445
|
self.assertEqual(data, b'{"a":{},"b":{}}')
|
|
1148
|
-
self.assertEqual(
|
|
1446
|
+
self.assertEqual(obj5, self.transcoder.decode(data))
|
|
1149
1447
|
|
|
1150
1448
|
# Empty dict in dict in dict.
|
|
1151
|
-
|
|
1152
|
-
data = self.transcoder.encode(
|
|
1449
|
+
obj6: dict[str, dict[str, dict[Never, Never]]] = {"a": {"b": {}}}
|
|
1450
|
+
data = self.transcoder.encode(obj6)
|
|
1153
1451
|
self.assertEqual(data, b'{"a":{"b":{}}}')
|
|
1154
|
-
self.assertEqual(
|
|
1452
|
+
self.assertEqual(obj6, self.transcoder.decode(data))
|
|
1155
1453
|
|
|
1156
1454
|
# Int in dict in dict in dict.
|
|
1157
|
-
|
|
1158
|
-
data = self.transcoder.encode(
|
|
1455
|
+
obj7 = {"a": {"b": {"c": 1}}}
|
|
1456
|
+
data = self.transcoder.encode(obj7)
|
|
1159
1457
|
self.assertEqual(data, b'{"a":{"b":{"c":1}}}')
|
|
1160
|
-
self.assertEqual(
|
|
1458
|
+
self.assertEqual(obj7, self.transcoder.decode(data))
|
|
1161
1459
|
|
|
1162
1460
|
# TODO: Int keys?
|
|
1163
1461
|
# obj = {1: "a"}
|
|
@@ -1165,115 +1463,115 @@ class TranscoderTestCase(TestCase):
|
|
|
1165
1463
|
# self.assertEqual(data, b'{1:{"a"}')
|
|
1166
1464
|
# self.assertEqual(obj, self.transcoder.decode(data))
|
|
1167
1465
|
|
|
1168
|
-
def test_dict_with_len_2_and__data_(self):
|
|
1466
|
+
def test_dict_with_len_2_and__data_(self) -> None:
|
|
1169
1467
|
obj = {"_data_": 1, "something_else": 2}
|
|
1170
1468
|
data = self.transcoder.encode(obj)
|
|
1171
1469
|
self.assertEqual(obj, self.transcoder.decode(data))
|
|
1172
1470
|
|
|
1173
|
-
def test_dict_with_len_2_and__type_(self):
|
|
1471
|
+
def test_dict_with_len_2_and__type_(self) -> None:
|
|
1174
1472
|
obj = {"_type_": 1, "something_else": 2}
|
|
1175
1473
|
data = self.transcoder.encode(obj)
|
|
1176
1474
|
self.assertEqual(obj, self.transcoder.decode(data))
|
|
1177
1475
|
|
|
1178
|
-
def test_dict_subclass(self):
|
|
1179
|
-
my_dict =
|
|
1476
|
+
def test_dict_subclass(self) -> None:
|
|
1477
|
+
my_dict = Mydict({"a": 1})
|
|
1180
1478
|
data = self.transcoder.encode(my_dict)
|
|
1181
1479
|
self.assertEqual(b'{"_type_":"mydict","_data_":{"a":1}}', data)
|
|
1182
1480
|
copy = self.transcoder.decode(data)
|
|
1183
1481
|
self.assertEqual(my_dict, copy)
|
|
1184
1482
|
|
|
1185
|
-
def test_list_subclass(self):
|
|
1483
|
+
def test_list_subclass(self) -> None:
|
|
1186
1484
|
my_list = MyList((("a", 1),))
|
|
1187
1485
|
data = self.transcoder.encode(my_list)
|
|
1188
1486
|
copy = self.transcoder.decode(data)
|
|
1189
1487
|
self.assertEqual(my_list, copy)
|
|
1190
1488
|
|
|
1191
|
-
def test_str_subclass(self):
|
|
1489
|
+
def test_str_subclass(self) -> None:
|
|
1192
1490
|
my_str = MyStr("a")
|
|
1193
1491
|
data = self.transcoder.encode(my_str)
|
|
1194
1492
|
copy = self.transcoder.decode(data)
|
|
1195
1493
|
self.assertEqual(my_str, copy)
|
|
1196
1494
|
|
|
1197
|
-
def test_int_subclass(self):
|
|
1495
|
+
def test_int_subclass(self) -> None:
|
|
1198
1496
|
my_int = MyInt(3)
|
|
1199
1497
|
data = self.transcoder.encode(my_int)
|
|
1200
1498
|
copy = self.transcoder.decode(data)
|
|
1201
1499
|
self.assertEqual(my_int, copy)
|
|
1202
1500
|
|
|
1203
|
-
def test_tuple(self):
|
|
1501
|
+
def test_tuple(self) -> None:
|
|
1204
1502
|
# Empty tuple.
|
|
1205
|
-
|
|
1206
|
-
data = self.transcoder.encode(
|
|
1503
|
+
obj1 = ()
|
|
1504
|
+
data = self.transcoder.encode(obj1)
|
|
1207
1505
|
self.assertEqual(data, b'{"_type_":"tuple_as_list","_data_":[]}')
|
|
1208
|
-
self.assertEqual(
|
|
1506
|
+
self.assertEqual(obj1, self.transcoder.decode(data))
|
|
1209
1507
|
|
|
1210
1508
|
# Empty tuple in a tuple.
|
|
1211
|
-
|
|
1212
|
-
data = self.transcoder.encode(
|
|
1213
|
-
self.assertEqual(
|
|
1509
|
+
obj2 = ((),)
|
|
1510
|
+
data = self.transcoder.encode(obj2)
|
|
1511
|
+
self.assertEqual(obj2, self.transcoder.decode(data))
|
|
1214
1512
|
|
|
1215
1513
|
# Int in tuple in a tuple.
|
|
1216
|
-
|
|
1217
|
-
data = self.transcoder.encode(
|
|
1218
|
-
self.assertEqual(
|
|
1514
|
+
obj3 = ((1, 2),)
|
|
1515
|
+
data = self.transcoder.encode(obj3)
|
|
1516
|
+
self.assertEqual(obj3, self.transcoder.decode(data))
|
|
1219
1517
|
|
|
1220
1518
|
# Str in tuple in a tuple.
|
|
1221
|
-
|
|
1222
|
-
data = self.transcoder.encode(
|
|
1223
|
-
self.assertEqual(
|
|
1519
|
+
obj4 = (("a", "b"),)
|
|
1520
|
+
data = self.transcoder.encode(obj4)
|
|
1521
|
+
self.assertEqual(obj4, self.transcoder.decode(data))
|
|
1224
1522
|
|
|
1225
1523
|
# Int and str in tuple in a tuple.
|
|
1226
|
-
|
|
1227
|
-
data = self.transcoder.encode(
|
|
1228
|
-
self.assertEqual(
|
|
1524
|
+
obj5 = ((1, "a"),)
|
|
1525
|
+
data = self.transcoder.encode(obj5)
|
|
1526
|
+
self.assertEqual(obj5, self.transcoder.decode(data))
|
|
1229
1527
|
|
|
1230
|
-
def test_list(self):
|
|
1528
|
+
def test_list(self) -> None:
|
|
1231
1529
|
# Empty list.
|
|
1232
|
-
|
|
1233
|
-
data = self.transcoder.encode(
|
|
1234
|
-
self.assertEqual(
|
|
1530
|
+
obj1: list[Never] = []
|
|
1531
|
+
data = self.transcoder.encode(obj1)
|
|
1532
|
+
self.assertEqual(obj1, self.transcoder.decode(data))
|
|
1235
1533
|
|
|
1236
1534
|
# Empty list in a list.
|
|
1237
|
-
|
|
1238
|
-
data = self.transcoder.encode(
|
|
1239
|
-
self.assertEqual(
|
|
1535
|
+
obj2: list[list[Never]] = [[]]
|
|
1536
|
+
data = self.transcoder.encode(obj2)
|
|
1537
|
+
self.assertEqual(obj2, self.transcoder.decode(data))
|
|
1240
1538
|
|
|
1241
1539
|
# Int in list in a list.
|
|
1242
|
-
|
|
1243
|
-
data = self.transcoder.encode(
|
|
1244
|
-
self.assertEqual(
|
|
1540
|
+
obj3 = [[1, 2]]
|
|
1541
|
+
data = self.transcoder.encode(obj3)
|
|
1542
|
+
self.assertEqual(obj3, self.transcoder.decode(data))
|
|
1245
1543
|
|
|
1246
1544
|
# Str in list in a list.
|
|
1247
|
-
|
|
1248
|
-
data = self.transcoder.encode(
|
|
1249
|
-
self.assertEqual(
|
|
1545
|
+
obj4 = [["a", "b"]]
|
|
1546
|
+
data = self.transcoder.encode(obj4)
|
|
1547
|
+
self.assertEqual(obj4, self.transcoder.decode(data))
|
|
1250
1548
|
|
|
1251
1549
|
# Int and str in list in a list.
|
|
1252
|
-
|
|
1253
|
-
data = self.transcoder.encode(
|
|
1254
|
-
self.assertEqual(
|
|
1550
|
+
obj5 = [[1, "a"]]
|
|
1551
|
+
data = self.transcoder.encode(obj5)
|
|
1552
|
+
self.assertEqual(obj5, self.transcoder.decode(data))
|
|
1255
1553
|
|
|
1256
|
-
def test_mixed(self):
|
|
1257
|
-
|
|
1258
|
-
data = self.transcoder.encode(
|
|
1259
|
-
self.assertEqual(
|
|
1554
|
+
def test_mixed(self) -> None:
|
|
1555
|
+
obj1 = [(1, "a"), {"b": 2}]
|
|
1556
|
+
data = self.transcoder.encode(obj1)
|
|
1557
|
+
self.assertEqual(obj1, self.transcoder.decode(data))
|
|
1260
1558
|
|
|
1261
|
-
|
|
1262
|
-
data = self.transcoder.encode(
|
|
1263
|
-
self.assertEqual(
|
|
1559
|
+
obj2 = ([1, "a"], {"b": 2})
|
|
1560
|
+
data = self.transcoder.encode(obj2)
|
|
1561
|
+
self.assertEqual(obj2, self.transcoder.decode(data))
|
|
1264
1562
|
|
|
1265
|
-
|
|
1266
|
-
data = self.transcoder.encode(
|
|
1267
|
-
self.assertEqual(
|
|
1563
|
+
obj3 = {"a": (1, 2), "b": [3, 4]}
|
|
1564
|
+
data = self.transcoder.encode(obj3)
|
|
1565
|
+
self.assertEqual(obj3, self.transcoder.decode(data))
|
|
1268
1566
|
|
|
1269
|
-
def test_custom_type_in_dict(self):
|
|
1567
|
+
def test_custom_type_in_dict(self) -> None:
|
|
1270
1568
|
# Int in dict in dict in dict.
|
|
1271
1569
|
obj = {"a": CustomType2(CustomType1(UUID("b2723fe2c01a40d2875ea3aac6a09ff5")))}
|
|
1272
1570
|
data = self.transcoder.encode(obj)
|
|
1273
1571
|
decoded_obj = self.transcoder.decode(data)
|
|
1274
1572
|
self.assertEqual(obj, decoded_obj)
|
|
1275
1573
|
|
|
1276
|
-
def test_nested_custom_type(self):
|
|
1574
|
+
def test_nested_custom_type(self) -> None:
|
|
1277
1575
|
obj = CustomType2(CustomType1(UUID("b2723fe2c01a40d2875ea3aac6a09ff5")))
|
|
1278
1576
|
data = self.transcoder.encode(obj)
|
|
1279
1577
|
expect = (
|
|
@@ -1289,7 +1587,7 @@ class TranscoderTestCase(TestCase):
|
|
|
1289
1587
|
self.assertIsInstance(copy.value.value, UUID)
|
|
1290
1588
|
self.assertEqual(copy.value.value, obj.value.value)
|
|
1291
1589
|
|
|
1292
|
-
def test_custom_type_error(self):
|
|
1590
|
+
def test_custom_type_error(self) -> None:
|
|
1293
1591
|
# Expect a TypeError when encoding because transcoding not registered.
|
|
1294
1592
|
with self.assertRaises(TypeError) as cm:
|
|
1295
1593
|
self.transcoder.encode(MyClass())
|