eventsourcing 9.4.6__py3-none-any.whl → 9.5.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of eventsourcing might be problematic (it is a pre-release).

eventsourcing/sqlite.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import sqlite3
 from contextlib import contextmanager
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Literal, cast
 from uuid import UUID
 
 from eventsourcing.persistence import (
@@ -213,6 +213,7 @@ class SQLiteDatastore:
         max_age: float | None = None,
         pre_ping: bool = False,
         single_row_tracking: bool = True,
+        originator_id_type: Literal["uuid", "text"] = "uuid",
     ):
         self.pool = SQLiteConnectionPool(
             db_name=db_name,
@@ -224,6 +225,7 @@
             pre_ping=pre_ping,
         )
         self.single_row_tracking = single_row_tracking
+        self.originator_id_type = originator_id_type
 
     @contextmanager
     def transaction(self, *, commit: bool) -> Iterator[SQLiteCursor]:
@@ -268,6 +270,13 @@ class SQLiteRecorder(Recorder):
         for statement in self.create_table_statements:
             c.execute(statement)
 
+    def convert_originator_id(self, originator_id: str) -> UUID | str:
+        return (
+            UUID(originator_id)
+            if self.datastore.originator_id_type == "uuid"
+            else originator_id
+        )
+
 
 class SQLiteAggregateRecorder(SQLiteRecorder, AggregateRecorder):
     def __init__(
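The hunks above add an originator_id_type option to SQLiteDatastore and a convert_originator_id() hook on SQLiteRecorder, so originator IDs read back from the database can be returned either as UUID values (the default) or as plain strings. A minimal sketch of the difference, assuming the recorder constructor still takes the datastore as its first argument, as in 9.4.x:

    from uuid import UUID, uuid4

    from eventsourcing.sqlite import SQLiteAggregateRecorder, SQLiteDatastore

    # Default behaviour: IDs read back from the database are parsed into UUIDs.
    uuid_datastore = SQLiteDatastore(db_name=":memory:")
    uuid_recorder = SQLiteAggregateRecorder(uuid_datastore)
    assert isinstance(uuid_recorder.convert_originator_id(str(uuid4())), UUID)

    # With "text", the stored string is returned unchanged.
    text_datastore = SQLiteDatastore(db_name=":memory:", originator_id_type="text")
    text_recorder = SQLiteAggregateRecorder(text_datastore)
    assert text_recorder.convert_originator_id("order-42") == "order-42"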
@@ -358,7 +367,7 @@ class SQLiteAggregateRecorder(SQLiteRecorder, AggregateRecorder):
         c.execute(statement, params)
         return [
             StoredEvent(
-                originator_id=row["originator_id"],
+                originator_id=self.convert_originator_id(row["originator_id"]),
                 originator_version=row["originator_version"],
                 topic=row["topic"],
                 state=row["state"],
@@ -467,7 +476,7 @@ class SQLiteApplicationRecorder(
         return [
             Notification(
                 id=row["rowid"],
-                originator_id=row["originator_id"],
+                originator_id=self.convert_originator_id(row["originator_id"]),
                 originator_version=row["originator_version"],
                 topic=row["topic"],
                 state=row["state"],
@@ -674,6 +683,7 @@ class SQLiteFactory(InfrastructureFactory[SQLiteTrackingRecorder]):
     SQLITE_DBNAME = "SQLITE_DBNAME"
     SQLITE_LOCK_TIMEOUT = "SQLITE_LOCK_TIMEOUT"
     SQLITE_SINGLE_ROW_TRACKING = "SINGLE_ROW_TRACKING"
+    ORIGINATOR_ID_TYPE = "ORIGINATOR_ID_TYPE"
     CREATE_TABLE = "CREATE_TABLE"
 
     aggregate_recorder_class = SQLiteAggregateRecorder
@@ -713,10 +723,22 @@ class SQLiteFactory(InfrastructureFactory[SQLiteTrackingRecorder]):
             self.env.get(self.SQLITE_SINGLE_ROW_TRACKING, "t")
         )
 
+        originator_id_type = cast(
+            Literal["uuid", "text"],
+            self.env.get(self.ORIGINATOR_ID_TYPE, "uuid"),
+        )
+        if originator_id_type.lower() not in ("uuid", "text"):
+            msg = (
+                f"Invalid {self.ORIGINATOR_ID_TYPE} '{originator_id_type}', "
+                f"must be 'uuid' or 'text'"
+            )
+            raise OSError(msg)
+
         self.datastore = SQLiteDatastore(
             db_name=db_name,
             lock_timeout=lock_timeout,
             single_row_tracking=single_row_tracking,
+            originator_id_type=originator_id_type,
         )
 
     def aggregate_recorder(self, purpose: str = "events") -> AggregateRecorder:
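At the factory level, the same choice is read from the new ORIGINATOR_ID_TYPE environment key, which defaults to "uuid" and rejects other values. A sketch of configuring it through the environment; the PERSISTENCE_MODULE key and the app.factory.datastore attribute path are assumed to be unchanged from 9.4.x:

    import os
    from uuid import UUID

    from eventsourcing.application import Application

    os.environ["PERSISTENCE_MODULE"] = "eventsourcing.sqlite"
    os.environ["SQLITE_DBNAME"] = ":memory:"
    os.environ["ORIGINATOR_ID_TYPE"] = "text"  # new key; default is "uuid"

    app = Application[UUID]()
    assert app.factory.datastore.originator_id_type == "text"

    # Any other value is rejected when the factory is constructed.
    os.environ["ORIGINATOR_ID_TYPE"] = "int"
    try:
        Application[UUID]()
    except OSError as e:
        print(e)  # Invalid ORIGINATOR_ID_TYPE 'int', must be 'uuid' or 'text'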
@@ -165,6 +165,10 @@ class ApplicationTestCase(TestCase):
 
         self.assertEqual(MyApplication2.name, "MyBoundedContext")
 
+    def test_as_context_manager(self) -> None:
+        with Application[UUID]():
+            pass
+
     def test_resolve_persistence_topics(self) -> None:
         # None specified.
         app = Application[UUID]()
@@ -475,5 +479,5 @@ class ApplicationTestCase(TestCase):
         self.assertEqual(1, len(w))
         self.assertIs(w[-1].category, DeprecationWarning)
         self.assertIn(
-            "'log' is deprecated, use 'notifications' instead", str(w[-1].message)
+            "'log' is deprecated, use 'notification_log' instead", str(w[-1].message)
         )
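The new test_as_context_manager test above exercises Application as a context manager. A short usage sketch; that leaving the block closes the application as app.close() would is an assumption not shown in this diff:

    from uuid import UUID

    from eventsourcing.application import Application

    app = Application[UUID]()
    with app:
        # Use the application inside the block; the default POPO recorder
        # starts out empty.
        assert app.recorder.max_notification_id() is None
    # On exit, the application's infrastructure is assumed to be released.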
@@ -45,9 +45,14 @@ if TYPE_CHECKING:
     from typing_extensions import Never
 
 
-class AggregateRecorderTestCase(TestCase, ABC):
+class RecorderTestCase(TestCase, ABC):
     INITIAL_VERSION = 1
 
+    def new_originator_id(self) -> UUID | str:
+        return uuid4()
+
+
+class AggregateRecorderTestCase(RecorderTestCase, ABC):
     @abstractmethod
     def create_recorder(self) -> AggregateRecorder:
         """"""
@@ -61,7 +66,7 @@ class AggregateRecorderTestCase(TestCase, ABC):
         self.assertEqual(notification_ids, None)
 
         # Select stored events, expect empty list.
-        originator_id1 = uuid4()
+        originator_id1 = self.new_originator_id()
         self.assertEqual(
             recorder.select_events(originator_id1, desc=True, limit=1),
             [],
@@ -79,7 +84,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
 
         # Select stored events, expect list of one.
         stored_events = recorder.select_events(originator_id1)
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(len(stored_events), 1)
         self.assertEqual(stored_events[0].originator_id, originator_id1)
         self.assertEqual(stored_events[0].originator_version, self.INITIAL_VERSION)
@@ -106,7 +110,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
 
         # Check still only have one record.
         stored_events = recorder.select_events(originator_id1)
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(len(stored_events), 1)
         self.assertEqual(stored_events[0].originator_id, stored_event1.originator_id)
         self.assertEqual(
@@ -126,7 +129,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
 
         # Check we got what was written.
         stored_events = recorder.select_events(originator_id1)
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(len(stored_events), 3)
         self.assertEqual(stored_events[0].originator_id, originator_id1)
         self.assertEqual(stored_events[0].originator_version, self.INITIAL_VERSION)
@@ -143,7 +145,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
 
         # Check we can get the last one recorded (used to get last snapshot).
         stored_events = recorder.select_events(originator_id1, desc=True, limit=1)
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(len(stored_events), 1)
         self.assertEqual(
             stored_events[0],
@@ -154,7 +155,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
         stored_events = recorder.select_events(
             originator_id1, lte=self.INITIAL_VERSION + 1, desc=True, limit=1
         )
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(len(stored_events), 1)
         self.assertEqual(
             stored_events[0],
@@ -165,7 +165,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
         stored_events = recorder.select_events(
             originator_id1, gt=self.INITIAL_VERSION, lte=self.INITIAL_VERSION + 1
         )
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(len(stored_events), 1)
         self.assertEqual(
             stored_events[0],
@@ -173,7 +172,7 @@ class AggregateRecorderTestCase(TestCase, ABC):
         )
 
         # Check aggregate sequences are distinguished.
-        originator_id2 = uuid4()
+        originator_id2 = self.new_originator_id()
         self.assertEqual(
             recorder.select_events(originator_id2),
             [],
@@ -188,7 +187,6 @@ class AggregateRecorderTestCase(TestCase, ABC):
         )
         recorder.insert_events([stored_event4])
         stored_events = recorder.select_events(originator_id2)
-        stored_events = convert_stored_event_originator_ids(stored_events)
         self.assertEqual(
             stored_events,
             [stored_event4],
@@ -199,7 +197,7 @@ class AggregateRecorderTestCase(TestCase, ABC):
         recorder = self.create_recorder()
 
         def insert() -> None:
-            originator_id = uuid4()
+            originator_id = self.new_originator_id()
 
             stored_event = StoredEvent(
                 originator_id=originator_id,
@@ -227,41 +225,9 @@ _TApplicationRecorder = TypeVar(
 )
 
 
-def convert_notification_originator_ids(
-    notifications: Sequence[Notification],
-) -> Sequence[Notification]:
-    return [
-        Notification(
-            originator_id=convert_originator_id(n.originator_id),
-            originator_version=n.originator_version,
-            topic=n.topic,
-            state=n.state,
-            id=n.id,
-        )
-        for n in notifications
-    ]
-
-
-def convert_stored_event_originator_ids(
-    stored_events: Sequence[StoredEvent],
-) -> Sequence[StoredEvent]:
-    return [
-        StoredEvent(
-            originator_id=convert_originator_id(s.originator_id),
-            originator_version=s.originator_version,
-            topic=s.topic,
-            state=s.state,
-        )
-        for s in stored_events
-    ]
-
-
-def convert_originator_id(originator_id: UUID | str) -> UUID:
-    return originator_id if isinstance(originator_id, UUID) else UUID(originator_id)
-
-
-class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder]):
-    INITIAL_VERSION = 1
+class ApplicationRecorderTestCase(
+    RecorderTestCase, ABC, Generic[_TApplicationRecorder]
+):
     EXPECT_CONTIGUOUS_NOTIFICATION_IDS = True
 
     @abstractmethod
@@ -282,8 +248,8 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertIsNone(recorder.max_notification_id())
 
         # Write two stored events.
-        originator_id1 = uuid4()
-        originator_id2 = uuid4()
+        originator_id1 = self.new_originator_id()
+        originator_id2 = self.new_originator_id()
 
         stored_event1 = StoredEvent(
             originator_id=originator_id1,
@@ -324,7 +290,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
 
         # sleep(1) # Added to make eventsourcing-axon tests work.
         notifications = recorder.select_notifications(start=None, limit=10)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 3)
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -340,7 +305,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertEqual(notifications[2].state, b"state3")
 
         notifications = recorder.select_notifications(start=1, limit=10)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 3)
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -356,7 +320,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertEqual(notifications[2].state, b"state3")
 
         notifications = recorder.select_notifications(start=None, stop=2, limit=10)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 2)
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -370,7 +333,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         notifications = recorder.select_notifications(
             start=1, limit=10, inclusive_of_start=False
         )
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 2)
         self.assertEqual(notifications[0].id, 2)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -384,7 +346,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         notifications = recorder.select_notifications(
             start=2, limit=10, inclusive_of_start=False
         )
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 3)
         self.assertEqual(notifications[0].originator_id, originator_id2)
@@ -394,7 +355,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         notifications = recorder.select_notifications(
             start=None, limit=10, topics=["topic1", "topic2", "topic3"]
         )
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 3)
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -410,7 +370,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertEqual(notifications[2].state, b"state3")
 
         notifications = recorder.select_notifications(1, 10, topics=["topic1"])
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -418,7 +377,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertEqual(notifications[0].state, b"state1")
 
         notifications = recorder.select_notifications(1, 3, topics=["topic2"])
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 2)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -426,7 +384,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertEqual(notifications[0].state, b"state2")
 
         notifications = recorder.select_notifications(1, 3, topics=["topic3"])
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 3)
         self.assertEqual(notifications[0].originator_id, originator_id2)
@@ -434,7 +391,6 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         self.assertEqual(notifications[0].state, b"state3")
 
         notifications = recorder.select_notifications(1, 3, topics=["topic1", "topic3"])
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 2)
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[0].originator_id, originator_id1)
@@ -448,42 +404,34 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
 
         # Check limit is working
         notifications = recorder.select_notifications(None, 1)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 1)
 
         notifications = recorder.select_notifications(2, 1)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 2)
 
         notifications = recorder.select_notifications(1, 1, inclusive_of_start=False)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 2)
 
         notifications = recorder.select_notifications(2, 2)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 2)
         self.assertEqual(notifications[0].id, 2)
         self.assertEqual(notifications[1].id, 3)
 
         notifications = recorder.select_notifications(3, 1)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 3)
 
         notifications = recorder.select_notifications(3, 1, inclusive_of_start=False)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 0)
 
         notifications = recorder.select_notifications(start=2, limit=10, stop=2)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1)
         self.assertEqual(notifications[0].id, 2)
 
         notifications = recorder.select_notifications(start=1, limit=10, stop=2)
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 2, len(notifications))
         self.assertEqual(notifications[0].id, 1)
         self.assertEqual(notifications[1].id, 2)
@@ -491,10 +439,36 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         notifications = recorder.select_notifications(
             start=1, limit=10, stop=2, inclusive_of_start=False
         )
-        notifications = convert_notification_originator_ids(notifications)
         self.assertEqual(len(notifications), 1, len(notifications))
         self.assertEqual(notifications[0].id, 2)
 
+    def test_performance(self) -> None:
+        # Construct the recorder.
+        recorder = self.create_recorder()
+
+        def insert() -> None:
+            originator_id = self.new_originator_id()
+
+            stored_event = StoredEvent(
+                originator_id=originator_id,
+                originator_version=self.INITIAL_VERSION,
+                topic="topic1",
+                state=b"state1",
+            )
+            recorder.insert_events([stored_event])
+
+        # Warm up.
+        number = 10
+        timeit(insert, number=number)
+
+        number = 100
+        duration = timeit(insert, number=number)
+        print(
+            self,
+            f"\n{1000000 * duration / number:.1f} μs per insert, "
+            f"{number / duration:.0f} inserts per second",
+        )
+
     def test_concurrent_no_conflicts(self) -> None:
         print(self)
 
@@ -525,7 +499,7 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
             # thread_num = threads[thread_id]
             # count = counts[thread_id]
 
-            originator_id = uuid4()
+            originator_id = self.new_originator_id()
             stored_events = [
                 StoredEvent(
                     originator_id=originator_id,
@@ -612,7 +586,7 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         num_workers = 4
 
        def insert_events() -> None:
-            originator_id = uuid4()
+            originator_id = self.new_originator_id()
            stored_events = [
                StoredEvent(
                    originator_id=originator_id,
@@ -663,8 +637,8 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         max_notification_id1 = recorder.max_notification_id()
 
         # Write two stored events.
-        originator_id1 = uuid4()
-        originator_id2 = uuid4()
+        originator_id1 = self.new_originator_id()
+        originator_id2 = self.new_originator_id()
 
         stored_event1 = StoredEvent(
             originator_id=originator_id1,
@@ -893,7 +867,7 @@ class TrackingRecorderTestCase(TestCase, ABC):
             tracking_recorder.wait("upstream1", 22, interrupt=interrupt)
 
 
-class ProcessRecorderTestCase(TestCase, ABC):
+class ProcessRecorderTestCase(RecorderTestCase, ABC):
     @abstractmethod
     def create_recorder(self) -> ProcessRecorder:
         """"""
@@ -906,8 +880,8 @@ class ProcessRecorderTestCase(TestCase, ABC):
         self.assertIsNone(recorder.max_tracking_id("upstream_app"))
 
         # Write two stored events.
-        originator_id1 = uuid4()
-        originator_id2 = uuid4()
+        originator_id1 = self.new_originator_id()
+        originator_id2 = self.new_originator_id()
 
         stored_event1 = StoredEvent(
             originator_id=originator_id1,
@@ -1086,7 +1060,7 @@ class ProcessRecorderTestCase(TestCase, ABC):
         notification_ids = iter(range(1, number + 1))
 
         def insert_events() -> None:
-            originator_id = uuid4()
+            originator_id = self.new_originator_id()
 
             stored_event = StoredEvent(
                 originator_id=originator_id,
@@ -1114,7 +1088,7 @@ class ProcessRecorderTestCase(TestCase, ABC):
         )
 
 
-class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
+class NonInterleavingNotificationIDsBaseCase(RecorderTestCase, ABC):
     insert_num = 1000
 
     def test(self) -> None:
@@ -1124,8 +1098,8 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
 
         race_started = Event()
 
-        originator1_id = uuid4()
-        originator2_id = uuid4()
+        originator1_id = self.new_originator_id()
+        originator2_id = self.new_originator_id()
 
         stack1 = self.create_stack(originator1_id)
         stack2 = self.create_stack(originator2_id)
@@ -1159,7 +1133,6 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
             limit=2 * self.insert_num,
             inclusive_of_start=False,
         )
-        notifications = convert_notification_originator_ids(notifications)
         ids_for_sequence1 = [
             e.id for e in notifications if e.originator_id == originator1_id
         ]
@@ -1179,7 +1152,7 @@ class NonInterleavingNotificationIDsBaseCase(ABC, TestCase):
         else:
             self.assertGreater(min_id_for_sequence2, max_id_for_sequence1)
 
-    def create_stack(self, originator_id: UUID) -> Sequence[StoredEvent]:
+    def create_stack(self, originator_id: UUID | str) -> Sequence[StoredEvent]:
         return [
             StoredEvent(
                 originator_id=originator_id,
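The shared recorder test cases above now draw originator IDs from RecorderTestCase.new_originator_id() instead of calling uuid4() directly, so a backend-specific test case can exercise text IDs by overriding one method. A hypothetical subclass pairing the hook with the new SQLite option (the class name and the recorder construction details are illustrative assumptions):

    from uuid import uuid4

    from eventsourcing.persistence import AggregateRecorder
    from eventsourcing.sqlite import SQLiteAggregateRecorder, SQLiteDatastore
    from eventsourcing.tests.persistence import AggregateRecorderTestCase


    class TestSQLiteTextOriginatorIDs(AggregateRecorderTestCase):
        def new_originator_id(self) -> str:
            # Feed the shared tests text IDs instead of UUIDs.
            return f"aggregate-{uuid4()}"

        def create_recorder(self) -> AggregateRecorder:
            datastore = SQLiteDatastore(db_name=":memory:", originator_id_type="text")
            recorder = SQLiteAggregateRecorder(datastore)
            recorder.create_table()
            return recorder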
@@ -3,7 +3,19 @@ import os
 import psycopg
 from psycopg.sql import SQL, Identifier
 
+from eventsourcing.dcb.postgres_tt import (
+    DB_FUNCTION_NAME_DCB_CONDITIONAL_APPEND_TT,
+    DB_TYPE_NAME_DCB_EVENT_TT,
+    DB_TYPE_NAME_DCB_QUERY_ITEM_TT,
+)
 from eventsourcing.postgres import PostgresDatastore
+from examples.coursebookingdcb.postgres_ts import (
+    PG_FUNCTION_NAME_DCB_CHECK_APPEND_CONDITION_TS,
+    PG_FUNCTION_NAME_DCB_INSERT_EVENTS_TS,
+    PG_FUNCTION_NAME_DCB_SELECT_EVENTS_TS,
+    PG_PROCEDURE_NAME_DCB_APPEND_EVENTS_TS,
+    PG_TYPE_NAME_DCB_EVENT_TS,
+)
 
 
 def pg_close_all_connections(
@@ -64,8 +76,54 @@ def drop_tables() -> None:
        for row in fetchall:
            table_name = row["table_name"]
            # print(f"Dropping table '{table_name}' in schema '{schema}'")
-            statement = SQL("DROP TABLE IF EXISTS {0}.{1}").format(
+            statement = SQL("DROP TABLE IF EXISTS {0}.{1} CASCADE").format(
                Identifier(datastore.schema), Identifier(table_name)
            )
            curs.execute(statement, prepare=False)
            # print(f"Dropped table '{table_name}' in schema '{schema}'")
+
+        # Also drop composite types.
+        composite_types = [
+            "stored_event_uuid",
+            "stored_event_text",
+            PG_TYPE_NAME_DCB_EVENT_TS,
+            DB_TYPE_NAME_DCB_EVENT_TT,
+            DB_TYPE_NAME_DCB_QUERY_ITEM_TT,
+        ]
+        for name in composite_types:
+            statement = SQL("DROP TYPE IF EXISTS {schema}.{name} CASCADE").format(
+                schema=Identifier(datastore.schema),
+                name=Identifier(name),
+            )
+            curs.execute(statement, prepare=False)
+
+        # Also drop functions.
+        functions = [
+            "es_insert_events_uuid",
+            "es_insert_events_text",
+            PG_FUNCTION_NAME_DCB_INSERT_EVENTS_TS,
+            PG_FUNCTION_NAME_DCB_SELECT_EVENTS_TS,
+            PG_FUNCTION_NAME_DCB_CHECK_APPEND_CONDITION_TS,
+            DB_FUNCTION_NAME_DCB_CONDITIONAL_APPEND_TT,
+        ]
+        for name in functions:
+            statement = SQL(
+                "DROP FUNCTION IF EXISTS {schema}.{name} CASCADE"
+            ).format(
+                schema=Identifier(datastore.schema),
+                name=Identifier(name),
+            )
+            curs.execute(statement, prepare=False)
+
+        # Also drop procedures.
+        procedures = [
+            PG_PROCEDURE_NAME_DCB_APPEND_EVENTS_TS,
+        ]
+        for name in procedures:
+            statement = SQL(
+                "DROP PROCEDURE IF EXISTS {schema}.{name} CASCADE"
+            ).format(
+                schema=Identifier(datastore.schema),
+                name=Identifier(name),
+            )
+            curs.execute(statement, prepare=False)
eventsourcing/utils.py CHANGED
@@ -35,12 +35,16 @@ def get_topic(obj: SupportsTopic, /) -> str:
     try:
         return _type_cache[obj]
     except KeyError:
-        topic = getattr(obj, "TOPIC", f"{obj.__module__}:{obj.__qualname__}")
+        topic = construct_topic(obj)
         register_topic(topic, obj)
         _type_cache[obj] = topic
         return topic
 
 
+def construct_topic(obj: SupportsTopic, /) -> str:
+    return getattr(obj, "TOPIC", f"{obj.__module__}:{obj.__qualname__}")
+
+
 def resolve_topic(topic: str) -> Any:
     """Returns an object located by the given topic.
 
@@ -113,8 +117,8 @@ def register_topic(topic: str, obj: SupportsTopic) -> None:
     else:
         if cached_obj != obj:
             msg = (
-                f"Object {cached_obj} is already registered "
-                f"for topic '{topic}', so refusing to cache obj {obj}"
+                f"Refusing to cache {obj} (oid {id(obj)}): {cached_obj} (oid "
+                f"{id(cached_obj)}) is already registered for topic '{topic}'"
             )
             raise TopicError(msg)
 
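The new construct_topic() helper in eventsourcing/utils.py factors out the topic-string construction that get_topic() previously did inline, without touching the topic cache. A small usage sketch:

    from eventsourcing.utils import construct_topic, get_topic, resolve_topic


    class Order:
        pass


    # construct_topic() only builds "module:qualname" (or the object's TOPIC
    # attribute, if it defines one); it does not register anything.
    assert construct_topic(Order) == f"{Order.__module__}:Order"

    # get_topic() also registers the topic, so resolve_topic() can find it.
    topic = get_topic(Order)
    assert resolve_topic(topic) is Order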
@@ -1,13 +1,13 @@
 Metadata-Version: 2.3
 Name: eventsourcing
-Version: 9.4.6
+Version: 9.5.0a0
 Summary: Event sourcing in Python
 License: BSD-3-Clause
 Keywords: event sourcing,event store,domain driven design,domain-driven design,ddd,cqrs,cqs
 Author: John Bywater
 Author-email: john.bywater@appropriatesoftware.net
 Requires-Python: >=3.9.2
-Classifier: Development Status :: 5 - Production/Stable
+Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Education
 Classifier: Intended Audience :: Science/Research
@@ -0,0 +1,33 @@
+eventsourcing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+eventsourcing/application.py,sha256=MT24nfoakzRhpHJsV0knNz3Z3Dammh9m3MYipzo1gr8,37122
+eventsourcing/cipher.py,sha256=ulTBtX5K9ejRAkdUaUbdIaj4H7anYwDOi7JxOolj2uo,3295
+eventsourcing/compressor.py,sha256=qEYWvsUXFLyhKgfuv-HGNJ6VF4sRw4z0IxbNW9ukOfc,385
+eventsourcing/cryptography.py,sha256=aFZLlJxxSb5seVbh94-T8FA_RIGOe-VFu5SJrbOnwUU,2969
+eventsourcing/dcb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+eventsourcing/dcb/api.py,sha256=daCNfhFdAj9DE0CJDHMLWo2fwAhDlDA5jJ2Jx2Tz0EQ,1628
+eventsourcing/dcb/application.py,sha256=OlC6H09tGG43Fk9RkeP9rxEiyz_1bSQZbZCXw5cQC6w,3845
+eventsourcing/dcb/domain.py,sha256=vZ3kv_lTTlzVY1kPWAr7OPGGGHntfZtU0vFJLKqp8Ks,14523
+eventsourcing/dcb/persistence.py,sha256=PuBC1LvYH-AWc233qaTTATFMyWFMeTB-CV1GxiEhb1M,4126
+eventsourcing/dcb/popo.py,sha256=0X_9mcC6II1zqYI1gRAqYdQ5hvAcUTtoPPgJ3K8akjk,3145
+eventsourcing/dcb/postgres_tt.py,sha256=Wn4EPnHojmjGRALPSZfz7_1Qgs0mmBquCKcibYkuruQ,19674
+eventsourcing/dispatch.py,sha256=-yI-0EpyXnpMBkciTHNPlxSHJebUe7Ko9rT-gdOjoIo,2797
+eventsourcing/domain.py,sha256=YmI1_Szmefceh1IBfghzQd-A-vkHGDU5yeJj5nHaXns,76301
+eventsourcing/interface.py,sha256=K7tAJjriOJa_XB9-wptJR9VTb5sHlBpqrz3BGUXxI4A,5387
+eventsourcing/persistence.py,sha256=WcOpFo7710MU5xeOrlqYxJWJ1bTaDSrkbhw1gGHAzmY,49509
+eventsourcing/popo.py,sha256=fBPLn6_49kDbfqtFvVPozYfPQfeSl4VhBP38klCUL_A,9203
+eventsourcing/postgres.py,sha256=xj5xMvBG5WW8SKqj26rVTqq7CREfQqrGmS0_GlDx0Aw,53980
+eventsourcing/projection.py,sha256=iSNSRMEJO3W6NspNDZRk68ABkOXkmiNolkA41QYVNXk,14962
+eventsourcing/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+eventsourcing/sqlite.py,sha256=xvgu9euGYalgMoKurCuzBWySEMi3Ga4rGzXmm0XGep8,27949
+eventsourcing/system.py,sha256=JG9JudGCbnSSF9N9UHBYCQT-zUgL2XXw0As3TGsGLDo,45933
+eventsourcing/tests/__init__.py,sha256=FtOyuj-L-oSisYeByTIrnUw-XzsctSbq76XmjPy5fMc,102
+eventsourcing/tests/application.py,sha256=pE2tYfuykbV4Q6WW1U-gi_YgyW2485NGLXkemaH46Do,18072
+eventsourcing/tests/domain.py,sha256=yN-F6gMRumeX6nIXIcZGxAR3RrUslzmEMM8JksnkI8Q,3227
+eventsourcing/tests/persistence.py,sha256=Rux3Wq30moBGioUvpa091Mego3o4zCTvpU1DwJi9zg0,62340
+eventsourcing/tests/postgres_utils.py,sha256=Wl7SnPhCctT7bgQesWAlmeyvAeQlm5Vb9MARwhuka74,4562
+eventsourcing/utils.py,sha256=9ZcCyttDqEFu3EMiKq0t04tSY4_PHTsZcuctaBNC9ms,8691
+eventsourcing-9.5.0a0.dist-info/AUTHORS,sha256=8aHOM4UbNZcKlD-cHpFRcM6RWyCqtwtxRev6DeUgVRs,137
+eventsourcing-9.5.0a0.dist-info/LICENSE,sha256=CQEQzcZO8AWXL5i3hIo4yVKrYjh2FBz6hCM7kpXWpw4,1512
+eventsourcing-9.5.0a0.dist-info/METADATA,sha256=psiUPn_VCBGOoDGHWs5ABO-G4eAcYbJvBt9okp2ZMSA,10255
+eventsourcing-9.5.0a0.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+eventsourcing-9.5.0a0.dist-info/RECORD,,