eventsourcing-9.4.4-py3-none-any.whl → eventsourcing-9.4.6-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.


@@ -57,7 +57,11 @@ class NotificationLogJSONService(NotificationLogInterface, Generic[TApplication]
             "items": [
                 {
                     "id": item.id,
-                    "originator_id": item.originator_id.hex,
+                    "originator_id": (
+                        item.originator_id.hex
+                        if isinstance(item.originator_id, UUID)
+                        else item.originator_id
+                    ),
                     "originator_version": item.originator_version,
                     "topic": item.topic,
                     "state": b64encode(item.state).decode("utf8"),
@@ -85,7 +89,11 @@ class NotificationLogJSONService(NotificationLogInterface, Generic[TApplication]
         [
             {
                 "id": notification.id,
-                "originator_id": notification.originator_id.hex,
+                "originator_id": (
+                    notification.originator_id.hex
+                    if isinstance(notification.originator_id, UUID)
+                    else notification.originator_id
+                ),
                 "originator_version": notification.originator_version,
                 "topic": notification.topic,
                 "state": b64encode(notification.state).decode("utf8"),
eventsourcing/persistence.py CHANGED
@@ -1,25 +1,32 @@
 from __future__ import annotations
 
 import json
-import uuid
+import typing
 from abc import ABC, abstractmethod
 from collections import deque
-from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Hashable, Iterator, Mapping, Sequence
 from dataclasses import dataclass
 from datetime import datetime
 from decimal import Decimal
+from functools import lru_cache
 from queue import Queue
 from threading import Condition, Event, Lock, Semaphore, Thread, Timer
 from time import monotonic, sleep, time
 from types import GenericAlias, ModuleType
-from typing import TYPE_CHECKING, Any, Generic, Union, cast
+from typing import TYPE_CHECKING, Any, Callable, Generic, Union, cast
 from uuid import UUID
 
 from typing_extensions import TypeVar
 
-from eventsourcing.domain import DomainEventProtocol, EventSourcingError
+from eventsourcing.domain import (
+    DomainEventProtocol,
+    EventSourcingError,
+    HasOriginatorIDVersion,
+    TAggregateID,
+)
 from eventsourcing.utils import (
     Environment,
+    EnvType,
     TopicError,
     get_topic,
     resolve_topic,
@@ -178,7 +185,7 @@ class StoredEvent:
     objects and :class:`~eventsourcing.domain.Snapshot` objects.
     """
 
-    originator_id: uuid.UUID
+    originator_id: UUID | str
     """ID of the originating aggregate."""
     originator_version: int
     """Position in an aggregate sequence."""
@@ -220,7 +227,10 @@ class MapperDeserialisationError(EventSourcingError, ValueError):
     """Raised when deserialization fails in a Mapper."""
 
 
-class Mapper:
+TAggregateIDType = TypeVar("TAggregateIDType", type[UUID], type[str])
+
+
+class Mapper(Generic[TAggregateID]):
     """Converts between domain event objects and :class:`StoredEvent` objects.
 
     Uses a :class:`Transcoder`, and optionally a cryptographic cipher and compressor.
@@ -236,7 +246,9 @@ class Mapper:
         self.compressor = compressor
         self.cipher = cipher
 
-    def to_stored_event(self, domain_event: DomainEventProtocol) -> StoredEvent:
+    def to_stored_event(
+        self, domain_event: DomainEventProtocol[TAggregateID]
+    ) -> StoredEvent:
         """Converts the given domain event to a :class:`StoredEvent` object."""
         topic = get_topic(domain_event.__class__)
         event_state = domain_event.__dict__.copy()
@@ -257,8 +269,12 @@ class Mapper:
             state=stored_state,
         )
 
-    def to_domain_event(self, stored_event: StoredEvent) -> DomainEventProtocol:
+    def to_domain_event(
+        self, stored_event: StoredEvent
+    ) -> DomainEventProtocol[TAggregateID]:
         """Converts the given :class:`StoredEvent` to a domain event object."""
+        cls = resolve_topic(stored_event.topic)
+
         stored_state = stored_event.state
         try:
             if self.cipher:
@@ -275,9 +291,12 @@ class Mapper:
             )
             raise MapperDeserialisationError(msg) from e
 
-        event_state["originator_id"] = stored_event.originator_id
+        id_convertor = find_id_convertor(
+            cls, cast(Hashable, type(stored_event.originator_id))
+        )
+        # print("ID of convertor:", id(convertor))
+        event_state["originator_id"] = id_convertor(stored_event.originator_id)
         event_state["originator_version"] = stored_event.originator_version
-        cls = resolve_topic(stored_event.topic)
         class_version = getattr(cls, "class_version", 1)
         from_version = event_state.pop("class_version", 1)
         while from_version < class_version:
@@ -289,6 +308,66 @@ class Mapper:
         return domain_event
 
 
+@lru_cache
+def find_id_convertor(
+    domain_event_cls: type[object], originator_id_cls: type[UUID | str]
+) -> Callable[[UUID | str], UUID | str]:
+    # Try to find the originator_id type.
+    if issubclass(domain_event_cls, HasOriginatorIDVersion):
+        # For classes that inherit CanMutateAggregate and don't use a different
+        # mapper, assume they aren't overriding __init_subclass__ in a way
+        # that prevents 'originator_id_type' being found from type arguments and
+        # set on the class.
+        # TODO: Write a test where a custom class does override __init_subclass__
+        # so that the next line will cause an AssertionError. Then fix this code.
+        if domain_event_cls.originator_id_type is None:
+            msg = "originator_id_type cannot be None"
+            raise TypeError(msg)
+        originator_id_type = domain_event_cls.originator_id_type
+    else:
+        # Otherwise look for annotations.
+        for cls in domain_event_cls.__mro__:
+            try:
+                annotation = cls.__annotations__["originator_id"]
+            except (KeyError, AttributeError):  # noqa: PERF203
+                continue
+            else:
+                valid_annotations = {
+                    str: str,
+                    UUID: UUID,
+                    "str": str,
+                    "UUID": UUID,
+                    "uuid.UUID": UUID,
+                }
+                if annotation not in valid_annotations:
+                    msg = f"originator_id annotation on {cls} is neither UUID nor str"
+                    raise TypeError(msg)
+                assert annotation in valid_annotations, annotation
+                originator_id_type = valid_annotations[annotation]
+                break
+        else:
+            msg = (
+                f"Neither event class {domain_event_cls} "
+                f"nor its bases have an originator_id annotation"
+            )
+            raise TypeError(msg)
+
+    if originator_id_cls is str and originator_id_type is UUID:
+        convertor = str_to_uuid_convertor
+    else:
+        convertor = pass_through_convertor
+    return convertor
+
+
+def str_to_uuid_convertor(originator_id: UUID | str) -> UUID | str:
+    assert isinstance(originator_id, str)
+    return UUID(originator_id)
+
+
+def pass_through_convertor(originator_id: UUID | str) -> UUID | str:
+    return originator_id
+
+
 class RecordConflictError(EventSourcingError):
     """Legacy exception, replaced with IntegrityError."""
 
@@ -366,20 +445,20 @@ class AggregateRecorder(Recorder, ABC):
 
     @abstractmethod
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         """Writes stored events into database."""
 
     @abstractmethod
     def select_events(
         self,
-        originator_id: UUID,
+        originator_id: UUID | str,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> list[StoredEvent]:
+    ) -> Sequence[StoredEvent]:
         """Reads stored events from database."""
 
 
@@ -405,7 +484,7 @@ class ApplicationRecorder(AggregateRecorder):
         topics: Sequence[str] = (),
         *,
         inclusive_of_start: bool = True,
-    ) -> list[Notification]:
+    ) -> Sequence[Notification]:
         """Returns a list of Notification objects representing events from an
         application sequence. If `inclusive_of_start` is True (the default),
         the returned Notification objects will have IDs greater than or equal
@@ -510,29 +589,29 @@ class ProcessRecorder(TrackingRecorder, ApplicationRecorder, ABC):
 
 
 @dataclass(frozen=True)
-class Recording:
+class Recording(Generic[TAggregateID]):
     """Represents the recording of a domain event."""
 
-    domain_event: DomainEventProtocol
+    domain_event: DomainEventProtocol[TAggregateID]
     """The domain event that has been recorded."""
     notification: Notification
     """A Notification that represents the domain event in the application sequence."""
 
 
-class EventStore:
+class EventStore(Generic[TAggregateID]):
     """Stores and retrieves domain events."""
 
     def __init__(
         self,
-        mapper: Mapper,
+        mapper: Mapper[TAggregateID],
         recorder: AggregateRecorder,
     ):
-        self.mapper = mapper
+        self.mapper: Mapper[TAggregateID] = mapper
         self.recorder = recorder
 
     def put(
-        self, domain_events: Sequence[DomainEventProtocol], **kwargs: Any
-    ) -> list[Recording]:
+        self, domain_events: Sequence[DomainEventProtocol[TAggregateID]], **kwargs: Any
+    ) -> list[Recording[TAggregateID]]:
         """Stores domain events in aggregate sequence."""
         stored_events = list(map(self.mapper.to_stored_event, domain_events))
         recordings = []
@@ -556,13 +635,13 @@ class EventStore:
 
     def get(
         self,
-        originator_id: UUID,
+        originator_id: TAggregateID,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> Iterator[DomainEventProtocol]:
+    ) -> Iterator[DomainEventProtocol[TAggregateID]]:
         """Retrieves domain events from aggregate sequence."""
         return map(
             self.mapper.to_domain_event,
@@ -671,9 +750,9 @@ class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
             raise InfrastructureFactoryError(msg)
         return factory_cls(env=env)
 
-    def __init__(self, env: Environment):
+    def __init__(self, env: Environment | EnvType | None):
         """Initialises infrastructure factory object with given application name."""
-        self.env = env
+        self.env = env if isinstance(env, Environment) else Environment(env=env)
 
     def transcoder(
         self,
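The factory now performs the Environment coercion itself, so callers can pass a plain mapping (EnvType) or None. A minimal sketch of the coercion, mirroring the line above:

    from eventsourcing.utils import Environment

    def coerce(env: Environment | dict[str, str] | None) -> Environment:
        # Same expression as in __init__: wrap anything that isn't already an Environment.
        return env if isinstance(env, Environment) else Environment(env=env)

    assert isinstance(coerce(None), Environment)
    assert coerce({"PERSISTENCE_MODULE": "eventsourcing.popo"}).get("PERSISTENCE_MODULE")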
@@ -689,14 +768,23 @@ class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
     def mapper(
         self,
         transcoder: Transcoder | None = None,
-        mapper_class: type[Mapper] | None = None,
-    ) -> Mapper:
+        mapper_class: type[Mapper[TAggregateID]] | None = None,
+    ) -> Mapper[TAggregateID]:
         """Constructs a mapper."""
+        # Resolve MAPPER_TOPIC if no class is given.
         if mapper_class is None:
             mapper_topic = self.env.get(self.MAPPER_TOPIC)
-            mapper_class = resolve_topic(mapper_topic) if mapper_topic else Mapper
+            mapper_class = (
+                resolve_topic(mapper_topic) if mapper_topic else Mapper[TAggregateID]
+            )
+
+        # Check we have a mapper class.
+        assert mapper_class is not None
+        origin_mapper_class = typing.get_origin(mapper_class) or mapper_class
+        assert isinstance(origin_mapper_class, type), mapper_class
+        assert issubclass(origin_mapper_class, Mapper), mapper_class
 
-        assert isinstance(mapper_class, type) and issubclass(mapper_class, Mapper)
+        # Construct and return a mapper.
         return mapper_class(
             transcoder=transcoder or self.transcoder(),
             cipher=self.cipher(),
@@ -738,9 +826,9 @@ class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
 
     def event_store(
         self,
-        mapper: Mapper | None = None,
+        mapper: Mapper[TAggregateID] | None = None,
         recorder: AggregateRecorder | None = None,
-    ) -> EventStore:
+    ) -> EventStore[TAggregateID]:
         """Constructs an event store."""
         return EventStore(
             mapper=mapper or self.mapper(),
@@ -1258,9 +1346,9 @@ class ListenNotifySubscription(Subscription[TApplicationRecorder_co]):
     ) -> None:
         super().__init__(recorder=recorder, gt=gt, topics=topics)
         self._select_limit = 500
-        self._notifications: list[Notification] = []
+        self._notifications: Sequence[Notification] = []
         self._notifications_index: int = 0
-        self._notifications_queue: Queue[list[Notification]] = Queue(maxsize=10)
+        self._notifications_queue: Queue[Sequence[Notification]] = Queue(maxsize=10)
         self._has_been_notified = Event()
         self._thread_error: BaseException | None = None
         self._pull_thread = Thread(target=self._loop_on_pull)
eventsourcing/popo.py CHANGED
@@ -34,26 +34,28 @@ class POPOAggregateRecorder(POPORecorder, AggregateRecorder):
     def __init__(self) -> None:
         super().__init__()
         self._stored_events: list[StoredEvent] = []
-        self._stored_events_index: dict[UUID, dict[int, int]] = defaultdict(dict)
+        self._stored_events_index: dict[str, dict[int, int]] = defaultdict(dict)
 
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         self._insert_events(stored_events, **kwargs)
         return None
 
     def _insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         with self._database_lock:
             self._assert_uniqueness(stored_events, **kwargs)
             return self._update_table(stored_events, **kwargs)
 
-    def _assert_uniqueness(self, stored_events: list[StoredEvent], **_: Any) -> None:
+    def _assert_uniqueness(
+        self, stored_events: Sequence[StoredEvent], **_: Any
+    ) -> None:
         new = set()
         for s in stored_events:
             # Check events don't already exist.
-            if s.originator_version in self._stored_events_index[s.originator_id]:
+            if s.originator_version in self._stored_events_index[str(s.originator_id)]:
                 msg = f"Stored event already recorded: {s}"
                 raise IntegrityError(msg)
             new.add((s.originator_id, s.originator_version))
@@ -63,12 +65,12 @@ class POPOAggregateRecorder(POPORecorder, AggregateRecorder):
             raise IntegrityError(msg)
 
     def _update_table(
-        self, stored_events: list[StoredEvent], **_: Any
+        self, stored_events: Sequence[StoredEvent], **_: Any
     ) -> Sequence[int] | None:
         notification_ids = []
         for s in stored_events:
             self._stored_events.append(s)
-            self._stored_events_index[s.originator_id][s.originator_version] = (
+            self._stored_events_index[str(s.originator_id)][s.originator_version] = (
                 len(self._stored_events) - 1
             )
             notification_ids.append(len(self._stored_events))
@@ -76,17 +78,17 @@ class POPOAggregateRecorder(POPORecorder, AggregateRecorder):
 
     def select_events(
         self,
-        originator_id: UUID,
+        originator_id: UUID | str,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> list[StoredEvent]:
+    ) -> Sequence[StoredEvent]:
         with self._database_lock:
             results = []
 
-            index = self._stored_events_index[originator_id]
+            index = self._stored_events_index[str(originator_id)]
             positions: Iterable[int]
             positions = reversed_keys(index) if desc else index.keys()
             for p in positions:
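Keying the in-memory index by str lets UUID-keyed and str-keyed aggregates share one dict, provided every read and write applies the same str() normalisation. An illustrative sketch of the idea:

    from collections import defaultdict
    from uuid import uuid4

    index: dict[str, dict[int, int]] = defaultdict(dict)
    uid = uuid4()
    index[str(uid)][1] = 0        # written under a UUID originator_id
    assert 1 in index[str(uid)]   # read back with the same str() normalisation
    index["order-123"][1] = 1     # a str originator_id lands in the same dict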
@@ -107,7 +109,7 @@ class POPOApplicationRecorder(POPOAggregateRecorder, ApplicationRecorder):
         self._listeners: set[Event] = set()
 
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         notification_ids = self._insert_events(stored_events, **kwargs)
         self._notify_listeners()
@@ -121,7 +123,7 @@ class POPOApplicationRecorder(POPOAggregateRecorder, ApplicationRecorder):
         topics: Sequence[str] = (),
         *,
         inclusive_of_start: bool = True,
-    ) -> list[Notification]:
+    ) -> Sequence[Notification]:
         with self._database_lock:
             results = []
             if start is None:
@@ -221,7 +223,7 @@ class POPOProcessRecorder(
     POPOTrackingRecorder, POPOApplicationRecorder, ProcessRecorder
 ):
     def _assert_uniqueness(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> None:
         super()._assert_uniqueness(stored_events, **kwargs)
         t: Tracking | None = kwargs.get("tracking")
@@ -229,7 +231,7 @@ class POPOProcessRecorder(
             self._assert_tracking_uniqueness(t)
 
     def _update_table(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         notification_ids = super()._update_table(stored_events, **kwargs)
         t: Tracking | None = kwargs.get("tracking")
eventsourcing/postgres.py CHANGED
@@ -37,7 +37,7 @@ from eventsourcing.persistence import (
     Tracking,
     TrackingRecorder,
 )
-from eventsourcing.utils import Environment, resolve_topic, retry, strtobool
+from eventsourcing.utils import Environment, EnvType, resolve_topic, retry, strtobool
 
 if TYPE_CHECKING:
     from collections.abc import Iterator, Sequence
@@ -46,8 +46,8 @@ if TYPE_CHECKING:
     from psycopg.abc import Query
     from typing_extensions import Self
 
-logging.getLogger("psycopg.pool").setLevel(logging.CRITICAL)
-logging.getLogger("psycopg").setLevel(logging.CRITICAL)
+logging.getLogger("psycopg.pool").setLevel(logging.ERROR)
+logging.getLogger("psycopg").setLevel(logging.ERROR)
 
 # Copy of "private" psycopg.errors._NO_TRACEBACK (in case it changes)
 # From psycopg: "Don't show a complete traceback upon raising these exception.
@@ -83,16 +83,16 @@ class PostgresDatastore:
         user: str,
         password: str,
         *,
-        connect_timeout: int = 30,
-        idle_in_transaction_session_timeout: int = 0,
-        pool_size: int = 2,
-        max_overflow: int = 2,
+        connect_timeout: float = 5.0,
+        idle_in_transaction_session_timeout: float = 0,
+        pool_size: int = 1,
+        max_overflow: int = 0,
         max_waiting: int = 0,
         conn_max_age: float = 60 * 60.0,
         pre_ping: bool = False,
         lock_timeout: int = 0,
         schema: str = "",
-        pool_open_timeout: int | None = None,
+        pool_open_timeout: float | None = None,
         get_password_func: Callable[[], str] | None = None,
         single_row_tracking: bool = True,
     ):
@@ -126,8 +126,8 @@ class PostgresDatastore:
         self.schema = schema.strip() or "public"
 
     def after_connect_func(self) -> Callable[[Connection[Any]], None]:
-        statement = SQL("SET idle_in_transaction_session_timeout = '{0}s'").format(
-            self.idle_in_transaction_session_timeout
+        statement = SQL("SET idle_in_transaction_session_timeout = '{0}ms'").format(
+            int(self.idle_in_transaction_session_timeout * 1000)
         )
 
         def after_connect(conn: Connection[DictRow]) -> None:
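
Expressing the timeout in milliseconds pairs with the float-typed parameter introduced above: sub-second values are now representable, where the old '{0}s' formatting of an int could only express whole seconds. Arithmetic check of the conversion:

    idle_in_transaction_session_timeout = 0.5  # seconds, now a float
    assert int(idle_in_transaction_session_timeout * 1000) == 500  # formatted as '500ms'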
@@ -271,7 +271,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
 
     @retry((InterfaceError, OperationalError), max_attempts=10, wait=0.2)
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         exc: Exception | None = None
         notification_ids: Sequence[int] | None = None
@@ -302,7 +302,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     def _insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **_: Any,
     ) -> None:
         pass
@@ -310,7 +310,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     def _insert_stored_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **_: Any,
     ) -> None:
         # Only do something if there is something to do.
@@ -343,7 +343,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     def _fetch_ids_after_insert_events(
        self,
        curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
        **kwargs: Any,
    ) -> Sequence[int] | None:
        return None
@@ -351,13 +351,13 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     @retry((InterfaceError, OperationalError), max_attempts=10, wait=0.2)
     def select_events(
         self,
-        originator_id: UUID,
+        originator_id: UUID | str,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> list[StoredEvent]:
+    ) -> Sequence[StoredEvent]:
         statement = self.select_events_statement
         params: list[Any] = [originator_id]
         if gt is not None:
@@ -451,7 +451,7 @@ class PostgresApplicationRecorder(PostgresAggregateRecorder, ApplicationRecorder
         topics: Sequence[str] = (),
         *,
         inclusive_of_start: bool = True,
-    ) -> list[Notification]:
+    ) -> Sequence[Notification]:
         """Returns a list of event notifications
         from 'start', limited by 'limit'.
         """
@@ -548,7 +548,7 @@ class PostgresApplicationRecorder(PostgresAggregateRecorder, ApplicationRecorder
     def _fetch_ids_after_insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **kwargs: Any,
     ) -> Sequence[int] | None:
         notification_ids: list[int] = []
@@ -845,7 +845,7 @@ class PostgresProcessRecorder(
     def _insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **kwargs: Any,
     ) -> None:
         tracking: Tracking | None = kwargs.get("tracking")
@@ -880,7 +880,7 @@ class PostgresFactory(InfrastructureFactory[PostgresTrackingRecorder]):
     tracking_recorder_class = PostgresTrackingRecorder
     process_recorder_class = PostgresProcessRecorder
 
-    def __init__(self, env: Environment):
+    def __init__(self, env: Environment | EnvType | None):
         super().__init__(env)
         dbname = self.env.get(self.POSTGRES_DBNAME)
         if dbname is None: