eventsourcing 9.4.4__py3-none-any.whl → 9.4.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of eventsourcing might be problematic.

eventsourcing/interface.py CHANGED

@@ -57,7 +57,11 @@ class NotificationLogJSONService(NotificationLogInterface, Generic[TApplication]
                 "items": [
                     {
                         "id": item.id,
-                        "originator_id": item.originator_id.hex,
+                        "originator_id": (
+                            item.originator_id.hex
+                            if isinstance(item.originator_id, UUID)
+                            else item.originator_id
+                        ),
                         "originator_version": item.originator_version,
                         "topic": item.topic,
                         "state": b64encode(item.state).decode("utf8"),
@@ -85,7 +89,11 @@ class NotificationLogJSONService(NotificationLogInterface, Generic[TApplication]
             [
                 {
                     "id": notification.id,
-                    "originator_id": notification.originator_id.hex,
+                    "originator_id": (
+                        notification.originator_id.hex
+                        if isinstance(notification.originator_id, UUID)
+                        else notification.originator_id
+                    ),
                     "originator_version": notification.originator_version,
                     "topic": notification.topic,
                     "state": b64encode(notification.state).decode("utf8"),
eventsourcing/persistence.py CHANGED

@@ -1,25 +1,32 @@
 from __future__ import annotations
 
 import json
-import uuid
+import typing
 from abc import ABC, abstractmethod
 from collections import deque
-from collections.abc import Iterator, Mapping, Sequence
+from collections.abc import Hashable, Iterator, Mapping, Sequence
 from dataclasses import dataclass
 from datetime import datetime
 from decimal import Decimal
+from functools import lru_cache
 from queue import Queue
 from threading import Condition, Event, Lock, Semaphore, Thread, Timer
 from time import monotonic, sleep, time
 from types import GenericAlias, ModuleType
-from typing import TYPE_CHECKING, Any, Generic, Union, cast
+from typing import TYPE_CHECKING, Any, Callable, Generic, Union, cast
 from uuid import UUID
 
 from typing_extensions import TypeVar
 
-from eventsourcing.domain import DomainEventProtocol, EventSourcingError
+from eventsourcing.domain import (
+    DomainEventProtocol,
+    EventSourcingError,
+    HasOriginatorIDVersion,
+    TAggregateID,
+)
 from eventsourcing.utils import (
     Environment,
+    EnvType,
     TopicError,
     get_topic,
     resolve_topic,
@@ -178,7 +185,7 @@ class StoredEvent:
     objects and :class:`~eventsourcing.domain.Snapshot` objects.
     """
 
-    originator_id: uuid.UUID
+    originator_id: UUID | str
     """ID of the originating aggregate."""
     originator_version: int
     """Position in an aggregate sequence."""
@@ -220,7 +227,10 @@ class MapperDeserialisationError(EventSourcingError, ValueError):
     """Raised when deserialization fails in a Mapper."""
 
 
-class Mapper:
+TAggregateIDType = TypeVar("TAggregateIDType", type[UUID], type[str])
+
+
+class Mapper(Generic[TAggregateID]):
     """Converts between domain event objects and :class:`StoredEvent` objects.
 
     Uses a :class:`Transcoder`, and optionally a cryptographic cipher and compressor.
@@ -236,7 +246,9 @@ class Mapper:
         self.compressor = compressor
         self.cipher = cipher
 
-    def to_stored_event(self, domain_event: DomainEventProtocol) -> StoredEvent:
+    def to_stored_event(
+        self, domain_event: DomainEventProtocol[TAggregateID]
+    ) -> StoredEvent:
         """Converts the given domain event to a :class:`StoredEvent` object."""
         topic = get_topic(domain_event.__class__)
         event_state = domain_event.__dict__.copy()
@@ -257,8 +269,12 @@ class Mapper:
             state=stored_state,
         )
 
-    def to_domain_event(self, stored_event: StoredEvent) -> DomainEventProtocol:
+    def to_domain_event(
+        self, stored_event: StoredEvent
+    ) -> DomainEventProtocol[TAggregateID]:
         """Converts the given :class:`StoredEvent` to a domain event object."""
+        cls = resolve_topic(stored_event.topic)
+
         stored_state = stored_event.state
         try:
             if self.cipher:
@@ -275,9 +291,12 @@ class Mapper:
             )
             raise MapperDeserialisationError(msg) from e
 
-        event_state["originator_id"] = stored_event.originator_id
+        id_convertor = _find_id_convertor(
+            cls, cast(Hashable, type(stored_event.originator_id))
+        )
+        # print("ID of convertor:", id(convertor))
+        event_state["originator_id"] = id_convertor(stored_event.originator_id)
         event_state["originator_version"] = stored_event.originator_version
-        cls = resolve_topic(stored_event.topic)
         class_version = getattr(cls, "class_version", 1)
         from_version = event_state.pop("class_version", 1)
         while from_version < class_version:
@@ -289,6 +308,46 @@ class Mapper:
         return domain_event
 
 
+@lru_cache
+def _find_id_convertor(
+    domain_event_cls: type[object], originator_id_cls: type[UUID | str]
+) -> Callable[[UUID | str], UUID | str]:
+    # Try to find the originator_id type.
+    type_originator_id: type[UUID | str] = UUID
+    if issubclass(domain_event_cls, HasOriginatorIDVersion):
+        type_originator_id = domain_event_cls.type_originator_id
+    else:
+        try:
+            # Look on plain simple annotations.
+            originator_id_annotation = typing.get_type_hints(
+                domain_event_cls, globalns=globals()
+            ).get("originator_id", None)
+            assert originator_id_annotation in [UUID, str]
+            type_originator_id = cast(type[Union[UUID, str]], originator_id_annotation)
+        except NameError:
+            pass
+
+    if originator_id_cls is str and type_originator_id is UUID:
+        convertor = str_to_uuid_convertor
+    else:
+        convertor = pass_through_convertor
+    # print(
+    #     f"Decided {convertor.__name__} "
+    #     f"for {domain_event_cls.__name__} "
+    #     f"and {originator_id_cls.__name__}."
+    # )
+    return convertor
+
+
+def str_to_uuid_convertor(originator_id: UUID | str) -> UUID | str:
+    assert isinstance(originator_id, str)
+    return UUID(originator_id)
+
+
+def pass_through_convertor(originator_id: UUID | str) -> UUID | str:
+    return originator_id
+
+
 class RecordConflictError(EventSourcingError):
     """Legacy exception, replaced with IntegrityError."""
 
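For illustration, a minimal sketch of the annotation probe that _find_id_convertor relies on (UUIDEvent and StrEvent are hypothetical classes): a UUID annotation means stored string IDs are converted with UUID(...), while a str annotation leaves them untouched.

    import typing
    from uuid import UUID

    class UUIDEvent:
        originator_id: UUID

    class StrEvent:
        originator_id: str

    # The same probe used above: typing.get_type_hints on the event class.
    assert typing.get_type_hints(UUIDEvent)["originator_id"] is UUID
    assert typing.get_type_hints(StrEvent)["originator_id"] is str
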
@@ -366,20 +425,20 @@ class AggregateRecorder(Recorder, ABC):
 
     @abstractmethod
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         """Writes stored events into database."""
 
     @abstractmethod
     def select_events(
         self,
-        originator_id: UUID,
+        originator_id: UUID | str,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> list[StoredEvent]:
+    ) -> Sequence[StoredEvent]:
         """Reads stored events from database."""
 
 
@@ -405,7 +464,7 @@ class ApplicationRecorder(AggregateRecorder):
         topics: Sequence[str] = (),
         *,
         inclusive_of_start: bool = True,
-    ) -> list[Notification]:
+    ) -> Sequence[Notification]:
         """Returns a list of Notification objects representing events from an
         application sequence. If `inclusive_of_start` is True (the default),
         the returned Notification objects will have IDs greater than or equal
@@ -510,29 +569,29 @@ class ProcessRecorder(TrackingRecorder, ApplicationRecorder, ABC):
 
 
 @dataclass(frozen=True)
-class Recording:
+class Recording(Generic[TAggregateID]):
     """Represents the recording of a domain event."""
 
-    domain_event: DomainEventProtocol
+    domain_event: DomainEventProtocol[TAggregateID]
     """The domain event that has been recorded."""
     notification: Notification
     """A Notification that represents the domain event in the application sequence."""
 
 
-class EventStore:
+class EventStore(Generic[TAggregateID]):
     """Stores and retrieves domain events."""
 
     def __init__(
         self,
-        mapper: Mapper,
+        mapper: Mapper[TAggregateID],
         recorder: AggregateRecorder,
     ):
-        self.mapper = mapper
+        self.mapper: Mapper[TAggregateID] = mapper
         self.recorder = recorder
 
     def put(
-        self, domain_events: Sequence[DomainEventProtocol], **kwargs: Any
-    ) -> list[Recording]:
+        self, domain_events: Sequence[DomainEventProtocol[TAggregateID]], **kwargs: Any
+    ) -> list[Recording[TAggregateID]]:
         """Stores domain events in aggregate sequence."""
         stored_events = list(map(self.mapper.to_stored_event, domain_events))
         recordings = []
@@ -556,13 +615,13 @@ class EventStore:
 
     def get(
         self,
-        originator_id: UUID,
+        originator_id: TAggregateID,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> Iterator[DomainEventProtocol]:
+    ) -> Iterator[DomainEventProtocol[TAggregateID]]:
         """Retrieves domain events from aggregate sequence."""
         return map(
             self.mapper.to_domain_event,
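
For illustration, a minimal sketch of wiring up the newly generic Mapper and EventStore, assuming version 9.4.5 and the package's JSONTranscoder and POPOAggregateRecorder:

    from uuid import UUID

    from eventsourcing.persistence import EventStore, JSONTranscoder, Mapper
    from eventsourcing.popo import POPOAggregateRecorder

    # A mapper/store pair parameterised for UUID-based aggregate IDs.
    mapper: Mapper[UUID] = Mapper(transcoder=JSONTranscoder())
    event_store: EventStore[UUID] = EventStore(
        mapper=mapper,
        recorder=POPOAggregateRecorder(),
    )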
@@ -671,9 +730,9 @@ class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
             raise InfrastructureFactoryError(msg)
         return factory_cls(env=env)
 
-    def __init__(self, env: Environment):
+    def __init__(self, env: Environment | EnvType | None):
         """Initialises infrastructure factory object with given application name."""
-        self.env = env
+        self.env = env if isinstance(env, Environment) else Environment(env=env)
 
     def transcoder(
         self,
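
For illustration, a minimal sketch of the normalisation this hunk introduces, using the package's Environment class (the PERSISTENCE_MODULE key is just an example):

    from eventsourcing.utils import Environment

    # Plain mappings (and None) are now wrapped in an Environment.
    env = {"PERSISTENCE_MODULE": "eventsourcing.popo"}
    normalised = env if isinstance(env, Environment) else Environment(env=env)
    assert isinstance(normalised, Environment)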
@@ -689,14 +748,23 @@ class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
     def mapper(
         self,
         transcoder: Transcoder | None = None,
-        mapper_class: type[Mapper] | None = None,
-    ) -> Mapper:
+        mapper_class: type[Mapper[TAggregateID]] | None = None,
+    ) -> Mapper[TAggregateID]:
         """Constructs a mapper."""
+        # Resolve MAPPER_TOPIC if no given class.
         if mapper_class is None:
             mapper_topic = self.env.get(self.MAPPER_TOPIC)
-            mapper_class = resolve_topic(mapper_topic) if mapper_topic else Mapper
+            mapper_class = (
+                resolve_topic(mapper_topic) if mapper_topic else Mapper[TAggregateID]
+            )
+
+        # Check we have a mapper class.
+        assert mapper_class is not None
+        origin_mapper_class = typing.get_origin(mapper_class) or mapper_class
+        assert isinstance(origin_mapper_class, type), mapper_class
+        assert issubclass(origin_mapper_class, Mapper), mapper_class
 
-        assert isinstance(mapper_class, type) and issubclass(mapper_class, Mapper)
+        # Construct and return a mapper.
         return mapper_class(
             transcoder=transcoder or self.transcoder(),
             cipher=self.cipher(),
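
For illustration, a minimal sketch of why the origin check above is needed: a parameterised alias such as Mapper[UUID] is not itself a class, so issubclass() must run against its unsubscripted origin.

    import typing
    from uuid import UUID

    from eventsourcing.persistence import Mapper

    parametrised = Mapper[UUID]
    origin = typing.get_origin(parametrised) or parametrised
    assert origin is Mapper
    assert issubclass(origin, Mapper)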
@@ -738,9 +806,9 @@ class InfrastructureFactory(ABC, Generic[TTrackingRecorder]):
 
     def event_store(
         self,
-        mapper: Mapper | None = None,
+        mapper: Mapper[TAggregateID] | None = None,
         recorder: AggregateRecorder | None = None,
-    ) -> EventStore:
+    ) -> EventStore[TAggregateID]:
         """Constructs an event store."""
         return EventStore(
             mapper=mapper or self.mapper(),
@@ -1258,9 +1326,9 @@ class ListenNotifySubscription(Subscription[TApplicationRecorder_co]):
     ) -> None:
         super().__init__(recorder=recorder, gt=gt, topics=topics)
         self._select_limit = 500
-        self._notifications: list[Notification] = []
+        self._notifications: Sequence[Notification] = []
         self._notifications_index: int = 0
-        self._notifications_queue: Queue[list[Notification]] = Queue(maxsize=10)
+        self._notifications_queue: Queue[Sequence[Notification]] = Queue(maxsize=10)
         self._has_been_notified = Event()
         self._thread_error: BaseException | None = None
         self._pull_thread = Thread(target=self._loop_on_pull)
eventsourcing/popo.py CHANGED
@@ -34,26 +34,28 @@ class POPOAggregateRecorder(POPORecorder, AggregateRecorder):
     def __init__(self) -> None:
         super().__init__()
         self._stored_events: list[StoredEvent] = []
-        self._stored_events_index: dict[UUID, dict[int, int]] = defaultdict(dict)
+        self._stored_events_index: dict[str, dict[int, int]] = defaultdict(dict)
 
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         self._insert_events(stored_events, **kwargs)
         return None
 
     def _insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         with self._database_lock:
             self._assert_uniqueness(stored_events, **kwargs)
             return self._update_table(stored_events, **kwargs)
 
-    def _assert_uniqueness(self, stored_events: list[StoredEvent], **_: Any) -> None:
+    def _assert_uniqueness(
+        self, stored_events: Sequence[StoredEvent], **_: Any
+    ) -> None:
         new = set()
         for s in stored_events:
             # Check events don't already exist.
-            if s.originator_version in self._stored_events_index[s.originator_id]:
+            if s.originator_version in self._stored_events_index[str(s.originator_id)]:
                 msg = f"Stored event already recorded: {s}"
                 raise IntegrityError(msg)
             new.add((s.originator_id, s.originator_version))
@@ -63,12 +65,12 @@ class POPOAggregateRecorder(POPORecorder, AggregateRecorder):
             raise IntegrityError(msg)
 
     def _update_table(
-        self, stored_events: list[StoredEvent], **_: Any
+        self, stored_events: Sequence[StoredEvent], **_: Any
     ) -> Sequence[int] | None:
         notification_ids = []
         for s in stored_events:
             self._stored_events.append(s)
-            self._stored_events_index[s.originator_id][s.originator_version] = (
+            self._stored_events_index[str(s.originator_id)][s.originator_version] = (
                 len(self._stored_events) - 1
             )
             notification_ids.append(len(self._stored_events))
@@ -76,17 +78,17 @@ class POPOAggregateRecorder(POPORecorder, AggregateRecorder):
 
     def select_events(
         self,
-        originator_id: UUID,
+        originator_id: UUID | str,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> list[StoredEvent]:
+    ) -> Sequence[StoredEvent]:
         with self._database_lock:
             results = []
 
-            index = self._stored_events_index[originator_id]
+            index = self._stored_events_index[str(originator_id)]
             positions: Iterable[int]
             positions = reversed_keys(index) if desc else index.keys()
             for p in positions:
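
For illustration, a minimal sketch of the str-keyed indexing scheme this recorder now uses (the index dict here is hypothetical): str() gives UUID and string-based aggregate IDs one uniform key space.

    from uuid import UUID

    uid = UUID("12345678-1234-5678-1234-567812345678")
    index = {str(uid): {1: 0}, "order-123": {1: 1}}
    assert index[str(uid)] == {1: 0}
    assert index[str("order-123")] == {1: 1}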
@@ -107,7 +109,7 @@ class POPOApplicationRecorder(POPOAggregateRecorder, ApplicationRecorder):
         self._listeners: set[Event] = set()
 
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         notification_ids = self._insert_events(stored_events, **kwargs)
         self._notify_listeners()
@@ -121,7 +123,7 @@ class POPOApplicationRecorder(POPOAggregateRecorder, ApplicationRecorder):
         topics: Sequence[str] = (),
         *,
         inclusive_of_start: bool = True,
-    ) -> list[Notification]:
+    ) -> Sequence[Notification]:
         with self._database_lock:
             results = []
             if start is None:
@@ -221,7 +223,7 @@ class POPOProcessRecorder(
     POPOTrackingRecorder, POPOApplicationRecorder, ProcessRecorder
 ):
     def _assert_uniqueness(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> None:
         super()._assert_uniqueness(stored_events, **kwargs)
         t: Tracking | None = kwargs.get("tracking")
@@ -229,7 +231,7 @@ class POPOProcessRecorder(
         self._assert_tracking_uniqueness(t)
 
     def _update_table(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         notification_ids = super()._update_table(stored_events, **kwargs)
         t: Tracking | None = kwargs.get("tracking")
eventsourcing/postgres.py CHANGED
@@ -37,7 +37,7 @@ from eventsourcing.persistence import (
     Tracking,
     TrackingRecorder,
 )
-from eventsourcing.utils import Environment, resolve_topic, retry, strtobool
+from eventsourcing.utils import Environment, EnvType, resolve_topic, retry, strtobool
 
 if TYPE_CHECKING:
     from collections.abc import Iterator, Sequence
@@ -46,8 +46,8 @@ if TYPE_CHECKING:
     from psycopg.abc import Query
     from typing_extensions import Self
 
-logging.getLogger("psycopg.pool").setLevel(logging.CRITICAL)
-logging.getLogger("psycopg").setLevel(logging.CRITICAL)
+logging.getLogger("psycopg.pool").setLevel(logging.ERROR)
+logging.getLogger("psycopg").setLevel(logging.ERROR)
 
 # Copy of "private" psycopg.errors._NO_TRACEBACK (in case it changes)
 # From psycopg: "Don't show a complete traceback upon raising these exception.
@@ -83,16 +83,16 @@ class PostgresDatastore:
         user: str,
         password: str,
         *,
-        connect_timeout: int = 30,
-        idle_in_transaction_session_timeout: int = 0,
-        pool_size: int = 2,
-        max_overflow: int = 2,
+        connect_timeout: float = 5.0,
+        idle_in_transaction_session_timeout: float = 0,
+        pool_size: int = 1,
+        max_overflow: int = 0,
         max_waiting: int = 0,
         conn_max_age: float = 60 * 60.0,
         pre_ping: bool = False,
         lock_timeout: int = 0,
         schema: str = "",
-        pool_open_timeout: int | None = None,
+        pool_open_timeout: float | None = None,
         get_password_func: Callable[[], str] | None = None,
         single_row_tracking: bool = True,
     ):
@@ -126,8 +126,8 @@ class PostgresDatastore:
         self.schema = schema.strip() or "public"
 
     def after_connect_func(self) -> Callable[[Connection[Any]], None]:
-        statement = SQL("SET idle_in_transaction_session_timeout = '{0}s'").format(
-            self.idle_in_transaction_session_timeout
+        statement = SQL("SET idle_in_transaction_session_timeout = '{0}ms'").format(
+            int(self.idle_in_transaction_session_timeout * 1000)
         )
 
         def after_connect(conn: Connection[DictRow]) -> None:
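
For illustration, the unit change in plain Python (the real statement is composed with psycopg's SQL class, as in the hunk above): the timeout is now a float in seconds, rendered as whole milliseconds, so sub-second values are representable.

    idle_in_transaction_session_timeout = 0.5  # seconds, now accepted as a float
    milliseconds = int(idle_in_transaction_session_timeout * 1000)
    statement = f"SET idle_in_transaction_session_timeout = '{milliseconds}ms'"
    assert statement == "SET idle_in_transaction_session_timeout = '500ms'"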
@@ -271,7 +271,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
 
     @retry((InterfaceError, OperationalError), max_attempts=10, wait=0.2)
     def insert_events(
-        self, stored_events: list[StoredEvent], **kwargs: Any
+        self, stored_events: Sequence[StoredEvent], **kwargs: Any
     ) -> Sequence[int] | None:
         exc: Exception | None = None
         notification_ids: Sequence[int] | None = None
@@ -302,7 +302,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     def _insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **_: Any,
     ) -> None:
         pass
@@ -310,7 +310,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     def _insert_stored_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **_: Any,
     ) -> None:
         # Only do something if there is something to do.
@@ -343,7 +343,7 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     def _fetch_ids_after_insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **kwargs: Any,
     ) -> Sequence[int] | None:
         return None
@@ -351,13 +351,13 @@ class PostgresAggregateRecorder(PostgresRecorder, AggregateRecorder):
     @retry((InterfaceError, OperationalError), max_attempts=10, wait=0.2)
     def select_events(
         self,
-        originator_id: UUID,
+        originator_id: UUID | str,
         *,
         gt: int | None = None,
         lte: int | None = None,
         desc: bool = False,
         limit: int | None = None,
-    ) -> list[StoredEvent]:
+    ) -> Sequence[StoredEvent]:
         statement = self.select_events_statement
         params: list[Any] = [originator_id]
         if gt is not None:
@@ -451,7 +451,7 @@ class PostgresApplicationRecorder(PostgresAggregateRecorder, ApplicationRecorder
         topics: Sequence[str] = (),
         *,
         inclusive_of_start: bool = True,
-    ) -> list[Notification]:
+    ) -> Sequence[Notification]:
         """Returns a list of event notifications
         from 'start', limited by 'limit'.
         """
@@ -548,7 +548,7 @@ class PostgresApplicationRecorder(PostgresAggregateRecorder, ApplicationRecorder
     def _fetch_ids_after_insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **kwargs: Any,
     ) -> Sequence[int] | None:
         notification_ids: list[int] = []
@@ -845,7 +845,7 @@ class PostgresProcessRecorder(
     def _insert_events(
         self,
         curs: Cursor[DictRow],
-        stored_events: list[StoredEvent],
+        stored_events: Sequence[StoredEvent],
         **kwargs: Any,
     ) -> None:
         tracking: Tracking | None = kwargs.get("tracking")
@@ -880,7 +880,7 @@ class PostgresFactory(InfrastructureFactory[PostgresTrackingRecorder]):
    tracking_recorder_class = PostgresTrackingRecorder
    process_recorder_class = PostgresProcessRecorder
 
-    def __init__(self, env: Environment):
+    def __init__(self, env: Environment | EnvType | None):
         super().__init__(env)
         dbname = self.env.get(self.POSTGRES_DBNAME)
         if dbname is None: