eventsourcing 9.4.0b3__tar.gz → 9.4.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/PKG-INFO +12 -12
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/README.md +4 -3
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/domain.py +182 -125
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/postgres.py +4 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/projection.py +184 -45
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/sqlite.py +1 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/system.py +28 -86
- eventsourcing-9.4.1/eventsourcing/tests/__init__.py +3 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/tests/persistence.py +2 -2
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/pyproject.toml +30 -27
- eventsourcing-9.4.0b3/eventsourcing/tests/__init__.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/AUTHORS +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/LICENSE +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/__init__.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/application.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/cipher.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/compressor.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/cryptography.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/dispatch.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/interface.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/persistence.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/popo.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/py.typed +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/tests/application.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/tests/domain.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/tests/postgres_utils.py +0 -0
- {eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/utils.py +0 -0

{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/PKG-INFO

@@ -1,33 +1,32 @@
 Metadata-Version: 2.3
 Name: eventsourcing
-Version: 9.4.0b3
+Version: 9.4.1
 Summary: Event sourcing in Python
-License: BSD
+License: BSD-3-Clause
 Keywords: event sourcing,event store,domain driven design,domain-driven design,ddd,cqrs,cqs
 Author: John Bywater
 Author-email: john.bywater@appropriatesoftware.net
-Requires-Python: >=3.9
-Classifier: Development Status :: 4 - Beta
+Requires-Python: >=3.9.2
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Education
 Classifier: Intended Audience :: Science/Research
 Classifier: License :: OSI Approved :: BSD License
-Classifier: License :: Other/Proprietary License
 Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Provides-Extra: crypto
 Provides-Extra: cryptography
 Provides-Extra: postgres
-Requires-Dist: cryptography (>=44.0,<
-Requires-Dist: psycopg[pool] (
-Requires-Dist: pycryptodome (>=3.22,<
+Requires-Dist: cryptography (>=44.0,<45.0) ; extra == "cryptography"
+Requires-Dist: psycopg[pool] (>=3.2,<3.3) ; extra == "postgres"
+Requires-Dist: pycryptodome (>=3.22,<4.0) ; extra == "crypto"
 Requires-Dist: typing_extensions
 Project-URL: Homepage, https://github.com/pyeventsourcing/eventsourcing
 Project-URL: Repository, https://github.com/pyeventsourcing/eventsourcing

@@ -43,7 +42,10 @@ Description-Content-Type: text/markdown

 # Event Sourcing in Python

-A library for event sourcing in Python.
+This project is a comprehensive Python library for implementing event sourcing, a design pattern where all
+changes to application state are stored as a sequence of events. This library provides a solid foundation
+for building event-sourced applications in Python, with a focus on reliability, performance, and developer
+experience. Please [read the docs](https://eventsourcing.readthedocs.io/). See also [extension projects](https://github.com/pyeventsourcing).

 *"totally amazing and a pleasure to use"*

@@ -51,8 +53,6 @@ A library for event sourcing in Python.

 *"a huge help and time saver"*

-Please [read the docs](https://eventsourcing.readthedocs.io/). See also [extension projects](https://github.com/pyeventsourcing).
-

 ## Installation

{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/README.md

@@ -8,7 +8,10 @@

 # Event Sourcing in Python

-A library for event sourcing in Python.
+This project is a comprehensive Python library for implementing event sourcing, a design pattern where all
+changes to application state are stored as a sequence of events. This library provides a solid foundation
+for building event-sourced applications in Python, with a focus on reliability, performance, and developer
+experience. Please [read the docs](https://eventsourcing.readthedocs.io/). See also [extension projects](https://github.com/pyeventsourcing).

 *"totally amazing and a pleasure to use"*

@@ -16,8 +19,6 @@ A library for event sourcing in Python.

 *"a huge help and time saver"*

-Please [read the docs](https://eventsourcing.readthedocs.io/). See also [extension projects](https://github.com/pyeventsourcing).
-

 ## Installation

{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/domain.py

@@ -943,12 +943,20 @@ class MetaAggregate(EventsourcingType, Generic[TAggregate], type):
         }

         # Create the event class object.
-
+        _new_class = type(name, bases, event_cls_dict)
+        return cast("type[CanMutateAggregate]", _new_class)

     def __call__(
         cls: MetaAggregate[TAggregate], *args: Any, **kwargs: Any
     ) -> TAggregate:
+        if cls is BaseAggregate:
+            msg = "BaseAggregate class cannot be instantiated directly"
+            raise TypeError(msg)
         created_event_classes = _created_event_classes[cls]
+        # Here, unlike when calling _create(), we don't have a given event class,
+        # so we need to check that there is one "created" event class to use here.
+        # We don't check this in __init_subclass__ to allow for alternatives that
+        # can be selected by developers by calling _create(event_class=...).
         if len(created_event_classes) > 1:
             msg = (
                 f"{cls.__qualname__} can't decide which of many "

@@ -975,10 +983,9 @@ class MetaAggregate(EventsourcingType, Generic[TAggregate], type):
         event_class: type[CanInitAggregate],
         **kwargs: Any,
     ) -> TAggregate:
+        # Just define method signature for the __call__() method.
         raise NotImplementedError  # pragma: no cover

-    _created_event_class: type[CanInitAggregate]
-

 class BaseAggregate(metaclass=MetaAggregate):
     """Base class for aggregates."""

@@ -1141,13 +1148,22 @@ class BaseAggregate(metaclass=MetaAggregate):
         return f"{type(self).__name__}({', '.join(attrs)})"

     def __init_subclass__(
-        cls: type[BaseAggregate], *, created_event_name: str
+        cls: type[BaseAggregate], *, created_event_name: str = ""
     ) -> None:
         """
         Initialises aggregate subclass by defining __init__ method and event classes.
         """
         super().__init_subclass__()

+        # Ensure we aren't defining another instance of the same class,
+        # because annotations can get confused when using singledispatchmethod
+        # during class definition e.g. on an aggregate projector function.
+        _module = importlib.import_module(cls.__module__)
+        if cls.__name__ in _module.__dict__:
+            msg = f"Name '{cls.__name__}' already defined in '{cls.__module__}' module"
+            raise ProgrammingError(msg)
+
+        # Get the class annotations.
         class_annotations = cls.__dict__.get("__annotations__", {})
         try:
             class_annotations.pop("id")

@@ -1155,6 +1171,11 @@ class BaseAggregate(metaclass=MetaAggregate):
         except KeyError:
             pass

+        if "id" in cls.__dict__:
+            msg = f"Setting attribute 'id' on class '{cls.__name__}' is not allowed"
+            raise ProgrammingError(msg)
+
+        # Process the class as a dataclass, if there are annotations.
         if (
             class_annotations
             or cls in _annotations_mention_id

@@ -1162,6 +1183,29 @@ class BaseAggregate(metaclass=MetaAggregate):
         ):
             dataclasses.dataclass(eq=False, repr=False)(cls)

+        # Remember if __init__ mentions ID.
+        for param_name in inspect.signature(cls.__init__).parameters:
+            if param_name == "id":
+                _init_mentions_id.add(cls)
+                break
+
+        # Analyse __init__ attribute, to get __init__ method and @event decorator.
+        init_attr: FunctionType | CommandMethodDecorator | None = cls.__dict__.get(
+            "__init__"
+        )
+        if init_attr is None:
+            # No method, no decorator.
+            init_method: CallableType | None = None
+            init_decorator: CommandMethodDecorator | None = None
+        elif isinstance(init_attr, CommandMethodDecorator):
+            # Method decorated with @event.
+            init_method = init_attr.decorated_func
+            init_decorator = init_attr
+        else:
+            # Undecorated __init__ method.
+            init_decorator = None
+            init_method = init_attr
+
         # Identify or define a base event class for this aggregate.
         base_event_name = "Event"
         base_event_cls: type[CanMutateAggregate]

@@ -1175,142 +1219,150 @@ class BaseAggregate(metaclass=MetaAggregate):
             )
             setattr(cls, base_event_name, base_event_cls)

-        #
+        # Ensure all events defined on this class are subclasses of base event class.
+        created_event_classes: dict[str, type[CanInitAggregate]] = {}
         for name, value in tuple(cls.__dict__.items()):
             if name == base_event_name:
                 # Don't subclass the base event class again.
                 continue
             if name.lower() == name:
-                # Don't subclass lowercase named attributes
+                # Don't subclass lowercase named attributes.
                 continue
-            if (
-
-
-
-
-
-
-
-
-
-
-
-        # Analyse the __init__ attribute.
-        init_attr: FunctionType | CommandMethodDecorator | None = cls.__dict__.get(
-            "__init__"
-        )
-        if init_attr is None:
-            init_decorator: CommandMethodDecorator | None = None
-            init_method: CallableType | None = None
-        elif isinstance(init_attr, CommandMethodDecorator):
-            init_decorator = init_attr
-            init_method = init_attr.decorated_func
-        else:
-            init_decorator = None
-            init_method = init_attr
+            if isinstance(value, type) and issubclass(value, CanMutateAggregate):
+                if not issubclass(value, base_event_cls):
+                    event_class = cls._define_event_class(
+                        name, (value, base_event_cls), None
+                    )
+                    setattr(cls, name, event_class)
+                else:
+                    event_class = value
+
+                # Remember all "created" event classes defined on this class.
+                if issubclass(event_class, CanInitAggregate):
+                    created_event_classes[name] = event_class

         # Identify or define the aggregate's "created" event class.
         created_event_class: type[CanInitAggregate] | None = None

-        #
+        # Analyse __init__ method decorator.
         if init_decorator:

-            #
-            if created_event_name:
-                msg = "Can't use both 'created_event_name' and decorator on __init__"
-                raise TypeError(msg)
-
-            # Does the decorator specify a "created" event class?
+            # Does the decorator specify an event class?
             if init_decorator.given_event_cls:
-                created_event_class = cast(
-                    "type[CanInitAggregate]", init_decorator.given_event_cls
-                )
-
-            # Does the decorator specify a "created" event name?
-            elif init_decorator.event_cls_name:
-                created_event_name = init_decorator.event_cls_name
-
-            # Disallow using decorator on __init__ without event spec.
-            else:
-                msg = "Decorator on __init__ has neither event name nor class"
-                raise TypeError(msg)
-
-        # Check if init mentions ID.
-        for param_name in inspect.signature(cls.__init__).parameters:
-            if param_name == "id":
-                _init_mentions_id.add(cls)
-                break

-
-
-
-
-
-
-
+                # Disallow conflicts between 'created_event_name' and given class.
+                if (
+                    created_event_name
+                    and created_event_name != init_decorator.given_event_cls.__name__
+                ):
+                    msg = (
+                        "Given 'created_event_name' conflicts "
+                        "with decorator on __init__"
+                    )
+                    raise TypeError(msg)

-
-            if issubclass(
-
-
+                # Check given event class can init aggregate.
+                if not issubclass(init_decorator.given_event_cls, CanInitAggregate):
+                    msg = (
+                        f"class '{init_decorator.given_event_cls.__name__}' "
+                        f'not a "created" event class'
+                    )
+                    raise TypeError(msg)

-
-
-
+                # Have we already subclassed the given event class?
+                for sub_class in created_event_classes.values():
+                    if issubclass(sub_class, init_decorator.given_event_cls):
+                        created_event_class = sub_class
+                        break
+                else:
+                    created_event_class = init_decorator.given_event_cls

-
-
-
+            # Does the decorator specify an event name?
+            elif init_decorator.event_cls_name:
+                # Disallow conflicts between 'created_event_name' and given name.
+                if (
+                    created_event_name
+                    and created_event_name != init_decorator.event_cls_name
+                ):
+                    msg = (
+                        "Given 'created_event_name' conflicts "
+                        "with decorator on __init__"
+                    )
+                    raise TypeError(msg)

-
-        # specified that hasn't matched, then define a "created" event class.
-        elif len(created_event_classes) == 0 or created_event_name:
+                created_event_name = init_decorator.event_cls_name

-            #
-            if created_event_name and len(created_event_classes) == 1:
-                base_created_event_cls = next(iter(created_event_classes.values()))
+            # Disallow using decorator on __init__ without event name or class.
             else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                msg = "@event decorator on __init__ has neither event name nor class"
+                raise TypeError(msg)
+
+        # Do we need to define a created event class?
+        if not created_event_class:
+            # If we have a "created" event class that matches the name, then use it.
+            if created_event_name in created_event_classes:
+                created_event_class = created_event_classes[created_event_name]
+            # Otherwise, if we have no name and only one class defined, then use it.
+            elif not created_event_name and len(created_event_classes) == 1:
+                created_event_class = next(iter(created_event_classes.values()))
+
+            # Otherwise, if there are no "created" events, or a name is
+            # specified that hasn't matched, then define a "created" event class.
+            elif len(created_event_classes) == 0 or created_event_name:
+                # Decide the base "created" event class.
+
+                try:
+                    # Look for a base class with the same name.
+                    base_created_event_cls = cast(
+                        "type[CanInitAggregate]",
+                        getattr(cls, created_event_name),
                     )
-
+                except AttributeError:
+                    # Look for base class with one nominated "created" event.
+                    for base_cls in cls.__mro__:
+                        if (
+                            base_cls in _created_event_classes
+                            and len(_created_event_classes[base_cls]) == 1
+                        ):
+                            base_created_event_cls = _created_event_classes[base_cls][0]
+                            break
+                    else:
+                        msg = (
+                            "Can't identify suitable base class for "
+                            f"\"created\" event class on class '{cls.__name__}'"
+                        )
+                        raise TypeError(msg) from None

-
-
+                if not created_event_name:
+                    created_event_name = base_created_event_cls.__name__

-
-
-
-
+                # Disallow init method from having variable params, because
+                # we are using it to define a "created" event class.
+                if init_method:
+                    _raise_type_error_if_func_has_variable_params(init_method)

-            # Define a "created" event class for this aggregate.
-            if issubclass(base_created_event_cls, base_event_cls):
                 # Don't subclass from base event class twice.
-
-
-
-
-
-
-
-
-
-
-
-
-
+                assert isinstance(base_created_event_cls, type), base_created_event_cls
+                assert not issubclass(
+                    base_created_event_cls, base_event_cls
+                ), base_created_event_cls
+
+                # Define "created" event class.
+                assert created_event_name
+                assert issubclass(base_created_event_cls, CanInitAggregate)
+                created_event_class_bases = (base_created_event_cls, base_event_cls)
+                created_event_class = cast(
+                    "type[CanInitAggregate]",
+                    cls._define_event_class(
+                        created_event_name,
+                        created_event_class_bases,
+                        init_method,
+                    ),
+                )
+                # Set the event class as an attribute of the aggregate class.
+                setattr(cls, created_event_name, created_event_class)
+
+        assert created_event_class or len(created_event_classes) > 1

         if created_event_class:
             _created_event_classes[cls] = [created_event_class]

@@ -1318,16 +1370,18 @@ class BaseAggregate(metaclass=MetaAggregate):
             # Prepare to disallow ambiguity of choice between created event classes.
             _created_event_classes[cls] = list(created_event_classes.values())

-        #
+        # Find and analyse any @event decorators.
         for attr_name, attr_value in tuple(cls.__dict__.items()):
             event_decorator: CommandMethodDecorator | None = None

-
-
-
-
+            # Ignore a decorator on the __init__ method.
+            if isinstance(attr_value, CommandMethodDecorator) and (
+                attr_value.decorated_func.__name__ == "__init__"
+            ):
+                continue

-
+            # Handle @property.setter decorator on top of @event decorator.
+            if isinstance(attr_value, property) and isinstance(
                 attr_value.fset, CommandMethodDecorator
             ):
                 event_decorator = attr_value.fset

@@ -1355,6 +1409,9 @@ class BaseAggregate(metaclass=MetaAggregate):
                 )
                 raise TypeError(msg)

+            elif isinstance(attr_value, CommandMethodDecorator):
+                event_decorator = attr_value
+
             if event_decorator is not None:
                 if event_decorator.given_event_cls:
                     # Check this is not a "created" event class.

@@ -1404,7 +1461,7 @@ class BaseAggregate(metaclass=MetaAggregate):
                         "type[DecoratorEvent]", event_cls
                     )

-        # Check any create_id method defined on this class is static or class method.
+        # Check any create_id() method defined on this class is static or class method.
         if "create_id" in cls.__dict__ and not isinstance(
             cls.__dict__["create_id"], (staticmethod, classmethod)
         ):

@@ -1419,7 +1476,7 @@ class BaseAggregate(metaclass=MetaAggregate):
             if param.kind in [param.KEYWORD_ONLY, param.POSITIONAL_OR_KEYWORD]:
                 _create_id_param_names[cls].append(name)

-        # Define event classes for all events on bases.
+        # Define event classes for all events on all bases if not defined on this class.
         for aggregate_base_class in cls.__bases__:
             for name, value in aggregate_base_class.__dict__.items():
                 if (

@@ -1428,10 +1485,10 @@ class BaseAggregate(metaclass=MetaAggregate):
                     and name not in cls.__dict__
                     and name.lower() != name
                 ):
-
+                    event_class = cls._define_event_class(
                         name, (base_event_cls, value), None
                     )
-                    setattr(cls, name,
+                    setattr(cls, name, event_class)


 class Aggregate(BaseAggregate):
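Note on the domain.py changes above: the `created_event_name` keyword of `BaseAggregate.__init_subclass__` now defaults to an empty string, and the logic for choosing or defining an aggregate's "created" event class has been reworked around the `@event` decorator and nested event classes. A minimal sketch of the aggregate-definition styles this logic serves, using the library's documented `Aggregate` and `event` API (the `Dog`, `Registered` and `TrickAdded` names are purely illustrative):

from eventsourcing.domain import Aggregate, event


# The "created" event can be named with the (now optional) class keyword...
class Dog(Aggregate, created_event_name="Registered"):
    def __init__(self, name: str) -> None:
        self.name = name
        self.tricks: list[str] = []

    # ...and subsequent state changes can be recorded as named events.
    @event("TrickAdded")
    def add_trick(self, trick: str) -> None:
        self.tricks.append(trick)


dog = Dog("Fido")           # triggers Dog.Registered
dog.add_trick("roll over")  # triggers Dog.TrickAdded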
{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/postgres.py

@@ -766,6 +766,10 @@ class PostgresFactory(InfrastructureFactory[PostgresTrackingRecorder]):
                 "in environment with key "
                 f"'{self.POSTGRES_DBNAME}'"
             )
+            # TODO: Indicate both keys here, also for other environment variables.
+            # ) + " or ".join(
+            #     [f"'{key}'" for key in self.env.create_keys(self.POSTGRES_DBNAME)]
+            # )
             raise OSError(msg)

         host = self.env.get(self.POSTGRES_HOST)
{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/projection.py

@@ -2,19 +2,22 @@ from __future__ import annotations

 import contextlib
 import os
+import threading
 import weakref
 from abc import ABC, abstractmethod
 from collections.abc import Iterator, Sequence
 from threading import Event, Thread
 from traceback import format_exc
-from typing import TYPE_CHECKING, Any, Generic, TypeVar
+from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar
 from warnings import warn

-from eventsourcing.application import Application
+from eventsourcing.application import Application, ProcessingEvent
 from eventsourcing.dispatch import singledispatchmethod
 from eventsourcing.domain import DomainEventProtocol
 from eventsourcing.persistence import (
     InfrastructureFactory,
+    IntegrityError,
+    ProcessRecorder,
     Tracking,
     TrackingRecorder,
     TTrackingRecorder,

@@ -43,7 +46,7 @@ class ApplicationSubscription(Iterator[tuple[DomainEventProtocol, Tracking]]):
         topics: Sequence[str] = (),
     ):
         """
-        Starts subscription to application's
+        Starts a subscription to application's recorder.
         """
         self.name = app.name
         self.recorder = app.recorder

@@ -51,7 +54,7 @@ class ApplicationSubscription(Iterator[tuple[DomainEventProtocol, Tracking]]):
         self.subscription = self.recorder.subscribe(gt=gt, topics=topics)

     def stop(self) -> None:
-        """Stops the
+        """Stops the subscription to the application's recorder."""
         self.subscription.stop()

     def __enter__(self) -> Self:

@@ -67,10 +70,11 @@ class ApplicationSubscription(Iterator[tuple[DomainEventProtocol, Tracking]]):
         return self

     def __next__(self) -> tuple[DomainEventProtocol, Tracking]:
-        """Returns the next stored event from
-        Constructs a tracking object that identifies the position of
-        the event in the application sequence
-
+        """Returns the next stored event from subscription to the application's
+        recorder. Constructs a tracking object that identifies the position of
+        the event in the application sequence. Constructs a domain event object
+        from the stored event object using the application's mapper. Returns a
+        tuple of the domain event object and the tracking object.
         """
         notification = next(self.subscription)
         tracking = Tracking(self.name, notification.id)

@@ -79,7 +83,8 @@ class ApplicationSubscription(Iterator[tuple[DomainEventProtocol, Tracking]]):

     def __del__(self) -> None:
         """Stops the stored event subscription."""
-
+        with contextlib.suppress(AttributeError):
+            self.stop()


 class Projection(ABC, Generic[TTrackingRecorder]):

@@ -90,14 +95,18 @@ class Projection(ABC, Generic[TTrackingRecorder]):
     """
     topics: tuple[str, ...] = ()
     """
-
+    Event topics, used to filter events in database when subscribing to an application.
     """

+    def __init_subclass__(cls, **kwargs: Any) -> None:
+        if "name" not in cls.__dict__:
+            cls.name = cls.__name__
+
     def __init__(
         self,
         view: TTrackingRecorder,
     ):
-        """Initialises
+        """Initialises the view property with the given view argument."""
         self._view = view

     @property

@@ -113,63 +122,130 @@ class Projection(ABC, Generic[TTrackingRecorder]):
         """Process a domain event and track it."""


+class EventSourcedProjection(Application, ABC):
+    """Extends the :py:class:`~eventsourcing.application.Application` class
+    by using a process recorder as its application recorder, and by
+    processing domain events through its :py:func:`policy` method.
+    """
+
+    topics: ClassVar[Sequence[str]] = ()
+
+    def __init__(self, env: EnvType | None = None) -> None:
+        super().__init__(env)
+        self.recorder: ProcessRecorder
+        self.processing_lock = threading.Lock()
+
+    def construct_recorder(self) -> ProcessRecorder:
+        """Constructs and returns a :class:`~eventsourcing.persistence.ProcessRecorder`
+        for the application to use as its application recorder.
+        """
+        return self.factory.process_recorder()
+
+    def process_event(
+        self, domain_event: DomainEventProtocol, tracking: Tracking
+    ) -> None:
+        """Calls :func:`~eventsourcing.system.Follower.policy` method with
+        the given :class:`~eventsourcing.domain.AggregateEvent` and a
+        new :class:`~eventsourcing.application.ProcessingEvent` created from
+        the given :class:`~eventsourcing.persistence.Tracking` object.
+
+        The policy will collect any new aggregate events on the process
+        event object.
+
+        After the policy method returns, the process event object will
+        then be recorded by calling
+        :func:`~eventsourcing.application.Application.record`, which
+        will return new notifications.
+
+        After calling
+        :func:`~eventsourcing.application.Application.take_snapshots`,
+        the new notifications are passed to the
+        :func:`~eventsourcing.application.Application.notify` method.
+        """
+        processing_event = ProcessingEvent(tracking=tracking)
+        self.policy(domain_event, processing_event)
+        try:
+            recordings = self._record(processing_event)
+        except IntegrityError:
+            if self.recorder.has_tracking_id(
+                tracking.application_name,
+                tracking.notification_id,
+            ):
+                pass
+            else:
+                raise
+        else:
+            self._take_snapshots(processing_event)
+            self.notify(processing_event.events)
+            self._notify(recordings)
+
+    @singledispatchmethod
+    def policy(
+        self,
+        domain_event: DomainEventProtocol,
+        processing_event: ProcessingEvent,
+    ) -> None:
+        """Abstract domain event processing policy method. Must be
+        implemented by event processing applications. When
+        processing the given domain event, event processing
+        applications must use the :func:`~ProcessingEvent.collect_events`
+        method of the given :py:class:`~ProcessingEvent` object (not
+        the application's :func:`~eventsourcing.application.Application.save`
+        method) so that the new domain events will be recorded atomically
+        and uniquely with tracking information about the position of the processed
+        event in its application sequence.
+        """
+
+
 TApplication = TypeVar("TApplication", bound=Application)
+TEventSourcedProjection = TypeVar(
+    "TEventSourcedProjection", bound=EventSourcedProjection
+)


-class ProjectionRunner(Generic[TApplication, TTrackingRecorder]):
+class BaseProjectionRunner(Generic[TApplication]):
     def __init__(
         self,
         *,
+        projection: EventSourcedProjection | Projection[Any],
         application_class: type[TApplication],
-
-
+        tracking_recorder: TrackingRecorder,
+        topics: Sequence[str],
         env: EnvType | None = None,
-    ):
-
-        Also constructs a materialised view from given class using an infrastructure
-        factory constructed with an environment named after the projection. Also
-        constructs a projection with the constructed materialised view object.
-        Starts a subscription to application and, in a separate event-processing
-        thread, calls projection's process_event() method for each event and tracking
-        object pair received from the subscription.
-        """
+    ) -> None:
+        self._projection = projection
         self._is_interrupted = Event()
         self._has_called_stop = False

+        # Construct the application.
         self.app: TApplication = application_class(env)

-        self.view = (
-            InfrastructureFactory[TTrackingRecorder]
-            .construct(
-                env=self._construct_env(
-                    name=projection_class.name or projection_class.__name__, env=env
-                )
-            )
-            .tracking_recorder(view_class)
-        )
+        self._tracking_recorder = tracking_recorder

-
-
-        )
-        self.subscription = ApplicationSubscription(
+        # Subscribe to the application.
+        self._subscription = ApplicationSubscription(
             app=self.app,
-            gt=self.
-            topics=
+            gt=self._tracking_recorder.max_tracking_id(self.app.name),
+            topics=topics,
         )
+
+        # Start a thread to stop the subscription when the runner is interrupted.
         self._thread_error: BaseException | None = None
         self._stop_thread = Thread(
             target=self._stop_subscription_when_stopping,
             kwargs={
-                "subscription": self.
+                "subscription": self._subscription,
                 "is_stopping": self._is_interrupted,
             },
         )
         self._stop_thread.start()
+
+        # Start a thread to iterate over the subscription.
         self._processing_thread = Thread(
             target=self._process_events_loop,
             kwargs={
-                "subscription": self.
-                "projection": self.
+                "subscription": self._subscription,
+                "projection": self._projection,
                 "is_stopping": self._is_interrupted,
                 "runner": weakref.ref(self),
             },

@@ -180,7 +256,8 @@ class ProjectionRunner(Generic[TApplication, TTrackingRecorder]):
     def is_interrupted(self) -> Event:
         return self._is_interrupted

-
+    @staticmethod
+    def _construct_env(name: str, env: EnvType | None = None) -> Environment:
         """Constructs environment from which projection will be configured."""
         _env: dict[str, str] = {}
         _env.update(os.environ)

@@ -210,10 +287,11 @@ class ProjectionRunner(Generic[TApplication, TTrackingRecorder]):
     @staticmethod
     def _process_events_loop(
         subscription: ApplicationSubscription,
-        projection: Projection[
+        projection: EventSourcedProjection | Projection[Any],
         is_stopping: Event,
         runner: weakref.ReferenceType[ProjectionRunner[Application, TrackingRecorder]],
     ) -> None:
+        """Iterates over the subscription and calls process_event()."""
         try:
             with subscription:
                 for domain_event, tracking in subscription:

@@ -250,8 +328,8 @@ class ProjectionRunner(Generic[TApplication, TTrackingRecorder]):
         object that is greater than or equal to the given notification ID.
         """
         try:
-            self.
-                application_name=self.
+            self._tracking_recorder.wait(
+                application_name=self.app.name,
                 notification_id=notification_id,
                 timeout=timeout,
                 interrupt=self._is_interrupted,

@@ -287,3 +365,64 @@ class ProjectionRunner(Generic[TApplication, TTrackingRecorder]):
         """Calls stop()."""
         with contextlib.suppress(AttributeError):
             self.stop()
+
+
+class ProjectionRunner(
+    BaseProjectionRunner[TApplication], Generic[TApplication, TTrackingRecorder]
+):
+    def __init__(
+        self,
+        *,
+        application_class: type[TApplication],
+        projection_class: type[Projection[TTrackingRecorder]],
+        view_class: type[TTrackingRecorder],
+        env: EnvType | None = None,
+    ):
+        """Constructs application from given application class with given environment.
+        Also constructs a materialised view from given class using an infrastructure
+        factory constructed with an environment named after the projection. Also
+        constructs a projection with the constructed materialised view object.
+        Starts a subscription to application and, in a separate event-processing
+        thread, calls projection's process_event() method for each event and tracking
+        object pair received from the subscription.
+        """
+        # Construct the materialised view using an infrastructure factory.
+        self.view = (
+            InfrastructureFactory[TTrackingRecorder]
+            .construct(env=self._construct_env(name=projection_class.name, env=env))
+            .tracking_recorder(view_class)
+        )
+
+        # Construct the projection using the materialised view.
+        self.projection = projection_class(view=self.view)
+
+        super().__init__(
+            projection=self.projection,
+            application_class=application_class,
+            tracking_recorder=self.view,
+            topics=self.projection.topics,
+            env=env,
+        )
+
+
+class EventSourcedProjectionRunner(
+    BaseProjectionRunner[TApplication], Generic[TApplication, TEventSourcedProjection]
+):
+    def __init__(
+        self,
+        *,
+        application_class: type[TApplication],
+        projection_class: type[TEventSourcedProjection],
+        env: EnvType | None = None,
+    ):
+        self.projection: TEventSourcedProjection = projection_class(
+            env=self._construct_env(name=projection_class.name, env=env)
+        )
+
+        super().__init__(
+            projection=self.projection,
+            application_class=application_class,
+            tracking_recorder=self.projection.recorder,
+            topics=self.projection.topics,
+            env=env,
+        )
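Note on the projection.py changes above: event processing has been factored out of `Follower` into the new `EventSourcedProjection` class, and the old `ProjectionRunner` has been split into `BaseProjectionRunner`, `ProjectionRunner` and the new `EventSourcedProjectionRunner`. A rough sketch of how the new pieces are wired together, based only on the signatures visible in this diff (the `MyApplication` and `MyProjection` names are hypothetical, and the default in-memory infrastructure is assumed):

from eventsourcing.application import Application, ProcessingEvent
from eventsourcing.dispatch import singledispatchmethod
from eventsourcing.domain import DomainEventProtocol
from eventsourcing.projection import EventSourcedProjection, EventSourcedProjectionRunner


class MyApplication(Application):
    """Hypothetical upstream application whose events are processed."""


class MyProjection(EventSourcedProjection):
    # Optional topic filter applied when subscribing to the upstream application.
    topics = ()

    @singledispatchmethod
    def policy(
        self,
        domain_event: DomainEventProtocol,
        processing_event: ProcessingEvent,
    ) -> None:
        """Collect new events on processing_event; unmatched events are ignored."""


# Runs the projection in background threads, tracking its position
# in the upstream application's notification sequence.
runner = EventSourcedProjectionRunner(
    application_class=MyApplication,
    projection_class=MyProjection,
)
runner.stop()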
{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/sqlite.py

@@ -472,6 +472,7 @@ class SQLiteApplicationRecorder(
     def subscribe(
         self, gt: int | None = None, topics: Sequence[str] = ()
     ) -> Subscription[ApplicationRecorder]:
+        """This method is not implemented on this class."""
         msg = f"The {type(self).__qualname__} recorder does not support subscriptions"
         raise NotImplementedError(msg)

{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/system.py

@@ -6,26 +6,25 @@ import traceback
 from abc import ABC, abstractmethod
 from collections import defaultdict
 from queue import Full, Queue
+from types import FrameType, ModuleType
 from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast

+from eventsourcing.projection import EventSourcedProjection
+
 if TYPE_CHECKING:
     from collections.abc import Iterable, Iterator, Sequence
-    from types import FrameType, ModuleType

     from typing_extensions import Self

     from eventsourcing.application import (
         Application,
         NotificationLog,
-        ProcessingEvent,
         ProgrammingError,
         Section,
         TApplication,
     )
-    from eventsourcing.dispatch import singledispatchmethod
     from eventsourcing.domain import DomainEventProtocol, MutableOrImmutableAggregate
     from eventsourcing.persistence import (
-        IntegrityError,
         Mapper,
         Notification,
         ProcessRecorder,

@@ -52,29 +51,25 @@ class RecordingEvent:
 ConvertingJob = Optional[Union[RecordingEvent, list[Notification]]]


-class Follower(Application):
-    """Extends the :class:`~eventsourcing.
-    by
-
-
+class Follower(EventSourcedProjection):
+    """Extends the :class:`~eventsourcing.projection.EventSourcedProjection` class
+    by pulling notification objects from its notification log readers, by converting
+    the notification objects to domain events and tracking objects and by processing
+    the reconstructed domain event objects.
     """

-    follow_topics: ClassVar[Sequence[str]] = []
     pull_section_size = 10

+    def __init_subclass__(cls, **kwargs: Any) -> None:
+        super().__init_subclass__(**kwargs)
+        # for backwards compatibility, set "topics" if has "follow_topics".
+        cls.topics = getattr(cls, "follow_topics", cls.topics)
+
     def __init__(self, env: EnvType | None = None) -> None:
         super().__init__(env)
         self.readers: dict[str, NotificationLogReader] = {}
         self.mappers: dict[str, Mapper] = {}
-        self.recorder: ProcessRecorder
         self.is_threading_enabled = False
-        self.processing_lock = threading.Lock()
-
-    def construct_recorder(self) -> ProcessRecorder:
-        """Constructs and returns a :class:`~eventsourcing.persistence.ProcessRecorder`
-        for the application to use as its application recorder.
-        """
-        return self.factory.process_recorder()

     def follow(self, name: str, log: NotificationLog) -> None:
         """Constructs a notification log reader and a mapper for

@@ -107,6 +102,12 @@ class Follower(Application):
         ):
             self.process_event(domain_event, tracking)

+    def process_event(
+        self, domain_event: DomainEventProtocol, tracking: Tracking
+    ) -> None:
+        with self.processing_lock:
+            super().process_event(domain_event, tracking)
+
     def pull_notifications(
         self,
         leader_name: str,

@@ -121,15 +122,15 @@ class Follower(Application):
         return self.readers[leader_name].select(
             start=start,
             stop=stop,
-            topics=self.follow_topics,
+            topics=self.topics,
             inclusive_of_start=inclusive_of_start,
         )

     def filter_received_notifications(
         self, notifications: list[Notification]
     ) -> list[Notification]:
-        if self.follow_topics:
-            return [n for n in notifications if n.topic in self.follow_topics]
+        if self.topics:
+            return [n for n in notifications if n.topic in self.topics]
         return notifications

     def convert_notifications(

@@ -151,64 +152,6 @@ class Follower(Application):
             processing_jobs.append((domain_event, tracking))
         return processing_jobs

-    # @retry(IntegrityError, max_attempts=50000, wait=0.01)
-    def process_event(
-        self, domain_event: DomainEventProtocol, tracking: Tracking
-    ) -> None:
-        """Calls :func:`~eventsourcing.system.Follower.policy` method with
-        the given :class:`~eventsourcing.domain.AggregateEvent` and a
-        new :class:`~eventsourcing.application.ProcessingEvent` created from
-        the given :class:`~eventsourcing.persistence.Tracking` object.
-
-        The policy will collect any new aggregate events on the process
-        event object.
-
-        After the policy method returns, the process event object will
-        then be recorded by calling
-        :func:`~eventsourcing.application.Application.record`, which
-        will return new notifications.
-
-        After calling
-        :func:`~eventsourcing.application.Application.take_snapshots`,
-        the new notifications are passed to the
-        :func:`~eventsourcing.application.Application.notify` method.
-        """
-        processing_event = ProcessingEvent(tracking=tracking)
-        with self.processing_lock:
-            self.policy(domain_event, processing_event)
-            try:
-                recordings = self._record(processing_event)
-            except IntegrityError:
-                if self.recorder.has_tracking_id(
-                    tracking.application_name,
-                    tracking.notification_id,
-                ):
-                    pass
-                else:
-                    raise
-            else:
-                self._take_snapshots(processing_event)
-                self.notify(processing_event.events)
-                self._notify(recordings)
-
-    @singledispatchmethod
-    def policy(
-        self,
-        domain_event: DomainEventProtocol,
-        processing_event: ProcessingEvent,
-    ) -> None:
-        """Abstract domain event processing policy method. Must be
-        implemented by event processing applications. When
-        processing the given domain event, event processing
-        applications must use the :func:`~ProcessingEvent.collect_events`
-        method of the given process event object (instead of
-        the application's :func:`~eventsourcing.application.Application.save`
-        method) to collect pending events from changed aggregates,
-        so that the new domain events will be recorded atomically
-        with tracking information about the position of the given
-        domain event's notification.
-        """
-

 class RecordingEventReceiver(ABC):
     """Abstract base class for objects that may receive recording events."""

@@ -279,8 +222,8 @@ class System:
         pipes: Iterable[Iterable[type[Application]]],
     ):
         # Remember the caller frame's module, so that we might identify a topic.
-        caller_frame = cast(
-        module = cast(
+        caller_frame = cast(FrameType, inspect.currentframe()).f_back
+        module = cast(ModuleType, inspect.getmodule(caller_frame))
         type(self).__caller_modules[id(self)] = module  # noqa: SLF001

         # Build nodes and edges.

@@ -629,9 +572,9 @@ class NewSingleThreadedRunner(Runner, RecordingEventReceiver):
                 follower = self.apps[follower_name]
                 assert isinstance(follower, Follower)
                 if (
-                    follower.follow_topics
+                    follower.topics
                     and recording.notification.topic
-                    not in follower.follow_topics
+                    not in follower.topics
                 ):
                     continue
                 follower.process_event(

@@ -1076,9 +1019,8 @@ class ConvertingThread(threading.Thread):
                 recording_event = recording_event_or_notifications
                 for recording in recording_event.recordings:
                     if (
-                        self.follower.follow_topics
-                        and recording.notification.topic
-                        not in self.follower.follow_topics
+                        self.follower.topics
+                        and recording.notification.topic not in self.follower.topics
                     ):
                         continue
                     tracking = Tracking(
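Note on the system.py changes above: `Follower` now extends the new `EventSourcedProjection` class, and its `follow_topics` attribute has been renamed to `topics`; the `__init_subclass__` hook shown in the diff copies a legacy `follow_topics` value onto `topics` for backwards compatibility. A hedged sketch of the two spellings (the `Analytics` classes and the topic strings are invented for illustration):

from eventsourcing.system import Follower


class Analytics(Follower):
    # New spelling: only notifications with these topics are pulled and processed.
    topics = ("mydomain:Order.Created", "mydomain:Order.Paid")


class LegacyAnalytics(Follower):
    # Old spelling: still works, because Follower.__init_subclass__
    # assigns it to `topics` when the subclass is defined.
    follow_topics = ("mydomain:Order.Created",)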
{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/eventsourcing/tests/persistence.py

@@ -450,8 +450,8 @@ class ApplicationRecorderTestCase(TestCase, ABC, Generic[_TApplicationRecorder])
         num_writers = 10
         num_writes_per_writer = 100
         num_events_per_write = 100
-        reader_sleep = 0.
-        writer_sleep = 0.
+        reader_sleep = 0.0001
+        writer_sleep = 0.0001

         def insert_events() -> None:
             thread_id = get_ident()
{eventsourcing-9.4.0b3 → eventsourcing-9.4.1}/pyproject.toml

@@ -1,16 +1,21 @@
-[
-
-
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"

+[project]
+name = "eventsourcing"
+version = "9.4.1"
 description = "Event sourcing in Python"
+license = "BSD-3-Clause"
+readme = "README.md"
+requires-python = ">=3.9.2"
 authors = [
-    "John Bywater
+    { "name" = "John Bywater", "email" = "john.bywater@appropriatesoftware.net" },
 ]
-license = "BSD 3-Clause"
 classifiers = [
     # "Development Status :: 3 - Alpha",
-    "Development Status :: 4 - Beta",
-
+    # "Development Status :: 4 - Beta",
+    "Development Status :: 5 - Production/Stable",
     "Intended Audience :: Developers",
     "Intended Audience :: Education",
     "Intended Audience :: Science/Research",

@@ -25,10 +30,6 @@ classifiers = [
     "Programming Language :: Python",
     "Topic :: Software Development :: Libraries :: Python Modules",
 ]
-readme = "README.md"
-homepage = "https://github.com/pyeventsourcing/eventsourcing"
-repository = "https://github.com/pyeventsourcing/eventsourcing"
-include = ["eventsourcing/py.typed"]
 keywords=[
     "event sourcing",
     "event store",

@@ -38,18 +39,21 @@ keywords=[
     "cqrs",
     "cqs",
 ]
+dependencies = [
+    "typing_extensions",
+]

-[
-
-
-
-cryptography = { version = "~44.0", optional = true }
-psycopg = { version = "<=3.2.99999", optional = true, extras=["pool"]}
+[project.optional-dependencies]
+crypto = ["pycryptodome>=3.22,<4.0"]
+cryptography = ["cryptography>=44.0,<45.0"]
+postgres = ["psycopg[pool]>=3.2,<3.3"]

-[
-
-
-
+[project.urls]
+homepage = "https://github.com/pyeventsourcing/eventsourcing"
+repository = "https://github.com/pyeventsourcing/eventsourcing"
+
+[tool.poetry]
+include = ["eventsourcing/py.typed"]

 [tool.poetry.group.dev.dependencies]
 black = { version = "*", allow-prereleases = true }

@@ -57,7 +61,6 @@ coverage = "^7.2.7"
 isort = "*"
 mypy = "*"
 ruff = "*"
-psycopg = { version = "<=3.2.99999", extras = ["pool"] }
 psycopg-binary = "*"
 pyright = "*"

@@ -68,11 +71,6 @@ pydantic = { version = "~2.9"}
 autodoc_pydantic = "*"
 orjson = { version = "~3.10.11"}

-
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
-
 [tool.black]
 line-length = 88
 target-version = ["py39"]

@@ -177,6 +175,7 @@ ignore = [
     "C901",  # is too complex
     "TD002",  # Missing author in TODO
     "TD003",  # Missing issue line for this TODO
+    "TC006",  # Add quotes to type expression in `typing.cast()`
     "PLR0915",  # Too many statements
     "PLR0912",  # Too many branches
     "S101",  # Use of `assert` detected

@@ -200,6 +199,10 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
 "*/sqlite.py" = [
     "S608",  # Possible SQL injection vector through string-based query construction
 ]
+"examples/shopstandard/domain.py" = [
+    "A002",
+]
+
 [tool.ruff.lint.flake8-type-checking]
 runtime-evaluated-base-classes = ["pydantic.BaseModel"]

All other files in the package are unchanged.