python-hexagonal 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. hexagonal/__init__.py +2 -0
  2. hexagonal/adapters/drivens/buses/base/__init__.py +15 -0
  3. hexagonal/adapters/drivens/buses/base/command_bus.py +69 -0
  4. hexagonal/adapters/drivens/buses/base/event_bus.py +160 -0
  5. hexagonal/adapters/drivens/buses/base/infrastructure.py +38 -0
  6. hexagonal/adapters/drivens/buses/base/message_bus.py +73 -0
  7. hexagonal/adapters/drivens/buses/base/query.py +82 -0
  8. hexagonal/adapters/drivens/buses/base/utils.py +1 -0
  9. hexagonal/adapters/drivens/buses/inmemory/__init__.py +12 -0
  10. hexagonal/adapters/drivens/buses/inmemory/command_bus.py +70 -0
  11. hexagonal/adapters/drivens/buses/inmemory/event_bus.py +69 -0
  12. hexagonal/adapters/drivens/buses/inmemory/infra.py +49 -0
  13. hexagonal/adapters/drivens/mappers.py +127 -0
  14. hexagonal/adapters/drivens/repository/base/__init__.py +13 -0
  15. hexagonal/adapters/drivens/repository/base/repository.py +85 -0
  16. hexagonal/adapters/drivens/repository/base/unit_of_work.py +75 -0
  17. hexagonal/adapters/drivens/repository/sqlite/__init__.py +18 -0
  18. hexagonal/adapters/drivens/repository/sqlite/datastore.py +197 -0
  19. hexagonal/adapters/drivens/repository/sqlite/env_vars.py +2 -0
  20. hexagonal/adapters/drivens/repository/sqlite/infrastructure.py +20 -0
  21. hexagonal/adapters/drivens/repository/sqlite/outbox.py +405 -0
  22. hexagonal/adapters/drivens/repository/sqlite/repository.py +286 -0
  23. hexagonal/adapters/drivens/repository/sqlite/unit_of_work.py +25 -0
  24. hexagonal/adapters/drivers/__init__.py +5 -0
  25. hexagonal/adapters/drivers/app.py +38 -0
  26. hexagonal/application/__init__.py +29 -0
  27. hexagonal/application/api.py +61 -0
  28. hexagonal/application/app.py +76 -0
  29. hexagonal/application/bus_app.py +70 -0
  30. hexagonal/application/handlers.py +107 -0
  31. hexagonal/application/infrastructure.py +64 -0
  32. hexagonal/application/query.py +71 -0
  33. hexagonal/domain/__init__.py +77 -0
  34. hexagonal/domain/aggregate.py +159 -0
  35. hexagonal/domain/base.py +169 -0
  36. hexagonal/domain/exceptions.py +38 -0
  37. hexagonal/entrypoints/__init__.py +4 -0
  38. hexagonal/entrypoints/app.py +53 -0
  39. hexagonal/entrypoints/base.py +105 -0
  40. hexagonal/entrypoints/bus.py +68 -0
  41. hexagonal/entrypoints/sqlite.py +49 -0
  42. hexagonal/ports/__init__.py +0 -0
  43. hexagonal/ports/drivens/__init__.py +43 -0
  44. hexagonal/ports/drivens/application.py +35 -0
  45. hexagonal/ports/drivens/buses.py +148 -0
  46. hexagonal/ports/drivens/infrastructure.py +19 -0
  47. hexagonal/ports/drivens/repository.py +152 -0
  48. hexagonal/ports/drivers/__init__.py +3 -0
  49. hexagonal/ports/drivers/app.py +58 -0
  50. hexagonal/py.typed +0 -0
  51. python_hexagonal-0.1.0.dist-info/METADATA +15 -0
  52. python_hexagonal-0.1.0.dist-info/RECORD +53 -0
  53. python_hexagonal-0.1.0.dist-info/WHEEL +4 -0
@@ -0,0 +1,405 @@
1
+ """SQLite implementation for Outbox and Inbox repositories."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import logging
6
+ import sqlite3
7
+ from datetime import datetime, timezone
8
+ from typing import Any, ClassVar, Dict, Mapping
9
+ from uuid import UUID
10
+
11
+ import orjson
12
+ from eventsourcing.domain import CanCreateTimestamp
13
+ from eventsourcing.utils import strtobool
14
+
15
+ from hexagonal.adapters.drivens.mappers import MessageMapper, StoredMessage
16
+ from hexagonal.adapters.drivens.repository.base import BaseRepositoryAdapter
17
+ from hexagonal.application import InfrastructureGroup
18
+ from hexagonal.domain import CloudMessage, TMessagePayload
19
+ from hexagonal.ports.drivens import (
20
+ IInboxRepository,
21
+ IOutboxRepository,
22
+ IPairInboxOutbox,
23
+ )
24
+
25
+ from .datastore import SQLiteConnectionContextManager
26
+
27
+ logger = logging.getLogger(__name__)
28
+
29
+
30
class SQLiteOutboxRepository(
    BaseRepositoryAdapter[SQLiteConnectionContextManager],
    IOutboxRepository[SQLiteConnectionContextManager],
):
    """SQLite implementation of the Outbox pattern repository.

    Stores outgoing messages to be published to external systems,
    ensuring at-least-once delivery semantics.

    Each message is serialized with :mod:`orjson` into a single BLOB column;
    the domain payload is first converted by the ``MessageMapper`` into a
    ``(topic, state-bytes)`` pair and embedded hex-encoded so the whole
    envelope survives a JSON round-trip.
    """

    # Defaults consumed by initialize() through ``self.env``.
    ENV: ClassVar[Dict[str, str]] = {
        "TABLE_NAME": "outbox",
        "CREATE_TABLES": "False",
    }

    def __init__(
        self,
        mapper: MessageMapper,
        connection_manager: SQLiteConnectionContextManager,
    ):
        """Keep the payload mapper and hand the connection manager to the base."""
        super().__init__(connection_manager)
        self._mapper = mapper

    def initialize(self, env: Mapping[str, str]) -> None:
        """Read table configuration from *env*; create the tables when requested."""
        super().initialize(env)
        self._table_name: str = self.env.get("TABLE_NAME", "outbox")
        create_tables = strtobool(self.env.get("CREATE_TABLES", "False"))
        self.create_table_statements: list[str] = self._create_table_statements()

        if create_tables:
            self.create_tables()

    def cursor(self):
        """Open a committing transaction on the datastore and yield its cursor.

        Fix: the original read ``self._connection_manager`` here while every
        other method of this class uses the public ``self.connection_manager``
        accessor; use the public accessor consistently.
        """
        return self.connection_manager.datastore.transaction(commit=True)

    def _create_table_statements(self) -> list[str]:
        """DDL for the outbox table plus its supporting indexes."""
        return [
            f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                message_id TEXT PRIMARY KEY,
                topic TEXT NOT NULL,
                message BLOB NOT NULL,
                published_at TIMESTAMP,
                failed_at TIMESTAMP,
                error TEXT,
                retry_count INTEGER DEFAULT 0,
                created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
            );
            """,
            # Partial index: only unpublished rows, which is exactly what
            # fetch_pending() scans.
            f"""
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_published
            ON {self._table_name}(published_at)
            WHERE published_at IS NULL;
            """,
            f"""
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_topic
            ON {self._table_name}(topic);
            """,
        ]

    def create_tables(self) -> None:
        """Create the outbox table and indexes if they don't exist."""
        # Consistency: reuse this class's own cursor() helper instead of
        # reaching through connection_manager.datastore directly.
        with self.cursor() as cursor:
            for statement in self.create_table_statements:
                cursor.execute(statement)
        logger.debug("Outbox table created: %s", self._table_name)

    def _get_stored_message(self, message: CloudMessage[Any]) -> tuple[str, str, bytes]:
        """Serialize *message* into ``(message_id, topic, blob)`` for storage."""
        stored = self._mapper.to_stored_message(message.payload)
        serialized_message: dict[str, Any] = {
            "type": message.type,
            "metadata": message.metadata,
            "correlation_id": str(message.correlation_id)
            if message.correlation_id
            else None,
            "causation_id": str(message.causation_id) if message.causation_id else None,
            "occurred_at": message.occurred_at.isoformat(),
            "payload_topic": stored.topic,
            # Hex-encode the opaque payload bytes so they are JSON-safe.
            "payload_state": stored.state.hex(),
        }

        return (
            str(message.message_id),
            stored.topic,
            orjson.dumps(serialized_message),
        )

    def save(self, *messages: CloudMessage[Any]) -> None:
        """Save messages to the outbox table (idempotent on message_id).

        ``INSERT OR IGNORE`` makes re-saving an already stored message a
        no-op, supporting at-least-once producers.
        """
        self.verify()
        if not messages:
            return

        rows = map(self._get_stored_message, messages)

        with self.connection_manager.cursor() as cursor:
            cursor.executemany(
                f"""
                INSERT OR IGNORE INTO {self._table_name} (
                    message_id,
                    topic,
                    message,
                    created_at
                ) VALUES (?, ?, ?, ?)
                """,
                [
                    (
                        mid,
                        topic,
                        message,
                        CanCreateTimestamp.create_timestamp().isoformat(
                            " ", "milliseconds"
                        ),
                    )
                    for mid, topic, message in rows
                ],
            )

    def fetch_pending(
        self, limit: int | None = None, max_retries: int = 3
    ) -> list[CloudMessage[Any]]:
        """Fetch pending messages that haven't been published yet.

        Args:
            limit: Maximum number of messages to return (all when ``None``).
            max_retries: A failed message is still eligible while its
                ``retry_count`` is below this value.  Generalized from the
                previously hard-coded ``3`` (which remains the default),
                matching ``SQLiteInboxRepository.register_message``.

        Returns:
            Deserialized messages in creation order; rows that fail to
            deserialize are logged and skipped.
        """
        self.verify()
        params: tuple[Any, ...] = (max_retries,)
        if limit is not None:
            params += (limit,)
        with self.connection_manager.cursor() as cursor:
            cursor.execute(
                f"""
                SELECT
                    message_id,
                    topic,
                    message
                FROM {self._table_name}
                WHERE published_at IS NULL
                AND (failed_at IS NULL OR retry_count < ?)
                ORDER BY created_at ASC
                {"LIMIT ?" if limit is not None else ""}
                """,
                params,
            )
            rows = cursor.fetchall()

        messages: list[CloudMessage[Any]] = []
        for row in rows:
            try:
                # Reconstruct the envelope from the stored blob.
                stored_payload_bytes: bytes = row[2]
                data = orjson.loads(stored_payload_bytes)
                payload_state = bytes.fromhex(data.pop("payload_state"))
                payload_topic = data.pop("payload_topic")
                stored_message = StoredMessage(payload_topic, payload_state)
                payload = self._mapper.to_message(stored_message)
                # NOTE(review): if TMessagePayload is a TypeVar this isinstance
                # raises TypeError at runtime (swallowed by the except below) —
                # confirm it is a concrete, runtime-checkable class.
                assert isinstance(payload, TMessagePayload)
                data["payload"] = payload
                data["message_id"] = row[0]
                cloud_message = CloudMessage[type(payload)](**data)  # type: ignore
                messages.append(cloud_message)
            except Exception as e:
                # Best-effort: a poison message is logged and skipped, never
                # allowed to block the rest of the batch.
                logger.error(
                    "Failed to deserialize message %s: %s", row[0], e, exc_info=True
                )
                continue

        return messages

    def mark_as_published(self, *message_ids: UUID) -> None:
        """Mark messages as successfully published."""
        self.verify()
        if not message_ids:
            return

        with self.cursor() as cursor:
            # NOTE(review): second resolution here vs millisecond timestamps
            # elsewhere in this module — kept as-is to preserve stored format.
            now = datetime.now(timezone.utc).isoformat(" ", "seconds")
            cursor.executemany(
                f"""
                UPDATE {self._table_name}
                SET published_at = ?
                WHERE message_id = ?
                """,
                [(now, str(mid)) for mid in message_ids],
            )

    def mark_as_failed(self, *message_ids: UUID, error: str) -> None:
        """Mark messages as failed and increment their retry count.

        Args:
            message_ids: Ids of the messages that failed to publish.
            error: Human-readable failure reason stored with each row.
        """
        self.verify()
        if not message_ids:
            return

        with self.cursor() as cursor:
            cursor.executemany(
                f"""
                UPDATE {self._table_name}
                SET
                    failed_at = ?,
                    error = ?,
                    retry_count = retry_count + 1
                WHERE message_id = ?
                """,
                [
                    (datetime.now(timezone.utc).isoformat(), error, str(mid))
                    for mid in message_ids
                ],
            )
229
+
230
+
231
class SQLiteInboxRepository(
    BaseRepositoryAdapter[SQLiteConnectionContextManager],
    IInboxRepository[SQLiteConnectionContextManager],
):
    """SQLite implementation of the Inbox pattern repository.

    Ensures idempotent message processing by tracking which messages
    have been processed by which handlers.

    Deduplication relies on the composite primary key
    ``(message_id, handler)``: a second registration attempt raises
    ``sqlite3.IntegrityError``, which register_message() interprets.
    """

    # Defaults consumed by initialize() through ``self.env``.
    ENV: ClassVar[Dict[str, str]] = {
        "TABLE_NAME": "inbox",
        "CREATE_TABLES": "False",
    }

    def cursor(self):
        """Open a committing transaction on the datastore and yield its cursor."""
        return self._connection_manager.datastore.transaction(commit=True)

    def __init__(
        self,
        mapper: MessageMapper,
        connection_manager: SQLiteConnectionContextManager,
    ):
        """Keep the payload mapper and hand the connection manager to the base.

        Note: ``mapper`` is stored but not used by any method visible in this
        class — kept for interface symmetry with the outbox repository.
        """
        super().__init__(connection_manager)
        self._mapper = mapper

    def initialize(self, env: Mapping[str, str]) -> None:
        """Read table configuration from *env*; create the tables when requested."""
        super().initialize(env)
        self._table_name: str = self.env.get("TABLE_NAME", "inbox")
        create_tables = strtobool(self.env.get("CREATE_TABLES", "False"))
        self.create_table_statements: list[str] = self._create_table_statements()

        if create_tables:
            self.create_tables()

    def _create_table_statements(self) -> list[str]:
        """DDL for the inbox table plus its supporting partial index."""
        return [
            f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                message_id TEXT NOT NULL,
                handler TEXT NOT NULL,
                received_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                processed_at TIMESTAMP,
                error TEXT,
                retry_count INTEGER NOT NULL DEFAULT 0,
                failed_at TIMESTAMP,
                PRIMARY KEY (message_id, handler)
            );
            """,
            f"""
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_processed
            ON {self._table_name}(processed_at)
            WHERE processed_at IS NULL;
            """,
        ]

    def create_tables(self) -> None:
        """Create the inbox table if it doesn't exist."""
        with self.cursor() as cursor:
            for statement in self.create_table_statements:
                cursor.execute(statement)
        logger.debug("Inbox table created: %s", self._table_name)

    def register_message(
        self, message: CloudMessage[Any], handler: str, max_retries: int = 3
    ) -> bool:
        """Register a message for processing.

        Args:
            message: The incoming message to record.
            handler: Name of the handler that will process it.
            max_retries: A previously failed registration is still considered
                pending (returns False) while retry_count is below this value.

        Returns:
            True if the message was already registered (duplicate)
            False if this is a new message
        """
        self.verify()
        with self.cursor() as cursor:
            try:
                cursor.execute(
                    f"""
                    INSERT INTO {self._table_name} (message_id, handler, received_at)
                    VALUES (?, ?, ?)
                    """,
                    (
                        str(message.message_id),
                        handler,
                        CanCreateTimestamp.create_timestamp().isoformat(
                            " ", "milliseconds"
                        ),
                    ),
                )
                # Successfully inserted, so it's a new message
                return False
            except sqlite3.IntegrityError:
                # Primary key violation means it's a duplicate
                # Re-check the existing row: if it is still unprocessed and
                # retryable, report it as not-a-duplicate so the handler runs
                # again; otherwise treat it as already handled.
                cursor.execute(
                    f"""
                    select
                        message_id,
                        handler,
                        processed_at,
                        error,
                        retry_count,
                        failed_at
                    from {self._table_name}
                    where message_id = ?
                    and handler = ?
                    and processed_at is null
                    and (failed_at is null or retry_count < ?)
                    """,
                    (str(message.message_id), handler, max_retries),
                )
                row = cursor.fetchone()
                if row is not None:
                    return False

            # Reached only via the IntegrityError path with no retryable row:
            # the message was already processed or exhausted its retries.
            return True

    def mark_as_processed(self, message_id: UUID, handler: str) -> None:
        """Mark a message as successfully processed by a handler."""
        self.verify()
        with self.cursor() as cursor:
            cursor.execute(
                f"""
                UPDATE {self._table_name}
                SET processed_at = ?
                WHERE message_id = ? AND handler = ?
                """,
                (
                    CanCreateTimestamp.create_timestamp().isoformat(
                        " ", "milliseconds"
                    ),
                    str(message_id),
                    handler,
                ),
            )

    def mark_as_failed(self, message_id: UUID, handler: str, error: str) -> None:
        """Mark a message as failed by a handler.

        Records the failure time and reason, and increments retry_count so
        register_message() can eventually stop retrying the message.
        """
        self.verify()
        with self.cursor() as cursor:
            cursor.execute(
                f"""
                UPDATE {self._table_name}
                SET failed_at = ?, error = ?, retry_count = retry_count + 1
                WHERE message_id = ? AND handler = ?
                """,
                (
                    CanCreateTimestamp.create_timestamp().isoformat(
                        " ", "milliseconds"
                    ),
                    error,
                    str(message_id),
                    handler,
                ),
            )
384
+
385
+
386
class SQLitePairInboxOutbox(
    InfrastructureGroup,
    IPairInboxOutbox[SQLiteConnectionContextManager],
):
    """Bundle a SQLite inbox and outbox that share one connection manager.

    Registering both repositories with the InfrastructureGroup base lets the
    pair be initialized/managed as a single infrastructure unit.
    """

    def __init__(
        self,
        mapper: MessageMapper,
        connection_manager: SQLiteConnectionContextManager,
    ):
        """Build both repositories over the same mapper and connection manager."""
        inbox_repo = SQLiteInboxRepository(mapper, connection_manager)
        outbox_repo = SQLiteOutboxRepository(mapper, connection_manager)
        self._inbox = inbox_repo
        self._outbox = outbox_repo
        super().__init__(inbox_repo, outbox_repo)

    @property
    def inbox(self) -> IInboxRepository[SQLiteConnectionContextManager]:
        """The inbox half of the pair."""
        return self._inbox

    @property
    def outbox(self) -> IOutboxRepository[SQLiteConnectionContextManager]:
        """The outbox half of the pair."""
        return self._outbox
@@ -0,0 +1,286 @@
1
+ """SQLite repository adapter for event-sourced aggregates."""
2
+ # pyright: reportMissingTypeStubs=false, reportUnknownArgumentType=false, reportMissingParameterType=none, reportGeneralTypeIssues=none
3
+
4
+ from __future__ import annotations
5
+
6
+ import logging
7
+ import sqlite3
8
+ from typing import Any, ClassVar, Dict, Mapping, Sequence, Tuple, TypeVar, Union, cast
9
+ from uuid import UUID
10
+
11
+ from eventsourcing.domain import CanMutateAggregate
12
+ from eventsourcing.persistence import StoredEvent
13
+ from eventsourcing.utils import strtobool
14
+
15
+ from hexagonal.adapters.drivens.repository.base import BaseAggregateRepositoryAdapter
16
+ from hexagonal.domain import (
17
+ AggregateNotFound,
18
+ AggregateRoot,
19
+ AggregateSnapshot,
20
+ AggregateVersionMismatch,
21
+ SnapshotState,
22
+ TIdEntity,
23
+ )
24
+
25
+ from .datastore import SQLiteConnectionContextManager
26
+
27
+ # Type aliases
28
+ SQLiteRow = Dict[str, Any]
29
+ SQLiteParams = Union[Tuple[Any, ...], Dict[str, Any]]
30
+ TAggregate = TypeVar("TAggregate", bound=AggregateRoot[Any, Any])
31
+
32
+ logger = logging.getLogger(__name__)
33
+
34
+
35
class SQLiteRepositoryAdapter(
    BaseAggregateRepositoryAdapter[
        SQLiteConnectionContextManager, TAggregate, TIdEntity
    ]
):
    """SQLite repository adapter for event-sourced aggregates.

    This adapter implements the IRepository interface using SQLite as the backing store.
    It handles the persistence and retrieval of event-sourced aggregates, including
    snapshotting and event history.

    Two tables are used: one row per aggregate holding its latest snapshot,
    and an append-only event-history table keyed by originator version.

    Args:
        datastore: The SQLite datastore to use for database connections
        mapper: Mapper for converting between domain and persistence models
        create_tables: If True, create the required database tables on initialization
        table_name: Base name for the database tables
            (will append _snapshots and _events)
    """

    # Defaults consumed by initialize() through ``self.env``.
    ENV: ClassVar[Dict[str, str]] = {
        "TABLE_NAME": "aggregates",
        "CREATE_TABLES": "False",
    }

    def initialize(self, env: Mapping[str, str]) -> None:
        """Read table configuration from *env*; create the tables when requested."""
        super().initialize(env)
        self._current_connection: sqlite3.Connection | None = None
        self._table_name: str = self.env.get("TABLE_NAME", "aggregates")
        # NOTE(review): _schema_name is read here but never used in this class,
        # and "SCHEMA_NAME" is not declared in ENV — confirm whether it is
        # consumed elsewhere or dead configuration.
        self._schema_name: str = self.env.get("SCHEMA_NAME", "anomatrace")
        create_tables = strtobool(self.env.get("CREATE_TABLES", "False"))
        self.create_table_statements: list[str] = self._create_table_statements()

        if create_tables:
            self.create_tables()

    def _create_table_statements(self) -> list[str]:
        """DDL for the snapshot table and the append-only event-history table."""
        complete_table_name = self._get_table_name()
        complete_event_history = self._get_event_history_table_name()

        return [
            f"""
            CREATE TABLE IF NOT EXISTS {complete_table_name} (
                originator_id UUID NOT NULL,
                aggregate_name TEXT NOT NULL,
                originator_version INTEGER NOT NULL,
                topic TEXT NOT NULL,
                state BLOB NOT NULL,
                timestamp TIMESTAMP NOT NULL,
                PRIMARY KEY (originator_id, aggregate_name)
            );
            """,
            f"""
            CREATE TABLE IF NOT EXISTS {complete_event_history} (
                originator_id UUID NOT NULL,
                aggregate_name TEXT NOT NULL,
                originator_version INTEGER NOT NULL,
                topic TEXT NOT NULL,
                state BLOB NOT NULL,
                timestamp TIMESTAMP NOT NULL,
                PRIMARY KEY (originator_id, aggregate_name, originator_version)
            );
            """,
        ]

    def create_tables(self) -> None:
        """Create the snapshot and event-history tables if they don't exist."""
        with self.connection_manager.datastore.transaction(commit=True) as cursor:
            for statement in self.create_table_statements:
                # NOTE(review): logs the snapshot-table name for every
                # statement (including the events table), and logs before
                # executing — the message may precede an execution failure.
                logger.debug("Table created: %s", self._get_table_name())
                cursor.execute(statement)

    def _get_table_name(self) -> str:
        # NOTE(review): with the default TABLE_NAME "aggregates" this yields
        # "aggregates_aggregates" — confirm intended.
        return f"aggregates_{self._table_name}"

    def _get_event_history_table_name(self) -> str:
        """Event-history table name derived from the snapshot table name."""
        return f"aggregates_{self._table_name}_events"

    def _verify_new_aggregate(
        self, cursor: sqlite3.Cursor, aggregate: TAggregate
    ) -> Tuple[Sequence[CanMutateAggregate[UUID]], bool]:
        """Collect pending events and check optimistic-concurrency version.

        Returns:
            (pending events, True if no snapshot exists yet).

        Raises:
            AggregateVersionMismatch: when the stored snapshot's version does
                not equal the aggregate's version minus its uncommitted events,
                i.e. someone else saved the aggregate in the meantime.
        """
        actual_agg = self._get_aggregate(cursor, aggregate.value_id)
        # collect_events() drains the aggregate's pending-event buffer.
        eventos = aggregate.collect_events()
        if actual_agg is None:
            return eventos, True
        if actual_agg.originator_version != aggregate.version - len(eventos):
            raise AggregateVersionMismatch(
                f"Aggregate {self.aggregate_name} with id {aggregate.value_id}"
                " has a different version"
            )
        return eventos, False

    def _insert_snapshot(
        self, cursor: sqlite3.Cursor, snap: AggregateSnapshot[SnapshotState[TIdEntity]]
    ) -> None:
        """Insert the first snapshot row for a new aggregate."""
        stored_event = self._mapper.to_stored_event(snap)
        complete_table_name = self._get_table_name()
        cursor.execute(
            f"""
            INSERT INTO {complete_table_name} (
                originator_id,
                aggregate_name,
                originator_version,
                topic,
                state,
                timestamp)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (
                str(stored_event.originator_id),
                self.aggregate_name,
                stored_event.originator_version,
                stored_event.topic,
                stored_event.state,
                snap.timestamp.isoformat(" ", "milliseconds"),
            ),
        )

    def _update_snapshot(
        self, cursor: sqlite3.Cursor, snap: AggregateSnapshot[SnapshotState[TIdEntity]]
    ) -> None:
        """Overwrite the existing snapshot row with the aggregate's new state."""
        stored_event = self._mapper.to_stored_event(snap)
        complete_table_name = self._get_table_name()
        cursor.execute(
            f"""
            UPDATE {complete_table_name}
            SET
                originator_version = ?,
                topic = ?,
                state = ?,
                timestamp = ?
            WHERE originator_id = ?
            AND aggregate_name = ?
            """,
            (
                stored_event.originator_version,
                stored_event.topic,
                stored_event.state,
                snap.timestamp.isoformat(" ", "milliseconds"),
                str(stored_event.originator_id),
                self.aggregate_name,
            ),
        )

    def _save_event_history(
        self, cursor: sqlite3.Cursor, events: Sequence[CanMutateAggregate[UUID]]
    ) -> None:
        """Append the given domain events to the event-history table."""
        complete_event_history = self._get_event_history_table_name()
        # Pair each mapped stored event with its original domain timestamp;
        # strict=True guards against a mapper that drops events.
        stored_events = list(
            zip(
                map(self._mapper.to_stored_event, events),
                (e.timestamp for e in events),
                strict=True,
            )
        )
        cursor.executemany(
            f"""
            INSERT INTO {complete_event_history} (
                originator_id,
                aggregate_name,
                originator_version,
                topic,
                state,
                timestamp)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            [
                (
                    str(stored_event.originator_id),
                    self.aggregate_name,
                    stored_event.originator_version,
                    stored_event.topic,
                    stored_event.state,
                    timestamp.isoformat(" ", "milliseconds"),
                )
                for stored_event, timestamp in stored_events
            ],  # type: ignore
        )

    def save(self, aggregate: TAggregate) -> None:
        """Persist the aggregate: upsert its snapshot and append its events.

        Raises:
            AggregateVersionMismatch: on a concurrent-modification conflict.
        """
        self.verify()
        with self.connection_manager.cursor() as cursor:
            eventos, new = self._verify_new_aggregate(cursor, aggregate)
            snapshot = aggregate.take_snapshot()
            if new:
                self._insert_snapshot(cursor, snapshot)
            else:
                self._update_snapshot(cursor, snapshot)
            self._save_event_history(cursor, eventos)
            # Read-back sanity check; stripped under ``python -O``.
            agg = self._get_aggregate(cursor, aggregate.value_id)
            assert agg is not None

    def _get_aggregate(
        self, cursor: sqlite3.Cursor, id: TIdEntity
    ) -> StoredEvent | None:
        """Fetch the stored snapshot row for *id*, or None when absent."""
        complete_table_name = self._get_table_name()
        cursor.execute(
            f"""
            SELECT
                originator_id,
                originator_version,
                topic,
                state
            FROM {complete_table_name}
            WHERE originator_id = ?
            AND aggregate_name = ?
            LIMIT 1
            """,
            (str(id.value), self.aggregate_name),
        )
        data: tuple[str, int, str, bytes] | None = cursor.fetchone()
        if data is None:
            return None
        stored_event: StoredEvent = StoredEvent(
            originator_id=data[0],
            originator_version=data[1],
            topic=data[2],
            state=data[3],
        )
        return stored_event

    def get(self, id: TIdEntity) -> TAggregate:
        """Load the aggregate identified by *id* from its latest snapshot.

        Raises:
            AggregateNotFound: when no snapshot row exists for *id*.
        """
        self.verify()
        with self.connection_manager.cursor() as cursor:
            stored_event = self._get_aggregate(cursor, id)
            if stored_event is None:
                raise AggregateNotFound(
                    f"Aggregate {self.aggregate_name} with id {id} not found"
                )
            snap_event = self._mapper.to_domain_event(stored_event)
            assert isinstance(snap_event, self._type_of_aggregate.Snapshot)
            # A snapshot event mutates None into a fully reconstructed aggregate.
            return cast(TAggregate, snap_event.mutate(None))

    def _delete(self, cursor: sqlite3.Cursor, id: TIdEntity) -> None:
        """Remove the snapshot row for *id* (event history is kept)."""
        complete_table_name = self._get_table_name()
        cursor.execute(
            f"""
            DELETE FROM {complete_table_name}
            WHERE originator_id = ?
            AND aggregate_name = ?
            """,
            (str(id.value), self.aggregate_name),
        )

    def delete(self, id: TIdEntity) -> TAggregate:
        """Delete the aggregate: record a Deleted event, then drop its snapshot.

        Returns:
            The aggregate as it was (with the Deleted event applied) so the
            caller can still inspect or publish from it.
        """
        self.verify()
        agg = self.get(id)

        agg.trigger_event(event_class=agg.Deleted)
        events = agg.collect_events()
        with self.connection_manager.cursor() as cursor:
            self._save_event_history(cursor, events)
            self._delete(cursor, id)
        return agg