@l-etabli/events 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/README.md +262 -11
  2. package/dist/adapters/in-memory/InMemoryEventQueries.cjs +9 -1
  3. package/dist/adapters/in-memory/InMemoryEventQueries.cjs.map +1 -1
  4. package/dist/adapters/in-memory/InMemoryEventQueries.mjs +9 -1
  5. package/dist/adapters/in-memory/InMemoryEventQueries.mjs.map +1 -1
  6. package/dist/adapters/in-memory/InMemoryEventRepository.cjs +4 -2
  7. package/dist/adapters/in-memory/InMemoryEventRepository.cjs.map +1 -1
  8. package/dist/adapters/in-memory/InMemoryEventRepository.mjs +4 -2
  9. package/dist/adapters/in-memory/InMemoryEventRepository.mjs.map +1 -1
  10. package/dist/adapters/kysely/KyselyEventQueries.cjs +6 -0
  11. package/dist/adapters/kysely/KyselyEventQueries.cjs.map +1 -1
  12. package/dist/adapters/kysely/KyselyEventQueries.mjs +6 -0
  13. package/dist/adapters/kysely/KyselyEventQueries.mjs.map +1 -1
  14. package/dist/adapters/kysely/KyselyEventRepository.cjs +4 -1
  15. package/dist/adapters/kysely/KyselyEventRepository.cjs.map +1 -1
  16. package/dist/adapters/kysely/KyselyEventRepository.mjs +4 -1
  17. package/dist/adapters/kysely/KyselyEventRepository.mjs.map +1 -1
  18. package/dist/createEventCrawler.cjs +16 -1
  19. package/dist/createEventCrawler.cjs.map +1 -1
  20. package/dist/createEventCrawler.d.cts +13 -2
  21. package/dist/createEventCrawler.d.ts +13 -2
  22. package/dist/createEventCrawler.mjs +16 -1
  23. package/dist/createEventCrawler.mjs.map +1 -1
  24. package/dist/index.d.cts +1 -1
  25. package/dist/index.d.ts +1 -1
  26. package/dist/ports/EventQueries.cjs.map +1 -1
  27. package/dist/ports/EventQueries.d.cts +7 -0
  28. package/dist/ports/EventQueries.d.ts +7 -0
  29. package/dist/ports/EventRepository.cjs.map +1 -1
  30. package/dist/ports/EventRepository.d.cts +32 -4
  31. package/dist/ports/EventRepository.d.ts +32 -4
  32. package/package.json +4 -3
  33. package/src/adapters/in-memory/InMemoryEventQueries.ts +15 -1
  34. package/src/adapters/in-memory/InMemoryEventRepository.ts +5 -2
  35. package/src/adapters/kysely/KyselyEventQueries.ts +9 -1
  36. package/src/adapters/kysely/KyselyEventRepository.ts +15 -1
  37. package/src/createEventCrawler.ts +37 -3
  38. package/src/ports/EventQueries.ts +7 -0
  39. package/src/ports/EventRepository.ts +35 -3
package/README.md CHANGED
@@ -1,16 +1,267 @@
1
- # l-etabli/events
1
+ # @l-etabli/events
2
2
 
3
- ${project_description}
3
+ Event-driven architecture library implementing the **outbox pattern** for TypeScript.
4
4
 
5
+ Events are persisted in the same transaction as your domain changes, then reliably published asynchronously. No lost events, even on failures.
5
6
 
6
- This project is a template for creating typscript libraries.
7
+ ## Installation
7
8
 
8
- It uses :
9
+ ```bash
10
+ pnpm add @l-etabli/events
11
+ ```
9
12
 
10
- - Typescript for type checking
11
- - Bun for running tests
12
- - Bun as a package manager
13
- - Biome as a formatter
14
- - Biome as a linter
15
- - Lefthook for pre-commit hooks
16
- - GitHub Actions for CI, which will run typecheck, format, lint, test and than deploy the package to npm
13
+ For Kysely adapter (PostgreSQL):
14
+
15
+ ```bash
16
+ pnpm add @l-etabli/events kysely pg
17
+ ```
18
+
19
+ ## Quick Start
20
+
21
+ ### 1. Define Your Events
22
+
23
+ ```typescript
24
+ import { GenericEvent } from "@l-etabli/events";
25
+
26
+ type MyEvents =
27
+ | GenericEvent<"UserCreated", { userId: string; email: string }>
28
+ | GenericEvent<"OrderPlaced", { orderId: string; amount: number }>;
29
+ ```
30
+
31
+ ### 2. Setup Event Infrastructure
32
+
33
+ ```typescript
34
+ import {
35
+ createInMemoryEventBus,
36
+ createInMemoryEventRepositoryAndQueries,
37
+ createEventCrawler,
38
+ } from "@l-etabli/events";
39
+
40
+ const { eventQueries, withUow } = createInMemoryEventRepositoryAndQueries<MyEvents>();
41
+ const { eventBus, createNewEvent } = createInMemoryEventBus<MyEvents>(withUow);
42
+
43
+ const crawler = createEventCrawler({
44
+ withUow,
45
+ eventQueries,
46
+ eventBus,
47
+ });
48
+ ```
49
+
50
+ ### 3. Subscribe to Events
51
+
52
+ ```typescript
53
+ eventBus.subscribe({
54
+ topic: "OrderPlaced",
55
+ subscriptionId: "send-confirmation-email",
56
+ callBack: async (event) => {
57
+ await emailService.sendOrderConfirmation(event.payload.orderId);
58
+ },
59
+ });
60
+ ```
61
+
62
+ ### 4. Emit Events (in a use case)
63
+
64
+ ```typescript
65
+ await withUow(async (uow) => {
66
+ // Save your domain entity
67
+ await orderRepository.save(order);
68
+
69
+ // Emit event in the same transaction
70
+ await uow.eventRepository.saveNewEventsBatch([
71
+ createNewEvent({
72
+ topic: "OrderPlaced",
73
+ payload: { orderId: order.id, amount: order.total },
74
+ triggeredByUserId: currentUserId,
75
+ }),
76
+ ]);
77
+ });
78
+ ```
79
+
80
+ ### 5. Process Events
81
+
82
+ **Traditional server** - start background polling:
83
+
84
+ ```typescript
85
+ crawler.start();
86
+ ```
87
+
88
+ **Serverless** - trigger on-demand after commit:
89
+
90
+ ```typescript
91
+ await withUow(
92
+ async (uow) => {
93
+ await uow.eventRepository.saveNewEventsBatch([event]);
94
+ },
95
+ {
96
+ afterCommit: async () => {
97
+ await crawler.triggerProcessing();
98
+ },
99
+ }
100
+ );
101
+ ```
+
102
+ ### Returning Values from Transactions
103
+
104
+ The `withUow` function supports returning values from your transaction callback:
105
+
106
+ ```typescript
107
+ const result = await withUow(async (uow) => {
108
+ const order = await orderRepository.save(newOrder);
109
+
110
+ await uow.eventRepository.saveNewEventsBatch([
111
+ createNewEvent({
112
+ topic: "OrderPlaced",
113
+ payload: { orderId: order.id, amount: order.total },
114
+ triggeredByUserId: currentUserId,
115
+ }),
116
+ ]);
117
+
118
+ return { orderId: order.id, createdAt: order.createdAt };
119
+ });
120
+
121
+ console.log(result.orderId); // Access the returned value
122
+ ```
123
+
124
+ ## Event Lifecycle
125
+
126
+ ```
127
+ never-published → in-process → published
128
+ ↘ failed-but-will-retry → published
129
+ ↘ quarantined (after maxRetries)
130
+ ```
131
+
132
+ - `never-published` - New event, not yet processed
133
+ - `in-process` - Currently being published
134
+ - `published` - Successfully delivered to all subscribers
135
+ - `failed-but-will-retry` - Some subscribers failed, will retry
136
+ - `quarantined` - Exceeded max retries, requires manual intervention
137
+ - `to-republish` - Force republish to all subscribers
138
+
139
+ ## API Reference
140
+
141
+ ### Types
142
+
143
+ ```typescript
144
+ // Define events with topic, payload, and optional context
145
+ type GenericEvent<Topic, Payload, Context = undefined> = {
146
+ id: EventId;
147
+ topic: Topic;
148
+ payload: Payload;
149
+ status: EventStatus;
150
+ occurredAt: Date;
151
+ triggeredByUserId: UserId;
152
+ publications: EventPublication[];
153
+ context?: Context;
154
+ priority?: number;
155
+ };
156
+ ```
157
+
158
+ ### `makeCreateNewEvent<Events>(options?)`
159
+
160
+ Creates a type-safe event factory. Payload is validated against topic at compile time.
161
+
162
+ ```typescript
163
+ const createEvent = makeCreateNewEvent<MyEvents>({
164
+ getNow: () => new Date(), // optional, for testing
165
+ generateId: () => crypto.randomUUID(), // optional, for testing
166
+ });
167
+
168
+ // Type-safe: payload must match topic
169
+ createEvent({ topic: "UserCreated", payload: { userId: "1", email: "a@b.com" }, triggeredByUserId: "u1" });
170
+ ```
171
+
172
+ ### `createInMemoryEventBus<Events>(withUow, options?)`
173
+
174
+ Creates an in-memory event bus with a typed `createNewEvent` function.
175
+
176
+ ```typescript
177
+ const { eventBus, createNewEvent } = createInMemoryEventBus<MyEvents>(withUow, {
178
+ maxRetries: 3, // default
179
+ });
180
+ ```
181
+
182
+ ### `createEventCrawler(config)`
183
+
184
+ Creates a background processor for publishing events.
185
+
186
+ ```typescript
187
+ const crawler = createEventCrawler({
188
+ withUow,
189
+ eventQueries,
190
+ eventBus,
191
+ options: {
192
+ batchSize: 100, // events per batch (default: 100)
193
+ maxParallelProcessing: 1, // parallel publishes (default: 1)
194
+ newEventsIntervalMs: 10000, // polling interval (default: 10s)
195
+ failedEventsIntervalMs: 60000, // retry interval (default: 60s)
196
+ },
197
+ });
198
+
199
+ crawler.start(); // Start background polling
200
+ crawler.processNewEvents(); // Manual: process new events
201
+ crawler.retryFailedEvents(); // Manual: retry failed events
202
+ crawler.triggerProcessing(); // Manual: process new + retry failed
203
+ ```
204
+
205
+ ## Database Setup (Kysely/PostgreSQL)
206
+
207
+ ### Migration
208
+
209
+ ```typescript
210
+ import type { Kysely } from "kysely";
211
+
212
+ export async function up(db: Kysely<unknown>): Promise<void> {
213
+ await db.schema
214
+ .createTable("events")
215
+ .addColumn("id", "text", (col) => col.primaryKey())
216
+ .addColumn("topic", "text", (col) => col.notNull())
217
+ .addColumn("payload", "jsonb", (col) => col.notNull())
218
+ .addColumn("context", "jsonb")
219
+ .addColumn("status", "text", (col) => col.notNull())
220
+ .addColumn("triggeredByUserId", "text", (col) => col.notNull())
221
+ .addColumn("occurredAt", "timestamptz", (col) => col.notNull())
222
+ .addColumn("publications", "jsonb", (col) => col.notNull().defaultTo("[]"))
223
+ .addColumn("priority", "integer")
224
+ .execute();
225
+
226
+ await db.schema
227
+ .createIndex("events_status_idx")
228
+ .on("events")
229
+ .column("status")
230
+ .execute();
231
+
232
+ await db.schema
233
+ .createIndex("events_topic_idx")
234
+ .on("events")
235
+ .column("topic")
236
+ .execute();
237
+ }
238
+
239
+ export async function down(db: Kysely<unknown>): Promise<void> {
240
+ await db.schema.dropTable("events").execute();
241
+ }
242
+ ```
243
+
244
+ ### Usage with Kysely
245
+
246
+ ```typescript
247
+ import { createInMemoryEventBus, createEventCrawler } from "@l-etabli/events";
248
+ import {
249
+ KyselyEventRepository,
250
+ KyselyEventQueries,
251
+ createKyselyMigration,
252
+ } from "@l-etabli/events/kysely";
253
+
254
+ // See examples/kysely/ for complete implementation
255
+ ```
256
+
257
+ ## Examples
258
+
259
+ See the [`examples/`](./examples/) directory for complete implementations:
260
+
261
+ - **[kysely-adapter.ts](./examples/kysely/kysely-adapter.ts)** - Kysely adapter with transaction support
262
+ - **[serverless-usage.ts](./examples/kysely/serverless-usage.ts)** - AWS Lambda / serverless deployment
263
+ - **[cascading-events.ts](./examples/kysely/cascading-events.ts)** - Transactional event chains
264
+
265
+ ## License
266
+
267
+ MIT
@@ -31,8 +31,16 @@ const createInMemoryEventQueries = (helpers) => ({
31
31
  ([key, value]) => event.context?.[key] === value
32
32
  );
33
33
  };
34
+ const matchesOccurredAt = (event) => {
35
+ if (!filters.occurredAt) return true;
36
+ const { from, to } = filters.occurredAt;
37
+ const eventTime = event.occurredAt.getTime();
38
+ if (from && eventTime < from.getTime()) return false;
39
+ if (to && eventTime > to.getTime()) return false;
40
+ return true;
41
+ };
34
42
  return helpers.getAllEvents().filter(
35
- (event) => filters.statuses.includes(event.status) && matchesContext(event)
43
+ (event) => filters.statuses.includes(event.status) && matchesContext(event) && matchesOccurredAt(event)
36
44
  ).slice(0, limit);
37
45
  }
38
46
  }
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventQueries.ts"],"sourcesContent":["import type { EventQueries } from \"../../ports/EventQueries.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { InMemoryEventRepositoryHelpers } from \"./InMemoryEventRepository.ts\";\n\nexport const createInMemoryEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n helpers: InMemoryEventRepositoryHelpers<Event>,\n): { eventQueries: EventQueries<Event> } => ({\n eventQueries: {\n getEvents: async ({ filters, limit }) => {\n const matchesContext = (event: Event): boolean => {\n if (!filters.context) return true;\n if (!event.context) return false;\n\n return Object.entries(filters.context).every(\n ([key, value]) => event.context?.[key] === value,\n );\n };\n\n return helpers\n .getAllEvents()\n .filter(\n (event) =>\n filters.statuses.includes(event.status) && matchesContext(event),\n )\n .slice(0, limit);\n },\n },\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIO,MAAM,6BAA6B,CAGxC,aAC2C;AAAA,EAC3C,cAAc;AAAA,IACZ,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,YAAM,iBAAiB,CAAC,UAA0B;AAChD,YAAI,CAAC,QAAQ,QAAS,QAAO;AAC7B,YAAI,CAAC,MAAM,QAAS,QAAO;AAE3B,eAAO,OAAO,QAAQ,QAAQ,OAAO,EAAE;AAAA,UACrC,CAAC,CAAC,KAAK,KAAK,MAAM,MAAM,UAAU,GAAG,MAAM;AAAA,QAC7C;AAAA,MACF;AAEA,aAAO,QACJ,aAAa,EACb;AAAA,QACC,CAAC,UACC,QAAQ,SAAS,SAAS,MAAM,MAAM,KAAK,eAAe,KAAK;AAAA,MACnE,EACC,MAAM,GAAG,KAAK;AAAA,IACnB;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventQueries.ts"],"sourcesContent":["import type { EventQueries } from \"../../ports/EventQueries.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { InMemoryEventRepositoryHelpers } from \"./InMemoryEventRepository.ts\";\n\nexport const createInMemoryEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n helpers: InMemoryEventRepositoryHelpers<Event>,\n): { eventQueries: EventQueries<Event> } => ({\n eventQueries: {\n getEvents: async ({ filters, limit }) => {\n const matchesContext = (event: Event): boolean => {\n if (!filters.context) return true;\n if (!event.context) return false;\n\n return Object.entries(filters.context).every(\n ([key, value]) => event.context?.[key] === value,\n );\n };\n\n const matchesOccurredAt = (event: Event): boolean => {\n if (!filters.occurredAt) return true;\n\n const { from, to } = filters.occurredAt;\n const eventTime = event.occurredAt.getTime();\n\n if (from && eventTime < from.getTime()) return false;\n if (to && eventTime > to.getTime()) return false;\n\n return true;\n };\n\n return helpers\n .getAllEvents()\n .filter(\n (event) =>\n filters.statuses.includes(event.status) &&\n matchesContext(event) &&\n matchesOccurredAt(event),\n )\n .slice(0, limit);\n },\n 
},\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIO,MAAM,6BAA6B,CAGxC,aAC2C;AAAA,EAC3C,cAAc;AAAA,IACZ,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,YAAM,iBAAiB,CAAC,UAA0B;AAChD,YAAI,CAAC,QAAQ,QAAS,QAAO;AAC7B,YAAI,CAAC,MAAM,QAAS,QAAO;AAE3B,eAAO,OAAO,QAAQ,QAAQ,OAAO,EAAE;AAAA,UACrC,CAAC,CAAC,KAAK,KAAK,MAAM,MAAM,UAAU,GAAG,MAAM;AAAA,QAC7C;AAAA,MACF;AAEA,YAAM,oBAAoB,CAAC,UAA0B;AACnD,YAAI,CAAC,QAAQ,WAAY,QAAO;AAEhC,cAAM,EAAE,MAAM,GAAG,IAAI,QAAQ;AAC7B,cAAM,YAAY,MAAM,WAAW,QAAQ;AAE3C,YAAI,QAAQ,YAAY,KAAK,QAAQ,EAAG,QAAO;AAC/C,YAAI,MAAM,YAAY,GAAG,QAAQ,EAAG,QAAO;AAE3C,eAAO;AAAA,MACT;AAEA,aAAO,QACJ,aAAa,EACb;AAAA,QACC,CAAC,UACC,QAAQ,SAAS,SAAS,MAAM,MAAM,KACtC,eAAe,KAAK,KACpB,kBAAkB,KAAK;AAAA,MAC3B,EACC,MAAM,GAAG,KAAK;AAAA,IACnB;AAAA,EACF;AACF;","names":[]}
@@ -8,8 +8,16 @@ const createInMemoryEventQueries = (helpers) => ({
8
8
  ([key, value]) => event.context?.[key] === value
9
9
  );
10
10
  };
11
+ const matchesOccurredAt = (event) => {
12
+ if (!filters.occurredAt) return true;
13
+ const { from, to } = filters.occurredAt;
14
+ const eventTime = event.occurredAt.getTime();
15
+ if (from && eventTime < from.getTime()) return false;
16
+ if (to && eventTime > to.getTime()) return false;
17
+ return true;
18
+ };
11
19
  return helpers.getAllEvents().filter(
12
- (event) => filters.statuses.includes(event.status) && matchesContext(event)
20
+ (event) => filters.statuses.includes(event.status) && matchesContext(event) && matchesOccurredAt(event)
13
21
  ).slice(0, limit);
14
22
  }
15
23
  }
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventQueries.ts"],"sourcesContent":["import type { EventQueries } from '../../ports/EventQueries.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\nimport type { InMemoryEventRepositoryHelpers } from './InMemoryEventRepository.ts.mjs';\n\nexport const createInMemoryEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n helpers: InMemoryEventRepositoryHelpers<Event>,\n): { eventQueries: EventQueries<Event> } => ({\n eventQueries: {\n getEvents: async ({ filters, limit }) => {\n const matchesContext = (event: Event): boolean => {\n if (!filters.context) return true;\n if (!event.context) return false;\n\n return Object.entries(filters.context).every(\n ([key, value]) => event.context?.[key] === value,\n );\n };\n\n return helpers\n .getAllEvents()\n .filter(\n (event) =>\n filters.statuses.includes(event.status) && matchesContext(event),\n )\n .slice(0, limit);\n },\n },\n});\n"],"mappings":"AAIO,MAAM,6BAA6B,CAGxC,aAC2C;AAAA,EAC3C,cAAc;AAAA,IACZ,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,YAAM,iBAAiB,CAAC,UAA0B;AAChD,YAAI,CAAC,QAAQ,QAAS,QAAO;AAC7B,YAAI,CAAC,MAAM,QAAS,QAAO;AAE3B,eAAO,OAAO,QAAQ,QAAQ,OAAO,EAAE;AAAA,UACrC,CAAC,CAAC,KAAK,KAAK,MAAM,MAAM,UAAU,GAAG,MAAM;AAAA,QAC7C;AAAA,MACF;AAEA,aAAO,QACJ,aAAa,EACb;AAAA,QACC,CAAC,UACC,QAAQ,SAAS,SAAS,MAAM,MAAM,KAAK,eAAe,KAAK;AAAA,MACnE,EACC,MAAM,GAAG,KAAK;AAAA,IACnB;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventQueries.ts"],"sourcesContent":["import type { EventQueries } from '../../ports/EventQueries.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\nimport type { InMemoryEventRepositoryHelpers } from './InMemoryEventRepository.ts.mjs';\n\nexport const createInMemoryEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n helpers: InMemoryEventRepositoryHelpers<Event>,\n): { eventQueries: EventQueries<Event> } => ({\n eventQueries: {\n getEvents: async ({ filters, limit }) => {\n const matchesContext = (event: Event): boolean => {\n if (!filters.context) return true;\n if (!event.context) return false;\n\n return Object.entries(filters.context).every(\n ([key, value]) => event.context?.[key] === value,\n );\n };\n\n const matchesOccurredAt = (event: Event): boolean => {\n if (!filters.occurredAt) return true;\n\n const { from, to } = filters.occurredAt;\n const eventTime = event.occurredAt.getTime();\n\n if (from && eventTime < from.getTime()) return false;\n if (to && eventTime > to.getTime()) return false;\n\n return true;\n };\n\n return helpers\n .getAllEvents()\n .filter(\n (event) =>\n filters.statuses.includes(event.status) &&\n matchesContext(event) &&\n matchesOccurredAt(event),\n )\n .slice(0, limit);\n },\n 
},\n});\n"],"mappings":"AAIO,MAAM,6BAA6B,CAGxC,aAC2C;AAAA,EAC3C,cAAc;AAAA,IACZ,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,YAAM,iBAAiB,CAAC,UAA0B;AAChD,YAAI,CAAC,QAAQ,QAAS,QAAO;AAC7B,YAAI,CAAC,MAAM,QAAS,QAAO;AAE3B,eAAO,OAAO,QAAQ,QAAQ,OAAO,EAAE;AAAA,UACrC,CAAC,CAAC,KAAK,KAAK,MAAM,MAAM,UAAU,GAAG,MAAM;AAAA,QAC7C;AAAA,MACF;AAEA,YAAM,oBAAoB,CAAC,UAA0B;AACnD,YAAI,CAAC,QAAQ,WAAY,QAAO;AAEhC,cAAM,EAAE,MAAM,GAAG,IAAI,QAAQ;AAC7B,cAAM,YAAY,MAAM,WAAW,QAAQ;AAE3C,YAAI,QAAQ,YAAY,KAAK,QAAQ,EAAG,QAAO;AAC/C,YAAI,MAAM,YAAY,GAAG,QAAQ,EAAG,QAAO;AAE3C,eAAO;AAAA,MACT;AAEA,aAAO,QACJ,aAAa,EACb;AAAA,QACC,CAAC,UACC,QAAQ,SAAS,SAAS,MAAM,MAAM,KACtC,eAAe,KAAK,KACpB,kBAAkB,KAAK;AAAA,MAC3B,EACC,MAAM,GAAG,KAAK;AAAA,IACnB;AAAA,EACF;AACF;","names":[]}
@@ -55,8 +55,10 @@ const createInMemoryEventRepository = () => {
55
55
  };
56
56
  };
57
57
  const createInMemoryWithUow = (eventRepository) => {
58
- const withUow = async (fn) => {
59
- await fn({ eventRepository });
58
+ const withUow = async (fn, options) => {
59
+ const result = await fn({ eventRepository });
60
+ await options?.afterCommit?.();
61
+ return result;
60
62
  };
61
63
  return { withUow };
62
64
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventRepository.ts"],"sourcesContent":["import type {\n EventRepository,\n WithEventsUow,\n} from \"../../ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\n\nexport type InMemoryEventRepositoryHelpers<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = { getAllEvents: () => Event[]; setEvents: (events: Event[]) => void };\n\nexport const createInMemoryEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(): {\n eventRepository: EventRepository<Event>;\n helpers: InMemoryEventRepositoryHelpers<Event>;\n} => {\n const eventById: Record<string, Event> = {};\n\n const eventRepository: EventRepository<Event> = {\n save: async (event) => {\n eventById[event.id] = event;\n },\n saveNewEventsBatch: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n markEventsAsInProcess: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = { ...event, status: \"in-process\" };\n });\n },\n };\n\n return {\n eventRepository,\n helpers: {\n getAllEvents: () => Object.values(eventById),\n setEvents: (events) => {\n Object.keys(eventById).forEach((key) => {\n delete eventById[key];\n });\n\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n },\n };\n};\n\nexport const createInMemoryWithUow = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n eventRepository: EventRepository<Event>,\n): { withUow: WithEventsUow<Event> } => {\n const withUow: WithEventsUow<Event> = async (fn) => {\n await fn({ eventRepository });\n };\n return { withUow 
};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUO,MAAM,gCAAgC,MAKxC;AACH,QAAM,YAAmC,CAAC;AAE1C,QAAM,kBAA0C;AAAA,IAC9C,MAAM,OAAO,UAAU;AACrB,gBAAU,MAAM,EAAE,IAAI;AAAA,IACxB;AAAA,IACA,oBAAoB,OAAO,WAAW;AACpC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,IACA,uBAAuB,OAAO,WAAW;AACvC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,QAAQ,aAAa;AAAA,MACzD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,SAAS;AAAA,MACP,cAAc,MAAM,OAAO,OAAO,SAAS;AAAA,MAC3C,WAAW,CAAC,WAAW;AACrB,eAAO,KAAK,SAAS,EAAE,QAAQ,CAAC,QAAQ;AACtC,iBAAO,UAAU,GAAG;AAAA,QACtB,CAAC;AAED,eAAO,QAAQ,CAAC,UAAU;AACxB,oBAAU,MAAM,EAAE,IAAI;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,CAGnC,oBACsC;AACtC,QAAM,UAAgC,OAAO,OAAO;AAClD,UAAM,GAAG,EAAE,gBAAgB,CAAC;AAAA,EAC9B;AACA,SAAO,EAAE,QAAQ;AACnB;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventRepository.ts"],"sourcesContent":["import type {\n EventRepository,\n WithEventsUow,\n} from \"../../ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\n\nexport type InMemoryEventRepositoryHelpers<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = { getAllEvents: () => Event[]; setEvents: (events: Event[]) => void };\n\nexport const createInMemoryEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(): {\n eventRepository: EventRepository<Event>;\n helpers: InMemoryEventRepositoryHelpers<Event>;\n} => {\n const eventById: Record<string, Event> = {};\n\n const eventRepository: EventRepository<Event> = {\n save: async (event) => {\n eventById[event.id] = event;\n },\n saveNewEventsBatch: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n markEventsAsInProcess: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = { ...event, status: \"in-process\" };\n });\n },\n };\n\n return {\n eventRepository,\n helpers: {\n getAllEvents: () => Object.values(eventById),\n setEvents: (events) => {\n Object.keys(eventById).forEach((key) => {\n delete eventById[key];\n });\n\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n },\n };\n};\n\nexport const createInMemoryWithUow = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n eventRepository: EventRepository<Event>,\n): { withUow: WithEventsUow<Event> } => {\n // In-memory adapter awaits afterCommit for predictable test behavior\n const withUow: WithEventsUow<Event> = async (fn, options) => {\n const result = await fn({ eventRepository });\n await options?.afterCommit?.();\n return result;\n };\n return { withUow 
};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUO,MAAM,gCAAgC,MAKxC;AACH,QAAM,YAAmC,CAAC;AAE1C,QAAM,kBAA0C;AAAA,IAC9C,MAAM,OAAO,UAAU;AACrB,gBAAU,MAAM,EAAE,IAAI;AAAA,IACxB;AAAA,IACA,oBAAoB,OAAO,WAAW;AACpC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,IACA,uBAAuB,OAAO,WAAW;AACvC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,QAAQ,aAAa;AAAA,MACzD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,SAAS;AAAA,MACP,cAAc,MAAM,OAAO,OAAO,SAAS;AAAA,MAC3C,WAAW,CAAC,WAAW;AACrB,eAAO,KAAK,SAAS,EAAE,QAAQ,CAAC,QAAQ;AACtC,iBAAO,UAAU,GAAG;AAAA,QACtB,CAAC;AAED,eAAO,QAAQ,CAAC,UAAU;AACxB,oBAAU,MAAM,EAAE,IAAI;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,CAGnC,oBACsC;AAEtC,QAAM,UAAgC,OAAO,IAAI,YAAY;AAC3D,UAAM,SAAS,MAAM,GAAG,EAAE,gBAAgB,CAAC;AAC3C,UAAM,SAAS,cAAc;AAC7B,WAAO;AAAA,EACT;AACA,SAAO,EAAE,QAAQ;AACnB;","names":[]}
@@ -31,8 +31,10 @@ const createInMemoryEventRepository = () => {
31
31
  };
32
32
  };
33
33
  const createInMemoryWithUow = (eventRepository) => {
34
- const withUow = async (fn) => {
35
- await fn({ eventRepository });
34
+ const withUow = async (fn, options) => {
35
+ const result = await fn({ eventRepository });
36
+ await options?.afterCommit?.();
37
+ return result;
36
38
  };
37
39
  return { withUow };
38
40
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventRepository.ts"],"sourcesContent":["import type {\n EventRepository,\n WithEventsUow,\n} from '../../ports/EventRepository.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\n\nexport type InMemoryEventRepositoryHelpers<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = { getAllEvents: () => Event[]; setEvents: (events: Event[]) => void };\n\nexport const createInMemoryEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(): {\n eventRepository: EventRepository<Event>;\n helpers: InMemoryEventRepositoryHelpers<Event>;\n} => {\n const eventById: Record<string, Event> = {};\n\n const eventRepository: EventRepository<Event> = {\n save: async (event) => {\n eventById[event.id] = event;\n },\n saveNewEventsBatch: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n markEventsAsInProcess: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = { ...event, status: \"in-process\" };\n });\n },\n };\n\n return {\n eventRepository,\n helpers: {\n getAllEvents: () => Object.values(eventById),\n setEvents: (events) => {\n Object.keys(eventById).forEach((key) => {\n delete eventById[key];\n });\n\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n },\n };\n};\n\nexport const createInMemoryWithUow = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n eventRepository: EventRepository<Event>,\n): { withUow: WithEventsUow<Event> } => {\n const withUow: WithEventsUow<Event> = async (fn) => {\n await fn({ eventRepository });\n };\n return { withUow 
};\n};\n"],"mappings":"AAUO,MAAM,gCAAgC,MAKxC;AACH,QAAM,YAAmC,CAAC;AAE1C,QAAM,kBAA0C;AAAA,IAC9C,MAAM,OAAO,UAAU;AACrB,gBAAU,MAAM,EAAE,IAAI;AAAA,IACxB;AAAA,IACA,oBAAoB,OAAO,WAAW;AACpC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,IACA,uBAAuB,OAAO,WAAW;AACvC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,QAAQ,aAAa;AAAA,MACzD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,SAAS;AAAA,MACP,cAAc,MAAM,OAAO,OAAO,SAAS;AAAA,MAC3C,WAAW,CAAC,WAAW;AACrB,eAAO,KAAK,SAAS,EAAE,QAAQ,CAAC,QAAQ;AACtC,iBAAO,UAAU,GAAG;AAAA,QACtB,CAAC;AAED,eAAO,QAAQ,CAAC,UAAU;AACxB,oBAAU,MAAM,EAAE,IAAI;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,CAGnC,oBACsC;AACtC,QAAM,UAAgC,OAAO,OAAO;AAClD,UAAM,GAAG,EAAE,gBAAgB,CAAC;AAAA,EAC9B;AACA,SAAO,EAAE,QAAQ;AACnB;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/in-memory/InMemoryEventRepository.ts"],"sourcesContent":["import type {\n EventRepository,\n WithEventsUow,\n} from '../../ports/EventRepository.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\n\nexport type InMemoryEventRepositoryHelpers<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = { getAllEvents: () => Event[]; setEvents: (events: Event[]) => void };\n\nexport const createInMemoryEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(): {\n eventRepository: EventRepository<Event>;\n helpers: InMemoryEventRepositoryHelpers<Event>;\n} => {\n const eventById: Record<string, Event> = {};\n\n const eventRepository: EventRepository<Event> = {\n save: async (event) => {\n eventById[event.id] = event;\n },\n saveNewEventsBatch: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n markEventsAsInProcess: async (events) => {\n events.forEach((event) => {\n eventById[event.id] = { ...event, status: \"in-process\" };\n });\n },\n };\n\n return {\n eventRepository,\n helpers: {\n getAllEvents: () => Object.values(eventById),\n setEvents: (events) => {\n Object.keys(eventById).forEach((key) => {\n delete eventById[key];\n });\n\n events.forEach((event) => {\n eventById[event.id] = event;\n });\n },\n },\n };\n};\n\nexport const createInMemoryWithUow = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n eventRepository: EventRepository<Event>,\n): { withUow: WithEventsUow<Event> } => {\n // In-memory adapter awaits afterCommit for predictable test behavior\n const withUow: WithEventsUow<Event> = async (fn, options) => {\n const result = await fn({ eventRepository });\n await options?.afterCommit?.();\n return result;\n };\n return { withUow 
};\n};\n"],"mappings":"AAUO,MAAM,gCAAgC,MAKxC;AACH,QAAM,YAAmC,CAAC;AAE1C,QAAM,kBAA0C;AAAA,IAC9C,MAAM,OAAO,UAAU;AACrB,gBAAU,MAAM,EAAE,IAAI;AAAA,IACxB;AAAA,IACA,oBAAoB,OAAO,WAAW;AACpC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,IACA,uBAAuB,OAAO,WAAW;AACvC,aAAO,QAAQ,CAAC,UAAU;AACxB,kBAAU,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,QAAQ,aAAa;AAAA,MACzD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,SAAS;AAAA,MACP,cAAc,MAAM,OAAO,OAAO,SAAS;AAAA,MAC3C,WAAW,CAAC,WAAW;AACrB,eAAO,KAAK,SAAS,EAAE,QAAQ,CAAC,QAAQ;AACtC,iBAAO,UAAU,GAAG;AAAA,QACtB,CAAC;AAED,eAAO,QAAQ,CAAC,UAAU;AACxB,oBAAU,MAAM,EAAE,IAAI;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,CAGnC,oBACsC;AAEtC,QAAM,UAAgC,OAAO,IAAI,YAAY;AAC3D,UAAM,SAAS,MAAM,GAAG,EAAE,gBAAgB,CAAC;AAC3C,UAAM,SAAS,cAAc;AAC7B,WAAO;AAAA,EACT;AACA,SAAO,EAAE,QAAQ;AACnB;","names":[]}
@@ -30,6 +30,12 @@ const createKyselyEventQueries = (db) => ({
30
30
  query = query.where(import_kysely.sql`context->>${key} = ${value}`);
31
31
  }
32
32
  }
33
+ if (filters.occurredAt?.from) {
34
+ query = query.where("occurredAt", ">=", filters.occurredAt.from);
35
+ }
36
+ if (filters.occurredAt?.to) {
37
+ query = query.where("occurredAt", "<=", filters.occurredAt.to);
38
+ }
33
39
  const rows = await query.execute();
34
40
  return rows.map(
35
41
  (row) => ({
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventQueries.ts"],"sourcesContent":["import type { Kysely, SqlBool } from \"kysely\";\nimport { sql } from \"kysely\";\nimport type { EventQueries } from \"../../ports/EventQueries.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { EventsTable } from \"./types.ts\";\n\nexport const createKyselyEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventQueries<Event> => ({\n getEvents: async ({ filters, limit }) => {\n let query = db\n .selectFrom(\"events\")\n .selectAll()\n .where(\"status\", \"in\", filters.statuses)\n .limit(limit);\n\n if (filters.context) {\n for (const [key, value] of Object.entries(filters.context)) {\n query = query.where(sql<SqlBool>`context->>${key} = ${value}`);\n }\n }\n\n const rows = await query.execute();\n return rows.map(\n (row) =>\n ({\n ...row,\n context: row.context ?? undefined,\n priority: row.priority ?? undefined,\n }) as Event,\n );\n },\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAoB;AAKb,MAAM,2BAA2B,CAGtC,QACyB;AAAA,EACzB,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,QAAI,QAAQ,GACT,WAAW,QAAQ,EACnB,UAAU,EACV,MAAM,UAAU,MAAM,QAAQ,QAAQ,EACtC,MAAM,KAAK;AAEd,QAAI,QAAQ,SAAS;AACnB,iBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AAC1D,gBAAQ,MAAM,MAAM,8BAAyB,GAAG,MAAM,KAAK,EAAE;AAAA,MAC/D;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,WAAO,KAAK;AAAA,MACV,CAAC,SACE;AAAA,QACC,GAAG;AAAA,QACH,SAAS,IAAI,WAAW;AAAA,QACxB,UAAU,IAAI,YAAY;AAAA,MAC5B;AAAA,IACJ;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventQueries.ts"],"sourcesContent":["import type { Kysely, SqlBool } from \"kysely\";\nimport { sql } from \"kysely\";\nimport type { EventQueries } from \"../../ports/EventQueries.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { EventsTable } from \"./types.ts\";\n\nexport const createKyselyEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventQueries<Event> => ({\n getEvents: async ({ filters, limit }) => {\n let query = db\n .selectFrom(\"events\")\n .selectAll()\n .where(\"status\", \"in\", filters.statuses)\n .limit(limit);\n\n if (filters.context) {\n for (const [key, value] of Object.entries(filters.context)) {\n query = query.where(sql<SqlBool>`context->>${key} = ${value}`);\n }\n }\n\n if (filters.occurredAt?.from) {\n query = query.where(\"occurredAt\", \">=\", filters.occurredAt.from);\n }\n\n if (filters.occurredAt?.to) {\n query = query.where(\"occurredAt\", \"<=\", filters.occurredAt.to);\n }\n\n const rows = await query.execute();\n return rows.map(\n (row: EventsTable[\"events\"]) =>\n ({\n ...row,\n context: row.context ?? undefined,\n priority: row.priority ?? 
undefined,\n }) as Event,\n );\n },\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAoB;AAKb,MAAM,2BAA2B,CAGtC,QACyB;AAAA,EACzB,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,QAAI,QAAQ,GACT,WAAW,QAAQ,EACnB,UAAU,EACV,MAAM,UAAU,MAAM,QAAQ,QAAQ,EACtC,MAAM,KAAK;AAEd,QAAI,QAAQ,SAAS;AACnB,iBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AAC1D,gBAAQ,MAAM,MAAM,8BAAyB,GAAG,MAAM,KAAK,EAAE;AAAA,MAC/D;AAAA,IACF;AAEA,QAAI,QAAQ,YAAY,MAAM;AAC5B,cAAQ,MAAM,MAAM,cAAc,MAAM,QAAQ,WAAW,IAAI;AAAA,IACjE;AAEA,QAAI,QAAQ,YAAY,IAAI;AAC1B,cAAQ,MAAM,MAAM,cAAc,MAAM,QAAQ,WAAW,EAAE;AAAA,IAC/D;AAEA,UAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,WAAO,KAAK;AAAA,MACV,CAAC,SACE;AAAA,QACC,GAAG;AAAA,QACH,SAAS,IAAI,WAAW;AAAA,QACxB,UAAU,IAAI,YAAY;AAAA,MAC5B;AAAA,IACJ;AAAA,EACF;AACF;","names":[]}
@@ -7,6 +7,12 @@ const createKyselyEventQueries = (db) => ({
7
7
  query = query.where(sql`context->>${key} = ${value}`);
8
8
  }
9
9
  }
10
+ if (filters.occurredAt?.from) {
11
+ query = query.where("occurredAt", ">=", filters.occurredAt.from);
12
+ }
13
+ if (filters.occurredAt?.to) {
14
+ query = query.where("occurredAt", "<=", filters.occurredAt.to);
15
+ }
10
16
  const rows = await query.execute();
11
17
  return rows.map(
12
18
  (row) => ({
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventQueries.ts"],"sourcesContent":["import type { Kysely, SqlBool } from \"kysely\";\nimport { sql } from \"kysely\";\nimport type { EventQueries } from '../../ports/EventQueries.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\nimport type { EventsTable } from './types.ts.mjs';\n\nexport const createKyselyEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventQueries<Event> => ({\n getEvents: async ({ filters, limit }) => {\n let query = db\n .selectFrom(\"events\")\n .selectAll()\n .where(\"status\", \"in\", filters.statuses)\n .limit(limit);\n\n if (filters.context) {\n for (const [key, value] of Object.entries(filters.context)) {\n query = query.where(sql<SqlBool>`context->>${key} = ${value}`);\n }\n }\n\n const rows = await query.execute();\n return rows.map(\n (row) =>\n ({\n ...row,\n context: row.context ?? undefined,\n priority: row.priority ?? undefined,\n }) as Event,\n );\n },\n});\n"],"mappings":"AACA,SAAS,WAAW;AAKb,MAAM,2BAA2B,CAGtC,QACyB;AAAA,EACzB,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,QAAI,QAAQ,GACT,WAAW,QAAQ,EACnB,UAAU,EACV,MAAM,UAAU,MAAM,QAAQ,QAAQ,EACtC,MAAM,KAAK;AAEd,QAAI,QAAQ,SAAS;AACnB,iBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AAC1D,gBAAQ,MAAM,MAAM,gBAAyB,GAAG,MAAM,KAAK,EAAE;AAAA,MAC/D;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,WAAO,KAAK;AAAA,MACV,CAAC,SACE;AAAA,QACC,GAAG;AAAA,QACH,SAAS,IAAI,WAAW;AAAA,QACxB,UAAU,IAAI,YAAY;AAAA,MAC5B;AAAA,IACJ;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventQueries.ts"],"sourcesContent":["import type { Kysely, SqlBool } from \"kysely\";\nimport { sql } from \"kysely\";\nimport type { EventQueries } from '../../ports/EventQueries.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\nimport type { EventsTable } from './types.ts.mjs';\n\nexport const createKyselyEventQueries = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventQueries<Event> => ({\n getEvents: async ({ filters, limit }) => {\n let query = db\n .selectFrom(\"events\")\n .selectAll()\n .where(\"status\", \"in\", filters.statuses)\n .limit(limit);\n\n if (filters.context) {\n for (const [key, value] of Object.entries(filters.context)) {\n query = query.where(sql<SqlBool>`context->>${key} = ${value}`);\n }\n }\n\n if (filters.occurredAt?.from) {\n query = query.where(\"occurredAt\", \">=\", filters.occurredAt.from);\n }\n\n if (filters.occurredAt?.to) {\n query = query.where(\"occurredAt\", \"<=\", filters.occurredAt.to);\n }\n\n const rows = await query.execute();\n return rows.map(\n (row: EventsTable[\"events\"]) =>\n ({\n ...row,\n context: row.context ?? undefined,\n priority: row.priority ?? undefined,\n }) as Event,\n );\n },\n});\n"],"mappings":"AACA,SAAS,WAAW;AAKb,MAAM,2BAA2B,CAGtC,QACyB;AAAA,EACzB,WAAW,OAAO,EAAE,SAAS,MAAM,MAAM;AACvC,QAAI,QAAQ,GACT,WAAW,QAAQ,EACnB,UAAU,EACV,MAAM,UAAU,MAAM,QAAQ,QAAQ,EACtC,MAAM,KAAK;AAEd,QAAI,QAAQ,SAAS;AACnB,iBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AAC1D,gBAAQ,MAAM,MAAM,gBAAyB,GAAG,MAAM,KAAK,EAAE;AAAA,MAC/D;AAAA,IACF;AAEA,QAAI,QAAQ,YAAY,MAAM;AAC5B,cAAQ,MAAM,MAAM,cAAc,MAAM,QAAQ,WAAW,IAAI;AAAA,IACjE;AAEA,QAAI,QAAQ,YAAY,IAAI;AAC1B,cAAQ,MAAM,MAAM,cAAc,MAAM,QAAQ,WAAW,EAAE;AAAA,IAC/D;AAEA,UAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,WAAO,KAAK;AAAA,MACV,CAAC,SACE;AAAA,QACC,GAAG;AAAA,QACH,SAAS,IAAI,WAAW;AAAA,QACxB,UAAU,IAAI,YAAY;AAAA,MAC5B;AAAA,IACJ;AAAA,EACF;AACF;","names":[]}
@@ -43,7 +43,10 @@ const createKyselyEventRepository = (db) => ({
43
43
  markEventsAsInProcess: async (events) => {
44
44
  if (events.length === 0) return;
45
45
  const ids = events.map((e) => e.id);
46
- await db.updateTable("events").set({ status: "in-process" }).where("id", "in", ids).execute();
46
+ const lockedRows = await db.selectFrom("events").select("id").where("id", "in", ids).forUpdate().skipLocked().execute();
47
+ if (lockedRows.length === 0) return;
48
+ const lockedIds = lockedRows.map((r) => r.id);
49
+ await db.updateTable("events").set({ status: "in-process" }).where("id", "in", lockedIds).execute();
47
50
  }
48
51
  });
49
52
  // Annotate the CommonJS export names for ESM import in node:
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventRepository.ts"],"sourcesContent":["import type { Kysely } from \"kysely\";\nimport type { EventRepository } from \"../../ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { EventsTable } from \"./types.ts\";\n\nexport const createKyselyEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventRepository<Event> => ({\n save: async (event) => {\n await db\n .insertInto(\"events\")\n .values(event)\n .onConflict((oc) =>\n oc.column(\"id\").doUpdateSet({\n topic: event.topic,\n payload: event.payload,\n context: event.context,\n status: event.status,\n triggeredByUserId: event.triggeredByUserId,\n occurredAt: event.occurredAt,\n publications: event.publications,\n priority: event.priority,\n }),\n )\n .execute();\n },\n\n saveNewEventsBatch: async (events) => {\n if (events.length === 0) return;\n await db.insertInto(\"events\").values(events).execute();\n },\n\n markEventsAsInProcess: async (events) => {\n if (events.length === 0) return;\n const ids = events.map((e) => e.id);\n await db\n .updateTable(\"events\")\n .set({ status: \"in-process\" })\n .where(\"id\", \"in\", ids)\n .execute();\n 
},\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAKO,MAAM,8BAA8B,CAGzC,QAC4B;AAAA,EAC5B,MAAM,OAAO,UAAU;AACrB,UAAM,GACH,WAAW,QAAQ,EACnB,OAAO,KAAK,EACZ;AAAA,MAAW,CAAC,OACX,GAAG,OAAO,IAAI,EAAE,YAAY;AAAA,QAC1B,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,SAAS,MAAM;AAAA,QACf,QAAQ,MAAM;AAAA,QACd,mBAAmB,MAAM;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,cAAc,MAAM;AAAA,QACpB,UAAU,MAAM;AAAA,MAClB,CAAC;AAAA,IACH,EACC,QAAQ;AAAA,EACb;AAAA,EAEA,oBAAoB,OAAO,WAAW;AACpC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,GAAG,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,QAAQ;AAAA,EACvD;AAAA,EAEA,uBAAuB,OAAO,WAAW;AACvC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,MAAM,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE;AAClC,UAAM,GACH,YAAY,QAAQ,EACpB,IAAI,EAAE,QAAQ,aAAa,CAAC,EAC5B,MAAM,MAAM,MAAM,GAAG,EACrB,QAAQ;AAAA,EACb;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventRepository.ts"],"sourcesContent":["import type { Kysely } from \"kysely\";\nimport type { EventRepository } from \"../../ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { EventsTable } from \"./types.ts\";\n\nexport const createKyselyEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventRepository<Event> => ({\n save: async (event) => {\n await db\n .insertInto(\"events\")\n .values(event)\n .onConflict((oc) =>\n oc.column(\"id\").doUpdateSet({\n topic: event.topic,\n payload: event.payload,\n context: event.context,\n status: event.status,\n triggeredByUserId: event.triggeredByUserId,\n occurredAt: event.occurredAt,\n publications: event.publications,\n priority: event.priority,\n }),\n )\n .execute();\n },\n\n saveNewEventsBatch: async (events) => {\n if (events.length === 0) return;\n await db.insertInto(\"events\").values(events).execute();\n },\n\n markEventsAsInProcess: async (events) => {\n if (events.length === 0) return;\n const ids = events.map((e) => e.id);\n\n // Lock the rows to prevent concurrent processing\n const lockedRows = await db\n .selectFrom(\"events\")\n .select(\"id\")\n .where(\"id\", \"in\", ids)\n .forUpdate()\n .skipLocked()\n .execute();\n\n if (lockedRows.length === 0) return;\n const lockedIds = lockedRows.map((r) => r.id);\n\n // Update status to in-process (only for locked rows)\n await db\n .updateTable(\"events\")\n .set({ status: \"in-process\" })\n .where(\"id\", \"in\", lockedIds)\n .execute();\n 
},\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAKO,MAAM,8BAA8B,CAGzC,QAC4B;AAAA,EAC5B,MAAM,OAAO,UAAU;AACrB,UAAM,GACH,WAAW,QAAQ,EACnB,OAAO,KAAK,EACZ;AAAA,MAAW,CAAC,OACX,GAAG,OAAO,IAAI,EAAE,YAAY;AAAA,QAC1B,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,SAAS,MAAM;AAAA,QACf,QAAQ,MAAM;AAAA,QACd,mBAAmB,MAAM;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,cAAc,MAAM;AAAA,QACpB,UAAU,MAAM;AAAA,MAClB,CAAC;AAAA,IACH,EACC,QAAQ;AAAA,EACb;AAAA,EAEA,oBAAoB,OAAO,WAAW;AACpC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,GAAG,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,QAAQ;AAAA,EACvD;AAAA,EAEA,uBAAuB,OAAO,WAAW;AACvC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,MAAM,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE;AAGlC,UAAM,aAAa,MAAM,GACtB,WAAW,QAAQ,EACnB,OAAO,IAAI,EACX,MAAM,MAAM,MAAM,GAAG,EACrB,UAAU,EACV,WAAW,EACX,QAAQ;AAEX,QAAI,WAAW,WAAW,EAAG;AAC7B,UAAM,YAAY,WAAW,IAAI,CAAC,MAAM,EAAE,EAAE;AAG5C,UAAM,GACH,YAAY,QAAQ,EACpB,IAAI,EAAE,QAAQ,aAAa,CAAC,EAC5B,MAAM,MAAM,MAAM,SAAS,EAC3B,QAAQ;AAAA,EACb;AACF;","names":[]}
@@ -20,7 +20,10 @@ const createKyselyEventRepository = (db) => ({
20
20
  markEventsAsInProcess: async (events) => {
21
21
  if (events.length === 0) return;
22
22
  const ids = events.map((e) => e.id);
23
- await db.updateTable("events").set({ status: "in-process" }).where("id", "in", ids).execute();
23
+ const lockedRows = await db.selectFrom("events").select("id").where("id", "in", ids).forUpdate().skipLocked().execute();
24
+ if (lockedRows.length === 0) return;
25
+ const lockedIds = lockedRows.map((r) => r.id);
26
+ await db.updateTable("events").set({ status: "in-process" }).where("id", "in", lockedIds).execute();
24
27
  }
25
28
  });
26
29
  export {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventRepository.ts"],"sourcesContent":["import type { Kysely } from \"kysely\";\nimport type { EventRepository } from '../../ports/EventRepository.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\nimport type { EventsTable } from './types.ts.mjs';\n\nexport const createKyselyEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventRepository<Event> => ({\n save: async (event) => {\n await db\n .insertInto(\"events\")\n .values(event)\n .onConflict((oc) =>\n oc.column(\"id\").doUpdateSet({\n topic: event.topic,\n payload: event.payload,\n context: event.context,\n status: event.status,\n triggeredByUserId: event.triggeredByUserId,\n occurredAt: event.occurredAt,\n publications: event.publications,\n priority: event.priority,\n }),\n )\n .execute();\n },\n\n saveNewEventsBatch: async (events) => {\n if (events.length === 0) return;\n await db.insertInto(\"events\").values(events).execute();\n },\n\n markEventsAsInProcess: async (events) => {\n if (events.length === 0) return;\n const ids = events.map((e) => e.id);\n await db\n .updateTable(\"events\")\n .set({ status: \"in-process\" })\n .where(\"id\", \"in\", ids)\n .execute();\n 
},\n});\n"],"mappings":"AAKO,MAAM,8BAA8B,CAGzC,QAC4B;AAAA,EAC5B,MAAM,OAAO,UAAU;AACrB,UAAM,GACH,WAAW,QAAQ,EACnB,OAAO,KAAK,EACZ;AAAA,MAAW,CAAC,OACX,GAAG,OAAO,IAAI,EAAE,YAAY;AAAA,QAC1B,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,SAAS,MAAM;AAAA,QACf,QAAQ,MAAM;AAAA,QACd,mBAAmB,MAAM;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,cAAc,MAAM;AAAA,QACpB,UAAU,MAAM;AAAA,MAClB,CAAC;AAAA,IACH,EACC,QAAQ;AAAA,EACb;AAAA,EAEA,oBAAoB,OAAO,WAAW;AACpC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,GAAG,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,QAAQ;AAAA,EACvD;AAAA,EAEA,uBAAuB,OAAO,WAAW;AACvC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,MAAM,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE;AAClC,UAAM,GACH,YAAY,QAAQ,EACpB,IAAI,EAAE,QAAQ,aAAa,CAAC,EAC5B,MAAM,MAAM,MAAM,GAAG,EACrB,QAAQ;AAAA,EACb;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/adapters/kysely/KyselyEventRepository.ts"],"sourcesContent":["import type { Kysely } from \"kysely\";\nimport type { EventRepository } from '../../ports/EventRepository.ts.mjs';\nimport type { DefaultContext, GenericEvent } from '../../types.ts.mjs';\nimport type { EventsTable } from './types.ts.mjs';\n\nexport const createKyselyEventRepository = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n db: Kysely<EventsTable>,\n): EventRepository<Event> => ({\n save: async (event) => {\n await db\n .insertInto(\"events\")\n .values(event)\n .onConflict((oc) =>\n oc.column(\"id\").doUpdateSet({\n topic: event.topic,\n payload: event.payload,\n context: event.context,\n status: event.status,\n triggeredByUserId: event.triggeredByUserId,\n occurredAt: event.occurredAt,\n publications: event.publications,\n priority: event.priority,\n }),\n )\n .execute();\n },\n\n saveNewEventsBatch: async (events) => {\n if (events.length === 0) return;\n await db.insertInto(\"events\").values(events).execute();\n },\n\n markEventsAsInProcess: async (events) => {\n if (events.length === 0) return;\n const ids = events.map((e) => e.id);\n\n // Lock the rows to prevent concurrent processing\n const lockedRows = await db\n .selectFrom(\"events\")\n .select(\"id\")\n .where(\"id\", \"in\", ids)\n .forUpdate()\n .skipLocked()\n .execute();\n\n if (lockedRows.length === 0) return;\n const lockedIds = lockedRows.map((r) => r.id);\n\n // Update status to in-process (only for locked rows)\n await db\n .updateTable(\"events\")\n .set({ status: \"in-process\" })\n .where(\"id\", \"in\", lockedIds)\n .execute();\n 
},\n});\n"],"mappings":"AAKO,MAAM,8BAA8B,CAGzC,QAC4B;AAAA,EAC5B,MAAM,OAAO,UAAU;AACrB,UAAM,GACH,WAAW,QAAQ,EACnB,OAAO,KAAK,EACZ;AAAA,MAAW,CAAC,OACX,GAAG,OAAO,IAAI,EAAE,YAAY;AAAA,QAC1B,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,SAAS,MAAM;AAAA,QACf,QAAQ,MAAM;AAAA,QACd,mBAAmB,MAAM;AAAA,QACzB,YAAY,MAAM;AAAA,QAClB,cAAc,MAAM;AAAA,QACpB,UAAU,MAAM;AAAA,MAClB,CAAC;AAAA,IACH,EACC,QAAQ;AAAA,EACb;AAAA,EAEA,oBAAoB,OAAO,WAAW;AACpC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,GAAG,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,QAAQ;AAAA,EACvD;AAAA,EAEA,uBAAuB,OAAO,WAAW;AACvC,QAAI,OAAO,WAAW,EAAG;AACzB,UAAM,MAAM,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE;AAGlC,UAAM,aAAa,MAAM,GACtB,WAAW,QAAQ,EACnB,OAAO,IAAI,EACX,MAAM,MAAM,MAAM,GAAG,EACrB,UAAU,EACV,WAAW,EACX,QAAQ;AAEX,QAAI,WAAW,WAAW,EAAG;AAC7B,UAAM,YAAY,WAAW,IAAI,CAAC,MAAM,EAAE,EAAE;AAG5C,UAAM,GACH,YAAY,QAAQ,EACpB,IAAI,EAAE,QAAQ,aAAa,CAAC,EAC5B,MAAM,MAAM,MAAM,SAAS,EAC3B,QAAQ;AAAA,EACb;AACF;","names":[]}
@@ -56,13 +56,27 @@ const createEventCrawler = ({
56
56
  await publishEventsInParallel(events);
57
57
  };
58
58
  const retryFailedEvents = async () => {
59
+ const oneMinuteAgo = new Date(Date.now() - 6e4);
59
60
  const events = await eventQueries.getEvents({
60
- filters: { statuses: ["to-republish", "failed-but-will-retry"] },
61
+ filters: {
62
+ statuses: ["to-republish", "failed-but-will-retry"],
63
+ occurredAt: { to: oneMinuteAgo }
64
+ },
61
65
  limit: batchSize
62
66
  });
63
67
  if (events.length === 0) return;
64
68
  await publishEventsInParallel(events);
65
69
  };
70
+ const triggerProcessing = async () => {
71
+ const results = await Promise.allSettled([
72
+ processNewEvents(),
73
+ retryFailedEvents()
74
+ ]);
75
+ const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
76
+ if (errors.length > 0) {
77
+ throw new AggregateError(errors, "Event processing failed");
78
+ }
79
+ };
66
80
  const start = () => {
67
81
  const scheduleProcessNewEvents = () => {
68
82
  setTimeout(async () => {
@@ -92,6 +106,7 @@ const createEventCrawler = ({
92
106
  return {
93
107
  processNewEvents,
94
108
  retryFailedEvents,
109
+ triggerProcessing,
95
110
  start
96
111
  };
97
112
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from \"./ports/EventBus.ts\";\nimport type { EventQueries } from \"./ports/EventQueries.ts\";\nimport type { WithEventsUow } from \"./ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"./types.ts\";\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\nconst splitIntoChunks = <T>(array: T[], chunkSize: number): T[][] => {\n const chunks: T[][] = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n chunks.push(array.slice(i, i + chunkSize));\n }\n return chunks;\n};\n\n/**\n * Creates a background event crawler that processes and publishes events.\n *\n * The crawler runs two loops:\n * 1. Process new events: polls for \"never-published\" events and publishes them\n * 2. 
Retry failed events: polls for failed events and retries them\n *\n * @returns Object with:\n * - `start()`: Start the background polling loops\n * - `processNewEvents()`: Manually trigger new event processing\n * - `retryFailedEvents()`: Manually trigger failed event retry\n *\n * @example\n * ```typescript\n * const crawler = createEventCrawler({\n * withUow,\n * eventQueries,\n * eventBus,\n * options: { batchSize: 50, newEventsIntervalMs: 5000 },\n * });\n *\n * // Start background processing\n * crawler.start();\n *\n * // Or trigger manually (useful for testing)\n * await crawler.processNewEvents();\n * ```\n */\nexport const createEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = async (events: Event[]) => {\n const eventChunks = splitIntoChunks(events, maxParallelProcessing);\n for (const chunk of eventChunks) {\n await Promise.all(chunk.map((event) => eventBus.publish(event)));\n }\n };\n\n const processNewEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await withUow(async (uow) => {\n await uow.eventRepository.markEventsAsInProcess(events);\n });\n\n await publishEventsInParallel(events);\n };\n\n const retryFailedEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"to-republish\", \"failed-but-will-retry\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await publishEventsInParallel(events);\n };\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await processNewEvents();\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await retryFailedEvents();\n } catch (error) {\n console.error(\"Error retrying failed events:\", error);\n } finally {\n scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n start,\n 
};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,MAAM,kBAAkB,CAAI,OAAY,cAA6B;AACnE,QAAM,SAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC3C;AACA,SAAO;AACT;AA8BO,MAAM,qBAAqB,CAEhC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,OAAO,WAAoB;AACzD,UAAM,cAAc,gBAAgB,QAAQ,qBAAqB;AACjE,eAAW,SAAS,aAAa;AAC/B,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,mBAAmB,YAA2B;AAClD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,QAAQ,OAAO,QAAQ;AAC3B,YAAM,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IACxD,CAAC;AAED,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AACnD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,gBAAgB,uBAAuB,EAAE;AAAA,MAC/D,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,iBAAiB;AAAA,QACzB,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,kBAAkB;AAAA,QAC1B,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from \"./ports/EventBus.ts\";\nimport type { EventQueries } from \"./ports/EventQueries.ts\";\nimport type { WithEventsUow } from \"./ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"./types.ts\";\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\nconst splitIntoChunks = <T>(array: T[], chunkSize: number): T[][] => {\n const chunks: T[][] = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n chunks.push(array.slice(i, i + chunkSize));\n }\n return chunks;\n};\n\n/**\n * Creates a background event crawler that processes and publishes events.\n *\n * The crawler runs two loops:\n * 1. Process new events: polls for \"never-published\" events and publishes them\n * 2. 
Retry failed events: polls for failed events and retries them\n *\n * @returns Object with:\n * - `start()`: Start the background polling loops (for traditional server environments)\n * - `processNewEvents()`: Manually trigger new event processing\n * - `retryFailedEvents()`: Manually trigger failed event retry\n * - `triggerProcessing()`: Process both new and failed events (for serverless environments)\n *\n * @example\n * ```typescript\n * const crawler = createEventCrawler({\n * withUow,\n * eventQueries,\n * eventBus,\n * options: { batchSize: 50, newEventsIntervalMs: 5000 },\n * });\n *\n * // Traditional server mode: Start background processing\n * crawler.start();\n *\n * // Serverless mode: Trigger on-demand after saving events\n * await withUow(async (uow) => {\n * await uow.eventRepository.save(event);\n * }, {\n * afterCommit: () => {\n * crawler.triggerProcessing().catch(console.error);\n * }\n * });\n *\n * // Or trigger manually (useful for testing)\n * await crawler.processNewEvents();\n * ```\n */\nexport const createEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = async (events: Event[]) => {\n const eventChunks = splitIntoChunks(events, maxParallelProcessing);\n for (const chunk of eventChunks) {\n await Promise.all(chunk.map((event) => eventBus.publish(event)));\n }\n };\n\n const processNewEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await withUow(async (uow) => {\n await uow.eventRepository.markEventsAsInProcess(events);\n });\n\n await publishEventsInParallel(events);\n };\n\n const retryFailedEvents = async (): Promise<void> => {\n const oneMinuteAgo = new Date(Date.now() - 60_000);\n\n const events = await eventQueries.getEvents({\n filters: {\n statuses: [\"to-republish\", \"failed-but-will-retry\"],\n occurredAt: { to: oneMinuteAgo },\n },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await publishEventsInParallel(events);\n };\n\n const triggerProcessing = async (): Promise<void> => {\n // Use Promise.allSettled to ensure both processing steps run independently\n // If processNewEvents fails, retryFailedEvents will still execute\n const results = await Promise.allSettled([\n processNewEvents(),\n retryFailedEvents(),\n ]);\n\n // Re-throw if both failed\n const errors = results\n .filter((r) => r.status === \"rejected\")\n .map((r) => (r as PromiseRejectedResult).reason);\n\n if (errors.length > 0) {\n throw new AggregateError(errors, \"Event processing failed\");\n }\n };\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await processNewEvents();\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await retryFailedEvents();\n } catch (error) {\n 
console.error(\"Error retrying failed events:\", error);\n } finally {\n scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n triggerProcessing,\n start,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,MAAM,kBAAkB,CAAI,OAAY,cAA6B;AACnE,QAAM,SAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC3C;AACA,SAAO;AACT;AAwCO,MAAM,qBAAqB,CAEhC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,OAAO,WAAoB;AACzD,UAAM,cAAc,gBAAgB,QAAQ,qBAAqB;AACjE,eAAW,SAAS,aAAa;AAC/B,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,mBAAmB,YAA2B;AAClD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,QAAQ,OAAO,QAAQ;AAC3B,YAAM,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IACxD,CAAC;AAED,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AACnD,UAAM,eAAe,IAAI,KAAK,KAAK,IAAI,IAAI,GAAM;AAEjD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS;AAAA,QACP,UAAU,CAAC,gBAAgB,uBAAuB;AAAA,QAClD,YAAY,EAAE,IAAI,aAAa;AAAA,MACjC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AAGnD,UAAM,UAAU,MAAM,QAAQ,WAAW;AAAA,MACvC,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AAGD,UAAM,SAAS,QACZ,OAAO,CAAC,MAAM,EAAE,WAAW,UAAU,EACrC,IAAI,CAAC,MAAO,EAA4B,MAAM;AAEjD,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,IAAI,eAAe,QAAQ,yBAAyB;AAAA,IAC5D;AAAA,EACF;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,iBAAiB;AAAA,QACzB,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,kBAAkB;AAAA
,QAC1B,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
@@ -22,9 +22,10 @@ type CreateEventCrawlerOptions = {
22
22
  * 2. Retry failed events: polls for failed events and retries them
23
23
  *
24
24
  * @returns Object with:
25
- * - `start()`: Start the background polling loops
25
+ * - `start()`: Start the background polling loops (for traditional server environments)
26
26
  * - `processNewEvents()`: Manually trigger new event processing
27
27
  * - `retryFailedEvents()`: Manually trigger failed event retry
28
+ * - `triggerProcessing()`: Process both new and failed events (for serverless environments)
28
29
  *
29
30
  * @example
30
31
  * ```typescript
@@ -35,9 +36,18 @@ type CreateEventCrawlerOptions = {
35
36
  * options: { batchSize: 50, newEventsIntervalMs: 5000 },
36
37
  * });
37
38
  *
38
- * // Start background processing
39
+ * // Traditional server mode: Start background processing
39
40
  * crawler.start();
40
41
  *
42
+ * // Serverless mode: Trigger on-demand after saving events
43
+ * await withUow(async (uow) => {
44
+ * await uow.eventRepository.save(event);
45
+ * }, {
46
+ * afterCommit: () => {
47
+ * crawler.triggerProcessing().catch(console.error);
48
+ * }
49
+ * });
50
+ *
41
51
  * // Or trigger manually (useful for testing)
42
52
  * await crawler.processNewEvents();
43
53
  * ```
@@ -50,6 +60,7 @@ declare const createEventCrawler: <Event extends GenericEvent<string, unknown, D
50
60
  }) => {
51
61
  processNewEvents: () => Promise<void>;
52
62
  retryFailedEvents: () => Promise<void>;
63
+ triggerProcessing: () => Promise<void>;
53
64
  start: () => void;
54
65
  };
55
66
 
@@ -22,9 +22,10 @@ type CreateEventCrawlerOptions = {
22
22
  * 2. Retry failed events: polls for failed events and retries them
23
23
  *
24
24
  * @returns Object with:
25
- * - `start()`: Start the background polling loops
25
+ * - `start()`: Start the background polling loops (for traditional server environments)
26
26
  * - `processNewEvents()`: Manually trigger new event processing
27
27
  * - `retryFailedEvents()`: Manually trigger failed event retry
28
+ * - `triggerProcessing()`: Process both new and failed events (for serverless environments)
28
29
  *
29
30
  * @example
30
31
  * ```typescript
@@ -35,9 +36,18 @@ type CreateEventCrawlerOptions = {
35
36
  * options: { batchSize: 50, newEventsIntervalMs: 5000 },
36
37
  * });
37
38
  *
38
- * // Start background processing
39
+ * // Traditional server mode: Start background processing
39
40
  * crawler.start();
40
41
  *
42
+ * // Serverless mode: Trigger on-demand after saving events
43
+ * await withUow(async (uow) => {
44
+ * await uow.eventRepository.save(event);
45
+ * }, {
46
+ * afterCommit: () => {
47
+ * crawler.triggerProcessing().catch(console.error);
48
+ * }
49
+ * });
50
+ *
41
51
  * // Or trigger manually (useful for testing)
42
52
  * await crawler.processNewEvents();
43
53
  * ```
@@ -50,6 +60,7 @@ declare const createEventCrawler: <Event extends GenericEvent<string, unknown, D
50
60
  }) => {
51
61
  processNewEvents: () => Promise<void>;
52
62
  retryFailedEvents: () => Promise<void>;
63
+ triggerProcessing: () => Promise<void>;
53
64
  start: () => void;
54
65
  };
55
66
 
@@ -33,13 +33,27 @@ const createEventCrawler = ({
33
33
  await publishEventsInParallel(events);
34
34
  };
35
35
  const retryFailedEvents = async () => {
36
+ const oneMinuteAgo = new Date(Date.now() - 6e4);
36
37
  const events = await eventQueries.getEvents({
37
- filters: { statuses: ["to-republish", "failed-but-will-retry"] },
38
+ filters: {
39
+ statuses: ["to-republish", "failed-but-will-retry"],
40
+ occurredAt: { to: oneMinuteAgo }
41
+ },
38
42
  limit: batchSize
39
43
  });
40
44
  if (events.length === 0) return;
41
45
  await publishEventsInParallel(events);
42
46
  };
47
+ const triggerProcessing = async () => {
48
+ const results = await Promise.allSettled([
49
+ processNewEvents(),
50
+ retryFailedEvents()
51
+ ]);
52
+ const errors = results.filter((r) => r.status === "rejected").map((r) => r.reason);
53
+ if (errors.length > 0) {
54
+ throw new AggregateError(errors, "Event processing failed");
55
+ }
56
+ };
43
57
  const start = () => {
44
58
  const scheduleProcessNewEvents = () => {
45
59
  setTimeout(async () => {
@@ -69,6 +83,7 @@ const createEventCrawler = ({
69
83
  return {
70
84
  processNewEvents,
71
85
  retryFailedEvents,
86
+ triggerProcessing,
72
87
  start
73
88
  };
74
89
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from './ports/EventBus.ts.mjs';\nimport type { EventQueries } from './ports/EventQueries.ts.mjs';\nimport type { WithEventsUow } from './ports/EventRepository.ts.mjs';\nimport type { DefaultContext, GenericEvent } from './types.ts.mjs';\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\nconst splitIntoChunks = <T>(array: T[], chunkSize: number): T[][] => {\n const chunks: T[][] = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n chunks.push(array.slice(i, i + chunkSize));\n }\n return chunks;\n};\n\n/**\n * Creates a background event crawler that processes and publishes events.\n *\n * The crawler runs two loops:\n * 1. Process new events: polls for \"never-published\" events and publishes them\n * 2. 
Retry failed events: polls for failed events and retries them\n *\n * @returns Object with:\n * - `start()`: Start the background polling loops\n * - `processNewEvents()`: Manually trigger new event processing\n * - `retryFailedEvents()`: Manually trigger failed event retry\n *\n * @example\n * ```typescript\n * const crawler = createEventCrawler({\n * withUow,\n * eventQueries,\n * eventBus,\n * options: { batchSize: 50, newEventsIntervalMs: 5000 },\n * });\n *\n * // Start background processing\n * crawler.start();\n *\n * // Or trigger manually (useful for testing)\n * await crawler.processNewEvents();\n * ```\n */\nexport const createEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = async (events: Event[]) => {\n const eventChunks = splitIntoChunks(events, maxParallelProcessing);\n for (const chunk of eventChunks) {\n await Promise.all(chunk.map((event) => eventBus.publish(event)));\n }\n };\n\n const processNewEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await withUow(async (uow) => {\n await uow.eventRepository.markEventsAsInProcess(events);\n });\n\n await publishEventsInParallel(events);\n };\n\n const retryFailedEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"to-republish\", \"failed-but-will-retry\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await publishEventsInParallel(events);\n };\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await processNewEvents();\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await retryFailedEvents();\n } catch (error) {\n console.error(\"Error retrying failed events:\", error);\n } finally {\n scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n start,\n 
};\n};\n"],"mappings":"AAiBA,MAAM,kBAAkB,CAAI,OAAY,cAA6B;AACnE,QAAM,SAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC3C;AACA,SAAO;AACT;AA8BO,MAAM,qBAAqB,CAEhC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,OAAO,WAAoB;AACzD,UAAM,cAAc,gBAAgB,QAAQ,qBAAqB;AACjE,eAAW,SAAS,aAAa;AAC/B,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,mBAAmB,YAA2B;AAClD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,QAAQ,OAAO,QAAQ;AAC3B,YAAM,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IACxD,CAAC;AAED,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AACnD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,gBAAgB,uBAAuB,EAAE;AAAA,MAC/D,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,iBAAiB;AAAA,QACzB,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,kBAAkB;AAAA,QAC1B,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from './ports/EventBus.ts.mjs';\nimport type { EventQueries } from './ports/EventQueries.ts.mjs';\nimport type { WithEventsUow } from './ports/EventRepository.ts.mjs';\nimport type { DefaultContext, GenericEvent } from './types.ts.mjs';\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\nconst splitIntoChunks = <T>(array: T[], chunkSize: number): T[][] => {\n const chunks: T[][] = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n chunks.push(array.slice(i, i + chunkSize));\n }\n return chunks;\n};\n\n/**\n * Creates a background event crawler that processes and publishes events.\n *\n * The crawler runs two loops:\n * 1. Process new events: polls for \"never-published\" events and publishes them\n * 2. 
Retry failed events: polls for failed events and retries them\n *\n * @returns Object with:\n * - `start()`: Start the background polling loops (for traditional server environments)\n * - `processNewEvents()`: Manually trigger new event processing\n * - `retryFailedEvents()`: Manually trigger failed event retry\n * - `triggerProcessing()`: Process both new and failed events (for serverless environments)\n *\n * @example\n * ```typescript\n * const crawler = createEventCrawler({\n * withUow,\n * eventQueries,\n * eventBus,\n * options: { batchSize: 50, newEventsIntervalMs: 5000 },\n * });\n *\n * // Traditional server mode: Start background processing\n * crawler.start();\n *\n * // Serverless mode: Trigger on-demand after saving events\n * await withUow(async (uow) => {\n * await uow.eventRepository.save(event);\n * }, {\n * afterCommit: () => {\n * crawler.triggerProcessing().catch(console.error);\n * }\n * });\n *\n * // Or trigger manually (useful for testing)\n * await crawler.processNewEvents();\n * ```\n */\nexport const createEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = async (events: Event[]) => {\n const eventChunks = splitIntoChunks(events, maxParallelProcessing);\n for (const chunk of eventChunks) {\n await Promise.all(chunk.map((event) => eventBus.publish(event)));\n }\n };\n\n const processNewEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await withUow(async (uow) => {\n await uow.eventRepository.markEventsAsInProcess(events);\n });\n\n await publishEventsInParallel(events);\n };\n\n const retryFailedEvents = async (): Promise<void> => {\n const oneMinuteAgo = new Date(Date.now() - 60_000);\n\n const events = await eventQueries.getEvents({\n filters: {\n statuses: [\"to-republish\", \"failed-but-will-retry\"],\n occurredAt: { to: oneMinuteAgo },\n },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await publishEventsInParallel(events);\n };\n\n const triggerProcessing = async (): Promise<void> => {\n // Use Promise.allSettled to ensure both processing steps run independently\n // If processNewEvents fails, retryFailedEvents will still execute\n const results = await Promise.allSettled([\n processNewEvents(),\n retryFailedEvents(),\n ]);\n\n // Re-throw if both failed\n const errors = results\n .filter((r) => r.status === \"rejected\")\n .map((r) => (r as PromiseRejectedResult).reason);\n\n if (errors.length > 0) {\n throw new AggregateError(errors, \"Event processing failed\");\n }\n };\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await processNewEvents();\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await retryFailedEvents();\n } catch (error) {\n 
console.error(\"Error retrying failed events:\", error);\n } finally {\n scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n triggerProcessing,\n start,\n };\n};\n"],"mappings":"AAiBA,MAAM,kBAAkB,CAAI,OAAY,cAA6B;AACnE,QAAM,SAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,WAAO,KAAK,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC3C;AACA,SAAO;AACT;AAwCO,MAAM,qBAAqB,CAEhC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,OAAO,WAAoB;AACzD,UAAM,cAAc,gBAAgB,QAAQ,qBAAqB;AACjE,eAAW,SAAS,aAAa;AAC/B,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,mBAAmB,YAA2B;AAClD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,QAAQ,OAAO,QAAQ;AAC3B,YAAM,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IACxD,CAAC;AAED,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AACnD,UAAM,eAAe,IAAI,KAAK,KAAK,IAAI,IAAI,GAAM;AAEjD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS;AAAA,QACP,UAAU,CAAC,gBAAgB,uBAAuB;AAAA,QAClD,YAAY,EAAE,IAAI,aAAa;AAAA,MACjC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AAGnD,UAAM,UAAU,MAAM,QAAQ,WAAW;AAAA,MACvC,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AAGD,UAAM,SAAS,QACZ,OAAO,CAAC,MAAM,EAAE,WAAW,UAAU,EACrC,IAAI,CAAC,MAAO,EAA4B,MAAM;AAEjD,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,IAAI,eAAe,QAAQ,yBAAyB;AAAA,IAC5D;AAAA,EACF;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,iBAAiB;AAAA,QACzB,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,kBAAkB;AAAA,QAC1B,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAA
K;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
package/dist/index.d.cts CHANGED
@@ -3,7 +3,7 @@ export { createEventCrawler } from './createEventCrawler.cjs';
3
3
  export { CreateNewEvent, makeCreateNewEvent } from './createNewEvent.cjs';
4
4
  export { EventBus } from './ports/EventBus.cjs';
5
5
  export { EventQueries } from './ports/EventQueries.cjs';
6
- export { EventRepository, EventsUnitOfWork, WithEventsUow } from './ports/EventRepository.cjs';
6
+ export { EventRepository, EventsUnitOfWork, WithEventsUow, WithEventsUowOptions } from './ports/EventRepository.cjs';
7
7
  export { DefaultContext, EventFailure, EventId, EventPublication, EventStatus, Flavor, GenericEvent, SubscriptionId, UserId } from './types.cjs';
8
8
  export { createInMemoryEventBus } from './adapters/in-memory/InMemoryEventBus.cjs';
9
9
  export { createInMemoryEventQueries } from './adapters/in-memory/InMemoryEventQueries.cjs';
package/dist/index.d.ts CHANGED
@@ -3,7 +3,7 @@ export { createEventCrawler } from './createEventCrawler.js';
3
3
  export { CreateNewEvent, makeCreateNewEvent } from './createNewEvent.js';
4
4
  export { EventBus } from './ports/EventBus.js';
5
5
  export { EventQueries } from './ports/EventQueries.js';
6
- export { EventRepository, EventsUnitOfWork, WithEventsUow } from './ports/EventRepository.js';
6
+ export { EventRepository, EventsUnitOfWork, WithEventsUow, WithEventsUowOptions } from './ports/EventRepository.js';
7
7
  export { DefaultContext, EventFailure, EventId, EventPublication, EventStatus, Flavor, GenericEvent, SubscriptionId, UserId } from './types.js';
8
8
  export { createInMemoryEventBus } from './adapters/in-memory/InMemoryEventBus.js';
9
9
  export { createInMemoryEventQueries } from './adapters/in-memory/InMemoryEventQueries.js';
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/ports/EventQueries.ts"],"sourcesContent":["import type { DefaultContext, EventStatus, GenericEvent } from \"../types.ts\";\n\n/** Parameters for querying events. */\ntype GetEventsParams = {\n filters: {\n /** Filter by event status (e.g., [\"never-published\", \"failed-but-will-retry\"]). */\n statuses: EventStatus[];\n /** Optional context filter for multi-tenant scenarios. */\n context?: Partial<Record<string, string>>;\n };\n /** Maximum number of events to return. */\n limit: number;\n};\n\n/**\n * Query interface for reading events.\n * Used by the event crawler to fetch events for processing.\n * Implement this to query events from your database.\n */\nexport type EventQueries<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n /** Fetch events matching the given filters. */\n getEvents: (params: GetEventsParams) => Promise<Event[]>;\n};\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
1
+ {"version":3,"sources":["../../src/ports/EventQueries.ts"],"sourcesContent":["import type { DefaultContext, EventStatus, GenericEvent } from \"../types.ts\";\n\n/** Parameters for querying events. */\ntype GetEventsParams = {\n filters: {\n /** Filter by event status (e.g., [\"never-published\", \"failed-but-will-retry\"]). */\n statuses: EventStatus[];\n /** Optional context filter for multi-tenant scenarios. */\n context?: Partial<Record<string, string>>;\n /** Optional time-based filter for when events occurred. */\n occurredAt?: {\n /** Include events that occurred on or after this date. */\n from?: Date;\n /** Include events that occurred on or before this date. */\n to?: Date;\n };\n };\n /** Maximum number of events to return. */\n limit: number;\n};\n\n/**\n * Query interface for reading events.\n * Used by the event crawler to fetch events for processing.\n * Implement this to query events from your database.\n */\nexport type EventQueries<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n /** Fetch events matching the given filters. */\n getEvents: (params: GetEventsParams) => Promise<Event[]>;\n};\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
@@ -7,6 +7,13 @@ type GetEventsParams = {
7
7
  statuses: EventStatus[];
8
8
  /** Optional context filter for multi-tenant scenarios. */
9
9
  context?: Partial<Record<string, string>>;
10
+ /** Optional time-based filter for when events occurred. */
11
+ occurredAt?: {
12
+ /** Include events that occurred on or after this date. */
13
+ from?: Date;
14
+ /** Include events that occurred on or before this date. */
15
+ to?: Date;
16
+ };
10
17
  };
11
18
  /** Maximum number of events to return. */
12
19
  limit: number;
@@ -7,6 +7,13 @@ type GetEventsParams = {
7
7
  statuses: EventStatus[];
8
8
  /** Optional context filter for multi-tenant scenarios. */
9
9
  context?: Partial<Record<string, string>>;
10
+ /** Optional time-based filter for when events occurred. */
11
+ occurredAt?: {
12
+ /** Include events that occurred on or after this date. */
13
+ from?: Date;
14
+ /** Include events that occurred on or before this date. */
15
+ to?: Date;
16
+ };
10
17
  };
11
18
  /** Maximum number of events to return. */
12
19
  limit: number;
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/ports/EventRepository.ts"],"sourcesContent":["import type { DefaultContext, GenericEvent } from \"../types.ts\";\n\n/**\n * Repository interface for persisting events.\n * Implement this to store events in your database (e.g., PostgreSQL, MongoDB).\n * Events should be saved in the same transaction as your domain changes.\n */\nexport type EventRepository<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n /** Persist a single event (typically after publication status update). */\n save: (event: Event) => Promise<void>;\n /** Persist multiple new events in a batch. */\n saveNewEventsBatch: (events: Event[]) => Promise<void>;\n /** Mark events as \"in-process\" before publishing (prevents duplicate processing). */\n markEventsAsInProcess: (events: Event[]) => Promise<void>;\n};\n\n/**\n * Unit of work containing the event repository.\n * Extend this with your own repositories for transactional consistency.\n */\nexport type EventsUnitOfWork<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n eventRepository: EventRepository<Event>;\n};\n\n/**\n * Higher-order function that provides a unit of work for transactional operations.\n * Your implementation should handle transaction begin/commit/rollback.\n *\n * @example\n * ```typescript\n * const withUow: WithEventsUow<MyEvent> = async (fn) => {\n * const tx = await db.beginTransaction();\n * try {\n * await fn({ eventRepository: createEventRepo(tx) });\n * await tx.commit();\n * } catch (e) {\n * await tx.rollback();\n * throw e;\n * }\n * };\n * ```\n */\nexport type WithEventsUow<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = (fn: (uow: EventsUnitOfWork<Event>) => Promise<void>) => Promise<void>;\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
1
+ {"version":3,"sources":["../../src/ports/EventRepository.ts"],"sourcesContent":["import type { DefaultContext, GenericEvent } from \"../types.ts\";\n\n/**\n * Repository interface for persisting events.\n * Implement this to store events in your database (e.g., PostgreSQL, MongoDB).\n * Events should be saved in the same transaction as your domain changes.\n */\nexport type EventRepository<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n /** Persist a single event (typically after publication status update). */\n save: (event: Event) => Promise<void>;\n /** Persist multiple new events in a batch. */\n saveNewEventsBatch: (events: Event[]) => Promise<void>;\n /** Mark events as \"in-process\" before publishing (prevents duplicate processing). */\n markEventsAsInProcess: (events: Event[]) => Promise<void>;\n};\n\n/**\n * Unit of work containing the event repository.\n * Extend this with your own repositories for transactional consistency.\n */\nexport type EventsUnitOfWork<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n eventRepository: EventRepository<Event>;\n};\n\n/**\n * Options for unit of work execution.\n */\nexport type WithEventsUowOptions = {\n /**\n * Callback executed after successful transaction commit.\n * Useful for triggering event processing in serverless environments.\n *\n * The callback should return a Promise. 
Whether it's awaited depends on\n * the withUow implementation:\n * - Serverless (Lambda): await to ensure completion before runtime freezes\n * - Long-running servers: fire-and-forget for faster response times\n *\n * @example\n * ```typescript\n * await withUow(async (uow) => {\n * await uow.eventRepository.save(event);\n * }, {\n * afterCommit: async () => {\n * await eventCrawler.triggerProcessing();\n * }\n * });\n * ```\n */\n afterCommit?: () => Promise<void>;\n};\n\n/**\n * Higher-order function that provides a unit of work for transactional operations.\n * Your implementation should handle transaction begin/commit/rollback.\n *\n * @example\n * ```typescript\n * const withUow: WithEventsUow<MyEvent> = async (fn, options) => {\n * const tx = await db.beginTransaction();\n * try {\n * const result = await fn({ eventRepository: createEventRepo(tx) });\n * await tx.commit();\n * await options?.afterCommit?.();\n * return result;\n * } catch (e) {\n * await tx.rollback();\n * throw e;\n * }\n * };\n * ```\n */\nexport type WithEventsUow<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = <T>(\n fn: (uow: EventsUnitOfWork<Event>) => Promise<T>,\n options?: WithEventsUowOptions,\n) => Promise<T>;\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
@@ -20,17 +20,45 @@ type EventRepository<Event extends GenericEvent<string, unknown, DefaultContext>
20
20
  type EventsUnitOfWork<Event extends GenericEvent<string, unknown, DefaultContext>> = {
21
21
  eventRepository: EventRepository<Event>;
22
22
  };
23
+ /**
24
+ * Options for unit of work execution.
25
+ */
26
+ type WithEventsUowOptions = {
27
+ /**
28
+ * Callback executed after successful transaction commit.
29
+ * Useful for triggering event processing in serverless environments.
30
+ *
31
+ * The callback should return a Promise. Whether it's awaited depends on
32
+ * the withUow implementation:
33
+ * - Serverless (Lambda): await to ensure completion before runtime freezes
34
+ * - Long-running servers: fire-and-forget for faster response times
35
+ *
36
+ * @example
37
+ * ```typescript
38
+ * await withUow(async (uow) => {
39
+ * await uow.eventRepository.save(event);
40
+ * }, {
41
+ * afterCommit: async () => {
42
+ * await eventCrawler.triggerProcessing();
43
+ * }
44
+ * });
45
+ * ```
46
+ */
47
+ afterCommit?: () => Promise<void>;
48
+ };
23
49
  /**
24
50
  * Higher-order function that provides a unit of work for transactional operations.
25
51
  * Your implementation should handle transaction begin/commit/rollback.
26
52
  *
27
53
  * @example
28
54
  * ```typescript
29
- * const withUow: WithEventsUow<MyEvent> = async (fn) => {
55
+ * const withUow: WithEventsUow<MyEvent> = async (fn, options) => {
30
56
  * const tx = await db.beginTransaction();
31
57
  * try {
32
- * await fn({ eventRepository: createEventRepo(tx) });
58
+ * const result = await fn({ eventRepository: createEventRepo(tx) });
33
59
  * await tx.commit();
60
+ * await options?.afterCommit?.();
61
+ * return result;
34
62
  * } catch (e) {
35
63
  * await tx.rollback();
36
64
  * throw e;
@@ -38,6 +66,6 @@ type EventsUnitOfWork<Event extends GenericEvent<string, unknown, DefaultContext
38
66
  * };
39
67
  * ```
40
68
  */
41
- type WithEventsUow<Event extends GenericEvent<string, unknown, DefaultContext>> = (fn: (uow: EventsUnitOfWork<Event>) => Promise<void>) => Promise<void>;
69
+ type WithEventsUow<Event extends GenericEvent<string, unknown, DefaultContext>> = <T>(fn: (uow: EventsUnitOfWork<Event>) => Promise<T>, options?: WithEventsUowOptions) => Promise<T>;
42
70
 
43
- export type { EventRepository, EventsUnitOfWork, WithEventsUow };
71
+ export type { EventRepository, EventsUnitOfWork, WithEventsUow, WithEventsUowOptions };
@@ -20,17 +20,45 @@ type EventRepository<Event extends GenericEvent<string, unknown, DefaultContext>
20
20
  type EventsUnitOfWork<Event extends GenericEvent<string, unknown, DefaultContext>> = {
21
21
  eventRepository: EventRepository<Event>;
22
22
  };
23
+ /**
24
+ * Options for unit of work execution.
25
+ */
26
+ type WithEventsUowOptions = {
27
+ /**
28
+ * Callback executed after successful transaction commit.
29
+ * Useful for triggering event processing in serverless environments.
30
+ *
31
+ * The callback should return a Promise. Whether it's awaited depends on
32
+ * the withUow implementation:
33
+ * - Serverless (Lambda): await to ensure completion before runtime freezes
34
+ * - Long-running servers: fire-and-forget for faster response times
35
+ *
36
+ * @example
37
+ * ```typescript
38
+ * await withUow(async (uow) => {
39
+ * await uow.eventRepository.save(event);
40
+ * }, {
41
+ * afterCommit: async () => {
42
+ * await eventCrawler.triggerProcessing();
43
+ * }
44
+ * });
45
+ * ```
46
+ */
47
+ afterCommit?: () => Promise<void>;
48
+ };
23
49
  /**
24
50
  * Higher-order function that provides a unit of work for transactional operations.
25
51
  * Your implementation should handle transaction begin/commit/rollback.
26
52
  *
27
53
  * @example
28
54
  * ```typescript
29
- * const withUow: WithEventsUow<MyEvent> = async (fn) => {
55
+ * const withUow: WithEventsUow<MyEvent> = async (fn, options) => {
30
56
  * const tx = await db.beginTransaction();
31
57
  * try {
32
- * await fn({ eventRepository: createEventRepo(tx) });
58
+ * const result = await fn({ eventRepository: createEventRepo(tx) });
33
59
  * await tx.commit();
60
+ * await options?.afterCommit?.();
61
+ * return result;
34
62
  * } catch (e) {
35
63
  * await tx.rollback();
36
64
  * throw e;
@@ -38,6 +66,6 @@ type EventsUnitOfWork<Event extends GenericEvent<string, unknown, DefaultContext
38
66
  * };
39
67
  * ```
40
68
  */
41
- type WithEventsUow<Event extends GenericEvent<string, unknown, DefaultContext>> = (fn: (uow: EventsUnitOfWork<Event>) => Promise<void>) => Promise<void>;
69
+ type WithEventsUow<Event extends GenericEvent<string, unknown, DefaultContext>> = <T>(fn: (uow: EventsUnitOfWork<Event>) => Promise<T>, options?: WithEventsUowOptions) => Promise<T>;
42
70
 
43
- export type { EventRepository, EventsUnitOfWork, WithEventsUow };
71
+ export type { EventRepository, EventsUnitOfWork, WithEventsUow, WithEventsUowOptions };
package/package.json CHANGED
@@ -1,9 +1,9 @@
1
1
  {
2
2
  "name": "@l-etabli/events",
3
- "description": "The purpose of this repositiory is to make it easy to setup event driven architecture using outbox pattern",
3
+ "description": "The purpose of this repository is to make it easy to setup event driven architecture using outbox pattern",
4
4
  "module": "src/index.ts",
5
5
  "type": "module",
6
- "version": "0.2.0",
6
+ "version": "0.4.0",
7
7
  "main": "./dist/index.mjs",
8
8
  "types": "./dist/index.d.ts",
9
9
  "files": [
@@ -28,8 +28,9 @@
28
28
  "check": "biome check",
29
29
  "check:fix": "biome check --fix --no-errors-on-unmatched --files-ignore-unknown=true",
30
30
  "test": "bun test",
31
+ "test:integration": "bun test .integration.test.ts",
31
32
  "typecheck": "tsc --noEmit",
32
- "fullcheck": "bun run check:fix && bun run typecheck && bun test --bail tests/ --exclude '**/kyselyAdapter.test.ts'",
33
+ "fullcheck": "bun run check:fix && bun run typecheck && bun test --bail $(find tests -name '*.test.ts' ! -name '*.integration.test.ts')",
33
34
  "release": "semantic-release"
34
35
  },
35
36
  "devDependencies": {
@@ -18,11 +18,25 @@ export const createInMemoryEventQueries = <
18
18
  );
19
19
  };
20
20
 
21
+ const matchesOccurredAt = (event: Event): boolean => {
22
+ if (!filters.occurredAt) return true;
23
+
24
+ const { from, to } = filters.occurredAt;
25
+ const eventTime = event.occurredAt.getTime();
26
+
27
+ if (from && eventTime < from.getTime()) return false;
28
+ if (to && eventTime > to.getTime()) return false;
29
+
30
+ return true;
31
+ };
32
+
21
33
  return helpers
22
34
  .getAllEvents()
23
35
  .filter(
24
36
  (event) =>
25
- filters.statuses.includes(event.status) && matchesContext(event),
37
+ filters.statuses.includes(event.status) &&
38
+ matchesContext(event) &&
39
+ matchesOccurredAt(event),
26
40
  )
27
41
  .slice(0, limit);
28
42
  },
@@ -54,8 +54,11 @@ export const createInMemoryWithUow = <
54
54
  >(
55
55
  eventRepository: EventRepository<Event>,
56
56
  ): { withUow: WithEventsUow<Event> } => {
57
- const withUow: WithEventsUow<Event> = async (fn) => {
58
- await fn({ eventRepository });
57
+ // In-memory adapter awaits afterCommit for predictable test behavior
58
+ const withUow: WithEventsUow<Event> = async (fn, options) => {
59
+ const result = await fn({ eventRepository });
60
+ await options?.afterCommit?.();
61
+ return result;
59
62
  };
60
63
  return { withUow };
61
64
  };
@@ -22,9 +22,17 @@ export const createKyselyEventQueries = <
22
22
  }
23
23
  }
24
24
 
25
+ if (filters.occurredAt?.from) {
26
+ query = query.where("occurredAt", ">=", filters.occurredAt.from);
27
+ }
28
+
29
+ if (filters.occurredAt?.to) {
30
+ query = query.where("occurredAt", "<=", filters.occurredAt.to);
31
+ }
32
+
25
33
  const rows = await query.execute();
26
34
  return rows.map(
27
- (row) =>
35
+ (row: EventsTable["events"]) =>
28
36
  ({
29
37
  ...row,
30
38
  context: row.context ?? undefined,
@@ -35,10 +35,24 @@ export const createKyselyEventRepository = <
35
35
  markEventsAsInProcess: async (events) => {
36
36
  if (events.length === 0) return;
37
37
  const ids = events.map((e) => e.id);
38
+
39
+ // Lock the rows to prevent concurrent processing
40
+ const lockedRows = await db
41
+ .selectFrom("events")
42
+ .select("id")
43
+ .where("id", "in", ids)
44
+ .forUpdate()
45
+ .skipLocked()
46
+ .execute();
47
+
48
+ if (lockedRows.length === 0) return;
49
+ const lockedIds = lockedRows.map((r) => r.id);
50
+
51
+ // Update status to in-process (only for locked rows)
38
52
  await db
39
53
  .updateTable("events")
40
54
  .set({ status: "in-process" })
41
- .where("id", "in", ids)
55
+ .where("id", "in", lockedIds)
42
56
  .execute();
43
57
  },
44
58
  });
@@ -31,9 +31,10 @@ const splitIntoChunks = <T>(array: T[], chunkSize: number): T[][] => {
31
31
  * 2. Retry failed events: polls for failed events and retries them
32
32
  *
33
33
  * @returns Object with:
34
- * - `start()`: Start the background polling loops
34
+ * - `start()`: Start the background polling loops (for traditional server environments)
35
35
  * - `processNewEvents()`: Manually trigger new event processing
36
36
  * - `retryFailedEvents()`: Manually trigger failed event retry
37
+ * - `triggerProcessing()`: Process both new and failed events (for serverless environments)
37
38
  *
38
39
  * @example
39
40
  * ```typescript
@@ -44,9 +45,18 @@ const splitIntoChunks = <T>(array: T[], chunkSize: number): T[][] => {
44
45
  * options: { batchSize: 50, newEventsIntervalMs: 5000 },
45
46
  * });
46
47
  *
47
- * // Start background processing
48
+ * // Traditional server mode: Start background processing
48
49
  * crawler.start();
49
50
  *
51
+ * // Serverless mode: Trigger on-demand after saving events
52
+ * await withUow(async (uow) => {
53
+ * await uow.eventRepository.save(event);
54
+ * }, {
55
+ * afterCommit: () => {
56
+ * crawler.triggerProcessing().catch(console.error);
57
+ * }
58
+ * });
59
+ *
50
60
  * // Or trigger manually (useful for testing)
51
61
  * await crawler.processNewEvents();
52
62
  * ```
@@ -92,8 +102,13 @@ export const createEventCrawler = <
92
102
  };
93
103
 
94
104
  const retryFailedEvents = async (): Promise<void> => {
105
+ const oneMinuteAgo = new Date(Date.now() - 60_000);
106
+
95
107
  const events = await eventQueries.getEvents({
96
- filters: { statuses: ["to-republish", "failed-but-will-retry"] },
108
+ filters: {
109
+ statuses: ["to-republish", "failed-but-will-retry"],
110
+ occurredAt: { to: oneMinuteAgo },
111
+ },
97
112
  limit: batchSize,
98
113
  });
99
114
 
@@ -102,6 +117,24 @@ export const createEventCrawler = <
102
117
  await publishEventsInParallel(events);
103
118
  };
104
119
 
120
+ const triggerProcessing = async (): Promise<void> => {
121
+ // Use Promise.allSettled to ensure both processing steps run independently
122
+ // If processNewEvents fails, retryFailedEvents will still execute
123
+ const results = await Promise.allSettled([
124
+ processNewEvents(),
125
+ retryFailedEvents(),
126
+ ]);
127
+
128
+ // Re-throw if both failed
129
+ const errors = results
130
+ .filter((r) => r.status === "rejected")
131
+ .map((r) => (r as PromiseRejectedResult).reason);
132
+
133
+ if (errors.length > 0) {
134
+ throw new AggregateError(errors, "Event processing failed");
135
+ }
136
+ };
137
+
105
138
  const start = () => {
106
139
  const scheduleProcessNewEvents = () => {
107
140
  setTimeout(async () => {
@@ -134,6 +167,7 @@ export const createEventCrawler = <
134
167
  return {
135
168
  processNewEvents,
136
169
  retryFailedEvents,
170
+ triggerProcessing,
137
171
  start,
138
172
  };
139
173
  };
@@ -7,6 +7,13 @@ type GetEventsParams = {
7
7
  statuses: EventStatus[];
8
8
  /** Optional context filter for multi-tenant scenarios. */
9
9
  context?: Partial<Record<string, string>>;
10
+ /** Optional time-based filter for when events occurred. */
11
+ occurredAt?: {
12
+ /** Include events that occurred on or after this date. */
13
+ from?: Date;
14
+ /** Include events that occurred on or before this date. */
15
+ to?: Date;
16
+ };
10
17
  };
11
18
  /** Maximum number of events to return. */
12
19
  limit: number;
@@ -26,17 +26,46 @@ export type EventsUnitOfWork<
26
26
  eventRepository: EventRepository<Event>;
27
27
  };
28
28
 
29
+ /**
30
+ * Options for unit of work execution.
31
+ */
32
+ export type WithEventsUowOptions = {
33
+ /**
34
+ * Callback executed after successful transaction commit.
35
+ * Useful for triggering event processing in serverless environments.
36
+ *
37
+ * The callback should return a Promise. Whether it's awaited depends on
38
+ * the withUow implementation:
39
+ * - Serverless (Lambda): await to ensure completion before runtime freezes
40
+ * - Long-running servers: fire-and-forget for faster response times
41
+ *
42
+ * @example
43
+ * ```typescript
44
+ * await withUow(async (uow) => {
45
+ * await uow.eventRepository.save(event);
46
+ * }, {
47
+ * afterCommit: async () => {
48
+ * await eventCrawler.triggerProcessing();
49
+ * }
50
+ * });
51
+ * ```
52
+ */
53
+ afterCommit?: () => Promise<void>;
54
+ };
55
+
29
56
  /**
30
57
  * Higher-order function that provides a unit of work for transactional operations.
31
58
  * Your implementation should handle transaction begin/commit/rollback.
32
59
  *
33
60
  * @example
34
61
  * ```typescript
35
- * const withUow: WithEventsUow<MyEvent> = async (fn) => {
62
+ * const withUow: WithEventsUow<MyEvent> = async (fn, options) => {
36
63
  * const tx = await db.beginTransaction();
37
64
  * try {
38
- * await fn({ eventRepository: createEventRepo(tx) });
65
+ * const result = await fn({ eventRepository: createEventRepo(tx) });
39
66
  * await tx.commit();
67
+ * await options?.afterCommit?.();
68
+ * return result;
40
69
  * } catch (e) {
41
70
  * await tx.rollback();
42
71
  * throw e;
@@ -46,4 +75,7 @@ export type EventsUnitOfWork<
46
75
  */
47
76
  export type WithEventsUow<
48
77
  Event extends GenericEvent<string, unknown, DefaultContext>,
49
- > = (fn: (uow: EventsUnitOfWork<Event>) => Promise<void>) => Promise<void>;
78
+ > = <T>(
79
+ fn: (uow: EventsUnitOfWork<Event>) => Promise<T>,
80
+ options?: WithEventsUowOptions,
81
+ ) => Promise<T>;