@l-etabli/events 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventQueries.cjs +54 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventQueries.cjs.map +1 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventQueries.d.cts +10 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventQueries.d.ts +10 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventQueries.mjs +30 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventQueries.mjs.map +1 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventRepository.cjs +85 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventRepository.cjs.map +1 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventRepository.d.cts +9 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventRepository.d.ts +9 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventRepository.mjs +61 -0
- package/dist/adapters/effect-kysely/EffectKyselyEventRepository.mjs.map +1 -0
- package/dist/adapters/effect-kysely/index.cjs +32 -0
- package/dist/adapters/effect-kysely/index.cjs.map +1 -0
- package/dist/adapters/effect-kysely/index.d.cts +9 -0
- package/dist/adapters/effect-kysely/index.d.ts +9 -0
- package/dist/adapters/effect-kysely/index.mjs +7 -0
- package/dist/adapters/effect-kysely/index.mjs.map +1 -0
- package/dist/adapters/in-memory/InMemoryEventBus.cjs +3 -17
- package/dist/adapters/in-memory/InMemoryEventBus.cjs.map +1 -1
- package/dist/adapters/in-memory/InMemoryEventBus.mjs +2 -16
- package/dist/adapters/in-memory/InMemoryEventBus.mjs.map +1 -1
- package/dist/adapters/in-memory/InMemoryEventQueries.cjs +2 -20
- package/dist/adapters/in-memory/InMemoryEventQueries.cjs.map +1 -1
- package/dist/adapters/in-memory/InMemoryEventQueries.mjs +2 -20
- package/dist/adapters/in-memory/InMemoryEventQueries.mjs.map +1 -1
- package/dist/adapters/kysely/KyselyEventQueries.cjs +20 -24
- package/dist/adapters/kysely/KyselyEventQueries.cjs.map +1 -1
- package/dist/adapters/kysely/KyselyEventQueries.d.cts +1 -1
- package/dist/adapters/kysely/KyselyEventQueries.d.ts +1 -1
- package/dist/adapters/kysely/KyselyEventQueries.mjs +20 -24
- package/dist/adapters/kysely/KyselyEventQueries.mjs.map +1 -1
- package/dist/adapters/kysely/KyselyEventRepository.cjs +47 -45
- package/dist/adapters/kysely/KyselyEventRepository.cjs.map +1 -1
- package/dist/adapters/kysely/KyselyEventRepository.d.cts +1 -1
- package/dist/adapters/kysely/KyselyEventRepository.d.ts +1 -1
- package/dist/adapters/kysely/KyselyEventRepository.mjs +43 -41
- package/dist/adapters/kysely/KyselyEventRepository.mjs.map +1 -1
- package/dist/adapters/kysely/jsonb.cjs +30 -0
- package/dist/adapters/kysely/jsonb.cjs.map +1 -0
- package/dist/adapters/kysely/jsonb.d.cts +5 -0
- package/dist/adapters/kysely/jsonb.d.ts +5 -0
- package/dist/adapters/kysely/jsonb.mjs +6 -0
- package/dist/adapters/kysely/jsonb.mjs.map +1 -0
- package/dist/adapters/kysely/mapEventRow.cjs +35 -0
- package/dist/adapters/kysely/mapEventRow.cjs.map +1 -0
- package/dist/adapters/kysely/mapEventRow.d.cts +6 -0
- package/dist/adapters/kysely/mapEventRow.d.ts +6 -0
- package/dist/adapters/kysely/mapEventRow.mjs +11 -0
- package/dist/adapters/kysely/mapEventRow.mjs.map +1 -0
- package/dist/createEventCrawler.cjs +2 -8
- package/dist/createEventCrawler.cjs.map +1 -1
- package/dist/createEventCrawler.mjs +1 -7
- package/dist/createEventCrawler.mjs.map +1 -1
- package/dist/effect/EffectEventCrawler.cjs +111 -0
- package/dist/effect/EffectEventCrawler.cjs.map +1 -0
- package/dist/effect/EffectEventCrawler.d.cts +26 -0
- package/dist/effect/EffectEventCrawler.d.ts +26 -0
- package/dist/effect/EffectEventCrawler.mjs +87 -0
- package/dist/effect/EffectEventCrawler.mjs.map +1 -0
- package/dist/effect/EffectInMemoryEventBus.cjs +131 -0
- package/dist/effect/EffectInMemoryEventBus.cjs.map +1 -0
- package/dist/effect/EffectInMemoryEventBus.d.cts +31 -0
- package/dist/effect/EffectInMemoryEventBus.d.ts +31 -0
- package/dist/effect/EffectInMemoryEventBus.mjs +112 -0
- package/dist/effect/EffectInMemoryEventBus.mjs.map +1 -0
- package/dist/effect/EffectInMemoryEventQueries.cjs +35 -0
- package/dist/effect/EffectInMemoryEventQueries.cjs.map +1 -0
- package/dist/effect/EffectInMemoryEventQueries.d.cts +12 -0
- package/dist/effect/EffectInMemoryEventQueries.d.ts +12 -0
- package/dist/effect/EffectInMemoryEventQueries.mjs +11 -0
- package/dist/effect/EffectInMemoryEventQueries.mjs.map +1 -0
- package/dist/effect/EffectInMemoryEventRepository.cjs +73 -0
- package/dist/effect/EffectInMemoryEventRepository.cjs.map +1 -0
- package/dist/effect/EffectInMemoryEventRepository.d.cts +15 -0
- package/dist/effect/EffectInMemoryEventRepository.d.ts +15 -0
- package/dist/effect/EffectInMemoryEventRepository.mjs +48 -0
- package/dist/effect/EffectInMemoryEventRepository.mjs.map +1 -0
- package/dist/effect/EffectSubscriptions.cjs +61 -0
- package/dist/effect/EffectSubscriptions.cjs.map +1 -0
- package/dist/effect/EffectSubscriptions.d.cts +22 -0
- package/dist/effect/EffectSubscriptions.d.ts +22 -0
- package/dist/effect/EffectSubscriptions.mjs +36 -0
- package/dist/effect/EffectSubscriptions.mjs.map +1 -0
- package/dist/effect/index.cjs +47 -0
- package/dist/effect/index.cjs.map +1 -0
- package/dist/effect/index.d.cts +27 -0
- package/dist/effect/index.d.ts +27 -0
- package/dist/effect/index.mjs +20 -0
- package/dist/effect/index.mjs.map +1 -0
- package/dist/effect/ports/EffectEventBus.cjs +17 -0
- package/dist/effect/ports/EffectEventBus.cjs.map +1 -0
- package/dist/effect/ports/EffectEventBus.d.cts +13 -0
- package/dist/effect/ports/EffectEventBus.d.ts +13 -0
- package/dist/effect/ports/EffectEventBus.mjs +1 -0
- package/dist/effect/ports/EffectEventBus.mjs.map +1 -0
- package/dist/effect/ports/EffectEventQueries.cjs +17 -0
- package/dist/effect/ports/EffectEventQueries.cjs.map +1 -0
- package/dist/effect/ports/EffectEventQueries.d.cts +9 -0
- package/dist/effect/ports/EffectEventQueries.d.ts +9 -0
- package/dist/effect/ports/EffectEventQueries.mjs +1 -0
- package/dist/effect/ports/EffectEventQueries.mjs.map +1 -0
- package/dist/effect/ports/EffectEventRepository.cjs +17 -0
- package/dist/effect/ports/EffectEventRepository.cjs.map +1 -0
- package/dist/effect/ports/EffectEventRepository.d.cts +17 -0
- package/dist/effect/ports/EffectEventRepository.d.ts +17 -0
- package/dist/effect/ports/EffectEventRepository.mjs +1 -0
- package/dist/effect/ports/EffectEventRepository.mjs.map +1 -0
- package/dist/filterEvents.cjs +48 -0
- package/dist/filterEvents.cjs.map +1 -0
- package/dist/filterEvents.d.cts +6 -0
- package/dist/filterEvents.d.ts +6 -0
- package/dist/filterEvents.mjs +24 -0
- package/dist/filterEvents.mjs.map +1 -0
- package/dist/getSubscriptionIdsToPublish.cjs +40 -0
- package/dist/getSubscriptionIdsToPublish.cjs.map +1 -0
- package/dist/getSubscriptionIdsToPublish.d.cts +5 -0
- package/dist/getSubscriptionIdsToPublish.d.ts +5 -0
- package/dist/getSubscriptionIdsToPublish.mjs +16 -0
- package/dist/getSubscriptionIdsToPublish.mjs.map +1 -0
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/ports/EventQueries.cjs.map +1 -1
- package/dist/ports/EventQueries.d.cts +1 -1
- package/dist/ports/EventQueries.d.ts +1 -1
- package/dist/splitIntoChunks.cjs +35 -0
- package/dist/splitIntoChunks.cjs.map +1 -0
- package/dist/splitIntoChunks.d.cts +3 -0
- package/dist/splitIntoChunks.d.ts +3 -0
- package/dist/splitIntoChunks.mjs +11 -0
- package/dist/splitIntoChunks.mjs.map +1 -0
- package/package.json +18 -3
- package/src/adapters/effect-kysely/EffectKyselyEventQueries.ts +45 -0
- package/src/adapters/effect-kysely/EffectKyselyEventRepository.ts +90 -0
- package/src/adapters/effect-kysely/index.ts +3 -0
- package/src/adapters/in-memory/InMemoryEventBus.ts +2 -23
- package/src/adapters/in-memory/InMemoryEventQueries.ts +2 -32
- package/src/adapters/kysely/KyselyEventQueries.ts +27 -31
- package/src/adapters/kysely/KyselyEventRepository.ts +66 -64
- package/src/adapters/kysely/jsonb.ts +4 -0
- package/src/adapters/kysely/mapEventRow.ts +15 -0
- package/src/createEventCrawler.ts +1 -8
- package/src/effect/EffectEventCrawler.ts +124 -0
- package/src/effect/EffectInMemoryEventBus.ts +231 -0
- package/src/effect/EffectInMemoryEventQueries.ts +16 -0
- package/src/effect/EffectInMemoryEventRepository.ts +68 -0
- package/src/effect/EffectSubscriptions.ts +74 -0
- package/src/effect/index.ts +26 -0
- package/src/effect/ports/EffectEventBus.ts +17 -0
- package/src/effect/ports/EffectEventQueries.ts +9 -0
- package/src/effect/ports/EffectEventRepository.ts +27 -0
- package/src/filterEvents.ts +39 -0
- package/src/getSubscriptionIdsToPublish.ts +21 -0
- package/src/ports/EventQueries.ts +1 -1
- package/src/splitIntoChunks.ts +7 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/adapters/kysely/jsonb.ts"],"sourcesContent":["import { type RawBuilder, sql } from \"kysely\";\n\nexport const jsonb = <T>(value: T): RawBuilder<T> =>\n sql`${JSON.stringify(value)}::jsonb`;\n"],"mappings":"AAAA,SAA0B,WAAW;AAE9B,MAAM,QAAQ,CAAI,UACvB,MAAM,KAAK,UAAU,KAAK,CAAC;","names":[]}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
var mapEventRow_exports = {};
|
|
20
|
+
__export(mapEventRow_exports, {
|
|
21
|
+
mapEventRow: () => mapEventRow
|
|
22
|
+
});
|
|
23
|
+
module.exports = __toCommonJS(mapEventRow_exports);
|
|
24
|
+
const mapEventRow = (row) => ({
|
|
25
|
+
...row,
|
|
26
|
+
context: row.context ?? void 0,
|
|
27
|
+
flowId: row.flowId ?? void 0,
|
|
28
|
+
causedByEventId: row.causedByEventId ?? void 0,
|
|
29
|
+
priority: row.priority ?? void 0
|
|
30
|
+
});
|
|
31
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
32
|
+
0 && (module.exports = {
|
|
33
|
+
mapEventRow
|
|
34
|
+
});
|
|
35
|
+
//# sourceMappingURL=mapEventRow.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/adapters/kysely/mapEventRow.ts"],"sourcesContent":["import type { DefaultContext, GenericEvent } from \"../../types.ts\";\nimport type { EventsTable } from \"./types.ts\";\n\nexport const mapEventRow = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n row: EventsTable[\"events\"],\n): Event =>\n ({\n ...row,\n context: row.context ?? undefined,\n flowId: row.flowId ?? undefined,\n causedByEventId: row.causedByEventId ?? undefined,\n priority: row.priority ?? undefined,\n }) as Event;\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,cAAc,CAGzB,SAEC;AAAA,EACC,GAAG;AAAA,EACH,SAAS,IAAI,WAAW;AAAA,EACxB,QAAQ,IAAI,UAAU;AAAA,EACtB,iBAAiB,IAAI,mBAAmB;AAAA,EACxC,UAAU,IAAI,YAAY;AAC5B;","names":[]}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
const mapEventRow = (row) => ({
|
|
2
|
+
...row,
|
|
3
|
+
context: row.context ?? void 0,
|
|
4
|
+
flowId: row.flowId ?? void 0,
|
|
5
|
+
causedByEventId: row.causedByEventId ?? void 0,
|
|
6
|
+
priority: row.priority ?? void 0
|
|
7
|
+
});
|
|
8
|
+
export {
|
|
9
|
+
mapEventRow
|
|
10
|
+
};
|
|
11
|
+
//# sourceMappingURL=mapEventRow.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/adapters/kysely/mapEventRow.ts"],"sourcesContent":["import type { DefaultContext, GenericEvent } from '../../types.mjs';\nimport type { EventsTable } from './types.mjs';\n\nexport const mapEventRow = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n row: EventsTable[\"events\"],\n): Event =>\n ({\n ...row,\n context: row.context ?? undefined,\n flowId: row.flowId ?? undefined,\n causedByEventId: row.causedByEventId ?? undefined,\n priority: row.priority ?? undefined,\n }) as Event;\n"],"mappings":"AAGO,MAAM,cAAc,CAGzB,SAEC;AAAA,EACC,GAAG;AAAA,EACH,SAAS,IAAI,WAAW;AAAA,EACxB,QAAQ,IAAI,UAAU;AAAA,EACtB,iBAAiB,IAAI,mBAAmB;AAAA,EACxC,UAAU,IAAI,YAAY;AAC5B;","names":[]}
|
|
@@ -21,13 +21,7 @@ __export(createEventCrawler_exports, {
|
|
|
21
21
|
createEventCrawler: () => createEventCrawler
|
|
22
22
|
});
|
|
23
23
|
module.exports = __toCommonJS(createEventCrawler_exports);
|
|
24
|
-
|
|
25
|
-
const chunks = [];
|
|
26
|
-
for (let i = 0; i < array.length; i += chunkSize) {
|
|
27
|
-
chunks.push(array.slice(i, i + chunkSize));
|
|
28
|
-
}
|
|
29
|
-
return chunks;
|
|
30
|
-
};
|
|
24
|
+
var import_splitIntoChunks = require("./splitIntoChunks.ts");
|
|
31
25
|
const createEventCrawler = ({
|
|
32
26
|
withUow,
|
|
33
27
|
eventQueries,
|
|
@@ -39,7 +33,7 @@ const createEventCrawler = ({
|
|
|
39
33
|
const newEventsIntervalMs = options.newEventsIntervalMs ?? 1e4;
|
|
40
34
|
const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 6e4;
|
|
41
35
|
const publishEventsInParallel = async (events) => {
|
|
42
|
-
const eventChunks = splitIntoChunks(events, maxParallelProcessing);
|
|
36
|
+
const eventChunks = (0, import_splitIntoChunks.splitIntoChunks)(events, maxParallelProcessing);
|
|
43
37
|
for (const chunk of eventChunks) {
|
|
44
38
|
await Promise.all(chunk.map((event) => eventBus.publish(event)));
|
|
45
39
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from \"./ports/EventBus.ts\";\nimport type { EventQueries } from \"./ports/EventQueries.ts\";\nimport type { WithEventsUow } from \"./ports/EventRepository.ts\";\nimport type { DefaultContext, GenericEvent } from \"./types.ts\";\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\
|
|
1
|
+
{"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from \"./ports/EventBus.ts\";\nimport type { EventQueries } from \"./ports/EventQueries.ts\";\nimport type { WithEventsUow } from \"./ports/EventRepository.ts\";\nimport { splitIntoChunks } from \"./splitIntoChunks.ts\";\nimport type { DefaultContext, GenericEvent } from \"./types.ts\";\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\n/**\n * Creates a background event crawler that processes and publishes events.\n *\n * The crawler runs two loops:\n * 1. Process new events: polls for \"never-published\" events and publishes them\n * 2. 
Retry failed events: polls for failed events and retries them\n *\n * @returns Object with:\n * - `start()`: Start the background polling loops (for traditional server environments)\n * - `processNewEvents()`: Manually trigger new event processing\n * - `retryFailedEvents()`: Manually trigger failed event retry\n * - `triggerProcessing()`: Process both new and failed events (for serverless environments)\n *\n * @example\n * ```typescript\n * const crawler = createEventCrawler({\n * withUow,\n * eventQueries,\n * eventBus,\n * options: { batchSize: 50, newEventsIntervalMs: 5000 },\n * });\n *\n * // Traditional server mode: Start background processing\n * crawler.start();\n *\n * // Serverless mode: Trigger on-demand after saving events\n * await withUow(async (uow) => {\n * await uow.eventRepository.save(event);\n * }, {\n * afterCommit: () => {\n * crawler.triggerProcessing().catch(console.error);\n * }\n * });\n *\n * // Or trigger manually (useful for testing)\n * await crawler.processNewEvents();\n * ```\n */\nexport const createEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = async (events: Event[]) => {\n const eventChunks = splitIntoChunks(events, maxParallelProcessing);\n for (const chunk of eventChunks) {\n await Promise.all(chunk.map((event) => eventBus.publish(event)));\n }\n };\n\n const processNewEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await withUow(async (uow) => {\n await uow.eventRepository.markEventsAsInProcess(events);\n });\n\n await publishEventsInParallel(events);\n };\n\n const retryFailedEvents = async (): Promise<void> => {\n const oneMinuteAgo = new Date(Date.now() - 60_000);\n\n const events = await eventQueries.getEvents({\n filters: {\n statuses: [\"to-republish\", \"failed-but-will-retry\"],\n occurredAt: { to: oneMinuteAgo },\n },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await publishEventsInParallel(events);\n };\n\n const triggerProcessing = async (): Promise<void> => {\n // Use Promise.allSettled to ensure both processing steps run independently\n // If processNewEvents fails, retryFailedEvents will still execute\n const results = await Promise.allSettled([\n processNewEvents(),\n retryFailedEvents(),\n ]);\n\n // Re-throw if both failed\n const errors = results\n .filter((r) => r.status === \"rejected\")\n .map((r) => (r as PromiseRejectedResult).reason);\n\n if (errors.length > 0) {\n throw new AggregateError(errors, \"Event processing failed\");\n }\n };\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await processNewEvents();\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await retryFailedEvents();\n } catch (error) {\n 
console.error(\"Error retrying failed events:\", error);\n } finally {\n scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n triggerProcessing,\n start,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,6BAAgC;AAqDzB,MAAM,qBAAqB,CAEhC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,OAAO,WAAoB;AACzD,UAAM,kBAAc,wCAAgB,QAAQ,qBAAqB;AACjE,eAAW,SAAS,aAAa;AAC/B,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,mBAAmB,YAA2B;AAClD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,QAAQ,OAAO,QAAQ;AAC3B,YAAM,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IACxD,CAAC;AAED,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AACnD,UAAM,eAAe,IAAI,KAAK,KAAK,IAAI,IAAI,GAAM;AAEjD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS;AAAA,QACP,UAAU,CAAC,gBAAgB,uBAAuB;AAAA,QAClD,YAAY,EAAE,IAAI,aAAa;AAAA,MACjC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AAGnD,UAAM,UAAU,MAAM,QAAQ,WAAW;AAAA,MACvC,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AAGD,UAAM,SAAS,QACZ,OAAO,CAAC,MAAM,EAAE,WAAW,UAAU,EACrC,IAAI,CAAC,MAAO,EAA4B,MAAM;AAEjD,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,IAAI,eAAe,QAAQ,yBAAyB;AAAA,IAC5D;AAAA,EACF;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,iBAAiB;AAAA,QACzB,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,kBAAkB;AAAA,QAC1B,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SA
AO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|
|
@@ -1,10 +1,4 @@
|
|
|
1
|
-
|
|
2
|
-
const chunks = [];
|
|
3
|
-
for (let i = 0; i < array.length; i += chunkSize) {
|
|
4
|
-
chunks.push(array.slice(i, i + chunkSize));
|
|
5
|
-
}
|
|
6
|
-
return chunks;
|
|
7
|
-
};
|
|
1
|
+
import { splitIntoChunks } from "./splitIntoChunks.mjs";
|
|
8
2
|
const createEventCrawler = ({
|
|
9
3
|
withUow,
|
|
10
4
|
eventQueries,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from './ports/EventBus.mjs';\nimport type { EventQueries } from './ports/EventQueries.mjs';\nimport type { WithEventsUow } from './ports/EventRepository.mjs';\nimport type { DefaultContext, GenericEvent } from './types.mjs';\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\
|
|
1
|
+
{"version":3,"sources":["../src/createEventCrawler.ts"],"sourcesContent":["import type { EventBus } from './ports/EventBus.mjs';\nimport type { EventQueries } from './ports/EventQueries.mjs';\nimport type { WithEventsUow } from './ports/EventRepository.mjs';\nimport { splitIntoChunks } from './splitIntoChunks.mjs';\nimport type { DefaultContext, GenericEvent } from './types.mjs';\n\n/** Configuration options for the event crawler. */\ntype CreateEventCrawlerOptions = {\n /** Max events to fetch per batch (default: 100). */\n batchSize?: number;\n /** Max events to publish in parallel (default: 1). */\n maxParallelProcessing?: number;\n /** Interval for processing new events in ms (default: 10000). */\n newEventsIntervalMs?: number;\n /** Interval for retrying failed events in ms (default: 60000). */\n failedEventsIntervalMs?: number;\n};\n\n/**\n * Creates a background event crawler that processes and publishes events.\n *\n * The crawler runs two loops:\n * 1. Process new events: polls for \"never-published\" events and publishes them\n * 2. 
Retry failed events: polls for failed events and retries them\n *\n * @returns Object with:\n * - `start()`: Start the background polling loops (for traditional server environments)\n * - `processNewEvents()`: Manually trigger new event processing\n * - `retryFailedEvents()`: Manually trigger failed event retry\n * - `triggerProcessing()`: Process both new and failed events (for serverless environments)\n *\n * @example\n * ```typescript\n * const crawler = createEventCrawler({\n * withUow,\n * eventQueries,\n * eventBus,\n * options: { batchSize: 50, newEventsIntervalMs: 5000 },\n * });\n *\n * // Traditional server mode: Start background processing\n * crawler.start();\n *\n * // Serverless mode: Trigger on-demand after saving events\n * await withUow(async (uow) => {\n * await uow.eventRepository.save(event);\n * }, {\n * afterCommit: () => {\n * crawler.triggerProcessing().catch(console.error);\n * }\n * });\n *\n * // Or trigger manually (useful for testing)\n * await crawler.processNewEvents();\n * ```\n */\nexport const createEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = async (events: Event[]) => {\n const eventChunks = splitIntoChunks(events, maxParallelProcessing);\n for (const chunk of eventChunks) {\n await Promise.all(chunk.map((event) => eventBus.publish(event)));\n }\n };\n\n const processNewEvents = async (): Promise<void> => {\n const events = await eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await withUow(async (uow) => {\n await uow.eventRepository.markEventsAsInProcess(events);\n });\n\n await publishEventsInParallel(events);\n };\n\n const retryFailedEvents = async (): Promise<void> => {\n const oneMinuteAgo = new Date(Date.now() - 60_000);\n\n const events = await eventQueries.getEvents({\n filters: {\n statuses: [\"to-republish\", \"failed-but-will-retry\"],\n occurredAt: { to: oneMinuteAgo },\n },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n await publishEventsInParallel(events);\n };\n\n const triggerProcessing = async (): Promise<void> => {\n // Use Promise.allSettled to ensure both processing steps run independently\n // If processNewEvents fails, retryFailedEvents will still execute\n const results = await Promise.allSettled([\n processNewEvents(),\n retryFailedEvents(),\n ]);\n\n // Re-throw if both failed\n const errors = results\n .filter((r) => r.status === \"rejected\")\n .map((r) => (r as PromiseRejectedResult).reason);\n\n if (errors.length > 0) {\n throw new AggregateError(errors, \"Event processing failed\");\n }\n };\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await processNewEvents();\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await retryFailedEvents();\n } catch (error) {\n 
console.error(\"Error retrying failed events:\", error);\n } finally {\n scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n triggerProcessing,\n start,\n };\n};\n"],"mappings":"AAGA,SAAS,uBAAuB;AAqDzB,MAAM,qBAAqB,CAEhC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,OAAO,WAAoB;AACzD,UAAM,cAAc,gBAAgB,QAAQ,qBAAqB;AACjE,eAAW,SAAS,aAAa;AAC/B,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,mBAAmB,YAA2B;AAClD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,QAAQ,OAAO,QAAQ;AAC3B,YAAM,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IACxD,CAAC;AAED,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AACnD,UAAM,eAAe,IAAI,KAAK,KAAK,IAAI,IAAI,GAAM;AAEjD,UAAM,SAAS,MAAM,aAAa,UAAU;AAAA,MAC1C,SAAS;AAAA,QACP,UAAU,CAAC,gBAAgB,uBAAuB;AAAA,QAClD,YAAY,EAAE,IAAI,aAAa;AAAA,MACjC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,wBAAwB,MAAM;AAAA,EACtC;AAEA,QAAM,oBAAoB,YAA2B;AAGnD,UAAM,UAAU,MAAM,QAAQ,WAAW;AAAA,MACvC,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AAGD,UAAM,SAAS,QACZ,OAAO,CAAC,MAAM,EAAE,WAAW,UAAU,EACrC,IAAI,CAAC,MAAO,EAA4B,MAAM;AAEjD,QAAI,OAAO,SAAS,GAAG;AACrB,YAAM,IAAI,eAAe,QAAQ,yBAAyB;AAAA,IAC5D;AAAA,EACF;AAEA,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,iBAAiB;AAAA,QACzB,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,kBAAkB;AAAA,QAC1B,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,I
ACA;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
var EffectEventCrawler_exports = {};
|
|
20
|
+
__export(EffectEventCrawler_exports, {
|
|
21
|
+
createEffectEventCrawler: () => createEffectEventCrawler
|
|
22
|
+
});
|
|
23
|
+
module.exports = __toCommonJS(EffectEventCrawler_exports);
|
|
24
|
+
var import_effect = require("effect");
|
|
25
|
+
/**
 * Builds a polling event crawler: it periodically reads pending events from
 * the event store (`eventQueries`) and republishes them on `eventBus`,
 * persisting status transitions through the `withUow` unit of work.
 *
 * Defaults (applied when the corresponding option is null/undefined):
 * batchSize 100, maxParallelProcessing 1, newEventsIntervalMs 10000,
 * failedEventsIntervalMs 60000.
 *
 * Returns { processNewEvents, retryFailedEvents, triggerProcessing, start }.
 */
const createEffectEventCrawler = ({
  withUow,
  eventQueries,
  eventBus,
  options = {}
}) => {
  const { Effect, Exit, Cause } = import_effect;
  const batchSize = options.batchSize ?? 100;
  const maxParallelProcessing = options.maxParallelProcessing ?? 1;
  const newEventsIntervalMs = options.newEventsIntervalMs ?? 1e4;
  const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 6e4;
  // Publishes one page of events, at most `maxParallelProcessing` at a time;
  // individual results are discarded.
  const publishBatch = (batch) => {
    const publishes = batch.map((event) => eventBus.publish(event));
    return Effect.all(publishes, {
      concurrency: maxParallelProcessing,
      discard: true
    });
  };
  // One pass over events that were never published.
  const processNewEvents = () =>
    Effect.gen(function* () {
      const fresh = yield* eventQueries.getEvents({
        filters: { statuses: ["never-published"] },
        limit: batchSize
      });
      if (fresh.length === 0) return;
      // Flag the page as in-process before publishing so a concurrent pass
      // does not pick up the same events.
      yield* withUow(
        (uow) => uow.eventRepository.markEventsAsInProcess(fresh)
      );
      yield* publishBatch(fresh);
    });
  // One pass over events flagged for republication or retry.
  const retryFailedEvents = () =>
    Effect.gen(function* () {
      // Only consider events that occurred at least one minute ago.
      const cutoff = new Date(Date.now() - 6e4);
      const stale = yield* eventQueries.getEvents({
        filters: {
          statuses: ["to-republish", "failed-but-will-retry"],
          occurredAt: { to: cutoff }
        },
        limit: batchSize
      });
      if (stale.length === 0) return;
      yield* publishBatch(stale);
    });
  // Runs both passes concurrently, collecting exits rather than failing fast,
  // then dies with an AggregateError if either pass failed.
  const triggerProcessing = () =>
    Effect.gen(function* () {
      const outcomes = yield* Effect.all(
        [Effect.exit(processNewEvents()), Effect.exit(retryFailedEvents())],
        { concurrency: "unbounded" }
      );
      const failures = [];
      for (const outcome of outcomes) {
        if (Exit.isFailure(outcome)) failures.push(Cause.squash(outcome.cause));
      }
      if (failures.length > 0) {
        yield* Effect.die(
          new AggregateError(failures, "Event processing failed")
        );
      }
    });
  // Starts two independent, self-rescheduling timer loops. The next tick is
  // armed only after the current run settles, so runs of the same kind never
  // overlap; errors are logged and never stop the loop.
  const start = () => {
    const poll = (makeEffect, intervalMs, errorLabel) => {
      const tick = () => {
        setTimeout(async () => {
          try {
            await Effect.runPromise(makeEffect());
          } catch (error) {
            console.error(errorLabel, error);
          } finally {
            tick();
          }
        }, intervalMs);
      };
      tick();
    };
    poll(processNewEvents, newEventsIntervalMs, "Error processing new events:");
    poll(retryFailedEvents, failedEventsIntervalMs, "Error retrying failed events:");
  };
  return {
    processNewEvents,
    retryFailedEvents,
    triggerProcessing,
    start
  };
};
|
|
107
|
+
// Annotate the CommonJS export names for ESM import in node:
// Dead code by design — `0 &&` never executes — but Node's cjs-module-lexer
// parses it statically so `import { createEffectEventCrawler }` works from ESM.
0 && (module.exports = {
  createEffectEventCrawler
});
//# sourceMappingURL=EffectEventCrawler.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/effect/EffectEventCrawler.ts"],"sourcesContent":["import { Cause, Effect, Exit } from \"effect\";\nimport type { DefaultContext, GenericEvent } from \"../types.ts\";\nimport type { EventBus } from \"./ports/EffectEventBus.ts\";\nimport type { EventQueries } from \"./ports/EffectEventQueries.ts\";\nimport type { WithEventsUow } from \"./ports/EffectEventRepository.ts\";\n\ntype CreateEffectEventCrawlerOptions = {\n batchSize?: number;\n maxParallelProcessing?: number;\n newEventsIntervalMs?: number;\n failedEventsIntervalMs?: number;\n};\n\nexport const createEffectEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEffectEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = (events: Event[]): Effect.Effect<void> =>\n Effect.all(\n events.map((event) => eventBus.publish(event)),\n { concurrency: maxParallelProcessing, discard: true },\n );\n\n const processNewEvents = (): Effect.Effect<void> =>\n Effect.gen(function* () {\n const events = yield* eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n yield* withUow((uow) =>\n uow.eventRepository.markEventsAsInProcess(events),\n );\n\n yield* publishEventsInParallel(events);\n });\n\n const retryFailedEvents = (): Effect.Effect<void> =>\n Effect.gen(function* () {\n const oneMinuteAgo = new Date(Date.now() - 60_000);\n\n const events = yield* eventQueries.getEvents({\n filters: {\n statuses: [\"to-republish\", \"failed-but-will-retry\"],\n occurredAt: { to: oneMinuteAgo },\n },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n yield* publishEventsInParallel(events);\n });\n\n const triggerProcessing = (): Effect.Effect<void> =>\n Effect.gen(function* () {\n const results = yield* Effect.all(\n [Effect.exit(processNewEvents()), Effect.exit(retryFailedEvents())],\n { concurrency: \"unbounded\" },\n );\n\n const errors = results\n .filter(Exit.isFailure)\n .map((exit) => Cause.squash(exit.cause));\n\n if (errors.length > 0) {\n yield* Effect.die(\n new AggregateError(errors, \"Event processing failed\"),\n );\n }\n });\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await Effect.runPromise(processNewEvents());\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await Effect.runPromise(retryFailedEvents());\n } catch (error) {\n console.error(\"Error retrying failed events:\", error);\n } finally {\n 
scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n triggerProcessing,\n start,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAoC;AAa7B,MAAM,2BAA2B,CAEtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,CAAC,WAC/B,qBAAO;AAAA,IACL,OAAO,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC;AAAA,IAC7C,EAAE,aAAa,uBAAuB,SAAS,KAAK;AAAA,EACtD;AAEF,QAAM,mBAAmB,MACvB,qBAAO,IAAI,aAAa;AACtB,UAAM,SAAS,OAAO,aAAa,UAAU;AAAA,MAC3C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,WAAO;AAAA,MAAQ,CAAC,QACd,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IAClD;AAEA,WAAO,wBAAwB,MAAM;AAAA,EACvC,CAAC;AAEH,QAAM,oBAAoB,MACxB,qBAAO,IAAI,aAAa;AACtB,UAAM,eAAe,IAAI,KAAK,KAAK,IAAI,IAAI,GAAM;AAEjD,UAAM,SAAS,OAAO,aAAa,UAAU;AAAA,MAC3C,SAAS;AAAA,QACP,UAAU,CAAC,gBAAgB,uBAAuB;AAAA,QAClD,YAAY,EAAE,IAAI,aAAa;AAAA,MACjC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,WAAO,wBAAwB,MAAM;AAAA,EACvC,CAAC;AAEH,QAAM,oBAAoB,MACxB,qBAAO,IAAI,aAAa;AACtB,UAAM,UAAU,OAAO,qBAAO;AAAA,MAC5B,CAAC,qBAAO,KAAK,iBAAiB,CAAC,GAAG,qBAAO,KAAK,kBAAkB,CAAC,CAAC;AAAA,MAClE,EAAE,aAAa,YAAY;AAAA,IAC7B;AAEA,UAAM,SAAS,QACZ,OAAO,mBAAK,SAAS,EACrB,IAAI,CAAC,SAAS,oBAAM,OAAO,KAAK,KAAK,CAAC;AAEzC,QAAI,OAAO,SAAS,GAAG;AACrB,aAAO,qBAAO;AAAA,QACZ,IAAI,eAAe,QAAQ,yBAAyB;AAAA,MACtD;AAAA,IACF;AAAA,EACF,CAAC;AAEH,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,qBAAO,WAAW,iBAAiB,CAAC;AAAA,QAC5C,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,qBAAO,WAAW,kBAAkB,CAAC;AAAA,QAC7C,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAA
A,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { Effect } from 'effect';
|
|
2
|
+
import { GenericEvent, DefaultContext } from '../types.cjs';
|
|
3
|
+
import { EventBus } from './ports/EffectEventBus.cjs';
|
|
4
|
+
import { EventQueries } from './ports/EffectEventQueries.cjs';
|
|
5
|
+
import { WithEventsUow } from './ports/EffectEventRepository.cjs';
|
|
6
|
+
import '../ports/EventQueries.cjs';
|
|
7
|
+
|
|
8
|
+
/**
 * Optional tuning for the event crawler. Defaults are applied by the
 * implementation: batchSize 100, maxParallelProcessing 1,
 * newEventsIntervalMs 10_000, failedEventsIntervalMs 60_000.
 */
type CreateEffectEventCrawlerOptions = {
    /** Maximum number of events fetched per polling pass. */
    batchSize?: number;
    /** Upper bound on concurrent publishes within one batch. */
    maxParallelProcessing?: number;
    /** Delay (ms) between passes over never-published events. */
    newEventsIntervalMs?: number;
    /** Delay (ms) between retry passes over failed/to-republish events. */
    failedEventsIntervalMs?: number;
};
/**
 * Builds a crawler that periodically reads pending/failed events via
 * `eventQueries` and republishes them on `eventBus`; `withUow` supplies the
 * unit of work used to persist event status transitions. `start()` launches
 * the polling loops; `triggerProcessing()` runs one combined pass on demand.
 */
declare const createEffectEventCrawler: <Event extends GenericEvent<string, unknown, DefaultContext>>({ withUow, eventQueries, eventBus, options, }: {
    withUow: WithEventsUow<Event>;
    eventQueries: EventQueries<Event>;
    eventBus: EventBus<Event>;
    options?: CreateEffectEventCrawlerOptions;
}) => {
    processNewEvents: () => Effect.Effect<void>;
    retryFailedEvents: () => Effect.Effect<void>;
    triggerProcessing: () => Effect.Effect<void>;
    start: () => void;
};
|
|
25
|
+
|
|
26
|
+
export { createEffectEventCrawler };
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { Effect } from 'effect';
|
|
2
|
+
import { GenericEvent, DefaultContext } from '../types.js';
|
|
3
|
+
import { EventBus } from './ports/EffectEventBus.js';
|
|
4
|
+
import { EventQueries } from './ports/EffectEventQueries.js';
|
|
5
|
+
import { WithEventsUow } from './ports/EffectEventRepository.js';
|
|
6
|
+
import '../ports/EventQueries.js';
|
|
7
|
+
|
|
8
|
+
/**
 * Optional tuning for the event crawler. Defaults are applied by the
 * implementation: batchSize 100, maxParallelProcessing 1,
 * newEventsIntervalMs 10_000, failedEventsIntervalMs 60_000.
 */
type CreateEffectEventCrawlerOptions = {
    /** Maximum number of events fetched per polling pass. */
    batchSize?: number;
    /** Upper bound on concurrent publishes within one batch. */
    maxParallelProcessing?: number;
    /** Delay (ms) between passes over never-published events. */
    newEventsIntervalMs?: number;
    /** Delay (ms) between retry passes over failed/to-republish events. */
    failedEventsIntervalMs?: number;
};
/**
 * Builds a crawler that periodically reads pending/failed events via
 * `eventQueries` and republishes them on `eventBus`; `withUow` supplies the
 * unit of work used to persist event status transitions. `start()` launches
 * the polling loops; `triggerProcessing()` runs one combined pass on demand.
 */
declare const createEffectEventCrawler: <Event extends GenericEvent<string, unknown, DefaultContext>>({ withUow, eventQueries, eventBus, options, }: {
    withUow: WithEventsUow<Event>;
    eventQueries: EventQueries<Event>;
    eventBus: EventBus<Event>;
    options?: CreateEffectEventCrawlerOptions;
}) => {
    processNewEvents: () => Effect.Effect<void>;
    retryFailedEvents: () => Effect.Effect<void>;
    triggerProcessing: () => Effect.Effect<void>;
    start: () => void;
};
|
|
25
|
+
|
|
26
|
+
export { createEffectEventCrawler };
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { Cause, Effect, Exit } from "effect";
|
|
2
|
+
/**
 * Creates a polling event crawler: it periodically reads pending events from
 * the event store (`eventQueries`) and republishes them on `eventBus`,
 * persisting status transitions through the `withUow` unit of work.
 *
 * Defaults (applied when the corresponding option is null/undefined):
 * batchSize 100, maxParallelProcessing 1, newEventsIntervalMs 10000,
 * failedEventsIntervalMs 60000.
 *
 * Returns { processNewEvents, retryFailedEvents, triggerProcessing, start }.
 */
const createEffectEventCrawler = ({
  withUow,
  eventQueries,
  eventBus,
  options = {}
}) => {
  const batchSize = options.batchSize ?? 100;
  const maxParallelProcessing = options.maxParallelProcessing ?? 1;
  const newEventsIntervalMs = options.newEventsIntervalMs ?? 1e4;
  const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 6e4;
  // Publishes one page of events, at most `maxParallelProcessing` at a time;
  // individual results are discarded.
  const publishBatch = (batch) => {
    const publishes = batch.map((event) => eventBus.publish(event));
    return Effect.all(publishes, {
      concurrency: maxParallelProcessing,
      discard: true
    });
  };
  // One pass over events that were never published.
  const processNewEvents = () =>
    Effect.gen(function* () {
      const fresh = yield* eventQueries.getEvents({
        filters: { statuses: ["never-published"] },
        limit: batchSize
      });
      if (fresh.length === 0) return;
      // Flag the page as in-process before publishing so a concurrent pass
      // does not pick up the same events.
      yield* withUow(
        (uow) => uow.eventRepository.markEventsAsInProcess(fresh)
      );
      yield* publishBatch(fresh);
    });
  // One pass over events flagged for republication or retry.
  const retryFailedEvents = () =>
    Effect.gen(function* () {
      // Only consider events that occurred at least one minute ago.
      const cutoff = new Date(Date.now() - 6e4);
      const stale = yield* eventQueries.getEvents({
        filters: {
          statuses: ["to-republish", "failed-but-will-retry"],
          occurredAt: { to: cutoff }
        },
        limit: batchSize
      });
      if (stale.length === 0) return;
      yield* publishBatch(stale);
    });
  // Runs both passes concurrently, collecting exits rather than failing fast,
  // then dies with an AggregateError if either pass failed.
  const triggerProcessing = () =>
    Effect.gen(function* () {
      const outcomes = yield* Effect.all(
        [Effect.exit(processNewEvents()), Effect.exit(retryFailedEvents())],
        { concurrency: "unbounded" }
      );
      const failures = [];
      for (const outcome of outcomes) {
        if (Exit.isFailure(outcome)) failures.push(Cause.squash(outcome.cause));
      }
      if (failures.length > 0) {
        yield* Effect.die(
          new AggregateError(failures, "Event processing failed")
        );
      }
    });
  // Starts two independent, self-rescheduling timer loops. The next tick is
  // armed only after the current run settles, so runs of the same kind never
  // overlap; errors are logged and never stop the loop.
  const start = () => {
    const poll = (makeEffect, intervalMs, errorLabel) => {
      const tick = () => {
        setTimeout(async () => {
          try {
            await Effect.runPromise(makeEffect());
          } catch (error) {
            console.error(errorLabel, error);
          } finally {
            tick();
          }
        }, intervalMs);
      };
      tick();
    };
    poll(processNewEvents, newEventsIntervalMs, "Error processing new events:");
    poll(retryFailedEvents, failedEventsIntervalMs, "Error retrying failed events:");
  };
  return {
    processNewEvents,
    retryFailedEvents,
    triggerProcessing,
    start
  };
};
|
|
84
|
+
export {
|
|
85
|
+
createEffectEventCrawler
|
|
86
|
+
};
|
|
87
|
+
//# sourceMappingURL=EffectEventCrawler.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/effect/EffectEventCrawler.ts"],"sourcesContent":["import { Cause, Effect, Exit } from \"effect\";\nimport type { DefaultContext, GenericEvent } from '../types.mjs';\nimport type { EventBus } from './ports/EffectEventBus.mjs';\nimport type { EventQueries } from './ports/EffectEventQueries.mjs';\nimport type { WithEventsUow } from './ports/EffectEventRepository.mjs';\n\ntype CreateEffectEventCrawlerOptions = {\n batchSize?: number;\n maxParallelProcessing?: number;\n newEventsIntervalMs?: number;\n failedEventsIntervalMs?: number;\n};\n\nexport const createEffectEventCrawler = <\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>({\n withUow,\n eventQueries,\n eventBus,\n options = {},\n}: {\n withUow: WithEventsUow<Event>;\n eventQueries: EventQueries<Event>;\n eventBus: EventBus<Event>;\n options?: CreateEffectEventCrawlerOptions;\n}) => {\n const batchSize = options.batchSize ?? 100;\n const maxParallelProcessing = options.maxParallelProcessing ?? 1;\n const newEventsIntervalMs = options.newEventsIntervalMs ?? 10_000;\n const failedEventsIntervalMs = options.failedEventsIntervalMs ?? 
60_000;\n\n const publishEventsInParallel = (events: Event[]): Effect.Effect<void> =>\n Effect.all(\n events.map((event) => eventBus.publish(event)),\n { concurrency: maxParallelProcessing, discard: true },\n );\n\n const processNewEvents = (): Effect.Effect<void> =>\n Effect.gen(function* () {\n const events = yield* eventQueries.getEvents({\n filters: { statuses: [\"never-published\"] },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n yield* withUow((uow) =>\n uow.eventRepository.markEventsAsInProcess(events),\n );\n\n yield* publishEventsInParallel(events);\n });\n\n const retryFailedEvents = (): Effect.Effect<void> =>\n Effect.gen(function* () {\n const oneMinuteAgo = new Date(Date.now() - 60_000);\n\n const events = yield* eventQueries.getEvents({\n filters: {\n statuses: [\"to-republish\", \"failed-but-will-retry\"],\n occurredAt: { to: oneMinuteAgo },\n },\n limit: batchSize,\n });\n\n if (events.length === 0) return;\n\n yield* publishEventsInParallel(events);\n });\n\n const triggerProcessing = (): Effect.Effect<void> =>\n Effect.gen(function* () {\n const results = yield* Effect.all(\n [Effect.exit(processNewEvents()), Effect.exit(retryFailedEvents())],\n { concurrency: \"unbounded\" },\n );\n\n const errors = results\n .filter(Exit.isFailure)\n .map((exit) => Cause.squash(exit.cause));\n\n if (errors.length > 0) {\n yield* Effect.die(\n new AggregateError(errors, \"Event processing failed\"),\n );\n }\n });\n\n const start = () => {\n const scheduleProcessNewEvents = () => {\n setTimeout(async () => {\n try {\n await Effect.runPromise(processNewEvents());\n } catch (error) {\n console.error(\"Error processing new events:\", error);\n } finally {\n scheduleProcessNewEvents();\n }\n }, newEventsIntervalMs);\n };\n\n const scheduleRetryFailedEvents = () => {\n setTimeout(async () => {\n try {\n await Effect.runPromise(retryFailedEvents());\n } catch (error) {\n console.error(\"Error retrying failed events:\", error);\n } finally {\n 
scheduleRetryFailedEvents();\n }\n }, failedEventsIntervalMs);\n };\n\n scheduleProcessNewEvents();\n scheduleRetryFailedEvents();\n };\n\n return {\n processNewEvents,\n retryFailedEvents,\n triggerProcessing,\n start,\n };\n};\n"],"mappings":"AAAA,SAAS,OAAO,QAAQ,YAAY;AAa7B,MAAM,2BAA2B,CAEtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,MAKM;AACJ,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,wBAAwB,QAAQ,yBAAyB;AAC/D,QAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAM,yBAAyB,QAAQ,0BAA0B;AAEjE,QAAM,0BAA0B,CAAC,WAC/B,OAAO;AAAA,IACL,OAAO,IAAI,CAAC,UAAU,SAAS,QAAQ,KAAK,CAAC;AAAA,IAC7C,EAAE,aAAa,uBAAuB,SAAS,KAAK;AAAA,EACtD;AAEF,QAAM,mBAAmB,MACvB,OAAO,IAAI,aAAa;AACtB,UAAM,SAAS,OAAO,aAAa,UAAU;AAAA,MAC3C,SAAS,EAAE,UAAU,CAAC,iBAAiB,EAAE;AAAA,MACzC,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,WAAO;AAAA,MAAQ,CAAC,QACd,IAAI,gBAAgB,sBAAsB,MAAM;AAAA,IAClD;AAEA,WAAO,wBAAwB,MAAM;AAAA,EACvC,CAAC;AAEH,QAAM,oBAAoB,MACxB,OAAO,IAAI,aAAa;AACtB,UAAM,eAAe,IAAI,KAAK,KAAK,IAAI,IAAI,GAAM;AAEjD,UAAM,SAAS,OAAO,aAAa,UAAU;AAAA,MAC3C,SAAS;AAAA,QACP,UAAU,CAAC,gBAAgB,uBAAuB;AAAA,QAClD,YAAY,EAAE,IAAI,aAAa;AAAA,MACjC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,OAAO,WAAW,EAAG;AAEzB,WAAO,wBAAwB,MAAM;AAAA,EACvC,CAAC;AAEH,QAAM,oBAAoB,MACxB,OAAO,IAAI,aAAa;AACtB,UAAM,UAAU,OAAO,OAAO;AAAA,MAC5B,CAAC,OAAO,KAAK,iBAAiB,CAAC,GAAG,OAAO,KAAK,kBAAkB,CAAC,CAAC;AAAA,MAClE,EAAE,aAAa,YAAY;AAAA,IAC7B;AAEA,UAAM,SAAS,QACZ,OAAO,KAAK,SAAS,EACrB,IAAI,CAAC,SAAS,MAAM,OAAO,KAAK,KAAK,CAAC;AAEzC,QAAI,OAAO,SAAS,GAAG;AACrB,aAAO,OAAO;AAAA,QACZ,IAAI,eAAe,QAAQ,yBAAyB;AAAA,MACtD;AAAA,IACF;AAAA,EACF,CAAC;AAEH,QAAM,QAAQ,MAAM;AAClB,UAAM,2BAA2B,MAAM;AACrC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,OAAO,WAAW,iBAAiB,CAAC;AAAA,QAC5C,SAAS,OAAO;AACd,kBAAQ,MAAM,gCAAgC,KAAK;AAAA,QACrD,UAAE;AACA,mCAAyB;AAAA,QAC3B;AAAA,MACF,GAAG,mBAAmB;AAAA,IACxB;AAEA,UAAM,4BAA4B,MAAM;AACtC,iBAAW,YAAY;AACrB,YAAI;AACF,gBAAM,OAAO,WAAW,kBAAkB,CAAC;AAAA,QAC7C,SAAS,OAAO;AACd,kBAAQ,MAAM,iCAAiC,KAAK;AAAA,QACtD,UAAE;AACA,oCAA0B;AAAA,QAC5B;AAAA,MACF,GAAG,sBAAsB;AAAA,IAC3B;AAEA,6BAAyB;AACzB,8BAA0B;AAAA,EAC5
B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"use strict";
// esbuild CommonJS-interop helpers (shorthand aliases for Object primitives).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as an enumerable lazy getter, so
// exports may reference bindings declared later in the module.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copies every own property of `from` onto `to` as a lazy getter, skipping
// `except` and any key `to` already owns; enumerability follows the source
// descriptor (`desc` is the 4th parameter reused as a scratch local).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from)) {
      const shouldCopy = !__hasOwnProp.call(to, key) && key !== except;
      if (shouldCopy) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !desc || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Wraps a module namespace for CommonJS consumption: tags the result with
// `__esModule: true` and mirrors all exports onto it via getters.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
// Export namespace for this module; `__export` attaches the entry as a lazy
// getter, so the function may be defined further down the file.
var EffectInMemoryEventBus_exports = {};
__export(EffectInMemoryEventBus_exports, {
  createEffectInMemoryEventBus: () => createEffectInMemoryEventBus
});
module.exports = __toCommonJS(EffectInMemoryEventBus_exports);
var import_effect = require("effect");
// NOTE(review): these compiled CJS files require sibling modules by their
// `.ts` source paths; plain Node cannot resolve `.ts` at runtime unless the
// sources ship alongside dist or a loader rewrites them — verify the
// bundler's external/resolution configuration.
var import_createNewEvent = require("../createNewEvent.ts");
var import_getSubscriptionIdsToPublish = require("../getSubscriptionIdsToPublish.ts");
var import_EffectSubscriptions = require("./EffectSubscriptions.ts");
|
|
28
|
+
/**
 * In-memory event bus backed by a plain `topic -> subscriptionId -> callback`
 * map, wired to an Effect-based unit of work (`withUow`) that persists the
 * event after every publish attempt.
 *
 * Returns { eventBus, createNewEvent, defineSubscriptions, subscribeAll,
 * subscribeGlobal }.
 */
function createEffectInMemoryEventBus(withUow, options = {}) {
  // An event is quarantined once its publication count reaches `maxRetries`.
  const maxRetries = options.maxRetries ?? 3;
  // `in` check distinguishes the definitions-aware overload from the plain one.
  const eventDefinitions = "eventDefinitions" in options ? options.eventDefinitions : void 0;
  const createNewEvent = eventDefinitions ? (0, import_createNewEvent.makeCreateNewEvent)({
    getNow: options.getNow,
    generateId: options.generateId,
    eventDefinitions
  }) : (0, import_createNewEvent.makeCreateNewEvent)({
    getNow: options.getNow,
    generateId: options.generateId
  });
  // topic -> { subscriptionId -> callback }
  const subscriptions = {};
  // Runs one subscriber callback; a failing exit is mapped to a failure
  // record (instead of propagating) so one subscriber cannot abort the rest.
  // Returns undefined on success.
  const executeCallback = (event, subscriptionId, callback) => import_effect.Effect.map(
    import_effect.Effect.exit(callback(event)),
    (exit) => {
      if (import_effect.Exit.isSuccess(exit)) return void 0;
      const error = import_effect.Cause.squash(exit.cause);
      return {
        subscriptionId,
        errorMessage: error instanceof Error ? error.message : String(error),
        stack: error instanceof Error ? error.stack : void 0
      };
    }
  );
  const eventBus = {
    // Publishes `event` to all matching subscribers, then mutates the event's
    // `publications` / `status` in place and persists it via the unit of work.
    publish: (event) => import_effect.Effect.gen(function* () {
      const publishedAt = /* @__PURE__ */ new Date();
      const topic = event.topic;
      const callbacksBySubscriptionSlug = subscriptions[topic];
      // No subscribers for this topic: record an empty publication and mark
      // the event published.
      if (!callbacksBySubscriptionSlug) {
        event.publications.push({
          publishedAt,
          publishedSubscribers: []
        });
        event.status = "published";
        yield* withUow((uow) => uow.eventRepository.save(event));
        return;
      }
      // Narrows the target subscribers — presumably based on the event's past
      // publications so retries only hit subscribers that previously failed;
      // see getSubscriptionIdsToPublish for the actual rule.
      const subscriptionIdsToPublish = (0, import_getSubscriptionIdsToPublish.getSubscriptionIdsToPublish)(
        event,
        Object.keys(callbacksBySubscriptionSlug)
      );
      // Each entry is undefined on success, or a failure record.
      const failuresOrUndefined = yield* import_effect.Effect.all(
        subscriptionIdsToPublish.map(
          (subscriptionId) => executeCallback(
            event,
            subscriptionId,
            callbacksBySubscriptionSlug[subscriptionId]
          )
        ),
        { concurrency: "unbounded" }
      );
      const failures = failuresOrUndefined.filter(
        (f) => f !== void 0
      );
      // Append this attempt's publication record; spreading `false` is a
      // no-op, so `failures` is only attached when non-empty.
      const publications = [
        ...event.publications,
        {
          publishedAt,
          publishedSubscribers: subscriptionIdsToPublish.map(
            (id) => id
          ),
          ...failures.length > 0 && { failures }
        }
      ];
      if (failures.length === 0) {
        event.status = "published";
      } else {
        // Total publication attempts (not just failed ones) gate quarantine.
        const wasMaxNumberOfErrorsReached = publications.length >= maxRetries;
        event.status = wasMaxNumberOfErrorsReached ? "quarantined" : "failed-but-will-retry";
      }
      event.publications = publications;
      yield* withUow((uow) => uow.eventRepository.save(event));
    }),
    // Registers (or replaces) a callback for `subscriptionId` on `topic`.
    subscribe: ({ topic, subscriptionId, callBack }) => {
      if (!subscriptions[topic]) {
        subscriptions[topic] = {};
      }
      const subscriptionsForTopic = subscriptions[topic];
      if (subscriptionsForTopic) {
        subscriptionsForTopic[subscriptionId] = callBack;
      }
    }
  };
  // Identity helper kept for type inference in the TypeScript source.
  const defineSubscriptions = (subs) => subs;
  const subscribeAll = (subs) => {
    (0, import_EffectSubscriptions.subscribeByTopic)(eventBus, subs);
  };
  const subscribeGlobal = (subs, config) => {
    (0, import_EffectSubscriptions.subscribeGlobalToTopics)(eventBus, subs, config);
  };
  return {
    eventBus,
    createNewEvent,
    defineSubscriptions,
    subscribeAll,
    subscribeGlobal
  };
}
|
|
127
|
+
// Annotate the CommonJS export names for ESM import in node:
// Dead code by design — `0 &&` never executes — but Node's cjs-module-lexer
// parses it statically so `import { createEffectInMemoryEventBus }` works
// from ESM.
0 && (module.exports = {
  createEffectInMemoryEventBus
});
//# sourceMappingURL=EffectInMemoryEventBus.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/effect/EffectInMemoryEventBus.ts"],"sourcesContent":["import { Cause, Effect, Exit } from \"effect\";\nimport {\n type CreateNewEvent,\n type CreateNewEventFromDefinitions,\n makeCreateNewEvent,\n} from \"../createNewEvent.ts\";\nimport type {\n EventDefinitions,\n InferEventsFromDefinitions,\n} from \"../eventDefinitions.ts\";\nimport { getSubscriptionIdsToPublish } from \"../getSubscriptionIdsToPublish.ts\";\nimport type {\n DefaultContext,\n EventId,\n EventPublication,\n GenericEvent,\n SubscriptionId,\n} from \"../types.ts\";\nimport {\n type GlobalSubscriberConfig,\n subscribeByTopic,\n subscribeGlobalToTopics,\n type TopicSubscriptions,\n} from \"./EffectSubscriptions.ts\";\nimport type { EventBus } from \"./ports/EffectEventBus.ts\";\nimport type { WithEventsUow } from \"./ports/EffectEventRepository.ts\";\n\ntype SubscriptionsForTopic = Record<\n string,\n (event: GenericEvent<string, unknown, DefaultContext>) => Effect.Effect<void>\n>;\n\ntype CreateEffectInMemoryEventBusOptions = {\n maxRetries?: number;\n getNow?: () => Date;\n generateId?: () => EventId;\n};\n\ntype CreateEffectInMemoryEventBusFromDefinitionsOptions<\n Definitions extends EventDefinitions,\n> = CreateEffectInMemoryEventBusOptions & {\n eventDefinitions: Definitions;\n};\n\ntype CreateEffectInMemoryEventBusResult<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n> = {\n eventBus: EventBus<Event>;\n createNewEvent: CreateNewEvent<Event>;\n defineSubscriptions: (\n subscriptions: TopicSubscriptions<Event>,\n ) => TopicSubscriptions<Event>;\n subscribeAll: (subscriptions: TopicSubscriptions<Event>) => void;\n subscribeGlobal: (\n subscriptions: TopicSubscriptions<Event>,\n config: GlobalSubscriberConfig<Event>,\n ) => void;\n};\n\nexport function createEffectInMemoryEventBus<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n withUow: WithEventsUow<Event>,\n options?: CreateEffectInMemoryEventBusOptions,\n): 
CreateEffectInMemoryEventBusResult<Event>;\nexport function createEffectInMemoryEventBus<\n Definitions extends EventDefinitions,\n>(\n withUow: WithEventsUow<InferEventsFromDefinitions<Definitions>>,\n options: CreateEffectInMemoryEventBusFromDefinitionsOptions<Definitions>,\n): Omit<\n CreateEffectInMemoryEventBusResult<InferEventsFromDefinitions<Definitions>>,\n \"createNewEvent\"\n> & {\n createNewEvent: CreateNewEventFromDefinitions<Definitions>;\n};\nexport function createEffectInMemoryEventBus<\n Event extends GenericEvent<string, unknown, DefaultContext>,\n>(\n withUow: WithEventsUow<Event>,\n options:\n | CreateEffectInMemoryEventBusOptions\n | CreateEffectInMemoryEventBusFromDefinitionsOptions<EventDefinitions> = {},\n) {\n const maxRetries = options.maxRetries ?? 3;\n const eventDefinitions =\n \"eventDefinitions\" in options ? options.eventDefinitions : undefined;\n const createNewEvent = eventDefinitions\n ? makeCreateNewEvent({\n getNow: options.getNow,\n generateId: options.generateId,\n eventDefinitions,\n })\n : makeCreateNewEvent<Event>({\n getNow: options.getNow,\n generateId: options.generateId,\n });\n const subscriptions: Partial<Record<string, SubscriptionsForTopic>> = {};\n\n const executeCallback = (\n event: Event,\n subscriptionId: string,\n callback: (\n event: GenericEvent<string, unknown, DefaultContext>,\n ) => Effect.Effect<void>,\n ): Effect.Effect<\n { subscriptionId: string; errorMessage: string; stack?: string } | undefined\n > =>\n Effect.map(\n Effect.exit(callback(event)),\n (\n exit,\n ):\n | { subscriptionId: string; errorMessage: string; stack?: string }\n | undefined => {\n if (Exit.isSuccess(exit)) return undefined;\n const error = Cause.squash(exit.cause);\n return {\n subscriptionId,\n errorMessage: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? 
error.stack : undefined,\n };\n },\n );\n\n const eventBus: EventBus<Event> = {\n publish: (event) =>\n Effect.gen(function* () {\n const publishedAt = new Date();\n const topic = event.topic;\n\n const callbacksBySubscriptionSlug = subscriptions[topic];\n\n if (!callbacksBySubscriptionSlug) {\n event.publications.push({\n publishedAt,\n publishedSubscribers: [],\n });\n event.status = \"published\";\n yield* withUow((uow) => uow.eventRepository.save(event));\n return;\n }\n\n const subscriptionIdsToPublish = getSubscriptionIdsToPublish(\n event,\n Object.keys(callbacksBySubscriptionSlug),\n );\n\n const failuresOrUndefined = yield* Effect.all(\n subscriptionIdsToPublish.map((subscriptionId) =>\n executeCallback(\n event,\n subscriptionId,\n callbacksBySubscriptionSlug[subscriptionId],\n ),\n ),\n { concurrency: \"unbounded\" },\n );\n\n const failures = failuresOrUndefined.filter(\n (\n f,\n ): f is {\n subscriptionId: string;\n errorMessage: string;\n stack?: string;\n } => f !== undefined,\n );\n\n const publications: EventPublication[] = [\n ...event.publications,\n {\n publishedAt,\n publishedSubscribers: subscriptionIdsToPublish.map(\n (id) => id as SubscriptionId,\n ),\n ...(failures.length > 0 && { failures }),\n },\n ];\n\n if (failures.length === 0) {\n event.status = \"published\";\n } else {\n const wasMaxNumberOfErrorsReached = publications.length >= maxRetries;\n event.status = wasMaxNumberOfErrorsReached\n ? 
\"quarantined\"\n : \"failed-but-will-retry\";\n }\n\n event.publications = publications;\n\n yield* withUow((uow) => uow.eventRepository.save(event));\n }),\n\n subscribe: ({ topic, subscriptionId, callBack }) => {\n if (!subscriptions[topic]) {\n subscriptions[topic] = {};\n }\n\n const subscriptionsForTopic = subscriptions[topic];\n if (subscriptionsForTopic) {\n subscriptionsForTopic[subscriptionId] = callBack as (\n event: GenericEvent<string, unknown, DefaultContext>,\n ) => Effect.Effect<void>;\n }\n },\n };\n\n const defineSubscriptions = (\n subs: TopicSubscriptions<Event>,\n ): TopicSubscriptions<Event> => subs;\n\n const subscribeAll = (subs: TopicSubscriptions<Event>): void => {\n subscribeByTopic(eventBus, subs);\n };\n\n const subscribeGlobal = (\n subs: TopicSubscriptions<Event>,\n config: GlobalSubscriberConfig<Event>,\n ): void => {\n subscribeGlobalToTopics(eventBus, subs, config);\n };\n\n return {\n eventBus,\n createNewEvent,\n defineSubscriptions,\n subscribeAll,\n subscribeGlobal,\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAoC;AACpC,4BAIO;AAKP,yCAA4C;AAQ5C,iCAKO;AAqDA,SAAS,6BAGd,SACA,UAE2E,CAAC,GAC5E;AACA,QAAM,aAAa,QAAQ,cAAc;AACzC,QAAM,mBACJ,sBAAsB,UAAU,QAAQ,mBAAmB;AAC7D,QAAM,iBAAiB,uBACnB,0CAAmB;AAAA,IACjB,QAAQ,QAAQ;AAAA,IAChB,YAAY,QAAQ;AAAA,IACpB;AAAA,EACF,CAAC,QACD,0CAA0B;AAAA,IACxB,QAAQ,QAAQ;AAAA,IAChB,YAAY,QAAQ;AAAA,EACtB,CAAC;AACL,QAAM,gBAAgE,CAAC;AAEvE,QAAM,kBAAkB,CACtB,OACA,gBACA,aAMA,qBAAO;AAAA,IACL,qBAAO,KAAK,SAAS,KAAK,CAAC;AAAA,IAC3B,CACE,SAGe;AACf,UAAI,mBAAK,UAAU,IAAI,EAAG,QAAO;AACjC,YAAM,QAAQ,oBAAM,OAAO,KAAK,KAAK;AACrC,aAAO;AAAA,QACL;AAAA,QACA,cAAc,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACnE,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAEF,QAAM,WAA4B;AAAA,IAChC,SAAS,CAAC,UACR,qBAAO,IAAI,aAAa;AACtB,YAAM,cAAc,oBAAI,KAAK;AAC7B,YAAM,QAAQ,MAAM;AAEpB,YAAM,8BAA8B,cAAc,KAAK;AAEvD,UAAI,CAAC,6BAA6B;AAChC,cAAM,aAAa,KAAK;AAAA,UACtB;AAAA,UACA,sBAAsB,CAAC;AAAA,QACzB,CAAC;AACD,cAAM,SAAS;AACf,eAAO,QAAQ,CAAC,QAAQ,IAAI,gBAAgB,KAAK,KAAK,CAAC;AACvD;AAAA,MACF;AAEA,YAAM,+BAA2B;AAAA,QAC/B;AAAA,QACA,OAAO,KAAK,2BAA2B;AAAA,MACzC;AAEA,YAAM,sBAAsB,OAAO,qBAAO;AAAA,QACxC,yBAAyB;AAAA,UAAI,CAAC,mBAC5B;AAAA,YACE;AAAA,YACA;AAAA,YACA,4BAA4B,cAAc;AAAA,UAC5C;AAAA,QACF;AAAA,QACA,EAAE,aAAa,YAAY;AAAA,MAC7B;AAEA,YAAM,WAAW,oBAAoB;AAAA,QACnC,CACE,MAKG,MAAM;AAAA,MACb;AAEA,YAAM,eAAmC;AAAA,QACvC,GAAG,MAAM;AAAA,QACT;AAAA,UACE;AAAA,UACA,sBAAsB,yBAAyB;AAAA,YAC7C,CAAC,OAAO;AAAA,UACV;AAAA,UACA,GAAI,SAAS,SAAS,KAAK,EAAE,SAAS;AAAA,QACxC;AAAA,MACF;AAEA,UAAI,SAAS,WAAW,GAAG;AACzB,cAAM,SAAS;AAAA,MACjB,OAAO;AACL,cAAM,8BAA8B,aAAa,UAAU;AAC3D,cAAM,SAAS,8BACX,gBACA;AAAA,MACN;AAEA,YAAM,eAAe;AAErB,aAAO,QAAQ,CAAC,QAAQ,IAAI,gBAAgB,KAAK,KAAK,CAAC;AAAA,IACzD,CAAC;AAAA,IAEH,WAAW,CAAC,EAAE,OAAO,gBAAgB,SAAS,MAAM;AAClD,UAAI,CAAC,cAAc,KAAK,GAAG;AACzB,sBAAc,KAAK,IAAI,CAAC;AAAA,MAC1B;AAEA,YAAM,wBAAwB,cAAc,KAAK;AACjD,UAAI,uBAAuB;AACzB,8BAAsB,cAAc,IAAI;AAAA,MAG1C;AAAA,IACF;AAAA,EACF;AAEA,QAAM,sBAAsB,CAC1B,SAC8B;AAEhC,QAAM,eAAe,CAAC,SAA0C;AAC9D,qDAAiB,UAAU,IAAI;AAA
A,EACjC;AAEA,QAAM,kBAAkB,CACtB,MACA,WACS;AACT,4DAAwB,UAAU,MAAM,MAAM;AAAA,EAChD;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
|