@wataruoguchi/emmett-event-store-kysely 1.1.2 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/README.md +76 -164
  2. package/dist/event-store/consumers.d.ts +23 -0
  3. package/dist/event-store/consumers.d.ts.map +1 -0
  4. package/dist/event-store/consumers.js +155 -0
  5. package/dist/event-store/kysely-event-store.d.ts +42 -0
  6. package/dist/event-store/kysely-event-store.d.ts.map +1 -0
  7. package/dist/event-store/kysely-event-store.js +256 -0
  8. package/dist/index.cjs +584 -0
  9. package/dist/index.d.ts +10 -0
  10. package/dist/index.d.ts.map +1 -0
  11. package/dist/index.js +5 -0
  12. package/dist/projections/runner.d.ts +3 -2
  13. package/dist/projections/runner.d.ts.map +1 -1
  14. package/dist/projections/snapshot-projection.d.ts +120 -0
  15. package/dist/projections/snapshot-projection.d.ts.map +1 -0
  16. package/dist/projections/snapshot-projection.js +125 -0
  17. package/dist/types.d.ts +39 -11
  18. package/dist/types.d.ts.map +1 -1
  19. package/package.json +9 -14
  20. package/dist/event-store/aggregate-stream.d.ts +0 -10
  21. package/dist/event-store/aggregate-stream.d.ts.map +0 -1
  22. package/dist/event-store/aggregate-stream.js +0 -18
  23. package/dist/event-store/append-to-stream.d.ts +0 -7
  24. package/dist/event-store/append-to-stream.d.ts.map +0 -1
  25. package/dist/event-store/append-to-stream.js +0 -143
  26. package/dist/event-store/index.cjs +0 -291
  27. package/dist/event-store/index.d.ts +0 -13
  28. package/dist/event-store/index.d.ts.map +0 -1
  29. package/dist/event-store/index.js +0 -10
  30. package/dist/event-store/read-stream.d.ts +0 -14
  31. package/dist/event-store/read-stream.d.ts.map +0 -1
  32. package/dist/event-store/read-stream.js +0 -88
  33. package/dist/projections/index.cjs +0 -124
  34. package/dist/projections/index.d.ts +0 -4
  35. package/dist/projections/index.d.ts.map +0 -1
  36. package/dist/projections/index.js +0 -2
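The per-module dist entry points from 1.x (event-store/aggregate-stream, append-to-stream, read-stream, and the event-store/ and projections/ index files) are deleted and replaced by a single bundled root entry (dist/index.cjs / dist/index.js, shown below). Assuming the package.json "exports" field (changed here, but its contents are not shown in this diff) points at that root bundle, the 2.0.0 import surface would look like this sketch:

```ts
// Sketch of the 2.0.0 import surface; the bare-specifier import assumes the
// package root resolves to dist/index.js / dist/index.cjs (package.json not shown).
import {
  getKyselyEventStore,
  createKyselyEventStoreConsumer,
  createProjectionRunner,
  createProjectionRegistry,
  createSnapshotProjection,
  createSnapshotProjectionRegistry,
} from "@wataruoguchi/emmett-event-store-kysely";
```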
package/dist/index.cjs ADDED
@@ -0,0 +1,584 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var index_exports = {};
+ __export(index_exports, {
+ createKyselyEventStoreConsumer: () => createKyselyEventStoreConsumer,
+ createProjectionRegistry: () => createProjectionRegistry,
+ createProjectionRunner: () => createProjectionRunner,
+ createSnapshotProjection: () => createSnapshotProjection,
+ createSnapshotProjectionRegistry: () => createSnapshotProjectionRegistry,
+ getKyselyEventStore: () => getKyselyEventStore
+ });
+ module.exports = __toCommonJS(index_exports);
+
+ // src/event-store/consumers.ts
+ function createKyselyEventStoreConsumer({
+ db,
+ logger,
+ consumerName = "default-consumer",
+ batchSize = 100,
+ pollingInterval = 1e3
+ }) {
+ let isRunning = false;
+ let lastProcessedPosition = 0n;
+ const eventHandlers = /* @__PURE__ */ new Map();
+ const allEventHandlers = [];
+ let pollingTimer = null;
+ const processEvents = async () => {
+ if (!isRunning) return;
+ try {
+ const events = await db.selectFrom("messages").select([
+ "message_type",
+ "message_data",
+ "message_metadata",
+ "stream_position",
+ "global_position",
+ "message_id",
+ "stream_id"
+ ]).where("global_position", ">", lastProcessedPosition).where("is_archived", "=", false).orderBy("global_position").limit(batchSize).execute();
+ if (events.length === 0) {
+ return;
+ }
+ for (const row of events) {
+ const event = {
+ kind: "Event",
+ type: row.message_type,
+ data: row.message_data,
+ metadata: {
+ ...row.message_metadata,
+ messageId: row.message_id,
+ streamName: row.stream_id,
+ streamPosition: BigInt(String(row.stream_position)),
+ globalPosition: BigInt(String(row.global_position))
+ }
+ };
+ const typeHandlers = eventHandlers.get(row.message_type) || [];
+ for (const handler of typeHandlers) {
+ try {
+ await handler(event);
+ } catch (error) {
+ logger.error(
+ { error, event },
+ `Error processing event ${row.message_type}`
+ );
+ }
+ }
+ for (const handler of allEventHandlers) {
+ try {
+ await handler(event);
+ } catch (error) {
+ logger.error(
+ { error, event },
+ "Error processing event in all-event handler"
+ );
+ }
+ }
+ const globalPos = row.global_position;
+ if (globalPos !== null) {
+ lastProcessedPosition = BigInt(String(globalPos));
+ }
+ }
+ await updateSubscriptionPosition();
+ } catch (error) {
+ logger.error({ error }, "Error processing events");
+ }
+ };
+ const updateSubscriptionPosition = async () => {
+ try {
+ await db.insertInto("subscriptions").values({
+ consumer_name: consumerName,
+ last_processed_position: lastProcessedPosition,
+ last_processed_transaction_id: lastProcessedPosition,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date()
+ }).onConflict(
+ (oc) => oc.column("consumer_name").doUpdateSet({
+ last_processed_position: lastProcessedPosition,
+ last_processed_transaction_id: lastProcessedPosition,
+ updated_at: /* @__PURE__ */ new Date()
+ })
+ ).execute();
+ } catch (error) {
+ logger.error({ error }, "Error updating subscription position");
+ }
+ };
+ const loadLastProcessedPosition = async () => {
+ try {
+ const subscription = await db.selectFrom("subscriptions").select(["last_processed_position"]).where("consumer_name", "=", consumerName).executeTakeFirst();
+ if (subscription) {
+ lastProcessedPosition = BigInt(
+ String(subscription.last_processed_position)
+ );
+ }
+ } catch (error) {
+ logger.error({ error }, "Error loading last processed position");
+ }
+ };
+ return {
+ async start() {
+ if (isRunning) return;
+ isRunning = true;
+ await loadLastProcessedPosition();
+ logger.info({ consumerName }, "Starting event store consumer");
+ pollingTimer = setInterval(processEvents, pollingInterval);
+ },
+ async stop() {
+ if (!isRunning) return;
+ isRunning = false;
+ if (pollingTimer) {
+ clearInterval(pollingTimer);
+ pollingTimer = null;
+ }
+ logger.info({ consumerName }, "Stopped event store consumer");
+ },
+ subscribe(handler, eventType) {
+ if (!eventHandlers.has(eventType)) {
+ eventHandlers.set(eventType, []);
+ }
+ const handlers = eventHandlers.get(eventType);
+ if (handlers) {
+ handlers.push(
+ handler
+ );
+ }
+ },
+ subscribeToAll(handler) {
+ allEventHandlers.push(handler);
+ }
+ };
+ }
+
+ // src/event-store/kysely-event-store.ts
+ var import_emmett = require("@event-driven-io/emmett");
+
+ // src/types.ts
+ var PostgreSQLEventStoreDefaultStreamVersion = 0n;
+ var DEFAULT_PARTITION = "default_partition";
+ function createProjectionRegistry(...registries) {
+ const combined = {};
+ for (const reg of registries) {
+ for (const [eventType, handlers] of Object.entries(reg)) {
+ combined[eventType] = [...combined[eventType] ?? [], ...handlers];
+ }
+ }
+ return combined;
+ }
+
+ // src/event-store/kysely-event-store.ts
+ var getKyselyEventStore = (deps) => {
+ const { db, logger } = deps;
+ const eventStore = {
+ /**
+ * @description We do not use schema management in this package.
+ */
+ schema: {
+ sql: () => "",
+ print: () => console.log(""),
+ migrate: async () => Promise.resolve()
+ },
+ /**
+ * @description We do not use session management in this package.
+ */
+ async withSession(callback) {
+ return await callback({
+ eventStore,
+ close: () => Promise.resolve()
+ });
+ },
+ async aggregateStream(streamName, options) {
+ const { evolve, initialState, read } = options;
+ logger.info({ streamName, options }, "aggregateStream");
+ const expectedStreamVersion = read?.expectedStreamVersion;
+ const result = await eventStore.readStream(streamName, read);
+ (0, import_emmett.assertExpectedVersionMatchesCurrent)(
+ result.currentStreamVersion,
+ expectedStreamVersion,
+ PostgreSQLEventStoreDefaultStreamVersion
+ );
+ const state = result.events.reduce(
+ (state2, event) => event ? evolve(state2, event) : state2,
+ initialState()
+ );
+ return {
+ state,
+ currentStreamVersion: result.currentStreamVersion,
+ streamExists: result.streamExists
+ };
+ },
+ async readStream(streamName, options) {
+ const partition = getPartition(options);
+ logger.info({ streamName, options, partition }, "readStream");
+ const { currentStreamVersion, streamExists } = await fetchStreamInfo(
+ db,
+ streamName,
+ partition
+ );
+ const range = parseRangeOptions(options);
+ const rows = await buildEventsQuery(
+ { db, logger },
+ streamName,
+ partition,
+ range
+ ).execute();
+ const events = rows.map(
+ (row) => mapRowToEvent(row, streamName)
+ );
+ return {
+ events,
+ currentStreamVersion,
+ streamExists
+ };
+ },
+ async appendToStream(streamName, events, options) {
+ const streamType = getStreamType(options);
+ const partition = getPartition(options);
+ const expected = options?.expectedStreamVersion;
+ logger.info({ streamName, events, options, partition }, "appendToStream");
+ ensureEventsNotEmpty(events, expected);
+ const result = await db.transaction().execute(async (trx) => {
+ const { currentStreamVersion, streamExists } = await fetchStreamInfo(
+ trx,
+ streamName,
+ partition
+ );
+ assertExpectedVersion(expected, currentStreamVersion, streamExists);
+ const basePos = currentStreamVersion;
+ const nextStreamPosition = computeNextStreamPosition(
+ basePos,
+ events.length
+ );
+ await upsertStreamRow(
+ trx,
+ streamName,
+ partition,
+ streamType,
+ basePos,
+ nextStreamPosition,
+ expected,
+ streamExists
+ );
+ const messagesToInsert = buildMessagesToInsert(
+ events,
+ basePos,
+ streamName,
+ partition
+ );
+ const lastEventGlobalPosition = await insertMessagesAndGetLastGlobalPosition(trx, messagesToInsert);
+ return {
+ nextExpectedStreamVersion: nextStreamPosition,
+ lastEventGlobalPosition,
+ createdNewStream: !streamExists
+ };
+ });
+ return result;
+ },
+ close: async () => {
+ await Promise.resolve();
+ }
+ };
+ return eventStore;
+ };
+ function getStreamType(options) {
+ return options?.streamType ?? "unknown";
+ }
+ function getPartition(options) {
+ return options?.partition ?? DEFAULT_PARTITION;
+ }
+ function ensureEventsNotEmpty(events, _expected) {
+ if (events.length === 0) {
+ throw new Error("Cannot append empty events array");
+ }
+ }
+ function assertExpectedVersion(expected, currentPos, streamExistsNow) {
+ if (expected === "STREAM_EXISTS" && !streamExistsNow) {
+ throw new Error("Stream does not exist but expected to exist");
+ }
+ if (expected === "STREAM_DOES_NOT_EXIST" && streamExistsNow) {
+ throw new Error("Stream exists but expected not to exist");
+ }
+ if (typeof expected === "bigint" && expected !== currentPos) {
+ throw new Error(
+ `Expected version ${expected} but current is ${currentPos}`
+ );
+ }
+ }
+ function computeNextStreamPosition(basePos, eventCount) {
+ return basePos + BigInt(eventCount);
+ }
+ async function upsertStreamRow(executor, streamId, partition, streamType, basePos, nextStreamPosition, expected, streamExistsNow) {
+ if (!streamExistsNow) {
+ await executor.insertInto("streams").values({
+ stream_id: streamId,
+ stream_position: nextStreamPosition,
+ partition,
+ stream_type: streamType,
+ stream_metadata: {},
+ is_archived: false
+ }).execute();
+ return;
+ }
+ if (typeof expected === "bigint") {
+ const updatedRow = await executor.updateTable("streams").set({ stream_position: nextStreamPosition }).where("stream_id", "=", streamId).where("partition", "=", partition).where("is_archived", "=", false).where("stream_position", "=", basePos).returning("stream_position").executeTakeFirst();
+ if (!updatedRow) {
+ throw new Error(`Expected version ${expected} but current is ${basePos}`);
+ }
+ return;
+ }
+ await executor.updateTable("streams").set({ stream_position: nextStreamPosition }).where("stream_id", "=", streamId).where("partition", "=", partition).where("is_archived", "=", false).execute();
+ }
+ function buildMessagesToInsert(events, basePos, streamId, partition) {
+ return events.map((e, index) => {
+ const messageId = crypto.randomUUID();
+ const streamPosition = basePos + BigInt(index + 1);
+ const rawMeta = "metadata" in e ? e.metadata : void 0;
+ const eventMeta = rawMeta && typeof rawMeta === "object" ? rawMeta : {};
+ const messageMetadata = {
+ messageId,
+ ...eventMeta
+ };
+ return {
+ stream_id: streamId,
+ stream_position: streamPosition,
+ partition,
+ message_data: e.data,
+ message_metadata: messageMetadata,
+ message_schema_version: index.toString(),
+ message_type: e.type,
+ message_kind: "E",
+ message_id: messageId,
+ is_archived: false,
+ created: /* @__PURE__ */ new Date()
+ };
+ });
+ }
+ async function insertMessagesAndGetLastGlobalPosition(executor, messagesToInsert) {
+ const inserted = await executor.insertInto("messages").values(messagesToInsert).returning("global_position").execute();
+ if (!inserted || Array.isArray(inserted) && inserted.length === 0) {
+ return 0n;
+ }
+ const globalPositions = inserted.map(
+ (r) => BigInt(String(r.global_position))
+ );
+ return globalPositions[globalPositions.length - 1];
+ }
+ function parseRangeOptions(options) {
+ const from = options && typeof options === "object" && "from" in options ? options.from : void 0;
+ const to = options && typeof options === "object" && "to" in options ? options.to : void 0;
+ const maxCount = options && typeof options === "object" && "maxCount" in options ? options.maxCount : void 0;
+ return { from, to, maxCount };
+ }
+ function buildEventsQuery(deps, streamId, partition, range) {
+ const { db } = deps;
+ let q = db.selectFrom("messages").select([
+ "message_type",
+ "message_data",
+ "message_metadata",
+ "stream_position",
+ "global_position",
+ "message_id"
+ ]).where("stream_id", "=", streamId).where("partition", "=", partition).where("is_archived", "=", false).orderBy("stream_position");
+ if (range.from !== void 0) {
+ q = q.where("stream_position", ">=", range.from);
+ }
+ if (range.to !== void 0) {
+ q = q.where("stream_position", "<=", range.to);
+ }
+ if (range.maxCount !== void 0) {
+ q = q.limit(Number(range.maxCount));
+ }
+ return q;
+ }
+ function mapRowToEvent(row, streamId) {
+ const streamPosition = BigInt(String(row.stream_position));
+ const globalPosition = BigInt(String(row.global_position ?? 0));
+ const baseMetadata = row.message_metadata ?? {};
+ return {
+ kind: "Event",
+ type: row.message_type,
+ data: row.message_data,
+ metadata: {
+ ...baseMetadata,
+ messageId: row.message_id,
+ streamId,
+ streamPosition,
+ globalPosition
+ }
+ };
+ }
+ async function fetchStreamInfo(executor, streamId, partition) {
+ const streamRow = await executor.selectFrom("streams").select(["stream_position"]).where("stream_id", "=", streamId).where("partition", "=", partition).where("is_archived", "=", false).executeTakeFirst();
+ const currentStreamVersion = streamRow ? BigInt(String(streamRow.stream_position)) : PostgreSQLEventStoreDefaultStreamVersion;
+ return { currentStreamVersion, streamExists: !!streamRow };
+ }
+
+ // src/projections/runner.ts
+ function createProjectionRunner({ db, readStream, registry }) {
+ async function getOrCreateCheckpoint(subscriptionId, partition) {
+ const existing = await db.selectFrom("subscriptions").select([
+ "subscription_id as subscriptionId",
+ "partition",
+ "last_processed_position as lastProcessedPosition"
+ ]).where("subscription_id", "=", subscriptionId).where("partition", "=", partition).executeTakeFirst();
+ if (existing) {
+ const last = BigInt(
+ String(
+ existing.lastProcessedPosition
+ )
+ );
+ return {
+ subscriptionId,
+ partition,
+ lastProcessedPosition: last
+ };
+ }
+ await db.insertInto("subscriptions").values({
+ subscription_id: subscriptionId,
+ partition,
+ version: 1,
+ last_processed_position: 0n
+ }).onConflict(
+ (oc) => oc.columns(["subscription_id", "partition", "version"]).doUpdateSet({
+ last_processed_position: (eb) => eb.ref("excluded.last_processed_position")
+ })
+ ).execute();
+ return {
+ subscriptionId,
+ partition,
+ lastProcessedPosition: 0n
+ };
+ }
+ async function updateCheckpoint(subscriptionId, partition, lastProcessedPosition) {
+ await db.updateTable("subscriptions").set({ last_processed_position: lastProcessedPosition }).where("subscription_id", "=", subscriptionId).where("partition", "=", partition).execute();
+ }
+ async function projectEvents(subscriptionId, streamId, opts) {
+ const partition = opts?.partition ?? "default_partition";
+ const batchSize = BigInt(opts?.batchSize ?? 500);
+ const checkpoint = await getOrCreateCheckpoint(subscriptionId, partition);
+ const { events, currentStreamVersion } = await readStream(streamId, {
+ from: checkpoint.lastProcessedPosition + 1n,
+ to: checkpoint.lastProcessedPosition + batchSize,
+ partition
+ });
+ for (const ev of events) {
+ if (!ev) continue;
+ const handlers = registry[ev.type] ?? [];
+ if (handlers.length === 0) {
+ await updateCheckpoint(
+ subscriptionId,
+ partition,
+ ev.metadata.streamPosition
+ );
+ continue;
+ }
+ const projectionEvent = {
+ type: ev.type,
+ data: ev.data,
+ metadata: {
+ streamId: ev.metadata.streamId,
+ streamPosition: ev.metadata.streamPosition,
+ globalPosition: ev.metadata.globalPosition
+ }
+ };
+ for (const handler of handlers) {
+ await handler({ db, partition }, projectionEvent);
+ }
+ await updateCheckpoint(
+ subscriptionId,
+ partition,
+ projectionEvent.metadata.streamPosition
+ );
+ }
+ return { processed: events.length, currentStreamVersion };
+ }
+ return { projectEvents };
+ }
+
+ // src/projections/snapshot-projection.ts
+ function createSnapshotProjection(config) {
+ const {
+ tableName,
+ primaryKeys,
+ extractKeys,
+ evolve,
+ initialState,
+ mapToColumns
+ } = config;
+ return async ({ db, partition }, event) => {
+ const keys = extractKeys(event, partition);
+ const existing = await db.selectFrom(tableName).select(["last_stream_position", "snapshot"]).where((eb) => {
+ const conditions = Object.entries(keys).map(
+ ([key, value]) => eb(key, "=", value)
+ );
+ return eb.and(conditions);
+ }).executeTakeFirst();
+ const lastPos = existing?.last_stream_position ? BigInt(String(existing.last_stream_position)) : -1n;
+ if (event.metadata.streamPosition <= lastPos) {
+ return;
+ }
+ const currentState = existing?.snapshot ? existing.snapshot : initialState();
+ const newState = evolve(currentState, event);
+ const rowData = {
+ ...keys,
+ snapshot: JSON.stringify(newState),
+ stream_id: event.metadata.streamId,
+ last_stream_position: event.metadata.streamPosition.toString(),
+ last_global_position: event.metadata.globalPosition.toString()
+ };
+ if (mapToColumns) {
+ const columns = mapToColumns(newState);
+ Object.assign(rowData, columns);
+ }
+ const insertQuery = db.insertInto(tableName).values(rowData);
+ const updateSet = {
+ snapshot: (eb) => eb.ref("excluded.snapshot"),
+ stream_id: (eb) => eb.ref("excluded.stream_id"),
+ last_stream_position: (eb) => eb.ref("excluded.last_stream_position"),
+ last_global_position: (eb) => eb.ref("excluded.last_global_position")
+ };
+ if (mapToColumns) {
+ const columns = mapToColumns(newState);
+ for (const columnName of Object.keys(columns)) {
+ updateSet[columnName] = (eb) => eb.ref(`excluded.${columnName}`);
+ }
+ }
+ await insertQuery.onConflict((oc) => {
+ const conflictBuilder = oc.columns(primaryKeys);
+ return conflictBuilder.doUpdateSet(updateSet);
+ }).execute();
+ };
+ }
+ function createSnapshotProjectionRegistry(eventTypes, config) {
+ const handler = createSnapshotProjection(config);
+ const registry = {};
+ for (const eventType of eventTypes) {
+ registry[eventType] = [handler];
+ }
+ return registry;
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ createKyselyEventStoreConsumer,
+ createProjectionRegistry,
+ createProjectionRunner,
+ createSnapshotProjection,
+ createSnapshotProjectionRegistry,
+ getKyselyEventStore
+ });
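The bundle above defines the whole 2.0.0 runtime: `getKyselyEventStore({ db, logger })` returns an Emmett-style store whose `appendToStream`, `readStream`, and `aggregateStream` read and write `streams` and `messages` rows scoped by `partition`. A minimal usage sketch follows; the Kysely database schema type and the pino-style logger are assumptions (any logger with `info`/`error` taking `(object, message)` would do), and the cart event shape is a stand-in.

```ts
import { Kysely } from "kysely";
import pino from "pino";
import { getKyselyEventStore } from "@wataruoguchi/emmett-event-store-kysely";

// Assumption: `db` is a Kysely instance whose schema includes the
// streams / messages / subscriptions tables queried by the bundle above.
declare const db: Kysely<any>;
const logger = pino();

const eventStore = getKyselyEventStore({ db, logger });

// Append with optimistic concurrency: for a brand-new stream the current
// version is 0n, so expectedStreamVersion: 0n asserts "nothing there yet".
await eventStore.appendToStream(
  "cart-123",
  [{ type: "ItemAdded", data: { sku: "A-1", qty: 2 } }],
  { expectedStreamVersion: 0n, partition: "tenant-a", streamType: "cart" },
);

// Fold the stream back into state, the same reduce aggregateStream runs internally.
type Cart = { count: number };
const { state, currentStreamVersion } = await eventStore.aggregateStream("cart-123", {
  initialState: (): Cart => ({ count: 0 }),
  evolve: (cart: Cart, event: { type: string; data: { qty: number } }): Cart =>
    event.type === "ItemAdded" ? { count: cart.count + event.data.qty } : cart,
  read: { partition: "tenant-a" },
});
logger.info({ state, currentStreamVersion }, "cart rebuilt");
```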
package/dist/index.d.ts ADDED
@@ -0,0 +1,10 @@
+ export { createKyselyEventStoreConsumer } from "./event-store/consumers.js";
+ export type { KyselyEventStoreConsumer, KyselyEventStoreConsumerConfig, } from "./event-store/consumers.js";
+ export { getKyselyEventStore } from "./event-store/kysely-event-store.js";
+ export type { KyselyEventStore, KyselyEventStoreOptions, ProjectionReadStreamOptions, } from "./event-store/kysely-event-store.js";
+ export { createProjectionRunner } from "./projections/runner.js";
+ export { createSnapshotProjection, createSnapshotProjectionRegistry, } from "./projections/snapshot-projection.js";
+ export type { SnapshotProjectionConfig } from "./projections/snapshot-projection.js";
+ export { createProjectionRegistry } from "./types.js";
+ export type { DatabaseExecutor, Dependencies, ExtendedOptions, ProjectionContext, ProjectionEvent, ProjectionEventMetadata, ProjectionHandler, ProjectionRegistry, } from "./types.js";
+ //# sourceMappingURL=index.d.ts.map
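For pushing events to subscribers rather than pulling state, `createKyselyEventStoreConsumer` (typed above) polls `messages` past the stored `last_processed_position`, fans each row out to per-type and catch-all handlers, and checkpoints into `subscriptions` under `consumerName`. A sketch, reusing the `db` and `logger` assumptions from the previous example:

```ts
import { createKyselyEventStoreConsumer } from "@wataruoguchi/emmett-event-store-kysely";

const consumer = createKyselyEventStoreConsumer({
  db,
  logger,
  consumerName: "read-model-sync", // defaults to "default-consumer"
  batchSize: 100,                  // rows fetched per poll
  pollingInterval: 1000,           // ms between polls (default 1000)
});

// Note the argument order in the bundle: handler first, then the event type.
consumer.subscribe(async (event) => {
  logger.info({ data: event.data }, "ItemAdded handled");
}, "ItemAdded");

// Catch-all handlers run for every event, after the per-type handlers.
consumer.subscribeToAll(async (event) => {
  logger.info({ type: event.type }, "event observed");
});

await consumer.start(); // loads the checkpoint, then polls on an interval
// ...
await consumer.stop();
```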
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,8BAA8B,EAAE,MAAM,4BAA4B,CAAC;AAC5E,YAAY,EACV,wBAAwB,EACxB,8BAA8B,GAC/B,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,mBAAmB,EAAE,MAAM,qCAAqC,CAAC;AAC1E,YAAY,EACV,gBAAgB,EAChB,uBAAuB,EACvB,2BAA2B,GAC5B,MAAM,qCAAqC,CAAC;AAC7C,OAAO,EAAE,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EACL,wBAAwB,EACxB,gCAAgC,GACjC,MAAM,sCAAsC,CAAC;AAC9C,YAAY,EAAE,wBAAwB,EAAE,MAAM,sCAAsC,CAAC;AACrF,OAAO,EAAE,wBAAwB,EAAE,MAAM,YAAY,CAAC;AACtD,YAAY,EACV,gBAAgB,EAChB,YAAY,EACZ,eAAe,EACf,iBAAiB,EACjB,eAAe,EACf,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,GACnB,MAAM,YAAY,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,5 @@
+ export { createKyselyEventStoreConsumer } from "./event-store/consumers.js";
+ export { getKyselyEventStore } from "./event-store/kysely-event-store.js";
+ export { createProjectionRunner } from "./projections/runner.js";
+ export { createSnapshotProjection, createSnapshotProjectionRegistry, } from "./projections/snapshot-projection.js";
+ export { createProjectionRegistry } from "./types.js";
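The projection side pairs these exports: `createSnapshotProjectionRegistry(eventTypes, config)` builds one handler per event type that upserts a JSON snapshot row, skipping stream positions already applied, and `createProjectionRunner({ db, readStream, registry })` replays a stream through the registry while checkpointing per subscription and partition in `subscriptions`. In the sketch below, the `cart_snapshots` table and its `tenant`/`cart_id`/`item_count` columns are hypothetical; `db` and `eventStore` are the ones assumed in the earlier sketches. In 2.0.0 the runner's `readStream` dependency is typed as `KyselyEventStore["readStream"]` (see the runner.d.ts change below), so the store's method can be passed straight through.

```ts
import {
  createProjectionRunner,
  createSnapshotProjectionRegistry,
} from "@wataruoguchi/emmett-event-store-kysely";

type Cart = { count: number };

// Hypothetical read-model table: it must carry the primary-key columns plus
// snapshot, stream_id, last_stream_position, last_global_position, and any
// columns produced by mapToColumns (item_count here).
const registry = createSnapshotProjectionRegistry(["ItemAdded"], {
  tableName: "cart_snapshots",
  primaryKeys: ["tenant", "cart_id"],
  extractKeys: (event, partition) => ({
    tenant: partition,
    cart_id: event.metadata.streamId,
  }),
  initialState: (): Cart => ({ count: 0 }),
  evolve: (cart: Cart, _event): Cart => ({ count: cart.count + 1 }),
  mapToColumns: (cart: Cart) => ({ item_count: cart.count }),
});

const runner = createProjectionRunner({
  db,
  readStream: eventStore.readStream, // KyselyEventStore["readStream"] per the new runner.d.ts
  registry,
});

// Processes up to batchSize events past this subscription's stored checkpoint.
await runner.projectEvents("cart-snapshots", "cart-123", {
  partition: "tenant-a",
  batchSize: 500,
});
```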
package/dist/projections/runner.d.ts
@@ -1,4 +1,5 @@
- import type { DatabaseExecutor, ProjectionRegistry, ReadStream } from "../types.js";
+ import type { KyselyEventStore } from "../event-store/kysely-event-store.js";
+ import type { DatabaseExecutor, ProjectionRegistry } from "../types.js";
  export type SubscriptionCheckpoint = {
  subscriptionId: string;
  partition: string;
@@ -6,7 +7,7 @@ export type SubscriptionCheckpoint = {
  };
  export type ProjectionRunnerDeps<T extends DatabaseExecutor = DatabaseExecutor> = {
  db: T;
- readStream: ReadStream;
+ readStream: KyselyEventStore["readStream"];
  registry: ProjectionRegistry<T>;
  };
  export declare function createProjectionRunner<T extends DatabaseExecutor = DatabaseExecutor>({ db, readStream, registry }: ProjectionRunnerDeps<T>): {
package/dist/projections/runner.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"runner.d.ts","sourceRoot":"","sources":["../../src/projections/runner.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EACV,gBAAgB,EAEhB,kBAAkB,EAClB,UAAU,EACX,MAAM,aAAa,CAAC;AAErB,MAAM,MAAM,sBAAsB,GAAG;IACnC,cAAc,EAAE,MAAM,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,qBAAqB,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,oBAAoB,CAC9B,CAAC,SAAS,gBAAgB,GAAG,gBAAgB,IAC3C;IACF,EAAE,EAAE,CAAC,CAAC;IACN,UAAU,EAAE,UAAU,CAAC;IACvB,QAAQ,EAAE,kBAAkB,CAAC,CAAC,CAAC,CAAC;CACjC,CAAC;AAEF,wBAAgB,sBAAsB,CACpC,CAAC,SAAS,gBAAgB,GAAG,gBAAgB,EAC7C,EAAE,EAAE,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,oBAAoB,CAAC,CAAC,CAAC;oCA4EnC,MAAM,YACZ,MAAM,SACT;QAAE,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE;;;;EAgDpD"}
+ {"version":3,"file":"runner.d.ts","sourceRoot":"","sources":["../../src/projections/runner.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,sCAAsC,CAAC;AAC7E,OAAO,KAAK,EACV,gBAAgB,EAEhB,kBAAkB,EACnB,MAAM,aAAa,CAAC;AAErB,MAAM,MAAM,sBAAsB,GAAG;IACnC,cAAc,EAAE,MAAM,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,qBAAqB,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,oBAAoB,CAC9B,CAAC,SAAS,gBAAgB,GAAG,gBAAgB,IAC3C;IACF,EAAE,EAAE,CAAC,CAAC;IACN,UAAU,EAAE,gBAAgB,CAAC,YAAY,CAAC,CAAC;IAC3C,QAAQ,EAAE,kBAAkB,CAAC,CAAC,CAAC,CAAC;CACjC,CAAC;AAEF,wBAAgB,sBAAsB,CACpC,CAAC,SAAS,gBAAgB,GAAG,gBAAgB,EAC7C,EAAE,EAAE,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,oBAAoB,CAAC,CAAC,CAAC;oCA4EnC,MAAM,YACZ,MAAM,SACT;QAAE,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE;;;;EAiDpD"}