@wataruoguchi/emmett-event-store-kysely 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +127 -0
- package/dist/db-schema.d.ts +34 -0
- package/dist/db-schema.d.ts.map +1 -0
- package/dist/db-schema.js +3 -0
- package/dist/event-store/consumers.d.ts +23 -0
- package/dist/event-store/consumers.d.ts.map +1 -0
- package/dist/event-store/consumers.js +155 -0
- package/dist/event-store/kysely-event-store.d.ts +42 -0
- package/dist/event-store/kysely-event-store.d.ts.map +1 -0
- package/dist/event-store/kysely-event-store.js +265 -0
- package/dist/index.cjs +600 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/projections/runner.d.ts +22 -0
- package/dist/projections/runner.d.ts.map +1 -0
- package/dist/projections/runner.js +82 -0
- package/dist/projections/snapshot-projection.d.ts +120 -0
- package/dist/projections/snapshot-projection.d.ts.map +1 -0
- package/dist/projections/snapshot-projection.js +135 -0
- package/dist/types.d.ts +77 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +15 -0
- package/package.json +72 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,600 @@
|
|
|
1
|
+
"use strict";
// --- esbuild CommonJS interop banner (generated) ---
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install lazy, enumerable getters on `target` for each named export in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` onto `to` (skipping `except`),
// preserving each property's enumerability via its descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Mark the export object as an ES module for interop, then copy exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
// Public API surface of the bundle, exposed through lazy getters below.
var index_exports = {};
__export(index_exports, {
  createKyselyEventStoreConsumer: () => createKyselyEventStoreConsumer,
  createProjectionRegistry: () => createProjectionRegistry,
  createProjectionRunner: () => createProjectionRunner,
  createSnapshotProjection: () => createSnapshotProjection,
  createSnapshotProjectionRegistry: () => createSnapshotProjectionRegistry,
  getKyselyEventStore: () => getKyselyEventStore
});
module.exports = __toCommonJS(index_exports);
|
|
31
|
+
|
|
32
|
+
// src/event-store/consumers.ts
|
|
33
|
+
/**
 * Polling consumer over the `messages` table.
 *
 * Keeps a per-consumer checkpoint (`lastProcessedPosition`) that is persisted
 * to the `subscriptions` table after each batch, so a restarted consumer
 * resumes where it left off. Handler errors are logged and swallowed — a
 * failing handler does NOT stop the batch or prevent the checkpoint advance.
 *
 * NOTE(review): processing is driven by setInterval; if one batch takes longer
 * than `pollingInterval`, a second invocation can start while the first is
 * still running — confirm this overlap is acceptable for the handlers in use.
 */
function createKyselyEventStoreConsumer({
  db,
  logger,
  consumerName = "default-consumer",
  batchSize = 100,
  pollingInterval = 1e3
}) {
  let isRunning = false;
  // Checkpoint: highest global_position already dispatched to handlers.
  let lastProcessedPosition = 0n;
  // eventType -> handlers registered via subscribe()
  const eventHandlers = /* @__PURE__ */ new Map();
  // Handlers registered via subscribeToAll(); invoked for every event.
  const allEventHandlers = [];
  let pollingTimer = null;
  // Fetch the next batch past the checkpoint and dispatch to handlers.
  const processEvents = async () => {
    if (!isRunning) return;
    try {
      const events = await db.selectFrom("messages").select([
        "message_type",
        "message_data",
        "message_metadata",
        "stream_position",
        "global_position",
        "message_id",
        "stream_id"
      ]).where("global_position", ">", lastProcessedPosition).where("is_archived", "=", false).orderBy("global_position").limit(batchSize).execute();
      if (events.length === 0) {
        return;
      }
      for (const row of events) {
        // Reshape the row into an Emmett-style event envelope.
        const event = {
          kind: "Event",
          type: row.message_type,
          data: row.message_data,
          metadata: {
            ...row.message_metadata,
            messageId: row.message_id,
            streamName: row.stream_id,
            streamPosition: BigInt(String(row.stream_position)),
            globalPosition: BigInt(String(row.global_position))
          }
        };
        // Type-specific handlers first...
        const typeHandlers = eventHandlers.get(row.message_type) || [];
        for (const handler of typeHandlers) {
          try {
            await handler(event);
          } catch (error) {
            logger.error(
              { error, event },
              `Error processing event ${row.message_type}`
            );
          }
        }
        // ...then catch-all handlers.
        for (const handler of allEventHandlers) {
          try {
            await handler(event);
          } catch (error) {
            logger.error(
              { error, event },
              "Error processing event in all-event handler"
            );
          }
        }
        // Advance the in-memory checkpoint even when a handler failed above
        // (at-most-once delivery per handler for failed events).
        const globalPos = row.global_position;
        if (globalPos !== null) {
          lastProcessedPosition = BigInt(String(globalPos));
        }
      }
      // Persist the checkpoint once per batch, not per event.
      await updateSubscriptionPosition();
    } catch (error) {
      logger.error({ error }, "Error processing events");
    }
  };
  // Upsert this consumer's checkpoint row in `subscriptions`.
  // NOTE(review): last_processed_transaction_id is written with the same
  // value as last_processed_position — confirm that is intentional.
  const updateSubscriptionPosition = async () => {
    try {
      await db.insertInto("subscriptions").values({
        consumer_name: consumerName,
        last_processed_position: lastProcessedPosition,
        last_processed_transaction_id: lastProcessedPosition,
        created_at: /* @__PURE__ */ new Date(),
        updated_at: /* @__PURE__ */ new Date()
      }).onConflict(
        (oc) => oc.column("consumer_name").doUpdateSet({
          last_processed_position: lastProcessedPosition,
          last_processed_transaction_id: lastProcessedPosition,
          updated_at: /* @__PURE__ */ new Date()
        })
      ).execute();
    } catch (error) {
      logger.error({ error }, "Error updating subscription position");
    }
  };
  // Restore the checkpoint from `subscriptions` (stays at 0n when absent).
  const loadLastProcessedPosition = async () => {
    try {
      const subscription = await db.selectFrom("subscriptions").select(["last_processed_position"]).where("consumer_name", "=", consumerName).executeTakeFirst();
      if (subscription) {
        lastProcessedPosition = BigInt(
          String(subscription.last_processed_position)
        );
      }
    } catch (error) {
      logger.error({ error }, "Error loading last processed position");
    }
  };
  return {
    // Begin polling; no-op when already running.
    async start() {
      if (isRunning) return;
      isRunning = true;
      await loadLastProcessedPosition();
      logger.info({ consumerName }, "Starting event store consumer");
      pollingTimer = setInterval(processEvents, pollingInterval);
    },
    // Stop polling; an in-flight processEvents call finishes on its own.
    async stop() {
      if (!isRunning) return;
      isRunning = false;
      if (pollingTimer) {
        clearInterval(pollingTimer);
        pollingTimer = null;
      }
      logger.info({ consumerName }, "Stopped event store consumer");
    },
    // Register a handler for a single event type.
    subscribe(handler, eventType) {
      if (!eventHandlers.has(eventType)) {
        eventHandlers.set(eventType, []);
      }
      const handlers = eventHandlers.get(eventType);
      if (handlers) {
        handlers.push(
          handler
        );
      }
    },
    // Register a handler invoked for every event, regardless of type.
    subscribeToAll(handler) {
      allEventHandlers.push(handler);
    }
  };
}
|
|
168
|
+
|
|
169
|
+
// src/event-store/kysely-event-store.ts
var import_emmett = require("@event-driven-io/emmett");

// src/types.ts
// Version reported for streams that do not exist yet (0n), used as the
// default when asserting expected versions.
var PostgreSQLEventStoreDefaultStreamVersion = 0n;
// Partition used whenever the caller does not supply one.
var DEFAULT_PARTITION = "default_partition";
|
|
175
|
+
/**
 * Merge several projection registries into a single registry.
 * Handlers registered for the same event type are concatenated in the
 * order the registries were supplied.
 */
function createProjectionRegistry(...registries) {
  return registries.reduce((combined, registry) => {
    for (const [eventType, handlers] of Object.entries(registry)) {
      const existing = combined[eventType] ?? [];
      combined[eventType] = existing.concat(handlers);
    }
    return combined;
  }, {});
}
|
|
184
|
+
|
|
185
|
+
// src/event-store/kysely-event-store.ts
|
|
186
|
+
/**
 * Kysely-backed Emmett event store over the `streams` and `messages` tables.
 * Reads, folds, and appends events; appends run with optimistic concurrency
 * and inside a transaction (either an ambient one when `inTransaction` is set,
 * or a fresh one per append).
 */
var getKyselyEventStore = (deps) => {
  const { db, logger, inTransaction = false } = deps;
  const eventStore = {
    /**
     * @description We do not use schema management in this package.
     */
    schema: {
      sql: () => "",
      print: () => console.log(""),
      migrate: async () => Promise.resolve()
    },
    /**
     * Provide a session-bound event store using a Kysely transaction.
     * All operations within the callback will share the same DB transaction.
     */
    async withSession(callback) {
      return await db.transaction().execute(async (trx) => {
        // Recursive construction: the session store runs on `trx` and flags
        // inTransaction so appends do not open a nested transaction.
        const sessionEventStore = getKyselyEventStore({
          db: trx,
          logger,
          inTransaction: true
        });
        return await callback({
          eventStore: sessionEventStore,
          close: () => Promise.resolve()
        });
      });
    },
    // Fold the stream's events into a state value via `evolve`, starting
    // from `initialState()`; null/undefined event slots are skipped.
    async aggregateStream(streamName, options) {
      const { evolve, initialState, read } = options;
      logger.debug?.({ streamName, options }, "aggregateStream");
      const expectedStreamVersion = read?.expectedStreamVersion;
      const result = await eventStore.readStream(streamName, read);
      (0, import_emmett.assertExpectedVersionMatchesCurrent)(
        result.currentStreamVersion,
        expectedStreamVersion,
        PostgreSQLEventStoreDefaultStreamVersion
      );
      const state = result.events.reduce(
        (state2, event) => event ? evolve(state2, event) : state2,
        initialState()
      );
      return {
        state,
        currentStreamVersion: result.currentStreamVersion,
        streamExists: result.streamExists
      };
    },
    // Read a (possibly range-bounded) slice of a stream's events.
    async readStream(streamName, options) {
      const partition = getPartition(options);
      logger.debug?.({ streamName, options, partition }, "readStream");
      const { currentStreamVersion, streamExists } = await fetchStreamInfo(
        db,
        streamName,
        partition
      );
      const range = parseRangeOptions(options);
      const rows = await buildEventsQuery(
        { db, logger },
        streamName,
        partition,
        range
      ).execute();
      const events = rows.map(
        (row) => mapRowToEvent(row, streamName)
      );
      return {
        events,
        currentStreamVersion,
        streamExists
      };
    },
    // Append events to a stream with optimistic concurrency on
    // options.expectedStreamVersion. Rejects empty batches.
    async appendToStream(streamName, events, options) {
      const streamType = getStreamType(options);
      const partition = getPartition(options);
      const expected = options?.expectedStreamVersion;
      logger.debug?.(
        { streamName, events, options, partition },
        "appendToStream"
      );
      ensureEventsNotEmpty(events, expected);
      // The append body; runs either on the ambient executor or a new trx.
      const executeOn = async (executor) => {
        const { currentStreamVersion, streamExists } = await fetchStreamInfo(
          executor,
          streamName,
          partition
        );
        assertExpectedVersion(expected, currentStreamVersion, streamExists);
        const basePos = currentStreamVersion;
        const nextStreamPosition = computeNextStreamPosition(
          basePos,
          events.length
        );
        // Create/advance the stream row first (acts as the concurrency gate),
        // then insert the message rows.
        await upsertStreamRow(
          executor,
          streamName,
          partition,
          streamType,
          basePos,
          nextStreamPosition,
          expected,
          streamExists
        );
        const messagesToInsert = buildMessagesToInsert(
          events,
          basePos,
          streamName,
          partition
        );
        const lastEventGlobalPosition = await insertMessagesAndGetLastGlobalPosition(
          executor,
          messagesToInsert
        );
        return {
          nextExpectedStreamVersion: nextStreamPosition,
          lastEventGlobalPosition,
          createdNewStream: !streamExists
        };
      };
      if (inTransaction) {
        return executeOn(db);
      }
      return db.transaction().execute(async (trx) => executeOn(trx));
    },
    close: async () => {
      await Promise.resolve();
    }
  };
  return eventStore;
};
|
|
316
|
+
/**
 * Resolve the stream type for an append; falls back to "unknown" when the
 * caller did not provide one.
 */
function getStreamType(options) {
  const declaredType = options?.streamType;
  return declaredType ?? "unknown";
}
|
|
319
|
+
/**
 * Resolve the partition for an operation; falls back to the module-level
 * DEFAULT_PARTITION when none is provided.
 */
function getPartition(options) {
  const partition = options?.partition;
  if (partition === undefined || partition === null) {
    return DEFAULT_PARTITION;
  }
  return partition;
}
|
|
322
|
+
/**
 * Guard: appending an empty batch is a programming error.
 * The `_expected` parameter is accepted for signature compatibility but unused.
 * @throws {Error} when `events` contains no elements.
 */
function ensureEventsNotEmpty(events, _expected) {
  const hasEvents = events.length > 0;
  if (!hasEvents) {
    throw new Error("Cannot append empty events array");
  }
}
|
|
327
|
+
/**
 * Enforce the caller's optimistic-concurrency expectation before an append.
 * Accepts the sentinels "STREAM_EXISTS" / "STREAM_DOES_NOT_EXIST" or an
 * explicit bigint version; any other value (e.g. undefined) passes.
 * @throws {Error} when the expectation does not match reality.
 */
function assertExpectedVersion(expected, currentPos, streamExistsNow) {
  switch (expected) {
    case "STREAM_EXISTS":
      if (!streamExistsNow) {
        throw new Error("Stream does not exist but expected to exist");
      }
      return;
    case "STREAM_DOES_NOT_EXIST":
      if (streamExistsNow) {
        throw new Error("Stream exists but expected not to exist");
      }
      return;
    default:
      if (typeof expected === "bigint" && expected !== currentPos) {
        throw new Error(
          `Expected version ${expected} but current is ${currentPos}`
        );
      }
  }
}
|
|
340
|
+
/**
 * Stream position after appending `eventCount` events starting at `basePos`.
 */
function computeNextStreamPosition(basePos, eventCount) {
  const delta = BigInt(eventCount);
  return basePos + delta;
}
|
|
343
|
+
/**
 * Create or advance the `streams` row for an append.
 * - New stream: insert the row already at its post-append position.
 * - Existing stream with a bigint expectation: guarded update (optimistic
 *   lock) that only succeeds while the stored position still equals `basePos`.
 * - Existing stream without a numeric expectation: unconditional bump.
 * @throws {Error} when the guarded update matches no row (concurrent append).
 */
async function upsertStreamRow(executor, streamId, partition, streamType, basePos, nextStreamPosition, expected, streamExistsNow) {
  if (!streamExistsNow) {
    await executor
      .insertInto("streams")
      .values({
        stream_id: streamId,
        stream_position: nextStreamPosition,
        partition,
        stream_type: streamType,
        stream_metadata: {},
        is_archived: false
      })
      .execute();
    return;
  }
  const hasNumericExpectation = typeof expected === "bigint";
  if (!hasNumericExpectation) {
    await executor
      .updateTable("streams")
      .set({ stream_position: nextStreamPosition })
      .where("stream_id", "=", streamId)
      .where("partition", "=", partition)
      .where("is_archived", "=", false)
      .execute();
    return;
  }
  // Optimistic concurrency: only bump if nobody moved the stream since we read it.
  const guardedUpdate = await executor
    .updateTable("streams")
    .set({ stream_position: nextStreamPosition })
    .where("stream_id", "=", streamId)
    .where("partition", "=", partition)
    .where("is_archived", "=", false)
    .where("stream_position", "=", basePos)
    .returning("stream_position")
    .executeTakeFirst();
  if (!guardedUpdate) {
    throw new Error(`Expected version ${expected} but current is ${basePos}`);
  }
}
|
|
364
|
+
function buildMessagesToInsert(events, basePos, streamId, partition) {
|
|
365
|
+
return events.map((e, index) => {
|
|
366
|
+
const messageId = crypto.randomUUID();
|
|
367
|
+
const streamPosition = basePos + BigInt(index + 1);
|
|
368
|
+
const rawMeta = "metadata" in e ? e.metadata : void 0;
|
|
369
|
+
const eventMeta = rawMeta && typeof rawMeta === "object" ? rawMeta : {};
|
|
370
|
+
const messageMetadata = {
|
|
371
|
+
...eventMeta
|
|
372
|
+
};
|
|
373
|
+
return {
|
|
374
|
+
stream_id: streamId,
|
|
375
|
+
stream_position: streamPosition,
|
|
376
|
+
partition,
|
|
377
|
+
message_data: e.data,
|
|
378
|
+
message_metadata: messageMetadata,
|
|
379
|
+
message_schema_version: index.toString(),
|
|
380
|
+
message_type: e.type,
|
|
381
|
+
message_kind: "E",
|
|
382
|
+
message_id: messageId,
|
|
383
|
+
is_archived: false,
|
|
384
|
+
created: /* @__PURE__ */ new Date()
|
|
385
|
+
};
|
|
386
|
+
});
|
|
387
|
+
}
|
|
388
|
+
/**
 * Insert message rows and return the global position of the last inserted
 * row, or 0n when the RETURNING clause yields nothing.
 */
async function insertMessagesAndGetLastGlobalPosition(executor, messagesToInsert) {
  const returned = await executor
    .insertInto("messages")
    .values(messagesToInsert)
    .returning("global_position")
    .execute();
  const isEmpty = !returned || (Array.isArray(returned) && returned.length === 0);
  if (isEmpty) {
    return 0n;
  }
  const lastRow = returned[returned.length - 1];
  return BigInt(String(lastRow.global_position));
}
|
|
398
|
+
/**
 * Extract the optional from/to/maxCount bounds from a loosely-typed
 * options bag; absent keys come back as undefined.
 */
function parseRangeOptions(options) {
  const pick = (key) =>
    options && typeof options === "object" && key in options ? options[key] : void 0;
  return {
    from: pick("from"),
    to: pick("to"),
    maxCount: pick("maxCount")
  };
}
|
|
404
|
+
/**
 * Build the SELECT for a stream's events (non-archived, ordered by stream
 * position), applying any from/to/maxCount range bounds that were supplied.
 * Returns the query builder unexecuted so callers decide when to run it.
 */
function buildEventsQuery(deps, streamId, partition, range) {
  const { db } = deps;
  const { from, to, maxCount } = range;
  let query = db
    .selectFrom("messages")
    .select([
      "message_type",
      "message_data",
      "message_metadata",
      "stream_position",
      "global_position",
      "message_id"
    ])
    .where("stream_id", "=", streamId)
    .where("partition", "=", partition)
    .where("is_archived", "=", false)
    .orderBy("stream_position");
  if (from !== void 0) {
    query = query.where("stream_position", ">=", from);
  }
  if (to !== void 0) {
    query = query.where("stream_position", "<=", to);
  }
  if (maxCount !== void 0) {
    query = query.limit(Number(maxCount));
  }
  return query;
}
|
|
425
|
+
/**
 * Convert a raw `messages` row into an Emmett-shaped event envelope.
 * A missing global position defaults to 0n; the stored metadata is merged
 * with addressing fields (messageId, streamId, positions), which win on
 * key collisions.
 */
function mapRowToEvent(row, streamId) {
  const metadata = {
    ...(row.message_metadata ?? {}),
    messageId: row.message_id,
    streamId,
    streamPosition: BigInt(String(row.stream_position)),
    globalPosition: BigInt(String(row.global_position ?? 0))
  };
  return {
    kind: "Event",
    type: row.message_type,
    data: row.message_data,
    metadata
  };
}
|
|
442
|
+
/**
 * Look up a stream's current position in the `streams` table.
 * Returns the default version (0n) with streamExists=false when no
 * non-archived row is found for (streamId, partition).
 */
async function fetchStreamInfo(executor, streamId, partition) {
  const row = await executor
    .selectFrom("streams")
    .select(["stream_position"])
    .where("stream_id", "=", streamId)
    .where("partition", "=", partition)
    .where("is_archived", "=", false)
    .executeTakeFirst();
  if (!row) {
    return {
      currentStreamVersion: PostgreSQLEventStoreDefaultStreamVersion,
      streamExists: false
    };
  }
  return {
    currentStreamVersion: BigInt(String(row.stream_position)),
    streamExists: true
  };
}
|
|
447
|
+
|
|
448
|
+
// src/projections/runner.ts
|
|
449
|
+
/**
 * Checkpointed projection runner: reads a stream in batches past the stored
 * checkpoint and feeds each event to the handlers registered for its type.
 */
function createProjectionRunner({ db, readStream, registry }) {
  // Load the (subscription_id, partition) checkpoint, creating it at 0n if absent.
  async function getOrCreateCheckpoint(subscriptionId, partition) {
    const existing = await db.selectFrom("subscriptions").select([
      "subscription_id as subscriptionId",
      "partition",
      "last_processed_position as lastProcessedPosition"
    ]).where("subscription_id", "=", subscriptionId).where("partition", "=", partition).executeTakeFirst();
    if (existing) {
      const last = BigInt(
        String(
          existing.lastProcessedPosition
        )
      );
      return {
        subscriptionId,
        partition,
        lastProcessedPosition: last
      };
    }
    // No row yet: insert one at position 0n. The ON CONFLICT clause makes a
    // concurrent first-insert race harmless.
    await db.insertInto("subscriptions").values({
      subscription_id: subscriptionId,
      partition,
      version: 1,
      last_processed_position: 0n
    }).onConflict(
      (oc) => oc.columns(["subscription_id", "partition", "version"]).doUpdateSet({
        last_processed_position: (eb) => eb.ref("excluded.last_processed_position")
      })
    ).execute();
    return {
      subscriptionId,
      partition,
      lastProcessedPosition: 0n
    };
  }
  // Persist a new checkpoint position for the subscription.
  async function updateCheckpoint(subscriptionId, partition, lastProcessedPosition) {
    await db.updateTable("subscriptions").set({ last_processed_position: lastProcessedPosition }).where("subscription_id", "=", subscriptionId).where("partition", "=", partition).execute();
  }
  // Project one batch of events from `streamId`. Returns the number of rows
  // read and the stream's current version so callers can loop until caught up.
  async function projectEvents(subscriptionId, streamId, opts) {
    const partition = opts?.partition ?? "default_partition";
    const batchSize = BigInt(opts?.batchSize ?? 500);
    const checkpoint = await getOrCreateCheckpoint(subscriptionId, partition);
    // Read the next window after the checkpoint (inclusive bounds).
    const { events, currentStreamVersion } = await readStream(streamId, {
      from: checkpoint.lastProcessedPosition + 1n,
      to: checkpoint.lastProcessedPosition + batchSize,
      partition
    });
    for (const ev of events) {
      if (!ev) continue;
      const handlers = registry[ev.type] ?? [];
      if (handlers.length === 0) {
        // No handler for this type: still advance the checkpoint so the
        // event is not re-read forever.
        await updateCheckpoint(
          subscriptionId,
          partition,
          ev.metadata.streamPosition
        );
        continue;
      }
      // Strip the envelope down to the fields projection handlers receive.
      const projectionEvent = {
        type: ev.type,
        data: ev.data,
        metadata: {
          streamId: ev.metadata.streamId,
          streamPosition: ev.metadata.streamPosition,
          globalPosition: ev.metadata.globalPosition
        }
      };
      for (const handler of handlers) {
        await handler({ db, partition }, projectionEvent);
      }
      // Checkpoint after each fully-handled event. At-least-once semantics:
      // a crash mid-handlers re-delivers this event on the next run.
      await updateCheckpoint(
        subscriptionId,
        partition,
        projectionEvent.metadata.streamPosition
      );
    }
    return { processed: events.length, currentStreamVersion };
  }
  return { projectEvents };
}
|
|
529
|
+
|
|
530
|
+
// src/projections/snapshot-projection.ts
|
|
531
|
+
/**
 * Build a projection handler that maintains a one-row-per-key snapshot table.
 *
 * For each event: load the existing snapshot row for the extracted keys, skip
 * the event if it is at or before the row's recorded stream position
 * (idempotent replay), otherwise evolve the state and upsert the row
 * (INSERT ... ON CONFLICT on `primaryKeys`).
 *
 * Fix over the previous version: `mapToColumns(newState)` was invoked twice
 * per event (once for the row values, once for the conflict update-set); it
 * is now computed exactly once, which also avoids a crash if it ever
 * returned a nullish value.
 *
 * @param config.tableName    snapshot table to upsert into
 * @param config.primaryKeys  columns forming the conflict target
 * @param config.extractKeys  (event, partition) -> key column values
 * @param config.evolve       (state, event) -> new state
 * @param config.initialState () -> state used when no row exists yet
 * @param config.mapToColumns optional: state -> extra denormalized columns
 * @returns async projection handler ({ db, partition }, event) => void
 */
function createSnapshotProjection(config) {
  const {
    tableName,
    primaryKeys,
    extractKeys,
    evolve,
    initialState,
    mapToColumns
  } = config;
  return async ({ db, partition }, event) => {
    const keys = extractKeys(event, partition);
    // Load the current snapshot row for this key set (if any).
    const existing = await db
      .selectFrom(tableName)
      .select(["last_stream_position", "snapshot"])
      .where((eb) => {
        const conditions = Object.entries(keys).map(
          ([key, value]) => eb(key, "=", value)
        );
        return eb.and(conditions);
      })
      .executeTakeFirst();
    const lastPos = existing?.last_stream_position
      ? BigInt(String(existing.last_stream_position))
      : -1n;
    // Idempotency: ignore events at or before the snapshot's recorded position.
    if (event.metadata.streamPosition <= lastPos) {
      return;
    }
    const currentState = existing?.snapshot ? existing.snapshot : initialState();
    const newState = evolve(currentState, event);
    // Compute the denormalized columns once (previously called twice).
    const extraColumns = mapToColumns ? mapToColumns(newState) : void 0;
    const rowData = {
      ...keys,
      snapshot: JSON.stringify(newState),
      stream_id: event.metadata.streamId,
      last_stream_position: event.metadata.streamPosition.toString(),
      last_global_position: event.metadata.globalPosition.toString()
    };
    // On conflict, every non-key column is refreshed from the excluded row.
    const updateSet = {
      snapshot: (eb) => eb.ref("excluded.snapshot"),
      stream_id: (eb) => eb.ref("excluded.stream_id"),
      last_stream_position: (eb) => eb.ref("excluded.last_stream_position"),
      last_global_position: (eb) => eb.ref("excluded.last_global_position")
    };
    if (extraColumns) {
      Object.assign(rowData, extraColumns);
      for (const columnName of Object.keys(extraColumns)) {
        updateSet[columnName] = (eb) => eb.ref(`excluded.${columnName}`);
      }
    }
    await db
      .insertInto(tableName)
      .values(rowData)
      .onConflict((oc) => oc.columns(primaryKeys).doUpdateSet(updateSet))
      .execute();
  };
}
|
|
584
|
+
/**
 * Register one shared snapshot projection handler (built from `config`)
 * under every event type in `eventTypes`.
 */
function createSnapshotProjectionRegistry(eventTypes, config) {
  const handler = createSnapshotProjection(config);
  return Object.fromEntries(
    eventTypes.map((eventType) => [eventType, [handler]])
  );
}
|
|
592
|
+
// Annotate the CommonJS export names for ESM import in node:
// Dead code on purpose — the `0 &&` prefix means it never executes; Node's
// cjs-module-lexer parses it statically so ESM `import { ... }` of this CJS
// bundle resolves named exports.
0 && (module.exports = {
  createKyselyEventStoreConsumer,
  createProjectionRegistry,
  createProjectionRunner,
  createSnapshotProjection,
  createSnapshotProjectionRegistry,
  getKyselyEventStore
});
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export { createKyselyEventStoreConsumer } from "./event-store/consumers.js";
|
|
2
|
+
export type { KyselyEventStoreConsumer, KyselyEventStoreConsumerConfig, } from "./event-store/consumers.js";
|
|
3
|
+
export { getKyselyEventStore } from "./event-store/kysely-event-store.js";
|
|
4
|
+
export type { KyselyEventStore, KyselyEventStoreOptions, ProjectionReadStreamOptions, } from "./event-store/kysely-event-store.js";
|
|
5
|
+
export { createProjectionRunner } from "./projections/runner.js";
|
|
6
|
+
export { createSnapshotProjection, createSnapshotProjectionRegistry, } from "./projections/snapshot-projection.js";
|
|
7
|
+
export type { SnapshotProjectionConfig } from "./projections/snapshot-projection.js";
|
|
8
|
+
export { createProjectionRegistry } from "./types.js";
|
|
9
|
+
export type { DatabaseExecutor, Dependencies, ExtendedOptions, ProjectionContext, ProjectionEvent, ProjectionEventMetadata, ProjectionHandler, ProjectionRegistry, } from "./types.js";
|
|
10
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,8BAA8B,EAAE,MAAM,4BAA4B,CAAC;AAC5E,YAAY,EACV,wBAAwB,EACxB,8BAA8B,GAC/B,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,mBAAmB,EAAE,MAAM,qCAAqC,CAAC;AAC1E,YAAY,EACV,gBAAgB,EAChB,uBAAuB,EACvB,2BAA2B,GAC5B,MAAM,qCAAqC,CAAC;AAC7C,OAAO,EAAE,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACjE,OAAO,EACL,wBAAwB,EACxB,gCAAgC,GACjC,MAAM,sCAAsC,CAAC;AAC9C,YAAY,EAAE,wBAAwB,EAAE,MAAM,sCAAsC,CAAC;AACrF,OAAO,EAAE,wBAAwB,EAAE,MAAM,YAAY,CAAC;AACtD,YAAY,EACV,gBAAgB,EAChB,YAAY,EACZ,eAAe,EACf,iBAAiB,EACjB,eAAe,EACf,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,GACnB,MAAM,YAAY,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export { createKyselyEventStoreConsumer } from "./event-store/consumers.js";
|
|
2
|
+
export { getKyselyEventStore } from "./event-store/kysely-event-store.js";
|
|
3
|
+
export { createProjectionRunner } from "./projections/runner.js";
|
|
4
|
+
export { createSnapshotProjection, createSnapshotProjectionRegistry, } from "./projections/snapshot-projection.js";
|
|
5
|
+
export { createProjectionRegistry } from "./types.js";
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import type { KyselyEventStore } from "../event-store/kysely-event-store.js";
|
|
2
|
+
import type { DatabaseExecutor, ProjectionRegistry } from "../types.js";
|
|
3
|
+
export type SubscriptionCheckpoint = {
|
|
4
|
+
subscriptionId: string;
|
|
5
|
+
partition: string;
|
|
6
|
+
lastProcessedPosition: bigint;
|
|
7
|
+
};
|
|
8
|
+
export type ProjectionRunnerDeps<T extends DatabaseExecutor = DatabaseExecutor> = {
|
|
9
|
+
db: T;
|
|
10
|
+
readStream: KyselyEventStore["readStream"];
|
|
11
|
+
registry: ProjectionRegistry<T>;
|
|
12
|
+
};
|
|
13
|
+
export declare function createProjectionRunner<T extends DatabaseExecutor = DatabaseExecutor>({ db, readStream, registry }: ProjectionRunnerDeps<T>): {
|
|
14
|
+
projectEvents: (subscriptionId: string, streamId: string, opts?: {
|
|
15
|
+
partition?: string;
|
|
16
|
+
batchSize?: number;
|
|
17
|
+
}) => Promise<{
|
|
18
|
+
processed: number;
|
|
19
|
+
currentStreamVersion: bigint;
|
|
20
|
+
}>;
|
|
21
|
+
};
|
|
22
|
+
//# sourceMappingURL=runner.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runner.d.ts","sourceRoot":"","sources":["../../src/projections/runner.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,sCAAsC,CAAC;AAC7E,OAAO,KAAK,EACV,gBAAgB,EAEhB,kBAAkB,EACnB,MAAM,aAAa,CAAC;AAErB,MAAM,MAAM,sBAAsB,GAAG;IACnC,cAAc,EAAE,MAAM,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,qBAAqB,EAAE,MAAM,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,oBAAoB,CAC9B,CAAC,SAAS,gBAAgB,GAAG,gBAAgB,IAC3C;IACF,EAAE,EAAE,CAAC,CAAC;IACN,UAAU,EAAE,gBAAgB,CAAC,YAAY,CAAC,CAAC;IAC3C,QAAQ,EAAE,kBAAkB,CAAC,CAAC,CAAC,CAAC;CACjC,CAAC;AAEF,wBAAgB,sBAAsB,CACpC,CAAC,SAAS,gBAAgB,GAAG,gBAAgB,EAC7C,EAAE,EAAE,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,oBAAoB,CAAC,CAAC,CAAC;oCA4EnC,MAAM,YACZ,MAAM,SACT;QAAE,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE;;;;EAiDpD"}
|