@event-driven-io/emmett-sqlite 0.32.0 → 0.34.0

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -1 +1,786 @@
1
- "use strict";//# sourceMappingURL=index.cjs.map
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3;// src/connection/sqliteConnection.ts
2
+ var _sqlite3 = require('sqlite3'); var _sqlite32 = _interopRequireDefault(_sqlite3);
3
+ var isSQLiteError = (error) => {
4
+ if (error instanceof Error && "code" in error) {
5
+ return true;
6
+ }
7
+ return false;
8
+ };
9
+ var InMemorySQLiteDatabase = ":memory:";
10
+ var sqliteConnection = (options) => {
11
+ const db = new _sqlite32.default.Database(_nullishCoalesce(options.fileName, () => ( InMemorySQLiteDatabase)));
12
+ return {
13
+ close: () => db.close(),
14
+ command: (sql2, params) => new Promise((resolve, reject) => {
15
+ db.run(sql2, _nullishCoalesce(params, () => ( [])), (err) => {
16
+ if (err) {
17
+ reject(err);
18
+ return;
19
+ }
20
+ resolve();
21
+ });
22
+ }),
23
+ query: (sql2, params) => new Promise((resolve, reject) => {
24
+ db.all(sql2, _nullishCoalesce(params, () => ( [])), (err, result) => {
25
+ if (err) {
26
+ reject(err);
27
+ return;
28
+ }
29
+ resolve(result);
30
+ });
31
+ }),
32
+ querySingle: (sql2, params) => new Promise((resolve, reject) => {
33
+ db.get(sql2, _nullishCoalesce(params, () => ( [])), (err, result) => {
34
+ if (err) {
35
+ reject(err);
36
+ return;
37
+ }
38
+ resolve(result);
39
+ });
40
+ })
41
+ };
42
+ };
43
+
44
+ // ../emmett/dist/chunk-4E7QLAH5.js
45
+ var isNumber = (val) => typeof val === "number" && val === val;
46
+ var isString = (val) => typeof val === "string";
47
+ var EmmettError = class _EmmettError extends Error {
48
+
49
+ constructor(options) {
50
+ const errorCode = options && typeof options === "object" && "errorCode" in options ? options.errorCode : isNumber(options) ? options : 500;
51
+ const message = options && typeof options === "object" && "message" in options ? options.message : isString(options) ? options : `Error with status code '${errorCode}' ocurred during Emmett processing`;
52
+ super(message);
53
+ this.errorCode = errorCode;
54
+ Object.setPrototypeOf(this, _EmmettError.prototype);
55
+ }
56
+ };
57
+ var ConcurrencyError = class _ConcurrencyError extends EmmettError {
58
+ constructor(current, expected, message) {
59
+ super({
60
+ errorCode: 412,
61
+ message: _nullishCoalesce(message, () => ( `Expected version ${expected.toString()} does not match current ${_optionalChain([current, 'optionalAccess', _2 => _2.toString, 'call', _3 => _3()])}`))
62
+ });
63
+ this.current = current;
64
+ this.expected = expected;
65
+ Object.setPrototypeOf(this, _ConcurrencyError.prototype);
66
+ }
67
+ };
68
+
69
+ // ../emmett/dist/index.js
70
+ var _uuid = require('uuid');
71
+ var _webstreamspolyfill = require('web-streams-polyfill');
72
+
73
+
74
+
75
+ var _asyncretry = require('async-retry'); var _asyncretry2 = _interopRequireDefault(_asyncretry);
76
+
77
+
78
+
79
+
80
+
81
+
82
+
83
+
84
+
85
+
86
+
87
+
88
+
89
+ var STREAM_EXISTS = "STREAM_EXISTS";
90
+ var STREAM_DOES_NOT_EXIST = "STREAM_DOES_NOT_EXIST";
91
+ var NO_CONCURRENCY_CHECK = "NO_CONCURRENCY_CHECK";
92
+ var matchesExpectedVersion = (current, expected, defaultVersion) => {
93
+ if (expected === NO_CONCURRENCY_CHECK) return true;
94
+ if (expected == STREAM_DOES_NOT_EXIST) return current === defaultVersion;
95
+ if (expected == STREAM_EXISTS) return current !== defaultVersion;
96
+ return current === expected;
97
+ };
98
+ var assertExpectedVersionMatchesCurrent = (current, expected, defaultVersion) => {
99
+ expected ??= NO_CONCURRENCY_CHECK;
100
+ if (!matchesExpectedVersion(current, expected, defaultVersion))
101
+ throw new ExpectedVersionConflictError(current, expected);
102
+ };
103
+ var ExpectedVersionConflictError = class _ExpectedVersionConflictError extends ConcurrencyError {
104
+ constructor(current, expected) {
105
+ super(_optionalChain([current, 'optionalAccess', _4 => _4.toString, 'call', _5 => _5()]), _optionalChain([expected, 'optionalAccess', _6 => _6.toString, 'call', _7 => _7()]));
106
+ Object.setPrototypeOf(this, _ExpectedVersionConflictError.prototype);
107
+ }
108
+ };
109
+ var notifyAboutNoActiveReadersStream = (onNoActiveReaderCallback, options = {}) => new NotifyAboutNoActiveReadersStream(onNoActiveReaderCallback, options);
110
+ var NotifyAboutNoActiveReadersStream = (_class = class extends _webstreamspolyfill.TransformStream {
111
+ constructor(onNoActiveReaderCallback, options = {}) {
112
+ super({
113
+ cancel: (reason) => {
114
+ console.log("Stream was canceled. Reason:", reason);
115
+ this.stopChecking();
116
+ }
117
+ });_class.prototype.__init.call(this);_class.prototype.__init2.call(this);;
118
+ this.onNoActiveReaderCallback = onNoActiveReaderCallback;
119
+ this.streamId = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _8 => _8.streamId]), () => ( _uuid.v4.call(void 0, )));
120
+ this.onNoActiveReaderCallback = onNoActiveReaderCallback;
121
+ this.startChecking(_nullishCoalesce(_optionalChain([options, 'optionalAccess', _9 => _9.intervalCheckInMs]), () => ( 20)));
122
+ }
123
+ __init() {this.checkInterval = null}
124
+
125
+ __init2() {this._isStopped = false}
126
+ get hasActiveSubscribers() {
127
+ return !this._isStopped;
128
+ }
129
+ startChecking(interval) {
130
+ this.checkInterval = setInterval(() => {
131
+ this.checkNoActiveReader();
132
+ }, interval);
133
+ }
134
+ stopChecking() {
135
+ if (!this.checkInterval) return;
136
+ clearInterval(this.checkInterval);
137
+ this.checkInterval = null;
138
+ this._isStopped = true;
139
+ this.onNoActiveReaderCallback(this);
140
+ }
141
+ checkNoActiveReader() {
142
+ if (!this.readable.locked && !this._isStopped) {
143
+ this.stopChecking();
144
+ }
145
+ }
146
+ }, _class);
147
+ var asyncRetry = async (fn, opts) => {
148
+ if (opts === void 0 || opts.retries === 0) return fn();
149
+ return _asyncretry2.default.call(void 0,
150
+ async (bail) => {
151
+ try {
152
+ return await fn();
153
+ } catch (error2) {
154
+ if (_optionalChain([opts, 'optionalAccess', _10 => _10.shouldRetryError]) && !opts.shouldRetryError(error2)) {
155
+ bail(error2);
156
+ }
157
+ throw error2;
158
+ }
159
+ },
160
+ _nullishCoalesce(opts, () => ( { retries: 0 }))
161
+ );
162
+ };
163
+ var ParseError = class extends Error {
164
+ constructor(text) {
165
+ super(`Cannot parse! ${text}`);
166
+ }
167
+ };
168
+ var JSONParser = {
169
+ stringify: (value, options) => {
170
+ return JSON.stringify(
171
+ _optionalChain([options, 'optionalAccess', _11 => _11.map]) ? options.map(value) : value,
172
+ //TODO: Consider adding support to DateTime and adding specific format to mark that's a bigint
173
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-return
174
+ (_, v) => typeof v === "bigint" ? v.toString() : v
175
+ );
176
+ },
177
+ parse: (text, options) => {
178
+ const parsed = JSON.parse(text, _optionalChain([options, 'optionalAccess', _12 => _12.reviver]));
179
+ if (_optionalChain([options, 'optionalAccess', _13 => _13.typeCheck]) && !_optionalChain([options, 'optionalAccess', _14 => _14.typeCheck, 'call', _15 => _15(parsed)]))
180
+ throw new ParseError(text);
181
+ return _optionalChain([options, 'optionalAccess', _16 => _16.map]) ? options.map(parsed) : parsed;
182
+ }
183
+ };
184
+ var filter = (filter2) => new (0, _webstreamspolyfill.TransformStream)({
185
+ transform(chunk, controller) {
186
+ if (filter2(chunk)) {
187
+ controller.enqueue(chunk);
188
+ }
189
+ }
190
+ });
191
+ var map = (map2) => new (0, _webstreamspolyfill.TransformStream)({
192
+ transform(chunk, controller) {
193
+ controller.enqueue(map2(chunk));
194
+ }
195
+ });
196
+ var reduce = (reducer, initialValue) => new ReduceTransformStream(reducer, initialValue);
197
+ var ReduceTransformStream = class extends _webstreamspolyfill.TransformStream {
198
+
199
+
200
+ constructor(reducer, initialValue) {
201
+ super({
202
+ transform: (chunk) => {
203
+ this.accumulator = this.reducer(this.accumulator, chunk);
204
+ },
205
+ flush: (controller) => {
206
+ controller.enqueue(this.accumulator);
207
+ controller.terminate();
208
+ }
209
+ });
210
+ this.accumulator = initialValue;
211
+ this.reducer = reducer;
212
+ }
213
+ };
214
+ var retryStream = (createSourceStream, handleChunk2, retryOptions = { forever: true, minTimeout: 25 }) => new (0, _webstreamspolyfill.TransformStream)({
215
+ start(controller) {
216
+ asyncRetry(
217
+ () => onRestream(createSourceStream, handleChunk2, controller),
218
+ retryOptions
219
+ ).catch((error2) => {
220
+ controller.error(error2);
221
+ });
222
+ }
223
+ });
224
+ var onRestream = async (createSourceStream, handleChunk2, controller) => {
225
+ const sourceStream = createSourceStream();
226
+ const reader = sourceStream.getReader();
227
+ try {
228
+ let done;
229
+ do {
230
+ const result = await reader.read();
231
+ done = result.done;
232
+ await handleChunk2(result, controller);
233
+ if (done) {
234
+ controller.terminate();
235
+ }
236
+ } while (!done);
237
+ } finally {
238
+ reader.releaseLock();
239
+ }
240
+ };
241
+ var skip = (limit) => new SkipTransformStream(limit);
242
+ var SkipTransformStream = (_class2 = class extends _webstreamspolyfill.TransformStream {
243
+ __init3() {this.count = 0}
244
+
245
+ constructor(skip2) {
246
+ super({
247
+ transform: (chunk, controller) => {
248
+ this.count++;
249
+ if (this.count > this.skip) {
250
+ controller.enqueue(chunk);
251
+ }
252
+ }
253
+ });_class2.prototype.__init3.call(this);;
254
+ this.skip = skip2;
255
+ }
256
+ }, _class2);
257
+ var stopAfter = (stopCondition) => new (0, _webstreamspolyfill.TransformStream)({
258
+ transform(chunk, controller) {
259
+ controller.enqueue(chunk);
260
+ if (stopCondition(chunk)) {
261
+ controller.terminate();
262
+ }
263
+ }
264
+ });
265
+ var stopOn = (stopCondition) => new (0, _webstreamspolyfill.TransformStream)({
266
+ async transform(chunk, controller) {
267
+ if (!stopCondition(chunk)) {
268
+ controller.enqueue(chunk);
269
+ return;
270
+ }
271
+ await Promise.resolve();
272
+ controller.terminate();
273
+ }
274
+ });
275
+ var take = (limit) => new TakeTransformStream(limit);
276
+ var TakeTransformStream = (_class3 = class extends _webstreamspolyfill.TransformStream {
277
+ __init4() {this.count = 0}
278
+
279
+ constructor(limit) {
280
+ super({
281
+ transform: (chunk, controller) => {
282
+ if (this.count < this.limit) {
283
+ this.count++;
284
+ controller.enqueue(chunk);
285
+ } else {
286
+ controller.terminate();
287
+ }
288
+ }
289
+ });_class3.prototype.__init4.call(this);;
290
+ this.limit = limit;
291
+ }
292
+ }, _class3);
293
+ var waitAtMost = (waitTimeInMs) => new (0, _webstreamspolyfill.TransformStream)({
294
+ start(controller) {
295
+ const timeoutId = setTimeout(() => {
296
+ controller.terminate();
297
+ }, waitTimeInMs);
298
+ const originalTerminate = controller.terminate.bind(controller);
299
+ controller.terminate = () => {
300
+ clearTimeout(timeoutId);
301
+ originalTerminate();
302
+ };
303
+ },
304
+ transform(chunk, controller) {
305
+ controller.enqueue(chunk);
306
+ }
307
+ });
308
+ var streamTransformations = {
309
+ filter,
310
+ take,
311
+ TakeTransformStream,
312
+ skip,
313
+ SkipTransformStream,
314
+ map,
315
+ notifyAboutNoActiveReadersStream,
316
+ NotifyAboutNoActiveReadersStream,
317
+ reduce,
318
+ ReduceTransformStream,
319
+ retry: retryStream,
320
+ stopAfter,
321
+ stopOn,
322
+ waitAtMost
323
+ };
324
+ var { retry: retry2 } = streamTransformations;
325
+
326
+ // src/eventStore/schema/appendToStream.ts
327
+
328
+
329
+ // src/eventStore/schema/typing.ts
330
+ var emmettPrefix = "emt";
331
+ var globalTag = "global";
332
+ var defaultTag = "emt:default";
333
+ var globalNames = {
334
+ module: `${emmettPrefix}:module:${globalTag}`
335
+ };
336
+ var columns = {
337
+ partition: {
338
+ name: "partition"
339
+ },
340
+ isArchived: { name: "is_archived" }
341
+ };
342
+ var streamsTable = {
343
+ name: `${emmettPrefix}_streams`,
344
+ columns: {
345
+ partition: columns.partition,
346
+ isArchived: columns.isArchived
347
+ }
348
+ };
349
+ var messagesTable = {
350
+ name: `${emmettPrefix}_messages`,
351
+ columns: {
352
+ partition: columns.partition,
353
+ isArchived: columns.isArchived
354
+ }
355
+ };
356
+
357
+ // src/eventStore/schema/appendToStream.ts
358
+ var appendToStream = async (db, streamName, streamType, messages, options) => {
359
+ if (messages.length === 0) return { success: false };
360
+ const expectedStreamVersion = toExpectedVersion(
361
+ _optionalChain([options, 'optionalAccess', _17 => _17.expectedStreamVersion])
362
+ );
363
+ const messagesToAppend = messages.map(
364
+ (m, i) => ({
365
+ ...m,
366
+ kind: _nullishCoalesce(m.kind, () => ( "Event")),
367
+ metadata: {
368
+ streamName,
369
+ messageId: _uuid.v4.call(void 0, ),
370
+ streamPosition: BigInt(i + 1),
371
+ ..."metadata" in m ? _nullishCoalesce(m.metadata, () => ( {})) : {}
372
+ }
373
+ })
374
+ );
375
+ let result;
376
+ await db.command(`BEGIN TRANSACTION`);
377
+ try {
378
+ result = await appendToStreamRaw(
379
+ db,
380
+ streamName,
381
+ streamType,
382
+ messagesToAppend,
383
+ {
384
+ expectedStreamVersion
385
+ }
386
+ );
387
+ if (_optionalChain([options, 'optionalAccess', _18 => _18.onBeforeCommit]))
388
+ await options.onBeforeCommit(messagesToAppend, { connection: db });
389
+ } catch (err) {
390
+ await db.command(`ROLLBACK`);
391
+ throw err;
392
+ }
393
+ if (result.success == null || !result.success) {
394
+ await db.command(`ROLLBACK`);
395
+ return result;
396
+ }
397
+ await db.command(`COMMIT`);
398
+ return result;
399
+ };
400
+ var toExpectedVersion = (expected) => {
401
+ if (expected === void 0) return null;
402
+ if (expected === NO_CONCURRENCY_CHECK) return null;
403
+ if (expected == STREAM_DOES_NOT_EXIST) return null;
404
+ if (expected == STREAM_EXISTS) return null;
405
+ return expected;
406
+ };
407
+ var appendToStreamRaw = async (db, streamId, streamType, messages, options) => {
408
+ let streamPosition;
409
+ let globalPosition;
410
+ try {
411
+ let expectedStreamVersion = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _19 => _19.expectedStreamVersion]), () => ( null));
412
+ if (expectedStreamVersion == null) {
413
+ expectedStreamVersion = await getLastStreamPosition(
414
+ db,
415
+ streamId,
416
+ expectedStreamVersion
417
+ );
418
+ }
419
+ let position;
420
+ if (expectedStreamVersion === 0n) {
421
+ position = await db.querySingle(
422
+ `INSERT INTO ${streamsTable.name}
423
+ (stream_id, stream_position, partition, stream_type, stream_metadata, is_archived)
424
+ VALUES (
425
+ ?,
426
+ ?,
427
+ ?,
428
+ ?,
429
+ '[]',
430
+ false
431
+ )
432
+ RETURNING stream_position;
433
+ `,
434
+ [
435
+ streamId,
436
+ messages.length,
437
+ _nullishCoalesce(_optionalChain([options, 'optionalAccess', _20 => _20.partition]), () => ( streamsTable.columns.partition)),
438
+ streamType
439
+ ]
440
+ );
441
+ } else {
442
+ position = await db.querySingle(
443
+ `UPDATE ${streamsTable.name}
444
+ SET stream_position = stream_position + ?
445
+ WHERE stream_id = ?
446
+ AND partition = ?
447
+ AND is_archived = false
448
+ RETURNING stream_position;
449
+ `,
450
+ [
451
+ messages.length,
452
+ streamId,
453
+ _nullishCoalesce(_optionalChain([options, 'optionalAccess', _21 => _21.partition]), () => ( streamsTable.columns.partition))
454
+ ]
455
+ );
456
+ }
457
+ if (position == null) {
458
+ throw new Error("Could not find stream position");
459
+ }
460
+ streamPosition = BigInt(position.stream_position);
461
+ if (expectedStreamVersion != null) {
462
+ const expectedStreamPositionAfterSave = BigInt(expectedStreamVersion) + BigInt(messages.length);
463
+ if (streamPosition !== expectedStreamPositionAfterSave) {
464
+ return {
465
+ success: false
466
+ };
467
+ }
468
+ }
469
+ const { sqlString, values } = buildMessageInsertQuery(
470
+ messages,
471
+ expectedStreamVersion,
472
+ streamId,
473
+ _nullishCoalesce(_optionalChain([options, 'optionalAccess', _22 => _22.partition, 'optionalAccess', _23 => _23.toString, 'call', _24 => _24()]), () => ( defaultTag))
474
+ );
475
+ const returningId = await db.querySingle(sqlString, values);
476
+ if (_optionalChain([returningId, 'optionalAccess', _25 => _25.global_position]) == null) {
477
+ throw new Error("Could not find global position");
478
+ }
479
+ globalPosition = BigInt(returningId.global_position);
480
+ } catch (err) {
481
+ if (isSQLiteError(err) && isOptimisticConcurrencyError(err)) {
482
+ return {
483
+ success: false
484
+ };
485
+ }
486
+ throw err;
487
+ }
488
+ return {
489
+ success: true,
490
+ nextStreamPosition: streamPosition,
491
+ lastGlobalPosition: globalPosition
492
+ };
493
+ };
494
+ var isOptimisticConcurrencyError = (error) => {
495
+ return _optionalChain([error, 'optionalAccess', _26 => _26.errno]) !== void 0 && error.errno === 19;
496
+ };
497
+ async function getLastStreamPosition(db, streamId, expectedStreamVersion) {
498
+ const result = await db.querySingle(
499
+ `SELECT CAST(stream_position AS VARCHAR) AS stream_position FROM ${streamsTable.name} WHERE stream_id = ?`,
500
+ [streamId]
501
+ );
502
+ if (_optionalChain([result, 'optionalAccess', _27 => _27.stream_position]) == null) {
503
+ expectedStreamVersion = 0n;
504
+ } else {
505
+ expectedStreamVersion = BigInt(result.stream_position);
506
+ }
507
+ return expectedStreamVersion;
508
+ }
509
+ var buildMessageInsertQuery = (messages, expectedStreamVersion, streamId, partition) => {
510
+ const query = messages.reduce(
511
+ (queryBuilder, message) => {
512
+ if (_optionalChain([message, 'access', _28 => _28.metadata, 'optionalAccess', _29 => _29.streamPosition]) == null || typeof message.metadata.streamPosition !== "bigint") {
513
+ throw new Error("Stream position is required");
514
+ }
515
+ const streamPosition = BigInt(message.metadata.streamPosition) + BigInt(expectedStreamVersion);
516
+ queryBuilder.parameterMarkers.push(`(?,?,?,?,?,?,?,?,?,?)`);
517
+ queryBuilder.values.push(
518
+ streamId,
519
+ _nullishCoalesce(streamPosition.toString(), () => ( 0)),
520
+ _nullishCoalesce(partition, () => ( defaultTag)),
521
+ message.kind === "Event" ? "E" : "C",
522
+ JSONParser.stringify(message.data),
523
+ JSONParser.stringify(message.metadata),
524
+ _nullishCoalesce(_optionalChain([expectedStreamVersion, 'optionalAccess', _30 => _30.toString, 'call', _31 => _31()]), () => ( 0)),
525
+ message.type,
526
+ message.metadata.messageId,
527
+ false
528
+ );
529
+ return queryBuilder;
530
+ },
531
+ {
532
+ parameterMarkers: [],
533
+ values: []
534
+ }
535
+ );
536
+ const sqlString = `
537
+ INSERT INTO ${messagesTable.name} (
538
+ stream_id,
539
+ stream_position,
540
+ partition,
541
+ message_kind,
542
+ message_data,
543
+ message_metadata,
544
+ message_schema_version,
545
+ message_type,
546
+ message_id,
547
+ is_archived
548
+ )
549
+ VALUES ${query.parameterMarkers.join(", ")}
550
+ RETURNING
551
+ CAST(global_position as VARCHAR) AS global_position
552
+ `;
553
+ return { sqlString, values: query.values };
554
+ };
555
+
556
+ // src/eventStore/projections/index.ts
557
+ var handleProjections = async (options) => {
558
+ const { projections: allProjections, events, connection } = options;
559
+ const eventTypes = events.map((e) => e.type);
560
+ const projections = allProjections.filter(
561
+ (p) => p.canHandle.some((type) => eventTypes.includes(type))
562
+ );
563
+ for (const projection2 of projections) {
564
+ await projection2.handle(events, {
565
+ connection
566
+ });
567
+ }
568
+ };
569
+
570
+ // src/eventStore/SQLiteEventStore.ts
571
+ var SQLiteEventStoreDefaultStreamVersion = 0n;
572
+ var getSQLiteEventStore = (options) => {
573
+ let schemaMigrated = false;
574
+ let autoGenerateSchema = false;
575
+ let database;
576
+ const fileName = _nullishCoalesce(options.fileName, () => ( InMemorySQLiteDatabase));
577
+ const isInMemory = fileName === InMemorySQLiteDatabase;
578
+ const inlineProjections = (_nullishCoalesce(options.projections, () => ( []))).filter(({ type }) => type === "inline").map(({ projection: projection2 }) => projection2);
579
+ const onBeforeCommitHook = _optionalChain([options, 'access', _32 => _32.hooks, 'optionalAccess', _33 => _33.onBeforeCommit]);
580
+ const createConnection = () => {
581
+ if (database != null) {
582
+ return database;
583
+ }
584
+ return sqliteConnection({
585
+ fileName
586
+ });
587
+ };
588
+ const closeConnection = () => {
589
+ if (isInMemory) {
590
+ return;
591
+ }
592
+ if (database != null) {
593
+ database.close();
594
+ database = null;
595
+ }
596
+ };
597
+ const withConnection = async (handler) => {
598
+ if (database == null) {
599
+ database = createConnection();
600
+ }
601
+ try {
602
+ await ensureSchemaExists(database);
603
+ return await handler(database);
604
+ } finally {
605
+ closeConnection();
606
+ }
607
+ };
608
+ if (options) {
609
+ autoGenerateSchema = _optionalChain([options, 'access', _34 => _34.schema, 'optionalAccess', _35 => _35.autoMigration]) === void 0 || _optionalChain([options, 'access', _36 => _36.schema, 'optionalAccess', _37 => _37.autoMigration]) !== "None";
610
+ }
611
+ const ensureSchemaExists = async (connection) => {
612
+ if (!autoGenerateSchema) return Promise.resolve();
613
+ if (!schemaMigrated) {
614
+ await createEventStoreSchema(connection);
615
+ schemaMigrated = true;
616
+ }
617
+ return Promise.resolve();
618
+ };
619
+ return {
620
+ async aggregateStream(streamName, options2) {
621
+ const { evolve, initialState, read } = options2;
622
+ const expectedStreamVersion = _optionalChain([read, 'optionalAccess', _38 => _38.expectedStreamVersion]);
623
+ let state = initialState();
624
+ if (typeof streamName !== "string") {
625
+ throw new Error("Stream name is not string");
626
+ }
627
+ if (database == null) {
628
+ database = createConnection();
629
+ }
630
+ const result = await withConnection(
631
+ (db) => readStream(db, streamName, options2.read)
632
+ );
633
+ const currentStreamVersion = result.currentStreamVersion;
634
+ assertExpectedVersionMatchesCurrent(
635
+ currentStreamVersion,
636
+ expectedStreamVersion,
637
+ SQLiteEventStoreDefaultStreamVersion
638
+ );
639
+ for (const event of result.events) {
640
+ if (!event) continue;
641
+ state = evolve(state, event);
642
+ }
643
+ return {
644
+ currentStreamVersion,
645
+ state,
646
+ streamExists: result.streamExists
647
+ };
648
+ },
649
+ readStream: async (streamName, options2) => withConnection((db) => readStream(db, streamName, options2)),
650
+ appendToStream: async (streamName, events, options2) => {
651
+ if (database == null) {
652
+ database = createConnection();
653
+ }
654
+ const [firstPart, ...rest] = streamName.split("-");
655
+ const streamType = firstPart && rest.length > 0 ? firstPart : "emt:unknown";
656
+ const appendResult = await withConnection(
657
+ (db) => appendToStream(db, streamName, streamType, events, {
658
+ ...options2,
659
+ onBeforeCommit: async (messages, context) => {
660
+ if (inlineProjections.length > 0)
661
+ await handleProjections({
662
+ projections: inlineProjections,
663
+ events: messages,
664
+ ...context
665
+ });
666
+ if (onBeforeCommitHook) await onBeforeCommitHook(messages, context);
667
+ }
668
+ })
669
+ );
670
+ if (!appendResult.success)
671
+ throw new ExpectedVersionConflictError(
672
+ -1n,
673
+ //TODO: Return actual version in case of error
674
+ _nullishCoalesce(_optionalChain([options2, 'optionalAccess', _39 => _39.expectedStreamVersion]), () => ( NO_CONCURRENCY_CHECK))
675
+ );
676
+ return {
677
+ nextExpectedStreamVersion: appendResult.nextStreamPosition,
678
+ lastEventGlobalPosition: appendResult.lastGlobalPosition,
679
+ createdNewStream: appendResult.nextStreamPosition >= BigInt(events.length)
680
+ };
681
+ }
682
+ };
683
+ };
684
+
685
+ // src/eventStore/schema/readStream.ts
686
+ var readStream = async (db, streamId, options) => {
687
+ const fromCondition = options && "from" in options ? `AND stream_position >= ${options.from}` : "";
688
+ const to = Number(
689
+ options && "to" in options ? options.to : options && "maxCount" in options && options.maxCount ? options.from + options.maxCount : NaN
690
+ );
691
+ const toCondition = !isNaN(to) ? `AND stream_position <= ${to}` : "";
692
+ const results = await db.query(
693
+ `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id
694
+ FROM ${messagesTable.name}
695
+ WHERE stream_id = ? AND partition = ? AND is_archived = FALSE ${fromCondition} ${toCondition}`,
696
+ [streamId, _nullishCoalesce(_optionalChain([options, 'optionalAccess', _40 => _40.partition]), () => ( defaultTag))]
697
+ );
698
+ const messages = results.map((row) => {
699
+ const rawEvent = {
700
+ type: row.message_type,
701
+ data: JSONParser.parse(row.message_data),
702
+ metadata: JSONParser.parse(row.message_metadata)
703
+ };
704
+ const metadata = {
705
+ ..."metadata" in rawEvent ? _nullishCoalesce(rawEvent.metadata, () => ( {})) : {},
706
+ messageId: row.message_id,
707
+ streamName: streamId,
708
+ streamPosition: BigInt(row.stream_position),
709
+ globalPosition: BigInt(row.global_position)
710
+ };
711
+ return {
712
+ ...rawEvent,
713
+ kind: "Event",
714
+ metadata
715
+ };
716
+ });
717
+ return messages.length > 0 ? {
718
+ currentStreamVersion: messages[messages.length - 1].metadata.streamPosition,
719
+ events: messages,
720
+ streamExists: true
721
+ } : {
722
+ currentStreamVersion: SQLiteEventStoreDefaultStreamVersion,
723
+ events: [],
724
+ streamExists: false
725
+ };
726
+ };
727
+
728
+ // src/eventStore/schema/tables.ts
729
+ var sql = (sql2) => sql2;
730
+ var streamsTableSQL = sql(
731
+ `CREATE TABLE IF NOT EXISTS ${streamsTable.name}(
732
+ stream_id TEXT NOT NULL,
733
+ stream_position BIGINT NOT NULL DEFAULT 0,
734
+ partition TEXT NOT NULL DEFAULT '${globalTag}',
735
+ stream_type TEXT NOT NULL,
736
+ stream_metadata JSONB NOT NULL,
737
+ is_archived BOOLEAN NOT NULL DEFAULT FALSE,
738
+ PRIMARY KEY (stream_id, stream_position, partition, is_archived),
739
+ UNIQUE (stream_id, partition, is_archived)
740
+ );`
741
+ );
742
+ var messagesTableSQL = sql(
743
+ `CREATE TABLE IF NOT EXISTS ${messagesTable.name}(
744
+ stream_id TEXT NOT NULL,
745
+ stream_position BIGINT NOT NULL,
746
+ partition TEXT NOT NULL DEFAULT '${globalTag}',
747
+ message_kind CHAR(1) NOT NULL DEFAULT 'E',
748
+ message_data JSONB NOT NULL,
749
+ message_metadata JSONB NOT NULL,
750
+ message_schema_version TEXT NOT NULL,
751
+ message_type TEXT NOT NULL,
752
+ message_id TEXT NOT NULL,
753
+ is_archived BOOLEAN NOT NULL DEFAULT FALSE,
754
+ global_position INTEGER PRIMARY KEY,
755
+ created DATETIME DEFAULT CURRENT_TIMESTAMP,
756
+ UNIQUE (stream_id, stream_position, partition, is_archived)
757
+ );
758
+ `
759
+ );
760
+ var schemaSQL = [streamsTableSQL, messagesTableSQL];
761
+ var createEventStoreSchema = async (db) => {
762
+ for (const sql2 of schemaSQL) {
763
+ await db.command(sql2);
764
+ }
765
+ };
766
+
767
+
768
+
769
+
770
+
771
+
772
+
773
+
774
+
775
+
776
+
777
+
778
+
779
+
780
+
781
+
782
+
783
+
784
+
785
+ exports.InMemorySQLiteDatabase = InMemorySQLiteDatabase; exports.SQLiteEventStoreDefaultStreamVersion = SQLiteEventStoreDefaultStreamVersion; exports.appendToStream = appendToStream; exports.createEventStoreSchema = createEventStoreSchema; exports.defaultTag = defaultTag; exports.emmettPrefix = emmettPrefix; exports.getSQLiteEventStore = getSQLiteEventStore; exports.globalNames = globalNames; exports.globalTag = globalTag; exports.isSQLiteError = isSQLiteError; exports.messagesTable = messagesTable; exports.messagesTableSQL = messagesTableSQL; exports.readStream = readStream; exports.schemaSQL = schemaSQL; exports.sql = sql; exports.sqliteConnection = sqliteConnection; exports.streamsTable = streamsTable; exports.streamsTableSQL = streamsTableSQL;
786
+ //# sourceMappingURL=index.cjs.map
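
For orientation, here is a minimal usage sketch of the API exported by the new dist/index.cjs (getSQLiteEventStore, InMemorySQLiteDatabase), assuming the package's main entry points at this file. The stream name, event shape, and reducer below are illustrative assumptions, not anything defined by the package:

import { getSQLiteEventStore, InMemorySQLiteDatabase } from '@event-driven-io/emmett-sqlite';

// In-memory store; pass a file path instead to persist to disk.
// Schema auto-migration runs on first use unless schema.autoMigration is set to 'None'.
const eventStore = getSQLiteEventStore({ fileName: InMemorySQLiteDatabase });

const run = async () => {
  // Stream names follow the '<streamType>-<id>' convention that appendToStream splits on.
  const appended = await eventStore.appendToStream('cart-123', [
    { type: 'ProductAdded', data: { productId: 'p-1', quantity: 2 } },
    { type: 'ProductAdded', data: { productId: 'p-2', quantity: 1 } },
  ]);
  console.log(appended.nextExpectedStreamVersion); // 2n

  // Rebuild state by folding the stream's events through a reducer.
  const { state, currentStreamVersion } = await eventStore.aggregateStream('cart-123', {
    initialState: () => ({ items: 0 }),
    evolve: (state, event) =>
      event.type === 'ProductAdded' ? { items: state.items + event.data.quantity } : state,
  });
  console.log(state, currentStreamVersion); // { items: 3 } 2n
};

void run();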