@event-driven-io/emmett-sqlite 0.33.0 → 0.34.0
This diff shows the changes between publicly available package versions as published to their public registries; it is provided for informational purposes only.
- package/dist/index.cjs +34 -6
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +32 -16
- package/dist/index.d.ts +32 -16
- package/dist/index.js +30 -2
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
package/dist/index.cjs
CHANGED
@@ -384,7 +384,8 @@ var appendToStream = async (db, streamName, streamType, messages, options) => {
         expectedStreamVersion
       }
     );
-    if (_optionalChain([options, 'optionalAccess', _18 => _18.
+    if (_optionalChain([options, 'optionalAccess', _18 => _18.onBeforeCommit]))
+      await options.onBeforeCommit(messagesToAppend, { connection: db });
   } catch (err) {
     await db.command(`ROLLBACK`);
     throw err;
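The hunk above replaces the old `preCommitHook` call with an awaited `onBeforeCommit(messagesToAppend, { connection: db })` callback that runs while the append transaction is still open, so a throwing callback falls through to the `ROLLBACK` path below it. A minimal sketch of a callback in that shape (the connection type is reduced to the `command(sql, values?)` signature used elsewhere in this package; the audit table is purely illustrative):

// Minimal structural type for the connection handed to onBeforeCommit.
type Connection = { command: (sql: string, values?: unknown[]) => Promise<void> };

// Hypothetical callback: runs after the message INSERTs but before COMMIT,
// so throwing here rolls the whole append back.
const onBeforeCommit = async (
  messages: Array<{ type: string; data: unknown }>,
  context: { connection: Connection },
): Promise<void> => {
  for (const message of messages) {
    await context.connection.command(
      `INSERT INTO audit_log (message_type, payload) VALUES (?, ?)`,
      [message.type, JSON.stringify(message.data)],
    );
  }
};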
@@ -552,6 +553,20 @@ var buildMessageInsertQuery = (messages, expectedStreamVersion, streamId, partit
   return { sqlString, values: query.values };
 };
 
+// src/eventStore/projections/index.ts
+var handleProjections = async (options) => {
+  const { projections: allProjections, events, connection } = options;
+  const eventTypes = events.map((e) => e.type);
+  const projections = allProjections.filter(
+    (p) => p.canHandle.some((type) => eventTypes.includes(type))
+  );
+  for (const projection2 of projections) {
+    await projection2.handle(events, {
+      connection
+    });
+  }
+};
+
 // src/eventStore/SQLiteEventStore.ts
 var SQLiteEventStoreDefaultStreamVersion = 0n;
 var getSQLiteEventStore = (options) => {
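The new `handleProjections` helper filters the registered projections by the event types present in the appended batch and awaits each matching `projection.handle(events, { connection })`. A sketch of a projection object in the shape this helper expects (only `canHandle` and `handle` are read here; the read-model table and event types are illustrative):

type Connection = { command: (sql: string, values?: unknown[]) => Promise<void> };

// Hypothetical read-model projection in the shape consumed by handleProjections.
const cartSummaryProjection = {
  // event types this projection reacts to
  canHandle: ['ProductItemAdded', 'ShoppingCartConfirmed'],
  // invoked with the batch of recorded events and the append connection
  handle: async (
    events: Array<{ type: string; data: unknown }>,
    { connection }: { connection: Connection },
  ): Promise<void> => {
    for (const event of events) {
      await connection.command(
        `INSERT INTO cart_summary (event_type, payload) VALUES (?, ?)`,
        [event.type, JSON.stringify(event.data)],
      );
    }
  },
};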
@@ -560,6 +575,8 @@ var getSQLiteEventStore = (options) => {
   let database;
   const fileName = _nullishCoalesce(options.fileName, () => ( InMemorySQLiteDatabase));
   const isInMemory = fileName === InMemorySQLiteDatabase;
+  const inlineProjections = (_nullishCoalesce(options.projections, () => ( []))).filter(({ type }) => type === "inline").map(({ projection: projection2 }) => projection2);
+  const onBeforeCommitHook = _optionalChain([options, 'access', _32 => _32.hooks, 'optionalAccess', _33 => _33.onBeforeCommit]);
   const createConnection = () => {
     if (database != null) {
       return database;
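The two added lines above are where the new store options are read: `options.projections` (only registrations with `type: 'inline'` are kept) and `options.hooks?.onBeforeCommit`. A hedged configuration sketch, assuming `getSQLiteEventStore` is imported from the package root and that the 0.34.0 option types accept `projections` and `hooks` as read by this code; the file name, projection body, and hook body are illustrative:

import { getSQLiteEventStore } from '@event-driven-io/emmett-sqlite';

const eventStore = getSQLiteEventStore({
  fileName: './data/events.db', // or undefined to use the in-memory database
  projections: [
    {
      type: 'inline', // only 'inline' registrations pass the filter above
      projection: {
        canHandle: ['ProductItemAdded'],
        handle: async (events, { connection }) => {
          // update a read model inside the append transaction (illustrative table)
          for (const event of events) {
            await connection.command(
              `INSERT INTO cart_summary (payload) VALUES (?)`,
              [JSON.stringify(event)],
            );
          }
        },
      },
    },
  ],
  hooks: {
    // receives the recorded messages and { connection } right before COMMIT
    onBeforeCommit: async (messages) => {
      console.log(`about to commit ${messages.length} message(s)`);
    },
  },
});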
@@ -589,7 +606,7 @@ var getSQLiteEventStore = (options) => {
     }
   };
   if (options) {
-    autoGenerateSchema = _optionalChain([options, 'access',
+    autoGenerateSchema = _optionalChain([options, 'access', _34 => _34.schema, 'optionalAccess', _35 => _35.autoMigration]) === void 0 || _optionalChain([options, 'access', _36 => _36.schema, 'optionalAccess', _37 => _37.autoMigration]) !== "None";
   }
   const ensureSchemaExists = async (connection) => {
     if (!autoGenerateSchema) return Promise.resolve();
@@ -602,7 +619,7 @@ var getSQLiteEventStore = (options) => {
   return {
     async aggregateStream(streamName, options2) {
       const { evolve, initialState, read } = options2;
-      const expectedStreamVersion = _optionalChain([read, 'optionalAccess',
+      const expectedStreamVersion = _optionalChain([read, 'optionalAccess', _38 => _38.expectedStreamVersion]);
       let state = initialState();
       if (typeof streamName !== "string") {
         throw new Error("Stream name is not string");
@@ -637,13 +654,24 @@ var getSQLiteEventStore = (options) => {
       const [firstPart, ...rest] = streamName.split("-");
       const streamType = firstPart && rest.length > 0 ? firstPart : "emt:unknown";
       const appendResult = await withConnection(
-        (db) => appendToStream(db, streamName, streamType, events,
+        (db) => appendToStream(db, streamName, streamType, events, {
+          ...options2,
+          onBeforeCommit: async (messages, context) => {
+            if (inlineProjections.length > 0)
+              await handleProjections({
+                projections: inlineProjections,
+                events: messages,
+                ...context
+              });
+            if (onBeforeCommitHook) await onBeforeCommitHook(messages, context);
+          }
+        })
       );
       if (!appendResult.success)
         throw new ExpectedVersionConflictError(
           -1n,
           //TODO: Return actual version in case of error
-          _nullishCoalesce(_optionalChain([options2, 'optionalAccess',
+          _nullishCoalesce(_optionalChain([options2, 'optionalAccess', _39 => _39.expectedStreamVersion]), () => ( NO_CONCURRENCY_CHECK))
         );
       return {
         nextExpectedStreamVersion: appendResult.nextStreamPosition,
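This hunk wires both mechanisms into the store's `appendToStream`: matching inline projections run first, then the optional `onBeforeCommit` hook, all inside the append transaction, so an exception from either one hits the `ROLLBACK` path from the first hunk. A usage sketch, assuming the `eventStore` configured in the previous sketch and a module context (top-level await); the stream name and event payload are illustrative:

const result = await eventStore.appendToStream('shopping_cart-123', [
  { type: 'ProductItemAdded', data: { productId: 'shoes', quantity: 1 } },
]);
// If an inline projection or the onBeforeCommit hook throws, the transaction
// is rolled back and the error is rethrown to this caller instead of committing.
console.log(result.nextExpectedStreamVersion);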
@@ -665,7 +693,7 @@ var readStream = async (db, streamId, options) => {
     `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id
        FROM ${messagesTable.name}
        WHERE stream_id = ? AND partition = ? AND is_archived = FALSE ${fromCondition} ${toCondition}`,
-    [streamId, _nullishCoalesce(_optionalChain([options, 'optionalAccess',
+    [streamId, _nullishCoalesce(_optionalChain([options, 'optionalAccess', _40 => _40.partition]), () => ( defaultTag))]
   );
   const messages = results.map((row) => {
     const rawEvent = {
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
(single-line source map regenerated for the new build; the full map contents are omitted from this view)
|
|
1
|
+
{"version":3,"sources":["/home/runner/work/emmett/emmett/src/packages/emmett-sqlite/dist/index.cjs","../src/connection/sqliteConnection.ts","../../emmett/src/validation/index.ts","../../emmett/src/errors/index.ts","../../emmett/src/eventStore/inMemoryEventStore.ts","../../emmett/src/eventStore/subscriptions/caughtUpTransformStream.ts","../../emmett/src/eventStore/subscriptions/streamingCoordinator.ts","../../emmett/src/streaming/transformations/notifyAboutNoActiveReaders.ts","../../emmett/src/utils/retry.ts","../../emmett/src/streaming/generators/fromArray.ts","../../emmett/src/streaming/restream.ts","../../emmett/src/streaming/transformations/filter.ts","../../emmett/src/streaming/transformations/map.ts","../../emmett/src/streaming/transformations/reduce.ts","../../emmett/src/streaming/transformations/retry.ts","../../emmett/src/streaming/transformations/skip.ts","../../emmett/src/streaming/transformations/stopAfter.ts","../../emmett/src/streaming/transformations/stopOn.ts","../../emmett/src/streaming/transformations/take.ts","../../emmett/src/streaming/transformations/waitAtMost.ts","../../emmett/src/eventStore/expectedVersion.ts","../../emmett/src/serialization/json/JSONParser.ts","../../emmett/src/streaming/transformations/index.ts","../src/eventStore/schema/appendToStream.ts","../src/eventStore/schema/typing.ts","../src/eventStore/projections/index.ts","../src/eventStore/SQLiteEventStore.ts","../src/eventStore/schema/readStream.ts","../src/eventStore/schema/tables.ts"],"names":["sql"],"mappings":"AAAA;ACAA,oFAAoB;AAeb,IAAM,cAAA,EAAgB,CAAC,KAAA,EAAA,GAAyC;AACrE,EAAA,GAAA,CAAI,MAAA,WAAiB,MAAA,GAAS,OAAA,GAAU,KAAA,EAAO;AAC7C,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA;AACT,CAAA;AAGO,IAAM,uBAAA,EAAyB,UAAA;AAO/B,IAAM,iBAAA,EAAmB,CAC9B,OAAA,EAAA,GACqB;AACrB,EAAA,MAAM,GAAA,EAAK,IAAI,iBAAA,CAAQ,QAAA,kBAAS,OAAA,CAAQ,QAAA,UAAY,wBAAsB,CAAA;AAE1E,EAAA,OAAO;AAAA,IACL,KAAA,EAAO,CAAA,EAAA,GAAY,EAAA,CAAG,KAAA,CAAM,CAAA;AAAA,IAC5B,OAAA,EAAS,CAACA,IAAAA,EAAa,MAAA,EAAA,GACrB,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAA,GAAsB;AAC/C,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,CAAA;AAAA,MACV,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,IACH,KAAA,EAAO,CAAIA,IAAAA,EAAa,MAAA,EAAA,GACtB,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAmB,MAAA,EAAA,GAAgB;AAC5D,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MAChB,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,IACH,WAAA,EAAa,CAAIA,IAAAA,EAAa,MAAA,EAAA,GAC5B,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAmB,MAAA,EAAA,GAAqB;AACjE,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MAChB,CAAC,CAAA;AAAA,IACH,CAAC;AAAA,EACL,CAAA;AACF,CAAA;AD9BA;AACA;AEnCO,IAAM,SAAA,EAAW,CAAC,GAAA,EAAA,GACvB,OAAO,IAAA,IAAQ,SAAA,GAAY,IAAA,IAAQ,GAAA;AAE9B,IAAM,SAAA,EAAW,CAAC,GAAA,EAAA,GACvB,OAAO,IAAA,IAAQ,QAAA;ACQV,IAAM,YAAA,EAAN,MAAM,aAAA,QAAoB,MAAM;AH2BvC,EG1BS;AH2BT,EGzBE,WAAA,CACE,OAAA,EACA;AACA,IAAA,MAAM,UAAA,EACJ,QAAA,GAAW,OAAO,QAAA,IAAY,SAAA,GAAY,YAAA,GAAe,QAAA,EACrD,OAAA,CAAQ,UAAA,EACR,QAAA,CAAS,OAAO,EAAA,EACd,QAAA,EACA,GAAA;AACR,IAAA,MAAM,QAAA,EACJ,QAAA,GAAW,OAAO,QAAA,IAAY,SAAA,GAAY,UAAA,GAAa,QAAA,EACnD,OAAA,CAAQ,QAAA,EACR,QAAA,CAAS,OAAO,EAAA,EACd,QAAA,EACA,CAAA,wBAAA,EAA2B,SAAS,C
AAA,kCAAA,CAAA;AAE5C,IAAA,KAAA,CAAM,OAAO,CAAA;AACb,IAAA,IAAA,CAAK,UAAA,EAAY,SAAA;AAGjB,IAAA,MAAA,CAAO,cAAA,CAAe,IAAA,EAAM,YAAA,CAAY,SAAS,CAAA;AHWrD,EGVE;AACF,CAAA;AAEO,IAAM,iBAAA,EAAN,MAAM,kBAAA,QAAyB,YAAY;AHUlD,EGTE,WAAA,CACS,OAAA,EACA,QAAA,EACP,OAAA,EACA;AACA,IAAA,KAAA,CAAM;AHMV,MGLM,SAAA,EAAW,GAAA;AHMjB,MGLM,OAAA,mBACE,OAAA,UACA,CAAA,iBAAA,EAAoB,QAAA,CAAS,QAAA,CAAS,CAAC,CAAA,wBAAA,kBAA2B,OAAA,6BAAS,QAAA,mBAAS,GAAC,CAAA;AHI7F,IAAA;AGZW,IAAA;AACA,IAAA;AAWP,IAAA;AHIJ,EAAA;AGFA;AHIA;AACA;AIpEA;ACAA;ACAA;ACAA;AACA;ACDA;ACAA;ACAA;ACAA;ACAA;ACAA;ACAA;AdiFA;AACA;AelFA;ACAA;ACAA;ACAA;ACAA;ACeO;AACA;AAEA;AAGA;AAKL,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AACF;AAEO;AAOL,EAAA;AAEA,EAAA;AACE,IAAA;AACJ;AAEO;ApBuDP,EAAA;AoBhDI,IAAA;AAGA,IAAA;ApBgDJ,EAAA;AoB9CA;AbzDO;AAOA;APoGP,EAAA;AOnFI,IAAA;APqFJ,MAAA;AOnFQ,QAAA;AACA,QAAA;APqFR,MAAA;AACA,IAAA;AO9FY,IAAA;AAWR,IAAA;AAEA,IAAA;AAEA,IAAA;APoFJ,EAAA;AACA,iBAAA;AACA,EAAA;AACA,kBAAA;AACA,EAAA;AO3GI,IAAA;AP6GJ,EAAA;AACA,EAAA;AOvFI,IAAA;AACE,MAAA;APyFN,IAAA;AACA,EAAA;AACA,EAAA;AOtFI,IAAA;AAEA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;APuFJ,EAAA;AACA,EAAA;AOpFI,IAAA;AACE,MAAA;APsFN,IAAA;AACA,EAAA;AOpFA;ACpDO;AAIL,EAAA;AAEA,EAAA;ARuIF,IAAA;AQrIM,MAAA;AACE,QAAA;ARuIR,MAAA;AQrIQ,QAAA;AACE,UAAA;ARuIV,QAAA;AQrIQ,QAAA;ARuIR,MAAA;AACA,IAAA;AACA,qBAAA;AACA,EAAA;AQrIA;Aa3BO;ArBmKP,EAAA;AqBjKI,IAAA;ArBmKJ,EAAA;AqBjKA;AA0BO;ArB0IP,EAAA;AqBrII,IAAA;ArBuIJ,sBAAA;AACA;AACA;AACA,MAAA;AACA,IAAA;AACA,EAAA;AACA,EAAA;AqBlII,IAAA;AAEA,IAAA;AACE,MAAA;AAEF,IAAA;ArBkIJ,EAAA;AqB9HA;AVrDO;AXsLP,EAAA;AWnLM,IAAA;AACE,MAAA;AXqLR,IAAA;AACA,EAAA;AWnLE;ACPK;AZ6LP,EAAA;AY1LM,IAAA;AZ4LN,EAAA;AY1LE;ACLK;AAKA;Ab8LP,EAAA;AACA,EAAA;AACA,EAAA;Aa3LI,IAAA;Ab6LJ,MAAA;Aa3LQ,QAAA;Ab6LR,MAAA;AACA,MAAA;Aa3LQ,QAAA;AACA,QAAA;Ab6LR,MAAA;AACA,IAAA;Aa1LI,IAAA;AACA,IAAA;Ab4LJ,EAAA;Aa1LA;ACjBO;Ad8MP,EAAA;AchMM,IAAA;AdkMN,MAAA;AACA,MAAA;AACA,IAAA;AchMQ,MAAA;AdkMR,IAAA;AACA,EAAA;AchME;AAEF;AAQE,EAAA;AACA,EAAA;AAEA,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AACA,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AdsLR,MAAA;AACA,IAAA;AACA,EAAA;AcpLI,IAAA;AdsLJ,EAAA;AcpLA;ACxDO;AAEA;Af8OP,kBAAA;AACA,EAAA;AACA,EAAA;Ae3OI,IAAA;Af6OJ,MAAA;Ae3OQ,QAAA;AACA,QAAA;AACE,UAAA;Af6OV,QAAA;AACA,MAAA;AACA,IAAA;Ae1OI,IAAA;Af4OJ,EAAA;Ae1OA;AClBO;AhB+PP,EAAA;AgB5PM,IAAA;AAEA,IAAA;AACE,MAAA;AhB6PR,IAAA;AACA,EAAA;AgB3PE;ACTK;AjBuQP,EAAA;AiBpQM,IAAA;AACE,MAAA;AACA,MAAA;AjBsQR,IAAA;AiBpQM,IAAA;AACA,IAAA;AjBsQN,EAAA;AiBpQE;ACVK;AAEA;AlBgRP,kBAAA;AACA,EAAA;AACA,EAAA;AkB7QI,IAAA;AlB+QJ,MAAA;AkB7QQ,QAAA;AACE,UAAA;AACA,UAAA;AlB+QV,QAAA;AkB7QU,UAAA;AlB+QV,QAAA;AACA,MAAA;AACA,IAAA;AkB5QI,IAAA;AlB8QJ,EAAA;AkB5QA;ACpBO;AnBmSP,EAAA;AmBhSM,IAAA;AACE,MAAA;AnBkSR,IAAA;AmB/RM,IAAA;AAGA,IAAA;AACE,MAAA;AACA,MAAA;AnB+RR,IAAA;AACA,EAAA;AACA,EAAA;AmB7RM,IAAA;AnB+RN,EAAA;AmB7RE;AGNK;AtBsSP,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AsBpSA;AZnBA;AV0TA;AACA;AuB1TA;AvB4TA;AACA;AwBxUO;AAEA;AACA;AAEA;AAAoB,EAAA;AAE3B;AAEA;AAAgB,EAAA;AACH,IAAA;AACH,EAAA;AACR,EAAA;AAEF;AAEO;AAAqB,EAAA;AACL,EAAA;AACZ,IAAA;AACY,IAAA;AACC,EAAA;AAExB;AAEO;AAAsB,EAAA;AACN,EAAA;AACZ,IAAA;AACY,IAAA;AACC,EAAA;AAExB;AxBqUA;AACA;AuBpUO;AAaL,EAAA;AAEA,EAAA;AAA8B,oBAAA;AACnB,EAAA;AAGX,EAAA;AAGe,IAAA;AAKV,MAAA;AACI,MAAA;AACa,MAAA;AACN,QAAA;AACR,QAAA;AACgB,QAAA;AACY,QAAA;AACgB,MAAA;AAC9C,IAAA;AACF,EAAA;AAGJ,EAAA;AAEA,EAAA;AAEA,EAAA;AACE,IAAA;AAAe,MAAA;AACb,MAAA;AACA,MAAA;AACA,MAAA;AACA,MAAA;AACA,QAAA;AACE,MAAA;AACF,IAAA;AAGF,IAAA;AACE,MAAA;AAAiE,EAAA;AAEnE,IAAA;AACA,IAAA;AAAM,EAAA;AAGR,EAAA;AACE,IAAA;AACA,IAAA;AAAO,EAAA;AAGT,EAAA;AAEA,EAAA;
AACF;AAEA;AAGE,EAAA;AAEA,EAAA;AAGA,EAAA;AAGA,EAAA;AAEA,EAAA;AACF;AAEA;AAUE,EAAA;AACA,EAAA;AAEA,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AAA8B,QAAA;AAC5B,QAAA;AACA,QAAA;AACA,MAAA;AACF,IAAA;AAGF,IAAA;AAEA,IAAA;AACE,MAAA;AAAoB,QAAA;AAGc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAAA;AAAA,QAAA;AAYhC,UAAA;AACE,UAAA;AACS,2CAAA;AACkC,UAAA;AAC3C,QAAA;AACF,MAAA;AACF,IAAA;AAEA,MAAA;AAAoB,QAAA;AAGS;AAAA;AAAA;AAAA;AAAA;AAAA,UAAA;AAAA,QAAA;AAO3B,UAAA;AACW,UAAA;AACT,2CAAA;AAC2C,QAAA;AAC7C,MAAA;AACF,IAAA;AAGF,IAAA;AACE,MAAA;AAAgD,IAAA;AAGlD,IAAA;AAEA,IAAA;AACE,MAAA;AAEA,MAAA;AACE,QAAA;AAAO,UAAA;AACI,QAAA;AACX,MAAA;AACF,IAAA;AAGF,IAAA;AAA8B,MAAA;AAC5B,MAAA;AACA,MAAA;AACA,uCAAA;AACkC,IAAA;AAGpC,IAAA;AAIA,IAAA;AACE,MAAA;AAAgD,IAAA;AAGlD,IAAA;AAAmD,EAAA;AAEnD,IAAA;AACE,MAAA;AAAO,QAAA;AACI,MAAA;AACX,IAAA;AAGF,IAAA;AAAM,EAAA;AAGR,EAAA;AAAO,IAAA;AACI,IAAA;AACW,IAAA;AACA,EAAA;AAExB;AAEA;AACE,EAAA;AACF;AAEA;AAKE,EAAA;AAAwB,IAAA;AAC8D,IAAA;AAC3E,EAAA;AAGX,EAAA;AACE,IAAA;AAAwB,EAAA;AAExB,IAAA;AAAqD,EAAA;AAEvD,EAAA;AACF;AAEA;AASE,EAAA;AAAuB,IAAA;AAKnB,MAAA;AAIE,QAAA;AAA6C,MAAA;AAG/C,MAAA;AAGA,MAAA;AACA,MAAA;AAAoB,QAAA;AAClB,yBAAA;AAC6B,yBAAA;AAChB,QAAA;AACoB,QAAA;AACA,QAAA;AACI,yCAAA;AACA,QAAA;AAC7B,QAAA;AACS,QAAA;AACjB,MAAA;AAGF,MAAA;AAAO,IAAA;AACT,IAAA;AACA,MAAA;AACqB,MAAA;AACV,IAAA;AACX,EAAA;AAGF,EAAA;AAAkB,kBAAA;AACkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAA;AAYU;AAAA;AAAA,IAAA;AAI9C,EAAA;AACF;AvBwOA;AACA;AyBtgBO;AAGL,EAAA;AAEA,EAAA;AAEA,EAAA;AAAmC,IAAA;AACmB,EAAA;AAGtD,EAAA;AACE,IAAA;AAAgC,MAAA;AAC9B,IAAA;AACD,EAAA;AAEL;AzBmgBA;AACA;A0BnhBO;AAkCA;AAGL,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAEA,EAAA;AAEA,EAAA;AAIA,EAAA;AAEA,EAAA;AACE,IAAA;AACE,MAAA;AAAO,IAAA;AAGT,IAAA;AAAwB,MAAA;AACtB,IAAA;AACD,EAAA;AAGH,EAAA;AACE,IAAA;AACE,MAAA;AAAA,IAAA;AAEF,IAAA;AACE,MAAA;AACA,MAAA;AAAW,IAAA;AACb,EAAA;AAGF,EAAA;AAGE,IAAA;AACE,MAAA;AAA4B,IAAA;AAG9B,IAAA;AACE,MAAA;AACA,MAAA;AAA6B,IAAA;AAE7B,MAAA;AAAgB,IAAA;AAClB,EAAA;AAGF,EAAA;AACE,IAAA;AAEoC,EAAA;AAGtC,EAAA;AAGE,IAAA;AAEA,IAAA;AACE,MAAA;AACA,MAAA;AAAiB,IAAA;AAGnB,IAAA;AAAuB,EAAA;AAGzB,EAAA;AAAO,IAAA;AASH,MAAA;AAEA,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAA2C,MAAA;AAG7C,MAAA;AACE,QAAA;AAA4B,MAAA;AAG9B,MAAA;AAAqB,QAAA;AAC+B,MAAA;AAGpD,MAAA;AAEA,MAAA;AAAA,QAAA;AACE,QAAA;AACA,QAAA;AACA,MAAA;AAGF,MAAA;AACE,QAAA;AAEA,QAAA;AAA2B,MAAA;AAG7B,MAAA;AAAO,QAAA;AACL,QAAA;AACA,QAAA;AACqB,MAAA;AACvB,IAAA;AACF,IAAA;AAO0E,IAAA;AAOxE,MAAA;AACE,QAAA;AAA4B,MAAA;AAI9B,MAAA;AAEA,MAAA;AAGA,MAAA;AAA2B,QAAA;AAC0B,UAAA;AAC9C,UAAA;AAED,YAAA;AACE,cAAA;AAAwB,gBAAA;AACT,gBAAA;AACL,gBAAA;AACL,cAAA;AAGP,YAAA;AAAkE,UAAA;AACpE,QAAA;AACD,MAAA;AAGH,MAAA;AACE,QAAA;AAAU,UAAA;AACP;AAAA,2CAAA;AACiC,QAAA;AAGtC,MAAA;AAAO,QAAA;AACmC,QAAA;AACF,QAAA;AAEmB,MAAA;AAC3D,IAAA;AACF,EAAA;AAEJ;A1BybA;AACA;A2BppBO;AAOL,EAAA;AAKA,EAAA;AAAW,IAAA;AAKH,EAAA;AAGR,EAAA;AAEA,EAAA;AAAyB,IAAA;AACvB,gBAAA;AACgC,yEAAA;AACmE,IAAA;AACxD,EAAA;AAG7C,EAAA;AAEI,IAAA;AAAiB,MAAA;AACL,MAAA;AAC6B,MAAA;AACQ,IAAA;AAGjD,IAAA;AAAsD,MAAA;AACM,MAAA;AAC3C,MAAA;AACH,MAAA;AAC8B,MAAA;AACA,IAAA;AAG5C,IAAA;AAAO,MAAA;AACF,MAAA;AACG,MAAA;AACN,IAAA;AAIF,EAAA;AAGJ,EAAA;AACI,IAAA;AAE4C,IAAA;AAClC,IAAA;AACM,EAAA;AAEhB,IAAA;AACwB,IAAA;AACb,IAAA;AACK,EAAA;AAEtB;A3B2nBA;AACA;A4BptBO;AAEA;AAAwB,EAAA;AACkB;AAAA;AAAA,oEAAA;AAG8B;AAAA;AAAA;AAAA;AAAA;AAAA,IAAA;AAO/E;AAEO;AAAyB,EAAA;AACkB;AAAA;AAAA,yEAAA;AAGkC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAapF;AAEO;AAEA;AAGL,EAAA;AACE,IAAA;AAAoB,EAAA;AAExB;A5BgtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/emmett/emmett/src/pac
kages/emmett-sqlite/dist/index.cjs","sourcesContent":[null,"import sqlite3 from 'sqlite3';\n\nexport type Parameters = object | string | bigint | number | boolean | null;\n\nexport type SQLiteConnection = {\n close: () => void;\n command: (sql: string, values?: Parameters[]) => Promise<void>;\n query: <T>(sql: string, values?: Parameters[]) => Promise<T[]>;\n querySingle: <T>(sql: string, values?: Parameters[]) => Promise<T | null>;\n};\n\nexport interface SQLiteError extends Error {\n errno: number;\n}\n\nexport const isSQLiteError = (error: unknown): error is SQLiteError => {\n if (error instanceof Error && 'code' in error) {\n return true;\n }\n\n return false;\n};\n\nexport type InMemorySQLiteDatabase = ':memory:';\nexport const InMemorySQLiteDatabase = ':memory:';\n\ntype SQLiteConnectionOptions = {\n // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n fileName: InMemorySQLiteDatabase | string | undefined;\n};\n\nexport const sqliteConnection = (\n options: SQLiteConnectionOptions,\n): SQLiteConnection => {\n const db = new sqlite3.Database(options.fileName ?? InMemorySQLiteDatabase);\n\n return {\n close: (): void => db.close(),\n command: (sql: string, params?: Parameters[]) =>\n new Promise((resolve, reject) => {\n db.run(sql, params ?? [], (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n }),\n query: <T>(sql: string, params?: Parameters[]): Promise<T[]> =>\n new Promise((resolve, reject) => {\n db.all(sql, params ?? [], (err: Error | null, result: T[]) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(result);\n });\n }),\n querySingle: <T>(sql: string, params?: Parameters[]): Promise<T | null> =>\n new Promise((resolve, reject) => {\n db.get(sql, params ?? [], (err: Error | null, result: T | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(result);\n });\n }),\n };\n};\n","import { ValidationError } from '../errors';\n\nexport const enum ValidationErrors {\n NOT_A_NONEMPTY_STRING = 'NOT_A_NONEMPTY_STRING',\n NOT_A_POSITIVE_NUMBER = 'NOT_A_POSITIVE_NUMBER',\n NOT_AN_UNSIGNED_BIGINT = 'NOT_AN_UNSIGNED_BIGINT',\n}\n\nexport const isNumber = (val: unknown): val is number =>\n typeof val === 'number' && val === val;\n\nexport const isString = (val: unknown): val is string =>\n typeof val === 'string';\n\nexport const assertNotEmptyString = (value: unknown): string => {\n if (!isString(value) || value.length === 0) {\n throw new ValidationError(ValidationErrors.NOT_A_NONEMPTY_STRING);\n }\n return value;\n};\n\nexport const assertPositiveNumber = (value: unknown): number => {\n if (!isNumber(value) || value <= 0) {\n throw new ValidationError(ValidationErrors.NOT_A_POSITIVE_NUMBER);\n }\n return value;\n};\n\nexport const assertUnsignedBigInt = (value: string): bigint => {\n const number = BigInt(value);\n if (number < 0) {\n throw new ValidationError(ValidationErrors.NOT_AN_UNSIGNED_BIGINT);\n }\n return number;\n};\n\nexport * from './dates';\n","import { isNumber, isString } from '../validation';\n\nexport type ErrorConstructor<ErrorType extends Error> = new (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ...args: any[]\n) => ErrorType;\n\nexport const isErrorConstructor = <ErrorType extends Error>(\n // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type\n expect: Function,\n): expect is ErrorConstructor<ErrorType> => {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n return (\n typeof expect === 'function' &&\n expect.prototype &&\n 
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n expect.prototype.constructor === expect\n );\n};\n\nexport class EmmettError extends Error {\n public errorCode: number;\n\n constructor(\n options?: { errorCode: number; message?: string } | string | number,\n ) {\n const errorCode =\n options && typeof options === 'object' && 'errorCode' in options\n ? options.errorCode\n : isNumber(options)\n ? options\n : 500;\n const message =\n options && typeof options === 'object' && 'message' in options\n ? options.message\n : isString(options)\n ? options\n : `Error with status code '${errorCode}' ocurred during Emmett processing`;\n\n super(message);\n this.errorCode = errorCode;\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, EmmettError.prototype);\n }\n}\n\nexport class ConcurrencyError extends EmmettError {\n constructor(\n public current: string | undefined,\n public expected: string,\n message?: string,\n ) {\n super({\n errorCode: 412,\n message:\n message ??\n `Expected version ${expected.toString()} does not match current ${current?.toString()}`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ConcurrencyError.prototype);\n }\n}\n\nexport class ValidationError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 400,\n message: message ?? `Validation Error ocurred during Emmett processing`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ValidationError.prototype);\n }\n}\n\nexport class IllegalStateError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 403,\n message: message ?? `Illegal State ocurred during Emmett processing`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, IllegalStateError.prototype);\n }\n}\n\nexport class NotFoundError extends EmmettError {\n constructor(options?: { id: string; type: string; message?: string }) {\n super({\n errorCode: 404,\n message:\n options?.message ??\n (options?.id\n ? options.type\n ? `${options.type} with ${options.id} was not found during Emmett processing`\n : `State with ${options.id} was not found during Emmett processing`\n : options?.type\n ? 
`${options.type} was not found during Emmett processing`\n : 'State was not found during Emmett processing'),\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, NotFoundError.prototype);\n }\n}\n","import { v4 as uuid } from 'uuid';\nimport type {\n BigIntStreamPosition,\n CombinedReadEventMetadata,\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../typing';\nimport { tryPublishMessagesAfterCommit } from './afterCommit';\nimport {\n type AggregateStreamOptions,\n type AggregateStreamResult,\n type AppendToStreamOptions,\n type AppendToStreamResult,\n type DefaultEventStoreOptions,\n type EventStore,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from './eventStore';\nimport { assertExpectedVersionMatchesCurrent } from './expectedVersion';\nimport { StreamingCoordinator } from './subscriptions';\nimport type { ProjectionRegistration } from '../projections';\n\nexport const InMemoryEventStoreDefaultStreamVersion = 0n;\n\nexport type InMemoryEventStore =\n EventStore<ReadEventMetadataWithGlobalPosition>;\n\nexport type InMemoryReadEventMetadata = ReadEventMetadataWithGlobalPosition;\n\nexport type InMemoryProjectionHandlerContext = {\n eventStore: InMemoryEventStore;\n};\n\nexport type InMemoryEventStoreOptions =\n DefaultEventStoreOptions<InMemoryEventStore> & {\n projections?: ProjectionRegistration<\n 'inline',\n InMemoryReadEventMetadata,\n InMemoryProjectionHandlerContext\n >[];\n };\n\nexport type InMemoryReadEvent<EventType extends Event = Event> = ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n>;\n\nexport const getInMemoryEventStore = (\n eventStoreOptions?: InMemoryEventStoreOptions,\n): InMemoryEventStore => {\n const streams = new Map<\n string,\n ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[]\n >();\n const streamingCoordinator = StreamingCoordinator();\n\n const getAllEventsCount = () => {\n return Array.from<ReadEvent[]>(streams.values())\n .map((s) => s.length)\n .reduce((p, c) => p + c, 0);\n };\n\n const _inlineProjections = (eventStoreOptions?.projections ?? [])\n .filter(({ type }) => type === 'inline')\n .map(({ projection }) => projection);\n\n return {\n async aggregateStream<State, EventType extends Event>(\n streamName: string,\n options: AggregateStreamOptions<\n State,\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n ): Promise<AggregateStreamResult<State>> {\n const { evolve, initialState, read } = options;\n\n const result = await this.readStream<EventType>(streamName, read);\n\n const events = result?.events ?? [];\n\n return {\n currentStreamVersion: BigInt(events.length),\n state: events.reduce(evolve, initialState()),\n streamExists: result.streamExists,\n };\n },\n\n readStream: <EventType extends Event>(\n streamName: string,\n options?: ReadStreamOptions<BigIntStreamPosition>,\n ): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n > => {\n const events = streams.get(streamName);\n const currentStreamVersion = events\n ? BigInt(events.length)\n : InMemoryEventStoreDefaultStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n options?.expectedStreamVersion,\n InMemoryEventStoreDefaultStreamVersion,\n );\n\n const from = Number(options && 'from' in options ? options.from : 0);\n const to = Number(\n options && 'to' in options\n ? options.to\n : options && 'maxCount' in options && options.maxCount\n ? options.from + options.maxCount\n : (events?.length ?? 
1),\n );\n\n const resultEvents =\n events !== undefined && events.length > 0\n ? events\n .map(\n (e) =>\n e as ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n )\n .slice(from, to)\n : [];\n\n const result: ReadStreamResult<\n EventType,\n ReadEventMetadataWithGlobalPosition\n > = {\n currentStreamVersion,\n events: resultEvents,\n streamExists: events !== undefined && events.length > 0,\n };\n\n return Promise.resolve(result);\n },\n\n appendToStream: async <EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResult> => {\n const currentEvents = streams.get(streamName) ?? [];\n const currentStreamVersion =\n currentEvents.length > 0\n ? BigInt(currentEvents.length)\n : InMemoryEventStoreDefaultStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n options?.expectedStreamVersion,\n InMemoryEventStoreDefaultStreamVersion,\n );\n\n const newEvents: ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >[] = events.map((event, index) => {\n const metadata: ReadEventMetadataWithGlobalPosition = {\n streamName,\n messageId: uuid(),\n streamPosition: BigInt(currentEvents.length + index + 1),\n globalPosition: BigInt(getAllEventsCount() + index + 1),\n };\n return {\n ...event,\n kind: event.kind ?? 'Event',\n metadata: {\n ...('metadata' in event ? (event.metadata ?? {}) : {}),\n ...metadata,\n } as CombinedReadEventMetadata<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n };\n });\n\n const positionOfLastEventInTheStream = BigInt(\n newEvents.slice(-1)[0]!.metadata.streamPosition,\n );\n\n streams.set(streamName, [...currentEvents, ...newEvents]);\n await streamingCoordinator.notify(newEvents);\n\n const result: AppendToStreamResult = {\n nextExpectedStreamVersion: positionOfLastEventInTheStream,\n createdNewStream:\n currentStreamVersion === InMemoryEventStoreDefaultStreamVersion,\n };\n\n await tryPublishMessagesAfterCommit<InMemoryEventStore>(\n newEvents,\n eventStoreOptions?.hooks,\n );\n\n return result;\n },\n\n //streamEvents: streamingCoordinator.stream,\n };\n};\n","import { TransformStream } from 'web-streams-polyfill';\nimport type {\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../../typing';\nimport { globalStreamCaughtUp, type GlobalSubscriptionEvent } from '../events';\n\nexport const streamTrackingGlobalPosition = (\n currentEvents: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[],\n) => new CaughtUpTransformStream(currentEvents);\n\nexport class CaughtUpTransformStream extends TransformStream<\n ReadEvent<Event, ReadEventMetadataWithGlobalPosition>,\n | ReadEvent<Event, ReadEventMetadataWithGlobalPosition>\n | GlobalSubscriptionEvent\n> {\n private _currentPosition: bigint;\n private _logPosition: bigint;\n\n constructor(events: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[]) {\n super({\n start: (controller) => {\n let globalPosition = 0n;\n for (const event of events) {\n controller.enqueue(event);\n globalPosition = event.metadata.globalPosition;\n }\n controller.enqueue(globalStreamCaughtUp({ globalPosition }));\n },\n transform: (event, controller) => {\n this._currentPosition = event.metadata.globalPosition;\n controller.enqueue(event);\n\n if (this._currentPosition < this._logPosition) return;\n\n controller.enqueue(\n globalStreamCaughtUp({ globalPosition: this._currentPosition }),\n );\n },\n });\n\n this._currentPosition = this._logPosition =\n events.length > 0\n ? 
events[events.length - 1]!.metadata.globalPosition\n : 0n;\n }\n\n public set logPosition(value: bigint) {\n this._logPosition = value;\n }\n}\n","import { v4 as uuid } from 'uuid';\nimport { notifyAboutNoActiveReadersStream } from '../../streaming/transformations/notifyAboutNoActiveReaders';\nimport { writeToStream } from '../../streaming/writers';\nimport type {\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../../typing';\nimport {\n CaughtUpTransformStream,\n streamTrackingGlobalPosition,\n} from './caughtUpTransformStream';\n\nexport const StreamingCoordinator = () => {\n const allEvents: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[] = [];\n const listeners = new Map<string, CaughtUpTransformStream>();\n\n return {\n notify: async (\n events: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[],\n ) => {\n if (events.length === 0) return;\n\n allEvents.push(...events);\n\n for (const listener of listeners.values()) {\n listener.logPosition =\n events[events.length - 1]!.metadata.globalPosition;\n\n await writeToStream(listener, events);\n }\n },\n\n stream: () => {\n const streamId = uuid();\n const transformStream = streamTrackingGlobalPosition(allEvents);\n\n listeners.set(streamId, transformStream);\n return transformStream.readable.pipeThrough(\n notifyAboutNoActiveReadersStream(\n (stream) => {\n if (listeners.has(stream.streamId))\n listeners.delete(stream.streamId);\n },\n { streamId },\n ),\n );\n },\n };\n};\n","import { v4 as uuid } from 'uuid';\nimport { TransformStream } from 'web-streams-polyfill';\n\nexport const notifyAboutNoActiveReadersStream = <Item>(\n onNoActiveReaderCallback: (\n stream: NotifyAboutNoActiveReadersStream<Item>,\n ) => void,\n options: { streamId?: string; intervalCheckInMs?: number } = {},\n) => new NotifyAboutNoActiveReadersStream(onNoActiveReaderCallback, options);\n\nexport class NotifyAboutNoActiveReadersStream<Item> extends TransformStream<\n Item,\n Item\n> {\n private checkInterval: NodeJS.Timeout | null = null;\n public readonly streamId: string;\n private _isStopped: boolean = false;\n public get hasActiveSubscribers() {\n return !this._isStopped;\n }\n\n constructor(\n private onNoActiveReaderCallback: (\n stream: NotifyAboutNoActiveReadersStream<Item>,\n ) => void,\n options: { streamId?: string; intervalCheckInMs?: number } = {},\n ) {\n super({\n cancel: (reason) => {\n console.log('Stream was canceled. Reason:', reason);\n this.stopChecking();\n },\n });\n this.streamId = options?.streamId ?? uuid();\n\n this.onNoActiveReaderCallback = onNoActiveReaderCallback;\n\n this.startChecking(options?.intervalCheckInMs ?? 
20);\n }\n\n private startChecking(interval: number) {\n this.checkInterval = setInterval(() => {\n this.checkNoActiveReader();\n }, interval);\n }\n\n private stopChecking() {\n if (!this.checkInterval) return;\n\n clearInterval(this.checkInterval);\n this.checkInterval = null;\n this._isStopped = true;\n this.onNoActiveReaderCallback(this);\n }\n\n private checkNoActiveReader() {\n if (!this.readable.locked && !this._isStopped) {\n this.stopChecking();\n }\n }\n}\n","import retry from 'async-retry';\n\nexport type AsyncRetryOptions = retry.Options & {\n shouldRetryError?: (error: unknown) => boolean;\n};\n\nexport const NoRetries: AsyncRetryOptions = { retries: 0 };\n\nexport const asyncRetry = async <T>(\n fn: () => Promise<T>,\n opts?: AsyncRetryOptions,\n): Promise<T> => {\n if (opts === undefined || opts.retries === 0) return fn();\n\n return retry(\n async (bail) => {\n try {\n return await fn();\n } catch (error) {\n if (opts?.shouldRetryError && !opts.shouldRetryError(error)) {\n bail(error as Error);\n }\n throw error;\n }\n },\n opts ?? { retries: 0 },\n );\n};\n","import { ReadableStream } from 'web-streams-polyfill';\n\nexport const fromArray = <T>(chunks: T[]) =>\n new ReadableStream<T>({\n start(controller) {\n for (const chunk of chunks) controller.enqueue(chunk);\n controller.close();\n },\n });\n","import {\n type ReadableStream,\n type ReadableStreamDefaultReadResult,\n type TransformStreamDefaultController,\n} from 'web-streams-polyfill';\nimport type { AsyncRetryOptions } from '../utils';\nimport type { Decoder } from './decoders';\nimport { DefaultDecoder } from './decoders/composite';\nimport { streamTransformations } from './transformations';\n\nconst { retry } = streamTransformations;\n\nexport const restream = <\n Source = unknown,\n Transformed = Source,\n StreamType = Source,\n>(\n createSourceStream: () => ReadableStream<StreamType>,\n transform: (input: Source) => Transformed = (source) =>\n source as unknown as Transformed,\n retryOptions: AsyncRetryOptions = { forever: true, minTimeout: 25 },\n decoder: Decoder<StreamType, Source> = new DefaultDecoder<Source>(),\n): ReadableStream<Transformed> =>\n retry(createSourceStream, handleChunk(transform, decoder), retryOptions)\n .readable;\n\nconst handleChunk =\n <Source = unknown, Transformed = Source, StreamType = Source>(\n transform: (input: Source) => Transformed = (source) =>\n source as unknown as Transformed,\n decoder: Decoder<StreamType, Source> = new DefaultDecoder<Source>(),\n ) =>\n (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ): void => {\n const { done: isDone, value } = readResult;\n\n if (value) decoder.addToBuffer(value);\n\n if (!isDone && !decoder.hasCompleteMessage()) return;\n\n decodeAndTransform(decoder, transform, controller);\n };\n\nconst decodeAndTransform = <StreamType, Source, Transformed = Source>(\n decoder: Decoder<StreamType, Source>,\n transform: (input: Source) => Transformed,\n controller: TransformStreamDefaultController<Transformed>,\n) => {\n try {\n const decoded = decoder.decode();\n if (!decoded) return; // TODO: Add a proper handling of decode errors\n\n const transformed = transform(decoded);\n controller.enqueue(transformed);\n } catch (error) {\n controller.error(new Error(`Decoding error: ${error?.toString()}`));\n }\n};\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const filter = <Item>(filter: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n 
transform(chunk, controller) {\n if (filter(chunk)) {\n controller.enqueue(chunk);\n }\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const map = <From, To>(map: (item: From) => To) =>\n new TransformStream<From, To>({\n transform(chunk, controller) {\n controller.enqueue(map(chunk));\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const reduce = <I, O>(\n reducer: (accumulator: O, chunk: I) => O,\n initialValue: O,\n) => new ReduceTransformStream<I, O>(reducer, initialValue);\n\nexport class ReduceTransformStream<I, O> extends TransformStream<I, O> {\n private accumulator: O;\n private reducer: (accumulator: O, chunk: I) => O;\n\n constructor(reducer: (accumulator: O, chunk: I) => O, initialValue: O) {\n super({\n transform: (chunk) => {\n this.accumulator = this.reducer(this.accumulator, chunk);\n },\n flush: (controller) => {\n controller.enqueue(this.accumulator);\n controller.terminate();\n },\n });\n\n this.accumulator = initialValue;\n this.reducer = reducer;\n }\n}\n","import {\n type ReadableStream,\n type ReadableStreamDefaultReadResult,\n TransformStream,\n type TransformStreamDefaultController,\n} from 'web-streams-polyfill';\nimport { type AsyncRetryOptions, asyncRetry } from '../../utils';\n\nexport const retryStream = <\n Source = unknown,\n Transformed = Source,\n StreamType = Source,\n>(\n createSourceStream: () => ReadableStream<StreamType>,\n handleChunk: (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ) => Promise<void> | void,\n retryOptions: AsyncRetryOptions = { forever: true, minTimeout: 25 },\n): TransformStream<Source, Transformed> =>\n new TransformStream<Source, Transformed>({\n start(controller) {\n asyncRetry(\n () => onRestream(createSourceStream, handleChunk, controller),\n retryOptions,\n ).catch((error) => {\n controller.error(error);\n });\n },\n });\n\nconst onRestream = async <StreamType, Source, Transformed = Source>(\n createSourceStream: () => ReadableStream<StreamType>,\n handleChunk: (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ) => Promise<void> | void,\n controller: TransformStreamDefaultController<Transformed>,\n): Promise<void> => {\n const sourceStream = createSourceStream();\n const reader = sourceStream.getReader();\n\n try {\n let done: boolean;\n\n do {\n const result = await reader.read();\n done = result.done;\n\n await handleChunk(result, controller);\n\n if (done) {\n controller.terminate();\n }\n } while (!done);\n } finally {\n reader.releaseLock();\n }\n};\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const skip = <T>(limit: number) => new SkipTransformStream<T>(limit);\n\nexport class SkipTransformStream<T> extends TransformStream<T, T> {\n private count = 0;\n private skip: number;\n\n constructor(skip: number) {\n super({\n transform: (chunk, controller) => {\n this.count++;\n if (this.count > this.skip) {\n controller.enqueue(chunk);\n }\n },\n });\n\n this.skip = skip;\n }\n}\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const stopAfter = <Item>(stopCondition: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n transform(chunk, controller) {\n controller.enqueue(chunk);\n\n if (stopCondition(chunk)) {\n controller.terminate();\n }\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const stopOn = <Item>(stopCondition: 
(item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n async transform(chunk, controller) {\n if (!stopCondition(chunk)) {\n controller.enqueue(chunk);\n return;\n }\n await Promise.resolve();\n controller.terminate();\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const take = <T>(limit: number) => new TakeTransformStream<T>(limit);\n\nexport class TakeTransformStream<T> extends TransformStream<T, T> {\n private count = 0;\n private limit: number;\n\n constructor(limit: number) {\n super({\n transform: (chunk, controller) => {\n if (this.count < this.limit) {\n this.count++;\n controller.enqueue(chunk);\n } else {\n controller.terminate();\n }\n },\n });\n\n this.limit = limit;\n }\n}\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const waitAtMost = <Item>(waitTimeInMs: number) =>\n new TransformStream<Item, Item>({\n start(controller) {\n const timeoutId = setTimeout(() => {\n controller.terminate();\n }, waitTimeInMs);\n\n const originalTerminate = controller.terminate.bind(controller);\n\n // Clear the timeout if the stream is terminated early\n controller.terminate = () => {\n clearTimeout(timeoutId);\n originalTerminate();\n };\n },\n transform(chunk, controller) {\n controller.enqueue(chunk);\n },\n });\n","import { ConcurrencyError } from '../errors';\nimport type { BigIntStreamPosition, Flavour } from '../typing';\n\nexport type ExpectedStreamVersion<VersionType = BigIntStreamPosition> =\n | ExpectedStreamVersionWithValue<VersionType>\n | ExpectedStreamVersionGeneral;\n\nexport type ExpectedStreamVersionWithValue<VersionType = BigIntStreamPosition> =\n Flavour<VersionType, 'StreamVersion'>;\n\nexport type ExpectedStreamVersionGeneral = Flavour<\n 'STREAM_EXISTS' | 'STREAM_DOES_NOT_EXIST' | 'NO_CONCURRENCY_CHECK',\n 'StreamVersion'\n>;\n\nexport const STREAM_EXISTS = 'STREAM_EXISTS' as ExpectedStreamVersionGeneral;\nexport const STREAM_DOES_NOT_EXIST =\n 'STREAM_DOES_NOT_EXIST' as ExpectedStreamVersionGeneral;\nexport const NO_CONCURRENCY_CHECK =\n 'NO_CONCURRENCY_CHECK' as ExpectedStreamVersionGeneral;\n\nexport const matchesExpectedVersion = <StreamVersion = BigIntStreamPosition>(\n current: StreamVersion | undefined,\n expected: ExpectedStreamVersion<StreamVersion>,\n defaultVersion: StreamVersion,\n): boolean => {\n if (expected === NO_CONCURRENCY_CHECK) return true;\n\n if (expected == STREAM_DOES_NOT_EXIST) return current === defaultVersion;\n\n if (expected == STREAM_EXISTS) return current !== defaultVersion;\n\n return current === expected;\n};\n\nexport const assertExpectedVersionMatchesCurrent = <\n StreamVersion = BigIntStreamPosition,\n>(\n current: StreamVersion,\n expected: ExpectedStreamVersion<StreamVersion> | undefined,\n defaultVersion: StreamVersion,\n): void => {\n expected ??= NO_CONCURRENCY_CHECK;\n\n if (!matchesExpectedVersion(current, expected, defaultVersion))\n throw new ExpectedVersionConflictError(current, expected);\n};\n\nexport class ExpectedVersionConflictError<\n VersionType = BigIntStreamPosition,\n> extends ConcurrencyError {\n constructor(\n current: VersionType,\n expected: ExpectedStreamVersion<VersionType>,\n ) {\n super(current?.toString(), expected?.toString());\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ExpectedVersionConflictError.prototype);\n }\n}\n\nexport const isExpectedVersionConflictError = (\n error: unknown,\n): error is ExpectedVersionConflictError =>\n error instanceof ExpectedVersionConflictError;\n","export class 
ParseError extends Error {\n constructor(text: string) {\n super(`Cannot parse! ${text}`);\n }\n}\n\nexport type Mapper<From, To = From> =\n | ((value: unknown) => To)\n | ((value: Partial<From>) => To)\n | ((value: From) => To)\n | ((value: Partial<To>) => To)\n | ((value: To) => To)\n | ((value: Partial<To | From>) => To)\n | ((value: To | From) => To);\n\nexport type MapperArgs<From, To = From> = Partial<From> &\n From &\n Partial<To> &\n To;\n\nexport type ParseOptions<From, To = From> = {\n reviver?: (key: string, value: unknown) => unknown;\n map?: Mapper<From, To>;\n typeCheck?: <To>(value: unknown) => value is To;\n};\n\nexport type StringifyOptions<From, To = From> = {\n map?: Mapper<From, To>;\n};\n\nexport const JSONParser = {\n stringify: <From, To = From>(\n value: From,\n options?: StringifyOptions<From, To>,\n ) => {\n return JSON.stringify(\n options?.map ? options.map(value as MapperArgs<From, To>) : value,\n //TODO: Consider adding support to DateTime and adding specific format to mark that's a bigint\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n (_, v) => (typeof v === 'bigint' ? v.toString() : v),\n );\n },\n parse: <From, To = From>(\n text: string,\n options?: ParseOptions<From, To>,\n ): To | undefined => {\n const parsed: unknown = JSON.parse(text, options?.reviver);\n\n if (options?.typeCheck && !options?.typeCheck<To>(parsed))\n throw new ParseError(text);\n\n return options?.map\n ? options.map(parsed as MapperArgs<From, To>)\n : (parsed as To | undefined);\n },\n};\n","import { filter } from './filter';\nimport { map } from './map';\nimport {\n notifyAboutNoActiveReadersStream,\n NotifyAboutNoActiveReadersStream,\n} from './notifyAboutNoActiveReaders';\nimport { reduce, ReduceTransformStream } from './reduce';\nimport { retryStream } from './retry';\nimport { skip, SkipTransformStream } from './skip';\nimport { stopAfter } from './stopAfter';\nimport { stopOn } from './stopOn';\nimport { take, TakeTransformStream } from './take';\nimport { waitAtMost } from './waitAtMost';\n\nexport const streamTransformations = {\n filter,\n take,\n TakeTransformStream,\n skip,\n SkipTransformStream,\n map,\n notifyAboutNoActiveReadersStream,\n NotifyAboutNoActiveReadersStream,\n reduce,\n ReduceTransformStream,\n retry: retryStream,\n stopAfter,\n stopOn,\n waitAtMost,\n};\n","import {\n JSONParser,\n NO_CONCURRENCY_CHECK,\n STREAM_DOES_NOT_EXIST,\n STREAM_EXISTS,\n type AppendToStreamOptions,\n type BeforeEventStoreCommitHandler,\n type ExpectedStreamVersion,\n type Event as Message,\n type RecordedMessage,\n} from '@event-driven-io/emmett';\nimport { v4 as uuid } from 'uuid';\nimport {\n isSQLiteError,\n type Parameters,\n type SQLiteConnection,\n type SQLiteError,\n} from '../../connection';\nimport type {\n SQLiteEventStore,\n SQLiteReadEventMetadata,\n} from '../SQLiteEventStore';\nimport { defaultTag, messagesTable, streamsTable } from './typing';\n\nexport type AppendEventResult =\n | {\n success: true;\n nextStreamPosition: bigint;\n lastGlobalPosition: bigint;\n }\n | { success: false };\n\nexport const appendToStream = async <MessageType extends Message>(\n db: SQLiteConnection,\n streamName: string,\n streamType: string,\n messages: MessageType[],\n options?: AppendToStreamOptions & {\n partition?: string;\n onBeforeCommit?: BeforeEventStoreCommitHandler<\n SQLiteEventStore,\n { connection: SQLiteConnection }\n >;\n },\n): Promise<AppendEventResult> => {\n if (messages.length === 0) return { success: false };\n\n const expectedStreamVersion = 
toExpectedVersion(\n options?.expectedStreamVersion,\n );\n\n const messagesToAppend: RecordedMessage<\n MessageType,\n SQLiteReadEventMetadata\n >[] = messages.map(\n (\n m: Message,\n i: number,\n ): RecordedMessage<MessageType, SQLiteReadEventMetadata> =>\n ({\n ...m,\n kind: m.kind ?? 'Event',\n metadata: {\n streamName,\n messageId: uuid(),\n streamPosition: BigInt(i + 1),\n ...('metadata' in m ? (m.metadata ?? {}) : {}),\n },\n }) as RecordedMessage<MessageType, SQLiteReadEventMetadata>,\n );\n\n let result: AppendEventResult;\n\n await db.command(`BEGIN TRANSACTION`);\n\n try {\n result = await appendToStreamRaw(\n db,\n streamName,\n streamType,\n messagesToAppend,\n {\n expectedStreamVersion,\n },\n );\n\n if (options?.onBeforeCommit)\n await options.onBeforeCommit(messagesToAppend, { connection: db });\n } catch (err: unknown) {\n await db.command(`ROLLBACK`);\n throw err;\n }\n\n if (result.success == null || !result.success) {\n await db.command(`ROLLBACK`);\n return result;\n }\n\n await db.command(`COMMIT`);\n\n return result;\n};\n\nconst toExpectedVersion = (\n expected: ExpectedStreamVersion | undefined,\n): bigint | null => {\n if (expected === undefined) return null;\n\n if (expected === NO_CONCURRENCY_CHECK) return null;\n\n // TODO: this needs to be fixed\n if (expected == STREAM_DOES_NOT_EXIST) return null;\n\n // TODO: this needs to be fixed\n if (expected == STREAM_EXISTS) return null;\n\n return expected as bigint;\n};\n\nconst appendToStreamRaw = async (\n db: SQLiteConnection,\n streamId: string,\n streamType: string,\n messages: RecordedMessage[],\n options?: {\n expectedStreamVersion: bigint | null;\n partition?: string;\n },\n): Promise<AppendEventResult> => {\n let streamPosition;\n let globalPosition;\n\n try {\n let expectedStreamVersion = options?.expectedStreamVersion ?? null;\n\n if (expectedStreamVersion == null) {\n expectedStreamVersion = await getLastStreamPosition(\n db,\n streamId,\n expectedStreamVersion,\n );\n }\n\n let position: { stream_position: string } | null;\n\n if (expectedStreamVersion === 0n) {\n position = await db.querySingle<{\n stream_position: string;\n } | null>(\n `INSERT INTO ${streamsTable.name}\n (stream_id, stream_position, partition, stream_type, stream_metadata, is_archived)\n VALUES (\n ?,\n ?,\n ?,\n ?,\n '[]',\n false\n )\n RETURNING stream_position;\n `,\n [\n streamId,\n messages.length,\n options?.partition ?? streamsTable.columns.partition,\n streamType,\n ],\n );\n } else {\n position = await db.querySingle<{\n stream_position: string;\n } | null>(\n `UPDATE ${streamsTable.name}\n SET stream_position = stream_position + ?\n WHERE stream_id = ?\n AND partition = ?\n AND is_archived = false\n RETURNING stream_position;\n `,\n [\n messages.length,\n streamId,\n options?.partition ?? streamsTable.columns.partition,\n ],\n );\n }\n\n if (position == null) {\n throw new Error('Could not find stream position');\n }\n\n streamPosition = BigInt(position.stream_position);\n\n if (expectedStreamVersion != null) {\n const expectedStreamPositionAfterSave =\n BigInt(expectedStreamVersion) + BigInt(messages.length);\n if (streamPosition !== expectedStreamPositionAfterSave) {\n return {\n success: false,\n };\n }\n }\n\n const { sqlString, values } = buildMessageInsertQuery(\n messages,\n expectedStreamVersion,\n streamId,\n options?.partition?.toString() ?? 
defaultTag,\n );\n\n const returningId = await db.querySingle<{\n global_position: string;\n } | null>(sqlString, values);\n\n if (returningId?.global_position == null) {\n throw new Error('Could not find global position');\n }\n\n globalPosition = BigInt(returningId.global_position);\n } catch (err: unknown) {\n if (isSQLiteError(err) && isOptimisticConcurrencyError(err)) {\n return {\n success: false,\n };\n }\n\n throw err;\n }\n\n return {\n success: true,\n nextStreamPosition: streamPosition,\n lastGlobalPosition: globalPosition,\n };\n};\n\nconst isOptimisticConcurrencyError = (error: SQLiteError): boolean => {\n return error?.errno !== undefined && error.errno === 19;\n};\n\nasync function getLastStreamPosition(\n db: SQLiteConnection,\n streamId: string,\n expectedStreamVersion: bigint | null,\n): Promise<bigint> {\n const result = await db.querySingle<{ stream_position: string } | null>(\n `SELECT CAST(stream_position AS VARCHAR) AS stream_position FROM ${streamsTable.name} WHERE stream_id = ?`,\n [streamId],\n );\n\n if (result?.stream_position == null) {\n expectedStreamVersion = 0n;\n } else {\n expectedStreamVersion = BigInt(result.stream_position);\n }\n return expectedStreamVersion;\n}\n\nconst buildMessageInsertQuery = (\n messages: RecordedMessage[],\n expectedStreamVersion: bigint,\n streamId: string,\n partition: string | null | undefined,\n): {\n sqlString: string;\n values: Parameters[];\n} => {\n const query = messages.reduce(\n (\n queryBuilder: { parameterMarkers: string[]; values: Parameters[] },\n message: RecordedMessage,\n ) => {\n if (\n message.metadata?.streamPosition == null ||\n typeof message.metadata.streamPosition !== 'bigint'\n ) {\n throw new Error('Stream position is required');\n }\n\n const streamPosition =\n BigInt(message.metadata.streamPosition) + BigInt(expectedStreamVersion);\n\n queryBuilder.parameterMarkers.push(`(?,?,?,?,?,?,?,?,?,?)`);\n queryBuilder.values.push(\n streamId,\n streamPosition.toString() ?? 0,\n partition ?? defaultTag,\n message.kind === 'Event' ? 'E' : 'C',\n JSONParser.stringify(message.data),\n JSONParser.stringify(message.metadata),\n expectedStreamVersion?.toString() ?? 
0,\n message.type,\n message.metadata.messageId,\n false,\n );\n\n return queryBuilder;\n },\n {\n parameterMarkers: [],\n values: [],\n },\n );\n\n const sqlString = `\n INSERT INTO ${messagesTable.name} (\n stream_id, \n stream_position, \n partition, \n message_kind,\n message_data, \n message_metadata, \n message_schema_version, \n message_type, \n message_id, \n is_archived\n ) \n VALUES ${query.parameterMarkers.join(', ')} \n RETURNING \n CAST(global_position as VARCHAR) AS global_position\n `;\n return { sqlString, values: query.values };\n};\n","export const emmettPrefix = 'emt';\n\nexport const globalTag = 'global';\nexport const defaultTag = 'emt:default';\n\nexport const globalNames = {\n module: `${emmettPrefix}:module:${globalTag}`,\n};\n\nconst columns = {\n partition: {\n name: 'partition',\n },\n isArchived: { name: 'is_archived' },\n};\n\nexport const streamsTable = {\n name: `${emmettPrefix}_streams`,\n columns: {\n partition: columns.partition,\n isArchived: columns.isArchived,\n },\n};\n\nexport const messagesTable = {\n name: `${emmettPrefix}_messages`,\n columns: {\n partition: columns.partition,\n isArchived: columns.isArchived,\n },\n};\n","import {\n projection,\n type CanHandle,\n type Event,\n type ProjectionDefinition,\n type ProjectionHandler,\n type ReadEvent,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../connection';\nimport type { SQLiteReadEventMetadata } from '../SQLiteEventStore';\n\nexport type SQLiteProjectionHandlerContext = {\n connection: SQLiteConnection;\n};\n\nexport type SQLiteProjectionHandler<\n EventType extends Event = Event,\n EventMetaDataType extends SQLiteReadEventMetadata = SQLiteReadEventMetadata,\n> = ProjectionHandler<\n EventType,\n EventMetaDataType,\n SQLiteProjectionHandlerContext\n>;\n\nexport type SQLiteProjectionDefinition<EventType extends Event = Event> =\n ProjectionDefinition<\n EventType,\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >;\n\nexport type SQLiteProjectionHandlerOptions<EventType extends Event = Event> = {\n events: ReadEvent<EventType, SQLiteReadEventMetadata>[];\n projections: SQLiteProjectionDefinition<EventType>[];\n connection: SQLiteConnection;\n};\n\nexport const handleProjections = async <EventType extends Event = Event>(\n options: SQLiteProjectionHandlerOptions<EventType>,\n): Promise<void> => {\n const { projections: allProjections, events, connection } = options;\n\n const eventTypes = events.map((e) => e.type);\n\n const projections = allProjections.filter((p) =>\n p.canHandle.some((type) => eventTypes.includes(type)),\n );\n\n for (const projection of projections) {\n await projection.handle(events, {\n connection,\n });\n }\n};\n\nexport const sqliteProjection = <EventType extends Event>(\n definition: SQLiteProjectionDefinition<EventType>,\n): SQLiteProjectionDefinition<EventType> =>\n projection<\n EventType,\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >(definition);\n\nexport const sqliteRawBatchSQLProjection = <EventType extends Event>(\n handle: (\n events: EventType[],\n context: SQLiteProjectionHandlerContext,\n ) => Promise<string[]> | string[],\n ...canHandle: CanHandle<EventType>\n): SQLiteProjectionDefinition<EventType> =>\n sqliteProjection<EventType>({\n canHandle,\n handle: async (events, context) => {\n const sqls: string[] = await handle(events, context);\n\n for (const sql of sqls) await context.connection.command(sql);\n },\n });\n\nexport const sqliteRawSQLProjection = <EventType extends Event>(\n handle: (\n 
event: EventType,\n context: SQLiteProjectionHandlerContext,\n ) => Promise<string> | string,\n ...canHandle: CanHandle<EventType>\n): SQLiteProjectionDefinition<EventType> =>\n sqliteRawBatchSQLProjection<EventType>(\n async (events, context) => {\n const sqls: string[] = [];\n\n for (const event of events) {\n sqls.push(await handle(event, context));\n }\n return sqls;\n },\n ...canHandle,\n );\n","import type {\n AppendToStreamResultWithGlobalPosition,\n BeforeEventStoreCommitHandler,\n BigIntStreamPosition,\n Event,\n ProjectionRegistration,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\n\nimport {\n assertExpectedVersionMatchesCurrent,\n ExpectedVersionConflictError,\n NO_CONCURRENCY_CHECK,\n type AggregateStreamOptions,\n type AggregateStreamResult,\n type AppendToStreamOptions,\n type EventStore,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from '@event-driven-io/emmett';\nimport {\n InMemorySQLiteDatabase,\n sqliteConnection,\n type SQLiteConnection,\n} from '../connection';\nimport {\n handleProjections,\n type SQLiteProjectionHandlerContext,\n} from './projections';\nimport { createEventStoreSchema } from './schema';\nimport { appendToStream } from './schema/appendToStream';\nimport { readStream } from './schema/readStream';\n\nexport type EventHandler<E extends Event = Event> = (\n eventEnvelope: ReadEvent<E>,\n) => void;\n\nexport const SQLiteEventStoreDefaultStreamVersion = 0n;\n\nexport type SQLiteEventStore = EventStore<SQLiteReadEventMetadata>;\n\nexport type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;\n\nexport type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<\n EventType,\n SQLiteReadEventMetadata\n>;\n\nexport type SQLiteEventStoreOptions = {\n // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n fileName: InMemorySQLiteDatabase | string | undefined;\n projections?: ProjectionRegistration<\n 'inline',\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >[];\n schema?: {\n autoMigration?: 'None' | 'CreateOrUpdate';\n };\n hooks?: {\n /**\n * This hook will be called **BEFORE** events were stored in the event store.\n * @type {BeforeEventStoreCommitHandler<SQLiteEventStore, HandlerContext>}\n */\n onBeforeCommit?: BeforeEventStoreCommitHandler<\n SQLiteEventStore,\n { connection: SQLiteConnection }\n >;\n };\n};\n\nexport const getSQLiteEventStore = (\n options: SQLiteEventStoreOptions,\n): SQLiteEventStore => {\n let schemaMigrated = false;\n let autoGenerateSchema = false;\n let database: SQLiteConnection | null;\n const fileName = options.fileName ?? InMemorySQLiteDatabase;\n\n const isInMemory: boolean = fileName === InMemorySQLiteDatabase;\n\n const inlineProjections = (options.projections ?? 
[])\n .filter(({ type }) => type === 'inline')\n .map(({ projection }) => projection);\n\n const onBeforeCommitHook = options.hooks?.onBeforeCommit;\n\n const createConnection = () => {\n if (database != null) {\n return database;\n }\n\n return sqliteConnection({\n fileName,\n });\n };\n\n const closeConnection = () => {\n if (isInMemory) {\n return;\n }\n if (database != null) {\n database.close();\n database = null;\n }\n };\n\n const withConnection = async <Result>(\n handler: (db: SQLiteConnection) => Promise<Result>,\n ): Promise<Result> => {\n if (database == null) {\n database = createConnection();\n }\n\n try {\n await ensureSchemaExists(database);\n return await handler(database);\n } finally {\n closeConnection();\n }\n };\n\n if (options) {\n autoGenerateSchema =\n options.schema?.autoMigration === undefined ||\n options.schema?.autoMigration !== 'None';\n }\n\n const ensureSchemaExists = async (\n connection: SQLiteConnection,\n ): Promise<void> => {\n if (!autoGenerateSchema) return Promise.resolve();\n\n if (!schemaMigrated) {\n await createEventStoreSchema(connection);\n schemaMigrated = true;\n }\n\n return Promise.resolve();\n };\n\n return {\n async aggregateStream<State, EventType extends Event>(\n streamName: string,\n options: AggregateStreamOptions<\n State,\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n ): Promise<AggregateStreamResult<State>> {\n const { evolve, initialState, read } = options;\n\n const expectedStreamVersion = read?.expectedStreamVersion;\n\n let state = initialState();\n\n if (typeof streamName !== 'string') {\n throw new Error('Stream name is not string');\n }\n\n if (database == null) {\n database = createConnection();\n }\n\n const result = await withConnection((db) =>\n readStream<EventType>(db, streamName, options.read),\n );\n\n const currentStreamVersion = result.currentStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n expectedStreamVersion,\n SQLiteEventStoreDefaultStreamVersion,\n );\n\n for (const event of result.events) {\n if (!event) continue;\n\n state = evolve(state, event);\n }\n\n return {\n currentStreamVersion: currentStreamVersion,\n state,\n streamExists: result.streamExists,\n };\n },\n\n readStream: async <EventType extends Event>(\n streamName: string,\n options?: ReadStreamOptions<BigIntStreamPosition>,\n ): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n > => withConnection((db) => readStream<EventType>(db, streamName, options)),\n\n appendToStream: async <EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResultWithGlobalPosition> => {\n if (database == null) {\n database = createConnection();\n }\n\n // TODO: This has to be smarter when we introduce urn-based resolution\n const [firstPart, ...rest] = streamName.split('-');\n\n const streamType =\n firstPart && rest.length > 0 ? firstPart : 'emt:unknown';\n\n const appendResult = await withConnection((db) =>\n appendToStream(db, streamName, streamType, events, {\n ...options,\n onBeforeCommit: async (messages, context) => {\n if (inlineProjections.length > 0)\n await handleProjections({\n projections: inlineProjections,\n events: messages,\n ...context,\n });\n\n if (onBeforeCommitHook) await onBeforeCommitHook(messages, context);\n },\n }),\n );\n\n if (!appendResult.success)\n throw new ExpectedVersionConflictError<bigint>(\n -1n, //TODO: Return actual version in case of error\n options?.expectedStreamVersion ?? 
NO_CONCURRENCY_CHECK,\n );\n\n return {\n nextExpectedStreamVersion: appendResult.nextStreamPosition,\n lastEventGlobalPosition: appendResult.lastGlobalPosition,\n createdNewStream:\n appendResult.nextStreamPosition >= BigInt(events.length),\n };\n },\n };\n};\n","import {\n JSONParser,\n type CombinedReadEventMetadata,\n type Event,\n type ReadEvent,\n type ReadEventMetadataWithGlobalPosition,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from '@event-driven-io/emmett';\nimport { type SQLiteConnection } from '../../connection';\nimport { SQLiteEventStoreDefaultStreamVersion } from '../SQLiteEventStore';\nimport { defaultTag, messagesTable } from './typing';\n\ntype ReadStreamSqlResult = {\n stream_position: string;\n message_data: string;\n message_metadata: string;\n message_schema_version: string;\n message_type: string;\n message_id: string;\n global_position: string;\n created: string;\n};\n\nexport const readStream = async <EventType extends Event>(\n db: SQLiteConnection,\n streamId: string,\n options?: ReadStreamOptions & { partition?: string },\n): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n> => {\n const fromCondition: string =\n options && 'from' in options\n ? `AND stream_position >= ${options.from}`\n : '';\n\n const to = Number(\n options && 'to' in options\n ? options.to\n : options && 'maxCount' in options && options.maxCount\n ? options.from + options.maxCount\n : NaN,\n );\n\n const toCondition = !isNaN(to) ? `AND stream_position <= ${to}` : '';\n\n const results = await db.query<ReadStreamSqlResult>(\n `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id\n FROM ${messagesTable.name}\n WHERE stream_id = ? AND partition = ? AND is_archived = FALSE ${fromCondition} ${toCondition}`,\n [streamId, options?.partition ?? defaultTag],\n );\n\n const messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[] =\n results.map((row) => {\n const rawEvent = {\n type: row.message_type,\n data: JSONParser.parse(row.message_data),\n metadata: JSONParser.parse(row.message_metadata),\n } as unknown as EventType;\n\n const metadata: ReadEventMetadataWithGlobalPosition = {\n ...('metadata' in rawEvent ? (rawEvent.metadata ?? {}) : {}),\n messageId: row.message_id,\n streamName: streamId,\n streamPosition: BigInt(row.stream_position),\n globalPosition: BigInt(row.global_position),\n };\n\n return {\n ...rawEvent,\n kind: 'Event',\n metadata: metadata as CombinedReadEventMetadata<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n };\n });\n\n return messages.length > 0\n ? 
{\n currentStreamVersion:\n messages[messages.length - 1]!.metadata.streamPosition,\n events: messages,\n streamExists: true,\n }\n : {\n currentStreamVersion: SQLiteEventStoreDefaultStreamVersion,\n events: [],\n streamExists: false,\n };\n};\n","import type { SQLiteConnection } from '../../connection';\nimport { globalTag, messagesTable, streamsTable } from './typing';\n\nexport const sql = (sql: string) => sql;\n\nexport const streamsTableSQL = sql(\n `CREATE TABLE IF NOT EXISTS ${streamsTable.name}(\n stream_id TEXT NOT NULL,\n stream_position BIGINT NOT NULL DEFAULT 0,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n stream_type TEXT NOT NULL,\n stream_metadata JSONB NOT NULL,\n is_archived BOOLEAN NOT NULL DEFAULT FALSE,\n PRIMARY KEY (stream_id, stream_position, partition, is_archived),\n UNIQUE (stream_id, partition, is_archived)\n );`,\n);\n\nexport const messagesTableSQL = sql(\n `CREATE TABLE IF NOT EXISTS ${messagesTable.name}(\n stream_id TEXT NOT NULL,\n stream_position BIGINT NOT NULL,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n message_kind CHAR(1) NOT NULL DEFAULT 'E',\n message_data JSONB NOT NULL,\n message_metadata JSONB NOT NULL,\n message_schema_version TEXT NOT NULL,\n message_type TEXT NOT NULL,\n message_id TEXT NOT NULL,\n is_archived BOOLEAN NOT NULL DEFAULT FALSE,\n global_position INTEGER PRIMARY KEY,\n created DATETIME DEFAULT CURRENT_TIMESTAMP,\n UNIQUE (stream_id, stream_position, partition, is_archived)\n ); \n`,\n);\n\nexport const schemaSQL: string[] = [streamsTableSQL, messagesTableSQL];\n\nexport const createEventStoreSchema = async (\n db: SQLiteConnection,\n): Promise<void> => {\n for (const sql of schemaSQL) {\n await db.command(sql);\n }\n};\n"]}
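The embedded SQLiteEventStore.ts source above shows the two additions this release wires into appendToStream: an inline projections registration and a hooks.onBeforeCommit hook. A minimal configuration sketch follows (editor's illustration, not from the package); the database file, the GuestCheckedIn event, and the counter table are invented, and the projection literal only mirrors the fields the bundled handleProjections reads (canHandle and handle), so real code may prefer the sqliteRawBatchSQLProjection-style helpers visible in the embedded source.

import { getSQLiteEventStore } from '@event-driven-io/emmett-sqlite';

// Editor's sketch: 'GuestCheckedIn' and the guest_check_ins counter table are
// illustrative and not part of this package.
const store = getSQLiteEventStore({
  fileName: './guests.db',
  schema: { autoMigration: 'CreateOrUpdate' },
  projections: [
    {
      // Only registrations with type 'inline' are picked up by this release.
      type: 'inline',
      projection: {
        canHandle: ['GuestCheckedIn'],
        // Runs inside the same transaction as the append (see handleProjections below).
        handle: async (events, { connection }) => {
          await connection.command(
            `CREATE TABLE IF NOT EXISTS guest_check_ins (total INTEGER NOT NULL)`,
          );
          await connection.command(
            `INSERT INTO guest_check_ins (total)
             SELECT 0 WHERE NOT EXISTS (SELECT 1 FROM guest_check_ins)`,
          );
          await connection.command(
            `UPDATE guest_check_ins SET total = total + ${events.length}`,
          );
        },
      },
    },
  ],
});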
package/dist/index.d.cts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { EventStore, ReadEventMetadataWithGlobalPosition, Event, ReadEvent, ProjectionRegistration, BeforeEventStoreCommitHandler, AppendToStreamOptions, ReadStreamOptions, ReadStreamResult } from '@event-driven-io/emmett';
 
 type Parameters = object | string | bigint | number | boolean | null;
 type SQLiteConnection = {
@@ -18,6 +18,33 @@ type SQLiteConnectionOptions = {
 };
 declare const sqliteConnection: (options: SQLiteConnectionOptions) => SQLiteConnection;
 
+type SQLiteProjectionHandlerContext = {
+    connection: SQLiteConnection;
+};
+
+type EventHandler<E extends Event = Event> = (eventEnvelope: ReadEvent<E>) => void;
+declare const SQLiteEventStoreDefaultStreamVersion = 0n;
+type SQLiteEventStore = EventStore<SQLiteReadEventMetadata>;
+type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;
+type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<EventType, SQLiteReadEventMetadata>;
+type SQLiteEventStoreOptions = {
+    fileName: InMemorySQLiteDatabase | string | undefined;
+    projections?: ProjectionRegistration<'inline', SQLiteReadEventMetadata, SQLiteProjectionHandlerContext>[];
+    schema?: {
+        autoMigration?: 'None' | 'CreateOrUpdate';
+    };
+    hooks?: {
+        /**
+         * This hook will be called **BEFORE** events were stored in the event store.
+         * @type {BeforeEventStoreCommitHandler<SQLiteEventStore, HandlerContext>}
+         */
+        onBeforeCommit?: BeforeEventStoreCommitHandler<SQLiteEventStore, {
+            connection: SQLiteConnection;
+        }>;
+    };
+};
+declare const getSQLiteEventStore: (options: SQLiteEventStoreOptions) => SQLiteEventStore;
+
 type AppendEventResult = {
     success: true;
     nextStreamPosition: bigint;
@@ -25,9 +52,11 @@ type AppendEventResult = {
 } | {
     success: false;
 };
-declare const appendToStream: (db: SQLiteConnection, streamName: string, streamType: string, messages:
+declare const appendToStream: <MessageType extends Event>(db: SQLiteConnection, streamName: string, streamType: string, messages: MessageType[], options?: AppendToStreamOptions & {
     partition?: string;
-
+    onBeforeCommit?: BeforeEventStoreCommitHandler<SQLiteEventStore, {
+        connection: SQLiteConnection;
+    }>;
 }) => Promise<AppendEventResult>;
 
 declare const readStream: <EventType extends Event>(db: SQLiteConnection, streamId: string, options?: ReadStreamOptions & {
@@ -69,17 +98,4 @@ declare const messagesTable: {
     };
 };
 
-type EventHandler<E extends Event = Event> = (eventEnvelope: ReadEvent<E>) => void;
-declare const SQLiteEventStoreDefaultStreamVersion = 0n;
-type SQLiteEventStore = EventStore<SQLiteReadEventMetadata>;
-type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;
-type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<EventType, SQLiteReadEventMetadata>;
-type SQLiteEventStoreOptions = {
-    schema?: {
-        autoMigration?: 'None' | 'CreateOrUpdate';
-    };
-    fileName: InMemorySQLiteDatabase | string | undefined;
-};
-declare const getSQLiteEventStore: (options: SQLiteEventStoreOptions) => SQLiteEventStore;
-
 export { type AppendEventResult, type EventHandler, InMemorySQLiteDatabase, type Parameters, type SQLiteConnection, type SQLiteError, type SQLiteEventStore, SQLiteEventStoreDefaultStreamVersion, type SQLiteEventStoreOptions, type SQLiteReadEvent, type SQLiteReadEventMetadata, appendToStream, createEventStoreSchema, defaultTag, emmettPrefix, getSQLiteEventStore, globalNames, globalTag, isSQLiteError, messagesTable, messagesTableSQL, readStream, schemaSQL, sql, sqliteConnection, streamsTable, streamsTableSQL };
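The declarations above also reshape the low-level appendToStream: it is now generic over the appended message type and accepts an optional per-call onBeforeCommit alongside partition. A rough sketch of calling it directly against an in-memory database follows; the stream name, stream type, and event are invented for illustration and this is not the package's documented usage.

import {
  appendToStream,
  createEventStoreSchema,
  InMemorySQLiteDatabase,
  sqliteConnection,
} from '@event-driven-io/emmett-sqlite';

const run = async () => {
  // In-memory database, schema created up front as the store would do itself.
  const db = sqliteConnection({ fileName: InMemorySQLiteDatabase });
  await createEventStoreSchema(db);

  const result = await appendToStream(
    db,
    'guest-123', // stream name (illustrative)
    'guest', // stream type (illustrative)
    [{ type: 'GuestCheckedIn', data: { guestId: '123' } }],
    {
      // Called before COMMIT; throwing here aborts the whole append.
      onBeforeCommit: async (messages) => {
        console.log(`about to commit ${messages.length} message(s)`);
      },
    },
  );

  if (result.success) {
    console.log(result.nextStreamPosition, result.lastGlobalPosition);
  }
};

void run();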
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { EventStore, ReadEventMetadataWithGlobalPosition, Event, ReadEvent, ProjectionRegistration, BeforeEventStoreCommitHandler, AppendToStreamOptions, ReadStreamOptions, ReadStreamResult } from '@event-driven-io/emmett';
 
 type Parameters = object | string | bigint | number | boolean | null;
 type SQLiteConnection = {
@@ -18,6 +18,33 @@ type SQLiteConnectionOptions = {
 };
 declare const sqliteConnection: (options: SQLiteConnectionOptions) => SQLiteConnection;
 
+type SQLiteProjectionHandlerContext = {
+    connection: SQLiteConnection;
+};
+
+type EventHandler<E extends Event = Event> = (eventEnvelope: ReadEvent<E>) => void;
+declare const SQLiteEventStoreDefaultStreamVersion = 0n;
+type SQLiteEventStore = EventStore<SQLiteReadEventMetadata>;
+type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;
+type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<EventType, SQLiteReadEventMetadata>;
+type SQLiteEventStoreOptions = {
+    fileName: InMemorySQLiteDatabase | string | undefined;
+    projections?: ProjectionRegistration<'inline', SQLiteReadEventMetadata, SQLiteProjectionHandlerContext>[];
+    schema?: {
+        autoMigration?: 'None' | 'CreateOrUpdate';
+    };
+    hooks?: {
+        /**
+         * This hook will be called **BEFORE** events were stored in the event store.
+         * @type {BeforeEventStoreCommitHandler<SQLiteEventStore, HandlerContext>}
+         */
+        onBeforeCommit?: BeforeEventStoreCommitHandler<SQLiteEventStore, {
+            connection: SQLiteConnection;
+        }>;
+    };
+};
+declare const getSQLiteEventStore: (options: SQLiteEventStoreOptions) => SQLiteEventStore;
+
 type AppendEventResult = {
     success: true;
     nextStreamPosition: bigint;
@@ -25,9 +52,11 @@ type AppendEventResult = {
 } | {
     success: false;
 };
-declare const appendToStream: (db: SQLiteConnection, streamName: string, streamType: string, messages:
+declare const appendToStream: <MessageType extends Event>(db: SQLiteConnection, streamName: string, streamType: string, messages: MessageType[], options?: AppendToStreamOptions & {
     partition?: string;
-
+    onBeforeCommit?: BeforeEventStoreCommitHandler<SQLiteEventStore, {
+        connection: SQLiteConnection;
+    }>;
 }) => Promise<AppendEventResult>;
 
 declare const readStream: <EventType extends Event>(db: SQLiteConnection, streamId: string, options?: ReadStreamOptions & {
@@ -69,17 +98,4 @@ declare const messagesTable: {
     };
 };
 
-type EventHandler<E extends Event = Event> = (eventEnvelope: ReadEvent<E>) => void;
-declare const SQLiteEventStoreDefaultStreamVersion = 0n;
-type SQLiteEventStore = EventStore<SQLiteReadEventMetadata>;
-type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;
-type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<EventType, SQLiteReadEventMetadata>;
-type SQLiteEventStoreOptions = {
-    schema?: {
-        autoMigration?: 'None' | 'CreateOrUpdate';
-    };
-    fileName: InMemorySQLiteDatabase | string | undefined;
-};
-declare const getSQLiteEventStore: (options: SQLiteEventStoreOptions) => SQLiteEventStore;
-
 export { type AppendEventResult, type EventHandler, InMemorySQLiteDatabase, type Parameters, type SQLiteConnection, type SQLiteError, type SQLiteEventStore, SQLiteEventStoreDefaultStreamVersion, type SQLiteEventStoreOptions, type SQLiteReadEvent, type SQLiteReadEventMetadata, appendToStream, createEventStoreSchema, defaultTag, emmettPrefix, getSQLiteEventStore, globalNames, globalTag, isSQLiteError, messagesTable, messagesTableSQL, readStream, schemaSQL, sql, sqliteConnection, streamsTable, streamsTableSQL };
package/dist/index.js
CHANGED
@@ -384,7 +384,8 @@ var appendToStream = async (db, streamName, streamType, messages, options) => {
         expectedStreamVersion
       }
     );
-    if (options?.
+    if (options?.onBeforeCommit)
+      await options.onBeforeCommit(messagesToAppend, { connection: db });
   } catch (err) {
     await db.command(`ROLLBACK`);
     throw err;
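The hunk above awaits options.onBeforeCommit inside the same try block that later issues COMMIT, while the catch branch issues ROLLBACK, so a hook that throws vetoes the entire append. A sketch of using the store-level hook as a commit gate; the GuestBanned event type and the rule are invented for illustration.

import { getSQLiteEventStore } from '@event-driven-io/emmett-sqlite';

const store = getSQLiteEventStore({
  fileName: './guests.db',
  hooks: {
    onBeforeCommit: async (messages) => {
      // Runs before COMMIT; throwing here hits the ROLLBACK branch above,
      // so none of the messages in this batch get persisted.
      if (messages.some((message) => message.type === 'GuestBanned')) {
        throw new Error('Refusing to append events for a banned guest');
      }
    },
  },
});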
@@ -552,6 +553,20 @@ var buildMessageInsertQuery = (messages, expectedStreamVersion, streamId, partit
   return { sqlString, values: query.values };
 };
 
+// src/eventStore/projections/index.ts
+var handleProjections = async (options) => {
+  const { projections: allProjections, events, connection } = options;
+  const eventTypes = events.map((e) => e.type);
+  const projections = allProjections.filter(
+    (p) => p.canHandle.some((type) => eventTypes.includes(type))
+  );
+  for (const projection2 of projections) {
+    await projection2.handle(events, {
+      connection
+    });
+  }
+};
+
 // src/eventStore/SQLiteEventStore.ts
 var SQLiteEventStoreDefaultStreamVersion = 0n;
 var getSQLiteEventStore = (options) => {
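handleProjections, new in this bundle, only dispatches to projections whose canHandle list overlaps the event types in the appended batch; everything else is skipped for that append. A standalone TypeScript sketch of that selection rule (editor's illustration, not the package's own code) for readers deciding how granular to make canHandle:

// Mirrors the filter in handleProjections above; names are illustrative.
type ProjectionLike = { name: string; canHandle: string[] };

const selectProjections = (
  projections: ProjectionLike[],
  appendedEventTypes: string[],
): ProjectionLike[] =>
  projections.filter((projection) =>
    projection.canHandle.some((type) => appendedEventTypes.includes(type)),
  );

const selected = selectProjections(
  [
    { name: 'checkIns', canHandle: ['GuestCheckedIn'] },
    { name: 'checkOuts', canHandle: ['GuestCheckedOut'] },
  ],
  ['GuestCheckedIn'],
);

console.log(selected.map((projection) => projection.name)); // -> [ 'checkIns' ]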
@@ -560,6 +575,8 @@ var getSQLiteEventStore = (options) => {
   let database;
   const fileName = options.fileName ?? InMemorySQLiteDatabase;
   const isInMemory = fileName === InMemorySQLiteDatabase;
+  const inlineProjections = (options.projections ?? []).filter(({ type }) => type === "inline").map(({ projection: projection2 }) => projection2);
+  const onBeforeCommitHook = options.hooks?.onBeforeCommit;
   const createConnection = () => {
     if (database != null) {
       return database;
@@ -637,7 +654,18 @@ var getSQLiteEventStore = (options) => {
       const [firstPart, ...rest] = streamName.split("-");
       const streamType = firstPart && rest.length > 0 ? firstPart : "emt:unknown";
       const appendResult = await withConnection(
-        (db) => appendToStream(db, streamName, streamType, events,
+        (db) => appendToStream(db, streamName, streamType, events, {
+          ...options2,
+          onBeforeCommit: async (messages, context) => {
+            if (inlineProjections.length > 0)
+              await handleProjections({
+                projections: inlineProjections,
+                events: messages,
+                ...context
+              });
+            if (onBeforeCommitHook) await onBeforeCommitHook(messages, context);
+          }
+        })
       );
       if (!appendResult.success)
         throw new ExpectedVersionConflictError(