@event-driven-io/emmett-sqlite 0.35.0 → 0.36.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs.map +1 -1
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/emmett/emmett/src/packages/emmett-sqlite/dist/index.cjs","../src/connection/sqliteConnection.ts","../../emmett/src/validation/index.ts","../../emmett/src/errors/index.ts","../../emmett/src/eventStore/inMemoryEventStore.ts","../../emmett/src/eventStore/subscriptions/caughtUpTransformStream.ts","../../emmett/src/eventStore/subscriptions/streamingCoordinator.ts","../../emmett/src/streaming/transformations/notifyAboutNoActiveReaders.ts","../../emmett/src/utils/retry.ts","../../emmett/src/database/inMemoryDatabase.ts","../../emmett/src/streaming/generators/fromArray.ts","../../emmett/src/streaming/restream.ts","../../emmett/src/streaming/transformations/filter.ts","../../emmett/src/streaming/transformations/map.ts","../../emmett/src/streaming/transformations/reduce.ts","../../emmett/src/streaming/transformations/retry.ts","../../emmett/src/streaming/transformations/skip.ts","../../emmett/src/streaming/transformations/stopAfter.ts","../../emmett/src/streaming/transformations/stopOn.ts","../../emmett/src/streaming/transformations/take.ts","../../emmett/src/streaming/transformations/waitAtMost.ts","../../emmett/src/eventStore/expectedVersion.ts","../../emmett/src/serialization/json/JSONParser.ts","../../emmett/src/streaming/transformations/index.ts","../src/eventStore/schema/appendToStream.ts","../src/eventStore/schema/typing.ts","../src/eventStore/schema/tables.ts","../src/eventStore/schema/utils.ts","../src/eventStore/schema/readLastMessageGlobalPosition.ts","../src/eventStore/schema/readMessagesBatch.ts","../src/eventStore/schema/readProcessorCheckpoint.ts","../src/eventStore/consumers/messageBatchProcessing/index.ts","../src/eventStore/consumers/sqliteProcessor.ts","../src/eventStore/consumers/sqliteEventStoreConsumer.ts","../src/eventStore/projections/index.ts","../src/eventStore/SQLiteEventStore.ts","../src/eventStore/schema/readStream.ts","../src/eventStore/schema/storeProcessorCheckpoint.ts"],"names":["sql"],"mappings":"AAAA;ACAA,oFAAoB;AAgBb,IAAM,cAAA,EAAgB,CAAC,KAAA,EAAA,GAAyC;AACrE,EAAA,GAAA,CAAI,MAAA,WAAiB,MAAA,GAAS,OAAA,GAAU,KAAA,EAAO;AAC7C,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA;AACT,CAAA;AAGO,IAAM,kCAAA,EAAoC,4BAAA;AAE1C,IAAM,uBAAA,EAAyB,UAAA;AAO/B,IAAM,iBAAA,EAAmB,CAC9B,OAAA,EAAA,GACqB;AACrB,EAAA,MAAM,SAAA,mBAAW,OAAA,CAAQ,QAAA,UAAY,wBAAA;AACrC,EAAA,IAAI,EAAA;AAEJ,EAAA,GAAA,CAAI,QAAA,CAAS,UAAA,CAAW,OAAO,CAAA,EAAG;AAChC,IAAA,GAAA,EAAK,IAAI,iBAAA,CAAQ,QAAA;AAAA,MACf,QAAA;AAAA,MACA,iBAAA,CAAQ,SAAA,EAAW,iBAAA,CAAQ,eAAA,EAAiB,iBAAA,CAAQ;AAAA,IACtD,CAAA;AAAA,EACF,EAAA,KAAO;AACL,IAAA,GAAA,EAAK,IAAI,iBAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA;AAAA,EACpC;AACA,EAAA,EAAA,CAAG,GAAA,CAAI,4BAA4B,CAAA;AACnC,EAAA,IAAI,mBAAA,EAAqB,CAAA;AAEzB,EAAA,OAAO;AAAA,IACL,KAAA,EAAO,CAAA,EAAA,GAAY,EAAA,CAAG,KAAA,CAAM,CAAA;AAAA,IAC5B,OAAA,EAAS,CAACA,IAAAA,EAAa,MAAA,EAAA,GACrB,IAAI,OAAA,CAA2B,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAClD,MAAA,EAAA,CAAG,GAAA;AAAA,QACDA,IAAAA;AAAA,yBACA,MAAA,UAAU,CAAC,GAAA;AAAA,QACX,QAAA,CAAmC,GAAA,EAAmB;AACpD,UAAA,GAAA,CAAI,GAAA,EAAK;AACP,YAAA,MAAA,CAAO,GAAG,CAAA;AACV,YAAA,MAAA;AAAA,UACF;AAEA,UAAA,OAAA,CAAQ,IAAI,CAAA;AAAA,QACd;AAAA,MACF,CAAA;AAAA,IACF,CAAC,CAAA;AAAA,IACH,KAAA,EAAO,CAAIA,IAAAA,EAAa,MAAA,EAAA,GACtB,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAmB,MAAA,EAAA,GAAgB;AAC5D,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MAChB,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,IACH,WAAA,EAAa,CAAIA,IAAAA,EAAa,MAAA,EAAA,
GAC5B,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAmB,MAAA,EAAA,GAAqB;AACjE,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MAChB,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,IACH,eAAA,EAAiB,MAAA,CAAU,EAAA,EAAA,GAAyB;AAClD,MAAA,IAAI;AACF,QAAA,GAAA,CAAI,kBAAA,GAAA,GAAwB,CAAA,EAAG;AAC7B,UAAA,MAAM,gBAAA,CAAiB,EAAE,CAAA;AAAA,QAC3B;AACA,QAAA,MAAM,OAAA,EAAS,MAAM,EAAA,CAAG,CAAA;AAExB,QAAA,GAAA,CAAI,mBAAA,IAAuB,CAAA,EAAG,MAAM,iBAAA,CAAkB,EAAE,CAAA;AACxD,QAAA,kBAAA,EAAA;AAEA,QAAA,OAAO,MAAA;AAAA,MACT,EAAA,MAAA,CAAS,GAAA,EAAK;AACZ,QAAA,OAAA,CAAQ,GAAA,CAAI,GAAG,CAAA;AAEf,QAAA,GAAA,CAAI,EAAE,mBAAA,IAAuB,CAAA,EAAG,MAAM,mBAAA,CAAoB,EAAE,CAAA;AAE5D,QAAA,MAAM,GAAA;AAAA,MACR;AAAA,IACF;AAAA,EACF,CAAA;AACF,CAAA;AAEA,IAAM,iBAAA,EAAmB,CAAC,EAAA,EAAA,GACxB,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AACrC,EAAA,EAAA,CAAG,GAAA,CAAI,6BAAA,EAA+B,CAAC,GAAA,EAAA,GAAsB;AAC3D,IAAA,GAAA,CAAI,GAAA,EAAK;AACP,MAAA,MAAA,CAAO,GAAG,CAAA;AACV,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,CAAA;AAAA,EACV,CAAC,CAAA;AACH,CAAC,CAAA;AAEH,IAAM,kBAAA,EAAoB,CAAC,EAAA,EAAA,GACzB,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AACrC,EAAA,EAAA,CAAG,GAAA,CAAI,QAAA,EAAU,CAAC,GAAA,EAAA,GAAsB;AACtC,IAAA,GAAA,CAAI,GAAA,EAAK;AACP,MAAA,MAAA,CAAO,GAAG,CAAA;AACV,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,CAAA;AAAA,EACV,CAAC,CAAA;AACH,CAAC,CAAA;AAEH,IAAM,oBAAA,EAAsB,CAAC,EAAA,EAAA,GAC3B,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AACrC,EAAA,EAAA,CAAG,GAAA,CAAI,UAAA,EAAY,CAAC,GAAA,EAAA,GAAsB;AACxC,IAAA,GAAA,CAAI,GAAA,EAAK;AACP,MAAA,MAAA,CAAO,GAAG,CAAA;AACV,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,CAAA;AAAA,EACV,CAAC,CAAA;AACH,CAAC,CAAA;AD9CH;AACA;AE7FO,IAAM,SAAA,EAAW,CAAC,GAAA,EAAA,GACvB,OAAO,IAAA,IAAQ,SAAA,GAAY,IAAA,IAAQ,GAAA;AAE9B,IAAM,SAAA,EAAW,CAAC,GAAA,EAAA,GACvB,OAAO,IAAA,IAAQ,QAAA;ACQV,IAAM,YAAA,EAAN,MAAM,aAAA,QAAoB,MAAM;AHqFvC,EGpFS;AHqFT,EGnFE,WAAA,CACE,OAAA,EACA;AACA,IAAA,MAAM,UAAA,EACJ,QAAA,GAAW,OAAO,QAAA,IAAY,SAAA,GAAY,YAAA,GAAe,QAAA,EACrD,OAAA,CAAQ,UAAA,EACR,QAAA,CAAS,OAAO,EAAA,EACd,QAAA,EACA,GAAA;AACR,IAAA,MAAM,QAAA,EACJ,QAAA,GAAW,OAAO,QAAA,IAAY,SAAA,GAAY,UAAA,GAAa,QAAA,EACnD,OAAA,CAAQ,QAAA,EACR,QAAA,CAAS,OAAO,EAAA,EACd,QAAA,EACA,CAAA,wBAAA,EAA2B,SAAS,CAAA,kCAAA,CAAA;AAE5C,IAAA,KAAA,CAAM,OAAO,CAAA;AACb,IAAA,IAAA,CAAK,UAAA,EAAY,SAAA;AAGjB,IAAA,MAAA,CAAO,cAAA,CAAe,IAAA,EAAM,YAAA,CAAY,SAAS,CAAA;AHqErD,EGpEE;AACF,CAAA;AAEO,IAAM,iBAAA,EAAN,MAAM,kBAAA,QAAyB,YAAY;AHoElD,EGnEE,WAAA,CACS,OAAA,EACA,QAAA,EACP,OAAA,EACA;AACA,IAAA,KAAA,CAAM;AHgEV,MG/DM,SAAA,EAAW,GAAA;AHgEjB,MG/DM,OAAA,mBACE,OAAA,UACA,CAAA,iBAAA,EAAoB,QAAA,CAAS,QAAA,CAAS,CAAC,CAAA,wBAAA,kBAA2B,OAAA,6BAAS,QAAA,mBAAS,GAAC,CAAA;AH8D7F,IAAA;AGtEW,IAAA;AACA,IAAA;AAWP,IAAA;AH8DJ,EAAA;AG5DA;AH8DA;AACA;AI9HA;ACAA;ACAA;ACAA;AACA;ACDA;ACAA;ACAA;ACAA;ACAA;ACAA;ACAA;ACAA;Af4IA;AACA;AgB7IA;ACAA;ACAA;ACAA;ACAA;ACeO;AACA;AAEA;AAGA;AAKL,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AACF;AAEO;AAOL,EAAA;AAEA,EAAA;AACE,IAAA;AACJ;AAEO;ArBkHP,EAAA;AqB3GI,IAAA;AAGA,IAAA;ArB2GJ,EAAA;AqBzGA;AdzDO;AAOA;AP+JP,EAAA;AO9II,IAAA;APgJJ,MAAA;AO9IQ,QAAA;AACA,QAAA;APgJR,MAAA;AACA,IAAA;AOzJY,IAAA;AAWR,IAAA;AAEA,IAAA;AAEA,IAAA;AP+IJ,EAAA;AACA,iBAAA;AACA,EAAA;AACA,kBAAA;AACA,EAAA;AOtKI,IAAA;APwKJ,EAAA;AACA,EAAA;AOlJI,IAAA;AACE,MAAA;APoJN,IAAA;AACA,EAAA;AACA,EAAA;AOjJI,IAAA;AAEA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;APkJJ,EAAA;AACA,EAAA;AO/II,IAAA;AACE,MAAA;APiJN,IAAA;AACA,EAAA;AO/IA;ACpDO;AAIL,EAAA;AAEA,EAAA;ARkMF,IAAA;AQhMM,MAAA;AACE,QAAA;ARkMR,MAAA;AQhMQ,QAAA;AACE,UAAA;ARkMV,QAAA;AQhMQ,QAAA;ARkMR,MAAA;AACA,
IAAA;AACA,qBAAA;AACA,EAAA;AQhMA;Ac3BO;AtB8NP,EAAA;AsB5NI,IAAA;AtB8NJ,EAAA;AsB5NA;AA0BO;AtBqMP,EAAA;AsBhMI,IAAA;AtBkMJ,sBAAA;AACA;AACA;AACA,MAAA;AACA,IAAA;AACA,EAAA;AACA,EAAA;AsB7LI,IAAA;AAEA,IAAA;AACE,MAAA;AAEF,IAAA;AtB6LJ,EAAA;AsBzLA;AVrDO;AZiPP,EAAA;AY9OM,IAAA;AACE,MAAA;AZgPR,IAAA;AACA,EAAA;AY9OE;ACPK;AbwPP,EAAA;AarPM,IAAA;AbuPN,EAAA;AarPE;ACLK;AAKA;AdyPP,EAAA;AACA,EAAA;AACA,EAAA;ActPI,IAAA;AdwPJ,MAAA;ActPQ,QAAA;AdwPR,MAAA;AACA,MAAA;ActPQ,QAAA;AACA,QAAA;AdwPR,MAAA;AACA,IAAA;AcrPI,IAAA;AACA,IAAA;AduPJ,EAAA;AcrPA;ACjBO;AfyQP,EAAA;Ae3PM,IAAA;Af6PN,MAAA;AACA,MAAA;AACA,IAAA;Ae3PQ,MAAA;Af6PR,IAAA;AACA,EAAA;Ae3PE;AAEF;AAQE,EAAA;AACA,EAAA;AAEA,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AACA,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AfiPR,MAAA;AACA,IAAA;AACA,EAAA;Ae/OI,IAAA;AfiPJ,EAAA;Ae/OA;ACxDO;AAEA;AhBySP,kBAAA;AACA,EAAA;AACA,EAAA;AgBtSI,IAAA;AhBwSJ,MAAA;AgBtSQ,QAAA;AACA,QAAA;AACE,UAAA;AhBwSV,QAAA;AACA,MAAA;AACA,IAAA;AgBrSI,IAAA;AhBuSJ,EAAA;AgBrSA;AClBO;AjB0TP,EAAA;AiBvTM,IAAA;AAEA,IAAA;AACE,MAAA;AjBwTR,IAAA;AACA,EAAA;AiBtTE;ACTK;AlBkUP,EAAA;AkB/TM,IAAA;AACE,MAAA;AACA,MAAA;AlBiUR,IAAA;AkB/TM,IAAA;AACA,IAAA;AlBiUN,EAAA;AkB/TE;ACVK;AAEA;AnB2UP,kBAAA;AACA,EAAA;AACA,EAAA;AmBxUI,IAAA;AnB0UJ,MAAA;AmBxUQ,QAAA;AACE,UAAA;AACA,UAAA;AnB0UV,QAAA;AmBxUU,UAAA;AnB0UV,QAAA;AACA,MAAA;AACA,IAAA;AmBvUI,IAAA;AnByUJ,EAAA;AmBvUA;ACpBO;ApB8VP,EAAA;AoB3VM,IAAA;AACE,MAAA;ApB6VR,IAAA;AoB1VM,IAAA;AAGA,IAAA;AACE,MAAA;AACA,MAAA;ApB0VR,IAAA;AACA,EAAA;AACA,EAAA;AoBxVM,IAAA;ApB0VN,EAAA;AoBxVE;AGNK;AvBiWP,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AuB/VA;AZnBA;AXqXA;AACA;AwBrXA;AxBuXA;AACA;AyBnYO;AAEA;AACA;AAEA;AAAoB,EAAA;AAE3B;AAEA;AAAgB,EAAA;AACH,IAAA;AACH,EAAA;AACR,EAAA;AAEF;AAEO;AAAqB,EAAA;AACL,EAAA;AACZ,IAAA;AACY,IAAA;AACC,EAAA;AAExB;AAEO;AAAsB,EAAA;AACN,EAAA;AACZ,IAAA;AACY,IAAA;AACC,EAAA;AAExB;AAEO;AAA2B,EAAA;AAElC;AzB+XA;AACA;AwBlYO;AAaL,EAAA;AAEA,EAAA;AAA8B,oBAAA;AACnB,EAAA;AAGX,EAAA;AAGe,IAAA;AAKV,MAAA;AACI,MAAA;AACa,MAAA;AACN,QAAA;AACR,QAAA;AACgB,QAAA;AACY,QAAA;AACgB,MAAA;AAC9C,IAAA;AACF,EAAA;AAGJ,EAAA;AAEA,EAAA;AACE,IAAA;AAAe,MAAA;AACb,MAAA;AACA,MAAA;AACA,MAAA;AACA,MAAA;AACA,QAAA;AACE,MAAA;AACF,IAAA;AAGF,IAAA;AACE,MAAA;AAEF,IAAA;AAAO,EAAA;AAEX;AAEA;AAGE,EAAA;AAEA,EAAA;AAGA,EAAA;AAGA,EAAA;AAEA,EAAA;AACF;AAEA;AAUE,EAAA;AACA,EAAA;AAEA,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AAA8B,QAAA;AAC5B,QAAA;AACA,QAAA;AACA,MAAA;AACF,IAAA;AAGF,IAAA;AAEA,IAAA;AACE,MAAA;AAAoB,QAAA;AAGc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAAA;AAAA,QAAA;AAYhC,UAAA;AACE,UAAA;AACS,2CAAA;AACkC,UAAA;AAC3C,QAAA;AACF,MAAA;AACF,IAAA;AAEA,MAAA;AAAoB,QAAA;AAGS;AAAA;AAAA;AAAA;AAAA;AAAA,UAAA;AAAA,QAAA;AAO3B,UAAA;AACW,UAAA;AACT,2CAAA;AAC2C,QAAA;AAC7C,MAAA;AACF,IAAA;AAGF,IAAA;AACE,MAAA;AAAgD,IAAA;AAGlD,IAAA;AAEA,IAAA;AACE,MAAA;AAEA,MAAA;AACE,QAAA;AAAO,UAAA;AACI,QAAA;AACX,MAAA;AACF,IAAA;AAGF,IAAA;AAA8B,MAAA;AAC5B,MAAA;AACA,MAAA;AACA,uCAAA;AACkC,IAAA;AAGpC,IAAA;AAIA,IAAA;AAIE,MAAA;AAAgD,IAAA;AAGlD,IAAA;AAAiB,MAAA;AACwB,IAAA;AACzC,EAAA;AAEA,IAAA;AACE,MAAA;AAAO,QAAA;AACI,MAAA;AACX,IAAA;AAGF,IAAA;AAAM,EAAA;AAGR,EAAA;AAAO,IAAA;AACI,IAAA;AACW,IAAA;AACA,EAAA;AAExB;AAEA;AACE,EAAA;AACF;AAEA;AAKE,EAAA;AAAwB,IAAA;AAC8D,IAAA;AAC3E,EAAA;AAGX,EAAA;AACE,IAAA;AAAwB,EAAA;AAExB,IAAA;AAAqD,EAAA;AAEvD,EAAA;AACF;AAEA;AASE,EAAA;AAAuB,IAAA;AAKnB,MAAA;AAIE,QAAA;AAA6C,MAAA;AAG/C,MAAA;AAGA,MAAA;AACA,MAAA;AAAoB,QAAA;AAClB,yBAAA;AAC6B,yBAAA;AAChB,QAAA;AACoB,QAAA;AACA,QAAA;AACI,yCAAA;AACA,QAAA;AAC7B,QAAA;AACS,QAAA;AACjB,MAAA;AAGF,MAAA;AAAO,IAAA;AACT,IAAA;AACA,MAAA;AACqB,MAAA;AACV,IAAA;AACX,EAAA;AAGF,EAAA;AAAkB
,kBAAA;AACkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAA;AAYU;AAAA;AAAA,IAAA;AAI9C,EAAA;AACF;AxBsSA;AACA;A0B1lBO;AAEA;AAAwB,EAAA;AACkB;AAAA;AAAA,oEAAA;AAG8B;AAAA;AAAA;AAAA;AAAA;AAAA,IAAA;AAO/E;AAEO;AAAyB,EAAA;AACkB;AAAA;AAAA,yEAAA;AAGkC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAapF;AAEO;AAA8B,EAAA;AACnC,6BAAA;AACoD;AAAA;AAAA,+EAAA;AAGoC;AAAA;AAAA;AAAA;AAK1F;AAEO;AAA4B,EAAA;AACjC,EAAA;AACA,EAAA;AAEF;AAEO;AAGL,EAAA;AACE,IAAA;AAAoB,EAAA;AAExB;A1BqlBA;AACA;A2BxpBO;AAGL,EAAA;AAEA,EAAA;AAEA,EAAA;AACF;A3BspBA;AACA;A4BlpBO;AAIL,EAAA;AAAqB,IAAA;AAChB,MAAA;AACD,QAAA;AACE,cAAA;AAC0B;AAAA;AAAA,gBAAA;AAAA,MAAA;AAI5B,MAAA;AACiC,IAAA;AACnC,EAAA;AAGF,EAAA;AAAO,IAAA;AAEgD,EAAA;AAEzD;A5B+oBA;AACA;A6BpoBO;AAQL,EAAA;AAMA,EAAA;AAKA,EAAA;AAGA,EAAA;AAGA,EAAA;AAGA,EAAA;AACW,IAAA;AACP,MAAA;AACE,gBAAA;AAC4B,uDAAA;AACiD;AAAA,WAAA;AAE1D,IAAA;AACrB,IAAA;AACiC,EAAA;AAGnC,IAAA;AAAiB,MAAA;AACL,MAAA;AAC6B,MAAA;AACQ,IAAA;AAGjD,IAAA;AAAsD,MAAA;AACM,MAAA;AAC3C,MAAA;AACC,MAAA;AAC0B,MAAA;AACA,IAAA;AAG5C,IAAA;AAAO,MAAA;AACF,MAAA;AACG,MAAA;AACN,IAAA;AAIF,EAAA;AAGF,EAAA;AACI,IAAA;AAEwC,IAAA;AAC5B,IAAA;AACuB,EAAA;AAEnC,IAAA;AAMU,IAAA;AACG,IAAA;AACI,EAAA;AAEvB;A7BgmBA;AACA;A8BntBO;AAIL,EAAA;AAAqB,IAAA;AAChB,MAAA;AACD,QAAA;AACE,gBAAA;AACiC;AAAA,kBAAA;AAAA,MAAA;AAGnC,MAAA;AACsD,IAAA;AACxD,EAAA;AAGF,EAAA;AAAO,IAAA;AAEwD,EAAA;AAEjE;A9BgtBA;AACA;A+BruBO;AACA;AA4CA;AAEL,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAEF;AACE,EAAA;AAEA,EAAA;AAEA,EAAA;AAGE,IAAA;AAQA,IAAA;AAAsD,MAAA;AACpD,MAAA;AACA,IAAA;AAGF,IAAA;AAEA,IAAA;AACE,MAAA;AAGA,MAAA;AACE,QAAA;AAEA,QAAA;AACE,UAAA;AACA,UAAA;AAAA,QAAA;AACF,MAAA;AAGF,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAAsC,MAAA;AAEtC,QAAA;AAAW,MAAA;AACb,IAAA;AACO,EAAA;AAGX,EAAA;AAAO,IAAA;AAEH,MAAA;AAAO,IAAA;AACT,IAAA;AAEE,MAAA;AAEA,MAAA;AACE,QAAA;AAEA,QAAA;AAA2B,MAAA;AAG7B,MAAA;AAAO,IAAA;AACT,IAAA;AAEE,MAAA;AACA,MAAA;AACA,MAAA;AAAM,IAAA;AACR,EAAA;AAEJ;AAEO;AAGL,EAAA;AAIE,IAAA;AAEF,EAAA;AAEA,EAAA;AAGF;A/BypBA;AACA;AgCpsBA;AAGE,EAAA;AACA,EAAA;AAGA,EAAA;AAIE,IAAA;AACA,IAAA;AACE,MAAA;AAAU,QAAA;AACgC,MAAA;AAG5C,IAAA;AAKA,IAAA;AAAsB,EAAA;AAGxB,EAAA;AAAO,IAAA;AACO,IAAA;AAIV,MAAA;AACA,MAAA;AAEA,MAAA;AAAoE,QAAA;AAC7C,QAAA;AACF,MAAA;AAGrB,MAAA;AAEA,MAAA;AAA+C,IAAA;AACjD,IAAA;AAEE,MAAA;AAAO,IAAA;AACT,IAAA;AAKE,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAEA,QAAA;AAEA,QAAA;AACE,UAAA;AAKA,UAAA;AAAgE,YAAA;AAC9D,YAAA;AACA,UAAA;AAIF,UAAA;AAAmC,YAAA;AACZ,YAAA;AACJ,YAAA;AACjB,YAAA;AACmC,YAAA;AAChB,UAAA;AAGrB,UAAA;AAEA,UAAA;AAIE,YAAA;AACA,YAAA;AACA,YAAA;AAAA,UAAA;AAGF,UAAA;AACE,YAAA;AACA,YAAA;AACA,YAAA;AAAA,UAAA;AAGF,UAAA;AAIE,YAAA;AAAA,QAAA;AAEJ,QAAA;AAAO,MAAA;AACR,IAAA;AACH,EAAA;AAEJ;AAEO;AAGL,EAAA;AAEA,EAAA;AAAyC,IAAA;AAC0B,IAAA;AAE/D,MAAA;AAEA,MAAA;AAAwC,IAAA;AAC1C,IAAA;AACG,EAAA;AAEP;AAEO;AAGL,EAAA;AACE,IAAA;AAAwC,EAAA;AAG1C,EAAA;AACF;AhCopBA;AACA;AiCx1BO;AAKL,EAAA;AACA,EAAA;AACA,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AAGE,IAAA;AAEA,IAAA;AACE,MAAA;AAAO,QAAA;AACC,QAAA;AACE,MAAA;AAGZ,IAAA;AAA6B,MAAA;AAGzB,QAAA;AAAiE,MAAA;AAClE,IAAA;AAGH,IAAA;AAAc,MAAA;AACyC,IAAA;AAGnD,MAAA;AACQ,IAAA;AACR,EAAA;AAGN,EAAA;AACqC,IAAA;AACjC,IAAA;AACA,IAAA;AAEwB,IAAA;AAGtB,EAAA;AAGN,EAAA;AACE,IAAA;AACA,IAAA;AACA,IAAA;AACE,MAAA;AACA,MAAA;AAAuB,IAAA;AAEzB,IAAA;AAAM,EAAA;AAGR,EAAA;AAAO,IAAA;AACL,IAAA;AAEE,MAAA;AAAO,IAAA;AACT,IAAA;AAIE,MAAA;AAEA,MAAA;AAEA,MAAA;AAAO,IAAA;AACT,IAAA;AAEE,MAAA;AAEA,MAAA;AACE,QAAA;AACE,UAAA;AAAe,YAAA;AACT,cAAA;AACF,YAAA;AACF,UAAA;AAGJ,QAAA;AAEA,QAAA;AAAkB,UAAA;AACoC,QAAA;AAGtD,QAAA;AAAwC,MAAA;AAG1C,MAAA;AAAO,IAAA;AACT,IAAA;AACA,IAAA;AAEE,MAAA;AAEA,MAAA;AAEA,MAAA;AAAuD,IAAA;AACzD,EAAA;AAEJ;AjCwzBA;AACA;AkC56BO;AAGL
,EAAA;AAEA,EAAA;AAEA,EAAA;AAAmC,IAAA;AACmB,EAAA;AAGtD,EAAA;AACE,IAAA;AAAgC,MAAA;AAC9B,IAAA;AACD,EAAA;AAEL;AlCy6BA;AACA;AmCn7BO;AA+CA;AAGL,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAEA,EAAA;AAIA,EAAA;AAIA,EAAA;AAEA,EAAA;AACE,IAAA;AACE,MAAA;AAAO,IAAA;AAGT,IAAA;AAAwB,MAAA;AACtB,IAAA;AACD,EAAA;AAGH,EAAA;AACE,IAAA;AACE,MAAA;AAAA,IAAA;AAEF,IAAA;AACE,MAAA;AACA,MAAA;AAAW,IAAA;AACb,EAAA;AAGF,EAAA;AAGE,IAAA;AACE,MAAA;AAA4B,IAAA;AAG9B,IAAA;AACE,MAAA;AACA,MAAA;AAA6B,IAAA;AAE7B,MAAA;AAAgB,IAAA;AAClB,EAAA;AAGF,EAAA;AACE,IAAA;AAEoC,EAAA;AAGtC,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AACA,MAAA;AAAiB,IAAA;AAGnB,IAAA;AAAuB,EAAA;AAGzB,EAAA;AAAO,IAAA;AASH,MAAA;AAEA,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAA2C,MAAA;AAG7C,MAAA;AACE,QAAA;AAA4B,MAAA;AAG9B,MAAA;AAAqB,QAAA;AAC+B,MAAA;AAGpD,MAAA;AAEA,MAAA;AAAA,QAAA;AACE,QAAA;AACA,QAAA;AACA,MAAA;AAGF,MAAA;AACE,QAAA;AAEA,QAAA;AAA2B,MAAA;AAG7B,MAAA;AAAO,QAAA;AACL,QAAA;AACA,QAAA;AACqB,MAAA;AACvB,IAAA;AACF,IAAA;AAO0E,IAAA;AAOxE,MAAA;AACE,QAAA;AAA4B,MAAA;AAI9B,MAAA;AAEA,MAAA;AAGA,MAAA;AAA2B,QAAA;AAC0B,UAAA;AAC9C,UAAA;AAED,YAAA;AACE,cAAA;AAAwB,gBAAA;AACT,gBAAA;AACL,gBAAA;AACL,cAAA;AAGP,YAAA;AAAkE,UAAA;AACpE,QAAA;AACD,MAAA;AAGH,MAAA;AACE,QAAA;AAAU,UAAA;AACP;AAAA,2CAAA;AACiC,QAAA;AAGtC,MAAA;AAAO,QAAA;AACmC,QAAA;AACF,QAAA;AAEmB,MAAA;AAC3D,IAAA;AACF,IAAA;AAI8C,MAAA;AAC1B,MAAA;AAChB,MAAA;AACgB,IAAA;AACjB,EAAA;AAEP;AnCy0BA;AACA;AoC/jCO;AAOL,EAAA;AAKA,EAAA;AAAW,IAAA;AAKH,EAAA;AAGR,EAAA;AAEA,EAAA;AAAyB,IAAA;AACvB,gBAAA;AACgC,yEAAA;AACmE,IAAA;AACxD,EAAA;AAG7C,EAAA;AAEI,IAAA;AAAiB,MAAA;AACL,MAAA;AAC6B,MAAA;AACQ,IAAA;AAGjD,IAAA;AAAsD,MAAA;AACM,MAAA;AAC3C,MAAA;AACH,MAAA;AAC8B,MAAA;AACA,IAAA;AAG5C,IAAA;AAAO,MAAA;AACF,MAAA;AACG,MAAA;AACN,IAAA;AAIF,EAAA;AAGJ,EAAA;AACI,IAAA;AAE4C,IAAA;AAClC,IAAA;AACM,EAAA;AAEhB,IAAA;AACwB,IAAA;AACb,IAAA;AACK,EAAA;AAEtB;ApCsiCA;AACA;AqC5nCA;AAQE,EAAA;AACE,IAAA;AAA8B,MAAA;AACxB,iBAAA;AACgC;AAAA;AAAA;AAAA;AAAA,QAAA;AAKjC,MAAA;AACoE,IAAA;AAEzE,IAAA;AACE,MAAA;AAAO,IAAA;AAEP,MAAA;AAA+B,QAAA;AAC1B,UAAA;AACD,YAAA;AACgE,0DAAA;AAAA,UAAA;AAEhE,UAAA;AACuB,QAAA;AACzB,MAAA;AAGF,MAAA;AACE,QAAA;AAAO,MAAA;AAMP,QAAA;AAAO,MAAA;AAEP,QAAA;AAAO,MAAA;AACT,IAAA;AACF,EAAA;AAEA,IAAA;AACE,MAAA;AAAS,QAAA;AACP,UAAA;AACwC,QAAA;AACxC,QAAA;AACsD,MAAA;AAExD,MAAA;AAAO,IAAA;AAEP,MAAA;AACE,QAAA;AAAM,MAAA;AAGR,MAAA;AAAsB,QAAA;AACjB,UAAA;AACD,YAAA;AACgE,UAAA;AAChE,UAAA;AACuB,QAAA;AACzB,MAAA;AAEF,MAAA;AACE,QAAA;AAAO,MAAA;AAEP,QAAA;AAAO,MAAA;AACT,IAAA;AACF,EAAA;AAEJ;AAWA;AAUE,EAAA;AACE,IAAA;AAAqB,MAAA;AACnB,MAAA;AACQ,uBAAA;AACW,MAAA;AACX,MAAA;AACA,uBAAA;AACa,IAAA;AAGvB,IAAA;AAEoE,EAAA;AAEpE,IAAA;AACA,IAAA;AAAM,EAAA;AAEV;ArC2lCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/emmett/emmett/src/packages/emmett-sqlite/dist/index.cjs","sourcesContent":[null,"import sqlite3 from 'sqlite3';\n\nexport type Parameters = object | string | bigint | number | boolean | null;\n\nexport type SQLiteConnection = {\n close: () => void;\n command: (sql: string, values?: Parameters[]) => Promise<sqlite3.RunResult>;\n query: <T>(sql: string, values?: Parameters[]) => Promise<T[]>;\n querySingle: <T>(sql: string, values?: Parameters[]) => Promise<T | null>;\n withTransaction: <T>(fn: () => Promise<T>) => Promise<T>;\n};\n\nexport interface SQLiteError extends Error {\n errno: number;\n}\n\nexport const isSQLiteError = (error: unknown): error is SQLiteError => {\n if (error instanceof Error && 'code' in error) {\n return true;\n }\n\n return false;\n};\n\nexport type InMemorySharedCacheSQLiteDatabase = 
'file::memory:?cache=shared';\nexport const InMemorySharedCacheSQLiteDatabase = 'file::memory:?cache=shared';\nexport type InMemorySQLiteDatabase = ':memory:';\nexport const InMemorySQLiteDatabase = ':memory:';\n\ntype SQLiteConnectionOptions = {\n // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n fileName: InMemorySQLiteDatabase | string | undefined;\n};\n\nexport const sqliteConnection = (\n options: SQLiteConnectionOptions,\n): SQLiteConnection => {\n const fileName = options.fileName ?? InMemorySQLiteDatabase;\n let db: sqlite3.Database;\n\n if (fileName.startsWith('file:')) {\n db = new sqlite3.Database(\n fileName,\n sqlite3.OPEN_URI | sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,\n );\n } else {\n db = new sqlite3.Database(fileName);\n }\n db.run('PRAGMA journal_mode = WAL;');\n let transactionNesting = 0;\n\n return {\n close: (): void => db.close(),\n command: (sql: string, params?: Parameters[]) =>\n new Promise<sqlite3.RunResult>((resolve, reject) => {\n db.run(\n sql,\n params ?? [],\n function (this: sqlite3.RunResult, err: Error | null) {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(this);\n },\n );\n }),\n query: <T>(sql: string, params?: Parameters[]): Promise<T[]> =>\n new Promise((resolve, reject) => {\n db.all(sql, params ?? [], (err: Error | null, result: T[]) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(result);\n });\n }),\n querySingle: <T>(sql: string, params?: Parameters[]): Promise<T | null> =>\n new Promise((resolve, reject) => {\n db.get(sql, params ?? [], (err: Error | null, result: T | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(result);\n });\n }),\n withTransaction: async <T>(fn: () => Promise<T>) => {\n try {\n if (transactionNesting++ == 0) {\n await beginTransaction(db);\n }\n const result = await fn();\n\n if (transactionNesting === 1) await commitTransaction(db);\n transactionNesting--;\n\n return result;\n } catch (err) {\n console.log(err);\n\n if (--transactionNesting === 0) await rollbackTransaction(db);\n\n throw err;\n }\n },\n };\n};\n\nconst beginTransaction = (db: sqlite3.Database) =>\n new Promise<void>((resolve, reject) => {\n db.run('BEGIN IMMEDIATE TRANSACTION', (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n });\n\nconst commitTransaction = (db: sqlite3.Database) =>\n new Promise<void>((resolve, reject) => {\n db.run('COMMIT', (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n });\n\nconst rollbackTransaction = (db: sqlite3.Database) =>\n new Promise<void>((resolve, reject) => {\n db.run('ROLLBACK', (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n });\n","import { ValidationError } from '../errors';\n\nexport const enum ValidationErrors {\n NOT_A_NONEMPTY_STRING = 'NOT_A_NONEMPTY_STRING',\n NOT_A_POSITIVE_NUMBER = 'NOT_A_POSITIVE_NUMBER',\n NOT_AN_UNSIGNED_BIGINT = 'NOT_AN_UNSIGNED_BIGINT',\n}\n\nexport const isNumber = (val: unknown): val is number =>\n typeof val === 'number' && val === val;\n\nexport const isString = (val: unknown): val is string =>\n typeof val === 'string';\n\nexport const assertNotEmptyString = (value: unknown): string => {\n if (!isString(value) || value.length === 0) {\n throw new ValidationError(ValidationErrors.NOT_A_NONEMPTY_STRING);\n }\n return value;\n};\n\nexport const assertPositiveNumber = (value: unknown): number => {\n if (!isNumber(value) || value <= 0) {\n throw new 
ValidationError(ValidationErrors.NOT_A_POSITIVE_NUMBER);\n }\n return value;\n};\n\nexport const assertUnsignedBigInt = (value: string): bigint => {\n const number = BigInt(value);\n if (number < 0) {\n throw new ValidationError(ValidationErrors.NOT_AN_UNSIGNED_BIGINT);\n }\n return number;\n};\n\nexport * from './dates';\n","import { isNumber, isString } from '../validation';\n\nexport type ErrorConstructor<ErrorType extends Error> = new (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ...args: any[]\n) => ErrorType;\n\nexport const isErrorConstructor = <ErrorType extends Error>(\n // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type\n expect: Function,\n): expect is ErrorConstructor<ErrorType> => {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n return (\n typeof expect === 'function' &&\n expect.prototype &&\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n expect.prototype.constructor === expect\n );\n};\n\nexport class EmmettError extends Error {\n public errorCode: number;\n\n constructor(\n options?: { errorCode: number; message?: string } | string | number,\n ) {\n const errorCode =\n options && typeof options === 'object' && 'errorCode' in options\n ? options.errorCode\n : isNumber(options)\n ? options\n : 500;\n const message =\n options && typeof options === 'object' && 'message' in options\n ? options.message\n : isString(options)\n ? options\n : `Error with status code '${errorCode}' ocurred during Emmett processing`;\n\n super(message);\n this.errorCode = errorCode;\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, EmmettError.prototype);\n }\n}\n\nexport class ConcurrencyError extends EmmettError {\n constructor(\n public current: string | undefined,\n public expected: string,\n message?: string,\n ) {\n super({\n errorCode: 412,\n message:\n message ??\n `Expected version ${expected.toString()} does not match current ${current?.toString()}`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ConcurrencyError.prototype);\n }\n}\n\nexport class ConcurrencyInMemoryDatabaseError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 412,\n message: message ?? `Expected document state does not match current one!`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ConcurrencyInMemoryDatabaseError.prototype);\n }\n}\n\nexport class ValidationError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 400,\n message: message ?? `Validation Error ocurred during Emmett processing`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ValidationError.prototype);\n }\n}\n\nexport class IllegalStateError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 403,\n message: message ?? `Illegal State ocurred during Emmett processing`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, IllegalStateError.prototype);\n }\n}\n\nexport class NotFoundError extends EmmettError {\n constructor(options?: { id: string; type: string; message?: string }) {\n super({\n errorCode: 404,\n message:\n options?.message ??\n (options?.id\n ? options.type\n ? `${options.type} with ${options.id} was not found during Emmett processing`\n : `State with ${options.id} was not found during Emmett processing`\n : options?.type\n ? 
`${options.type} was not found during Emmett processing`\n : 'State was not found during Emmett processing'),\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, NotFoundError.prototype);\n }\n}\n","import { v4 as uuid } from 'uuid';\nimport type {\n BigIntStreamPosition,\n CombinedReadEventMetadata,\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../typing';\nimport { tryPublishMessagesAfterCommit } from './afterCommit';\nimport {\n type AggregateStreamOptions,\n type AggregateStreamResult,\n type AppendToStreamOptions,\n type AppendToStreamResult,\n type DefaultEventStoreOptions,\n type EventStore,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from './eventStore';\nimport { assertExpectedVersionMatchesCurrent } from './expectedVersion';\nimport { StreamingCoordinator } from './subscriptions';\nimport type { ProjectionRegistration } from '../projections';\n\nexport const InMemoryEventStoreDefaultStreamVersion = 0n;\n\nexport type InMemoryEventStore =\n EventStore<ReadEventMetadataWithGlobalPosition>;\n\nexport type InMemoryReadEventMetadata = ReadEventMetadataWithGlobalPosition;\n\nexport type InMemoryProjectionHandlerContext = {\n eventStore: InMemoryEventStore;\n};\n\nexport type InMemoryEventStoreOptions =\n DefaultEventStoreOptions<InMemoryEventStore> & {\n projections?: ProjectionRegistration<\n 'inline',\n InMemoryReadEventMetadata,\n InMemoryProjectionHandlerContext\n >[];\n };\n\nexport type InMemoryReadEvent<EventType extends Event = Event> = ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n>;\n\nexport const getInMemoryEventStore = (\n eventStoreOptions?: InMemoryEventStoreOptions,\n): InMemoryEventStore => {\n const streams = new Map<\n string,\n ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[]\n >();\n const streamingCoordinator = StreamingCoordinator();\n\n const getAllEventsCount = () => {\n return Array.from<ReadEvent[]>(streams.values())\n .map((s) => s.length)\n .reduce((p, c) => p + c, 0);\n };\n\n const _inlineProjections = (eventStoreOptions?.projections ?? [])\n .filter(({ type }) => type === 'inline')\n .map(({ projection }) => projection);\n\n return {\n async aggregateStream<State, EventType extends Event>(\n streamName: string,\n options: AggregateStreamOptions<\n State,\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n ): Promise<AggregateStreamResult<State>> {\n const { evolve, initialState, read } = options;\n\n const result = await this.readStream<EventType>(streamName, read);\n\n const events = result?.events ?? [];\n\n return {\n currentStreamVersion: BigInt(events.length),\n state: events.reduce(evolve, initialState()),\n streamExists: result.streamExists,\n };\n },\n\n readStream: <EventType extends Event>(\n streamName: string,\n options?: ReadStreamOptions<BigIntStreamPosition>,\n ): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n > => {\n const events = streams.get(streamName);\n const currentStreamVersion = events\n ? BigInt(events.length)\n : InMemoryEventStoreDefaultStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n options?.expectedStreamVersion,\n InMemoryEventStoreDefaultStreamVersion,\n );\n\n const from = Number(options && 'from' in options ? options.from : 0);\n const to = Number(\n options && 'to' in options\n ? options.to\n : options && 'maxCount' in options && options.maxCount\n ? options.from + options.maxCount\n : (events?.length ?? 
1),\n );\n\n const resultEvents =\n events !== undefined && events.length > 0\n ? events\n .map(\n (e) =>\n e as ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n )\n .slice(from, to)\n : [];\n\n const result: ReadStreamResult<\n EventType,\n ReadEventMetadataWithGlobalPosition\n > = {\n currentStreamVersion,\n events: resultEvents,\n streamExists: events !== undefined && events.length > 0,\n };\n\n return Promise.resolve(result);\n },\n\n appendToStream: async <EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResult> => {\n const currentEvents = streams.get(streamName) ?? [];\n const currentStreamVersion =\n currentEvents.length > 0\n ? BigInt(currentEvents.length)\n : InMemoryEventStoreDefaultStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n options?.expectedStreamVersion,\n InMemoryEventStoreDefaultStreamVersion,\n );\n\n const newEvents: ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >[] = events.map((event, index) => {\n const metadata: ReadEventMetadataWithGlobalPosition = {\n streamName,\n messageId: uuid(),\n streamPosition: BigInt(currentEvents.length + index + 1),\n globalPosition: BigInt(getAllEventsCount() + index + 1),\n };\n return {\n ...event,\n kind: event.kind ?? 'Event',\n metadata: {\n ...('metadata' in event ? (event.metadata ?? {}) : {}),\n ...metadata,\n } as CombinedReadEventMetadata<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n };\n });\n\n const positionOfLastEventInTheStream = BigInt(\n newEvents.slice(-1)[0]!.metadata.streamPosition,\n );\n\n streams.set(streamName, [...currentEvents, ...newEvents]);\n await streamingCoordinator.notify(newEvents);\n\n const result: AppendToStreamResult = {\n nextExpectedStreamVersion: positionOfLastEventInTheStream,\n createdNewStream:\n currentStreamVersion === InMemoryEventStoreDefaultStreamVersion,\n };\n\n await tryPublishMessagesAfterCommit<InMemoryEventStore>(\n newEvents,\n eventStoreOptions?.hooks,\n );\n\n return result;\n },\n\n //streamEvents: streamingCoordinator.stream,\n };\n};\n","import { TransformStream } from 'web-streams-polyfill';\nimport type {\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../../typing';\nimport { globalStreamCaughtUp, type GlobalSubscriptionEvent } from '../events';\n\nexport const streamTrackingGlobalPosition = (\n currentEvents: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[],\n) => new CaughtUpTransformStream(currentEvents);\n\nexport class CaughtUpTransformStream extends TransformStream<\n ReadEvent<Event, ReadEventMetadataWithGlobalPosition>,\n | ReadEvent<Event, ReadEventMetadataWithGlobalPosition>\n | GlobalSubscriptionEvent\n> {\n private _currentPosition: bigint;\n private _logPosition: bigint;\n\n constructor(events: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[]) {\n super({\n start: (controller) => {\n let globalPosition = 0n;\n for (const event of events) {\n controller.enqueue(event);\n globalPosition = event.metadata.globalPosition;\n }\n controller.enqueue(globalStreamCaughtUp({ globalPosition }));\n },\n transform: (event, controller) => {\n this._currentPosition = event.metadata.globalPosition;\n controller.enqueue(event);\n\n if (this._currentPosition < this._logPosition) return;\n\n controller.enqueue(\n globalStreamCaughtUp({ globalPosition: this._currentPosition }),\n );\n },\n });\n\n this._currentPosition = this._logPosition =\n events.length > 0\n ? 
events[events.length - 1]!.metadata.globalPosition\n : 0n;\n }\n\n public set logPosition(value: bigint) {\n this._logPosition = value;\n }\n}\n","import { v4 as uuid } from 'uuid';\nimport { notifyAboutNoActiveReadersStream } from '../../streaming/transformations/notifyAboutNoActiveReaders';\nimport { writeToStream } from '../../streaming/writers';\nimport type {\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../../typing';\nimport {\n CaughtUpTransformStream,\n streamTrackingGlobalPosition,\n} from './caughtUpTransformStream';\n\nexport const StreamingCoordinator = () => {\n const allEvents: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[] = [];\n const listeners = new Map<string, CaughtUpTransformStream>();\n\n return {\n notify: async (\n events: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[],\n ) => {\n if (events.length === 0) return;\n\n allEvents.push(...events);\n\n for (const listener of listeners.values()) {\n listener.logPosition =\n events[events.length - 1]!.metadata.globalPosition;\n\n await writeToStream(listener, events);\n }\n },\n\n stream: () => {\n const streamId = uuid();\n const transformStream = streamTrackingGlobalPosition(allEvents);\n\n listeners.set(streamId, transformStream);\n return transformStream.readable.pipeThrough(\n notifyAboutNoActiveReadersStream(\n (stream) => {\n if (listeners.has(stream.streamId))\n listeners.delete(stream.streamId);\n },\n { streamId },\n ),\n );\n },\n };\n};\n","import { v4 as uuid } from 'uuid';\nimport { TransformStream } from 'web-streams-polyfill';\n\nexport const notifyAboutNoActiveReadersStream = <Item>(\n onNoActiveReaderCallback: (\n stream: NotifyAboutNoActiveReadersStream<Item>,\n ) => void,\n options: { streamId?: string; intervalCheckInMs?: number } = {},\n) => new NotifyAboutNoActiveReadersStream(onNoActiveReaderCallback, options);\n\nexport class NotifyAboutNoActiveReadersStream<Item> extends TransformStream<\n Item,\n Item\n> {\n private checkInterval: NodeJS.Timeout | null = null;\n public readonly streamId: string;\n private _isStopped: boolean = false;\n public get hasActiveSubscribers() {\n return !this._isStopped;\n }\n\n constructor(\n private onNoActiveReaderCallback: (\n stream: NotifyAboutNoActiveReadersStream<Item>,\n ) => void,\n options: { streamId?: string; intervalCheckInMs?: number } = {},\n ) {\n super({\n cancel: (reason) => {\n console.log('Stream was canceled. Reason:', reason);\n this.stopChecking();\n },\n });\n this.streamId = options?.streamId ?? uuid();\n\n this.onNoActiveReaderCallback = onNoActiveReaderCallback;\n\n this.startChecking(options?.intervalCheckInMs ?? 
20);\n }\n\n private startChecking(interval: number) {\n this.checkInterval = setInterval(() => {\n this.checkNoActiveReader();\n }, interval);\n }\n\n private stopChecking() {\n if (!this.checkInterval) return;\n\n clearInterval(this.checkInterval);\n this.checkInterval = null;\n this._isStopped = true;\n this.onNoActiveReaderCallback(this);\n }\n\n private checkNoActiveReader() {\n if (!this.readable.locked && !this._isStopped) {\n this.stopChecking();\n }\n }\n}\n","import retry from 'async-retry';\n\nexport type AsyncRetryOptions = retry.Options & {\n shouldRetryError?: (error: unknown) => boolean;\n};\n\nexport const NoRetries: AsyncRetryOptions = { retries: 0 };\n\nexport const asyncRetry = async <T>(\n fn: () => Promise<T>,\n opts?: AsyncRetryOptions,\n): Promise<T> => {\n if (opts === undefined || opts.retries === 0) return fn();\n\n return retry(\n async (bail) => {\n try {\n return await fn();\n } catch (error) {\n if (opts?.shouldRetryError && !opts.shouldRetryError(error)) {\n bail(error as Error);\n }\n throw error;\n }\n },\n opts ?? { retries: 0 },\n );\n};\n","import { v7 as uuid } from 'uuid';\nimport { deepEquals } from '../utils';\nimport {\n type DeleteResult,\n type Document,\n type DocumentHandler,\n type HandleOptionErrors,\n type HandleOptions,\n type HandleResult,\n type InsertOneResult,\n type OptionalUnlessRequiredIdAndVersion,\n type ReplaceOneOptions,\n type UpdateResult,\n type WithoutId,\n type WithIdAndVersion,\n} from './types';\nimport { expectedVersionValue, operationResult } from './utils';\n\nexport interface DocumentsCollection<T extends Document> {\n handle: (\n id: string,\n handle: DocumentHandler<T>,\n options?: HandleOptions,\n ) => HandleResult<T>;\n findOne: (predicate?: Predicate<T>) => T | null;\n insertOne: (\n document: OptionalUnlessRequiredIdAndVersion<T>,\n ) => InsertOneResult;\n deleteOne: (predicate?: Predicate<T>) => DeleteResult;\n replaceOne: (\n predicate: Predicate<T>,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ) => UpdateResult;\n}\n\nexport interface Database {\n collection: <T extends Document>(name: string) => DocumentsCollection<T>;\n}\n\ntype Predicate<T> = (item: T) => boolean;\n\nexport const getInMemoryDatabase = (): Database => {\n const storage = new Map<string, WithIdAndVersion<Document>[]>();\n\n return {\n collection: <T extends Document>(\n collectionName: string,\n collectionOptions: {\n errors?: HandleOptionErrors;\n } = {},\n ): DocumentsCollection<T> => {\n const ensureCollectionCreated = () => {\n if (!storage.has(collectionName)) storage.set(collectionName, []);\n };\n\n const errors = collectionOptions.errors;\n\n const collection = {\n collectionName,\n insertOne: (\n document: OptionalUnlessRequiredIdAndVersion<T>,\n ): InsertOneResult => {\n ensureCollectionCreated();\n\n const _id = (document._id as string | undefined | null) ?? uuid();\n const _version = document._version ?? 
1n;\n\n const existing = collection.findOne((c) => c._id === _id);\n\n if (existing) {\n return operationResult<InsertOneResult>(\n {\n successful: false,\n insertedId: null,\n nextExpectedVersion: _version,\n },\n { operationName: 'insertOne', collectionName, errors },\n );\n }\n\n const documentsInCollection = storage.get(collectionName)!;\n const newDocument = { ...document, _id, _version };\n const newCollection = [...documentsInCollection, newDocument];\n storage.set(collectionName, newCollection);\n\n return operationResult<InsertOneResult>(\n {\n successful: true,\n insertedId: _id,\n nextExpectedVersion: _version,\n },\n { operationName: 'insertOne', collectionName, errors },\n );\n },\n findOne: (predicate?: Predicate<T>): T | null => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName);\n const filteredDocuments = predicate\n ? documentsInCollection?.filter((doc) => predicate(doc as T))\n : documentsInCollection;\n\n const firstOne = filteredDocuments?.[0] ?? null;\n\n return firstOne as T | null;\n },\n deleteOne: (predicate?: Predicate<T>): DeleteResult => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName)!;\n\n if (predicate) {\n const foundIndex = documentsInCollection.findIndex((doc) =>\n predicate(doc as T),\n );\n\n if (foundIndex === -1) {\n return operationResult<DeleteResult>(\n {\n successful: false,\n matchedCount: 0,\n deletedCount: 0,\n },\n { operationName: 'deleteOne', collectionName, errors },\n );\n } else {\n const newCollection = documentsInCollection.toSpliced(\n foundIndex,\n 1,\n );\n\n storage.set(collectionName, newCollection);\n\n return operationResult<DeleteResult>(\n {\n successful: true,\n matchedCount: 1,\n deletedCount: 1,\n },\n { operationName: 'deleteOne', collectionName, errors },\n );\n }\n } else {\n const newCollection = documentsInCollection.slice(1);\n\n storage.set(collectionName, newCollection);\n\n return operationResult<DeleteResult>(\n {\n successful: true,\n matchedCount: 1,\n deletedCount: 1,\n },\n { operationName: 'deleteOne', collectionName, errors },\n );\n }\n },\n replaceOne: (\n predicate: Predicate<T>,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): UpdateResult => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName)!;\n\n const foundIndexes = documentsInCollection\n .filter((doc) => predicate(doc as T))\n .map((_, index) => index);\n\n const firstIndex = foundIndexes[0];\n\n if (!firstIndex || firstIndex === -1) {\n return operationResult<UpdateResult>(\n {\n successful: false,\n matchedCount: 0,\n modifiedCount: 0,\n nextExpectedVersion: 0n,\n },\n { operationName: 'replaceOne', collectionName, errors },\n );\n }\n\n const existing = documentsInCollection[firstIndex]!;\n\n if (\n typeof options?.expectedVersion === 'bigint' &&\n existing._version !== options.expectedVersion\n ) {\n return operationResult<UpdateResult>(\n {\n successful: false,\n matchedCount: 1,\n modifiedCount: 0,\n nextExpectedVersion: existing._version,\n },\n { operationName: 'replaceOne', collectionName, errors },\n );\n }\n\n const newVersion = existing._version + 1n;\n\n const newCollection = documentsInCollection.with(firstIndex, {\n _id: existing._id,\n ...document,\n _version: newVersion,\n });\n\n storage.set(collectionName, newCollection);\n\n return operationResult<UpdateResult>(\n {\n successful: true,\n modifiedCount: 1,\n matchedCount: foundIndexes.length,\n nextExpectedVersion: newVersion,\n },\n { 
operationName: 'replaceOne', collectionName, errors },\n );\n },\n handle: (\n id: string,\n handle: DocumentHandler<T>,\n options?: HandleOptions,\n ): HandleResult<T> => {\n const { expectedVersion: version, ...operationOptions } =\n options ?? {};\n ensureCollectionCreated();\n const existing = collection.findOne((c) => c.id === id);\n\n const expectedVersion = expectedVersionValue(version);\n\n if (\n (existing == null && version === 'DOCUMENT_EXISTS') ||\n (existing == null && expectedVersion != null) ||\n (existing != null && version === 'DOCUMENT_DOES_NOT_EXIST') ||\n (existing != null &&\n expectedVersion !== null &&\n existing._version !== expectedVersion)\n ) {\n return operationResult<HandleResult<T>>(\n {\n successful: false,\n document: existing as WithIdAndVersion<T>,\n },\n { operationName: 'handle', collectionName, errors },\n );\n }\n\n const result = handle(existing !== null ? { ...existing } : null);\n\n if (deepEquals(existing, result))\n return operationResult<HandleResult<T>>(\n {\n successful: true,\n document: existing as WithIdAndVersion<T>,\n },\n { operationName: 'handle', collectionName, errors },\n );\n\n if (!existing && result) {\n const newDoc = { ...result, _id: id };\n const insertResult = collection.insertOne({\n ...newDoc,\n _id: id,\n } as OptionalUnlessRequiredIdAndVersion<T>);\n return {\n ...insertResult,\n document: {\n ...newDoc,\n _version: insertResult.nextExpectedVersion,\n } as unknown as WithIdAndVersion<T>,\n };\n }\n\n if (existing && !result) {\n const deleteResult = collection.deleteOne(({ _id }) => id === _id);\n return { ...deleteResult, document: null };\n }\n\n if (existing && result) {\n const replaceResult = collection.replaceOne(\n ({ _id }) => id === _id,\n result,\n {\n ...operationOptions,\n expectedVersion: expectedVersion ?? 
'DOCUMENT_EXISTS',\n },\n );\n return {\n ...replaceResult,\n document: {\n ...result,\n _version: replaceResult.nextExpectedVersion,\n } as unknown as WithIdAndVersion<T>,\n };\n }\n\n return operationResult<HandleResult<T>>(\n {\n successful: true,\n document: existing as WithIdAndVersion<T>,\n },\n { operationName: 'handle', collectionName, errors },\n );\n },\n };\n\n return collection;\n },\n };\n};\n","import { ReadableStream } from 'web-streams-polyfill';\n\nexport const fromArray = <T>(chunks: T[]) =>\n new ReadableStream<T>({\n start(controller) {\n for (const chunk of chunks) controller.enqueue(chunk);\n controller.close();\n },\n });\n","import {\n type ReadableStream,\n type ReadableStreamDefaultReadResult,\n type TransformStreamDefaultController,\n} from 'web-streams-polyfill';\nimport type { AsyncRetryOptions } from '../utils';\nimport type { Decoder } from './decoders';\nimport { DefaultDecoder } from './decoders/composite';\nimport { streamTransformations } from './transformations';\n\nconst { retry } = streamTransformations;\n\nexport const restream = <\n Source = unknown,\n Transformed = Source,\n StreamType = Source,\n>(\n createSourceStream: () => ReadableStream<StreamType>,\n transform: (input: Source) => Transformed = (source) =>\n source as unknown as Transformed,\n retryOptions: AsyncRetryOptions = { forever: true, minTimeout: 25 },\n decoder: Decoder<StreamType, Source> = new DefaultDecoder<Source>(),\n): ReadableStream<Transformed> =>\n retry(createSourceStream, handleChunk(transform, decoder), retryOptions)\n .readable;\n\nconst handleChunk =\n <Source = unknown, Transformed = Source, StreamType = Source>(\n transform: (input: Source) => Transformed = (source) =>\n source as unknown as Transformed,\n decoder: Decoder<StreamType, Source> = new DefaultDecoder<Source>(),\n ) =>\n (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ): void => {\n const { done: isDone, value } = readResult;\n\n if (value) decoder.addToBuffer(value);\n\n if (!isDone && !decoder.hasCompleteMessage()) return;\n\n decodeAndTransform(decoder, transform, controller);\n };\n\nconst decodeAndTransform = <StreamType, Source, Transformed = Source>(\n decoder: Decoder<StreamType, Source>,\n transform: (input: Source) => Transformed,\n controller: TransformStreamDefaultController<Transformed>,\n) => {\n try {\n const decoded = decoder.decode();\n if (!decoded) return; // TODO: Add a proper handling of decode errors\n\n const transformed = transform(decoded);\n controller.enqueue(transformed);\n } catch (error) {\n controller.error(new Error(`Decoding error: ${error?.toString()}`));\n }\n};\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const filter = <Item>(filter: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n transform(chunk, controller) {\n if (filter(chunk)) {\n controller.enqueue(chunk);\n }\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const map = <From, To>(map: (item: From) => To) =>\n new TransformStream<From, To>({\n transform(chunk, controller) {\n controller.enqueue(map(chunk));\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const reduce = <I, O>(\n reducer: (accumulator: O, chunk: I) => O,\n initialValue: O,\n) => new ReduceTransformStream<I, O>(reducer, initialValue);\n\nexport class ReduceTransformStream<I, O> extends TransformStream<I, O> {\n private accumulator: O;\n private reducer: (accumulator: O, 
chunk: I) => O;\n\n constructor(reducer: (accumulator: O, chunk: I) => O, initialValue: O) {\n super({\n transform: (chunk) => {\n this.accumulator = this.reducer(this.accumulator, chunk);\n },\n flush: (controller) => {\n controller.enqueue(this.accumulator);\n controller.terminate();\n },\n });\n\n this.accumulator = initialValue;\n this.reducer = reducer;\n }\n}\n","import {\n type ReadableStream,\n type ReadableStreamDefaultReadResult,\n TransformStream,\n type TransformStreamDefaultController,\n} from 'web-streams-polyfill';\nimport { type AsyncRetryOptions, asyncRetry } from '../../utils';\n\nexport const retryStream = <\n Source = unknown,\n Transformed = Source,\n StreamType = Source,\n>(\n createSourceStream: () => ReadableStream<StreamType>,\n handleChunk: (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ) => Promise<void> | void,\n retryOptions: AsyncRetryOptions = { forever: true, minTimeout: 25 },\n): TransformStream<Source, Transformed> =>\n new TransformStream<Source, Transformed>({\n start(controller) {\n asyncRetry(\n () => onRestream(createSourceStream, handleChunk, controller),\n retryOptions,\n ).catch((error) => {\n controller.error(error);\n });\n },\n });\n\nconst onRestream = async <StreamType, Source, Transformed = Source>(\n createSourceStream: () => ReadableStream<StreamType>,\n handleChunk: (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ) => Promise<void> | void,\n controller: TransformStreamDefaultController<Transformed>,\n): Promise<void> => {\n const sourceStream = createSourceStream();\n const reader = sourceStream.getReader();\n\n try {\n let done: boolean;\n\n do {\n const result = await reader.read();\n done = result.done;\n\n await handleChunk(result, controller);\n\n if (done) {\n controller.terminate();\n }\n } while (!done);\n } finally {\n reader.releaseLock();\n }\n};\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const skip = <T>(limit: number) => new SkipTransformStream<T>(limit);\n\nexport class SkipTransformStream<T> extends TransformStream<T, T> {\n private count = 0;\n private skip: number;\n\n constructor(skip: number) {\n super({\n transform: (chunk, controller) => {\n this.count++;\n if (this.count > this.skip) {\n controller.enqueue(chunk);\n }\n },\n });\n\n this.skip = skip;\n }\n}\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const stopAfter = <Item>(stopCondition: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n transform(chunk, controller) {\n controller.enqueue(chunk);\n\n if (stopCondition(chunk)) {\n controller.terminate();\n }\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const stopOn = <Item>(stopCondition: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n async transform(chunk, controller) {\n if (!stopCondition(chunk)) {\n controller.enqueue(chunk);\n return;\n }\n await Promise.resolve();\n controller.terminate();\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const take = <T>(limit: number) => new TakeTransformStream<T>(limit);\n\nexport class TakeTransformStream<T> extends TransformStream<T, T> {\n private count = 0;\n private limit: number;\n\n constructor(limit: number) {\n super({\n transform: (chunk, controller) => {\n if (this.count < this.limit) {\n this.count++;\n controller.enqueue(chunk);\n } else {\n 
controller.terminate();\n }\n },\n });\n\n this.limit = limit;\n }\n}\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const waitAtMost = <Item>(waitTimeInMs: number) =>\n new TransformStream<Item, Item>({\n start(controller) {\n const timeoutId = setTimeout(() => {\n controller.terminate();\n }, waitTimeInMs);\n\n const originalTerminate = controller.terminate.bind(controller);\n\n // Clear the timeout if the stream is terminated early\n controller.terminate = () => {\n clearTimeout(timeoutId);\n originalTerminate();\n };\n },\n transform(chunk, controller) {\n controller.enqueue(chunk);\n },\n });\n","import { ConcurrencyError } from '../errors';\nimport type { BigIntStreamPosition, Flavour } from '../typing';\n\nexport type ExpectedStreamVersion<VersionType = BigIntStreamPosition> =\n | ExpectedStreamVersionWithValue<VersionType>\n | ExpectedStreamVersionGeneral;\n\nexport type ExpectedStreamVersionWithValue<VersionType = BigIntStreamPosition> =\n Flavour<VersionType, 'StreamVersion'>;\n\nexport type ExpectedStreamVersionGeneral = Flavour<\n 'STREAM_EXISTS' | 'STREAM_DOES_NOT_EXIST' | 'NO_CONCURRENCY_CHECK',\n 'StreamVersion'\n>;\n\nexport const STREAM_EXISTS = 'STREAM_EXISTS' as ExpectedStreamVersionGeneral;\nexport const STREAM_DOES_NOT_EXIST =\n 'STREAM_DOES_NOT_EXIST' as ExpectedStreamVersionGeneral;\nexport const NO_CONCURRENCY_CHECK =\n 'NO_CONCURRENCY_CHECK' as ExpectedStreamVersionGeneral;\n\nexport const matchesExpectedVersion = <StreamVersion = BigIntStreamPosition>(\n current: StreamVersion | undefined,\n expected: ExpectedStreamVersion<StreamVersion>,\n defaultVersion: StreamVersion,\n): boolean => {\n if (expected === NO_CONCURRENCY_CHECK) return true;\n\n if (expected == STREAM_DOES_NOT_EXIST) return current === defaultVersion;\n\n if (expected == STREAM_EXISTS) return current !== defaultVersion;\n\n return current === expected;\n};\n\nexport const assertExpectedVersionMatchesCurrent = <\n StreamVersion = BigIntStreamPosition,\n>(\n current: StreamVersion,\n expected: ExpectedStreamVersion<StreamVersion> | undefined,\n defaultVersion: StreamVersion,\n): void => {\n expected ??= NO_CONCURRENCY_CHECK;\n\n if (!matchesExpectedVersion(current, expected, defaultVersion))\n throw new ExpectedVersionConflictError(current, expected);\n};\n\nexport class ExpectedVersionConflictError<\n VersionType = BigIntStreamPosition,\n> extends ConcurrencyError {\n constructor(\n current: VersionType,\n expected: ExpectedStreamVersion<VersionType>,\n ) {\n super(current?.toString(), expected?.toString());\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ExpectedVersionConflictError.prototype);\n }\n}\n\nexport const isExpectedVersionConflictError = (\n error: unknown,\n): error is ExpectedVersionConflictError =>\n error instanceof ExpectedVersionConflictError;\n","export class ParseError extends Error {\n constructor(text: string) {\n super(`Cannot parse! 
${text}`);\n }\n}\n\nexport type Mapper<From, To = From> =\n | ((value: unknown) => To)\n | ((value: Partial<From>) => To)\n | ((value: From) => To)\n | ((value: Partial<To>) => To)\n | ((value: To) => To)\n | ((value: Partial<To | From>) => To)\n | ((value: To | From) => To);\n\nexport type MapperArgs<From, To = From> = Partial<From> &\n From &\n Partial<To> &\n To;\n\nexport type ParseOptions<From, To = From> = {\n reviver?: (key: string, value: unknown) => unknown;\n map?: Mapper<From, To>;\n typeCheck?: <To>(value: unknown) => value is To;\n};\n\nexport type StringifyOptions<From, To = From> = {\n map?: Mapper<From, To>;\n};\n\nexport const JSONParser = {\n stringify: <From, To = From>(\n value: From,\n options?: StringifyOptions<From, To>,\n ) => {\n return JSON.stringify(\n options?.map ? options.map(value as MapperArgs<From, To>) : value,\n //TODO: Consider adding support to DateTime and adding specific format to mark that's a bigint\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n (_, v) => (typeof v === 'bigint' ? v.toString() : v),\n );\n },\n parse: <From, To = From>(\n text: string,\n options?: ParseOptions<From, To>,\n ): To | undefined => {\n const parsed: unknown = JSON.parse(text, options?.reviver);\n\n if (options?.typeCheck && !options?.typeCheck<To>(parsed))\n throw new ParseError(text);\n\n return options?.map\n ? options.map(parsed as MapperArgs<From, To>)\n : (parsed as To | undefined);\n },\n};\n","import { filter } from './filter';\nimport { map } from './map';\nimport {\n notifyAboutNoActiveReadersStream,\n NotifyAboutNoActiveReadersStream,\n} from './notifyAboutNoActiveReaders';\nimport { reduce, ReduceTransformStream } from './reduce';\nimport { retryStream } from './retry';\nimport { skip, SkipTransformStream } from './skip';\nimport { stopAfter } from './stopAfter';\nimport { stopOn } from './stopOn';\nimport { take, TakeTransformStream } from './take';\nimport { waitAtMost } from './waitAtMost';\n\nexport const streamTransformations = {\n filter,\n take,\n TakeTransformStream,\n skip,\n SkipTransformStream,\n map,\n notifyAboutNoActiveReadersStream,\n NotifyAboutNoActiveReadersStream,\n reduce,\n ReduceTransformStream,\n retry: retryStream,\n stopAfter,\n stopOn,\n waitAtMost,\n};\n","import {\n JSONParser,\n NO_CONCURRENCY_CHECK,\n STREAM_DOES_NOT_EXIST,\n STREAM_EXISTS,\n type AppendToStreamOptions,\n type BeforeEventStoreCommitHandler,\n type ExpectedStreamVersion,\n type Event as Message,\n type RecordedMessage,\n} from '@event-driven-io/emmett';\nimport { v4 as uuid } from 'uuid';\nimport {\n isSQLiteError,\n type Parameters,\n type SQLiteConnection,\n type SQLiteError,\n} from '../../connection';\nimport type {\n SQLiteEventStore,\n SQLiteReadEventMetadata,\n} from '../SQLiteEventStore';\nimport { defaultTag, messagesTable, streamsTable } from './typing';\n\nexport type AppendEventResult =\n | {\n success: true;\n nextStreamPosition: bigint;\n lastGlobalPosition: bigint;\n }\n | { success: false };\n\nexport const appendToStream = async <MessageType extends Message>(\n db: SQLiteConnection,\n streamName: string,\n streamType: string,\n messages: MessageType[],\n options?: AppendToStreamOptions & {\n partition?: string;\n onBeforeCommit?: BeforeEventStoreCommitHandler<\n SQLiteEventStore,\n { db: SQLiteConnection }\n >;\n },\n): Promise<AppendEventResult> => {\n if (messages.length === 0) return { success: false };\n\n const expectedStreamVersion = toExpectedVersion(\n options?.expectedStreamVersion,\n );\n\n const messagesToAppend: 
RecordedMessage<\n MessageType,\n SQLiteReadEventMetadata\n >[] = messages.map(\n (\n m: Message,\n i: number,\n ): RecordedMessage<MessageType, SQLiteReadEventMetadata> =>\n ({\n ...m,\n kind: m.kind ?? 'Event',\n metadata: {\n streamName,\n messageId: uuid(),\n streamPosition: BigInt(i + 1),\n ...('metadata' in m ? (m.metadata ?? {}) : {}),\n },\n }) as RecordedMessage<MessageType, SQLiteReadEventMetadata>,\n );\n\n let result: AppendEventResult;\n\n return await db.withTransaction(async () => {\n result = await appendToStreamRaw(\n db,\n streamName,\n streamType,\n messagesToAppend,\n {\n expectedStreamVersion,\n },\n );\n\n if (options?.onBeforeCommit)\n await options.onBeforeCommit(messagesToAppend, { db });\n\n return result;\n });\n};\n\nconst toExpectedVersion = (\n expected: ExpectedStreamVersion | undefined,\n): bigint | null => {\n if (expected === undefined) return null;\n\n if (expected === NO_CONCURRENCY_CHECK) return null;\n\n // TODO: this needs to be fixed\n if (expected == STREAM_DOES_NOT_EXIST) return null;\n\n // TODO: this needs to be fixed\n if (expected == STREAM_EXISTS) return null;\n\n return expected as bigint;\n};\n\nconst appendToStreamRaw = async (\n db: SQLiteConnection,\n streamId: string,\n streamType: string,\n messages: RecordedMessage[],\n options?: {\n expectedStreamVersion: bigint | null;\n partition?: string;\n },\n): Promise<AppendEventResult> => {\n let streamPosition;\n let globalPosition;\n\n try {\n let expectedStreamVersion = options?.expectedStreamVersion ?? null;\n\n if (expectedStreamVersion == null) {\n expectedStreamVersion = await getLastStreamPosition(\n db,\n streamId,\n expectedStreamVersion,\n );\n }\n\n let position: { stream_position: string } | null;\n\n if (expectedStreamVersion === 0n) {\n position = await db.querySingle<{\n stream_position: string;\n } | null>(\n `INSERT INTO ${streamsTable.name}\n (stream_id, stream_position, partition, stream_type, stream_metadata, is_archived)\n VALUES (\n ?,\n ?,\n ?,\n ?,\n '[]',\n false\n )\n RETURNING stream_position;\n `,\n [\n streamId,\n messages.length,\n options?.partition ?? streamsTable.columns.partition,\n streamType,\n ],\n );\n } else {\n position = await db.querySingle<{\n stream_position: string;\n } | null>(\n `UPDATE ${streamsTable.name}\n SET stream_position = stream_position + ?\n WHERE stream_id = ?\n AND partition = ?\n AND is_archived = false\n RETURNING stream_position;\n `,\n [\n messages.length,\n streamId,\n options?.partition ?? streamsTable.columns.partition,\n ],\n );\n }\n\n if (position == null) {\n throw new Error('Could not find stream position');\n }\n\n streamPosition = BigInt(position.stream_position);\n\n if (expectedStreamVersion != null) {\n const expectedStreamPositionAfterSave =\n BigInt(expectedStreamVersion) + BigInt(messages.length);\n if (streamPosition !== expectedStreamPositionAfterSave) {\n return {\n success: false,\n };\n }\n }\n\n const { sqlString, values } = buildMessageInsertQuery(\n messages,\n expectedStreamVersion,\n streamId,\n options?.partition?.toString() ?? 
defaultTag,\n );\n\n const returningIds = await db.query<{\n global_position: string;\n } | null>(sqlString, values);\n\n if (\n returningIds.length === 0 ||\n !returningIds[returningIds.length - 1]?.global_position\n ) {\n throw new Error('Could not find global position');\n }\n\n globalPosition = BigInt(\n returningIds[returningIds.length - 1]!.global_position,\n );\n } catch (err: unknown) {\n if (isSQLiteError(err) && isOptimisticConcurrencyError(err)) {\n return {\n success: false,\n };\n }\n\n throw err;\n }\n\n return {\n success: true,\n nextStreamPosition: streamPosition,\n lastGlobalPosition: globalPosition,\n };\n};\n\nconst isOptimisticConcurrencyError = (error: SQLiteError): boolean => {\n return error?.errno !== undefined && error.errno === 19;\n};\n\nasync function getLastStreamPosition(\n db: SQLiteConnection,\n streamId: string,\n expectedStreamVersion: bigint | null,\n): Promise<bigint> {\n const result = await db.querySingle<{ stream_position: string } | null>(\n `SELECT CAST(stream_position AS VARCHAR) AS stream_position FROM ${streamsTable.name} WHERE stream_id = ?`,\n [streamId],\n );\n\n if (result?.stream_position == null) {\n expectedStreamVersion = 0n;\n } else {\n expectedStreamVersion = BigInt(result.stream_position);\n }\n return expectedStreamVersion;\n}\n\nconst buildMessageInsertQuery = (\n messages: RecordedMessage[],\n expectedStreamVersion: bigint,\n streamId: string,\n partition: string | null | undefined,\n): {\n sqlString: string;\n values: Parameters[];\n} => {\n const query = messages.reduce(\n (\n queryBuilder: { parameterMarkers: string[]; values: Parameters[] },\n message: RecordedMessage,\n ) => {\n if (\n message.metadata?.streamPosition == null ||\n typeof message.metadata.streamPosition !== 'bigint'\n ) {\n throw new Error('Stream position is required');\n }\n\n const streamPosition =\n BigInt(message.metadata.streamPosition) + BigInt(expectedStreamVersion);\n\n queryBuilder.parameterMarkers.push(`(?,?,?,?,?,?,?,?,?,?)`);\n queryBuilder.values.push(\n streamId,\n streamPosition.toString() ?? 0,\n partition ?? defaultTag,\n message.kind === 'Event' ? 'E' : 'C',\n JSONParser.stringify(message.data),\n JSONParser.stringify(message.metadata),\n expectedStreamVersion?.toString() ?? 
0,\n message.type,\n message.metadata.messageId,\n false,\n );\n\n return queryBuilder;\n },\n {\n parameterMarkers: [],\n values: [],\n },\n );\n\n const sqlString = `\n INSERT INTO ${messagesTable.name} (\n stream_id, \n stream_position, \n partition, \n message_kind,\n message_data, \n message_metadata, \n message_schema_version, \n message_type, \n message_id, \n is_archived\n ) \n VALUES ${query.parameterMarkers.join(', ')} \n RETURNING \n CAST(global_position as VARCHAR) AS global_position\n `;\n return { sqlString, values: query.values };\n};\n","export const emmettPrefix = 'emt';\n\nexport const globalTag = 'global';\nexport const defaultTag = 'emt:default';\n\nexport const globalNames = {\n module: `${emmettPrefix}:module:${globalTag}`,\n};\n\nconst columns = {\n partition: {\n name: 'partition',\n },\n isArchived: { name: 'is_archived' },\n};\n\nexport const streamsTable = {\n name: `${emmettPrefix}_streams`,\n columns: {\n partition: columns.partition,\n isArchived: columns.isArchived,\n },\n};\n\nexport const messagesTable = {\n name: `${emmettPrefix}_messages`,\n columns: {\n partition: columns.partition,\n isArchived: columns.isArchived,\n },\n};\n\nexport const subscriptionsTable = {\n name: `${emmettPrefix}_subscriptions`,\n};\n","import type { SQLiteConnection } from '../../connection';\nimport {\n globalTag,\n messagesTable,\n streamsTable,\n subscriptionsTable,\n} from './typing';\n\nexport const sql = (sql: string) => sql;\n\nexport const streamsTableSQL = sql(\n `CREATE TABLE IF NOT EXISTS ${streamsTable.name}(\n stream_id TEXT NOT NULL,\n stream_position BIGINT NOT NULL DEFAULT 0,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n stream_type TEXT NOT NULL,\n stream_metadata JSONB NOT NULL,\n is_archived BOOLEAN NOT NULL DEFAULT FALSE,\n PRIMARY KEY (stream_id, stream_position, partition, is_archived),\n UNIQUE (stream_id, partition, is_archived)\n );`,\n);\n\nexport const messagesTableSQL = sql(\n `CREATE TABLE IF NOT EXISTS ${messagesTable.name}(\n stream_id TEXT NOT NULL,\n stream_position BIGINT NOT NULL,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n message_kind CHAR(1) NOT NULL DEFAULT 'E',\n message_data JSONB NOT NULL,\n message_metadata JSONB NOT NULL,\n message_schema_version TEXT NOT NULL,\n message_type TEXT NOT NULL,\n message_id TEXT NOT NULL,\n is_archived BOOLEAN NOT NULL DEFAULT FALSE,\n global_position INTEGER PRIMARY KEY,\n created DATETIME DEFAULT CURRENT_TIMESTAMP,\n UNIQUE (stream_id, stream_position, partition, is_archived)\n ); \n`,\n);\n\nexport const subscriptionsTableSQL = sql(\n `\n CREATE TABLE IF NOT EXISTS ${subscriptionsTable.name}(\n subscription_id TEXT NOT NULL,\n version INTEGER NOT NULL DEFAULT 1,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n last_processed_position BIGINT NOT NULL,\n PRIMARY KEY (subscription_id, partition, version)\n );\n`,\n);\n\nexport const schemaSQL: string[] = [\n streamsTableSQL,\n messagesTableSQL,\n subscriptionsTableSQL,\n];\n\nexport const createEventStoreSchema = async (\n db: SQLiteConnection,\n): Promise<void> => {\n for (const sql of schemaSQL) {\n await db.command(sql);\n }\n};\n","export const singleOrNull = async <T>(\n getResult: Promise<T[]>,\n): Promise<T | null> => {\n const result = await getResult;\n\n if (result.length > 1) throw new Error('Query had more than one result');\n\n return result.length > 0 ? (result[0] ?? 
null) : null;\n};\n\nexport const single = async <T>(getResult: Promise<T[]>): Promise<T> => {\n const result = await getResult;\n\n if (result.length === 0) throw new Error(\"Query didn't return any result\");\n\n if (result.length > 1) throw new Error('Query had more than one result');\n\n return result[0]!;\n};\n","import type { SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, messagesTable } from './typing';\nimport { singleOrNull } from './utils';\n\ntype ReadLastMessageGlobalPositionSqlResult = {\n global_position: string;\n};\n\nexport type ReadLastMessageGlobalPositionResult = {\n currentGlobalPosition: bigint | null;\n};\n\nexport const readLastMessageGlobalPosition = async (\n db: SQLiteConnection,\n options?: { partition?: string },\n): Promise<ReadLastMessageGlobalPositionResult> => {\n const result = await singleOrNull(\n db.query<ReadLastMessageGlobalPositionSqlResult>(\n sql(\n `SELECT global_position\n FROM ${messagesTable.name}\n WHERE partition = ? AND is_archived = FALSE\n ORDER BY global_position\n LIMIT 1`,\n ),\n [options?.partition ?? defaultTag],\n ),\n );\n\n return {\n currentGlobalPosition:\n result !== null ? BigInt(result.global_position) : null,\n };\n};\n","import {\n JSONParser,\n type CombinedReadEventMetadata,\n type Event,\n type ReadEvent,\n type ReadEventMetadata,\n type ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, messagesTable } from './typing';\n\ntype ReadMessagesBatchSqlResult = {\n stream_position: string;\n stream_id: string;\n message_data: string;\n message_metadata: string;\n message_schema_version: string;\n message_type: string;\n message_id: string;\n global_position: string;\n transaction_id: string;\n created: string;\n};\n\nexport type ReadMessagesBatchOptions =\n | {\n after: bigint;\n batchSize: number;\n }\n | {\n from: bigint;\n batchSize: number;\n }\n | { to: bigint; batchSize: number }\n | { from: bigint; to: bigint };\n\nexport type ReadMessagesBatchResult<\n EventType extends Event,\n ReadEventMetadataType extends ReadEventMetadata = ReadEventMetadata,\n> = {\n currentGlobalPosition: bigint;\n messages: ReadEvent<EventType, ReadEventMetadataType>[];\n areEventsLeft: boolean;\n};\n\nexport const readMessagesBatch = async <\n MessageType extends Event,\n ReadEventMetadataType extends\n ReadEventMetadataWithGlobalPosition = ReadEventMetadataWithGlobalPosition,\n>(\n db: SQLiteConnection,\n options: ReadMessagesBatchOptions & { partition?: string },\n): Promise<ReadMessagesBatchResult<MessageType, ReadEventMetadataType>> => {\n const from =\n 'from' in options\n ? options.from\n : 'after' in options\n ? options.after + 1n\n : 0n;\n const batchSize =\n options && 'batchSize' in options\n ? options.batchSize\n : options.to - options.from;\n\n const fromCondition: string =\n from !== -0n ? `AND global_position >= ${from}` : '';\n\n const toCondition =\n 'to' in options ? `AND global_position <= ${options.to}` : '';\n\n const limitCondition =\n 'batchSize' in options ? `LIMIT ${options.batchSize}` : '';\n\n const events: ReadEvent<MessageType, ReadEventMetadataType>[] = (\n await db.query<ReadMessagesBatchSqlResult>(\n sql(\n `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id\n FROM ${messagesTable.name}\n WHERE partition = ? 
AND is_archived = FALSE ${fromCondition} ${toCondition}\n ORDER BY global_position\n ${limitCondition}`,\n ),\n [options?.partition ?? defaultTag],\n )\n ).map((row) => {\n const rawEvent = {\n type: row.message_type,\n data: JSONParser.parse(row.message_data),\n metadata: JSONParser.parse(row.message_metadata),\n } as unknown as MessageType;\n\n const metadata: ReadEventMetadataWithGlobalPosition = {\n ...('metadata' in rawEvent ? (rawEvent.metadata ?? {}) : {}),\n messageId: row.message_id,\n streamName: row.stream_id,\n streamPosition: BigInt(row.stream_position),\n globalPosition: BigInt(row.global_position),\n };\n\n return {\n ...rawEvent,\n kind: 'Event',\n metadata: metadata as CombinedReadEventMetadata<\n MessageType,\n ReadEventMetadataType\n >,\n };\n });\n\n return events.length > 0\n ? {\n currentGlobalPosition:\n events[events.length - 1]!.metadata.globalPosition,\n messages: events,\n areEventsLeft: events.length === batchSize,\n }\n : {\n currentGlobalPosition:\n 'from' in options\n ? options.from\n : 'after' in options\n ? options.after\n : 0n,\n messages: [],\n areEventsLeft: false,\n };\n};\n","import type { SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, subscriptionsTable } from './typing';\nimport { singleOrNull } from './utils';\n\ntype ReadProcessorCheckpointSqlResult = {\n last_processed_position: string;\n};\n\nexport type ReadProcessorCheckpointResult = {\n lastProcessedPosition: bigint | null;\n};\n\nexport const readProcessorCheckpoint = async (\n db: SQLiteConnection,\n options: { processorId: string; partition?: string },\n): Promise<ReadProcessorCheckpointResult> => {\n const result = await singleOrNull(\n db.query<ReadProcessorCheckpointSqlResult>(\n sql(\n `SELECT last_processed_position\n FROM ${subscriptionsTable.name}\n WHERE partition = ? AND subscription_id = ?\n LIMIT 1`,\n ),\n [options?.partition ?? defaultTag, options.processorId],\n ),\n );\n\n return {\n lastProcessedPosition:\n result !== null ? 
BigInt(result.last_processed_position) : null,\n };\n};\n","import type {\n EmmettError,\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../../connection';\nimport { readLastMessageGlobalPosition } from '../../schema/readLastMessageGlobalPosition';\nimport {\n readMessagesBatch,\n type ReadMessagesBatchOptions,\n} from '../../schema/readMessagesBatch';\n\nexport const DefaultSQLiteEventStoreProcessorBatchSize = 100;\nexport const DefaultSQLiteEventStoreProcessorPullingFrequencyInMs = 50;\n\nexport type SQLiteEventStoreMessagesBatch<EventType extends Event = Event> = {\n messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[];\n};\n\nexport type SQLiteEventStoreMessagesBatchHandlerResult = void | {\n type: 'STOP';\n reason?: string;\n error?: EmmettError;\n};\n\nexport type SQLiteEventStoreMessagesBatchHandler<\n EventType extends Event = Event,\n> = (\n messagesBatch: SQLiteEventStoreMessagesBatch<EventType>,\n) =>\n | Promise<SQLiteEventStoreMessagesBatchHandlerResult>\n | SQLiteEventStoreMessagesBatchHandlerResult;\n\nexport type SQLiteEventStoreMessageBatchPullerOptions<\n EventType extends Event = Event,\n> = {\n db: SQLiteConnection;\n pullingFrequencyInMs: number;\n batchSize: number;\n eachBatch: SQLiteEventStoreMessagesBatchHandler<EventType>;\n};\n\nexport type SQLiteEventStoreMessageBatchPullerStartFrom =\n | { globalPosition: bigint }\n | 'BEGINNING'\n | 'END';\n\nexport type SQLiteEventStoreMessageBatchPullerStartOptions = {\n startFrom: SQLiteEventStoreMessageBatchPullerStartFrom;\n};\n\nexport type SQLiteEventStoreMessageBatchPuller = {\n isRunning: boolean;\n start(options: SQLiteEventStoreMessageBatchPullerStartOptions): Promise<void>;\n stop(): Promise<void>;\n};\n\nexport const sqliteEventStoreMessageBatchPuller = <\n EventType extends Event = Event,\n>({\n db,\n batchSize,\n eachBatch,\n pullingFrequencyInMs,\n}: SQLiteEventStoreMessageBatchPullerOptions<EventType>): SQLiteEventStoreMessageBatchPuller => {\n let isRunning = false;\n\n let start: Promise<void>;\n\n const pullMessages = async (\n options: SQLiteEventStoreMessageBatchPullerStartOptions,\n ) => {\n const after =\n options.startFrom === 'BEGINNING'\n ? 0n\n : options.startFrom === 'END'\n ? 
((await readLastMessageGlobalPosition(db)).currentGlobalPosition ??\n 0n)\n : options.startFrom.globalPosition;\n\n const readMessagesOptions: ReadMessagesBatchOptions = {\n after,\n batchSize,\n };\n\n let waitTime = 100;\n\n do {\n const { messages, currentGlobalPosition, areEventsLeft } =\n await readMessagesBatch<EventType>(db, readMessagesOptions);\n\n if (messages.length > 0) {\n const result = await eachBatch({ messages });\n\n if (result && result.type === 'STOP') {\n isRunning = false;\n break;\n }\n }\n\n readMessagesOptions.after = currentGlobalPosition;\n\n await new Promise((resolve) => setTimeout(resolve, waitTime));\n\n if (!areEventsLeft) {\n waitTime = Math.min(waitTime * 2, 1000);\n } else {\n waitTime = pullingFrequencyInMs;\n }\n } while (isRunning);\n };\n\n return {\n get isRunning() {\n return isRunning;\n },\n start: (options) => {\n if (isRunning) return start;\n\n start = (async () => {\n isRunning = true;\n\n return pullMessages(options);\n })();\n\n return start;\n },\n stop: async () => {\n if (!isRunning) return;\n isRunning = false;\n await start;\n },\n };\n};\n\nexport const zipSQLiteEventStoreMessageBatchPullerStartFrom = (\n options: (SQLiteEventStoreMessageBatchPullerStartFrom | undefined)[],\n): SQLiteEventStoreMessageBatchPullerStartFrom => {\n if (\n options.length === 0 ||\n options.some((o) => o === undefined || o === 'BEGINNING')\n )\n return 'BEGINNING';\n\n if (options.every((o) => o === 'END')) return 'END';\n\n return options\n .filter((o) => o !== undefined && o !== 'BEGINNING' && o !== 'END')\n .sort((a, b) => (a > b ? 1 : -1))[0]!;\n};\n","import {\n EmmettError,\n type Event,\n type ReadEvent,\n type ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\nimport { sqliteConnection, type SQLiteConnection } from '../../connection';\nimport type { SQLiteProjectionDefinition } from '../projections';\nimport { readProcessorCheckpoint, storeProcessorCheckpoint } from '../schema';\nimport type { SQLiteEventStoreMessageBatchPullerStartFrom } from './messageBatchProcessing';\n\nexport type SQLiteProcessorEventsBatch<EventType extends Event = Event> = {\n messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[];\n};\n\nexport type SQLiteProcessorHandlerContext = {\n db: SQLiteConnection;\n fileName: string;\n};\n\nexport type SQLiteProcessor<EventType extends Event = Event> = {\n id: string;\n start: (\n db: SQLiteConnection,\n ) => Promise<SQLiteEventStoreMessageBatchPullerStartFrom | undefined>;\n isActive: boolean;\n handle: (\n messagesBatch: SQLiteProcessorEventsBatch<EventType>,\n context: { db?: SQLiteConnection; fileName?: string },\n ) => Promise<SQLiteProcessorMessageHandlerResult>;\n};\n\nexport const SQLiteProcessor = {\n result: {\n skip: (options?: {\n reason?: string;\n }): SQLiteProcessorMessageHandlerResult => ({\n type: 'SKIP',\n ...(options ?? {}),\n }),\n stop: (options?: {\n reason?: string;\n error?: EmmettError;\n }): SQLiteProcessorMessageHandlerResult => ({\n type: 'STOP',\n ...(options ?? 
{}),\n }),\n },\n};\n\nexport type SQLiteProcessorMessageHandlerResult =\n | void\n | { type: 'SKIP'; reason?: string }\n | { type: 'STOP'; reason?: string; error?: EmmettError };\n\nexport type SQLiteProcessorEachMessageHandler<EventType extends Event = Event> =\n (\n event: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>,\n context: SQLiteProcessorHandlerContext,\n ) =>\n | Promise<SQLiteProcessorMessageHandlerResult>\n | SQLiteProcessorMessageHandlerResult;\n\nexport type SQLiteProcessorEachBatchHandler<EventType extends Event = Event> = (\n event: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[],\n context: SQLiteProcessorHandlerContext,\n) =>\n | Promise<SQLiteProcessorMessageHandlerResult>\n | SQLiteProcessorMessageHandlerResult;\n\nexport type SQLiteProcessorStartFrom =\n | SQLiteEventStoreMessageBatchPullerStartFrom\n | 'CURRENT';\n\nexport type SQLiteProcessorConnectionOptions = {\n fileName: string;\n db?: SQLiteConnection;\n};\n\nexport type GenericSQLiteProcessorOptions<EventType extends Event = Event> = {\n processorId: string;\n version?: number;\n partition?: string;\n startFrom?: SQLiteProcessorStartFrom;\n stopAfter?: (\n message: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>,\n ) => boolean;\n eachMessage: SQLiteProcessorEachMessageHandler<EventType>;\n connectionOptions?: SQLiteProcessorConnectionOptions;\n // TODO: Add eachBatch\n};\n\nexport type SQLiteProjectionProcessorOptions<EventType extends Event = Event> =\n {\n processorId?: string;\n version?: number;\n projection: SQLiteProjectionDefinition<EventType>;\n partition?: string;\n startFrom?: SQLiteProcessorStartFrom;\n stopAfter?: (\n message: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>,\n ) => boolean;\n };\n\nexport type SQLiteProcessorOptions<EventType extends Event = Event> =\n | GenericSQLiteProcessorOptions<EventType>\n | SQLiteProjectionProcessorOptions<EventType>;\n\nconst genericSQLiteProcessor = <EventType extends Event = Event>(\n options: GenericSQLiteProcessorOptions<EventType>,\n): SQLiteProcessor => {\n const { eachMessage } = options;\n let isActive = true;\n //let lastProcessedPosition: number | null = null;\n\n const getDb = (context: {\n db?: SQLiteConnection;\n fileName?: string;\n }): { db: SQLiteConnection; fileName: string } => {\n const fileName = context.fileName ?? options.connectionOptions?.fileName;\n if (!fileName)\n throw new EmmettError(\n `SQLite processor '${options.processorId}' is missing file name. 
Ensure that you passed it through options`,\n );\n\n const db =\n context.db ??\n options.connectionOptions?.db ??\n sqliteConnection({ fileName });\n\n return { db, fileName };\n };\n\n return {\n id: options.processorId,\n start: async (\n db: SQLiteConnection,\n ): Promise<SQLiteEventStoreMessageBatchPullerStartFrom | undefined> => {\n isActive = true;\n if (options.startFrom !== 'CURRENT') return options.startFrom;\n\n const { lastProcessedPosition } = await readProcessorCheckpoint(db, {\n processorId: options.processorId,\n partition: options.partition,\n });\n\n if (lastProcessedPosition === null) return 'BEGINNING';\n\n return { globalPosition: lastProcessedPosition };\n },\n get isActive() {\n return isActive;\n },\n handle: async (\n { messages },\n context,\n ): Promise<SQLiteProcessorMessageHandlerResult> => {\n if (!isActive) return;\n\n const { db, fileName } = getDb(context);\n\n return db.withTransaction(async () => {\n let result: SQLiteProcessorMessageHandlerResult | undefined = undefined;\n\n let lastProcessedPosition: bigint | null = null;\n\n for (const message of messages) {\n const typedMessage = message as ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >;\n\n const messageProcessingResult = await eachMessage(typedMessage, {\n db,\n fileName,\n });\n\n // TODO: Add correct handling of the storing checkpoint\n await storeProcessorCheckpoint(db, {\n processorId: options.processorId,\n version: options.version,\n lastProcessedPosition,\n newPosition: typedMessage.metadata.globalPosition,\n partition: options.partition,\n });\n\n lastProcessedPosition = typedMessage.metadata.globalPosition;\n\n if (\n messageProcessingResult &&\n messageProcessingResult.type === 'STOP'\n ) {\n isActive = false;\n result = messageProcessingResult;\n break;\n }\n\n if (options.stopAfter && options.stopAfter(typedMessage)) {\n isActive = false;\n result = { type: 'STOP', reason: 'Stop condition reached' };\n break;\n }\n\n if (\n messageProcessingResult &&\n messageProcessingResult.type === 'SKIP'\n )\n continue;\n }\n return result;\n });\n },\n };\n};\n\nexport const sqliteProjectionProcessor = <EventType extends Event = Event>(\n options: SQLiteProjectionProcessorOptions<EventType>,\n): SQLiteProcessor => {\n const projection = options.projection;\n\n return genericSQLiteProcessor<EventType>({\n processorId: options.processorId ?? 
`projection:${projection.name}`,\n eachMessage: async (event, context) => {\n if (!projection.canHandle.includes(event.type)) return;\n\n await projection.handle([event], context);\n },\n ...options,\n });\n};\n\nexport const sqliteProcessor = <EventType extends Event = Event>(\n options: SQLiteProcessorOptions<EventType>,\n): SQLiteProcessor => {\n if ('projection' in options) {\n return sqliteProjectionProcessor(options);\n }\n\n return genericSQLiteProcessor(options);\n};\n","import { EmmettError, type Event } from '@event-driven-io/emmett';\nimport { sqliteConnection, type SQLiteConnection } from '../../connection';\nimport {\n DefaultSQLiteEventStoreProcessorBatchSize,\n DefaultSQLiteEventStoreProcessorPullingFrequencyInMs,\n sqliteEventStoreMessageBatchPuller,\n zipSQLiteEventStoreMessageBatchPullerStartFrom,\n type SQLiteEventStoreMessageBatchPuller,\n type SQLiteEventStoreMessagesBatchHandler,\n} from './messageBatchProcessing';\nimport {\n sqliteProcessor,\n type SQLiteProcessor,\n type SQLiteProcessorOptions,\n} from './sqliteProcessor';\n\nexport type SQLiteEventStoreConsumerConfig<\n ConsumerEventType extends Event = Event,\n> = {\n processors?: SQLiteProcessor<ConsumerEventType>[];\n pulling?: {\n batchSize?: number;\n pullingFrequencyInMs?: number;\n };\n};\nexport type SQLiteEventStoreConsumerOptions<\n ConsumerEventType extends Event = Event,\n> = SQLiteEventStoreConsumerConfig<ConsumerEventType> & {\n fileName: string;\n db?: SQLiteConnection;\n};\n\nexport type SQLiteEventStoreConsumer<ConsumerEventType extends Event = Event> =\n Readonly<{\n isRunning: boolean;\n processors: SQLiteProcessor<ConsumerEventType>[];\n processor: <EventType extends ConsumerEventType = ConsumerEventType>(\n options: SQLiteProcessorOptions<EventType>,\n ) => SQLiteProcessor<EventType>;\n start: () => Promise<void>;\n stop: () => Promise<void>;\n close: () => Promise<void>;\n }>;\n\nexport const sqliteEventStoreConsumer = <\n ConsumerEventType extends Event = Event,\n>(\n options: SQLiteEventStoreConsumerOptions<ConsumerEventType>,\n): SQLiteEventStoreConsumer<ConsumerEventType> => {\n let isRunning = false;\n const { pulling } = options;\n const processors = options.processors ?? [];\n\n let start: Promise<void>;\n\n let currentMessagePuller: SQLiteEventStoreMessageBatchPuller | undefined;\n\n const db = options.db ?? sqliteConnection({ fileName: options.fileName });\n\n const eachBatch: SQLiteEventStoreMessagesBatchHandler<\n ConsumerEventType\n > = async (messagesBatch) => {\n const activeProcessors = processors.filter((s) => s.isActive);\n\n if (activeProcessors.length === 0)\n return {\n type: 'STOP',\n reason: 'No active processors',\n };\n\n const result = await Promise.allSettled(\n activeProcessors.map((s) => {\n // TODO: Add here filtering to only pass messages that can be handled by processor\n return s.handle(messagesBatch, { db, fileName: options.fileName });\n }),\n );\n\n return result.some(\n (r) => r.status === 'fulfilled' && r.value?.type !== 'STOP',\n )\n ? undefined\n : {\n type: 'STOP',\n };\n };\n\n const messagePooler = (currentMessagePuller =\n sqliteEventStoreMessageBatchPuller({\n db,\n eachBatch,\n batchSize:\n pulling?.batchSize ?? 
DefaultSQLiteEventStoreProcessorBatchSize,\n pullingFrequencyInMs:\n pulling?.pullingFrequencyInMs ??\n DefaultSQLiteEventStoreProcessorPullingFrequencyInMs,\n }));\n\n const stop = async () => {\n if (!isRunning) return;\n isRunning = false;\n if (currentMessagePuller) {\n await currentMessagePuller.stop();\n currentMessagePuller = undefined;\n }\n await start;\n };\n\n return {\n processors,\n get isRunning() {\n return isRunning;\n },\n processor: <EventType extends ConsumerEventType = ConsumerEventType>(\n options: SQLiteProcessorOptions<EventType>,\n ): SQLiteProcessor<EventType> => {\n const processor = sqliteProcessor<EventType>(options);\n\n processors.push(processor);\n\n return processor;\n },\n start: () => {\n if (isRunning) return start;\n\n start = (async () => {\n if (processors.length === 0)\n return Promise.reject(\n new EmmettError(\n 'Cannot start consumer without at least a single processor',\n ),\n );\n\n isRunning = true;\n\n const startFrom = zipSQLiteEventStoreMessageBatchPullerStartFrom(\n await Promise.all(processors.map((o) => o.start(db))),\n );\n\n return messagePooler.start({ startFrom });\n })();\n\n return start;\n },\n stop,\n close: async () => {\n await stop();\n\n db.close();\n\n await new Promise((resolve) => setTimeout(resolve, 250));\n },\n };\n};\n","import {\n projection,\n type CanHandle,\n type Event,\n type ProjectionDefinition,\n type ProjectionHandler,\n type ReadEvent,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../connection';\nimport type { SQLiteReadEventMetadata } from '../SQLiteEventStore';\n\nexport type SQLiteProjectionHandlerContext = {\n db: SQLiteConnection;\n};\n\nexport type SQLiteProjectionHandler<\n EventType extends Event = Event,\n EventMetaDataType extends SQLiteReadEventMetadata = SQLiteReadEventMetadata,\n> = ProjectionHandler<\n EventType,\n EventMetaDataType,\n SQLiteProjectionHandlerContext\n>;\n\nexport type SQLiteProjectionDefinition<EventType extends Event = Event> =\n ProjectionDefinition<\n EventType,\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >;\n\nexport type SQLiteProjectionHandlerOptions<EventType extends Event = Event> = {\n events: ReadEvent<EventType, SQLiteReadEventMetadata>[];\n projections: SQLiteProjectionDefinition<EventType>[];\n db: SQLiteConnection;\n};\n\nexport const handleProjections = async <EventType extends Event = Event>(\n options: SQLiteProjectionHandlerOptions<EventType>,\n): Promise<void> => {\n const { projections: allProjections, events, db } = options;\n\n const eventTypes = events.map((e) => e.type);\n\n const projections = allProjections.filter((p) =>\n p.canHandle.some((type) => eventTypes.includes(type)),\n );\n\n for (const projection of projections) {\n await projection.handle(events, {\n db,\n });\n }\n};\n\nexport const sqliteProjection = <EventType extends Event>(\n definition: SQLiteProjectionDefinition<EventType>,\n): SQLiteProjectionDefinition<EventType> =>\n projection<\n EventType,\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >(definition);\n\nexport const sqliteRawBatchSQLProjection = <EventType extends Event>(\n handle: (\n events: EventType[],\n context: SQLiteProjectionHandlerContext,\n ) => Promise<string[]> | string[],\n ...canHandle: CanHandle<EventType>\n): SQLiteProjectionDefinition<EventType> =>\n sqliteProjection<EventType>({\n canHandle,\n handle: async (events, context) => {\n const sqls: string[] = await handle(events, context);\n\n for (const sql of sqls) await context.db.command(sql);\n 
},\n });\n\nexport const sqliteRawSQLProjection = <EventType extends Event>(\n handle: (\n event: EventType,\n context: SQLiteProjectionHandlerContext,\n ) => Promise<string> | string,\n ...canHandle: CanHandle<EventType>\n): SQLiteProjectionDefinition<EventType> =>\n sqliteRawBatchSQLProjection<EventType>(\n async (events, context) => {\n const sqls: string[] = [];\n\n for (const event of events) {\n sqls.push(await handle(event, context));\n }\n return sqls;\n },\n ...canHandle,\n );\n","import type {\n AppendToStreamResultWithGlobalPosition,\n BeforeEventStoreCommitHandler,\n BigIntStreamPosition,\n Event,\n ProjectionRegistration,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\n\nimport {\n assertExpectedVersionMatchesCurrent,\n ExpectedVersionConflictError,\n NO_CONCURRENCY_CHECK,\n type AggregateStreamOptions,\n type AggregateStreamResult,\n type AppendToStreamOptions,\n type EventStore,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from '@event-driven-io/emmett';\nimport {\n InMemorySharedCacheSQLiteDatabase,\n InMemorySQLiteDatabase,\n sqliteConnection,\n type SQLiteConnection,\n} from '../connection';\nimport {\n sqliteEventStoreConsumer,\n type SQLiteEventStoreConsumer,\n type SQLiteEventStoreConsumerConfig,\n} from './consumers';\nimport {\n handleProjections,\n type SQLiteProjectionHandlerContext,\n} from './projections';\nimport { createEventStoreSchema } from './schema';\nimport { appendToStream } from './schema/appendToStream';\nimport { readStream } from './schema/readStream';\n\nexport type EventHandler<E extends Event = Event> = (\n eventEnvelope: ReadEvent<E>,\n) => void;\n\nexport const SQLiteEventStoreDefaultStreamVersion = 0n;\n\nexport interface SQLiteEventStore extends EventStore<SQLiteReadEventMetadata> {\n appendToStream<EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResultWithGlobalPosition>;\n consumer<ConsumerEventType extends Event = Event>(\n options?: SQLiteEventStoreConsumerConfig<ConsumerEventType>,\n ): SQLiteEventStoreConsumer<ConsumerEventType>;\n}\n\nexport type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;\n\nexport type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<\n EventType,\n SQLiteReadEventMetadata\n>;\n\nexport type SQLiteEventStoreOptions = {\n fileName: // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n | InMemorySQLiteDatabase\n // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n | InMemorySharedCacheSQLiteDatabase\n | string\n | undefined;\n projections?: ProjectionRegistration<\n 'inline',\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >[];\n schema?: {\n autoMigration?: 'None' | 'CreateOrUpdate';\n };\n hooks?: {\n /**\n * This hook will be called **BEFORE** events were stored in the event store.\n * @type {BeforeEventStoreCommitHandler<SQLiteEventStore, HandlerContext>}\n */\n onBeforeCommit?: BeforeEventStoreCommitHandler<\n SQLiteEventStore,\n { db: SQLiteConnection }\n >;\n };\n};\n\nexport const getSQLiteEventStore = (\n options: SQLiteEventStoreOptions,\n): SQLiteEventStore => {\n let schemaMigrated = false;\n let autoGenerateSchema = false;\n let database: SQLiteConnection | null;\n const fileName = options.fileName ?? 
InMemorySQLiteDatabase;\n\n const isInMemory: boolean =\n fileName === InMemorySQLiteDatabase ||\n fileName === InMemorySharedCacheSQLiteDatabase;\n\n const inlineProjections = (options.projections ?? [])\n .filter(({ type }) => type === 'inline')\n .map(({ projection }) => projection);\n\n const onBeforeCommitHook = options.hooks?.onBeforeCommit;\n\n const createConnection = () => {\n if (database != null) {\n return database;\n }\n\n return sqliteConnection({\n fileName,\n });\n };\n\n const closeConnection = () => {\n if (isInMemory) {\n return;\n }\n if (database != null) {\n database.close();\n database = null;\n }\n };\n\n const withConnection = async <Result>(\n handler: (db: SQLiteConnection) => Promise<Result>,\n ): Promise<Result> => {\n if (database == null) {\n database = createConnection();\n }\n\n try {\n await ensureSchemaExists(database);\n return await handler(database);\n } finally {\n closeConnection();\n }\n };\n\n if (options) {\n autoGenerateSchema =\n options.schema?.autoMigration === undefined ||\n options.schema?.autoMigration !== 'None';\n }\n\n const ensureSchemaExists = async (db: SQLiteConnection): Promise<void> => {\n if (!autoGenerateSchema) return Promise.resolve();\n\n if (!schemaMigrated) {\n await createEventStoreSchema(db);\n schemaMigrated = true;\n }\n\n return Promise.resolve();\n };\n\n return {\n async aggregateStream<State, EventType extends Event>(\n streamName: string,\n options: AggregateStreamOptions<\n State,\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n ): Promise<AggregateStreamResult<State>> {\n const { evolve, initialState, read } = options;\n\n const expectedStreamVersion = read?.expectedStreamVersion;\n\n let state = initialState();\n\n if (typeof streamName !== 'string') {\n throw new Error('Stream name is not string');\n }\n\n if (database == null) {\n database = createConnection();\n }\n\n const result = await withConnection((db) =>\n readStream<EventType>(db, streamName, options.read),\n );\n\n const currentStreamVersion = result.currentStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n expectedStreamVersion,\n SQLiteEventStoreDefaultStreamVersion,\n );\n\n for (const event of result.events) {\n if (!event) continue;\n\n state = evolve(state, event);\n }\n\n return {\n currentStreamVersion: currentStreamVersion,\n state,\n streamExists: result.streamExists,\n };\n },\n\n readStream: async <EventType extends Event>(\n streamName: string,\n options?: ReadStreamOptions<BigIntStreamPosition>,\n ): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n > => withConnection((db) => readStream<EventType>(db, streamName, options)),\n\n appendToStream: async <EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResultWithGlobalPosition> => {\n if (database == null) {\n database = createConnection();\n }\n\n // TODO: This has to be smarter when we introduce urn-based resolution\n const [firstPart, ...rest] = streamName.split('-');\n\n const streamType =\n firstPart && rest.length > 0 ? 
firstPart : 'emt:unknown';\n\n const appendResult = await withConnection((db) =>\n appendToStream(db, streamName, streamType, events, {\n ...options,\n onBeforeCommit: async (messages, context) => {\n if (inlineProjections.length > 0)\n await handleProjections({\n projections: inlineProjections,\n events: messages,\n ...context,\n });\n\n if (onBeforeCommitHook) await onBeforeCommitHook(messages, context);\n },\n }),\n );\n\n if (!appendResult.success)\n throw new ExpectedVersionConflictError<bigint>(\n -1n, //TODO: Return actual version in case of error\n options?.expectedStreamVersion ?? NO_CONCURRENCY_CHECK,\n );\n\n return {\n nextExpectedStreamVersion: appendResult.nextStreamPosition,\n lastEventGlobalPosition: appendResult.lastGlobalPosition,\n createdNewStream:\n appendResult.nextStreamPosition >= BigInt(events.length),\n };\n },\n consumer: <ConsumerEventType extends Event = Event>(\n options?: SQLiteEventStoreConsumerConfig<ConsumerEventType>,\n ): SQLiteEventStoreConsumer<ConsumerEventType> =>\n sqliteEventStoreConsumer<ConsumerEventType>({\n ...(options ?? {}),\n fileName,\n db: database ?? undefined,\n }),\n };\n};\n","import {\n JSONParser,\n type CombinedReadEventMetadata,\n type Event,\n type ReadEvent,\n type ReadEventMetadataWithGlobalPosition,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from '@event-driven-io/emmett';\nimport { type SQLiteConnection } from '../../connection';\nimport { SQLiteEventStoreDefaultStreamVersion } from '../SQLiteEventStore';\nimport { defaultTag, messagesTable } from './typing';\n\ntype ReadStreamSqlResult = {\n stream_position: string;\n message_data: string;\n message_metadata: string;\n message_schema_version: string;\n message_type: string;\n message_id: string;\n global_position: string;\n created: string;\n};\n\nexport const readStream = async <EventType extends Event>(\n db: SQLiteConnection,\n streamId: string,\n options?: ReadStreamOptions & { partition?: string },\n): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n> => {\n const fromCondition: string =\n options && 'from' in options\n ? `AND stream_position >= ${options.from}`\n : '';\n\n const to = Number(\n options && 'to' in options\n ? options.to\n : options && 'maxCount' in options && options.maxCount\n ? options.from + options.maxCount\n : NaN,\n );\n\n const toCondition = !isNaN(to) ? `AND stream_position <= ${to}` : '';\n\n const results = await db.query<ReadStreamSqlResult>(\n `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id\n FROM ${messagesTable.name}\n WHERE stream_id = ? AND partition = ? AND is_archived = FALSE ${fromCondition} ${toCondition}`,\n [streamId, options?.partition ?? defaultTag],\n );\n\n const messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[] =\n results.map((row) => {\n const rawEvent = {\n type: row.message_type,\n data: JSONParser.parse(row.message_data),\n metadata: JSONParser.parse(row.message_metadata),\n } as unknown as EventType;\n\n const metadata: ReadEventMetadataWithGlobalPosition = {\n ...('metadata' in rawEvent ? (rawEvent.metadata ?? {}) : {}),\n messageId: row.message_id,\n streamName: streamId,\n streamPosition: BigInt(row.stream_position),\n globalPosition: BigInt(row.global_position),\n };\n\n return {\n ...rawEvent,\n kind: 'Event',\n metadata: metadata as CombinedReadEventMetadata<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n };\n });\n\n return messages.length > 0\n ? 
{\n currentStreamVersion:\n messages[messages.length - 1]!.metadata.streamPosition,\n events: messages,\n streamExists: true,\n }\n : {\n currentStreamVersion: SQLiteEventStoreDefaultStreamVersion,\n events: [],\n streamExists: false,\n };\n};\n","import { isSQLiteError, type SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, subscriptionsTable } from './typing';\nimport { singleOrNull } from './utils';\n\n// for more infos see the postgresql stored procedure version\nasync function storeSubscriptionCheckpointSQLite(\n db: SQLiteConnection,\n processorId: string,\n version: number,\n position: bigint | null,\n checkPosition: bigint | null,\n partition: string,\n): Promise<0 | 1 | 2> {\n if (checkPosition !== null) {\n const updateResult = await db.command(\n sql(`\n UPDATE ${subscriptionsTable.name}\n SET last_processed_position = ?\n WHERE subscription_id = ? \n AND last_processed_position = ? \n AND partition = ?\n `),\n [position!.toString(), processorId, checkPosition.toString(), partition],\n );\n if (updateResult.changes > 0) {\n return 1;\n } else {\n const current_position = await singleOrNull(\n db.query<{ last_processed_position: bigint }>(\n sql(\n `SELECT last_processed_position FROM ${subscriptionsTable.name} \n WHERE subscription_id = ? AND partition = ?`,\n ),\n [processorId, partition],\n ),\n );\n\n if (current_position?.last_processed_position === position) {\n return 0;\n } else if (\n position !== null &&\n current_position !== null &&\n current_position?.last_processed_position > position\n ) {\n return 2;\n } else {\n return 2;\n }\n }\n } else {\n try {\n await db.command(\n sql(\n `INSERT INTO ${subscriptionsTable.name} (subscription_id, version, last_processed_position, partition) VALUES (?, ?, ?, ?)`,\n ),\n [processorId, version, position!.toString(), partition],\n );\n return 1;\n } catch (err) {\n if (!(isSQLiteError(err) && (err.errno === 19 || err.errno === 2067))) {\n throw err;\n }\n\n const current = await singleOrNull(\n db.query<{ last_processed_position: bigint }>(\n sql(\n `SELECT last_processed_position FROM ${subscriptionsTable.name} WHERE subscription_id = ? AND partition = ?`,\n ),\n [processorId, partition],\n ),\n );\n if (current?.last_processed_position === position) {\n return 0;\n } else {\n return 2;\n }\n }\n }\n}\n\nexport type StoreLastProcessedProcessorPositionResult<\n Position extends bigint | null = bigint,\n> =\n | {\n success: true;\n newPosition: Position;\n }\n | { success: false; reason: 'IGNORED' | 'MISMATCH' };\n\nexport async function storeProcessorCheckpoint(\n db: SQLiteConnection,\n options: {\n processorId: string;\n version: number | undefined;\n newPosition: bigint | null;\n lastProcessedPosition: bigint | null;\n partition?: string;\n },\n): Promise<StoreLastProcessedProcessorPositionResult<bigint | null>> {\n try {\n const result = await storeSubscriptionCheckpointSQLite(\n db,\n options.processorId,\n options.version ?? 1,\n options.newPosition,\n options.lastProcessedPosition,\n options.partition ?? defaultTag,\n );\n\n return result === 1\n ? { success: true, newPosition: options.newPosition }\n : { success: false, reason: result === 0 ? 'IGNORED' : 'MISMATCH' };\n } catch (error) {\n console.log(error);\n throw error;\n }\n}\n"]}
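The removed map above embeds the full 0.35.0 sources, which spell out the public append path: getSQLiteEventStore wires up a SQLite connection (file-backed or in-memory), auto-migrates the schema on first use unless schema.autoMigration is 'None', derives stream_type from the prefix before the first '-' in the stream name, and appends through an optimistic-concurrency check. A minimal usage sketch against those signatures; the stream and event names are invented, and it assumes the package root re-exports getSQLiteEventStore and InMemorySQLiteDatabase as the source paths suggest:

import {
  getSQLiteEventStore,
  InMemorySQLiteDatabase,
} from '@event-driven-io/emmett-sqlite';

const eventStore = getSQLiteEventStore({
  fileName: InMemorySQLiteDatabase, // ':memory:'; pass a file path for a persistent store
});

// 'guest_stay' becomes stream_type: the name is split on the first '-'
const result = await eventStore.appendToStream('guest_stay-42', [
  { type: 'GuestCheckedIn', data: { guestId: '42' } }, // hypothetical event
]);

// nextExpectedStreamVersion and lastEventGlobalPosition are bigint positions;
// a failed expectedStreamVersion check throws ExpectedVersionConflictError instead.
console.log(result.nextExpectedStreamVersion, result.lastEventGlobalPosition);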
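The same sources describe the polling consumer: sqliteEventStoreMessageBatchPuller reads batches by global position (default batch size 100, polling every 50 ms, backing off up to 1 s while caught up) and fans each batch out to registered processors, which store their checkpoints in the emt_subscriptions table. A sketch of registering one, reusing eventStore from the previous snippet; the processor id and handler body are invented:

const consumer = eventStore.consumer();

consumer.processor({
  processorId: 'guest-stay-read-model', // checkpoint key in emt_subscriptions
  startFrom: 'BEGINNING', // or 'END', 'CURRENT', or { globalPosition: 42n }
  eachMessage: async (event, { db }) => {
    if (event.type !== 'GuestCheckedIn') return;
    // event.metadata carries streamName plus bigint streamPosition/globalPosition;
    // db is the consumer's SQLiteConnection, so a read model update via db.command(...) goes here
  },
});

await consumer.start(); // rejects if no processor was registered
await consumer.stop(); // or consumer.close() to also close the connection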
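Also visible in those sources: the JSONParser used for message_data and message_metadata stringifies bigint values as strings (plain JSON.stringify throws a TypeError on bigints), and its parse accepts a reviver plus optional map/typeCheck hooks. A small round-trip sketch, assuming JSONParser is exported from @event-driven-io/emmett as the emmett-sqlite sources import it:

import { JSONParser } from '@event-driven-io/emmett';

const json = JSONParser.stringify({ globalPosition: 123n });
// => '{"globalPosition":"123"}' (the bigint is written out as a string)

const parsed = JSONParser.parse<{ globalPosition: bigint }>(json, {
  // revive the stringified bigint on the way back out
  reviver: (key, value) => (key === 'globalPosition' ? BigInt(value as string) : value),
});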
|
|
+
{"version":3,"sources":["/home/runner/work/emmett/emmett/src/packages/emmett-sqlite/dist/index.cjs","../src/connection/sqliteConnection.ts","../../emmett/src/validation/index.ts","../../emmett/src/errors/index.ts","../../emmett/src/eventStore/inMemoryEventStore.ts","../../emmett/src/eventStore/subscriptions/caughtUpTransformStream.ts","../../emmett/src/eventStore/subscriptions/streamingCoordinator.ts","../../emmett/src/streaming/transformations/notifyAboutNoActiveReaders.ts","../../emmett/src/utils/retry.ts","../../emmett/src/database/inMemoryDatabase.ts","../../emmett/src/streaming/generators/fromArray.ts","../../emmett/src/streaming/restream.ts","../../emmett/src/streaming/transformations/filter.ts","../../emmett/src/streaming/transformations/map.ts","../../emmett/src/streaming/transformations/reduce.ts","../../emmett/src/streaming/transformations/retry.ts","../../emmett/src/streaming/transformations/skip.ts","../../emmett/src/streaming/transformations/stopAfter.ts","../../emmett/src/streaming/transformations/stopOn.ts","../../emmett/src/streaming/transformations/take.ts","../../emmett/src/streaming/transformations/waitAtMost.ts","../../emmett/src/eventStore/expectedVersion.ts","../../emmett/src/serialization/json/JSONParser.ts","../../emmett/src/streaming/transformations/index.ts","../src/eventStore/schema/appendToStream.ts","../src/eventStore/schema/typing.ts","../src/eventStore/schema/tables.ts","../src/eventStore/schema/utils.ts","../src/eventStore/schema/readLastMessageGlobalPosition.ts","../src/eventStore/schema/readMessagesBatch.ts","../src/eventStore/schema/readProcessorCheckpoint.ts","../src/eventStore/consumers/messageBatchProcessing/index.ts","../src/eventStore/consumers/sqliteProcessor.ts","../src/eventStore/consumers/sqliteEventStoreConsumer.ts","../src/eventStore/projections/index.ts","../src/eventStore/SQLiteEventStore.ts","../src/eventStore/schema/readStream.ts","../src/eventStore/schema/storeProcessorCheckpoint.ts"],"names":["sql"],"mappings":"AAAA;ACAA,oFAAoB;AAgBb,IAAM,cAAA,EAAgB,CAAC,KAAA,EAAA,GAAyC;AACrE,EAAA,GAAA,CAAI,MAAA,WAAiB,MAAA,GAAS,OAAA,GAAU,KAAA,EAAO;AAC7C,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA;AACT,CAAA;AAGO,IAAM,kCAAA,EAAoC,4BAAA;AAE1C,IAAM,uBAAA,EAAyB,UAAA;AAO/B,IAAM,iBAAA,EAAmB,CAC9B,OAAA,EAAA,GACqB;AACrB,EAAA,MAAM,SAAA,mBAAW,OAAA,CAAQ,QAAA,UAAY,wBAAA;AACrC,EAAA,IAAI,EAAA;AAEJ,EAAA,GAAA,CAAI,QAAA,CAAS,UAAA,CAAW,OAAO,CAAA,EAAG;AAChC,IAAA,GAAA,EAAK,IAAI,iBAAA,CAAQ,QAAA;AAAA,MACf,QAAA;AAAA,MACA,iBAAA,CAAQ,SAAA,EAAW,iBAAA,CAAQ,eAAA,EAAiB,iBAAA,CAAQ;AAAA,IACtD,CAAA;AAAA,EACF,EAAA,KAAO;AACL,IAAA,GAAA,EAAK,IAAI,iBAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA;AAAA,EACpC;AACA,EAAA,EAAA,CAAG,GAAA,CAAI,4BAA4B,CAAA;AACnC,EAAA,IAAI,mBAAA,EAAqB,CAAA;AAEzB,EAAA,OAAO;AAAA,IACL,KAAA,EAAO,CAAA,EAAA,GAAY,EAAA,CAAG,KAAA,CAAM,CAAA;AAAA,IAC5B,OAAA,EAAS,CAACA,IAAAA,EAAa,MAAA,EAAA,GACrB,IAAI,OAAA,CAA2B,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAClD,MAAA,EAAA,CAAG,GAAA;AAAA,QACDA,IAAAA;AAAA,yBACA,MAAA,UAAU,CAAC,GAAA;AAAA,QACX,QAAA,CAAmC,GAAA,EAAmB;AACpD,UAAA,GAAA,CAAI,GAAA,EAAK;AACP,YAAA,MAAA,CAAO,GAAG,CAAA;AACV,YAAA,MAAA;AAAA,UACF;AAEA,UAAA,OAAA,CAAQ,IAAI,CAAA;AAAA,QACd;AAAA,MACF,CAAA;AAAA,IACF,CAAC,CAAA;AAAA,IACH,KAAA,EAAO,CAAIA,IAAAA,EAAa,MAAA,EAAA,GACtB,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAmB,MAAA,EAAA,GAAgB;AAC5D,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MAChB,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,IACH,WAAA,EAAa,CAAIA,IAAAA,EAAa,MAAA,EAAA,
GAC5B,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AAC/B,MAAA,EAAA,CAAG,GAAA,CAAIA,IAAAA,mBAAK,MAAA,UAAU,CAAC,GAAA,EAAG,CAAC,GAAA,EAAmB,MAAA,EAAA,GAAqB;AACjE,QAAA,GAAA,CAAI,GAAA,EAAK;AACP,UAAA,MAAA,CAAO,GAAG,CAAA;AACV,UAAA,MAAA;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,MAAM,CAAA;AAAA,MAChB,CAAC,CAAA;AAAA,IACH,CAAC,CAAA;AAAA,IACH,eAAA,EAAiB,MAAA,CAAU,EAAA,EAAA,GAAyB;AAClD,MAAA,IAAI;AACF,QAAA,GAAA,CAAI,kBAAA,GAAA,GAAwB,CAAA,EAAG;AAC7B,UAAA,MAAM,gBAAA,CAAiB,EAAE,CAAA;AAAA,QAC3B;AACA,QAAA,MAAM,OAAA,EAAS,MAAM,EAAA,CAAG,CAAA;AAExB,QAAA,GAAA,CAAI,mBAAA,IAAuB,CAAA,EAAG,MAAM,iBAAA,CAAkB,EAAE,CAAA;AACxD,QAAA,kBAAA,EAAA;AAEA,QAAA,OAAO,MAAA;AAAA,MACT,EAAA,MAAA,CAAS,GAAA,EAAK;AACZ,QAAA,OAAA,CAAQ,GAAA,CAAI,GAAG,CAAA;AAEf,QAAA,GAAA,CAAI,EAAE,mBAAA,IAAuB,CAAA,EAAG,MAAM,mBAAA,CAAoB,EAAE,CAAA;AAE5D,QAAA,MAAM,GAAA;AAAA,MACR;AAAA,IACF;AAAA,EACF,CAAA;AACF,CAAA;AAEA,IAAM,iBAAA,EAAmB,CAAC,EAAA,EAAA,GACxB,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AACrC,EAAA,EAAA,CAAG,GAAA,CAAI,6BAAA,EAA+B,CAAC,GAAA,EAAA,GAAsB;AAC3D,IAAA,GAAA,CAAI,GAAA,EAAK;AACP,MAAA,MAAA,CAAO,GAAG,CAAA;AACV,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,CAAA;AAAA,EACV,CAAC,CAAA;AACH,CAAC,CAAA;AAEH,IAAM,kBAAA,EAAoB,CAAC,EAAA,EAAA,GACzB,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AACrC,EAAA,EAAA,CAAG,GAAA,CAAI,QAAA,EAAU,CAAC,GAAA,EAAA,GAAsB;AACtC,IAAA,GAAA,CAAI,GAAA,EAAK;AACP,MAAA,MAAA,CAAO,GAAG,CAAA;AACV,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,CAAA;AAAA,EACV,CAAC,CAAA;AACH,CAAC,CAAA;AAEH,IAAM,oBAAA,EAAsB,CAAC,EAAA,EAAA,GAC3B,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,EAAA,GAAW;AACrC,EAAA,EAAA,CAAG,GAAA,CAAI,UAAA,EAAY,CAAC,GAAA,EAAA,GAAsB;AACxC,IAAA,GAAA,CAAI,GAAA,EAAK;AACP,MAAA,MAAA,CAAO,GAAG,CAAA;AACV,MAAA,MAAA;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,CAAA;AAAA,EACV,CAAC,CAAA;AACH,CAAC,CAAA;AD9CH;AACA;AE7FO,IAAM,SAAA,EAAW,CAAC,GAAA,EAAA,GACvB,OAAO,IAAA,IAAQ,SAAA,GAAY,IAAA,IAAQ,GAAA;AAE9B,IAAM,SAAA,EAAW,CAAC,GAAA,EAAA,GACvB,OAAO,IAAA,IAAQ,QAAA;ACQV,IAAM,YAAA,EAAN,MAAM,aAAA,QAAoB,MAAM;AHqFvC,EGpFS;AHqFT,EGnFE,WAAA,CACE,OAAA,EACA;AACA,IAAA,MAAM,UAAA,EACJ,QAAA,GAAW,OAAO,QAAA,IAAY,SAAA,GAAY,YAAA,GAAe,QAAA,EACrD,OAAA,CAAQ,UAAA,EACR,QAAA,CAAS,OAAO,EAAA,EACd,QAAA,EACA,GAAA;AACR,IAAA,MAAM,QAAA,EACJ,QAAA,GAAW,OAAO,QAAA,IAAY,SAAA,GAAY,UAAA,GAAa,QAAA,EACnD,OAAA,CAAQ,QAAA,EACR,QAAA,CAAS,OAAO,EAAA,EACd,QAAA,EACA,CAAA,wBAAA,EAA2B,SAAS,CAAA,kCAAA,CAAA;AAE5C,IAAA,KAAA,CAAM,OAAO,CAAA;AACb,IAAA,IAAA,CAAK,UAAA,EAAY,SAAA;AAGjB,IAAA,MAAA,CAAO,cAAA,CAAe,IAAA,EAAM,YAAA,CAAY,SAAS,CAAA;AHqErD,EGpEE;AACF,CAAA;AAEO,IAAM,iBAAA,EAAN,MAAM,kBAAA,QAAyB,YAAY;AHoElD,EGnEE,WAAA,CACS,OAAA,EACA,QAAA,EACP,OAAA,EACA;AACA,IAAA,KAAA,CAAM;AHgEV,MG/DM,SAAA,EAAW,GAAA;AHgEjB,MG/DM,OAAA,mBACE,OAAA,UACA,CAAA,iBAAA,EAAoB,QAAA,CAAS,QAAA,CAAS,CAAC,CAAA,wBAAA,kBAA2B,OAAA,6BAAS,QAAA,mBAAS,GAAC,CAAA;AH8D7F,IAAA;AGtEW,IAAA;AACA,IAAA;AAWP,IAAA;AH8DJ,EAAA;AG5DA;AH8DA;AACA;AI9HA;ACAA;ACAA;ACAA;AACA;ACDA;ACAA;ACAA;ACAA;ACAA;ACAA;ACAA;ACAA;Af4IA;AACA;AgB7IA;ACAA;ACAA;ACAA;ACAA;ACeO;AACA;AAEA;AAGA;AAKL,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AACF;AAEO;AAOL,EAAA;AAEA,EAAA;AACE,IAAA;AACJ;AAEO;ArBkHP,EAAA;AqB3GI,IAAA;AAGA,IAAA;ArB2GJ,EAAA;AqBzGA;AdzDO;AAOA;AP+JP,EAAA;AO9II,IAAA;APgJJ,MAAA;AO9IQ,QAAA;AACA,QAAA;APgJR,MAAA;AACA,IAAA;AOzJY,IAAA;AAWR,IAAA;AAEA,IAAA;AAEA,IAAA;AP+IJ,EAAA;AACA,iBAAA;AACA,EAAA;AACA,kBAAA;AACA,EAAA;AOtKI,IAAA;APwKJ,EAAA;AACA,EAAA;AOlJI,IAAA;AACE,MAAA;APoJN,IAAA;AACA,EAAA;AACA,EAAA;AOjJI,IAAA;AAEA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;APkJJ,EAAA;AACA,EAAA;AO/II,IAAA;AACE,MAAA;APiJN,IAAA;AACA,EAAA;AO/IA;ACpDO;AAIL,EAAA;AAEA,EAAA;ARkMF,IAAA;AQhMM,MAAA;AACE,QAAA;ARkMR,MAAA;AQhMQ,QAAA;AACE,UAAA;ARkMV,QAAA;AQhMQ,QAAA;ARkMR,MAAA;AACA,
IAAA;AACA,qBAAA;AACA,EAAA;AQhMA;Ac3BO;AtB8NP,EAAA;AsB5NI,IAAA;AtB8NJ,EAAA;AsB5NA;AA0BO;AtBqMP,EAAA;AsBhMI,IAAA;AtBkMJ,sBAAA;AACA;AACA;AACA,MAAA;AACA,IAAA;AACA,EAAA;AACA,EAAA;AsB7LI,IAAA;AAEA,IAAA;AACE,MAAA;AAEF,IAAA;AtB6LJ,EAAA;AsBzLA;AVrDO;AZiPP,EAAA;AY9OM,IAAA;AACE,MAAA;AZgPR,IAAA;AACA,EAAA;AY9OE;ACPK;AbwPP,EAAA;AarPM,IAAA;AbuPN,EAAA;AarPE;ACLK;AAKA;AdyPP,EAAA;AACA,EAAA;AACA,EAAA;ActPI,IAAA;AdwPJ,MAAA;ActPQ,QAAA;AdwPR,MAAA;AACA,MAAA;ActPQ,QAAA;AACA,QAAA;AdwPR,MAAA;AACA,IAAA;AcrPI,IAAA;AACA,IAAA;AduPJ,EAAA;AcrPA;ACjBO;AfyQP,EAAA;Ae3PM,IAAA;Af6PN,MAAA;AACA,MAAA;AACA,IAAA;Ae3PQ,MAAA;Af6PR,IAAA;AACA,EAAA;Ae3PE;AAEF;AAQE,EAAA;AACA,EAAA;AAEA,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AACA,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AfiPR,MAAA;AACA,IAAA;AACA,EAAA;Ae/OI,IAAA;AfiPJ,EAAA;Ae/OA;ACxDO;AAEA;AhBySP,kBAAA;AACA,EAAA;AACA,EAAA;AgBtSI,IAAA;AhBwSJ,MAAA;AgBtSQ,QAAA;AACA,QAAA;AACE,UAAA;AhBwSV,QAAA;AACA,MAAA;AACA,IAAA;AgBrSI,IAAA;AhBuSJ,EAAA;AgBrSA;AClBO;AjB0TP,EAAA;AiBvTM,IAAA;AAEA,IAAA;AACE,MAAA;AjBwTR,IAAA;AACA,EAAA;AiBtTE;ACTK;AlBkUP,EAAA;AkB/TM,IAAA;AACE,MAAA;AACA,MAAA;AlBiUR,IAAA;AkB/TM,IAAA;AACA,IAAA;AlBiUN,EAAA;AkB/TE;ACVK;AAEA;AnB2UP,kBAAA;AACA,EAAA;AACA,EAAA;AmBxUI,IAAA;AnB0UJ,MAAA;AmBxUQ,QAAA;AACE,UAAA;AACA,UAAA;AnB0UV,QAAA;AmBxUU,UAAA;AnB0UV,QAAA;AACA,MAAA;AACA,IAAA;AmBvUI,IAAA;AnByUJ,EAAA;AmBvUA;ACpBO;ApB8VP,EAAA;AoB3VM,IAAA;AACE,MAAA;ApB6VR,IAAA;AoB1VM,IAAA;AAGA,IAAA;AACE,MAAA;AACA,MAAA;ApB0VR,IAAA;AACA,EAAA;AACA,EAAA;AoBxVM,IAAA;ApB0VN,EAAA;AoBxVE;AGNK;AvBiWP,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AuB/VA;AZnBA;AXqXA;AACA;AwBrXA;AxBuXA;AACA;AyBnYO;AAEA;AACA;AAEA;AAAoB,EAAA;AAE3B;AAEA;AAAgB,EAAA;AACH,IAAA;AACH,EAAA;AACR,EAAA;AAEF;AAEO;AAAqB,EAAA;AACL,EAAA;AACZ,IAAA;AACY,IAAA;AACC,EAAA;AAExB;AAEO;AAAsB,EAAA;AACN,EAAA;AACZ,IAAA;AACY,IAAA;AACC,EAAA;AAExB;AAEO;AAA2B,EAAA;AAElC;AzB+XA;AACA;AwBlYO;AAaL,EAAA;AAEA,EAAA;AAA8B,oBAAA;AACnB,EAAA;AAGX,EAAA;AAGe,IAAA;AAKV,MAAA;AACI,MAAA;AACa,MAAA;AACN,QAAA;AACR,QAAA;AACgB,QAAA;AACY,QAAA;AACgB,MAAA;AAC9C,IAAA;AACF,EAAA;AAGJ,EAAA;AAEA,EAAA;AACE,IAAA;AAAe,MAAA;AACb,MAAA;AACA,MAAA;AACA,MAAA;AACA,MAAA;AACA,QAAA;AACE,MAAA;AACF,IAAA;AAGF,IAAA;AACE,MAAA;AAEF,IAAA;AAAO,EAAA;AAEX;AAEA;AAGE,EAAA;AAEA,EAAA;AAGA,EAAA;AAGA,EAAA;AAEA,EAAA;AACF;AAEA;AAUE,EAAA;AACA,EAAA;AAEA,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AAA8B,QAAA;AAC5B,QAAA;AACA,QAAA;AACA,MAAA;AACF,IAAA;AAGF,IAAA;AAEA,IAAA;AACE,MAAA;AAAoB,QAAA;AAGc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAAA;AAAA,QAAA;AAYhC,UAAA;AACE,UAAA;AACS,2CAAA;AACkC,UAAA;AAC3C,QAAA;AACF,MAAA;AACF,IAAA;AAEA,MAAA;AAAoB,QAAA;AAGS;AAAA;AAAA;AAAA;AAAA;AAAA,UAAA;AAAA,QAAA;AAO3B,UAAA;AACW,UAAA;AACT,2CAAA;AAC2C,QAAA;AAC7C,MAAA;AACF,IAAA;AAGF,IAAA;AACE,MAAA;AAAgD,IAAA;AAGlD,IAAA;AAEA,IAAA;AACE,MAAA;AAEA,MAAA;AACE,QAAA;AAAO,UAAA;AACI,QAAA;AACX,MAAA;AACF,IAAA;AAGF,IAAA;AAA8B,MAAA;AAC5B,MAAA;AACA,MAAA;AACA,uCAAA;AACkC,IAAA;AAGpC,IAAA;AAIA,IAAA;AAIE,MAAA;AAAgD,IAAA;AAGlD,IAAA;AAAiB,MAAA;AACwB,IAAA;AACzC,EAAA;AAEA,IAAA;AACE,MAAA;AAAO,QAAA;AACI,MAAA;AACX,IAAA;AAGF,IAAA;AAAM,EAAA;AAGR,EAAA;AAAO,IAAA;AACI,IAAA;AACW,IAAA;AACA,EAAA;AAExB;AAEA;AACE,EAAA;AACF;AAEA;AAKE,EAAA;AAAwB,IAAA;AAC8D,IAAA;AAC3E,EAAA;AAGX,EAAA;AACE,IAAA;AAAwB,EAAA;AAExB,IAAA;AAAqD,EAAA;AAEvD,EAAA;AACF;AAEA;AASE,EAAA;AAAuB,IAAA;AAKnB,MAAA;AAIE,QAAA;AAA6C,MAAA;AAG/C,MAAA;AAGA,MAAA;AACA,MAAA;AAAoB,QAAA;AAClB,yBAAA;AAC6B,yBAAA;AAChB,QAAA;AACoB,QAAA;AACA,QAAA;AACI,yCAAA;AACA,QAAA;AAC7B,QAAA;AACS,QAAA;AACjB,MAAA;AAGF,MAAA;AAAO,IAAA;AACT,IAAA;AACA,MAAA;AACqB,MAAA;AACV,IAAA;AACX,EAAA;AAGF,EAAA;AAAkB
,kBAAA;AACkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAA;AAYU;AAAA;AAAA,IAAA;AAI9C,EAAA;AACF;AxBsSA;AACA;A0B1lBO;AAEA;AAAwB,EAAA;AACkB;AAAA;AAAA,oEAAA;AAG8B;AAAA;AAAA;AAAA;AAAA;AAAA,IAAA;AAO/E;AAEO;AAAyB,EAAA;AACkB;AAAA;AAAA,yEAAA;AAGkC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAapF;AAEO;AAA8B,EAAA;AACnC,6BAAA;AACoD;AAAA;AAAA,+EAAA;AAGoC;AAAA;AAAA;AAAA;AAK1F;AAEO;AAA4B,EAAA;AACjC,EAAA;AACA,EAAA;AAEF;AAEO;AAGL,EAAA;AACE,IAAA;AAAoB,EAAA;AAExB;A1BqlBA;AACA;A2BxpBO;AAGL,EAAA;AAEA,EAAA;AAEA,EAAA;AACF;A3BspBA;AACA;A4BlpBO;AAIL,EAAA;AAAqB,IAAA;AAChB,MAAA;AACD,QAAA;AACE,cAAA;AAC0B;AAAA;AAAA,gBAAA;AAAA,MAAA;AAI5B,MAAA;AACiC,IAAA;AACnC,EAAA;AAGF,EAAA;AAAO,IAAA;AAEgD,EAAA;AAEzD;A5B+oBA;AACA;A6BpoBO;AAQL,EAAA;AAMA,EAAA;AAKA,EAAA;AAGA,EAAA;AAGA,EAAA;AAGA,EAAA;AACW,IAAA;AACP,MAAA;AACE,gBAAA;AAC4B,uDAAA;AACiD;AAAA,WAAA;AAE1D,IAAA;AACrB,IAAA;AACiC,EAAA;AAGnC,IAAA;AAAiB,MAAA;AACL,MAAA;AAC6B,MAAA;AACQ,IAAA;AAGjD,IAAA;AAAsD,MAAA;AACM,MAAA;AAC3C,MAAA;AACC,MAAA;AAC0B,MAAA;AACA,IAAA;AAG5C,IAAA;AAAO,MAAA;AACF,MAAA;AACG,MAAA;AACN,IAAA;AAIF,EAAA;AAGF,EAAA;AACI,IAAA;AAEwC,IAAA;AAC5B,IAAA;AACuB,EAAA;AAEnC,IAAA;AAMU,IAAA;AACG,IAAA;AACI,EAAA;AAEvB;A7BgmBA;AACA;A8BntBO;AAIL,EAAA;AAAqB,IAAA;AAChB,MAAA;AACD,QAAA;AACE,gBAAA;AACiC;AAAA,kBAAA;AAAA,MAAA;AAGnC,MAAA;AACsD,IAAA;AACxD,EAAA;AAGF,EAAA;AAAO,IAAA;AAEwD,EAAA;AAEjE;A9BgtBA;AACA;A+BruBO;AACA;AA4CA;AAEL,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAEF;AACE,EAAA;AAEA,EAAA;AAEA,EAAA;AAGE,IAAA;AAQA,IAAA;AAAsD,MAAA;AACpD,MAAA;AACA,IAAA;AAGF,IAAA;AAEA,IAAA;AACE,MAAA;AAGA,MAAA;AACE,QAAA;AAEA,QAAA;AACE,UAAA;AACA,UAAA;AAAA,QAAA;AACF,MAAA;AAGF,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAAsC,MAAA;AAEtC,QAAA;AAAW,MAAA;AACb,IAAA;AACO,EAAA;AAGX,EAAA;AAAO,IAAA;AAEH,MAAA;AAAO,IAAA;AACT,IAAA;AAEE,MAAA;AAEA,MAAA;AACE,QAAA;AAEA,QAAA;AAA2B,MAAA;AAG7B,MAAA;AAAO,IAAA;AACT,IAAA;AAEE,MAAA;AACA,MAAA;AACA,MAAA;AAAM,IAAA;AACR,EAAA;AAEJ;AAEO;AAGL,EAAA;AAIE,IAAA;AAEF,EAAA;AAEA,EAAA;AAGF;A/BypBA;AACA;AgCpsBA;AAGE,EAAA;AACA,EAAA;AAGA,EAAA;AAIE,IAAA;AACA,IAAA;AACE,MAAA;AAAU,QAAA;AACgC,MAAA;AAG5C,IAAA;AAKA,IAAA;AAAsB,EAAA;AAGxB,EAAA;AAAO,IAAA;AACO,IAAA;AAIV,MAAA;AACA,MAAA;AAEA,MAAA;AAAoE,QAAA;AAC7C,QAAA;AACF,MAAA;AAGrB,MAAA;AAEA,MAAA;AAA+C,IAAA;AACjD,IAAA;AAEE,MAAA;AAAO,IAAA;AACT,IAAA;AAKE,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAEA,QAAA;AAEA,QAAA;AACE,UAAA;AAKA,UAAA;AAAgE,YAAA;AAC9D,YAAA;AACA,UAAA;AAIF,UAAA;AAAmC,YAAA;AACZ,YAAA;AACJ,YAAA;AACjB,YAAA;AACmC,YAAA;AAChB,UAAA;AAGrB,UAAA;AAEA,UAAA;AAIE,YAAA;AACA,YAAA;AACA,YAAA;AAAA,UAAA;AAGF,UAAA;AACE,YAAA;AACA,YAAA;AACA,YAAA;AAAA,UAAA;AAGF,UAAA;AAIE,YAAA;AAAA,QAAA;AAEJ,QAAA;AAAO,MAAA;AACR,IAAA;AACH,EAAA;AAEJ;AAEO;AAGL,EAAA;AAEA,EAAA;AAAyC,IAAA;AAC0B,IAAA;AAE/D,MAAA;AAEA,MAAA;AAAwC,IAAA;AAC1C,IAAA;AACG,EAAA;AAEP;AAEO;AAGL,EAAA;AACE,IAAA;AAAwC,EAAA;AAG1C,EAAA;AACF;AhCopBA;AACA;AiCx1BO;AAKL,EAAA;AACA,EAAA;AACA,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AAEA,EAAA;AAGE,IAAA;AAEA,IAAA;AACE,MAAA;AAAO,QAAA;AACC,QAAA;AACE,MAAA;AAGZ,IAAA;AAA6B,MAAA;AAGzB,QAAA;AAAiE,MAAA;AAClE,IAAA;AAGH,IAAA;AAAc,MAAA;AACyC,IAAA;AAGnD,MAAA;AACQ,IAAA;AACR,EAAA;AAGN,EAAA;AACqC,IAAA;AACjC,IAAA;AACA,IAAA;AAEwB,IAAA;AAGtB,EAAA;AAGN,EAAA;AACE,IAAA;AACA,IAAA;AACA,IAAA;AACE,MAAA;AACA,MAAA;AAAuB,IAAA;AAEzB,IAAA;AAAM,EAAA;AAGR,EAAA;AAAO,IAAA;AACL,IAAA;AAEE,MAAA;AAAO,IAAA;AACT,IAAA;AAIE,MAAA;AAEA,MAAA;AAEA,MAAA;AAAO,IAAA;AACT,IAAA;AAEE,MAAA;AAEA,MAAA;AACE,QAAA;AACE,UAAA;AAAe,YAAA;AACT,cAAA;AACF,YAAA;AACF,UAAA;AAGJ,QAAA;AAEA,QAAA;AAAkB,UAAA;AACoC,QAAA;AAGtD,QAAA;AAAwC,MAAA;AAG1C,MAAA;AAAO,IAAA;AACT,IAAA;AACA,IAAA;AAEE,MAAA;AAEA,MAAA;AAEA,MAAA;AAAuD,IAAA;AACzD,EAAA;AAEJ;AjCwzBA;AACA;AkC56BO;AAGL
,EAAA;AAEA,EAAA;AAEA,EAAA;AAAmC,IAAA;AACmB,EAAA;AAGtD,EAAA;AACE,IAAA;AAAgC,MAAA;AAC9B,IAAA;AACD,EAAA;AAEL;AlCy6BA;AACA;AmCn7BO;AA+CA;AAGL,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAEA,EAAA;AAIA,EAAA;AAIA,EAAA;AAEA,EAAA;AACE,IAAA;AACE,MAAA;AAAO,IAAA;AAGT,IAAA;AAAwB,MAAA;AACtB,IAAA;AACD,EAAA;AAGH,EAAA;AACE,IAAA;AACE,MAAA;AAAA,IAAA;AAEF,IAAA;AACE,MAAA;AACA,MAAA;AAAW,IAAA;AACb,EAAA;AAGF,EAAA;AAGE,IAAA;AACE,MAAA;AAA4B,IAAA;AAG9B,IAAA;AACE,MAAA;AACA,MAAA;AAA6B,IAAA;AAE7B,MAAA;AAAgB,IAAA;AAClB,EAAA;AAGF,EAAA;AACE,IAAA;AAEoC,EAAA;AAGtC,EAAA;AACE,IAAA;AAEA,IAAA;AACE,MAAA;AACA,MAAA;AAAiB,IAAA;AAGnB,IAAA;AAAuB,EAAA;AAGzB,EAAA;AAAO,IAAA;AASH,MAAA;AAEA,MAAA;AAEA,MAAA;AAEA,MAAA;AACE,QAAA;AAA2C,MAAA;AAG7C,MAAA;AACE,QAAA;AAA4B,MAAA;AAG9B,MAAA;AAAqB,QAAA;AAC+B,MAAA;AAGpD,MAAA;AAEA,MAAA;AAAA,QAAA;AACE,QAAA;AACA,QAAA;AACA,MAAA;AAGF,MAAA;AACE,QAAA;AAEA,QAAA;AAA2B,MAAA;AAG7B,MAAA;AAAO,QAAA;AACL,QAAA;AACA,QAAA;AACqB,MAAA;AACvB,IAAA;AACF,IAAA;AAO0E,IAAA;AAOxE,MAAA;AACE,QAAA;AAA4B,MAAA;AAI9B,MAAA;AAEA,MAAA;AAGA,MAAA;AAA2B,QAAA;AAC0B,UAAA;AAC9C,UAAA;AAED,YAAA;AACE,cAAA;AAAwB,gBAAA;AACT,gBAAA;AACL,gBAAA;AACL,cAAA;AAGP,YAAA;AAAkE,UAAA;AACpE,QAAA;AACD,MAAA;AAGH,MAAA;AACE,QAAA;AAAU,UAAA;AACP;AAAA,2CAAA;AACiC,QAAA;AAGtC,MAAA;AAAO,QAAA;AACmC,QAAA;AACF,QAAA;AAEmB,MAAA;AAC3D,IAAA;AACF,IAAA;AAI8C,MAAA;AAC1B,MAAA;AAChB,MAAA;AACgB,IAAA;AACjB,EAAA;AAEP;AnCy0BA;AACA;AoC/jCO;AAOL,EAAA;AAKA,EAAA;AAAW,IAAA;AAKH,EAAA;AAGR,EAAA;AAEA,EAAA;AAAyB,IAAA;AACvB,gBAAA;AACgC,yEAAA;AACmE,IAAA;AACxD,EAAA;AAG7C,EAAA;AAEI,IAAA;AAAiB,MAAA;AACL,MAAA;AAC6B,MAAA;AACQ,IAAA;AAGjD,IAAA;AAAsD,MAAA;AACM,MAAA;AAC3C,MAAA;AACH,MAAA;AAC8B,MAAA;AACA,IAAA;AAG5C,IAAA;AAAO,MAAA;AACF,MAAA;AACG,MAAA;AACN,IAAA;AAIF,EAAA;AAGJ,EAAA;AACI,IAAA;AAE4C,IAAA;AAClC,IAAA;AACM,EAAA;AAEhB,IAAA;AACwB,IAAA;AACb,IAAA;AACK,EAAA;AAEtB;ApCsiCA;AACA;AqC5nCA;AAQE,EAAA;AACE,IAAA;AAA8B,MAAA;AACxB,iBAAA;AACgC;AAAA;AAAA;AAAA;AAAA,QAAA;AAKjC,MAAA;AACoE,IAAA;AAEzE,IAAA;AACE,MAAA;AAAO,IAAA;AAEP,MAAA;AAA+B,QAAA;AAC1B,UAAA;AACD,YAAA;AACgE,0DAAA;AAAA,UAAA;AAEhE,UAAA;AACuB,QAAA;AACzB,MAAA;AAGF,MAAA;AACE,QAAA;AAAO,MAAA;AAMP,QAAA;AAAO,MAAA;AAEP,QAAA;AAAO,MAAA;AACT,IAAA;AACF,EAAA;AAEA,IAAA;AACE,MAAA;AAAS,QAAA;AACP,UAAA;AACwC,QAAA;AACxC,QAAA;AACsD,MAAA;AAExD,MAAA;AAAO,IAAA;AAEP,MAAA;AACE,QAAA;AAAM,MAAA;AAGR,MAAA;AAAsB,QAAA;AACjB,UAAA;AACD,YAAA;AACgE,UAAA;AAChE,UAAA;AACuB,QAAA;AACzB,MAAA;AAEF,MAAA;AACE,QAAA;AAAO,MAAA;AAEP,QAAA;AAAO,MAAA;AACT,IAAA;AACF,EAAA;AAEJ;AAWA;AAUE,EAAA;AACE,IAAA;AAAqB,MAAA;AACnB,MAAA;AACQ,uBAAA;AACW,MAAA;AACX,MAAA;AACA,uBAAA;AACa,IAAA;AAGvB,IAAA;AAEoE,EAAA;AAEpE,IAAA;AACA,IAAA;AAAM,EAAA;AAEV;ArC2lCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/emmett/emmett/src/packages/emmett-sqlite/dist/index.cjs","sourcesContent":[null,"import sqlite3 from 'sqlite3';\n\nexport type Parameters = object | string | bigint | number | boolean | null;\n\nexport type SQLiteConnection = {\n close: () => void;\n command: (sql: string, values?: Parameters[]) => Promise<sqlite3.RunResult>;\n query: <T>(sql: string, values?: Parameters[]) => Promise<T[]>;\n querySingle: <T>(sql: string, values?: Parameters[]) => Promise<T | null>;\n withTransaction: <T>(fn: () => Promise<T>) => Promise<T>;\n};\n\nexport interface SQLiteError extends Error {\n errno: number;\n}\n\nexport const isSQLiteError = (error: unknown): error is SQLiteError => {\n if (error instanceof Error && 'code' in error) {\n return true;\n }\n\n return false;\n};\n\nexport type InMemorySharedCacheSQLiteDatabase = 
'file::memory:?cache=shared';\nexport const InMemorySharedCacheSQLiteDatabase = 'file::memory:?cache=shared';\nexport type InMemorySQLiteDatabase = ':memory:';\nexport const InMemorySQLiteDatabase = ':memory:';\n\ntype SQLiteConnectionOptions = {\n // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n fileName: InMemorySQLiteDatabase | string | undefined;\n};\n\nexport const sqliteConnection = (\n options: SQLiteConnectionOptions,\n): SQLiteConnection => {\n const fileName = options.fileName ?? InMemorySQLiteDatabase;\n let db: sqlite3.Database;\n\n if (fileName.startsWith('file:')) {\n db = new sqlite3.Database(\n fileName,\n sqlite3.OPEN_URI | sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE,\n );\n } else {\n db = new sqlite3.Database(fileName);\n }\n db.run('PRAGMA journal_mode = WAL;');\n let transactionNesting = 0;\n\n return {\n close: (): void => db.close(),\n command: (sql: string, params?: Parameters[]) =>\n new Promise<sqlite3.RunResult>((resolve, reject) => {\n db.run(\n sql,\n params ?? [],\n function (this: sqlite3.RunResult, err: Error | null) {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(this);\n },\n );\n }),\n query: <T>(sql: string, params?: Parameters[]): Promise<T[]> =>\n new Promise((resolve, reject) => {\n db.all(sql, params ?? [], (err: Error | null, result: T[]) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(result);\n });\n }),\n querySingle: <T>(sql: string, params?: Parameters[]): Promise<T | null> =>\n new Promise((resolve, reject) => {\n db.get(sql, params ?? [], (err: Error | null, result: T | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(result);\n });\n }),\n withTransaction: async <T>(fn: () => Promise<T>) => {\n try {\n if (transactionNesting++ == 0) {\n await beginTransaction(db);\n }\n const result = await fn();\n\n if (transactionNesting === 1) await commitTransaction(db);\n transactionNesting--;\n\n return result;\n } catch (err) {\n console.log(err);\n\n if (--transactionNesting === 0) await rollbackTransaction(db);\n\n throw err;\n }\n },\n };\n};\n\nconst beginTransaction = (db: sqlite3.Database) =>\n new Promise<void>((resolve, reject) => {\n db.run('BEGIN IMMEDIATE TRANSACTION', (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n });\n\nconst commitTransaction = (db: sqlite3.Database) =>\n new Promise<void>((resolve, reject) => {\n db.run('COMMIT', (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n });\n\nconst rollbackTransaction = (db: sqlite3.Database) =>\n new Promise<void>((resolve, reject) => {\n db.run('ROLLBACK', (err: Error | null) => {\n if (err) {\n reject(err);\n return;\n }\n\n resolve();\n });\n });\n","import { ValidationError } from '../errors';\n\nexport const enum ValidationErrors {\n NOT_A_NONEMPTY_STRING = 'NOT_A_NONEMPTY_STRING',\n NOT_A_POSITIVE_NUMBER = 'NOT_A_POSITIVE_NUMBER',\n NOT_AN_UNSIGNED_BIGINT = 'NOT_AN_UNSIGNED_BIGINT',\n}\n\nexport const isNumber = (val: unknown): val is number =>\n typeof val === 'number' && val === val;\n\nexport const isString = (val: unknown): val is string =>\n typeof val === 'string';\n\nexport const assertNotEmptyString = (value: unknown): string => {\n if (!isString(value) || value.length === 0) {\n throw new ValidationError(ValidationErrors.NOT_A_NONEMPTY_STRING);\n }\n return value;\n};\n\nexport const assertPositiveNumber = (value: unknown): number => {\n if (!isNumber(value) || value <= 0) {\n throw new 
ValidationError(ValidationErrors.NOT_A_POSITIVE_NUMBER);\n }\n return value;\n};\n\nexport const assertUnsignedBigInt = (value: string): bigint => {\n const number = BigInt(value);\n if (number < 0) {\n throw new ValidationError(ValidationErrors.NOT_AN_UNSIGNED_BIGINT);\n }\n return number;\n};\n\nexport * from './dates';\n","import { isNumber, isString } from '../validation';\n\nexport type ErrorConstructor<ErrorType extends Error> = new (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ...args: any[]\n) => ErrorType;\n\nexport const isErrorConstructor = <ErrorType extends Error>(\n // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type\n expect: Function,\n): expect is ErrorConstructor<ErrorType> => {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n return (\n typeof expect === 'function' &&\n expect.prototype &&\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n expect.prototype.constructor === expect\n );\n};\n\nexport class EmmettError extends Error {\n public errorCode: number;\n\n constructor(\n options?: { errorCode: number; message?: string } | string | number,\n ) {\n const errorCode =\n options && typeof options === 'object' && 'errorCode' in options\n ? options.errorCode\n : isNumber(options)\n ? options\n : 500;\n const message =\n options && typeof options === 'object' && 'message' in options\n ? options.message\n : isString(options)\n ? options\n : `Error with status code '${errorCode}' ocurred during Emmett processing`;\n\n super(message);\n this.errorCode = errorCode;\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, EmmettError.prototype);\n }\n}\n\nexport class ConcurrencyError extends EmmettError {\n constructor(\n public current: string | undefined,\n public expected: string,\n message?: string,\n ) {\n super({\n errorCode: 412,\n message:\n message ??\n `Expected version ${expected.toString()} does not match current ${current?.toString()}`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ConcurrencyError.prototype);\n }\n}\n\nexport class ConcurrencyInMemoryDatabaseError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 412,\n message: message ?? `Expected document state does not match current one!`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ConcurrencyInMemoryDatabaseError.prototype);\n }\n}\n\nexport class ValidationError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 400,\n message: message ?? `Validation Error ocurred during Emmett processing`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ValidationError.prototype);\n }\n}\n\nexport class IllegalStateError extends EmmettError {\n constructor(message?: string) {\n super({\n errorCode: 403,\n message: message ?? `Illegal State ocurred during Emmett processing`,\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, IllegalStateError.prototype);\n }\n}\n\nexport class NotFoundError extends EmmettError {\n constructor(options?: { id: string; type: string; message?: string }) {\n super({\n errorCode: 404,\n message:\n options?.message ??\n (options?.id\n ? options.type\n ? `${options.type} with ${options.id} was not found during Emmett processing`\n : `State with ${options.id} was not found during Emmett processing`\n : options?.type\n ? 
`${options.type} was not found during Emmett processing`\n : 'State was not found during Emmett processing'),\n });\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, NotFoundError.prototype);\n }\n}\n","import { v4 as uuid } from 'uuid';\nimport type {\n BigIntStreamPosition,\n CombinedReadEventMetadata,\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../typing';\nimport { tryPublishMessagesAfterCommit } from './afterCommit';\nimport {\n type AggregateStreamOptions,\n type AggregateStreamResult,\n type AppendToStreamOptions,\n type AppendToStreamResult,\n type DefaultEventStoreOptions,\n type EventStore,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from './eventStore';\nimport { assertExpectedVersionMatchesCurrent } from './expectedVersion';\nimport { StreamingCoordinator } from './subscriptions';\nimport type { ProjectionRegistration } from '../projections';\n\nexport const InMemoryEventStoreDefaultStreamVersion = 0n;\n\nexport type InMemoryEventStore =\n EventStore<ReadEventMetadataWithGlobalPosition>;\n\nexport type InMemoryReadEventMetadata = ReadEventMetadataWithGlobalPosition;\n\nexport type InMemoryProjectionHandlerContext = {\n eventStore: InMemoryEventStore;\n};\n\nexport type InMemoryEventStoreOptions =\n DefaultEventStoreOptions<InMemoryEventStore> & {\n projections?: ProjectionRegistration<\n 'inline',\n InMemoryReadEventMetadata,\n InMemoryProjectionHandlerContext\n >[];\n };\n\nexport type InMemoryReadEvent<EventType extends Event = Event> = ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n>;\n\nexport const getInMemoryEventStore = (\n eventStoreOptions?: InMemoryEventStoreOptions,\n): InMemoryEventStore => {\n const streams = new Map<\n string,\n ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[]\n >();\n const streamingCoordinator = StreamingCoordinator();\n\n const getAllEventsCount = () => {\n return Array.from<ReadEvent[]>(streams.values())\n .map((s) => s.length)\n .reduce((p, c) => p + c, 0);\n };\n\n const _inlineProjections = (eventStoreOptions?.projections ?? [])\n .filter(({ type }) => type === 'inline')\n .map(({ projection }) => projection);\n\n return {\n async aggregateStream<State, EventType extends Event>(\n streamName: string,\n options: AggregateStreamOptions<\n State,\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n ): Promise<AggregateStreamResult<State>> {\n const { evolve, initialState, read } = options;\n\n const result = await this.readStream<EventType>(streamName, read);\n\n const events = result?.events ?? [];\n\n return {\n currentStreamVersion: BigInt(events.length),\n state: events.reduce(evolve, initialState()),\n streamExists: result.streamExists,\n };\n },\n\n readStream: <EventType extends Event>(\n streamName: string,\n options?: ReadStreamOptions<BigIntStreamPosition>,\n ): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n > => {\n const events = streams.get(streamName);\n const currentStreamVersion = events\n ? BigInt(events.length)\n : InMemoryEventStoreDefaultStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n options?.expectedStreamVersion,\n InMemoryEventStoreDefaultStreamVersion,\n );\n\n const from = Number(options && 'from' in options ? options.from : 0);\n const to = Number(\n options && 'to' in options\n ? options.to\n : options && 'maxCount' in options && options.maxCount\n ? options.from + options.maxCount\n : (events?.length ?? 
1),\n );\n\n const resultEvents =\n events !== undefined && events.length > 0\n ? events\n .map(\n (e) =>\n e as ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n )\n .slice(from, to)\n : [];\n\n const result: ReadStreamResult<\n EventType,\n ReadEventMetadataWithGlobalPosition\n > = {\n currentStreamVersion,\n events: resultEvents,\n streamExists: events !== undefined && events.length > 0,\n };\n\n return Promise.resolve(result);\n },\n\n appendToStream: async <EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResult> => {\n const currentEvents = streams.get(streamName) ?? [];\n const currentStreamVersion =\n currentEvents.length > 0\n ? BigInt(currentEvents.length)\n : InMemoryEventStoreDefaultStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n options?.expectedStreamVersion,\n InMemoryEventStoreDefaultStreamVersion,\n );\n\n const newEvents: ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >[] = events.map((event, index) => {\n const metadata: ReadEventMetadataWithGlobalPosition = {\n streamName,\n messageId: uuid(),\n streamPosition: BigInt(currentEvents.length + index + 1),\n globalPosition: BigInt(getAllEventsCount() + index + 1),\n };\n return {\n ...event,\n kind: event.kind ?? 'Event',\n metadata: {\n ...('metadata' in event ? (event.metadata ?? {}) : {}),\n ...metadata,\n } as CombinedReadEventMetadata<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n };\n });\n\n const positionOfLastEventInTheStream = BigInt(\n newEvents.slice(-1)[0]!.metadata.streamPosition,\n );\n\n streams.set(streamName, [...currentEvents, ...newEvents]);\n await streamingCoordinator.notify(newEvents);\n\n const result: AppendToStreamResult = {\n nextExpectedStreamVersion: positionOfLastEventInTheStream,\n createdNewStream:\n currentStreamVersion === InMemoryEventStoreDefaultStreamVersion,\n };\n\n await tryPublishMessagesAfterCommit<InMemoryEventStore>(\n newEvents,\n eventStoreOptions?.hooks,\n );\n\n return result;\n },\n\n //streamEvents: streamingCoordinator.stream,\n };\n};\n","import { TransformStream } from 'web-streams-polyfill';\nimport type {\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../../typing';\nimport { globalStreamCaughtUp, type GlobalSubscriptionEvent } from '../events';\n\nexport const streamTrackingGlobalPosition = (\n currentEvents: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[],\n) => new CaughtUpTransformStream(currentEvents);\n\nexport class CaughtUpTransformStream extends TransformStream<\n ReadEvent<Event, ReadEventMetadataWithGlobalPosition>,\n | ReadEvent<Event, ReadEventMetadataWithGlobalPosition>\n | GlobalSubscriptionEvent\n> {\n private _currentPosition: bigint;\n private _logPosition: bigint;\n\n constructor(events: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[]) {\n super({\n start: (controller) => {\n let globalPosition = 0n;\n for (const event of events) {\n controller.enqueue(event);\n globalPosition = event.metadata.globalPosition;\n }\n controller.enqueue(globalStreamCaughtUp({ globalPosition }));\n },\n transform: (event, controller) => {\n this._currentPosition = event.metadata.globalPosition;\n controller.enqueue(event);\n\n if (this._currentPosition < this._logPosition) return;\n\n controller.enqueue(\n globalStreamCaughtUp({ globalPosition: this._currentPosition }),\n );\n },\n });\n\n this._currentPosition = this._logPosition =\n events.length > 0\n ? 
events[events.length - 1]!.metadata.globalPosition\n : 0n;\n }\n\n public set logPosition(value: bigint) {\n this._logPosition = value;\n }\n}\n","import { v4 as uuid } from 'uuid';\nimport { notifyAboutNoActiveReadersStream } from '../../streaming/transformations/notifyAboutNoActiveReaders';\nimport { writeToStream } from '../../streaming/writers';\nimport type {\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '../../typing';\nimport {\n CaughtUpTransformStream,\n streamTrackingGlobalPosition,\n} from './caughtUpTransformStream';\n\nexport const StreamingCoordinator = () => {\n const allEvents: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[] = [];\n const listeners = new Map<string, CaughtUpTransformStream>();\n\n return {\n notify: async (\n events: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>[],\n ) => {\n if (events.length === 0) return;\n\n allEvents.push(...events);\n\n for (const listener of listeners.values()) {\n listener.logPosition =\n events[events.length - 1]!.metadata.globalPosition;\n\n await writeToStream(listener, events);\n }\n },\n\n stream: () => {\n const streamId = uuid();\n const transformStream = streamTrackingGlobalPosition(allEvents);\n\n listeners.set(streamId, transformStream);\n return transformStream.readable.pipeThrough(\n notifyAboutNoActiveReadersStream(\n (stream) => {\n if (listeners.has(stream.streamId))\n listeners.delete(stream.streamId);\n },\n { streamId },\n ),\n );\n },\n };\n};\n","import { v4 as uuid } from 'uuid';\nimport { TransformStream } from 'web-streams-polyfill';\n\nexport const notifyAboutNoActiveReadersStream = <Item>(\n onNoActiveReaderCallback: (\n stream: NotifyAboutNoActiveReadersStream<Item>,\n ) => void,\n options: { streamId?: string; intervalCheckInMs?: number } = {},\n) => new NotifyAboutNoActiveReadersStream(onNoActiveReaderCallback, options);\n\nexport class NotifyAboutNoActiveReadersStream<Item> extends TransformStream<\n Item,\n Item\n> {\n private checkInterval: NodeJS.Timeout | null = null;\n public readonly streamId: string;\n private _isStopped: boolean = false;\n public get hasActiveSubscribers() {\n return !this._isStopped;\n }\n\n constructor(\n private onNoActiveReaderCallback: (\n stream: NotifyAboutNoActiveReadersStream<Item>,\n ) => void,\n options: { streamId?: string; intervalCheckInMs?: number } = {},\n ) {\n super({\n cancel: (reason) => {\n console.log('Stream was canceled. Reason:', reason);\n this.stopChecking();\n },\n });\n this.streamId = options?.streamId ?? uuid();\n\n this.onNoActiveReaderCallback = onNoActiveReaderCallback;\n\n this.startChecking(options?.intervalCheckInMs ?? 
20);\n }\n\n private startChecking(interval: number) {\n this.checkInterval = setInterval(() => {\n this.checkNoActiveReader();\n }, interval);\n }\n\n private stopChecking() {\n if (!this.checkInterval) return;\n\n clearInterval(this.checkInterval);\n this.checkInterval = null;\n this._isStopped = true;\n this.onNoActiveReaderCallback(this);\n }\n\n private checkNoActiveReader() {\n if (!this.readable.locked && !this._isStopped) {\n this.stopChecking();\n }\n }\n}\n","import retry from 'async-retry';\n\nexport type AsyncRetryOptions = retry.Options & {\n shouldRetryError?: (error: unknown) => boolean;\n};\n\nexport const NoRetries: AsyncRetryOptions = { retries: 0 };\n\nexport const asyncRetry = async <T>(\n fn: () => Promise<T>,\n opts?: AsyncRetryOptions,\n): Promise<T> => {\n if (opts === undefined || opts.retries === 0) return fn();\n\n return retry(\n async (bail) => {\n try {\n return await fn();\n } catch (error) {\n if (opts?.shouldRetryError && !opts.shouldRetryError(error)) {\n bail(error as Error);\n }\n throw error;\n }\n },\n opts ?? { retries: 0 },\n );\n};\n","import { v7 as uuid } from 'uuid';\nimport { deepEquals } from '../utils';\nimport {\n type DeleteResult,\n type Document,\n type DocumentHandler,\n type HandleOptionErrors,\n type HandleOptions,\n type HandleResult,\n type InsertOneResult,\n type OptionalUnlessRequiredIdAndVersion,\n type ReplaceOneOptions,\n type UpdateResult,\n type WithoutId,\n type WithIdAndVersion,\n} from './types';\nimport { expectedVersionValue, operationResult } from './utils';\n\nexport interface DocumentsCollection<T extends Document> {\n handle: (\n id: string,\n handle: DocumentHandler<T>,\n options?: HandleOptions,\n ) => HandleResult<T>;\n findOne: (predicate?: Predicate<T>) => T | null;\n find: (predicate?: Predicate<T>) => T[];\n insertOne: (\n document: OptionalUnlessRequiredIdAndVersion<T>,\n ) => InsertOneResult;\n deleteOne: (predicate?: Predicate<T>) => DeleteResult;\n replaceOne: (\n predicate: Predicate<T>,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ) => UpdateResult;\n}\n\nexport interface Database {\n collection: <T extends Document>(name: string) => DocumentsCollection<T>;\n}\n\ntype Predicate<T> = (item: T) => boolean;\ntype CollectionName = string;\n\nexport const getInMemoryDatabase = (): Database => {\n const storage = new Map<CollectionName, WithIdAndVersion<Document>[]>();\n\n return {\n collection: <T extends Document, CollectionName extends string>(\n collectionName: CollectionName,\n collectionOptions: {\n errors?: HandleOptionErrors;\n } = {},\n ): DocumentsCollection<T> => {\n const ensureCollectionCreated = () => {\n if (!storage.has(collectionName)) storage.set(collectionName, []);\n };\n\n const errors = collectionOptions.errors;\n\n const collection = {\n collectionName,\n insertOne: (\n document: OptionalUnlessRequiredIdAndVersion<T>,\n ): InsertOneResult => {\n ensureCollectionCreated();\n\n const _id = (document._id as string | undefined | null) ?? uuid();\n const _version = document._version ?? 
1n;\n\n const existing = collection.findOne((c) => c._id === _id);\n\n if (existing) {\n return operationResult<InsertOneResult>(\n {\n successful: false,\n insertedId: null,\n nextExpectedVersion: _version,\n },\n { operationName: 'insertOne', collectionName, errors },\n );\n }\n\n const documentsInCollection = storage.get(collectionName)!;\n const newDocument = { ...document, _id, _version };\n const newCollection = [...documentsInCollection, newDocument];\n storage.set(collectionName, newCollection);\n\n return operationResult<InsertOneResult>(\n {\n successful: true,\n insertedId: _id,\n nextExpectedVersion: _version,\n },\n { operationName: 'insertOne', collectionName, errors },\n );\n },\n findOne: (predicate?: Predicate<T>): T | null => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName);\n const filteredDocuments = predicate\n ? documentsInCollection?.filter((doc) => predicate(doc as T))\n : documentsInCollection;\n\n const firstOne = filteredDocuments?.[0] ?? null;\n\n return firstOne as T | null;\n },\n find: (predicate?: Predicate<T>): T[] => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName);\n const filteredDocuments = predicate\n ? documentsInCollection?.filter((doc) => predicate(doc as T))\n : documentsInCollection;\n\n return filteredDocuments as T[];\n },\n deleteOne: (predicate?: Predicate<T>): DeleteResult => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName)!;\n\n if (predicate) {\n const foundIndex = documentsInCollection.findIndex((doc) =>\n predicate(doc as T),\n );\n\n if (foundIndex === -1) {\n return operationResult<DeleteResult>(\n {\n successful: false,\n matchedCount: 0,\n deletedCount: 0,\n },\n { operationName: 'deleteOne', collectionName, errors },\n );\n } else {\n const newCollection = documentsInCollection.toSpliced(\n foundIndex,\n 1,\n );\n\n storage.set(collectionName, newCollection);\n\n return operationResult<DeleteResult>(\n {\n successful: true,\n matchedCount: 1,\n deletedCount: 1,\n },\n { operationName: 'deleteOne', collectionName, errors },\n );\n }\n }\n\n const newCollection = documentsInCollection.slice(1);\n\n storage.set(collectionName, newCollection);\n\n return operationResult<DeleteResult>(\n {\n successful: true,\n matchedCount: 1,\n deletedCount: 1,\n },\n { operationName: 'deleteOne', collectionName, errors },\n );\n },\n replaceOne: (\n predicate: Predicate<T>,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): UpdateResult => {\n ensureCollectionCreated();\n\n const documentsInCollection = storage.get(collectionName)!;\n\n const foundIndexes = documentsInCollection\n .filter((doc) => predicate(doc as T))\n .map((_, index) => index);\n\n const firstIndex = foundIndexes[0];\n\n if (firstIndex === undefined || firstIndex === -1) {\n return operationResult<UpdateResult>(\n {\n successful: false,\n matchedCount: 0,\n modifiedCount: 0,\n nextExpectedVersion: 0n,\n },\n { operationName: 'replaceOne', collectionName, errors },\n );\n }\n\n const existing = documentsInCollection[firstIndex]!;\n\n if (\n typeof options?.expectedVersion === 'bigint' &&\n existing._version !== options.expectedVersion\n ) {\n return operationResult<UpdateResult>(\n {\n successful: false,\n matchedCount: 1,\n modifiedCount: 0,\n nextExpectedVersion: existing._version,\n },\n { operationName: 'replaceOne', collectionName, errors },\n );\n }\n\n const newVersion = existing._version + 1n;\n\n const newCollection = 
documentsInCollection.with(firstIndex, {\n _id: existing._id,\n ...document,\n _version: newVersion,\n });\n\n storage.set(collectionName, newCollection);\n\n return operationResult<UpdateResult>(\n {\n successful: true,\n modifiedCount: 1,\n matchedCount: foundIndexes.length,\n nextExpectedVersion: newVersion,\n },\n { operationName: 'replaceOne', collectionName, errors },\n );\n },\n handle: (\n id: string,\n handle: DocumentHandler<T>,\n options?: HandleOptions,\n ): HandleResult<T> => {\n const { expectedVersion: version, ...operationOptions } =\n options ?? {};\n ensureCollectionCreated();\n const existing = collection.findOne(({ _id }) => _id === id);\n\n const expectedVersion = expectedVersionValue(version);\n\n if (\n (existing == null && version === 'DOCUMENT_EXISTS') ||\n (existing == null && expectedVersion != null) ||\n (existing != null && version === 'DOCUMENT_DOES_NOT_EXIST') ||\n (existing != null &&\n expectedVersion !== null &&\n existing._version !== expectedVersion)\n ) {\n return operationResult<HandleResult<T>>(\n {\n successful: false,\n document: existing as WithIdAndVersion<T>,\n },\n { operationName: 'handle', collectionName, errors },\n );\n }\n\n const result = handle(existing !== null ? { ...existing } : null);\n\n if (deepEquals(existing, result))\n return operationResult<HandleResult<T>>(\n {\n successful: true,\n document: existing as WithIdAndVersion<T>,\n },\n { operationName: 'handle', collectionName, errors },\n );\n\n if (!existing && result) {\n const newDoc = { ...result, _id: id };\n const insertResult = collection.insertOne({\n ...newDoc,\n _id: id,\n } as OptionalUnlessRequiredIdAndVersion<T>);\n return {\n ...insertResult,\n document: {\n ...newDoc,\n _version: insertResult.nextExpectedVersion,\n } as unknown as WithIdAndVersion<T>,\n };\n }\n\n if (existing && !result) {\n const deleteResult = collection.deleteOne(({ _id }) => id === _id);\n return { ...deleteResult, document: null };\n }\n\n if (existing && result) {\n const replaceResult = collection.replaceOne(\n ({ _id }) => id === _id,\n result,\n {\n ...operationOptions,\n expectedVersion: expectedVersion ?? 
'DOCUMENT_EXISTS',\n },\n );\n return {\n ...replaceResult,\n document: {\n ...result,\n _version: replaceResult.nextExpectedVersion,\n } as unknown as WithIdAndVersion<T>,\n };\n }\n\n return operationResult<HandleResult<T>>(\n {\n successful: true,\n document: existing as WithIdAndVersion<T>,\n },\n { operationName: 'handle', collectionName, errors },\n );\n },\n };\n\n return collection;\n },\n };\n};\n","import { ReadableStream } from 'web-streams-polyfill';\n\nexport const fromArray = <T>(chunks: T[]) =>\n new ReadableStream<T>({\n start(controller) {\n for (const chunk of chunks) controller.enqueue(chunk);\n controller.close();\n },\n });\n","import {\n type ReadableStream,\n type ReadableStreamDefaultReadResult,\n type TransformStreamDefaultController,\n} from 'web-streams-polyfill';\nimport type { AsyncRetryOptions } from '../utils';\nimport type { Decoder } from './decoders';\nimport { DefaultDecoder } from './decoders/composite';\nimport { streamTransformations } from './transformations';\n\nconst { retry } = streamTransformations;\n\nexport const restream = <\n Source = unknown,\n Transformed = Source,\n StreamType = Source,\n>(\n createSourceStream: () => ReadableStream<StreamType>,\n transform: (input: Source) => Transformed = (source) =>\n source as unknown as Transformed,\n retryOptions: AsyncRetryOptions = { forever: true, minTimeout: 25 },\n decoder: Decoder<StreamType, Source> = new DefaultDecoder<Source>(),\n): ReadableStream<Transformed> =>\n retry(createSourceStream, handleChunk(transform, decoder), retryOptions)\n .readable;\n\nconst handleChunk =\n <Source = unknown, Transformed = Source, StreamType = Source>(\n transform: (input: Source) => Transformed = (source) =>\n source as unknown as Transformed,\n decoder: Decoder<StreamType, Source> = new DefaultDecoder<Source>(),\n ) =>\n (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ): void => {\n const { done: isDone, value } = readResult;\n\n if (value) decoder.addToBuffer(value);\n\n if (!isDone && !decoder.hasCompleteMessage()) return;\n\n decodeAndTransform(decoder, transform, controller);\n };\n\nconst decodeAndTransform = <StreamType, Source, Transformed = Source>(\n decoder: Decoder<StreamType, Source>,\n transform: (input: Source) => Transformed,\n controller: TransformStreamDefaultController<Transformed>,\n) => {\n try {\n const decoded = decoder.decode();\n if (!decoded) return; // TODO: Add a proper handling of decode errors\n\n const transformed = transform(decoded);\n controller.enqueue(transformed);\n } catch (error) {\n controller.error(new Error(`Decoding error: ${error?.toString()}`));\n }\n};\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const filter = <Item>(filter: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n transform(chunk, controller) {\n if (filter(chunk)) {\n controller.enqueue(chunk);\n }\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const map = <From, To>(map: (item: From) => To) =>\n new TransformStream<From, To>({\n transform(chunk, controller) {\n controller.enqueue(map(chunk));\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const reduce = <I, O>(\n reducer: (accumulator: O, chunk: I) => O,\n initialValue: O,\n) => new ReduceTransformStream<I, O>(reducer, initialValue);\n\nexport class ReduceTransformStream<I, O> extends TransformStream<I, O> {\n private accumulator: O;\n private reducer: (accumulator: O, 
chunk: I) => O;\n\n constructor(reducer: (accumulator: O, chunk: I) => O, initialValue: O) {\n super({\n transform: (chunk) => {\n this.accumulator = this.reducer(this.accumulator, chunk);\n },\n flush: (controller) => {\n controller.enqueue(this.accumulator);\n controller.terminate();\n },\n });\n\n this.accumulator = initialValue;\n this.reducer = reducer;\n }\n}\n","import {\n type ReadableStream,\n type ReadableStreamDefaultReadResult,\n TransformStream,\n type TransformStreamDefaultController,\n} from 'web-streams-polyfill';\nimport { type AsyncRetryOptions, asyncRetry } from '../../utils';\n\nexport const retryStream = <\n Source = unknown,\n Transformed = Source,\n StreamType = Source,\n>(\n createSourceStream: () => ReadableStream<StreamType>,\n handleChunk: (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ) => Promise<void> | void,\n retryOptions: AsyncRetryOptions = { forever: true, minTimeout: 25 },\n): TransformStream<Source, Transformed> =>\n new TransformStream<Source, Transformed>({\n start(controller) {\n asyncRetry(\n () => onRestream(createSourceStream, handleChunk, controller),\n retryOptions,\n ).catch((error) => {\n controller.error(error);\n });\n },\n });\n\nconst onRestream = async <StreamType, Source, Transformed = Source>(\n createSourceStream: () => ReadableStream<StreamType>,\n handleChunk: (\n readResult: ReadableStreamDefaultReadResult<StreamType>,\n controller: TransformStreamDefaultController<Transformed>,\n ) => Promise<void> | void,\n controller: TransformStreamDefaultController<Transformed>,\n): Promise<void> => {\n const sourceStream = createSourceStream();\n const reader = sourceStream.getReader();\n\n try {\n let done: boolean;\n\n do {\n const result = await reader.read();\n done = result.done;\n\n await handleChunk(result, controller);\n\n if (done) {\n controller.terminate();\n }\n } while (!done);\n } finally {\n reader.releaseLock();\n }\n};\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const skip = <T>(limit: number) => new SkipTransformStream<T>(limit);\n\nexport class SkipTransformStream<T> extends TransformStream<T, T> {\n private count = 0;\n private skip: number;\n\n constructor(skip: number) {\n super({\n transform: (chunk, controller) => {\n this.count++;\n if (this.count > this.skip) {\n controller.enqueue(chunk);\n }\n },\n });\n\n this.skip = skip;\n }\n}\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const stopAfter = <Item>(stopCondition: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n transform(chunk, controller) {\n controller.enqueue(chunk);\n\n if (stopCondition(chunk)) {\n controller.terminate();\n }\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const stopOn = <Item>(stopCondition: (item: Item) => boolean) =>\n new TransformStream<Item, Item>({\n async transform(chunk, controller) {\n if (!stopCondition(chunk)) {\n controller.enqueue(chunk);\n return;\n }\n await Promise.resolve();\n controller.terminate();\n },\n });\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const take = <T>(limit: number) => new TakeTransformStream<T>(limit);\n\nexport class TakeTransformStream<T> extends TransformStream<T, T> {\n private count = 0;\n private limit: number;\n\n constructor(limit: number) {\n super({\n transform: (chunk, controller) => {\n if (this.count < this.limit) {\n this.count++;\n controller.enqueue(chunk);\n } else {\n 
controller.terminate();\n }\n },\n });\n\n this.limit = limit;\n }\n}\n","import { TransformStream } from 'web-streams-polyfill';\n\nexport const waitAtMost = <Item>(waitTimeInMs: number) =>\n new TransformStream<Item, Item>({\n start(controller) {\n const timeoutId = setTimeout(() => {\n controller.terminate();\n }, waitTimeInMs);\n\n const originalTerminate = controller.terminate.bind(controller);\n\n // Clear the timeout if the stream is terminated early\n controller.terminate = () => {\n clearTimeout(timeoutId);\n originalTerminate();\n };\n },\n transform(chunk, controller) {\n controller.enqueue(chunk);\n },\n });\n","import { ConcurrencyError } from '../errors';\nimport type { BigIntStreamPosition, Flavour } from '../typing';\n\nexport type ExpectedStreamVersion<VersionType = BigIntStreamPosition> =\n | ExpectedStreamVersionWithValue<VersionType>\n | ExpectedStreamVersionGeneral;\n\nexport type ExpectedStreamVersionWithValue<VersionType = BigIntStreamPosition> =\n Flavour<VersionType, 'StreamVersion'>;\n\nexport type ExpectedStreamVersionGeneral = Flavour<\n 'STREAM_EXISTS' | 'STREAM_DOES_NOT_EXIST' | 'NO_CONCURRENCY_CHECK',\n 'StreamVersion'\n>;\n\nexport const STREAM_EXISTS = 'STREAM_EXISTS' as ExpectedStreamVersionGeneral;\nexport const STREAM_DOES_NOT_EXIST =\n 'STREAM_DOES_NOT_EXIST' as ExpectedStreamVersionGeneral;\nexport const NO_CONCURRENCY_CHECK =\n 'NO_CONCURRENCY_CHECK' as ExpectedStreamVersionGeneral;\n\nexport const matchesExpectedVersion = <StreamVersion = BigIntStreamPosition>(\n current: StreamVersion | undefined,\n expected: ExpectedStreamVersion<StreamVersion>,\n defaultVersion: StreamVersion,\n): boolean => {\n if (expected === NO_CONCURRENCY_CHECK) return true;\n\n if (expected == STREAM_DOES_NOT_EXIST) return current === defaultVersion;\n\n if (expected == STREAM_EXISTS) return current !== defaultVersion;\n\n return current === expected;\n};\n\nexport const assertExpectedVersionMatchesCurrent = <\n StreamVersion = BigIntStreamPosition,\n>(\n current: StreamVersion,\n expected: ExpectedStreamVersion<StreamVersion> | undefined,\n defaultVersion: StreamVersion,\n): void => {\n expected ??= NO_CONCURRENCY_CHECK;\n\n if (!matchesExpectedVersion(current, expected, defaultVersion))\n throw new ExpectedVersionConflictError(current, expected);\n};\n\nexport class ExpectedVersionConflictError<\n VersionType = BigIntStreamPosition,\n> extends ConcurrencyError {\n constructor(\n current: VersionType,\n expected: ExpectedStreamVersion<VersionType>,\n ) {\n super(current?.toString(), expected?.toString());\n\n // 👇️ because we are extending a built-in class\n Object.setPrototypeOf(this, ExpectedVersionConflictError.prototype);\n }\n}\n\nexport const isExpectedVersionConflictError = (\n error: unknown,\n): error is ExpectedVersionConflictError =>\n error instanceof ExpectedVersionConflictError;\n","export class ParseError extends Error {\n constructor(text: string) {\n super(`Cannot parse! 
${text}`);\n }\n}\n\nexport type Mapper<From, To = From> =\n | ((value: unknown) => To)\n | ((value: Partial<From>) => To)\n | ((value: From) => To)\n | ((value: Partial<To>) => To)\n | ((value: To) => To)\n | ((value: Partial<To | From>) => To)\n | ((value: To | From) => To);\n\nexport type MapperArgs<From, To = From> = Partial<From> &\n From &\n Partial<To> &\n To;\n\nexport type ParseOptions<From, To = From> = {\n reviver?: (key: string, value: unknown) => unknown;\n map?: Mapper<From, To>;\n typeCheck?: <To>(value: unknown) => value is To;\n};\n\nexport type StringifyOptions<From, To = From> = {\n map?: Mapper<From, To>;\n};\n\nexport const JSONParser = {\n stringify: <From, To = From>(\n value: From,\n options?: StringifyOptions<From, To>,\n ) => {\n return JSON.stringify(\n options?.map ? options.map(value as MapperArgs<From, To>) : value,\n //TODO: Consider adding support to DateTime and adding specific format to mark that's a bigint\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n (_, v) => (typeof v === 'bigint' ? v.toString() : v),\n );\n },\n parse: <From, To = From>(\n text: string,\n options?: ParseOptions<From, To>,\n ): To | undefined => {\n const parsed: unknown = JSON.parse(text, options?.reviver);\n\n if (options?.typeCheck && !options?.typeCheck<To>(parsed))\n throw new ParseError(text);\n\n return options?.map\n ? options.map(parsed as MapperArgs<From, To>)\n : (parsed as To | undefined);\n },\n};\n","import { filter } from './filter';\nimport { map } from './map';\nimport {\n notifyAboutNoActiveReadersStream,\n NotifyAboutNoActiveReadersStream,\n} from './notifyAboutNoActiveReaders';\nimport { reduce, ReduceTransformStream } from './reduce';\nimport { retryStream } from './retry';\nimport { skip, SkipTransformStream } from './skip';\nimport { stopAfter } from './stopAfter';\nimport { stopOn } from './stopOn';\nimport { take, TakeTransformStream } from './take';\nimport { waitAtMost } from './waitAtMost';\n\nexport const streamTransformations = {\n filter,\n take,\n TakeTransformStream,\n skip,\n SkipTransformStream,\n map,\n notifyAboutNoActiveReadersStream,\n NotifyAboutNoActiveReadersStream,\n reduce,\n ReduceTransformStream,\n retry: retryStream,\n stopAfter,\n stopOn,\n waitAtMost,\n};\n","import {\n JSONParser,\n NO_CONCURRENCY_CHECK,\n STREAM_DOES_NOT_EXIST,\n STREAM_EXISTS,\n type AppendToStreamOptions,\n type BeforeEventStoreCommitHandler,\n type ExpectedStreamVersion,\n type Event as Message,\n type RecordedMessage,\n} from '@event-driven-io/emmett';\nimport { v4 as uuid } from 'uuid';\nimport {\n isSQLiteError,\n type Parameters,\n type SQLiteConnection,\n type SQLiteError,\n} from '../../connection';\nimport type {\n SQLiteEventStore,\n SQLiteReadEventMetadata,\n} from '../SQLiteEventStore';\nimport { defaultTag, messagesTable, streamsTable } from './typing';\n\nexport type AppendEventResult =\n | {\n success: true;\n nextStreamPosition: bigint;\n lastGlobalPosition: bigint;\n }\n | { success: false };\n\nexport const appendToStream = async <MessageType extends Message>(\n db: SQLiteConnection,\n streamName: string,\n streamType: string,\n messages: MessageType[],\n options?: AppendToStreamOptions & {\n partition?: string;\n onBeforeCommit?: BeforeEventStoreCommitHandler<\n SQLiteEventStore,\n { db: SQLiteConnection }\n >;\n },\n): Promise<AppendEventResult> => {\n if (messages.length === 0) return { success: false };\n\n const expectedStreamVersion = toExpectedVersion(\n options?.expectedStreamVersion,\n );\n\n const messagesToAppend: 
RecordedMessage<\n MessageType,\n SQLiteReadEventMetadata\n >[] = messages.map(\n (\n m: Message,\n i: number,\n ): RecordedMessage<MessageType, SQLiteReadEventMetadata> =>\n ({\n ...m,\n kind: m.kind ?? 'Event',\n metadata: {\n streamName,\n messageId: uuid(),\n streamPosition: BigInt(i + 1),\n ...('metadata' in m ? (m.metadata ?? {}) : {}),\n },\n }) as RecordedMessage<MessageType, SQLiteReadEventMetadata>,\n );\n\n let result: AppendEventResult;\n\n return await db.withTransaction(async () => {\n result = await appendToStreamRaw(\n db,\n streamName,\n streamType,\n messagesToAppend,\n {\n expectedStreamVersion,\n },\n );\n\n if (options?.onBeforeCommit)\n await options.onBeforeCommit(messagesToAppend, { db });\n\n return result;\n });\n};\n\nconst toExpectedVersion = (\n expected: ExpectedStreamVersion | undefined,\n): bigint | null => {\n if (expected === undefined) return null;\n\n if (expected === NO_CONCURRENCY_CHECK) return null;\n\n // TODO: this needs to be fixed\n if (expected == STREAM_DOES_NOT_EXIST) return null;\n\n // TODO: this needs to be fixed\n if (expected == STREAM_EXISTS) return null;\n\n return expected as bigint;\n};\n\nconst appendToStreamRaw = async (\n db: SQLiteConnection,\n streamId: string,\n streamType: string,\n messages: RecordedMessage[],\n options?: {\n expectedStreamVersion: bigint | null;\n partition?: string;\n },\n): Promise<AppendEventResult> => {\n let streamPosition;\n let globalPosition;\n\n try {\n let expectedStreamVersion = options?.expectedStreamVersion ?? null;\n\n if (expectedStreamVersion == null) {\n expectedStreamVersion = await getLastStreamPosition(\n db,\n streamId,\n expectedStreamVersion,\n );\n }\n\n let position: { stream_position: string } | null;\n\n if (expectedStreamVersion === 0n) {\n position = await db.querySingle<{\n stream_position: string;\n } | null>(\n `INSERT INTO ${streamsTable.name}\n (stream_id, stream_position, partition, stream_type, stream_metadata, is_archived)\n VALUES (\n ?,\n ?,\n ?,\n ?,\n '[]',\n false\n )\n RETURNING stream_position;\n `,\n [\n streamId,\n messages.length,\n options?.partition ?? streamsTable.columns.partition,\n streamType,\n ],\n );\n } else {\n position = await db.querySingle<{\n stream_position: string;\n } | null>(\n `UPDATE ${streamsTable.name}\n SET stream_position = stream_position + ?\n WHERE stream_id = ?\n AND partition = ?\n AND is_archived = false\n RETURNING stream_position;\n `,\n [\n messages.length,\n streamId,\n options?.partition ?? streamsTable.columns.partition,\n ],\n );\n }\n\n if (position == null) {\n throw new Error('Could not find stream position');\n }\n\n streamPosition = BigInt(position.stream_position);\n\n if (expectedStreamVersion != null) {\n const expectedStreamPositionAfterSave =\n BigInt(expectedStreamVersion) + BigInt(messages.length);\n if (streamPosition !== expectedStreamPositionAfterSave) {\n return {\n success: false,\n };\n }\n }\n\n const { sqlString, values } = buildMessageInsertQuery(\n messages,\n expectedStreamVersion,\n streamId,\n options?.partition?.toString() ?? 
defaultTag,\n );\n\n const returningIds = await db.query<{\n global_position: string;\n } | null>(sqlString, values);\n\n if (\n returningIds.length === 0 ||\n !returningIds[returningIds.length - 1]?.global_position\n ) {\n throw new Error('Could not find global position');\n }\n\n globalPosition = BigInt(\n returningIds[returningIds.length - 1]!.global_position,\n );\n } catch (err: unknown) {\n if (isSQLiteError(err) && isOptimisticConcurrencyError(err)) {\n return {\n success: false,\n };\n }\n\n throw err;\n }\n\n return {\n success: true,\n nextStreamPosition: streamPosition,\n lastGlobalPosition: globalPosition,\n };\n};\n\nconst isOptimisticConcurrencyError = (error: SQLiteError): boolean => {\n return error?.errno !== undefined && error.errno === 19;\n};\n\nasync function getLastStreamPosition(\n db: SQLiteConnection,\n streamId: string,\n expectedStreamVersion: bigint | null,\n): Promise<bigint> {\n const result = await db.querySingle<{ stream_position: string } | null>(\n `SELECT CAST(stream_position AS VARCHAR) AS stream_position FROM ${streamsTable.name} WHERE stream_id = ?`,\n [streamId],\n );\n\n if (result?.stream_position == null) {\n expectedStreamVersion = 0n;\n } else {\n expectedStreamVersion = BigInt(result.stream_position);\n }\n return expectedStreamVersion;\n}\n\nconst buildMessageInsertQuery = (\n messages: RecordedMessage[],\n expectedStreamVersion: bigint,\n streamId: string,\n partition: string | null | undefined,\n): {\n sqlString: string;\n values: Parameters[];\n} => {\n const query = messages.reduce(\n (\n queryBuilder: { parameterMarkers: string[]; values: Parameters[] },\n message: RecordedMessage,\n ) => {\n if (\n message.metadata?.streamPosition == null ||\n typeof message.metadata.streamPosition !== 'bigint'\n ) {\n throw new Error('Stream position is required');\n }\n\n const streamPosition =\n BigInt(message.metadata.streamPosition) + BigInt(expectedStreamVersion);\n\n queryBuilder.parameterMarkers.push(`(?,?,?,?,?,?,?,?,?,?)`);\n queryBuilder.values.push(\n streamId,\n streamPosition.toString() ?? 0,\n partition ?? defaultTag,\n message.kind === 'Event' ? 'E' : 'C',\n JSONParser.stringify(message.data),\n JSONParser.stringify(message.metadata),\n expectedStreamVersion?.toString() ?? 
0,\n message.type,\n message.metadata.messageId,\n false,\n );\n\n return queryBuilder;\n },\n {\n parameterMarkers: [],\n values: [],\n },\n );\n\n const sqlString = `\n INSERT INTO ${messagesTable.name} (\n stream_id, \n stream_position, \n partition, \n message_kind,\n message_data, \n message_metadata, \n message_schema_version, \n message_type, \n message_id, \n is_archived\n ) \n VALUES ${query.parameterMarkers.join(', ')} \n RETURNING \n CAST(global_position as VARCHAR) AS global_position\n `;\n return { sqlString, values: query.values };\n};\n","export const emmettPrefix = 'emt';\n\nexport const globalTag = 'global';\nexport const defaultTag = 'emt:default';\n\nexport const globalNames = {\n module: `${emmettPrefix}:module:${globalTag}`,\n};\n\nconst columns = {\n partition: {\n name: 'partition',\n },\n isArchived: { name: 'is_archived' },\n};\n\nexport const streamsTable = {\n name: `${emmettPrefix}_streams`,\n columns: {\n partition: columns.partition,\n isArchived: columns.isArchived,\n },\n};\n\nexport const messagesTable = {\n name: `${emmettPrefix}_messages`,\n columns: {\n partition: columns.partition,\n isArchived: columns.isArchived,\n },\n};\n\nexport const subscriptionsTable = {\n name: `${emmettPrefix}_subscriptions`,\n};\n","import type { SQLiteConnection } from '../../connection';\nimport {\n globalTag,\n messagesTable,\n streamsTable,\n subscriptionsTable,\n} from './typing';\n\nexport const sql = (sql: string) => sql;\n\nexport const streamsTableSQL = sql(\n `CREATE TABLE IF NOT EXISTS ${streamsTable.name}(\n stream_id TEXT NOT NULL,\n stream_position BIGINT NOT NULL DEFAULT 0,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n stream_type TEXT NOT NULL,\n stream_metadata JSONB NOT NULL,\n is_archived BOOLEAN NOT NULL DEFAULT FALSE,\n PRIMARY KEY (stream_id, stream_position, partition, is_archived),\n UNIQUE (stream_id, partition, is_archived)\n );`,\n);\n\nexport const messagesTableSQL = sql(\n `CREATE TABLE IF NOT EXISTS ${messagesTable.name}(\n stream_id TEXT NOT NULL,\n stream_position BIGINT NOT NULL,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n message_kind CHAR(1) NOT NULL DEFAULT 'E',\n message_data JSONB NOT NULL,\n message_metadata JSONB NOT NULL,\n message_schema_version TEXT NOT NULL,\n message_type TEXT NOT NULL,\n message_id TEXT NOT NULL,\n is_archived BOOLEAN NOT NULL DEFAULT FALSE,\n global_position INTEGER PRIMARY KEY,\n created DATETIME DEFAULT CURRENT_TIMESTAMP,\n UNIQUE (stream_id, stream_position, partition, is_archived)\n ); \n`,\n);\n\nexport const subscriptionsTableSQL = sql(\n `\n CREATE TABLE IF NOT EXISTS ${subscriptionsTable.name}(\n subscription_id TEXT NOT NULL,\n version INTEGER NOT NULL DEFAULT 1,\n partition TEXT NOT NULL DEFAULT '${globalTag}',\n last_processed_position BIGINT NOT NULL,\n PRIMARY KEY (subscription_id, partition, version)\n );\n`,\n);\n\nexport const schemaSQL: string[] = [\n streamsTableSQL,\n messagesTableSQL,\n subscriptionsTableSQL,\n];\n\nexport const createEventStoreSchema = async (\n db: SQLiteConnection,\n): Promise<void> => {\n for (const sql of schemaSQL) {\n await db.command(sql);\n }\n};\n","export const singleOrNull = async <T>(\n getResult: Promise<T[]>,\n): Promise<T | null> => {\n const result = await getResult;\n\n if (result.length > 1) throw new Error('Query had more than one result');\n\n return result.length > 0 ? (result[0] ?? 
null) : null;\n};\n\nexport const single = async <T>(getResult: Promise<T[]>): Promise<T> => {\n const result = await getResult;\n\n if (result.length === 0) throw new Error(\"Query didn't return any result\");\n\n if (result.length > 1) throw new Error('Query had more than one result');\n\n return result[0]!;\n};\n","import type { SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, messagesTable } from './typing';\nimport { singleOrNull } from './utils';\n\ntype ReadLastMessageGlobalPositionSqlResult = {\n global_position: string;\n};\n\nexport type ReadLastMessageGlobalPositionResult = {\n currentGlobalPosition: bigint | null;\n};\n\nexport const readLastMessageGlobalPosition = async (\n db: SQLiteConnection,\n options?: { partition?: string },\n): Promise<ReadLastMessageGlobalPositionResult> => {\n const result = await singleOrNull(\n db.query<ReadLastMessageGlobalPositionSqlResult>(\n sql(\n `SELECT global_position\n FROM ${messagesTable.name}\n WHERE partition = ? AND is_archived = FALSE\n ORDER BY global_position\n LIMIT 1`,\n ),\n [options?.partition ?? defaultTag],\n ),\n );\n\n return {\n currentGlobalPosition:\n result !== null ? BigInt(result.global_position) : null,\n };\n};\n","import {\n JSONParser,\n type CombinedReadEventMetadata,\n type Event,\n type ReadEvent,\n type ReadEventMetadata,\n type ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, messagesTable } from './typing';\n\ntype ReadMessagesBatchSqlResult = {\n stream_position: string;\n stream_id: string;\n message_data: string;\n message_metadata: string;\n message_schema_version: string;\n message_type: string;\n message_id: string;\n global_position: string;\n transaction_id: string;\n created: string;\n};\n\nexport type ReadMessagesBatchOptions =\n | {\n after: bigint;\n batchSize: number;\n }\n | {\n from: bigint;\n batchSize: number;\n }\n | { to: bigint; batchSize: number }\n | { from: bigint; to: bigint };\n\nexport type ReadMessagesBatchResult<\n EventType extends Event,\n ReadEventMetadataType extends ReadEventMetadata = ReadEventMetadata,\n> = {\n currentGlobalPosition: bigint;\n messages: ReadEvent<EventType, ReadEventMetadataType>[];\n areEventsLeft: boolean;\n};\n\nexport const readMessagesBatch = async <\n MessageType extends Event,\n ReadEventMetadataType extends\n ReadEventMetadataWithGlobalPosition = ReadEventMetadataWithGlobalPosition,\n>(\n db: SQLiteConnection,\n options: ReadMessagesBatchOptions & { partition?: string },\n): Promise<ReadMessagesBatchResult<MessageType, ReadEventMetadataType>> => {\n const from =\n 'from' in options\n ? options.from\n : 'after' in options\n ? options.after + 1n\n : 0n;\n const batchSize =\n options && 'batchSize' in options\n ? options.batchSize\n : options.to - options.from;\n\n const fromCondition: string =\n from !== -0n ? `AND global_position >= ${from}` : '';\n\n const toCondition =\n 'to' in options ? `AND global_position <= ${options.to}` : '';\n\n const limitCondition =\n 'batchSize' in options ? `LIMIT ${options.batchSize}` : '';\n\n const events: ReadEvent<MessageType, ReadEventMetadataType>[] = (\n await db.query<ReadMessagesBatchSqlResult>(\n sql(\n `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id\n FROM ${messagesTable.name}\n WHERE partition = ? 
AND is_archived = FALSE ${fromCondition} ${toCondition}\n ORDER BY global_position\n ${limitCondition}`,\n ),\n [options?.partition ?? defaultTag],\n )\n ).map((row) => {\n const rawEvent = {\n type: row.message_type,\n data: JSONParser.parse(row.message_data),\n metadata: JSONParser.parse(row.message_metadata),\n } as unknown as MessageType;\n\n const metadata: ReadEventMetadataWithGlobalPosition = {\n ...('metadata' in rawEvent ? (rawEvent.metadata ?? {}) : {}),\n messageId: row.message_id,\n streamName: row.stream_id,\n streamPosition: BigInt(row.stream_position),\n globalPosition: BigInt(row.global_position),\n };\n\n return {\n ...rawEvent,\n kind: 'Event',\n metadata: metadata as CombinedReadEventMetadata<\n MessageType,\n ReadEventMetadataType\n >,\n };\n });\n\n return events.length > 0\n ? {\n currentGlobalPosition:\n events[events.length - 1]!.metadata.globalPosition,\n messages: events,\n areEventsLeft: events.length === batchSize,\n }\n : {\n currentGlobalPosition:\n 'from' in options\n ? options.from\n : 'after' in options\n ? options.after\n : 0n,\n messages: [],\n areEventsLeft: false,\n };\n};\n","import type { SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, subscriptionsTable } from './typing';\nimport { singleOrNull } from './utils';\n\ntype ReadProcessorCheckpointSqlResult = {\n last_processed_position: string;\n};\n\nexport type ReadProcessorCheckpointResult = {\n lastProcessedPosition: bigint | null;\n};\n\nexport const readProcessorCheckpoint = async (\n db: SQLiteConnection,\n options: { processorId: string; partition?: string },\n): Promise<ReadProcessorCheckpointResult> => {\n const result = await singleOrNull(\n db.query<ReadProcessorCheckpointSqlResult>(\n sql(\n `SELECT last_processed_position\n FROM ${subscriptionsTable.name}\n WHERE partition = ? AND subscription_id = ?\n LIMIT 1`,\n ),\n [options?.partition ?? defaultTag, options.processorId],\n ),\n );\n\n return {\n lastProcessedPosition:\n result !== null ? 
BigInt(result.last_processed_position) : null,\n };\n};\n","import type {\n EmmettError,\n Event,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../../connection';\nimport { readLastMessageGlobalPosition } from '../../schema/readLastMessageGlobalPosition';\nimport {\n readMessagesBatch,\n type ReadMessagesBatchOptions,\n} from '../../schema/readMessagesBatch';\n\nexport const DefaultSQLiteEventStoreProcessorBatchSize = 100;\nexport const DefaultSQLiteEventStoreProcessorPullingFrequencyInMs = 50;\n\nexport type SQLiteEventStoreMessagesBatch<EventType extends Event = Event> = {\n messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[];\n};\n\nexport type SQLiteEventStoreMessagesBatchHandlerResult = void | {\n type: 'STOP';\n reason?: string;\n error?: EmmettError;\n};\n\nexport type SQLiteEventStoreMessagesBatchHandler<\n EventType extends Event = Event,\n> = (\n messagesBatch: SQLiteEventStoreMessagesBatch<EventType>,\n) =>\n | Promise<SQLiteEventStoreMessagesBatchHandlerResult>\n | SQLiteEventStoreMessagesBatchHandlerResult;\n\nexport type SQLiteEventStoreMessageBatchPullerOptions<\n EventType extends Event = Event,\n> = {\n db: SQLiteConnection;\n pullingFrequencyInMs: number;\n batchSize: number;\n eachBatch: SQLiteEventStoreMessagesBatchHandler<EventType>;\n};\n\nexport type SQLiteEventStoreMessageBatchPullerStartFrom =\n | { globalPosition: bigint }\n | 'BEGINNING'\n | 'END';\n\nexport type SQLiteEventStoreMessageBatchPullerStartOptions = {\n startFrom: SQLiteEventStoreMessageBatchPullerStartFrom;\n};\n\nexport type SQLiteEventStoreMessageBatchPuller = {\n isRunning: boolean;\n start(options: SQLiteEventStoreMessageBatchPullerStartOptions): Promise<void>;\n stop(): Promise<void>;\n};\n\nexport const sqliteEventStoreMessageBatchPuller = <\n EventType extends Event = Event,\n>({\n db,\n batchSize,\n eachBatch,\n pullingFrequencyInMs,\n}: SQLiteEventStoreMessageBatchPullerOptions<EventType>): SQLiteEventStoreMessageBatchPuller => {\n let isRunning = false;\n\n let start: Promise<void>;\n\n const pullMessages = async (\n options: SQLiteEventStoreMessageBatchPullerStartOptions,\n ) => {\n const after =\n options.startFrom === 'BEGINNING'\n ? 0n\n : options.startFrom === 'END'\n ? 
((await readLastMessageGlobalPosition(db)).currentGlobalPosition ??\n 0n)\n : options.startFrom.globalPosition;\n\n const readMessagesOptions: ReadMessagesBatchOptions = {\n after,\n batchSize,\n };\n\n let waitTime = 100;\n\n do {\n const { messages, currentGlobalPosition, areEventsLeft } =\n await readMessagesBatch<EventType>(db, readMessagesOptions);\n\n if (messages.length > 0) {\n const result = await eachBatch({ messages });\n\n if (result && result.type === 'STOP') {\n isRunning = false;\n break;\n }\n }\n\n readMessagesOptions.after = currentGlobalPosition;\n\n await new Promise((resolve) => setTimeout(resolve, waitTime));\n\n if (!areEventsLeft) {\n waitTime = Math.min(waitTime * 2, 1000);\n } else {\n waitTime = pullingFrequencyInMs;\n }\n } while (isRunning);\n };\n\n return {\n get isRunning() {\n return isRunning;\n },\n start: (options) => {\n if (isRunning) return start;\n\n start = (async () => {\n isRunning = true;\n\n return pullMessages(options);\n })();\n\n return start;\n },\n stop: async () => {\n if (!isRunning) return;\n isRunning = false;\n await start;\n },\n };\n};\n\nexport const zipSQLiteEventStoreMessageBatchPullerStartFrom = (\n options: (SQLiteEventStoreMessageBatchPullerStartFrom | undefined)[],\n): SQLiteEventStoreMessageBatchPullerStartFrom => {\n if (\n options.length === 0 ||\n options.some((o) => o === undefined || o === 'BEGINNING')\n )\n return 'BEGINNING';\n\n if (options.every((o) => o === 'END')) return 'END';\n\n return options\n .filter((o) => o !== undefined && o !== 'BEGINNING' && o !== 'END')\n .sort((a, b) => (a > b ? 1 : -1))[0]!;\n};\n","import {\n EmmettError,\n type Event,\n type ReadEvent,\n type ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\nimport { sqliteConnection, type SQLiteConnection } from '../../connection';\nimport type { SQLiteProjectionDefinition } from '../projections';\nimport { readProcessorCheckpoint, storeProcessorCheckpoint } from '../schema';\nimport type { SQLiteEventStoreMessageBatchPullerStartFrom } from './messageBatchProcessing';\n\nexport type SQLiteProcessorEventsBatch<EventType extends Event = Event> = {\n messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[];\n};\n\nexport type SQLiteProcessorHandlerContext = {\n db: SQLiteConnection;\n fileName: string;\n};\n\nexport type SQLiteProcessor<EventType extends Event = Event> = {\n id: string;\n start: (\n db: SQLiteConnection,\n ) => Promise<SQLiteEventStoreMessageBatchPullerStartFrom | undefined>;\n isActive: boolean;\n handle: (\n messagesBatch: SQLiteProcessorEventsBatch<EventType>,\n context: { db?: SQLiteConnection; fileName?: string },\n ) => Promise<SQLiteProcessorMessageHandlerResult>;\n};\n\nexport const SQLiteProcessor = {\n result: {\n skip: (options?: {\n reason?: string;\n }): SQLiteProcessorMessageHandlerResult => ({\n type: 'SKIP',\n ...(options ?? {}),\n }),\n stop: (options?: {\n reason?: string;\n error?: EmmettError;\n }): SQLiteProcessorMessageHandlerResult => ({\n type: 'STOP',\n ...(options ?? 
{}),\n }),\n },\n};\n\nexport type SQLiteProcessorMessageHandlerResult =\n | void\n | { type: 'SKIP'; reason?: string }\n | { type: 'STOP'; reason?: string; error?: EmmettError };\n\nexport type SQLiteProcessorEachMessageHandler<EventType extends Event = Event> =\n (\n event: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>,\n context: SQLiteProcessorHandlerContext,\n ) =>\n | Promise<SQLiteProcessorMessageHandlerResult>\n | SQLiteProcessorMessageHandlerResult;\n\nexport type SQLiteProcessorEachBatchHandler<EventType extends Event = Event> = (\n event: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[],\n context: SQLiteProcessorHandlerContext,\n) =>\n | Promise<SQLiteProcessorMessageHandlerResult>\n | SQLiteProcessorMessageHandlerResult;\n\nexport type SQLiteProcessorStartFrom =\n | SQLiteEventStoreMessageBatchPullerStartFrom\n | 'CURRENT';\n\nexport type SQLiteProcessorConnectionOptions = {\n fileName: string;\n db?: SQLiteConnection;\n};\n\nexport type GenericSQLiteProcessorOptions<EventType extends Event = Event> = {\n processorId: string;\n version?: number;\n partition?: string;\n startFrom?: SQLiteProcessorStartFrom;\n stopAfter?: (\n message: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>,\n ) => boolean;\n eachMessage: SQLiteProcessorEachMessageHandler<EventType>;\n connectionOptions?: SQLiteProcessorConnectionOptions;\n // TODO: Add eachBatch\n};\n\nexport type SQLiteProjectionProcessorOptions<EventType extends Event = Event> =\n {\n processorId?: string;\n version?: number;\n projection: SQLiteProjectionDefinition<EventType>;\n partition?: string;\n startFrom?: SQLiteProcessorStartFrom;\n stopAfter?: (\n message: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>,\n ) => boolean;\n };\n\nexport type SQLiteProcessorOptions<EventType extends Event = Event> =\n | GenericSQLiteProcessorOptions<EventType>\n | SQLiteProjectionProcessorOptions<EventType>;\n\nconst genericSQLiteProcessor = <EventType extends Event = Event>(\n options: GenericSQLiteProcessorOptions<EventType>,\n): SQLiteProcessor => {\n const { eachMessage } = options;\n let isActive = true;\n //let lastProcessedPosition: number | null = null;\n\n const getDb = (context: {\n db?: SQLiteConnection;\n fileName?: string;\n }): { db: SQLiteConnection; fileName: string } => {\n const fileName = context.fileName ?? options.connectionOptions?.fileName;\n if (!fileName)\n throw new EmmettError(\n `SQLite processor '${options.processorId}' is missing file name. 
Ensure that you passed it through options`,\n );\n\n const db =\n context.db ??\n options.connectionOptions?.db ??\n sqliteConnection({ fileName });\n\n return { db, fileName };\n };\n\n return {\n id: options.processorId,\n start: async (\n db: SQLiteConnection,\n ): Promise<SQLiteEventStoreMessageBatchPullerStartFrom | undefined> => {\n isActive = true;\n if (options.startFrom !== 'CURRENT') return options.startFrom;\n\n const { lastProcessedPosition } = await readProcessorCheckpoint(db, {\n processorId: options.processorId,\n partition: options.partition,\n });\n\n if (lastProcessedPosition === null) return 'BEGINNING';\n\n return { globalPosition: lastProcessedPosition };\n },\n get isActive() {\n return isActive;\n },\n handle: async (\n { messages },\n context,\n ): Promise<SQLiteProcessorMessageHandlerResult> => {\n if (!isActive) return;\n\n const { db, fileName } = getDb(context);\n\n return db.withTransaction(async () => {\n let result: SQLiteProcessorMessageHandlerResult | undefined = undefined;\n\n let lastProcessedPosition: bigint | null = null;\n\n for (const message of messages) {\n const typedMessage = message as ReadEvent<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >;\n\n const messageProcessingResult = await eachMessage(typedMessage, {\n db,\n fileName,\n });\n\n // TODO: Add correct handling of the storing checkpoint\n await storeProcessorCheckpoint(db, {\n processorId: options.processorId,\n version: options.version,\n lastProcessedPosition,\n newPosition: typedMessage.metadata.globalPosition,\n partition: options.partition,\n });\n\n lastProcessedPosition = typedMessage.metadata.globalPosition;\n\n if (\n messageProcessingResult &&\n messageProcessingResult.type === 'STOP'\n ) {\n isActive = false;\n result = messageProcessingResult;\n break;\n }\n\n if (options.stopAfter && options.stopAfter(typedMessage)) {\n isActive = false;\n result = { type: 'STOP', reason: 'Stop condition reached' };\n break;\n }\n\n if (\n messageProcessingResult &&\n messageProcessingResult.type === 'SKIP'\n )\n continue;\n }\n return result;\n });\n },\n };\n};\n\nexport const sqliteProjectionProcessor = <EventType extends Event = Event>(\n options: SQLiteProjectionProcessorOptions<EventType>,\n): SQLiteProcessor => {\n const projection = options.projection;\n\n return genericSQLiteProcessor<EventType>({\n processorId: options.processorId ?? 
`projection:${projection.name}`,\n eachMessage: async (event, context) => {\n if (!projection.canHandle.includes(event.type)) return;\n\n await projection.handle([event], context);\n },\n ...options,\n });\n};\n\nexport const sqliteProcessor = <EventType extends Event = Event>(\n options: SQLiteProcessorOptions<EventType>,\n): SQLiteProcessor => {\n if ('projection' in options) {\n return sqliteProjectionProcessor(options);\n }\n\n return genericSQLiteProcessor(options);\n};\n","import { EmmettError, type Event } from '@event-driven-io/emmett';\nimport { sqliteConnection, type SQLiteConnection } from '../../connection';\nimport {\n DefaultSQLiteEventStoreProcessorBatchSize,\n DefaultSQLiteEventStoreProcessorPullingFrequencyInMs,\n sqliteEventStoreMessageBatchPuller,\n zipSQLiteEventStoreMessageBatchPullerStartFrom,\n type SQLiteEventStoreMessageBatchPuller,\n type SQLiteEventStoreMessagesBatchHandler,\n} from './messageBatchProcessing';\nimport {\n sqliteProcessor,\n type SQLiteProcessor,\n type SQLiteProcessorOptions,\n} from './sqliteProcessor';\n\nexport type SQLiteEventStoreConsumerConfig<\n ConsumerEventType extends Event = Event,\n> = {\n processors?: SQLiteProcessor<ConsumerEventType>[];\n pulling?: {\n batchSize?: number;\n pullingFrequencyInMs?: number;\n };\n};\nexport type SQLiteEventStoreConsumerOptions<\n ConsumerEventType extends Event = Event,\n> = SQLiteEventStoreConsumerConfig<ConsumerEventType> & {\n fileName: string;\n db?: SQLiteConnection;\n};\n\nexport type SQLiteEventStoreConsumer<ConsumerEventType extends Event = Event> =\n Readonly<{\n isRunning: boolean;\n processors: SQLiteProcessor<ConsumerEventType>[];\n processor: <EventType extends ConsumerEventType = ConsumerEventType>(\n options: SQLiteProcessorOptions<EventType>,\n ) => SQLiteProcessor<EventType>;\n start: () => Promise<void>;\n stop: () => Promise<void>;\n close: () => Promise<void>;\n }>;\n\nexport const sqliteEventStoreConsumer = <\n ConsumerEventType extends Event = Event,\n>(\n options: SQLiteEventStoreConsumerOptions<ConsumerEventType>,\n): SQLiteEventStoreConsumer<ConsumerEventType> => {\n let isRunning = false;\n const { pulling } = options;\n const processors = options.processors ?? [];\n\n let start: Promise<void>;\n\n let currentMessagePuller: SQLiteEventStoreMessageBatchPuller | undefined;\n\n const db = options.db ?? sqliteConnection({ fileName: options.fileName });\n\n const eachBatch: SQLiteEventStoreMessagesBatchHandler<\n ConsumerEventType\n > = async (messagesBatch) => {\n const activeProcessors = processors.filter((s) => s.isActive);\n\n if (activeProcessors.length === 0)\n return {\n type: 'STOP',\n reason: 'No active processors',\n };\n\n const result = await Promise.allSettled(\n activeProcessors.map((s) => {\n // TODO: Add here filtering to only pass messages that can be handled by processor\n return s.handle(messagesBatch, { db, fileName: options.fileName });\n }),\n );\n\n return result.some(\n (r) => r.status === 'fulfilled' && r.value?.type !== 'STOP',\n )\n ? undefined\n : {\n type: 'STOP',\n };\n };\n\n const messagePooler = (currentMessagePuller =\n sqliteEventStoreMessageBatchPuller({\n db,\n eachBatch,\n batchSize:\n pulling?.batchSize ?? 
DefaultSQLiteEventStoreProcessorBatchSize,\n pullingFrequencyInMs:\n pulling?.pullingFrequencyInMs ??\n DefaultSQLiteEventStoreProcessorPullingFrequencyInMs,\n }));\n\n const stop = async () => {\n if (!isRunning) return;\n isRunning = false;\n if (currentMessagePuller) {\n await currentMessagePuller.stop();\n currentMessagePuller = undefined;\n }\n await start;\n };\n\n return {\n processors,\n get isRunning() {\n return isRunning;\n },\n processor: <EventType extends ConsumerEventType = ConsumerEventType>(\n options: SQLiteProcessorOptions<EventType>,\n ): SQLiteProcessor<EventType> => {\n const processor = sqliteProcessor<EventType>(options);\n\n processors.push(processor);\n\n return processor;\n },\n start: () => {\n if (isRunning) return start;\n\n start = (async () => {\n if (processors.length === 0)\n return Promise.reject(\n new EmmettError(\n 'Cannot start consumer without at least a single processor',\n ),\n );\n\n isRunning = true;\n\n const startFrom = zipSQLiteEventStoreMessageBatchPullerStartFrom(\n await Promise.all(processors.map((o) => o.start(db))),\n );\n\n return messagePooler.start({ startFrom });\n })();\n\n return start;\n },\n stop,\n close: async () => {\n await stop();\n\n db.close();\n\n await new Promise((resolve) => setTimeout(resolve, 250));\n },\n };\n};\n","import {\n projection,\n type CanHandle,\n type Event,\n type ProjectionDefinition,\n type ProjectionHandler,\n type ReadEvent,\n} from '@event-driven-io/emmett';\nimport type { SQLiteConnection } from '../../connection';\nimport type { SQLiteReadEventMetadata } from '../SQLiteEventStore';\n\nexport type SQLiteProjectionHandlerContext = {\n db: SQLiteConnection;\n};\n\nexport type SQLiteProjectionHandler<\n EventType extends Event = Event,\n EventMetaDataType extends SQLiteReadEventMetadata = SQLiteReadEventMetadata,\n> = ProjectionHandler<\n EventType,\n EventMetaDataType,\n SQLiteProjectionHandlerContext\n>;\n\nexport type SQLiteProjectionDefinition<EventType extends Event = Event> =\n ProjectionDefinition<\n EventType,\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >;\n\nexport type SQLiteProjectionHandlerOptions<EventType extends Event = Event> = {\n events: ReadEvent<EventType, SQLiteReadEventMetadata>[];\n projections: SQLiteProjectionDefinition<EventType>[];\n db: SQLiteConnection;\n};\n\nexport const handleProjections = async <EventType extends Event = Event>(\n options: SQLiteProjectionHandlerOptions<EventType>,\n): Promise<void> => {\n const { projections: allProjections, events, db } = options;\n\n const eventTypes = events.map((e) => e.type);\n\n const projections = allProjections.filter((p) =>\n p.canHandle.some((type) => eventTypes.includes(type)),\n );\n\n for (const projection of projections) {\n await projection.handle(events, {\n db,\n });\n }\n};\n\nexport const sqliteProjection = <EventType extends Event>(\n definition: SQLiteProjectionDefinition<EventType>,\n): SQLiteProjectionDefinition<EventType> =>\n projection<\n EventType,\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >(definition);\n\nexport const sqliteRawBatchSQLProjection = <EventType extends Event>(\n handle: (\n events: EventType[],\n context: SQLiteProjectionHandlerContext,\n ) => Promise<string[]> | string[],\n ...canHandle: CanHandle<EventType>\n): SQLiteProjectionDefinition<EventType> =>\n sqliteProjection<EventType>({\n canHandle,\n handle: async (events, context) => {\n const sqls: string[] = await handle(events, context);\n\n for (const sql of sqls) await context.db.command(sql);\n 
},\n });\n\nexport const sqliteRawSQLProjection = <EventType extends Event>(\n handle: (\n event: EventType,\n context: SQLiteProjectionHandlerContext,\n ) => Promise<string> | string,\n ...canHandle: CanHandle<EventType>\n): SQLiteProjectionDefinition<EventType> =>\n sqliteRawBatchSQLProjection<EventType>(\n async (events, context) => {\n const sqls: string[] = [];\n\n for (const event of events) {\n sqls.push(await handle(event, context));\n }\n return sqls;\n },\n ...canHandle,\n );\n","import type {\n AppendToStreamResultWithGlobalPosition,\n BeforeEventStoreCommitHandler,\n BigIntStreamPosition,\n Event,\n ProjectionRegistration,\n ReadEvent,\n ReadEventMetadataWithGlobalPosition,\n} from '@event-driven-io/emmett';\n\nimport {\n assertExpectedVersionMatchesCurrent,\n ExpectedVersionConflictError,\n NO_CONCURRENCY_CHECK,\n type AggregateStreamOptions,\n type AggregateStreamResult,\n type AppendToStreamOptions,\n type EventStore,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from '@event-driven-io/emmett';\nimport {\n InMemorySharedCacheSQLiteDatabase,\n InMemorySQLiteDatabase,\n sqliteConnection,\n type SQLiteConnection,\n} from '../connection';\nimport {\n sqliteEventStoreConsumer,\n type SQLiteEventStoreConsumer,\n type SQLiteEventStoreConsumerConfig,\n} from './consumers';\nimport {\n handleProjections,\n type SQLiteProjectionHandlerContext,\n} from './projections';\nimport { createEventStoreSchema } from './schema';\nimport { appendToStream } from './schema/appendToStream';\nimport { readStream } from './schema/readStream';\n\nexport type EventHandler<E extends Event = Event> = (\n eventEnvelope: ReadEvent<E>,\n) => void;\n\nexport const SQLiteEventStoreDefaultStreamVersion = 0n;\n\nexport interface SQLiteEventStore extends EventStore<SQLiteReadEventMetadata> {\n appendToStream<EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResultWithGlobalPosition>;\n consumer<ConsumerEventType extends Event = Event>(\n options?: SQLiteEventStoreConsumerConfig<ConsumerEventType>,\n ): SQLiteEventStoreConsumer<ConsumerEventType>;\n}\n\nexport type SQLiteReadEventMetadata = ReadEventMetadataWithGlobalPosition;\n\nexport type SQLiteReadEvent<EventType extends Event = Event> = ReadEvent<\n EventType,\n SQLiteReadEventMetadata\n>;\n\nexport type SQLiteEventStoreOptions = {\n fileName: // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n | InMemorySQLiteDatabase\n // eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents\n | InMemorySharedCacheSQLiteDatabase\n | string\n | undefined;\n projections?: ProjectionRegistration<\n 'inline',\n SQLiteReadEventMetadata,\n SQLiteProjectionHandlerContext\n >[];\n schema?: {\n autoMigration?: 'None' | 'CreateOrUpdate';\n };\n hooks?: {\n /**\n * This hook will be called **BEFORE** events were stored in the event store.\n * @type {BeforeEventStoreCommitHandler<SQLiteEventStore, HandlerContext>}\n */\n onBeforeCommit?: BeforeEventStoreCommitHandler<\n SQLiteEventStore,\n { db: SQLiteConnection }\n >;\n };\n};\n\nexport const getSQLiteEventStore = (\n options: SQLiteEventStoreOptions,\n): SQLiteEventStore => {\n let schemaMigrated = false;\n let autoGenerateSchema = false;\n let database: SQLiteConnection | null;\n const fileName = options.fileName ?? 
InMemorySQLiteDatabase;\n\n const isInMemory: boolean =\n fileName === InMemorySQLiteDatabase ||\n fileName === InMemorySharedCacheSQLiteDatabase;\n\n const inlineProjections = (options.projections ?? [])\n .filter(({ type }) => type === 'inline')\n .map(({ projection }) => projection);\n\n const onBeforeCommitHook = options.hooks?.onBeforeCommit;\n\n const createConnection = () => {\n if (database != null) {\n return database;\n }\n\n return sqliteConnection({\n fileName,\n });\n };\n\n const closeConnection = () => {\n if (isInMemory) {\n return;\n }\n if (database != null) {\n database.close();\n database = null;\n }\n };\n\n const withConnection = async <Result>(\n handler: (db: SQLiteConnection) => Promise<Result>,\n ): Promise<Result> => {\n if (database == null) {\n database = createConnection();\n }\n\n try {\n await ensureSchemaExists(database);\n return await handler(database);\n } finally {\n closeConnection();\n }\n };\n\n if (options) {\n autoGenerateSchema =\n options.schema?.autoMigration === undefined ||\n options.schema?.autoMigration !== 'None';\n }\n\n const ensureSchemaExists = async (db: SQLiteConnection): Promise<void> => {\n if (!autoGenerateSchema) return Promise.resolve();\n\n if (!schemaMigrated) {\n await createEventStoreSchema(db);\n schemaMigrated = true;\n }\n\n return Promise.resolve();\n };\n\n return {\n async aggregateStream<State, EventType extends Event>(\n streamName: string,\n options: AggregateStreamOptions<\n State,\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n ): Promise<AggregateStreamResult<State>> {\n const { evolve, initialState, read } = options;\n\n const expectedStreamVersion = read?.expectedStreamVersion;\n\n let state = initialState();\n\n if (typeof streamName !== 'string') {\n throw new Error('Stream name is not string');\n }\n\n if (database == null) {\n database = createConnection();\n }\n\n const result = await withConnection((db) =>\n readStream<EventType>(db, streamName, options.read),\n );\n\n const currentStreamVersion = result.currentStreamVersion;\n\n assertExpectedVersionMatchesCurrent(\n currentStreamVersion,\n expectedStreamVersion,\n SQLiteEventStoreDefaultStreamVersion,\n );\n\n for (const event of result.events) {\n if (!event) continue;\n\n state = evolve(state, event);\n }\n\n return {\n currentStreamVersion: currentStreamVersion,\n state,\n streamExists: result.streamExists,\n };\n },\n\n readStream: async <EventType extends Event>(\n streamName: string,\n options?: ReadStreamOptions<BigIntStreamPosition>,\n ): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n > => withConnection((db) => readStream<EventType>(db, streamName, options)),\n\n appendToStream: async <EventType extends Event>(\n streamName: string,\n events: EventType[],\n options?: AppendToStreamOptions,\n ): Promise<AppendToStreamResultWithGlobalPosition> => {\n if (database == null) {\n database = createConnection();\n }\n\n // TODO: This has to be smarter when we introduce urn-based resolution\n const [firstPart, ...rest] = streamName.split('-');\n\n const streamType =\n firstPart && rest.length > 0 ? 
firstPart : 'emt:unknown';\n\n const appendResult = await withConnection((db) =>\n appendToStream(db, streamName, streamType, events, {\n ...options,\n onBeforeCommit: async (messages, context) => {\n if (inlineProjections.length > 0)\n await handleProjections({\n projections: inlineProjections,\n events: messages,\n ...context,\n });\n\n if (onBeforeCommitHook) await onBeforeCommitHook(messages, context);\n },\n }),\n );\n\n if (!appendResult.success)\n throw new ExpectedVersionConflictError<bigint>(\n -1n, //TODO: Return actual version in case of error\n options?.expectedStreamVersion ?? NO_CONCURRENCY_CHECK,\n );\n\n return {\n nextExpectedStreamVersion: appendResult.nextStreamPosition,\n lastEventGlobalPosition: appendResult.lastGlobalPosition,\n createdNewStream:\n appendResult.nextStreamPosition >= BigInt(events.length),\n };\n },\n consumer: <ConsumerEventType extends Event = Event>(\n options?: SQLiteEventStoreConsumerConfig<ConsumerEventType>,\n ): SQLiteEventStoreConsumer<ConsumerEventType> =>\n sqliteEventStoreConsumer<ConsumerEventType>({\n ...(options ?? {}),\n fileName,\n db: database ?? undefined,\n }),\n };\n};\n","import {\n JSONParser,\n type CombinedReadEventMetadata,\n type Event,\n type ReadEvent,\n type ReadEventMetadataWithGlobalPosition,\n type ReadStreamOptions,\n type ReadStreamResult,\n} from '@event-driven-io/emmett';\nimport { type SQLiteConnection } from '../../connection';\nimport { SQLiteEventStoreDefaultStreamVersion } from '../SQLiteEventStore';\nimport { defaultTag, messagesTable } from './typing';\n\ntype ReadStreamSqlResult = {\n stream_position: string;\n message_data: string;\n message_metadata: string;\n message_schema_version: string;\n message_type: string;\n message_id: string;\n global_position: string;\n created: string;\n};\n\nexport const readStream = async <EventType extends Event>(\n db: SQLiteConnection,\n streamId: string,\n options?: ReadStreamOptions & { partition?: string },\n): Promise<\n ReadStreamResult<EventType, ReadEventMetadataWithGlobalPosition>\n> => {\n const fromCondition: string =\n options && 'from' in options\n ? `AND stream_position >= ${options.from}`\n : '';\n\n const to = Number(\n options && 'to' in options\n ? options.to\n : options && 'maxCount' in options && options.maxCount\n ? options.from + options.maxCount\n : NaN,\n );\n\n const toCondition = !isNaN(to) ? `AND stream_position <= ${to}` : '';\n\n const results = await db.query<ReadStreamSqlResult>(\n `SELECT stream_id, stream_position, global_position, message_data, message_metadata, message_schema_version, message_type, message_id\n FROM ${messagesTable.name}\n WHERE stream_id = ? AND partition = ? AND is_archived = FALSE ${fromCondition} ${toCondition}`,\n [streamId, options?.partition ?? defaultTag],\n );\n\n const messages: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>[] =\n results.map((row) => {\n const rawEvent = {\n type: row.message_type,\n data: JSONParser.parse(row.message_data),\n metadata: JSONParser.parse(row.message_metadata),\n } as unknown as EventType;\n\n const metadata: ReadEventMetadataWithGlobalPosition = {\n ...('metadata' in rawEvent ? (rawEvent.metadata ?? {}) : {}),\n messageId: row.message_id,\n streamName: streamId,\n streamPosition: BigInt(row.stream_position),\n globalPosition: BigInt(row.global_position),\n };\n\n return {\n ...rawEvent,\n kind: 'Event',\n metadata: metadata as CombinedReadEventMetadata<\n EventType,\n ReadEventMetadataWithGlobalPosition\n >,\n };\n });\n\n return messages.length > 0\n ? 
{\n currentStreamVersion:\n messages[messages.length - 1]!.metadata.streamPosition,\n events: messages,\n streamExists: true,\n }\n : {\n currentStreamVersion: SQLiteEventStoreDefaultStreamVersion,\n events: [],\n streamExists: false,\n };\n};\n","import { isSQLiteError, type SQLiteConnection } from '../../connection';\nimport { sql } from './tables';\nimport { defaultTag, subscriptionsTable } from './typing';\nimport { singleOrNull } from './utils';\n\n// for more infos see the postgresql stored procedure version\nasync function storeSubscriptionCheckpointSQLite(\n db: SQLiteConnection,\n processorId: string,\n version: number,\n position: bigint | null,\n checkPosition: bigint | null,\n partition: string,\n): Promise<0 | 1 | 2> {\n if (checkPosition !== null) {\n const updateResult = await db.command(\n sql(`\n UPDATE ${subscriptionsTable.name}\n SET last_processed_position = ?\n WHERE subscription_id = ? \n AND last_processed_position = ? \n AND partition = ?\n `),\n [position!.toString(), processorId, checkPosition.toString(), partition],\n );\n if (updateResult.changes > 0) {\n return 1;\n } else {\n const current_position = await singleOrNull(\n db.query<{ last_processed_position: bigint }>(\n sql(\n `SELECT last_processed_position FROM ${subscriptionsTable.name} \n WHERE subscription_id = ? AND partition = ?`,\n ),\n [processorId, partition],\n ),\n );\n\n if (current_position?.last_processed_position === position) {\n return 0;\n } else if (\n position !== null &&\n current_position !== null &&\n current_position?.last_processed_position > position\n ) {\n return 2;\n } else {\n return 2;\n }\n }\n } else {\n try {\n await db.command(\n sql(\n `INSERT INTO ${subscriptionsTable.name} (subscription_id, version, last_processed_position, partition) VALUES (?, ?, ?, ?)`,\n ),\n [processorId, version, position!.toString(), partition],\n );\n return 1;\n } catch (err) {\n if (!(isSQLiteError(err) && (err.errno === 19 || err.errno === 2067))) {\n throw err;\n }\n\n const current = await singleOrNull(\n db.query<{ last_processed_position: bigint }>(\n sql(\n `SELECT last_processed_position FROM ${subscriptionsTable.name} WHERE subscription_id = ? AND partition = ?`,\n ),\n [processorId, partition],\n ),\n );\n if (current?.last_processed_position === position) {\n return 0;\n } else {\n return 2;\n }\n }\n }\n}\n\nexport type StoreLastProcessedProcessorPositionResult<\n Position extends bigint | null = bigint,\n> =\n | {\n success: true;\n newPosition: Position;\n }\n | { success: false; reason: 'IGNORED' | 'MISMATCH' };\n\nexport async function storeProcessorCheckpoint(\n db: SQLiteConnection,\n options: {\n processorId: string;\n version: number | undefined;\n newPosition: bigint | null;\n lastProcessedPosition: bigint | null;\n partition?: string;\n },\n): Promise<StoreLastProcessedProcessorPositionResult<bigint | null>> {\n try {\n const result = await storeSubscriptionCheckpointSQLite(\n db,\n options.processorId,\n options.version ?? 1,\n options.newPosition,\n options.lastProcessedPosition,\n options.partition ?? defaultTag,\n );\n\n return result === 1\n ? { success: true, newPosition: options.newPosition }\n : { success: false, reason: result === 0 ? 'IGNORED' : 'MISMATCH' };\n } catch (error) {\n console.log(error);\n throw error;\n }\n}\n"]}
|
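
The `sourcesContent` embedded in the map above carries the TypeScript sources for the SQLite event store, its consumer/processor polling loop, and the projection helpers shipped in this version. For orientation, below is a minimal usage sketch based only on the types visible in those sources. It is an illustration, not part of this diff: the database file name, processor id, and logged fields are assumptions, and the root import of `getSQLiteEventStore` is assumed from the bundled `index.cjs` rather than confirmed by the package docs.

```ts
import { getSQLiteEventStore } from '@event-driven-io/emmett-sqlite';

// Illustrative file name; per SQLiteEventStoreOptions, `fileName` may also be
// one of the in-memory database constants or `undefined` (which falls back
// to the in-memory database).
const eventStore = getSQLiteEventStore({ fileName: './events.db' });

// A consumer pulls messages in batches and fans them out to registered
// processors (defaults visible above: batch size 100, pulling every 50 ms).
const consumer = eventStore.consumer();

consumer.processor({
  processorId: 'sample-processor', // assumed id; required by GenericSQLiteProcessorOptions
  startFrom: 'BEGINNING',          // or 'END', 'CURRENT', or { globalPosition: 42n }
  eachMessage: async (event) => {
    // Messages carry ReadEventMetadataWithGlobalPosition; the processor
    // checkpoint is stored per handled message inside a transaction.
    console.log(event.type, event.metadata.globalPosition);
  },
});

await consumer.start(); // rejects if no processor was registered
// ...
await consumer.close(); // stops pulling and closes the connection
```

Note the start-from semantics encoded in `zipSQLiteEventStoreMessageBatchPullerStartFrom`: if any registered processor asks for `'BEGINNING'` (or returns no position), the whole consumer starts from the beginning; only if every processor asks for `'END'` does it start at the end; otherwise it starts at the lowest explicit global position.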