@ddd-ts/event-sourcing-inmemory 0.0.0-compute-timeout-on-process.4 → 0.0.0-compute-timeout-on-process.6

This diff shows the content changes between two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2023 Aetherall
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/dist/in-memory.event-lake.aggregate-store.js CHANGED
@@ -1,6 +1,6 @@
  import { InMemoryEventLakeStorageLayer } from "./in-memory.event-lake.storage-layer.js";
- import { EventLakeStore } from "@ddd-ts/core";
- import { InMemoryStore, InMemoryTransactionPerformer } from "@ddd-ts/store-inmemory";
+ import { EventLakeStore, EventOf, EventSourced, IEventSourced, IIdentifiable, ISerializer, Identifiable, LakeId } from "@ddd-ts/core";
+ import { InMemoryDatabase, InMemoryStore, InMemoryTransaction, InMemoryTransactionPerformer } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.event-lake.aggregate-store.ts
  const MakeInMemoryEventLakeAggregateStore = (AGGREGATE) => {
package/dist/in-memory.event-lake.storage-layer.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { EventId, EventLakeStorageLayer, ISerializedChange, ISerializedFact, LakeId } from "@ddd-ts/core";
- import { InMemoryDatabase, InMemoryTransaction } from "@ddd-ts/store-inmemory";
  import { ISerializedSavedChange as ISerializedSavedChange$1 } from "@ddd-ts/core/dist/interfaces/es-event";
+ import { InMemoryDatabase, InMemoryTransaction } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.event-lake.storage-layer.d.ts
  declare class InMemoryEventLakeStorageLayer implements EventLakeStorageLayer {
package/dist/in-memory.event-lake.storage-layer.js CHANGED
@@ -1,3 +1,7 @@
+ import { EventId, EventLakeStorageLayer, LakeId } from "@ddd-ts/core";
+ import { ISerializedSavedChange as ISerializedSavedChange$1 } from "@ddd-ts/core/dist/interfaces/es-event";
+ import { InMemoryDatabase, InMemoryTransaction } from "@ddd-ts/store-inmemory";
+
  //#region src/in-memory.event-lake.storage-layer.ts
  var InMemoryEventLakeStorageLayer = class {
  constructor(database) {
package/dist/in-memory.event-lake.storage-layer.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"in-memory.event-lake.storage-layer.js","names":[],"sources":["../src/in-memory.event-lake.storage-layer.ts"],"sourcesContent":["import {\n LakeId,\n type ISerializedChange,\n type ISerializedFact,\n EventLakeStorageLayer,\n EventId,\n} from \"@ddd-ts/core\";\nimport { ISerializedSavedChange } from \"@ddd-ts/core/dist/interfaces/es-event\";\nimport { InMemoryDatabase, InMemoryTransaction } from \"@ddd-ts/store-inmemory\";\n\nexport class InMemoryEventLakeStorageLayer implements EventLakeStorageLayer {\n constructor(public readonly database: InMemoryDatabase) {}\n\n async append(\n lakeId: LakeId,\n changes: ISerializedChange[],\n trx: InMemoryTransaction,\n ) {\n const result: ISerializedSavedChange[] = [];\n\n let revision = 0;\n for (const change of changes) {\n const ref = `${lakeId.serialize()}/${change.id}`;\n\n const stored = {\n ...change,\n ref: ref,\n revision: revision,\n };\n\n this.database.save(\n lakeId.serialize(),\n change.id,\n stored,\n trx.transaction,\n );\n result.push({\n ...change,\n ref: ref,\n revision: revision,\n occurredAt: undefined,\n });\n revision++;\n }\n return result;\n }\n\n async *read(\n lakeId: LakeId,\n startAfter?: EventId,\n endAt?: EventId,\n ): AsyncIterable<ISerializedFact> {\n const events = this.database.loadAll(lakeId.serialize());\n\n const sorted = events.sort((a, b) =>\n a.data.savedAt === b.data.savedAt\n ? a.data.data.revision - b.data.data.revision\n : a.data.savedAt < b.data.savedAt\n ? -1\n : 1,\n );\n\n const facts = sorted.map((e) => e.data);\n\n let started = !startAfter;\n\n for (const fact of facts) {\n if (startAfter && fact.data.id === startAfter.serialize()) {\n started = true;\n continue;\n }\n if (endAt && fact.data.id === endAt.serialize()) {\n yield { ...fact.data, occurredAt: fact.data.savedAt };\n break;\n }\n if (started) {\n yield { ...fact.data, occurredAt: fact.data.savedAt };\n }\n }\n }\n}\n"],"mappings":";AAUA,IAAa,gCAAb,MAA4E;CAC1E,YAAY,AAAgB,UAA4B;EAA5B;;CAE5B,MAAM,OACJ,QACA,SACA,KACA;EACA,MAAM,SAAmC,EAAE;EAE3C,IAAI,WAAW;AACf,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,MAAM,GAAG,OAAO,WAAW,CAAC,GAAG,OAAO;GAE5C,MAAM,SAAS;IACb,GAAG;IACE;IACK;IACX;AAED,QAAK,SAAS,KACZ,OAAO,WAAW,EAClB,OAAO,IACP,QACA,IAAI,YACL;AACD,UAAO,KAAK;IACV,GAAG;IACE;IACK;IACV,YAAY;IACb,CAAC;AACF;;AAEF,SAAO;;CAGT,OAAO,KACL,QACA,YACA,OACgC;EAWhC,MAAM,QAVS,KAAK,SAAS,QAAQ,OAAO,WAAW,CAAC,CAElC,MAAM,GAAG,MAC7B,EAAE,KAAK,YAAY,EAAE,KAAK,UACtB,EAAE,KAAK,KAAK,WAAW,EAAE,KAAK,KAAK,WACnC,EAAE,KAAK,UAAU,EAAE,KAAK,UACtB,KACA,EACP,CAEoB,KAAK,MAAM,EAAE,KAAK;EAEvC,IAAI,UAAU,CAAC;AAEf,OAAK,MAAM,QAAQ,OAAO;AACxB,OAAI,cAAc,KAAK,KAAK,OAAO,WAAW,WAAW,EAAE;AACzD,cAAU;AACV;;AAEF,OAAI,SAAS,KAAK,KAAK,OAAO,MAAM,WAAW,EAAE;AAC/C,UAAM;KAAE,GAAG,KAAK;KAAM,YAAY,KAAK,KAAK;KAAS;AACrD;;AAEF,OAAI,QACF,OAAM;IAAE,GAAG,KAAK;IAAM,YAAY,KAAK,KAAK;IAAS"}
+ {"version":3,"file":"in-memory.event-lake.storage-layer.js","names":[],"sources":["../src/in-memory.event-lake.storage-layer.ts"],"sourcesContent":["import {\n LakeId,\n type ISerializedChange,\n type ISerializedFact,\n EventLakeStorageLayer,\n EventId,\n} from \"@ddd-ts/core\";\nimport { ISerializedSavedChange } from \"@ddd-ts/core/dist/interfaces/es-event\";\nimport { InMemoryDatabase, InMemoryTransaction } from \"@ddd-ts/store-inmemory\";\n\nexport class InMemoryEventLakeStorageLayer implements EventLakeStorageLayer {\n constructor(public readonly database: InMemoryDatabase) {}\n\n async append(\n lakeId: LakeId,\n changes: ISerializedChange[],\n trx: InMemoryTransaction,\n ) {\n const result: ISerializedSavedChange[] = [];\n\n let revision = 0;\n for (const change of changes) {\n const ref = `${lakeId.serialize()}/${change.id}`;\n\n const stored = {\n ...change,\n ref: ref,\n revision: revision,\n };\n\n this.database.save(\n lakeId.serialize(),\n change.id,\n stored,\n trx.transaction,\n );\n result.push({\n ...change,\n ref: ref,\n revision: revision,\n occurredAt: undefined,\n });\n revision++;\n }\n return result;\n }\n\n async *read(\n lakeId: LakeId,\n startAfter?: EventId,\n endAt?: EventId,\n ): AsyncIterable<ISerializedFact> {\n const events = this.database.loadAll(lakeId.serialize());\n\n const sorted = events.sort((a, b) =>\n a.data.savedAt === b.data.savedAt\n ? a.data.data.revision - b.data.data.revision\n : a.data.savedAt < b.data.savedAt\n ? -1\n : 1,\n );\n\n const facts = sorted.map((e) => e.data);\n\n let started = !startAfter;\n\n for (const fact of facts) {\n if (startAfter && fact.data.id === startAfter.serialize()) {\n started = true;\n continue;\n }\n if (endAt && fact.data.id === endAt.serialize()) {\n yield { ...fact.data, occurredAt: fact.data.savedAt };\n break;\n }\n if (started) {\n yield { ...fact.data, occurredAt: fact.data.savedAt };\n }\n }\n }\n}\n"],"mappings":";;;;;AAUA,IAAa,gCAAb,MAA4E;CAC1E,YAAY,AAAgB,UAA4B;EAA5B;;CAE5B,MAAM,OACJ,QACA,SACA,KACA;EACA,MAAM,SAAmC,EAAE;EAE3C,IAAI,WAAW;AACf,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,MAAM,GAAG,OAAO,WAAW,CAAC,GAAG,OAAO;GAE5C,MAAM,SAAS;IACb,GAAG;IACE;IACK;IACX;AAED,QAAK,SAAS,KACZ,OAAO,WAAW,EAClB,OAAO,IACP,QACA,IAAI,YACL;AACD,UAAO,KAAK;IACV,GAAG;IACE;IACK;IACV,YAAY;IACb,CAAC;AACF;;AAEF,SAAO;;CAGT,OAAO,KACL,QACA,YACA,OACgC;EAWhC,MAAM,QAVS,KAAK,SAAS,QAAQ,OAAO,WAAW,CAAC,CAElC,MAAM,GAAG,MAC7B,EAAE,KAAK,YAAY,EAAE,KAAK,UACtB,EAAE,KAAK,KAAK,WAAW,EAAE,KAAK,KAAK,WACnC,EAAE,KAAK,UAAU,EAAE,KAAK,UACtB,KACA,EACP,CAEoB,KAAK,MAAM,EAAE,KAAK;EAEvC,IAAI,UAAU,CAAC;AAEf,OAAK,MAAM,QAAQ,OAAO;AACxB,OAAI,cAAc,KAAK,KAAK,OAAO,WAAW,WAAW,EAAE;AACzD,cAAU;AACV;;AAEF,OAAI,SAAS,KAAK,KAAK,OAAO,MAAM,WAAW,EAAE;AAC/C,UAAM;KAAE,GAAG,KAAK;KAAM,YAAY,KAAK,KAAK;KAAS;AACrD;;AAEF,OAAI,QACF,OAAM;IAAE,GAAG,KAAK;IAAM,YAAY,KAAK,KAAK;IAAS"}
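Note: the sourcesContent embedded in this map shows the whole lake storage layer. Its read path sorts facts by savedAt, breaking ties on revision, then windows the result with an exclusive startAfter and an inclusive endAt. A minimal standalone sketch of that windowing logic, using a hypothetical Fact shape rather than the package's LakeId/EventId value objects:

    type Fact = { id: string; savedAt: number; revision: number };

    function* window(facts: Fact[], startAfter?: string, endAt?: string) {
      // Same ordering as the storage layer: savedAt first, revision breaks ties.
      const sorted = [...facts].sort((a, b) =>
        a.savedAt === b.savedAt ? a.revision - b.revision : a.savedAt < b.savedAt ? -1 : 1,
      );
      let started = !startAfter; // no cursor: emit from the beginning
      for (const fact of sorted) {
        if (startAfter && fact.id === startAfter) { started = true; continue; } // skip the cursor itself
        if (endAt && fact.id === endAt) { yield fact; break; } // endAt is emitted, then iteration stops
        if (started) yield fact;
      }
    }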
package/dist/in-memory.event-lake.store.js CHANGED
@@ -1,5 +1,5 @@
  import { InMemoryEventLakeStorageLayer } from "./in-memory.event-lake.storage-layer.js";
- import { EventLakeStore } from "@ddd-ts/core";
+ import { EventLakeStore, IEsEvent, ISerializer } from "@ddd-ts/core";

  //#region src/in-memory.event-lake.store.ts
  var InMemoryEventLakeStore = class extends EventLakeStore {
package/dist/in-memory.event-stream.aggregate-store.js CHANGED
@@ -1,7 +1,7 @@
  import { InMemoryEventStreamStorageLayer } from "./in-memory.event-stream.storage-layer.js";
  import { InMemorySnapshotter } from "./in-memory.snapshotter.js";
- import { EventStreamAggregateStore, EventStreamStore, StreamId } from "@ddd-ts/core";
- import { InMemoryTransactionPerformer } from "@ddd-ts/store-inmemory";
+ import { EventOf, EventSourced, EventStreamAggregateStore, EventStreamStore, EventsOf, IEventSourced, IIdentifiable, ISerializer, StreamId } from "@ddd-ts/core";
+ import { InMemoryDatabase, InMemoryTransactionPerformer } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.event-stream.aggregate-store.ts
  const MakeInMemoryEventStreamAggregateStore = (AGGREGATE) => {
package/dist/in-memory.event-stream.storage-layer.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { EventStreamStorageLayer, ISerializedChange, ISerializedFact, StreamId } from "@ddd-ts/core";
- import { InMemoryDatabase, InMemoryTransaction } from "@ddd-ts/store-inmemory";
  import { ISerializedSavedChange as ISerializedSavedChange$1 } from "@ddd-ts/core/dist/interfaces/es-event";
+ import { InMemoryDatabase, InMemoryTransaction } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.event-stream.storage-layer.d.ts
  declare class InMemoryEventStreamStorageLayer implements EventStreamStorageLayer {
package/dist/in-memory.event-stream.storage-layer.js CHANGED
@@ -1,4 +1,6 @@
- import { ConcurrencyError } from "@ddd-ts/core";
+ import { ConcurrencyError, EventStreamStorageLayer, StreamId } from "@ddd-ts/core";
+ import { ISerializedSavedChange as ISerializedSavedChange$1 } from "@ddd-ts/core/dist/interfaces/es-event";
+ import { InMemoryDatabase, InMemoryTransaction } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.event-stream.storage-layer.ts
  var InMemoryEventStreamStorageLayer = class {
package/dist/in-memory.event-stream.storage-layer.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"in-memory.event-stream.storage-layer.js","names":[],"sources":["../src/in-memory.event-stream.storage-layer.ts"],"sourcesContent":["import {\n StreamId,\n type ISerializedChange,\n type ISerializedFact,\n EventStreamStorageLayer,\n ConcurrencyError,\n} from \"@ddd-ts/core\";\nimport { ISerializedSavedChange } from \"@ddd-ts/core/dist/interfaces/es-event\";\nimport { InMemoryDatabase, InMemoryTransaction } from \"@ddd-ts/store-inmemory\";\n\nexport class InMemoryEventStreamStorageLayer\n implements EventStreamStorageLayer\n{\n constructor(public readonly database: InMemoryDatabase) {}\n\n isLocalRevisionOutdatedError(error: unknown): boolean {\n return error instanceof ConcurrencyError;\n }\n\n async append(\n streamId: StreamId,\n changes: ISerializedChange[],\n expectedRevision: number,\n trx: InMemoryTransaction,\n ) {\n const result: ISerializedSavedChange[] = [];\n\n let revision = expectedRevision + 1;\n\n for (const change of changes) {\n const ref = `${streamId.serialize()}/${revision}`;\n\n const stored = {\n ...change,\n ref: ref,\n revision: revision,\n };\n\n this.database.create(\n streamId.serialize(),\n `${revision}`,\n stored,\n trx.transaction,\n );\n\n result.push({\n ...change,\n ref: ref,\n revision: revision,\n occurredAt: undefined,\n });\n\n revision++;\n }\n return result;\n }\n\n async *read(\n streamId: StreamId,\n from?: number,\n ): AsyncIterable<ISerializedFact> {\n const events = this.database.loadAll(streamId.serialize());\n\n const sorted = events.sort(\n (a, b) => a.data.data.revision - b.data.data.revision,\n );\n\n const facts = sorted.map((e) => ({\n ...e.data.data,\n occurredAt: e.data.savedAt,\n }));\n\n yield* facts.slice(from !== undefined ? from : 0);\n }\n}\n"],"mappings":";;;AAUA,IAAa,kCAAb,MAEA;CACE,YAAY,AAAgB,UAA4B;EAA5B;;CAE5B,6BAA6B,OAAyB;AACpD,SAAO,iBAAiB;;CAG1B,MAAM,OACJ,UACA,SACA,kBACA,KACA;EACA,MAAM,SAAmC,EAAE;EAE3C,IAAI,WAAW,mBAAmB;AAElC,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,MAAM,GAAG,SAAS,WAAW,CAAC,GAAG;GAEvC,MAAM,SAAS;IACb,GAAG;IACE;IACK;IACX;AAED,QAAK,SAAS,OACZ,SAAS,WAAW,EACpB,GAAG,YACH,QACA,IAAI,YACL;AAED,UAAO,KAAK;IACV,GAAG;IACE;IACK;IACV,YAAY;IACb,CAAC;AAEF;;AAEF,SAAO;;CAGT,OAAO,KACL,UACA,MACgC;AAYhC,SAXe,KAAK,SAAS,QAAQ,SAAS,WAAW,CAAC,CAEpC,MACnB,GAAG,MAAM,EAAE,KAAK,KAAK,WAAW,EAAE,KAAK,KAAK,SAC9C,CAEoB,KAAK,OAAO;GAC/B,GAAG,EAAE,KAAK;GACV,YAAY,EAAE,KAAK;GACpB,EAAE,CAEU,MAAM,SAAS,SAAY,OAAO,EAAE"}
+ {"version":3,"file":"in-memory.event-stream.storage-layer.js","names":[],"sources":["../src/in-memory.event-stream.storage-layer.ts"],"sourcesContent":["import {\n StreamId,\n type ISerializedChange,\n type ISerializedFact,\n EventStreamStorageLayer,\n ConcurrencyError,\n} from \"@ddd-ts/core\";\nimport { ISerializedSavedChange } from \"@ddd-ts/core/dist/interfaces/es-event\";\nimport { InMemoryDatabase, InMemoryTransaction } from \"@ddd-ts/store-inmemory\";\n\nexport class InMemoryEventStreamStorageLayer\n implements EventStreamStorageLayer\n{\n constructor(public readonly database: InMemoryDatabase) {}\n\n isLocalRevisionOutdatedError(error: unknown): boolean {\n return error instanceof ConcurrencyError;\n }\n\n async append(\n streamId: StreamId,\n changes: ISerializedChange[],\n expectedRevision: number,\n trx: InMemoryTransaction,\n ) {\n const result: ISerializedSavedChange[] = [];\n\n let revision = expectedRevision + 1;\n\n for (const change of changes) {\n const ref = `${streamId.serialize()}/${revision}`;\n\n const stored = {\n ...change,\n ref: ref,\n revision: revision,\n };\n\n this.database.create(\n streamId.serialize(),\n `${revision}`,\n stored,\n trx.transaction,\n );\n\n result.push({\n ...change,\n ref: ref,\n revision: revision,\n occurredAt: undefined,\n });\n\n revision++;\n }\n return result;\n }\n\n async *read(\n streamId: StreamId,\n from?: number,\n ): AsyncIterable<ISerializedFact> {\n const events = this.database.loadAll(streamId.serialize());\n\n const sorted = events.sort(\n (a, b) => a.data.data.revision - b.data.data.revision,\n );\n\n const facts = sorted.map((e) => ({\n ...e.data.data,\n occurredAt: e.data.savedAt,\n }));\n\n yield* facts.slice(from !== undefined ? from : 0);\n }\n}\n"],"mappings":";;;;;AAUA,IAAa,kCAAb,MAEA;CACE,YAAY,AAAgB,UAA4B;EAA5B;;CAE5B,6BAA6B,OAAyB;AACpD,SAAO,iBAAiB;;CAG1B,MAAM,OACJ,UACA,SACA,kBACA,KACA;EACA,MAAM,SAAmC,EAAE;EAE3C,IAAI,WAAW,mBAAmB;AAElC,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,MAAM,GAAG,SAAS,WAAW,CAAC,GAAG;GAEvC,MAAM,SAAS;IACb,GAAG;IACE;IACK;IACX;AAED,QAAK,SAAS,OACZ,SAAS,WAAW,EACpB,GAAG,YACH,QACA,IAAI,YACL;AAED,UAAO,KAAK;IACV,GAAG;IACE;IACK;IACV,YAAY;IACb,CAAC;AAEF;;AAEF,SAAO;;CAGT,OAAO,KACL,UACA,MACgC;AAYhC,SAXe,KAAK,SAAS,QAAQ,SAAS,WAAW,CAAC,CAEpC,MACnB,GAAG,MAAM,EAAE,KAAK,KAAK,WAAW,EAAE,KAAK,KAAK,SAC9C,CAEoB,KAAK,OAAO;GAC/B,GAAG,EAAE,KAAK;GACV,YAAY,EAAE,KAAK;GACpB,EAAE,CAEU,MAAM,SAAS,SAAY,OAAO,EAAE"}
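Note: the stream layer in this map derives each stored revision from the caller's expectedRevision and writes with database.create, so a concurrent writer targeting the same revision key fails — which is what isLocalRevisionOutdatedError detects via instanceof ConcurrencyError. A sketch of that insert-only contract, with a plain Map standing in for InMemoryDatabase and a local stand-in error class (not the package's own):

    class ConcurrencyError extends Error {}

    const stream = new Map<string, unknown>(); // one collection, keyed by revision
    function create(id: string, doc: unknown) {
      if (stream.has(id)) throw new ConcurrencyError(`revision ${id} already written`);
      stream.set(id, doc);
    }

    function append(changes: object[], expectedRevision: number) {
      let revision = expectedRevision + 1; // a fresh stream is appended with expectedRevision -1
      for (const change of changes) {
        create(`${revision}`, { ...change, revision }); // throws if another writer won the race
        revision++;
      }
    }

    append([{ name: "Opened" }], -1); // writes revision 0
    // append([{ name: "Opened" }], -1); // would throw ConcurrencyError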
package/dist/in-memory.event-stream.store.js CHANGED
@@ -1,5 +1,5 @@
  import { InMemoryEventStreamStorageLayer } from "./in-memory.event-stream.storage-layer.js";
- import { EventStreamStore } from "@ddd-ts/core";
+ import { EventStreamStore, IEsEvent, ISerializer } from "@ddd-ts/core";

  //#region src/in-memory.event-stream.store.ts
  var InMemoryEventStreamStore = class extends EventStreamStore {
package/dist/in-memory.projected-stream.reader.js CHANGED
@@ -1,5 +1,6 @@
  import { InMemoryProjectedStreamStorageLayer } from "./in-memory.projected-stream.storage-layer.js";
- import { ProjectedStreamReader } from "@ddd-ts/core";
+ import { IEsEvent, ISerializer, ProjectedStreamReader } from "@ddd-ts/core";
+ import { InMemoryDatabase } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.projected-stream.reader.ts
  var InMemoryProjectedStreamReader = class extends ProjectedStreamReader {
package/dist/in-memory.projected-stream.reader.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"in-memory.projected-stream.reader.js","names":[],"sources":["../src/in-memory.projected-stream.reader.ts"],"sourcesContent":["import { IEsEvent, ISerializer, ProjectedStreamReader } from \"@ddd-ts/core\";\nimport { InMemoryDatabase } from \"@ddd-ts/store-inmemory\";\nimport { InMemoryProjectedStreamStorageLayer } from \"./in-memory.projected-stream.storage-layer\";\n\nexport class InMemoryProjectedStreamReader<\n Events extends IEsEvent,\n> extends ProjectedStreamReader<Events> {\n constructor(database: InMemoryDatabase, serializer: ISerializer<Events>) {\n super(new InMemoryProjectedStreamStorageLayer(database), serializer);\n }\n}\n"],"mappings":";;;;AAIA,IAAa,gCAAb,cAEU,sBAA8B;CACtC,YAAY,UAA4B,YAAiC;AACvE,QAAM,IAAI,oCAAoC,SAAS,EAAE,WAAW"}
+ {"version":3,"file":"in-memory.projected-stream.reader.js","names":[],"sources":["../src/in-memory.projected-stream.reader.ts"],"sourcesContent":["import { IEsEvent, ISerializer, ProjectedStreamReader } from \"@ddd-ts/core\";\nimport { InMemoryDatabase } from \"@ddd-ts/store-inmemory\";\nimport { InMemoryProjectedStreamStorageLayer } from \"./in-memory.projected-stream.storage-layer\";\n\nexport class InMemoryProjectedStreamReader<\n Events extends IEsEvent,\n> extends ProjectedStreamReader<Events> {\n constructor(database: InMemoryDatabase, serializer: ISerializer<Events>) {\n super(new InMemoryProjectedStreamStorageLayer(database), serializer);\n }\n}\n"],"mappings":";;;;;AAIA,IAAa,gCAAb,cAEU,sBAA8B;CACtC,YAAY,UAA4B,YAAiC;AACvE,QAAM,IAAI,oCAAoC,SAAS,EAAE,WAAW"}
package/dist/in-memory.projected-stream.storage-layer.js CHANGED
@@ -1,4 +1,5 @@
- import { Cursor, EventId, LakeId, StreamSource } from "@ddd-ts/core";
+ import { Cursor, EventId, ISerializedFact, ISerializedSavedChange, LakeId, LakeSource, ProjectedStream, ProjectedStreamStorageLayer, StreamSource } from "@ddd-ts/core";
+ import { InMemoryDatabase } from "@ddd-ts/store-inmemory";
  import { MicrosecondTimestamp } from "@ddd-ts/shape";

  //#region src/in-memory.projected-stream.storage-layer.ts
package/dist/in-memory.projected-stream.storage-layer.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"in-memory.projected-stream.storage-layer.js","names":[],"sources":["../src/in-memory.projected-stream.storage-layer.ts"],"sourcesContent":["import {\n EventId,\n ISerializedFact,\n LakeId,\n LakeSource,\n ProjectedStream,\n ProjectedStreamStorageLayer,\n StreamSource,\n Cursor,\n ISerializedSavedChange,\n} from \"@ddd-ts/core\";\nimport { InMemoryDatabase } from \"@ddd-ts/store-inmemory\";\nimport { MicrosecondTimestamp } from \"@ddd-ts/shape\";\n\nexport class InMemoryStreamSourceFilter {\n constructor(private readonly database: InMemoryDatabase) {}\n\n *all(source: StreamSource, shard: string) {\n const cols = [...this.database.storage.collections.keys()];\n const streams = cols.filter((it) => it.startsWith(source.aggregateType));\n for (const stream of streams) {\n yield* this.database\n .loadAll(stream)\n .filter((event) => {\n if (!source.events.includes(event.data.data.name)) return false;\n const payload = event.data.data.payload;\n if (!payload) return false;\n return payload[source.shardKey] === shard;\n })\n .map((event) => event.data);\n }\n }\n}\n\nexport class InMemoryLakeSourceFilter {\n constructor(private readonly database: InMemoryDatabase) {}\n\n *all(source: LakeSource, shard: string) {\n const lakeId = LakeId.from(source.shardType, shard);\n yield* this.database\n .loadAll(lakeId.serialize())\n .filter((event) => {\n if (!source.events.includes(event.data.data.name)) return false;\n const payload = event.data.data.payload;\n if (!payload) return false;\n return payload[source.shardKey] === shard;\n })\n .map((event) => event.data);\n }\n}\n\nexport class InMemoryProjectedStreamStorageLayer\n implements ProjectedStreamStorageLayer\n{\n constructor(private readonly database: InMemoryDatabase) {}\n\n async getCursor(\n savedChange: ISerializedSavedChange,\n ): Promise<Cursor | undefined> {\n const [collection, id] = savedChange.ref.split(\"/\");\n if (!collection || !id) return undefined;\n\n const raw = this.database.storage.getCollection(collection).getRaw(id);\n\n if (!raw) return undefined;\n return new Cursor({\n ref: savedChange.ref,\n eventId: new EventId(savedChange.id),\n occurredAt: MicrosecondTimestamp.fromMicroseconds(raw.savedAt),\n revision: savedChange.revision,\n });\n }\n\n async get(cursor: Cursor): Promise<ISerializedFact | undefined> {\n const [collection, id] = cursor.ref.split(\"/\");\n if (!collection || !id) return undefined;\n\n const raw = this.database.storage.getCollection(collection).getRaw(id);\n\n if (!raw) return undefined;\n return { ...raw.data, occurredAt: raw.savedAt };\n }\n\n async *read(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n ) {\n const sources = projectedStream.sources.map((source) => {\n if (source instanceof StreamSource) {\n return new InMemoryStreamSourceFilter(this.database).all(source, shard);\n }\n return new InMemoryLakeSourceFilter(this.database).all(source, shard);\n });\n\n const all = sources\n .flatMap((source) => [...source])\n .sort((a, b) =>\n a.savedAt === b.savedAt\n ? a.data.revision - b.data.revision\n : a.savedAt > b.savedAt\n ? 1\n : -1,\n );\n\n let started = !startAfter?.ref;\n for (const fact of all) {\n if (startAfter && fact.data.ref === startAfter.ref) {\n started = true;\n continue;\n }\n if (endAt?.ref && fact.data.ref === endAt.ref) {\n yield { ...fact.data, occurredAt: fact.savedAt };\n break;\n }\n if (started) {\n yield { ...fact.data, occurredAt: fact.savedAt };\n }\n }\n }\n\n async slice(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n count?: number,\n ): Promise<ISerializedFact[]> {\n const stream = this.read(projectedStream, shard, startAfter, endAt);\n\n const result: ISerializedFact[] = [];\n\n let limit = count ?? Number.POSITIVE_INFINITY;\n for await (const fact of stream) {\n if (!limit--) break;\n result.push(fact);\n }\n\n return result;\n }\n}\n"],"mappings":";;;;AAcA,IAAa,6BAAb,MAAwC;CACtC,YAAY,AAAiB,UAA4B;EAA5B;;CAE7B,CAAC,IAAI,QAAsB,OAAe;EAExC,MAAM,UADO,CAAC,GAAG,KAAK,SAAS,QAAQ,YAAY,MAAM,CAAC,CACrC,QAAQ,OAAO,GAAG,WAAW,OAAO,cAAc,CAAC;AACxE,OAAK,MAAM,UAAU,QACnB,QAAO,KAAK,SACT,QAAQ,OAAO,CACf,QAAQ,UAAU;AACjB,OAAI,CAAC,OAAO,OAAO,SAAS,MAAM,KAAK,KAAK,KAAK,CAAE,QAAO;GAC1D,MAAM,UAAU,MAAM,KAAK,KAAK;AAChC,OAAI,CAAC,QAAS,QAAO;AACrB,UAAO,QAAQ,OAAO,cAAc;IACpC,CACD,KAAK,UAAU,MAAM,KAAK;;;AAKnC,IAAa,2BAAb,MAAsC;CACpC,YAAY,AAAiB,UAA4B;EAA5B;;CAE7B,CAAC,IAAI,QAAoB,OAAe;EACtC,MAAM,SAAS,OAAO,KAAK,OAAO,WAAW,MAAM;AACnD,SAAO,KAAK,SACT,QAAQ,OAAO,WAAW,CAAC,CAC3B,QAAQ,UAAU;AACjB,OAAI,CAAC,OAAO,OAAO,SAAS,MAAM,KAAK,KAAK,KAAK,CAAE,QAAO;GAC1D,MAAM,UAAU,MAAM,KAAK,KAAK;AAChC,OAAI,CAAC,QAAS,QAAO;AACrB,UAAO,QAAQ,OAAO,cAAc;IACpC,CACD,KAAK,UAAU,MAAM,KAAK;;;AAIjC,IAAa,sCAAb,MAEA;CACE,YAAY,AAAiB,UAA4B;EAA5B;;CAE7B,MAAM,UACJ,aAC6B;EAC7B,MAAM,CAAC,YAAY,MAAM,YAAY,IAAI,MAAM,IAAI;AACnD,MAAI,CAAC,cAAc,CAAC,GAAI,QAAO;EAE/B,MAAM,MAAM,KAAK,SAAS,QAAQ,cAAc,WAAW,CAAC,OAAO,GAAG;AAEtE,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,IAAI,OAAO;GAChB,KAAK,YAAY;GACjB,SAAS,IAAI,QAAQ,YAAY,GAAG;GACpC,YAAY,qBAAqB,iBAAiB,IAAI,QAAQ;GAC9D,UAAU,YAAY;GACvB,CAAC;;CAGJ,MAAM,IAAI,QAAsD;EAC9D,MAAM,CAAC,YAAY,MAAM,OAAO,IAAI,MAAM,IAAI;AAC9C,MAAI,CAAC,cAAc,CAAC,GAAI,QAAO;EAE/B,MAAM,MAAM,KAAK,SAAS,QAAQ,cAAc,WAAW,CAAC,OAAO,GAAG;AAEtE,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO;GAAE,GAAG,IAAI;GAAM,YAAY,IAAI;GAAS;;CAGjD,OAAO,KACL,iBACA,OACA,YACA,OACA;EAQA,MAAM,MAPU,gBAAgB,QAAQ,KAAK,WAAW;AACtD,OAAI,kBAAkB,aACpB,QAAO,IAAI,2BAA2B,KAAK,SAAS,CAAC,IAAI,QAAQ,MAAM;AAEzE,UAAO,IAAI,yBAAyB,KAAK,SAAS,CAAC,IAAI,QAAQ,MAAM;IACrE,CAGC,SAAS,WAAW,CAAC,GAAG,OAAO,CAAC,CAChC,MAAM,GAAG,MACR,EAAE,YAAY,EAAE,UACZ,EAAE,KAAK,WAAW,EAAE,KAAK,WACzB,EAAE,UAAU,EAAE,UACZ,IACA,GACP;EAEH,IAAI,UAAU,CAAC,YAAY;AAC3B,OAAK,MAAM,QAAQ,KAAK;AACtB,OAAI,cAAc,KAAK,KAAK,QAAQ,WAAW,KAAK;AAClD,cAAU;AACV;;AAEF,OAAI,OAAO,OAAO,KAAK,KAAK,QAAQ,MAAM,KAAK;AAC7C,UAAM;KAAE,GAAG,KAAK;KAAM,YAAY,KAAK;KAAS;AAChD;;AAEF,OAAI,QACF,OAAM;IAAE,GAAG,KAAK;IAAM,YAAY,KAAK;IAAS;;;CAKtD,MAAM,MACJ,iBACA,OACA,YACA,OACA,OAC4B;EAC5B,MAAM,SAAS,KAAK,KAAK,iBAAiB,OAAO,YAAY,MAAM;EAEnE,MAAM,SAA4B,EAAE;EAEpC,IAAI,QAAQ,SAAS,OAAO;AAC5B,aAAW,MAAM,QAAQ,QAAQ;AAC/B,OAAI,CAAC,QAAS;AACd,UAAO,KAAK,KAAK;;AAGnB,SAAO"}
+ {"version":3,"file":"in-memory.projected-stream.storage-layer.js","names":[],"sources":["../src/in-memory.projected-stream.storage-layer.ts"],"sourcesContent":["import {\n EventId,\n ISerializedFact,\n LakeId,\n LakeSource,\n ProjectedStream,\n ProjectedStreamStorageLayer,\n StreamSource,\n Cursor,\n ISerializedSavedChange,\n} from \"@ddd-ts/core\";\nimport { InMemoryDatabase } from \"@ddd-ts/store-inmemory\";\nimport { MicrosecondTimestamp } from \"@ddd-ts/shape\";\n\nexport class InMemoryStreamSourceFilter {\n constructor(private readonly database: InMemoryDatabase) {}\n\n *all(source: StreamSource, shard: string) {\n const cols = [...this.database.storage.collections.keys()];\n const streams = cols.filter((it) => it.startsWith(source.aggregateType));\n for (const stream of streams) {\n yield* this.database\n .loadAll(stream)\n .filter((event) => {\n if (!source.events.includes(event.data.data.name)) return false;\n const payload = event.data.data.payload;\n if (!payload) return false;\n return payload[source.shardKey] === shard;\n })\n .map((event) => event.data);\n }\n }\n}\n\nexport class InMemoryLakeSourceFilter {\n constructor(private readonly database: InMemoryDatabase) {}\n\n *all(source: LakeSource, shard: string) {\n const lakeId = LakeId.from(source.shardType, shard);\n yield* this.database\n .loadAll(lakeId.serialize())\n .filter((event) => {\n if (!source.events.includes(event.data.data.name)) return false;\n const payload = event.data.data.payload;\n if (!payload) return false;\n return payload[source.shardKey] === shard;\n })\n .map((event) => event.data);\n }\n}\n\nexport class InMemoryProjectedStreamStorageLayer\n implements ProjectedStreamStorageLayer\n{\n constructor(private readonly database: InMemoryDatabase) {}\n\n async getCursor(\n savedChange: ISerializedSavedChange,\n ): Promise<Cursor | undefined> {\n const [collection, id] = savedChange.ref.split(\"/\");\n if (!collection || !id) return undefined;\n\n const raw = this.database.storage.getCollection(collection).getRaw(id);\n\n if (!raw) return undefined;\n return new Cursor({\n ref: savedChange.ref,\n eventId: new EventId(savedChange.id),\n occurredAt: MicrosecondTimestamp.fromMicroseconds(raw.savedAt),\n revision: savedChange.revision,\n });\n }\n\n async get(cursor: Cursor): Promise<ISerializedFact | undefined> {\n const [collection, id] = cursor.ref.split(\"/\");\n if (!collection || !id) return undefined;\n\n const raw = this.database.storage.getCollection(collection).getRaw(id);\n\n if (!raw) return undefined;\n return { ...raw.data, occurredAt: raw.savedAt };\n }\n\n async *read(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n ) {\n const sources = projectedStream.sources.map((source) => {\n if (source instanceof StreamSource) {\n return new InMemoryStreamSourceFilter(this.database).all(source, shard);\n }\n return new InMemoryLakeSourceFilter(this.database).all(source, shard);\n });\n\n const all = sources\n .flatMap((source) => [...source])\n .sort((a, b) =>\n a.savedAt === b.savedAt\n ? a.data.revision - b.data.revision\n : a.savedAt > b.savedAt\n ? 1\n : -1,\n );\n\n let started = !startAfter?.ref;\n for (const fact of all) {\n if (startAfter && fact.data.ref === startAfter.ref) {\n started = true;\n continue;\n }\n if (endAt?.ref && fact.data.ref === endAt.ref) {\n yield { ...fact.data, occurredAt: fact.savedAt };\n break;\n }\n if (started) {\n yield { ...fact.data, occurredAt: fact.savedAt };\n }\n }\n }\n\n async slice(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n count?: number,\n ): Promise<ISerializedFact[]> {\n const stream = this.read(projectedStream, shard, startAfter, endAt);\n\n const result: ISerializedFact[] = [];\n\n let limit = count ?? Number.POSITIVE_INFINITY;\n for await (const fact of stream) {\n if (!limit--) break;\n result.push(fact);\n }\n\n return result;\n }\n}\n"],"mappings":";;;;;AAcA,IAAa,6BAAb,MAAwC;CACtC,YAAY,AAAiB,UAA4B;EAA5B;;CAE7B,CAAC,IAAI,QAAsB,OAAe;EAExC,MAAM,UADO,CAAC,GAAG,KAAK,SAAS,QAAQ,YAAY,MAAM,CAAC,CACrC,QAAQ,OAAO,GAAG,WAAW,OAAO,cAAc,CAAC;AACxE,OAAK,MAAM,UAAU,QACnB,QAAO,KAAK,SACT,QAAQ,OAAO,CACf,QAAQ,UAAU;AACjB,OAAI,CAAC,OAAO,OAAO,SAAS,MAAM,KAAK,KAAK,KAAK,CAAE,QAAO;GAC1D,MAAM,UAAU,MAAM,KAAK,KAAK;AAChC,OAAI,CAAC,QAAS,QAAO;AACrB,UAAO,QAAQ,OAAO,cAAc;IACpC,CACD,KAAK,UAAU,MAAM,KAAK;;;AAKnC,IAAa,2BAAb,MAAsC;CACpC,YAAY,AAAiB,UAA4B;EAA5B;;CAE7B,CAAC,IAAI,QAAoB,OAAe;EACtC,MAAM,SAAS,OAAO,KAAK,OAAO,WAAW,MAAM;AACnD,SAAO,KAAK,SACT,QAAQ,OAAO,WAAW,CAAC,CAC3B,QAAQ,UAAU;AACjB,OAAI,CAAC,OAAO,OAAO,SAAS,MAAM,KAAK,KAAK,KAAK,CAAE,QAAO;GAC1D,MAAM,UAAU,MAAM,KAAK,KAAK;AAChC,OAAI,CAAC,QAAS,QAAO;AACrB,UAAO,QAAQ,OAAO,cAAc;IACpC,CACD,KAAK,UAAU,MAAM,KAAK;;;AAIjC,IAAa,sCAAb,MAEA;CACE,YAAY,AAAiB,UAA4B;EAA5B;;CAE7B,MAAM,UACJ,aAC6B;EAC7B,MAAM,CAAC,YAAY,MAAM,YAAY,IAAI,MAAM,IAAI;AACnD,MAAI,CAAC,cAAc,CAAC,GAAI,QAAO;EAE/B,MAAM,MAAM,KAAK,SAAS,QAAQ,cAAc,WAAW,CAAC,OAAO,GAAG;AAEtE,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,IAAI,OAAO;GAChB,KAAK,YAAY;GACjB,SAAS,IAAI,QAAQ,YAAY,GAAG;GACpC,YAAY,qBAAqB,iBAAiB,IAAI,QAAQ;GAC9D,UAAU,YAAY;GACvB,CAAC;;CAGJ,MAAM,IAAI,QAAsD;EAC9D,MAAM,CAAC,YAAY,MAAM,OAAO,IAAI,MAAM,IAAI;AAC9C,MAAI,CAAC,cAAc,CAAC,GAAI,QAAO;EAE/B,MAAM,MAAM,KAAK,SAAS,QAAQ,cAAc,WAAW,CAAC,OAAO,GAAG;AAEtE,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO;GAAE,GAAG,IAAI;GAAM,YAAY,IAAI;GAAS;;CAGjD,OAAO,KACL,iBACA,OACA,YACA,OACA;EAQA,MAAM,MAPU,gBAAgB,QAAQ,KAAK,WAAW;AACtD,OAAI,kBAAkB,aACpB,QAAO,IAAI,2BAA2B,KAAK,SAAS,CAAC,IAAI,QAAQ,MAAM;AAEzE,UAAO,IAAI,yBAAyB,KAAK,SAAS,CAAC,IAAI,QAAQ,MAAM;IACrE,CAGC,SAAS,WAAW,CAAC,GAAG,OAAO,CAAC,CAChC,MAAM,GAAG,MACR,EAAE,YAAY,EAAE,UACZ,EAAE,KAAK,WAAW,EAAE,KAAK,WACzB,EAAE,UAAU,EAAE,UACZ,IACA,GACP;EAEH,IAAI,UAAU,CAAC,YAAY;AAC3B,OAAK,MAAM,QAAQ,KAAK;AACtB,OAAI,cAAc,KAAK,KAAK,QAAQ,WAAW,KAAK;AAClD,cAAU;AACV;;AAEF,OAAI,OAAO,OAAO,KAAK,KAAK,QAAQ,MAAM,KAAK;AAC7C,UAAM;KAAE,GAAG,KAAK;KAAM,YAAY,KAAK;KAAS;AAChD;;AAEF,OAAI,QACF,OAAM;IAAE,GAAG,KAAK;IAAM,YAAY,KAAK;IAAS;;;CAKtD,MAAM,MACJ,iBACA,OACA,YACA,OACA,OAC4B;EAC5B,MAAM,SAAS,KAAK,KAAK,iBAAiB,OAAO,YAAY,MAAM;EAEnE,MAAM,SAA4B,EAAE;EAEpC,IAAI,QAAQ,SAAS,OAAO;AAC5B,aAAW,MAAM,QAAQ,QAAQ;AAC/B,OAAI,CAAC,QAAS;AACd,UAAO,KAAK,KAAK;;AAGnB,SAAO"}
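Note: read in this layer merges the facts of every stream and lake source, sorts them globally (savedAt, then revision as tie-break), and slice caps the async iteration at count via a decrementing budget. A standalone sketch of that capping pattern; facts() is a hypothetical generator, not part of the package:

    async function* facts() { yield 1; yield 2; yield 3; }

    // Mirrors slice: count ?? Infinity, and `!limit--` stops once the budget hits zero.
    async function slice<T>(stream: AsyncIterable<T>, count?: number): Promise<T[]> {
      const result: T[] = [];
      let limit = count ?? Number.POSITIVE_INFINITY;
      for await (const item of stream) {
        if (!limit--) break;
        result.push(item);
      }
      return result;
    }

    slice(facts(), 2).then(console.log); // [1, 2]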
package/dist/in-memory.snapshotter.js CHANGED
@@ -1,4 +1,5 @@
- import { InMemoryStore } from "@ddd-ts/store-inmemory";
+ import "@ddd-ts/core";
+ import { InMemoryDatabase, InMemoryStore } from "@ddd-ts/store-inmemory";

  //#region src/in-memory.snapshotter.ts
  var SnapshotSerializer = class {
package/dist/in-memory.snapshotter.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"in-memory.snapshotter.js","names":[],"sources":["../src/in-memory.snapshotter.ts"],"sourcesContent":["import {\n type IEsAggregateStore,\n type IEventSourced,\n type IIdentifiable,\n type ISerializer,\n} from \"@ddd-ts/core\";\n\nimport { InMemoryDatabase, InMemoryStore } from \"@ddd-ts/store-inmemory\";\n\nclass SnapshotSerializer<A extends IEventSourced & IIdentifiable> {\n constructor(private readonly serializer: ISerializer<A>) {}\n\n async serialize(instance: A) {\n const serialized = await this.serializer.serialize(instance);\n return {\n ...serialized,\n revision: instance.acknowledgedRevision,\n };\n }\n\n async deserialize(serialized: any) {\n const { revision, ...content } = serialized;\n const instance = await this.serializer.deserialize(content);\n instance.acknowledgedRevision = Number(revision);\n return instance;\n }\n}\n\nexport class InMemorySnapshotter<A extends IEventSourced & IIdentifiable>\n extends InMemoryStore<A>\n implements IEsAggregateStore<A>\n{\n constructor(\n aggregate: string,\n db: InMemoryDatabase,\n serializer: ISerializer<A>,\n ) {\n super(`snapshots-${aggregate}`, db, new SnapshotSerializer(serializer));\n }\n}\n"],"mappings":";;;AASA,IAAM,qBAAN,MAAkE;CAChE,YAAY,AAAiB,YAA4B;EAA5B;;CAE7B,MAAM,UAAU,UAAa;AAE3B,SAAO;GACL,GAFiB,MAAM,KAAK,WAAW,UAAU,SAAS;GAG1D,UAAU,SAAS;GACpB;;CAGH,MAAM,YAAY,YAAiB;EACjC,MAAM,EAAE,UAAU,GAAG,YAAY;EACjC,MAAM,WAAW,MAAM,KAAK,WAAW,YAAY,QAAQ;AAC3D,WAAS,uBAAuB,OAAO,SAAS;AAChD,SAAO;;;AAIX,IAAa,sBAAb,cACU,cAEV;CACE,YACE,WACA,IACA,YACA;AACA,QAAM,aAAa,aAAa,IAAI,IAAI,mBAAmB,WAAW,CAAC"}
+ {"version":3,"file":"in-memory.snapshotter.js","names":[],"sources":["../src/in-memory.snapshotter.ts"],"sourcesContent":["import {\n type IEsAggregateStore,\n type IEventSourced,\n type IIdentifiable,\n type ISerializer,\n} from \"@ddd-ts/core\";\n\nimport { InMemoryDatabase, InMemoryStore } from \"@ddd-ts/store-inmemory\";\n\nclass SnapshotSerializer<A extends IEventSourced & IIdentifiable> {\n constructor(private readonly serializer: ISerializer<A>) {}\n\n async serialize(instance: A) {\n const serialized = await this.serializer.serialize(instance);\n return {\n ...serialized,\n revision: instance.acknowledgedRevision,\n };\n }\n\n async deserialize(serialized: any) {\n const { revision, ...content } = serialized;\n const instance = await this.serializer.deserialize(content);\n instance.acknowledgedRevision = Number(revision);\n return instance;\n }\n}\n\nexport class InMemorySnapshotter<A extends IEventSourced & IIdentifiable>\n extends InMemoryStore<A>\n implements IEsAggregateStore<A>\n{\n constructor(\n aggregate: string,\n db: InMemoryDatabase,\n serializer: ISerializer<A>,\n ) {\n super(`snapshots-${aggregate}`, db, new SnapshotSerializer(serializer));\n }\n}\n"],"mappings":";;;;AASA,IAAM,qBAAN,MAAkE;CAChE,YAAY,AAAiB,YAA4B;EAA5B;;CAE7B,MAAM,UAAU,UAAa;AAE3B,SAAO;GACL,GAFiB,MAAM,KAAK,WAAW,UAAU,SAAS;GAG1D,UAAU,SAAS;GACpB;;CAGH,MAAM,YAAY,YAAiB;EACjC,MAAM,EAAE,UAAU,GAAG,YAAY;EACjC,MAAM,WAAW,MAAM,KAAK,WAAW,YAAY,QAAQ;AAC3D,WAAS,uBAAuB,OAAO,SAAS;AAChD,SAAO;;;AAIX,IAAa,sBAAb,cACU,cAEV;CACE,YACE,WACA,IACA,YACA;AACA,QAAM,aAAa,aAAa,IAAI,IAAI,mBAAmB,WAAW,CAAC"}
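Note: the snapshotter in this map decorates a caller-provided serializer so that acknowledgedRevision is persisted next to the snapshot body and restored on load. A round-trip sketch under assumed types; Account and the inner serializer are hypothetical, only the decoration pattern matches the SnapshotSerializer above:

    type Account = { id: string; balance: number; acknowledgedRevision: number };

    const inner = {
      serialize: async (a: Account) => ({ id: a.id, balance: a.balance }),
      deserialize: async (doc: { id: string; balance: number }): Promise<Account> =>
        ({ ...doc, acknowledgedRevision: -1 }),
    };

    // Same shape as SnapshotSerializer: append the revision on write, restore it on read.
    const snapshots = {
      serialize: async (a: Account) =>
        ({ ...(await inner.serialize(a)), revision: a.acknowledgedRevision }),
      deserialize: async ({ revision, ...doc }: { revision: number; id: string; balance: number }) => {
        const instance = await inner.deserialize(doc);
        instance.acknowledgedRevision = Number(revision);
        return instance;
      },
    };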
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ddd-ts/event-sourcing-inmemory",
- "version": "0.0.0-compute-timeout-on-process.4",
+ "version": "0.0.0-compute-timeout-on-process.6",
  "types": "dist/index.d.ts",
  "license": "MIT",
  "repository": {
@@ -9,32 +9,26 @@
  "files": [
  "dist"
  ],
- "scripts": {
- "test": "jest --config node_modules/@ddd-ts/tools/jest.config.js",
- "build": "tsdown --config node_modules/@ddd-ts/tools/tsdown.config.js"
- },
  "dependencies": {
- "@ddd-ts/core": "0.0.0-compute-timeout-on-process.4",
- "@ddd-ts/shape": "0.0.0-compute-timeout-on-process.4",
- "@ddd-ts/store-inmemory": "0.0.0-compute-timeout-on-process.4",
- "@ddd-ts/types": "0.0.0-compute-timeout-on-process.4"
+ "@ddd-ts/core": "0.0.0-compute-timeout-on-process.6",
+ "@ddd-ts/shape": "0.0.0-compute-timeout-on-process.6",
+ "@ddd-ts/store-inmemory": "0.0.0-compute-timeout-on-process.6",
+ "@ddd-ts/types": "0.0.0-compute-timeout-on-process.6"
  },
  "devDependencies": {
- "@ddd-ts/shape": "0.0.0-compute-timeout-on-process.4",
- "@ddd-ts/tests": "0.0.0-compute-timeout-on-process.4",
- "@ddd-ts/tools": "0.0.0-compute-timeout-on-process.4",
- "@ddd-ts/traits": "0.0.0-compute-timeout-on-process.4",
+ "@ddd-ts/shape": "0.0.0-compute-timeout-on-process.6",
+ "@ddd-ts/tests": "0.0.0-compute-timeout-on-process.6",
+ "@ddd-ts/tools": "0.0.0-compute-timeout-on-process.6",
+ "@ddd-ts/traits": "0.0.0-compute-timeout-on-process.6",
  "@types/jest": "^29.5.1",
  "@types/node": "^20.12.4"
  },
  "exports": {
- ".": "./src/index.ts",
+ ".": "./dist/index.js",
  "./package.json": "./package.json"
  },
- "publishConfig": {
- "exports": {
- ".": "./dist/index.js",
- "./package.json": "./package.json"
- }
+ "scripts": {
+ "test": "jest --config node_modules/@ddd-ts/tools/jest.config.js",
+ "build": "tsdown --config node_modules/@ddd-ts/tools/tsdown.config.js"
  }
- }
+ }
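Note: the net effect of the manifest change is that the publishConfig.exports override is gone and the published exports map now points "." directly at the build output, so consumers resolve ./dist/index.js (with types from "types": "dist/index.d.ts") instead of relying on a publish-time rewrite of the previous "./src/index.ts" entry. Consumer imports are unaffected; assuming the store classes are re-exported from the package index, resolution looks like:

    import { InMemorySnapshotter } from "@ddd-ts/event-sourcing-inmemory"; // → ./dist/index.js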