@ddd-ts/event-sourcing-firestore 0.0.32 → 0.0.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/firestore.event-lake.storage-layer.d.ts +3 -2
- package/dist/firestore.event-lake.storage-layer.d.ts.map +1 -1
- package/dist/firestore.event-lake.storage-layer.js +8 -4
- package/dist/firestore.event-lake.storage-layer.js.map +1 -1
- package/dist/firestore.event-stream.storage-layer.d.ts +3 -2
- package/dist/firestore.event-stream.storage-layer.d.ts.map +1 -1
- package/dist/firestore.event-stream.storage-layer.js +13 -7
- package/dist/firestore.event-stream.storage-layer.js.map +1 -1
- package/dist/firestore.projected-stream.reader.d.ts +8 -1
- package/dist/firestore.projected-stream.reader.d.ts.map +1 -1
- package/dist/firestore.projected-stream.reader.js +26 -1
- package/dist/firestore.projected-stream.reader.js.map +1 -1
- package/dist/firestore.projected-stream.storage-layer.d.ts +9 -3
- package/dist/firestore.projected-stream.storage-layer.d.ts.map +1 -1
- package/dist/firestore.projected-stream.storage-layer.js +83 -14
- package/dist/firestore.projected-stream.storage-layer.js.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +7 -1
- package/dist/index.js.map +1 -1
- package/dist/projection/cases/attempts.spec.d.ts +2 -0
- package/dist/projection/cases/attempts.spec.d.ts.map +1 -0
- package/dist/projection/cases/attempts.spec.js +42 -0
- package/dist/projection/cases/attempts.spec.js.map +1 -0
- package/dist/projection/cases/batchlast.spec.d.ts +2 -0
- package/dist/projection/cases/batchlast.spec.d.ts.map +1 -0
- package/dist/projection/cases/batchlast.spec.js +51 -0
- package/dist/projection/cases/batchlast.spec.js.map +1 -0
- package/dist/projection/cases/bigshuffle.spec.d.ts +2 -0
- package/dist/projection/cases/bigshuffle.spec.d.ts.map +1 -0
- package/dist/projection/cases/bigshuffle.spec.js +59 -0
- package/dist/projection/cases/bigshuffle.spec.js.map +1 -0
- package/dist/projection/cases/burst.spec.d.ts +2 -0
- package/dist/projection/cases/burst.spec.d.ts.map +1 -0
- package/dist/projection/cases/burst.spec.js +38 -0
- package/dist/projection/cases/burst.spec.js.map +1 -0
- package/dist/projection/cases/claimtimeout.spec.d.ts +2 -0
- package/dist/projection/cases/claimtimeout.spec.d.ts.map +1 -0
- package/dist/projection/cases/claimtimeout.spec.js +40 -0
- package/dist/projection/cases/claimtimeout.spec.js.map +1 -0
- package/dist/projection/cases/concurrency.spec.d.ts +2 -0
- package/dist/projection/cases/concurrency.spec.d.ts.map +1 -0
- package/dist/projection/cases/concurrency.spec.js +49 -0
- package/dist/projection/cases/concurrency.spec.js.map +1 -0
- package/dist/projection/cases/deduplicate.spec.d.ts +2 -0
- package/dist/projection/cases/deduplicate.spec.d.ts.map +1 -0
- package/dist/projection/cases/deduplicate.spec.js +22 -0
- package/dist/projection/cases/deduplicate.spec.js.map +1 -0
- package/dist/projection/cases/defer.spec.d.ts +2 -0
- package/dist/projection/cases/defer.spec.d.ts.map +1 -0
- package/dist/projection/cases/defer.spec.js +44 -0
- package/dist/projection/cases/defer.spec.js.map +1 -0
- package/dist/projection/cases/lock.spec.d.ts +2 -0
- package/dist/projection/cases/lock.spec.d.ts.map +1 -0
- package/dist/projection/cases/lock.spec.js +91 -0
- package/dist/projection/cases/lock.spec.js.map +1 -0
- package/dist/projection/cases/skip.spec.d.ts +2 -0
- package/dist/projection/cases/skip.spec.d.ts.map +1 -0
- package/dist/projection/cases/skip.spec.js +86 -0
- package/dist/projection/cases/skip.spec.js.map +1 -0
- package/dist/projection/cases/stress.spec.d.ts +2 -0
- package/dist/projection/cases/stress.spec.d.ts.map +1 -0
- package/dist/projection/cases/stress.spec.js +73 -0
- package/dist/projection/cases/stress.spec.js.map +1 -0
- package/dist/projection/firestore.projector.d.ts +123 -0
- package/dist/projection/firestore.projector.d.ts.map +1 -0
- package/dist/projection/firestore.projector.js +605 -0
- package/dist/projection/firestore.projector.js.map +1 -0
- package/dist/projection/testkit/case-fixture.d.ts +610 -0
- package/dist/projection/testkit/case-fixture.d.ts.map +1 -0
- package/dist/projection/testkit/case-fixture.js +341 -0
- package/dist/projection/testkit/case-fixture.js.map +1 -0
- package/dist/projection/testkit.d.ts +44 -0
- package/dist/projection/testkit.d.ts.map +1 -0
- package/dist/projection/testkit.js +77 -0
- package/dist/projection/testkit.js.map +1 -0
- package/dist/projection/trace.decorator.d.ts +2 -0
- package/dist/projection/trace.decorator.d.ts.map +1 -0
- package/dist/projection/trace.decorator.js +35 -0
- package/dist/projection/trace.decorator.js.map +1 -0
- package/package.json +13 -11
|
@@ -1,4 +1,5 @@
|
|
|
1
|
-
import { LakeId, EventId, type ISerializedChange, type ISerializedFact,
|
|
1
|
+
import { LakeId, EventId, type ISerializedChange, type ISerializedFact, EventLakeStorageLayer } from "@ddd-ts/core";
|
|
2
|
+
import { ISerializedSavedChange } from "@ddd-ts/core/dist/interfaces/es-event";
|
|
2
3
|
import { DefaultConverter, FirestoreTransaction } from "@ddd-ts/store-firestore";
|
|
3
4
|
import * as fb from "firebase-admin";
|
|
4
5
|
export declare const serverTimestamp: typeof fb.firestore.FieldValue.serverTimestamp;
|
|
@@ -7,7 +8,7 @@ export declare class FirestoreEventLakeStorageLayer implements EventLakeStorageL
|
|
|
7
8
|
readonly converter: DefaultConverter<fb.firestore.DocumentData>;
|
|
8
9
|
constructor(firestore: fb.firestore.Firestore, converter?: DefaultConverter<fb.firestore.DocumentData>);
|
|
9
10
|
getCollection(lakeId: LakeId): fb.firestore.CollectionReference<fb.firestore.DocumentData, fb.firestore.DocumentData>;
|
|
10
|
-
append(lakeId: LakeId, changes: ISerializedChange[], trx: FirestoreTransaction): Promise<
|
|
11
|
+
append(lakeId: LakeId, changes: ISerializedChange[], trx: FirestoreTransaction): Promise<ISerializedSavedChange[]>;
|
|
11
12
|
read(lakeId: LakeId, startAfter?: EventId, endAt?: EventId): AsyncIterable<ISerializedFact>;
|
|
12
13
|
}
|
|
13
14
|
//# sourceMappingURL=firestore.event-lake.storage-layer.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.event-lake.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.event-lake.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,OAAO,EACP,KAAK,iBAAiB,EACtB,KAAK,eAAe,EACpB,
|
|
1
|
+
{"version":3,"file":"firestore.event-lake.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.event-lake.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,OAAO,EACP,KAAK,iBAAiB,EACtB,KAAK,eAAe,EACpB,qBAAqB,EACtB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,sBAAsB,EAAE,MAAM,uCAAuC,CAAC;AAE/E,OAAO,EACL,gBAAgB,EAChB,oBAAoB,EACrB,MAAM,yBAAyB,CAAC;AACjC,OAAO,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAErC,eAAO,MAAM,eAAe,gDAA0C,CAAC;AAEvE,qBAAa,8BAA+B,YAAW,qBAAqB;aAExD,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,SAAS;aACjC,SAAS;gBADT,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,SAAS,EACjC,SAAS,8CAAyB;IAGpD,aAAa,CAAC,MAAM,EAAE,MAAM;IAStB,MAAM,CACV,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,iBAAiB,EAAE,EAC5B,GAAG,EAAE,oBAAoB;IAiCpB,IAAI,CACT,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,OAAO,EACpB,KAAK,CAAC,EAAE,OAAO,GACd,aAAa,CAAC,eAAe,CAAC;CA2ClC"}
|
|
@@ -34,7 +34,6 @@ var __importStar = (this && this.__importStar) || (function () {
|
|
|
34
34
|
})();
|
|
35
35
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
36
|
exports.FirestoreEventLakeStorageLayer = exports.serverTimestamp = void 0;
|
|
37
|
-
const core_1 = require("@ddd-ts/core");
|
|
38
37
|
const store_firestore_1 = require("@ddd-ts/store-firestore");
|
|
39
38
|
const fb = __importStar(require("firebase-admin"));
|
|
40
39
|
exports.serverTimestamp = fb.firestore.FieldValue.serverTimestamp;
|
|
@@ -55,7 +54,7 @@ class FirestoreEventLakeStorageLayer {
|
|
|
55
54
|
}
|
|
56
55
|
async append(lakeId, changes, trx) {
|
|
57
56
|
const collection = this.getCollection(lakeId);
|
|
58
|
-
const
|
|
57
|
+
const result = [];
|
|
59
58
|
let revision = 0;
|
|
60
59
|
for (const change of changes) {
|
|
61
60
|
const storageChange = {
|
|
@@ -67,11 +66,16 @@ class FirestoreEventLakeStorageLayer {
|
|
|
67
66
|
revision: revision,
|
|
68
67
|
};
|
|
69
68
|
const ref = collection.doc(change.id);
|
|
70
|
-
refs.push(new core_1.EventReference(ref.path));
|
|
71
69
|
trx.transaction.create(ref, this.converter.toFirestore(storageChange));
|
|
70
|
+
result.push({
|
|
71
|
+
...change,
|
|
72
|
+
ref: ref.path,
|
|
73
|
+
revision: revision,
|
|
74
|
+
occurredAt: undefined,
|
|
75
|
+
});
|
|
72
76
|
revision++;
|
|
73
77
|
}
|
|
74
|
-
return
|
|
78
|
+
return result;
|
|
75
79
|
}
|
|
76
80
|
async *read(lakeId, startAfter, endAt) {
|
|
77
81
|
const collection = this.getCollection(lakeId);
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.event-lake.storage-layer.js","sourceRoot":"","sources":["../src/firestore.event-lake.storage-layer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1
|
+
{"version":3,"file":"firestore.event-lake.storage-layer.js","sourceRoot":"","sources":["../src/firestore.event-lake.storage-layer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AASA,6DAGiC;AACjC,mDAAqC;AAExB,QAAA,eAAe,GAAG,EAAE,CAAC,SAAS,CAAC,UAAU,CAAC,eAAe,CAAC;AAEvE,MAAa,8BAA8B;IAEvB;IACA;IAFlB,YACkB,SAAiC,EACjC,YAAY,IAAI,kCAAgB,EAAE;QADlC,cAAS,GAAT,SAAS,CAAwB;QACjC,cAAS,GAAT,SAAS,CAAyB;IACjD,CAAC;IAEJ,aAAa,CAAC,MAAc;QAC1B,OAAO,IAAI,CAAC,SAAS;aAClB,UAAU,CAAC,aAAa,CAAC;aACzB,GAAG,CAAC,OAAO,CAAC;aACZ,UAAU,CAAC,MAAM,CAAC,SAAS,CAAC;aAC5B,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC;aACnB,UAAU,CAAC,QAAQ,CAAC,CAAC;IAC1B,CAAC;IAED,KAAK,CAAC,MAAM,CACV,MAAc,EACd,OAA4B,EAC5B,GAAyB;QAEzB,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;QAE9C,MAAM,MAAM,GAA6B,EAAE,CAAC;QAE5C,IAAI,QAAQ,GAAG,CAAC,CAAC;QACjB,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,MAAM,aAAa,GAAG;gBACpB,OAAO,EAAE,MAAM,CAAC,EAAE;gBAClB,IAAI,EAAE,MAAM,CAAC,IAAI;gBACjB,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,UAAU,EAAE,IAAA,uBAAe,GAAE;gBAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,QAAQ,EAAE,QAAQ;aACnB,CAAC;YAEF,MAAM,GAAG,GAAG,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;YACtC,GAAG,CAAC,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;YAEvE,MAAM,CAAC,IAAI,CAAC;gBACV,GAAG,MAAM;gBACT,GAAG,EAAE,GAAG,CAAC,IAAI;gBACb,QAAQ,EAAE,QAAQ;gBAClB,UAAU,EAAE,SAAS;aACtB,CAAC,CAAC;YAEH,QAAQ,EAAE,CAAC;QACb,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,KAAK,CAAC,CAAC,IAAI,CACT,MAAc,EACd,UAAoB,EACpB,KAAe;QAEf,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;QAE9C,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC;YACrC,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI;YAChE,KAAK,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI;SACvD,CAAC,CAAC;QAEH,IAAI,UAAU,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CAAC,+BAA+B,UAAU,EAAE,CAAC,CAAC;QAC/D,CAAC;QAED,IAAI,KAAK,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,CAAC;YAC1B,MAAM,IAAI,K
AAK,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QACrD,CAAC;QAED,IAAI,KAAK,GAAG,UAAU;aACnB,OAAO,CAAC,YAAY,EAAE,KAAK,CAAC;aAC5B,OAAO,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;QAE9B,IAAI,KAAK,EAAE,CAAC;YACV,KAAK,GAAG,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QAClC,CAAC;QAED,IAAI,KAAK,EAAE,CAAC;YACV,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,CAAC;QAED,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC;YACzC,MAAM,CAAC,GAAG,KAAuD,CAAC;YAClE,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;YAC7C,MAAM;gBACJ,EAAE,EAAE,IAAI,CAAC,OAAO;gBAChB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,IAAI;gBACf,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,KAAK,EAAE,IAAI,CAAC,IAAI;gBAChB,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,CAAC;aACR,CAAC;QACvB,CAAC;IACH,CAAC;CACF;AAlGD,wEAkGC","sourcesContent":["import {\n LakeId,\n EventId,\n type ISerializedChange,\n type ISerializedFact,\n EventLakeStorageLayer,\n} from \"@ddd-ts/core\";\nimport { ISerializedSavedChange } from \"@ddd-ts/core/dist/interfaces/es-event\";\n\nimport {\n DefaultConverter,\n FirestoreTransaction,\n} from \"@ddd-ts/store-firestore\";\nimport * as fb from \"firebase-admin\";\n\nexport const serverTimestamp = fb.firestore.FieldValue.serverTimestamp;\n\nexport class FirestoreEventLakeStorageLayer implements EventLakeStorageLayer {\n constructor(\n public readonly firestore: fb.firestore.Firestore,\n public readonly converter = new DefaultConverter(),\n ) {}\n\n getCollection(lakeId: LakeId) {\n return this.firestore\n .collection(\"event-store\")\n .doc(\"Lakes\")\n .collection(lakeId.shardType)\n .doc(lakeId.shardId)\n .collection(\"events\");\n }\n\n async append(\n lakeId: LakeId,\n changes: ISerializedChange[],\n trx: FirestoreTransaction,\n ) {\n const collection = this.getCollection(lakeId);\n\n const result: ISerializedSavedChange[] = [];\n\n let revision = 0;\n for (const change of changes) {\n const storageChange = {\n eventId: change.id,\n name: change.name,\n 
payload: change.payload,\n occurredAt: serverTimestamp(),\n version: change.version,\n revision: revision,\n };\n\n const ref = collection.doc(change.id);\n trx.transaction.create(ref, this.converter.toFirestore(storageChange));\n\n result.push({\n ...change,\n ref: ref.path,\n revision: revision,\n occurredAt: undefined,\n });\n\n revision++;\n }\n\n return result;\n }\n\n async *read(\n lakeId: LakeId,\n startAfter?: EventId,\n endAt?: EventId,\n ): AsyncIterable<ISerializedFact> {\n const collection = this.getCollection(lakeId);\n\n const [start, end] = await Promise.all([\n startAfter ? collection.doc(startAfter.serialize()).get() : null,\n endAt ? collection.doc(endAt.serialize()).get() : null,\n ]);\n\n if (startAfter && !start?.exists) {\n throw new Error(`StartAfter event not found: ${startAfter}`);\n }\n\n if (endAt && !end?.exists) {\n throw new Error(`EndAt event not found: ${endAt}`);\n }\n\n let query = collection\n .orderBy(\"occurredAt\", \"asc\")\n .orderBy(\"revision\", \"asc\");\n\n if (start) {\n query = query.startAfter(start);\n }\n\n if (endAt) {\n query = query.endAt(end);\n }\n\n for await (const event of query.stream()) {\n const e = event as any as fb.firestore.QueryDocumentSnapshot<any>;\n const data = this.converter.fromFirestore(e);\n yield {\n id: data.eventId,\n ref: e.ref.path,\n revision: data.revision,\n name: data.name,\n $name: data.name,\n payload: data.payload,\n occurredAt: data.occurredAt,\n version: data.version ?? 1,\n } as ISerializedFact;\n }\n }\n}\n"]}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
|
-
import { StreamId, type ISerializedChange, type ISerializedFact,
|
|
1
|
+
import { StreamId, type ISerializedChange, type ISerializedFact, EventStreamStorageLayer } from "@ddd-ts/core";
|
|
2
|
+
import { ISerializedSavedChange } from "@ddd-ts/core/dist/interfaces/es-event";
|
|
2
3
|
import { DefaultConverter, FirestoreTransaction } from "@ddd-ts/store-firestore";
|
|
3
4
|
import * as fb from "firebase-admin";
|
|
4
5
|
export declare const serverTimestamp: typeof fb.firestore.FieldValue.serverTimestamp;
|
|
@@ -8,7 +9,7 @@ export declare class FirestoreEventStreamStorageLayer implements EventStreamStor
|
|
|
8
9
|
constructor(firestore: fb.firestore.Firestore, converter?: DefaultConverter<fb.firestore.DocumentData>);
|
|
9
10
|
isLocalRevisionOutdatedError(error: unknown): boolean;
|
|
10
11
|
getCollection(streamId: StreamId): fb.firestore.CollectionReference<fb.firestore.DocumentData, fb.firestore.DocumentData>;
|
|
11
|
-
append(streamId: StreamId, changes: ISerializedChange[], expectedRevision: number, trx: FirestoreTransaction): Promise<
|
|
12
|
+
append(streamId: StreamId, changes: ISerializedChange[], expectedRevision: number, trx: FirestoreTransaction): Promise<ISerializedSavedChange[]>;
|
|
12
13
|
read(streamId: StreamId, startAt?: number): AsyncIterable<ISerializedFact>;
|
|
13
14
|
}
|
|
14
15
|
//# sourceMappingURL=firestore.event-stream.storage-layer.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.event-stream.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.event-stream.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,KAAK,iBAAiB,EACtB,KAAK,eAAe,EACpB,
|
|
1
|
+
{"version":3,"file":"firestore.event-stream.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.event-stream.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,KAAK,iBAAiB,EACtB,KAAK,eAAe,EACpB,uBAAuB,EACxB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,sBAAsB,EAAE,MAAM,uCAAuC,CAAC;AAE/E,OAAO,EACL,gBAAgB,EAChB,oBAAoB,EACrB,MAAM,yBAAyB,CAAC;AACjC,OAAO,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAErC,eAAO,MAAM,eAAe,gDAA0C,CAAC;AAEvE,qBAAa,gCACX,YAAW,uBAAuB;aAGhB,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,SAAS;aACjC,SAAS;gBADT,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,SAAS,EACjC,SAAS,8CAAyB;IAGpD,4BAA4B,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO;IASrD,aAAa,CAAC,QAAQ,EAAE,QAAQ;IAS1B,MAAM,CACV,QAAQ,EAAE,QAAQ,EAClB,OAAO,EAAE,iBAAiB,EAAE,EAC5B,gBAAgB,EAAE,MAAM,EACxB,GAAG,EAAE,oBAAoB;IAkCpB,IAAI,CACT,QAAQ,EAAE,QAAQ,EAClB,OAAO,CAAC,EAAE,MAAM,GACf,aAAa,CAAC,eAAe,CAAC;CAsBlC"}
|
|
@@ -34,7 +34,6 @@ var __importStar = (this && this.__importStar) || (function () {
|
|
|
34
34
|
})();
|
|
35
35
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
36
|
exports.FirestoreEventStreamStorageLayer = exports.serverTimestamp = void 0;
|
|
37
|
-
const core_1 = require("@ddd-ts/core");
|
|
38
37
|
const store_firestore_1 = require("@ddd-ts/store-firestore");
|
|
39
38
|
const fb = __importStar(require("firebase-admin"));
|
|
40
39
|
exports.serverTimestamp = fb.firestore.FieldValue.serverTimestamp;
|
|
@@ -61,12 +60,10 @@ class FirestoreEventStreamStorageLayer {
|
|
|
61
60
|
}
|
|
62
61
|
async append(streamId, changes, expectedRevision, trx) {
|
|
63
62
|
const collection = this.getCollection(streamId);
|
|
64
|
-
const
|
|
63
|
+
const result = [];
|
|
65
64
|
let revision = expectedRevision + 1;
|
|
66
65
|
for (const change of changes) {
|
|
67
|
-
const
|
|
68
|
-
refs.push(new core_1.EventReference(ref.path));
|
|
69
|
-
trx.transaction.create(ref, this.converter.toFirestore({
|
|
66
|
+
const storageChange = {
|
|
70
67
|
aggregateType: streamId.aggregateType,
|
|
71
68
|
eventId: change.id,
|
|
72
69
|
aggregateId: streamId.aggregateId,
|
|
@@ -75,10 +72,18 @@ class FirestoreEventStreamStorageLayer {
|
|
|
75
72
|
payload: change.payload,
|
|
76
73
|
occurredAt: (0, exports.serverTimestamp)(),
|
|
77
74
|
version: change.version,
|
|
78
|
-
}
|
|
75
|
+
};
|
|
76
|
+
const ref = collection.doc(`${revision}`);
|
|
77
|
+
result.push({
|
|
78
|
+
...change,
|
|
79
|
+
ref: ref.path,
|
|
80
|
+
revision: revision,
|
|
81
|
+
occurredAt: undefined,
|
|
82
|
+
});
|
|
83
|
+
trx.transaction.create(ref, this.converter.toFirestore(storageChange));
|
|
79
84
|
revision++;
|
|
80
85
|
}
|
|
81
|
-
return
|
|
86
|
+
return result;
|
|
82
87
|
}
|
|
83
88
|
async *read(streamId, startAt) {
|
|
84
89
|
const collection = this.getCollection(streamId);
|
|
@@ -90,6 +95,7 @@ class FirestoreEventStreamStorageLayer {
|
|
|
90
95
|
const data = this.converter.fromFirestore(e);
|
|
91
96
|
yield {
|
|
92
97
|
id: data.eventId,
|
|
98
|
+
ref: e.ref.path,
|
|
93
99
|
revision: data.revision,
|
|
94
100
|
name: data.name,
|
|
95
101
|
$name: data.name,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.event-stream.storage-layer.js","sourceRoot":"","sources":["../src/firestore.event-stream.storage-layer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1
|
+
{"version":3,"file":"firestore.event-stream.storage-layer.js","sourceRoot":"","sources":["../src/firestore.event-stream.storage-layer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAQA,6DAGiC;AACjC,mDAAqC;AAExB,QAAA,eAAe,GAAG,EAAE,CAAC,SAAS,CAAC,UAAU,CAAC,eAAe,CAAC;AAEvE,MAAa,gCAAgC;IAIzB;IACA;IAFlB,YACkB,SAAiC,EACjC,YAAY,IAAI,kCAAgB,EAAE;QADlC,cAAS,GAAT,SAAS,CAAwB;QACjC,cAAS,GAAT,SAAS,CAAyB;IACjD,CAAC;IAEJ,4BAA4B,CAAC,KAAc;QACzC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;YACzB,KAAK,KAAK,IAAI;YACd,MAAM,IAAI,KAAK;YACf,KAAK,CAAC,IAAI,KAAK,CAAC,CACjB,CAAC;IACJ,CAAC;IAED,aAAa,CAAC,QAAkB;QAC9B,OAAO,IAAI,CAAC,SAAS;aAClB,UAAU,CAAC,aAAa,CAAC;aACzB,GAAG,CAAC,QAAQ,CAAC,aAAa,CAAC;aAC3B,UAAU,CAAC,SAAS,CAAC;aACrB,GAAG,CAAC,QAAQ,CAAC,WAAW,CAAC;aACzB,UAAU,CAAC,QAAQ,CAAC,CAAC;IAC1B,CAAC;IAED,KAAK,CAAC,MAAM,CACV,QAAkB,EAClB,OAA4B,EAC5B,gBAAwB,EACxB,GAAyB;QAEzB,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QAChD,MAAM,MAAM,GAA6B,EAAE,CAAC;QAE5C,IAAI,QAAQ,GAAG,gBAAgB,GAAG,CAAC,CAAC;QACpC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,MAAM,aAAa,GAAG;gBACpB,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,OAAO,EAAE,MAAM,CAAC,EAAE;gBAClB,WAAW,EAAE,QAAQ,CAAC,WAAW;gBACjC,QAAQ,EAAE,QAAQ;gBAClB,IAAI,EAAE,MAAM,CAAC,IAAI;gBACjB,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,UAAU,EAAE,IAAA,uBAAe,GAAE;gBAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;aACxB,CAAC;YAEF,MAAM,GAAG,GAAG,UAAU,CAAC,GAAG,CAAC,GAAG,QAAQ,EAAE,CAAC,CAAC;YAE1C,MAAM,CAAC,IAAI,CAAC;gBACV,GAAG,MAAM;gBACT,GAAG,EAAE,GAAG,CAAC,IAAI;gBACb,QAAQ,EAAE,QAAQ;gBAClB,UAAU,EAAE,SAAS;aACtB,CAAC,CAAC;YAEH,GAAG,CAAC,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;YACvE,QAAQ,EAAE,CAAC;QACb,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,KAAK,CAAC,CAAC,IAAI,CACT,QAAkB,EAClB,OAAgB;QAEhB,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QAEhD,MAAM,KAAK,GAAG,UAAU;aACrB,KAAK,CAAC,UAAU,EAAE,IAAI,EAAE,OAAO,IAAI,CAAC,CAAC;aACrC,OAAO,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;QAE9B,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC;YACzC,MAAM,CAAC,GAAG,KAAuD,CAAC;YAClE,MAAM,IAAI
,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;YAC7C,MAAM;gBACJ,EAAE,EAAE,IAAI,CAAC,OAAO;gBAChB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,IAAI;gBACf,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,KAAK,EAAE,IAAI,CAAC,IAAI;gBAChB,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,CAAC;aAC3B,CAAC;QACJ,CAAC;IACH,CAAC;CACF;AAzFD,4EAyFC","sourcesContent":["import {\n StreamId,\n type ISerializedChange,\n type ISerializedFact,\n EventStreamStorageLayer,\n} from \"@ddd-ts/core\";\nimport { ISerializedSavedChange } from \"@ddd-ts/core/dist/interfaces/es-event\";\n\nimport {\n DefaultConverter,\n FirestoreTransaction,\n} from \"@ddd-ts/store-firestore\";\nimport * as fb from \"firebase-admin\";\n\nexport const serverTimestamp = fb.firestore.FieldValue.serverTimestamp;\n\nexport class FirestoreEventStreamStorageLayer\n implements EventStreamStorageLayer\n{\n constructor(\n public readonly firestore: fb.firestore.Firestore,\n public readonly converter = new DefaultConverter(),\n ) {}\n\n isLocalRevisionOutdatedError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n error.code === 6\n );\n }\n\n getCollection(streamId: StreamId) {\n return this.firestore\n .collection(\"event-store\")\n .doc(streamId.aggregateType)\n .collection(\"streams\")\n .doc(streamId.aggregateId)\n .collection(\"events\");\n }\n\n async append(\n streamId: StreamId,\n changes: ISerializedChange[],\n expectedRevision: number,\n trx: FirestoreTransaction,\n ) {\n const collection = this.getCollection(streamId);\n const result: ISerializedSavedChange[] = [];\n\n let revision = expectedRevision + 1;\n for (const change of changes) {\n const storageChange = {\n aggregateType: streamId.aggregateType,\n eventId: change.id,\n aggregateId: streamId.aggregateId,\n revision: revision,\n name: change.name,\n payload: change.payload,\n occurredAt: serverTimestamp(),\n version: change.version,\n };\n\n 
const ref = collection.doc(`${revision}`);\n\n result.push({\n ...change,\n ref: ref.path,\n revision: revision,\n occurredAt: undefined,\n });\n\n trx.transaction.create(ref, this.converter.toFirestore(storageChange));\n revision++;\n }\n\n return result;\n }\n\n async *read(\n streamId: StreamId,\n startAt?: number,\n ): AsyncIterable<ISerializedFact> {\n const collection = this.getCollection(streamId);\n\n const query = collection\n .where(\"revision\", \">=\", startAt || 0)\n .orderBy(\"revision\", \"asc\");\n\n for await (const event of query.stream()) {\n const e = event as any as fb.firestore.QueryDocumentSnapshot<any>;\n const data = this.converter.fromFirestore(e);\n yield {\n id: data.eventId,\n ref: e.ref.path,\n revision: data.revision,\n name: data.name,\n $name: data.name,\n payload: data.payload,\n occurredAt: data.occurredAt,\n version: data.version ?? 1,\n };\n }\n }\n}\n"]}
|
|
@@ -1,6 +1,13 @@
|
|
|
1
|
-
import { IEsEvent, ISerializer, ProjectedStreamReader } from "@ddd-ts/core";
|
|
1
|
+
import { Cursor, IEsEvent, IFact, ISavedChange, ISerializer, ProjectedStream, ProjectedStreamReader } from "@ddd-ts/core";
|
|
2
2
|
import { Firestore } from "firebase-admin/firestore";
|
|
3
|
+
import { FirestoreProjectedStreamStorageLayer } from "./firestore.projected-stream.storage-layer";
|
|
3
4
|
export declare class FirestoreProjectedStreamReader<Event extends IEsEvent> extends ProjectedStreamReader<Event> {
|
|
5
|
+
storage: FirestoreProjectedStreamStorageLayer;
|
|
6
|
+
serializer: ISerializer<Event>;
|
|
4
7
|
constructor(firestore: Firestore, serializer: ISerializer<Event>);
|
|
8
|
+
getCursor(savedChange: ISavedChange<Event>): Promise<Cursor | undefined>;
|
|
9
|
+
get(cursor: Cursor): Promise<IFact<Event> | undefined>;
|
|
10
|
+
slice(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor, limit?: number): Promise<any>;
|
|
11
|
+
read(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor): AsyncGenerator<IFact<Event>, void, unknown>;
|
|
5
12
|
}
|
|
6
13
|
//# sourceMappingURL=firestore.projected-stream.reader.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.projected-stream.reader.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.reader.ts"],"names":[],"mappings":"AAAA,OAAO,
|
|
1
|
+
{"version":3,"file":"firestore.projected-stream.reader.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.reader.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,QAAQ,EACR,KAAK,EACL,YAAY,EAEZ,WAAW,EACX,eAAe,EACf,qBAAqB,EACtB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AACrD,OAAO,EAAE,oCAAoC,EAAE,MAAM,4CAA4C,CAAC;AAElG,qBAAa,8BAA8B,CACzC,KAAK,SAAS,QAAQ,CACtB,SAAQ,qBAAqB,CAAC,KAAK,CAAC;IACpC,OAAO,EAAE,oCAAoC,CAAC;IAC9C,UAAU,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;gBACnB,SAAS,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,CAAC,KAAK,CAAC;IAO1D,SAAS,CAAC,WAAW,EAAE,YAAY,CAAC,KAAK,CAAC;IAK1C,GAAG,CAAC,MAAM,EAAE,MAAM;IAUlB,KAAK,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,MAAM;IAcT,IAAI,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM;CAWjB"}
|
|
@@ -4,8 +4,33 @@ exports.FirestoreProjectedStreamReader = void 0;
|
|
|
4
4
|
const core_1 = require("@ddd-ts/core");
|
|
5
5
|
const firestore_projected_stream_storage_layer_1 = require("./firestore.projected-stream.storage-layer");
|
|
6
6
|
class FirestoreProjectedStreamReader extends core_1.ProjectedStreamReader {
|
|
7
|
+
storage;
|
|
8
|
+
serializer;
|
|
7
9
|
constructor(firestore, serializer) {
|
|
8
|
-
|
|
10
|
+
const storage = new firestore_projected_stream_storage_layer_1.FirestoreProjectedStreamStorageLayer(firestore);
|
|
11
|
+
super(storage, serializer);
|
|
12
|
+
this.storage = storage;
|
|
13
|
+
this.serializer = serializer;
|
|
14
|
+
}
|
|
15
|
+
async getCursor(savedChange) {
|
|
16
|
+
const serialized = await this.serializer.serialize(savedChange);
|
|
17
|
+
return this.storage.getCursor(serialized);
|
|
18
|
+
}
|
|
19
|
+
async get(cursor) {
|
|
20
|
+
const serialized = await this.storage.get(cursor);
|
|
21
|
+
if (!serialized) {
|
|
22
|
+
return undefined;
|
|
23
|
+
}
|
|
24
|
+
return this.serializer.deserialize(serialized);
|
|
25
|
+
}
|
|
26
|
+
async slice(projectedStream, shard, startAfter, endAt, limit) {
|
|
27
|
+
const serialized = await this.storage.slice(projectedStream, shard, startAfter, endAt, limit);
|
|
28
|
+
return Promise.all(serialized.map((s) => this.serializer.deserialize(s)));
|
|
29
|
+
}
|
|
30
|
+
async *read(projectedStream, shard, startAfter, endAt) {
|
|
31
|
+
for await (const serialized of this.storage.read(projectedStream, shard, startAfter, endAt)) {
|
|
32
|
+
yield this.serializer.deserialize(serialized);
|
|
33
|
+
}
|
|
9
34
|
}
|
|
10
35
|
}
|
|
11
36
|
exports.FirestoreProjectedStreamReader = FirestoreProjectedStreamReader;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.projected-stream.reader.js","sourceRoot":"","sources":["../src/firestore.projected-stream.reader.ts"],"names":[],"mappings":";;;AAAA,
|
|
1
|
+
{"version":3,"file":"firestore.projected-stream.reader.js","sourceRoot":"","sources":["../src/firestore.projected-stream.reader.ts"],"names":[],"mappings":";;;AAAA,uCASsB;AAEtB,yGAAkG;AAElG,MAAa,8BAEX,SAAQ,4BAA4B;IACpC,OAAO,CAAuC;IAC9C,UAAU,CAAqB;IAC/B,YAAY,SAAoB,EAAE,UAA8B;QAC9D,MAAM,OAAO,GAAG,IAAI,+EAAoC,CAAC,SAAS,CAAC,CAAC;QACpE,KAAK,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED,KAAK,CAAC,SAAS,CAAC,WAAgC;QAC9C,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;QAChE,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,UAAoC,CAAC,CAAC;IACtE,CAAC;IAED,KAAK,CAAC,GAAG,CAAC,MAAc;QACtB,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,OAAO,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,UAAU,CAE5C,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,KAAK,CACT,eAAgC,EAChC,KAAa,EACb,UAAmB,EACnB,KAAc,EACd,KAAc;QAEd,MAAM,UAAU,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CACzC,eAAe,EACf,KAAK,EACL,UAAU,EACV,KAAK,EACL,KAAK,CACN,CAAC;QACF,OAAO,OAAO,CAAC,GAAG,CAChB,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAC/C,CAAC;IACX,CAAC;IAED,KAAK,CAAC,CAAC,IAAI,CACT,eAAgC,EAChC,KAAa,EACb,UAAmB,EACnB,KAAc;QAEd,IAAI,KAAK,EAAE,MAAM,UAAU,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,CAC9C,eAAe,EACf,KAAK,EACL,UAAU,EACV,KAAK,CACN,EAAE,CAAC;YACF,MAAM,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,UAAU,CAA4B,CAAC;QAC3E,CAAC;IACH,CAAC;CACF;AA7DD,wEA6DC","sourcesContent":["import {\n Cursor,\n IEsEvent,\n IFact,\n ISavedChange,\n ISerializedSavedChange,\n ISerializer,\n ProjectedStream,\n ProjectedStreamReader,\n} from \"@ddd-ts/core\";\nimport { Firestore } from \"firebase-admin/firestore\";\nimport { FirestoreProjectedStreamStorageLayer } from \"./firestore.projected-stream.storage-layer\";\n\nexport class FirestoreProjectedStreamReader<\n Event extends IEsEvent,\n> extends ProjectedStreamReader<Event> {\n storage: FirestoreProjectedStreamStorageLayer;\n serializer: 
ISerializer<Event>;\n constructor(firestore: Firestore, serializer: ISerializer<Event>) {\n const storage = new FirestoreProjectedStreamStorageLayer(firestore);\n super(storage, serializer);\n this.storage = storage;\n this.serializer = serializer;\n }\n\n async getCursor(savedChange: ISavedChange<Event>) {\n const serialized = await this.serializer.serialize(savedChange);\n return this.storage.getCursor(serialized as ISerializedSavedChange);\n }\n\n async get(cursor: Cursor) {\n const serialized = await this.storage.get(cursor);\n if (!serialized) {\n return undefined;\n }\n return this.serializer.deserialize(serialized) as unknown as Promise<\n IFact<Event>\n >;\n }\n\n async slice(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n limit?: number,\n ) {\n const serialized = await this.storage.slice(\n projectedStream,\n shard,\n startAfter,\n endAt,\n limit,\n );\n return Promise.all(\n serialized.map((s) => this.serializer.deserialize(s)),\n ) as any;\n }\n\n async *read(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n ) {\n for await (const serialized of this.storage.read(\n projectedStream,\n shard,\n startAfter,\n endAt,\n )) {\n yield this.serializer.deserialize(serialized) as unknown as IFact<Event>;\n }\n }\n}\n"]}
|
|
@@ -1,6 +1,8 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { ISerializedFact, ISerializedSavedChange, LakeSource, ProjectedStream, ProjectedStreamStorageLayer, StreamSource } from "@ddd-ts/core";
|
|
2
2
|
import { DefaultConverter } from "@ddd-ts/store-firestore";
|
|
3
|
-
import { Filter, Firestore } from "firebase-admin/firestore";
|
|
3
|
+
import { Filter, Firestore, Timestamp } from "firebase-admin/firestore";
|
|
4
|
+
import { MicrosecondTimestamp } from "@ddd-ts/shape";
|
|
5
|
+
import { Cursor } from "@ddd-ts/core/dist/components/cursor";
|
|
4
6
|
export declare class FirestoreLakeSourceFilter {
|
|
5
7
|
filter(shard: string, lakeSource: LakeSource): Filter;
|
|
6
8
|
}
|
|
@@ -11,7 +13,7 @@ export declare class FirestoreProjectedStreamStorageLayer implements ProjectedSt
|
|
|
11
13
|
private readonly firestore;
|
|
12
14
|
readonly converter: DefaultConverter<FirebaseFirestore.DocumentData>;
|
|
13
15
|
constructor(firestore: Firestore, converter?: DefaultConverter<FirebaseFirestore.DocumentData>);
|
|
14
|
-
read(projectedStream: ProjectedStream, shard: string, startAfter?:
|
|
16
|
+
read(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor): AsyncGenerator<{
|
|
15
17
|
id: any;
|
|
16
18
|
ref: string;
|
|
17
19
|
revision: any;
|
|
@@ -21,5 +23,9 @@ export declare class FirestoreProjectedStreamStorageLayer implements ProjectedSt
|
|
|
21
23
|
occurredAt: any;
|
|
22
24
|
version: any;
|
|
23
25
|
}, void, unknown>;
|
|
26
|
+
microsecondToTimestamp(microseconds: MicrosecondTimestamp): Timestamp;
|
|
27
|
+
get(cursor: Cursor): Promise<ISerializedFact | undefined>;
|
|
28
|
+
getCursor(savedChange: ISerializedSavedChange): Promise<Cursor | undefined>;
|
|
29
|
+
slice(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor, limit?: number): Promise<ISerializedFact[]>;
|
|
24
30
|
}
|
|
25
31
|
//# sourceMappingURL=firestore.projected-stream.storage-layer.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.projected-stream.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,
|
|
1
|
+
{"version":3,"file":"firestore.projected-stream.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,eAAe,EACf,sBAAsB,EACtB,UAAU,EACV,eAAe,EACf,2BAA2B,EAC3B,YAAY,EACb,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAC3D,OAAO,EACL,MAAM,EACN,SAAS,EAET,SAAS,EACV,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AACrD,OAAO,EAAE,MAAM,EAAE,MAAM,qCAAqC,CAAC;AAE7D,qBAAa,yBAAyB;IACpC,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU;CAM7C;AAED,qBAAa,2BAA2B;IACtC,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,YAAY;CAOjD;AAED,qBAAa,oCACX,YAAW,2BAA2B;IAGpC,OAAO,CAAC,QAAQ,CAAC,SAAS;aACV,SAAS;gBADR,SAAS,EAAE,SAAS,EACrB,SAAS,mDAAyB;IAG7C,IAAI,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM;;;;;;;;;;IA2CT,sBAAsB,CAAC,YAAY,EAAE,oBAAoB;IAM1D,GAAG,CAAC,MAAM,EAAE,MAAM;IAkBlB,SAAS,CACb,WAAW,EAAE,sBAAsB,GAClC,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;IAcxB,KAAK,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,MAAM;CAiDjB"}
|
|
@@ -4,6 +4,7 @@ exports.FirestoreProjectedStreamStorageLayer = exports.FirestoreStreamSourceFilt
|
|
|
4
4
|
const core_1 = require("@ddd-ts/core");
|
|
5
5
|
const store_firestore_1 = require("@ddd-ts/store-firestore");
|
|
6
6
|
const firestore_1 = require("firebase-admin/firestore");
|
|
7
|
+
const cursor_1 = require("@ddd-ts/core/dist/components/cursor");
|
|
7
8
|
class FirestoreLakeSourceFilter {
|
|
8
9
|
filter(shard, lakeSource) {
|
|
9
10
|
return firestore_1.Filter.and(firestore_1.Filter.where(`payload.${lakeSource.shardKey}`, "==", shard), firestore_1.Filter.where("name", "in", lakeSource.events));
|
|
@@ -28,16 +29,6 @@ class FirestoreProjectedStreamStorageLayer {
|
|
|
28
29
|
.collectionGroup("events")
|
|
29
30
|
.orderBy("occurredAt")
|
|
30
31
|
.orderBy("revision");
|
|
31
|
-
const [start, end] = await Promise.all([
|
|
32
|
-
startAfter ? this.firestore.doc(startAfter.serialize()).get() : null,
|
|
33
|
-
endAt ? this.firestore.doc(endAt.serialize()).get() : null,
|
|
34
|
-
]);
|
|
35
|
-
if (startAfter && !start?.exists) {
|
|
36
|
-
throw new Error(`StartAfter event not found: ${startAfter}`);
|
|
37
|
-
}
|
|
38
|
-
if (endAt && !end?.exists) {
|
|
39
|
-
throw new Error(`EndAt event not found: ${endAt}`);
|
|
40
|
-
}
|
|
41
32
|
const filters = projectedStream.sources.map((source) => {
|
|
42
33
|
if (source instanceof core_1.LakeSource) {
|
|
43
34
|
return new FirestoreLakeSourceFilter().filter(shard, source);
|
|
@@ -48,11 +39,13 @@ class FirestoreProjectedStreamStorageLayer {
|
|
|
48
39
|
throw new Error("Unknown source type");
|
|
49
40
|
});
|
|
50
41
|
query = query.where(firestore_1.Filter.or(...filters));
|
|
51
|
-
if (
|
|
52
|
-
|
|
42
|
+
if (startAfter) {
|
|
43
|
+
const ts = this.microsecondToTimestamp(startAfter.occurredAt);
|
|
44
|
+
query = query.startAfter(ts, startAfter.revision);
|
|
53
45
|
}
|
|
54
|
-
if (
|
|
55
|
-
|
|
46
|
+
if (endAt) {
|
|
47
|
+
const ts = this.microsecondToTimestamp(endAt.occurredAt);
|
|
48
|
+
query = query.endAt(ts, endAt.revision);
|
|
56
49
|
}
|
|
57
50
|
for await (const doc of query.stream()) {
|
|
58
51
|
const data = this.converter.fromFirestore(doc);
|
|
@@ -68,6 +61,82 @@ class FirestoreProjectedStreamStorageLayer {
|
|
|
68
61
|
};
|
|
69
62
|
}
|
|
70
63
|
}
|
|
64
|
+
microsecondToTimestamp(microseconds) {
|
|
65
|
+
const seconds = BigInt(microseconds.micros) / 1000000n;
|
|
66
|
+
const nanoseconds = (BigInt(microseconds.micros) % 1000000n) * 1000n; // Convert to nanoseconds
|
|
67
|
+
return new firestore_1.Timestamp(Number(seconds), Number(nanoseconds));
|
|
68
|
+
}
|
|
69
|
+
async get(cursor) {
|
|
70
|
+
const doc = await this.firestore.doc(cursor.ref).get();
|
|
71
|
+
if (!doc.exists) {
|
|
72
|
+
return undefined;
|
|
73
|
+
}
|
|
74
|
+
const data = this.converter.fromFirestoreSnapshot(doc);
|
|
75
|
+
return {
|
|
76
|
+
id: data.eventId,
|
|
77
|
+
ref: doc.ref.path,
|
|
78
|
+
revision: data.revision,
|
|
79
|
+
name: data.name,
|
|
80
|
+
$name: data.name,
|
|
81
|
+
payload: data.payload,
|
|
82
|
+
occurredAt: data.occurredAt,
|
|
83
|
+
version: data.version ?? 1,
|
|
84
|
+
};
|
|
85
|
+
}
|
|
86
|
+
async getCursor(savedChange) {
|
|
87
|
+
const doc = await this.firestore.doc(savedChange.ref).get();
|
|
88
|
+
if (!doc.exists) {
|
|
89
|
+
return undefined;
|
|
90
|
+
}
|
|
91
|
+
const data = this.converter.fromFirestoreSnapshot(doc);
|
|
92
|
+
return cursor_1.Cursor.deserialize({
|
|
93
|
+
eventId: data.eventId,
|
|
94
|
+
ref: doc.ref.path,
|
|
95
|
+
occurredAt: data.occurredAt,
|
|
96
|
+
revision: data.revision,
|
|
97
|
+
});
|
|
98
|
+
}
|
|
99
|
+
async slice(projectedStream, shard, startAfter, endAt, limit) {
|
|
100
|
+
let query = this.firestore
|
|
101
|
+
.collectionGroup("events")
|
|
102
|
+
.orderBy("occurredAt")
|
|
103
|
+
.orderBy("revision");
|
|
104
|
+
const filters = projectedStream.sources.map((source) => {
|
|
105
|
+
if (source instanceof core_1.LakeSource) {
|
|
106
|
+
return new FirestoreLakeSourceFilter().filter(shard, source);
|
|
107
|
+
}
|
|
108
|
+
if (source instanceof core_1.StreamSource) {
|
|
109
|
+
return new FirestoreStreamSourceFilter().filter(shard, source);
|
|
110
|
+
}
|
|
111
|
+
throw new Error("Unknown source type");
|
|
112
|
+
});
|
|
113
|
+
query = query.where(firestore_1.Filter.or(...filters));
|
|
114
|
+
if (startAfter) {
|
|
115
|
+
const ts = this.microsecondToTimestamp(startAfter.occurredAt);
|
|
116
|
+
query = query.startAfter(ts, startAfter.revision);
|
|
117
|
+
}
|
|
118
|
+
if (endAt) {
|
|
119
|
+
const ts = this.microsecondToTimestamp(endAt.occurredAt);
|
|
120
|
+
query = query.endAt(ts, endAt.revision);
|
|
121
|
+
}
|
|
122
|
+
if (limit) {
|
|
123
|
+
query = query.limit(limit);
|
|
124
|
+
}
|
|
125
|
+
const all = await query.get();
|
|
126
|
+
return all.docs.map((doc) => {
|
|
127
|
+
const data = this.converter.fromFirestore(doc);
|
|
128
|
+
return {
|
|
129
|
+
id: data.eventId,
|
|
130
|
+
ref: doc.ref.path,
|
|
131
|
+
revision: data.revision,
|
|
132
|
+
name: data.name,
|
|
133
|
+
$name: data.name,
|
|
134
|
+
payload: data.payload,
|
|
135
|
+
occurredAt: data.occurredAt,
|
|
136
|
+
version: data.version ?? 1,
|
|
137
|
+
};
|
|
138
|
+
});
|
|
139
|
+
}
|
|
71
140
|
}
|
|
72
141
|
exports.FirestoreProjectedStreamStorageLayer = FirestoreProjectedStreamStorageLayer;
|
|
73
142
|
//# sourceMappingURL=firestore.projected-stream.storage-layer.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"firestore.projected-stream.storage-layer.js","sourceRoot":"","sources":["../src/firestore.projected-stream.storage-layer.ts"],"names":[],"mappings":";;;AAAA,uCAMsB;AACtB,6DAA2D;AAC3D,wDAIkC;AAElC,MAAa,yBAAyB;IACpC,MAAM,CAAC,KAAa,EAAE,UAAsB;QAC1C,OAAO,kBAAM,CAAC,GAAG,CACf,kBAAM,CAAC,KAAK,CAAC,WAAW,UAAU,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,KAAK,CAAC,EAC3D,kBAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,EAAE,UAAU,CAAC,MAAM,CAAC,CAC9C,CAAC;IACJ,CAAC;CACF;AAPD,8DAOC;AAED,MAAa,2BAA2B;IACtC,MAAM,CAAC,KAAa,EAAE,YAA0B;QAC9C,OAAO,kBAAM,CAAC,GAAG,CACf,kBAAM,CAAC,KAAK,CAAC,eAAe,EAAE,IAAI,EAAE,YAAY,CAAC,aAAa,CAAC,EAC/D,kBAAM,CAAC,KAAK,CAAC,WAAW,YAAY,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,KAAK,CAAC,EAC7D,kBAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,EAAE,YAAY,CAAC,MAAM,CAAC,CAChD,CAAC;IACJ,CAAC;CACF;AARD,kEAQC;AAED,MAAa,oCAAoC;IAI5B;IACD;IAFlB,YACmB,SAAoB,EACrB,YAAY,IAAI,kCAAgB,EAAE;QADjC,cAAS,GAAT,SAAS,CAAW;QACrB,cAAS,GAAT,SAAS,CAAyB;IACjD,CAAC;IAEJ,KAAK,CAAC,CAAC,IAAI,CACT,eAAgC,EAChC,KAAa,EACb,UAA2B,EAC3B,KAAsB;QAEtB,IAAI,KAAK,GAAG,IAAI,CAAC,SAAS;aACvB,eAAe,CAAC,QAAQ,CAAC;aACzB,OAAO,CAAC,YAAY,CAAC;aACrB,OAAO,CAAC,UAAU,CAAC,CAAC;QAEvB,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC;YACrC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI;YACpE,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI;SAC3D,CAAC,CAAC;QAEH,IAAI,UAAU,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CAAC,+BAA+B,UAAU,EAAE,CAAC,CAAC;QAC/D,CAAC;QAED,IAAI,KAAK,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,CAAC;YAC1B,MAAM,IAAI,KAAK,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QACrD,CAAC;QAED,MAAM,OAAO,GAAG,eAAe,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE;YACrD,IAAI,MAAM,YAAY,iBAAU,EAAE,CAAC;gBACjC,OAAO,IAAI,yBAAyB,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;YAC/D,CAAC;YACD,IAAI,MAAM,YAAY,mBAAY,EAAE,CAAC;gBACnC,OAAO,IAAI,2BAA2B,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;YACjE,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,
CAAC;QACzC,CAAC,CAAC,CAAC;QAEH,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,kBAAM,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;QAE3C,IAAI,KAAK,EAAE,CAAC;YACV,KAAK,GAAG,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QAClC,CAAC;QAED,IAAI,GAAG,EAAE,CAAC;YACR,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,CAAC;QAED,IAAI,KAAK,EAAE,MAAM,GAAG,IAAI,KAAK,CAAC,MAAM,EAA0C,EAAE,CAAC;YAC/E,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;YAC/C,MAAM;gBACJ,EAAE,EAAE,IAAI,CAAC,OAAO;gBAChB,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,IAAI;gBACjB,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,KAAK,EAAE,IAAI,CAAC,IAAI;gBAChB,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,CAAC;aAC3B,CAAC;QACJ,CAAC;IACH,CAAC;CACF;AAlED,oFAkEC","sourcesContent":["import {\n EventReference,\n LakeSource,\n ProjectedStream,\n ProjectedStreamStorageLayer,\n StreamSource,\n} from \"@ddd-ts/core\";\nimport { DefaultConverter } from \"@ddd-ts/store-firestore\";\nimport {\n Filter,\n Firestore,\n QueryDocumentSnapshot,\n} from \"firebase-admin/firestore\";\n\nexport class FirestoreLakeSourceFilter {\n filter(shard: string, lakeSource: LakeSource) {\n return Filter.and(\n Filter.where(`payload.${lakeSource.shardKey}`, \"==\", shard),\n Filter.where(\"name\", \"in\", lakeSource.events),\n );\n }\n}\n\nexport class FirestoreStreamSourceFilter {\n filter(shard: string, streamSource: StreamSource) {\n return Filter.and(\n Filter.where(\"aggregateType\", \"==\", streamSource.aggregateType),\n Filter.where(`payload.${streamSource.shardKey}`, \"==\", shard),\n Filter.where(\"name\", \"in\", streamSource.events),\n );\n }\n}\n\nexport class FirestoreProjectedStreamStorageLayer\n implements ProjectedStreamStorageLayer\n{\n constructor(\n private readonly firestore: Firestore,\n public readonly converter = new DefaultConverter(),\n ) {}\n\n async *read(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: EventReference,\n endAt?: EventReference,\n ) {\n let query = 
this.firestore\n .collectionGroup(\"events\")\n .orderBy(\"occurredAt\")\n .orderBy(\"revision\");\n\n const [start, end] = await Promise.all([\n startAfter ? this.firestore.doc(startAfter.serialize()).get() : null,\n endAt ? this.firestore.doc(endAt.serialize()).get() : null,\n ]);\n\n if (startAfter && !start?.exists) {\n throw new Error(`StartAfter event not found: ${startAfter}`);\n }\n\n if (endAt && !end?.exists) {\n throw new Error(`EndAt event not found: ${endAt}`);\n }\n\n const filters = projectedStream.sources.map((source) => {\n if (source instanceof LakeSource) {\n return new FirestoreLakeSourceFilter().filter(shard, source);\n }\n if (source instanceof StreamSource) {\n return new FirestoreStreamSourceFilter().filter(shard, source);\n }\n throw new Error(\"Unknown source type\");\n });\n\n query = query.where(Filter.or(...filters));\n\n if (start) {\n query = query.startAfter(start);\n }\n\n if (end) {\n query = query.endAt(end);\n }\n\n for await (const doc of query.stream() as AsyncIterable<QueryDocumentSnapshot>) {\n const data = this.converter.fromFirestore(doc);\n yield {\n id: data.eventId,\n ref: doc.ref.path,\n revision: data.revision,\n name: data.name,\n $name: data.name,\n payload: data.payload,\n occurredAt: data.occurredAt,\n version: data.version ?? 1,\n };\n }\n }\n}\n"]}
|
|
1
|
+
{"version":3,"file":"firestore.projected-stream.storage-layer.js","sourceRoot":"","sources":["../src/firestore.projected-stream.storage-layer.ts"],"names":[],"mappings":";;;AAAA,uCAOsB;AACtB,6DAA2D;AAC3D,wDAKkC;AAElC,gEAA6D;AAE7D,MAAa,yBAAyB;IACpC,MAAM,CAAC,KAAa,EAAE,UAAsB;QAC1C,OAAO,kBAAM,CAAC,GAAG,CACf,kBAAM,CAAC,KAAK,CAAC,WAAW,UAAU,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,KAAK,CAAC,EAC3D,kBAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,EAAE,UAAU,CAAC,MAAM,CAAC,CAC9C,CAAC;IACJ,CAAC;CACF;AAPD,8DAOC;AAED,MAAa,2BAA2B;IACtC,MAAM,CAAC,KAAa,EAAE,YAA0B;QAC9C,OAAO,kBAAM,CAAC,GAAG,CACf,kBAAM,CAAC,KAAK,CAAC,eAAe,EAAE,IAAI,EAAE,YAAY,CAAC,aAAa,CAAC,EAC/D,kBAAM,CAAC,KAAK,CAAC,WAAW,YAAY,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,KAAK,CAAC,EAC7D,kBAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,EAAE,YAAY,CAAC,MAAM,CAAC,CAChD,CAAC;IACJ,CAAC;CACF;AARD,kEAQC;AAED,MAAa,oCAAoC;IAI5B;IACD;IAFlB,YACmB,SAAoB,EACrB,YAAY,IAAI,kCAAgB,EAAE;QADjC,cAAS,GAAT,SAAS,CAAW;QACrB,cAAS,GAAT,SAAS,CAAyB;IACjD,CAAC;IAEJ,KAAK,CAAC,CAAC,IAAI,CACT,eAAgC,EAChC,KAAa,EACb,UAAmB,EACnB,KAAc;QAEd,IAAI,KAAK,GAAG,IAAI,CAAC,SAAS;aACvB,eAAe,CAAC,QAAQ,CAAC;aACzB,OAAO,CAAC,YAAY,CAAC;aACrB,OAAO,CAAC,UAAU,CAAC,CAAC;QAEvB,MAAM,OAAO,GAAG,eAAe,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE;YACrD,IAAI,MAAM,YAAY,iBAAU,EAAE,CAAC;gBACjC,OAAO,IAAI,yBAAyB,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;YAC/D,CAAC;YACD,IAAI,MAAM,YAAY,mBAAY,EAAE,CAAC;gBACnC,OAAO,IAAI,2BAA2B,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;YACjE,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;QACzC,CAAC,CAAC,CAAC;QAEH,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,kBAAM,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;QAE3C,IAAI,UAAU,EAAE,CAAC;YACf,MAAM,EAAE,GAAG,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC9D,KAAK,GAAG,KAAK,CAAC,UAAU,CAAC,EAAE,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC;QACpD,CAAC;QAED,IAAI,KAAK,EAAE,CAAC;YACV,MAAM,EAAE,GAAG,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;YACzD,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC1C,CAAC;QAED,IAAI,KAAK,EAAE,MAAM,GAAG,IAAI,KAAK,CAAC,MAAM,EAA0C,EAAE,CAAC;YAC/E,MA
AM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;YAC/C,MAAM;gBACJ,EAAE,EAAE,IAAI,CAAC,OAAO;gBAChB,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,IAAI;gBACjB,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,KAAK,EAAE,IAAI,CAAC,IAAI;gBAChB,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,CAAC;aAC3B,CAAC;QACJ,CAAC;IACH,CAAC;IACM,sBAAsB,CAAC,YAAkC;QAC9D,MAAM,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,GAAG,QAAU,CAAC;QACzD,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,GAAG,QAAU,CAAC,GAAG,KAAK,CAAC,CAAC,yBAAyB;QACjG,OAAO,IAAI,qBAAS,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;IAC7D,CAAC;IAED,KAAK,CAAC,GAAG,CAAC,MAAc;QACtB,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC;QACvD,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC;YAChB,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,qBAAqB,CAAC,GAAG,CAAQ,CAAC;QAC9D,OAAO;YACL,EAAE,EAAE,IAAI,CAAC,OAAO;YAChB,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,IAAI;YACjB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,IAAI;YAChB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,UAAU,EAAE,IAAI,CAAC,UAAU;YAC3B,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,CAAC;SACR,CAAC;IACvB,CAAC;IAED,KAAK,CAAC,SAAS,CACb,WAAmC;QAEnC,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC;QAC5D,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC;YAChB,OAAO,SAAS,CAAC;QACnB,CAAC;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,qBAAqB,CAAC,GAAG,CAAQ,CAAC;QAC9D,OAAO,eAAM,CAAC,WAAW,CAAC;YACxB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,IAAI;YACjB,UAAU,EAAE,IAAI,CAAC,UAAU;YAC3B,QAAQ,EAAE,IAAI,CAAC,QAAQ;SACxB,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,KAAK,CACT,eAAgC,EAChC,KAAa,EACb,UAAmB,EACnB,KAAc,EACd,KAAc;QAEd,IAAI,KAAK,GAAG,IAAI,CAAC,SAAS;aACvB,eAAe,CAAC,QAAQ,CAAC;aACzB,OAAO,CAAC,YAAY,CAAC;aACrB,OAAO,CAAC,UAAU,CAAC,CAAC;QAEvB,MAAM,OAAO,GAAG,eAAe,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE;YACrD,IAAI,MAAM,YAAY,iBAAU,EAAE,CAAC;gBACjC,OAAO,IAAI,yBAAyB,
EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;YAC/D,CAAC;YACD,IAAI,MAAM,YAAY,mBAAY,EAAE,CAAC;gBACnC,OAAO,IAAI,2BAA2B,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;YACjE,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;QACzC,CAAC,CAAC,CAAC;QAEH,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,kBAAM,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;QAE3C,IAAI,UAAU,EAAE,CAAC;YACf,MAAM,EAAE,GAAG,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC9D,KAAK,GAAG,KAAK,CAAC,UAAU,CAAC,EAAE,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAC;QACpD,CAAC;QAED,IAAI,KAAK,EAAE,CAAC;YACV,MAAM,EAAE,GAAG,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;YACzD,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC1C,CAAC;QAED,IAAI,KAAK,EAAE,CAAC;YACV,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAC7B,CAAC;QAED,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE,CAAC;QAE9B,OAAO,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;YAC1B,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;YAC/C,OAAO;gBACL,EAAE,EAAE,IAAI,CAAC,OAAO;gBAChB,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,IAAI;gBACjB,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,KAAK,EAAE,IAAI,CAAC,IAAI;gBAChB,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,CAAC;aACR,CAAC;QACvB,CAAC,CAAC,CAAC;IACL,CAAC;CACF;AArJD,oFAqJC","sourcesContent":["import {\n ISerializedFact,\n ISerializedSavedChange,\n LakeSource,\n ProjectedStream,\n ProjectedStreamStorageLayer,\n StreamSource,\n} from \"@ddd-ts/core\";\nimport { DefaultConverter } from \"@ddd-ts/store-firestore\";\nimport {\n Filter,\n Firestore,\n QueryDocumentSnapshot,\n Timestamp,\n} from \"firebase-admin/firestore\";\nimport { MicrosecondTimestamp } from \"@ddd-ts/shape\";\nimport { Cursor } from \"@ddd-ts/core/dist/components/cursor\";\n\nexport class FirestoreLakeSourceFilter {\n filter(shard: string, lakeSource: LakeSource) {\n return Filter.and(\n Filter.where(`payload.${lakeSource.shardKey}`, \"==\", shard),\n Filter.where(\"name\", \"in\", lakeSource.events),\n );\n 
}\n}\n\nexport class FirestoreStreamSourceFilter {\n filter(shard: string, streamSource: StreamSource) {\n return Filter.and(\n Filter.where(\"aggregateType\", \"==\", streamSource.aggregateType),\n Filter.where(`payload.${streamSource.shardKey}`, \"==\", shard),\n Filter.where(\"name\", \"in\", streamSource.events),\n );\n }\n}\n\nexport class FirestoreProjectedStreamStorageLayer\n implements ProjectedStreamStorageLayer\n{\n constructor(\n private readonly firestore: Firestore,\n public readonly converter = new DefaultConverter(),\n ) {}\n\n async *read(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n ) {\n let query = this.firestore\n .collectionGroup(\"events\")\n .orderBy(\"occurredAt\")\n .orderBy(\"revision\");\n\n const filters = projectedStream.sources.map((source) => {\n if (source instanceof LakeSource) {\n return new FirestoreLakeSourceFilter().filter(shard, source);\n }\n if (source instanceof StreamSource) {\n return new FirestoreStreamSourceFilter().filter(shard, source);\n }\n throw new Error(\"Unknown source type\");\n });\n\n query = query.where(Filter.or(...filters));\n\n if (startAfter) {\n const ts = this.microsecondToTimestamp(startAfter.occurredAt);\n query = query.startAfter(ts, startAfter.revision);\n }\n\n if (endAt) {\n const ts = this.microsecondToTimestamp(endAt.occurredAt);\n query = query.endAt(ts, endAt.revision);\n }\n\n for await (const doc of query.stream() as AsyncIterable<QueryDocumentSnapshot>) {\n const data = this.converter.fromFirestore(doc);\n yield {\n id: data.eventId,\n ref: doc.ref.path,\n revision: data.revision,\n name: data.name,\n $name: data.name,\n payload: data.payload,\n occurredAt: data.occurredAt,\n version: data.version ?? 
1,\n };\n }\n }\n public microsecondToTimestamp(microseconds: MicrosecondTimestamp) {\n const seconds = BigInt(microseconds.micros) / 1_000_000n;\n const nanoseconds = (BigInt(microseconds.micros) % 1_000_000n) * 1000n; // Convert to nanoseconds\n return new Timestamp(Number(seconds), Number(nanoseconds));\n }\n\n async get(cursor: Cursor) {\n const doc = await this.firestore.doc(cursor.ref).get();\n if (!doc.exists) {\n return undefined;\n }\n const data = this.converter.fromFirestoreSnapshot(doc) as any;\n return {\n id: data.eventId,\n ref: doc.ref.path,\n revision: data.revision,\n name: data.name,\n $name: data.name,\n payload: data.payload,\n occurredAt: data.occurredAt,\n version: data.version ?? 1,\n } as ISerializedFact;\n }\n\n async getCursor(\n savedChange: ISerializedSavedChange,\n ): Promise<Cursor | undefined> {\n const doc = await this.firestore.doc(savedChange.ref).get();\n if (!doc.exists) {\n return undefined;\n }\n const data = this.converter.fromFirestoreSnapshot(doc) as any;\n return Cursor.deserialize({\n eventId: data.eventId,\n ref: doc.ref.path,\n occurredAt: data.occurredAt,\n revision: data.revision,\n });\n }\n\n async slice(\n projectedStream: ProjectedStream,\n shard: string,\n startAfter?: Cursor,\n endAt?: Cursor,\n limit?: number,\n ) {\n let query = this.firestore\n .collectionGroup(\"events\")\n .orderBy(\"occurredAt\")\n .orderBy(\"revision\");\n\n const filters = projectedStream.sources.map((source) => {\n if (source instanceof LakeSource) {\n return new FirestoreLakeSourceFilter().filter(shard, source);\n }\n if (source instanceof StreamSource) {\n return new FirestoreStreamSourceFilter().filter(shard, source);\n }\n throw new Error(\"Unknown source type\");\n });\n\n query = query.where(Filter.or(...filters));\n\n if (startAfter) {\n const ts = this.microsecondToTimestamp(startAfter.occurredAt);\n query = query.startAfter(ts, startAfter.revision);\n }\n\n if (endAt) {\n const ts = 
this.microsecondToTimestamp(endAt.occurredAt);\n query = query.endAt(ts, endAt.revision);\n }\n\n if (limit) {\n query = query.limit(limit);\n }\n\n const all = await query.get();\n\n return all.docs.map((doc) => {\n const data = this.converter.fromFirestore(doc);\n return {\n id: data.eventId,\n ref: doc.ref.path,\n revision: data.revision,\n name: data.name,\n $name: data.name,\n payload: data.payload,\n occurredAt: data.occurredAt,\n version: data.version ?? 1,\n } as ISerializedFact;\n });\n }\n}\n"]}
|
package/dist/index.d.ts
CHANGED
|
@@ -7,4 +7,5 @@ export { FirestoreEventLakeAggregateStore, MakeFirestoreEventLakeAggregateStore,
|
|
|
7
7
|
export { FirestoreProjectedStreamStorageLayer, FirestoreLakeSourceFilter, FirestoreStreamSourceFilter, } from "./firestore.projected-stream.storage-layer";
|
|
8
8
|
export { FirestoreProjectedStreamReader } from "./firestore.projected-stream.reader";
|
|
9
9
|
export { FirestoreSnapshotter } from "./firestore.snapshotter";
|
|
10
|
+
export { FirestoreProjector, FirestoreQueueStore, Task, AlreadyEnqueuedError, ClaimerId, } from "./projection/firestore.projector";
|
|
10
11
|
//# sourceMappingURL=index.d.ts.map
|
package/dist/index.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,8BAA8B,EAAE,MAAM,sCAAsC,CAAC;AACtF,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAC;AACvE,OAAO,EAAE,gCAAgC,EAAE,MAAM,wCAAwC,CAAC;AAC1F,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAE3E,OAAO,EACL,kCAAkC,EAClC,sCAAsC,GACvC,MAAM,0CAA0C,CAAC;AAElD,OAAO,EACL,gCAAgC,EAChC,oCAAoC,GACrC,MAAM,wCAAwC,CAAC;AAEhD,OAAO,EACL,oCAAoC,EACpC,yBAAyB,EACzB,2BAA2B,GAC5B,MAAM,4CAA4C,CAAC;AAEpD,OAAO,EAAE,8BAA8B,EAAE,MAAM,qCAAqC,CAAC;AAErF,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,8BAA8B,EAAE,MAAM,sCAAsC,CAAC;AACtF,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAC;AACvE,OAAO,EAAE,gCAAgC,EAAE,MAAM,wCAAwC,CAAC;AAC1F,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAE3E,OAAO,EACL,kCAAkC,EAClC,sCAAsC,GACvC,MAAM,0CAA0C,CAAC;AAElD,OAAO,EACL,gCAAgC,EAChC,oCAAoC,GACrC,MAAM,wCAAwC,CAAC;AAEhD,OAAO,EACL,oCAAoC,EACpC,yBAAyB,EACzB,2BAA2B,GAC5B,MAAM,4CAA4C,CAAC;AAEpD,OAAO,EAAE,8BAA8B,EAAE,MAAM,qCAAqC,CAAC;AAErF,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAE/D,OAAO,EACL,kBAAkB,EAClB,mBAAmB,EACnB,IAAI,EACJ,oBAAoB,EACpB,SAAS,GACV,MAAM,kCAAkC,CAAC"}
|
package/dist/index.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.FirestoreSnapshotter = exports.FirestoreProjectedStreamReader = exports.FirestoreStreamSourceFilter = exports.FirestoreLakeSourceFilter = exports.FirestoreProjectedStreamStorageLayer = exports.MakeFirestoreEventLakeAggregateStore = exports.FirestoreEventLakeAggregateStore = exports.MakeFirestoreEventStreamAggregateStore = exports.FirestoreEventStreamAggregateStore = exports.FirestoreEventStreamStore = exports.FirestoreEventStreamStorageLayer = exports.FirestoreEventLakeStore = exports.FirestoreEventLakeStorageLayer = void 0;
|
|
3
|
+
exports.ClaimerId = exports.AlreadyEnqueuedError = exports.Task = exports.FirestoreQueueStore = exports.FirestoreProjector = exports.FirestoreSnapshotter = exports.FirestoreProjectedStreamReader = exports.FirestoreStreamSourceFilter = exports.FirestoreLakeSourceFilter = exports.FirestoreProjectedStreamStorageLayer = exports.MakeFirestoreEventLakeAggregateStore = exports.FirestoreEventLakeAggregateStore = exports.MakeFirestoreEventStreamAggregateStore = exports.FirestoreEventStreamAggregateStore = exports.FirestoreEventStreamStore = exports.FirestoreEventStreamStorageLayer = exports.FirestoreEventLakeStore = exports.FirestoreEventLakeStorageLayer = void 0;
|
|
4
4
|
var firestore_event_lake_storage_layer_1 = require("./firestore.event-lake.storage-layer");
|
|
5
5
|
Object.defineProperty(exports, "FirestoreEventLakeStorageLayer", { enumerable: true, get: function () { return firestore_event_lake_storage_layer_1.FirestoreEventLakeStorageLayer; } });
|
|
6
6
|
var firestore_event_lake_store_1 = require("./firestore.event-lake.store");
|
|
@@ -23,4 +23,10 @@ var firestore_projected_stream_reader_1 = require("./firestore.projected-stream.
|
|
|
23
23
|
Object.defineProperty(exports, "FirestoreProjectedStreamReader", { enumerable: true, get: function () { return firestore_projected_stream_reader_1.FirestoreProjectedStreamReader; } });
|
|
24
24
|
var firestore_snapshotter_1 = require("./firestore.snapshotter");
|
|
25
25
|
Object.defineProperty(exports, "FirestoreSnapshotter", { enumerable: true, get: function () { return firestore_snapshotter_1.FirestoreSnapshotter; } });
|
|
26
|
+
var firestore_projector_1 = require("./projection/firestore.projector");
|
|
27
|
+
Object.defineProperty(exports, "FirestoreProjector", { enumerable: true, get: function () { return firestore_projector_1.FirestoreProjector; } });
|
|
28
|
+
Object.defineProperty(exports, "FirestoreQueueStore", { enumerable: true, get: function () { return firestore_projector_1.FirestoreQueueStore; } });
|
|
29
|
+
Object.defineProperty(exports, "Task", { enumerable: true, get: function () { return firestore_projector_1.Task; } });
|
|
30
|
+
Object.defineProperty(exports, "AlreadyEnqueuedError", { enumerable: true, get: function () { return firestore_projector_1.AlreadyEnqueuedError; } });
|
|
31
|
+
Object.defineProperty(exports, "ClaimerId", { enumerable: true, get: function () { return firestore_projector_1.ClaimerId; } });
|
|
26
32
|
//# sourceMappingURL=index.js.map
|
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,2FAAsF;AAA7E,oJAAA,8BAA8B,OAAA;AACvC,2EAAuE;AAA9D,qIAAA,uBAAuB,OAAA;AAChC,+FAA0F;AAAjF,wJAAA,gCAAgC,OAAA;AACzC,+EAA2E;AAAlE,yIAAA,yBAAyB,OAAA;AAElC,mGAGkD;AAFhD,4JAAA,kCAAkC,OAAA;AAClC,gKAAA,sCAAsC,OAAA;AAGxC,+FAGgD;AAF9C,wJAAA,gCAAgC,OAAA;AAChC,4JAAA,oCAAoC,OAAA;AAGtC,uGAIoD;AAHlD,gKAAA,oCAAoC,OAAA;AACpC,qJAAA,yBAAyB,OAAA;AACzB,uJAAA,2BAA2B,OAAA;AAG7B,yFAAqF;AAA5E,mJAAA,8BAA8B,OAAA;AAEvC,iEAA+D;AAAtD,6HAAA,oBAAoB,OAAA","sourcesContent":["export { FirestoreEventLakeStorageLayer } from \"./firestore.event-lake.storage-layer\";\nexport { FirestoreEventLakeStore } from \"./firestore.event-lake.store\";\nexport { FirestoreEventStreamStorageLayer } from \"./firestore.event-stream.storage-layer\";\nexport { FirestoreEventStreamStore } from \"./firestore.event-stream.store\";\n\nexport {\n FirestoreEventStreamAggregateStore,\n MakeFirestoreEventStreamAggregateStore,\n} from \"./firestore.event-stream.aggregate-store\";\n\nexport {\n FirestoreEventLakeAggregateStore,\n MakeFirestoreEventLakeAggregateStore,\n} from \"./firestore.event-lake.aggregate-store\";\n\nexport {\n FirestoreProjectedStreamStorageLayer,\n FirestoreLakeSourceFilter,\n FirestoreStreamSourceFilter,\n} from \"./firestore.projected-stream.storage-layer\";\n\nexport { FirestoreProjectedStreamReader } from \"./firestore.projected-stream.reader\";\n\nexport { FirestoreSnapshotter } from \"./firestore.snapshotter\";\n"]}
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,2FAAsF;AAA7E,oJAAA,8BAA8B,OAAA;AACvC,2EAAuE;AAA9D,qIAAA,uBAAuB,OAAA;AAChC,+FAA0F;AAAjF,wJAAA,gCAAgC,OAAA;AACzC,+EAA2E;AAAlE,yIAAA,yBAAyB,OAAA;AAElC,mGAGkD;AAFhD,4JAAA,kCAAkC,OAAA;AAClC,gKAAA,sCAAsC,OAAA;AAGxC,+FAGgD;AAF9C,wJAAA,gCAAgC,OAAA;AAChC,4JAAA,oCAAoC,OAAA;AAGtC,uGAIoD;AAHlD,gKAAA,oCAAoC,OAAA;AACpC,qJAAA,yBAAyB,OAAA;AACzB,uJAAA,2BAA2B,OAAA;AAG7B,yFAAqF;AAA5E,mJAAA,8BAA8B,OAAA;AAEvC,iEAA+D;AAAtD,6HAAA,oBAAoB,OAAA;AAE7B,wEAM0C;AALxC,yHAAA,kBAAkB,OAAA;AAClB,0HAAA,mBAAmB,OAAA;AACnB,2GAAA,IAAI,OAAA;AACJ,2HAAA,oBAAoB,OAAA;AACpB,gHAAA,SAAS,OAAA","sourcesContent":["export { FirestoreEventLakeStorageLayer } from \"./firestore.event-lake.storage-layer\";\nexport { FirestoreEventLakeStore } from \"./firestore.event-lake.store\";\nexport { FirestoreEventStreamStorageLayer } from \"./firestore.event-stream.storage-layer\";\nexport { FirestoreEventStreamStore } from \"./firestore.event-stream.store\";\n\nexport {\n FirestoreEventStreamAggregateStore,\n MakeFirestoreEventStreamAggregateStore,\n} from \"./firestore.event-stream.aggregate-store\";\n\nexport {\n FirestoreEventLakeAggregateStore,\n MakeFirestoreEventLakeAggregateStore,\n} from \"./firestore.event-lake.aggregate-store\";\n\nexport {\n FirestoreProjectedStreamStorageLayer,\n FirestoreLakeSourceFilter,\n FirestoreStreamSourceFilter,\n} from \"./firestore.projected-stream.storage-layer\";\n\nexport { FirestoreProjectedStreamReader } from \"./firestore.projected-stream.reader\";\n\nexport { FirestoreSnapshotter } from \"./firestore.snapshotter\";\n\nexport {\n FirestoreProjector,\n FirestoreQueueStore,\n Task,\n AlreadyEnqueuedError,\n ClaimerId,\n} from \"./projection/firestore.projector\";\n"]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"attempts.spec.d.ts","sourceRoot":"","sources":["../../../src/projection/cases/attempts.spec.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const case_fixture_1 = require("../testkit/case-fixture");
|
|
4
|
+
// Retry policy under test: 3 attempts, 10ms min/max delay, backoff factor 1
// (presumably a constant inter-attempt delay — confirm against projector docs).
const retryPolicy = { attempts: 3, minDelay: 10, maxDelay: 10, backoff: 1 };
// Enqueue up to 100 events per batch.
const enqueuePolicy = { batchSize: 100 };
const test = (0, case_fixture_1.caseFixture)("Breathing", {
    projector: {
        retry: retryPolicy,
        enqueue: enqueuePolicy,
    },
});
|
|
10
|
+
test.describe(() => {
    it("will attempt multiple retries when handler explicitely fails", async () => {
        const { events, act, control, assert } = await test.setup();
        const [account, opened] = events.open(`${test.name}_1`);
        await act.save(account);
        // A single handle() call; the projector retries internally, so we can
        // fail the suspended handler several times before letting it through.
        const pending = act.handle(opened);
        const attempt1 = await control.suspend(opened);
        attempt1.fail(new Error("first"));
        const attempt2 = await control.suspend(opened);
        attempt2.fail(new Error("second"));
        // Third attempt is allowed to complete normally.
        const attempt3 = await control.suspend(opened);
        attempt3.resume();
        await pending;
        await assert.cashflow(account.id).toHave({ id: account.id, flow: 0 });
    });
    it("but if we exceed the max attempts, the operation should fail", async () => {
        const { events, act, control, assert } = await test.setup();
        const [account, opened] = events.open(`${test.name}_2`);
        await act.save(account);
        const pending = act.handle(opened);
        const attempt1 = await control.suspend(opened);
        attempt1.fail(new Error("first"));
        const attempt2 = await control.suspend(opened);
        attempt2.fail(new Error("second"));
        // Third consecutive failure exhausts the configured 3 attempts,
        // so the overall operation rejects and no projection is written.
        const attempt3 = await control.suspend(opened);
        attempt3.fail(new Error("third"));
        await pending.catch(() => { });
        await assert.cashflow(account.id).toNotExist();
    });
});
|
|
42
|
+
//# sourceMappingURL=attempts.spec.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"attempts.spec.js","sourceRoot":"","sources":["../../../src/projection/cases/attempts.spec.ts"],"names":[],"mappings":";;AAAA,0DAAsD;AAEtD,MAAM,IAAI,GAAG,IAAA,0BAAW,EAAC,WAAW,EAAE;IACpC,SAAS,EAAE;QACT,KAAK,EAAE,EAAE,QAAQ,EAAE,CAAC,EAAE,QAAQ,EAAE,EAAE,EAAE,QAAQ,EAAE,EAAE,EAAE,OAAO,EAAE,CAAC,EAAE;QAC9D,OAAO,EAAE,EAAE,SAAS,EAAE,GAAG,EAAE;KAC5B;CACF,CAAC,CAAC;AAEH,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE;IACjB,EAAE,CAAC,8DAA8D,EAAE,KAAK,IAAI,EAAE;QAC5E,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QAE5D,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC;QACxD,MAAM,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAExB,+DAA+D;QAC/D,MAAM,SAAS,GAAG,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAErC,MAAM,KAAK,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5C,KAAK,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;QAE/B,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC7C,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC;QAEjC,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC9C,OAAO,CAAC,MAAM,EAAE,CAAC;QAEjB,MAAM,SAAS,CAAC;QAEhB,MAAM,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,EAAE,EAAE,EAAE,OAAO,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC;IACxE,CAAC,CAAC,CAAC;IAEH,EAAE,CAAC,8DAA8D,EAAE,KAAK,IAAI,EAAE;QAC5E,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;QAE5D,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC;QACxD,MAAM,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,SAAS,GAAG,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAErC,MAAM,KAAK,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5C,KAAK,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;QAE/B,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC7C,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC;QAEjC,iEAAiE;QACjE,MAAM,KAAK,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5C,KAAK,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;QAE/B,MAAM,SAAS,CAAC,KA
AK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;QAEhC,MAAM,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,UAAU,EAAE,CAAC;IACjD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC","sourcesContent":["import { caseFixture } from \"../testkit/case-fixture\";\n\nconst test = caseFixture(\"Breathing\", {\n projector: {\n retry: { attempts: 3, minDelay: 10, maxDelay: 10, backoff: 1 },\n enqueue: { batchSize: 100 },\n },\n});\n\ntest.describe(() => {\n it(\"will attempt multiple retries when handler explicitely fails\", async () => {\n const { events, act, control, assert } = await test.setup();\n\n const [account, opened] = events.open(`${test.name}_1`);\n await act.save(account);\n\n // Only one handle call, but we will fail multiple times inside\n const operation = act.handle(opened);\n\n const first = await control.suspend(opened);\n first.fail(new Error(\"first\"));\n\n const second = await control.suspend(opened);\n second.fail(new Error(\"second\"));\n\n const catched = await control.suspend(opened);\n catched.resume();\n\n await operation;\n\n await assert.cashflow(account.id).toHave({ id: account.id, flow: 0 });\n });\n\n it(\"but if we exceed the max attempts, the operation should fail\", async () => {\n const { events, act, control, assert } = await test.setup();\n\n const [account, opened] = events.open(`${test.name}_2`);\n await act.save(account);\n\n const operation = act.handle(opened);\n\n const first = await control.suspend(opened);\n first.fail(new Error(\"first\"));\n\n const second = await control.suspend(opened);\n second.fail(new Error(\"second\"));\n\n // This is the third failure, which exceeds the max attempts of 3\n const third = await control.suspend(opened);\n third.fail(new Error(\"third\"));\n\n await operation.catch(() => {});\n\n await assert.cashflow(account.id).toNotExist();\n });\n});\n"]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batchlast.spec.d.ts","sourceRoot":"","sources":["../../../src/projection/cases/batchlast.spec.ts"],"names":[],"mappings":""}
|