@ddd-ts/event-sourcing-firestore 0.0.37 → 0.0.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/LICENSE +21 -0
  2. package/dist/_virtual/_rolldown/runtime.js +29 -0
  3. package/dist/firestore.event-lake.aggregate-store.d.ts +35 -0
  4. package/dist/firestore.event-lake.aggregate-store.d.ts.map +1 -0
  5. package/dist/firestore.event-lake.aggregate-store.js +38 -0
  6. package/dist/firestore.event-lake.aggregate-store.mjs +36 -0
  7. package/dist/firestore.event-lake.aggregate-store.spec.d.ts +2 -0
  8. package/dist/firestore.event-lake.aggregate-store.spec.d.ts.map +1 -0
  9. package/dist/firestore.event-lake.storage-layer.d.ts +14 -0
  10. package/dist/firestore.event-lake.storage-layer.d.ts.map +1 -0
  11. package/dist/firestore.event-lake.storage-layer.js +67 -0
  12. package/dist/firestore.event-lake.storage-layer.mjs +65 -0
  13. package/dist/firestore.event-lake.store.d.ts +6 -0
  14. package/dist/firestore.event-lake.store.d.ts.map +1 -0
  15. package/dist/firestore.event-lake.store.js +14 -0
  16. package/dist/firestore.event-lake.store.mjs +13 -0
  17. package/dist/firestore.event-lake.store.spec.d.ts +2 -0
  18. package/dist/firestore.event-lake.store.spec.d.ts.map +1 -0
  19. package/dist/firestore.event-stream-store.spec.d.ts +2 -0
  20. package/dist/firestore.event-stream-store.spec.d.ts.map +1 -0
  21. package/dist/firestore.event-stream.aggregate-store.d.ts +30 -0
  22. package/dist/firestore.event-stream.aggregate-store.d.ts.map +1 -0
  23. package/dist/firestore.event-stream.aggregate-store.js +38 -0
  24. package/dist/firestore.event-stream.aggregate-store.mjs +36 -0
  25. package/dist/firestore.event-stream.aggregate-store.spec.d.ts +2 -0
  26. package/dist/firestore.event-stream.aggregate-store.spec.d.ts.map +1 -0
  27. package/dist/firestore.event-stream.storage-layer.d.ts +15 -0
  28. package/dist/firestore.event-stream.storage-layer.d.ts.map +1 -0
  29. package/dist/firestore.event-stream.storage-layer.js +67 -0
  30. package/dist/firestore.event-stream.storage-layer.mjs +65 -0
  31. package/dist/firestore.event-stream.store.d.ts +6 -0
  32. package/dist/firestore.event-stream.store.d.ts.map +1 -0
  33. package/dist/firestore.event-stream.store.js +14 -0
  34. package/dist/firestore.event-stream.store.mjs +13 -0
  35. package/dist/firestore.projected-stream.reader.d.ts +13 -0
  36. package/dist/firestore.projected-stream.reader.d.ts.map +1 -0
  37. package/dist/firestore.projected-stream.reader.js +35 -0
  38. package/dist/firestore.projected-stream.reader.mjs +34 -0
  39. package/dist/firestore.projected-stream.reader.spec.d.ts +2 -0
  40. package/dist/firestore.projected-stream.reader.spec.d.ts.map +1 -0
  41. package/dist/firestore.projected-stream.storage-layer.d.ts +31 -0
  42. package/dist/firestore.projected-stream.storage-layer.d.ts.map +1 -0
  43. package/dist/firestore.projected-stream.storage-layer.js +121 -0
  44. package/dist/firestore.projected-stream.storage-layer.mjs +118 -0
  45. package/dist/firestore.snapshotter.d.ts +6 -0
  46. package/dist/firestore.snapshotter.d.ts.map +1 -0
  47. package/dist/firestore.snapshotter.js +36 -0
  48. package/dist/firestore.snapshotter.mjs +35 -0
  49. package/dist/index.d.ts +11 -0
  50. package/dist/index.d.ts.map +1 -0
  51. package/dist/index.js +30 -0
  52. package/dist/index.mjs +12 -0
  53. package/dist/projection/cases/attempts.spec.d.ts +2 -0
  54. package/dist/projection/cases/attempts.spec.d.ts.map +1 -0
  55. package/dist/projection/cases/batchlast.spec.d.ts +2 -0
  56. package/dist/projection/cases/batchlast.spec.d.ts.map +1 -0
  57. package/dist/projection/cases/bigshuffle.spec.d.ts +2 -0
  58. package/dist/projection/cases/bigshuffle.spec.d.ts.map +1 -0
  59. package/dist/projection/cases/burst.spec.d.ts +2 -0
  60. package/dist/projection/cases/burst.spec.d.ts.map +1 -0
  61. package/dist/projection/cases/claimtimeout.spec.d.ts +2 -0
  62. package/dist/projection/cases/claimtimeout.spec.d.ts.map +1 -0
  63. package/dist/projection/cases/concurrency.spec.d.ts +2 -0
  64. package/dist/projection/cases/concurrency.spec.d.ts.map +1 -0
  65. package/dist/projection/cases/deduplicate.spec.d.ts +2 -0
  66. package/dist/projection/cases/deduplicate.spec.d.ts.map +1 -0
  67. package/dist/projection/cases/defer.spec.d.ts +2 -0
  68. package/dist/projection/cases/defer.spec.d.ts.map +1 -0
  69. package/dist/projection/cases/lock.spec.d.ts +2 -0
  70. package/dist/projection/cases/lock.spec.d.ts.map +1 -0
  71. package/dist/projection/cases/skip.spec.d.ts +2 -0
  72. package/dist/projection/cases/skip.spec.d.ts.map +1 -0
  73. package/dist/projection/cases/stress.spec.d.ts +2 -0
  74. package/dist/projection/cases/stress.spec.d.ts.map +1 -0
  75. package/dist/projection/firestore.projector.d.ts +130 -0
  76. package/dist/projection/firestore.projector.d.ts.map +1 -0
  77. package/dist/projection/firestore.projector.js +484 -0
  78. package/dist/projection/firestore.projector.mjs +479 -0
  79. package/dist/projection/testkit/case-fixture.d.ts +610 -0
  80. package/dist/projection/testkit/case-fixture.d.ts.map +1 -0
  81. package/dist/projection/testkit.d.ts +44 -0
  82. package/dist/projection/testkit.d.ts.map +1 -0
  83. package/dist/projection/trace.decorator.d.ts +2 -0
  84. package/dist/projection/trace.decorator.d.ts.map +1 -0
  85. package/package.json +43 -41
@@ -0,0 +1,65 @@
1
+ import { StreamId } from "@ddd-ts/core";
2
+ import { DefaultConverter } from "@ddd-ts/store-firestore";
3
+ import * as fb from "firebase-admin";
4
+
5
+ //#region src/firestore.event-stream.storage-layer.ts
6
+ const serverTimestamp = fb.firestore.FieldValue.serverTimestamp;
7
+ var FirestoreEventStreamStorageLayer = class {
8
+ constructor(firestore, converter = new DefaultConverter()) {
9
+ this.firestore = firestore;
10
+ this.converter = converter;
11
+ }
12
+ isLocalRevisionOutdatedError(error) {
13
+ return typeof error === "object" && error !== null && "code" in error && error.code === 6;
14
+ }
15
+ getCollection(streamId) {
16
+ return this.firestore.collection("event-store").doc(streamId.aggregateType).collection("streams").doc(streamId.aggregateId).collection("events");
17
+ }
18
+ async append(streamId, changes, expectedRevision, trx) {
19
+ const collection = this.getCollection(streamId);
20
+ const result = [];
21
+ let revision = expectedRevision + 1;
22
+ for (const change of changes) {
23
+ const storageChange = {
24
+ aggregateType: streamId.aggregateType,
25
+ eventId: change.id,
26
+ aggregateId: streamId.aggregateId,
27
+ revision,
28
+ name: change.name,
29
+ payload: change.payload,
30
+ occurredAt: serverTimestamp(),
31
+ version: change.version
32
+ };
33
+ const ref = collection.doc(`${revision}`);
34
+ result.push({
35
+ ...change,
36
+ ref: ref.path,
37
+ revision,
38
+ occurredAt: void 0
39
+ });
40
+ trx.transaction.create(ref, this.converter.toFirestore(storageChange));
41
+ revision++;
42
+ }
43
+ return result;
44
+ }
45
+ async *read(streamId, startAt) {
46
+ const query = this.getCollection(streamId).where("revision", ">=", startAt || 0).orderBy("revision", "asc");
47
+ for await (const event of query.stream()) {
48
+ const e = event;
49
+ const data = this.converter.fromFirestore(e);
50
+ yield {
51
+ id: data.eventId,
52
+ ref: e.ref.path,
53
+ revision: data.revision,
54
+ name: data.name,
55
+ $name: data.name,
56
+ payload: data.payload,
57
+ occurredAt: data.occurredAt,
58
+ version: data.version ?? 1
59
+ };
60
+ }
61
+ }
62
+ };
63
+
64
+ //#endregion
65
+ export { FirestoreEventStreamStorageLayer };
@@ -0,0 +1,6 @@
1
+ import { EventStreamStore, type IEsEvent, type IEventBus, type ISerializer } from "@ddd-ts/core";
2
+ import { Firestore } from "firebase-admin/firestore";
3
+ export declare class FirestoreEventStreamStore<Event extends IEsEvent> extends EventStreamStore<Event> {
4
+ constructor(firestore: Firestore, serializer: ISerializer<Event>, eventBus?: IEventBus);
5
+ }
6
+ //# sourceMappingURL=firestore.event-stream.store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"firestore.event-stream.store.d.ts","sourceRoot":"","sources":["../src/firestore.event-stream.store.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,gBAAgB,EAChB,KAAK,QAAQ,EACb,KAAK,SAAS,EACd,KAAK,WAAW,EACjB,MAAM,cAAc,CAAC;AAEtB,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAErD,qBAAa,yBAAyB,CACpC,KAAK,SAAS,QAAQ,CACtB,SAAQ,gBAAgB,CAAC,KAAK,CAAC;gBAE7B,SAAS,EAAE,SAAS,EACpB,UAAU,EAAE,WAAW,CAAC,KAAK,CAAC,EAC9B,QAAQ,CAAC,EAAE,SAAS;CAQvB"}
@@ -0,0 +1,14 @@
1
+ const require_runtime = require('./_virtual/_rolldown/runtime.js');
2
+ const require_firestore_event_stream_storage_layer = require('./firestore.event-stream.storage-layer.js');
3
+ let _ddd_ts_core = require("@ddd-ts/core");
4
+ let firebase_admin_firestore = require("firebase-admin/firestore");
5
+
6
+ //#region src/firestore.event-stream.store.ts
7
+ var FirestoreEventStreamStore = class extends _ddd_ts_core.EventStreamStore {
8
+ constructor(firestore, serializer, eventBus) {
9
+ super(new require_firestore_event_stream_storage_layer.FirestoreEventStreamStorageLayer(firestore), serializer, eventBus);
10
+ }
11
+ };
12
+
13
+ //#endregion
14
+ exports.FirestoreEventStreamStore = FirestoreEventStreamStore;
@@ -0,0 +1,13 @@
1
+ import { FirestoreEventStreamStorageLayer } from "./firestore.event-stream.storage-layer.mjs";
2
+ import { EventStreamStore } from "@ddd-ts/core";
3
+ import "firebase-admin/firestore";
4
+
5
+ //#region src/firestore.event-stream.store.ts
6
+ var FirestoreEventStreamStore = class extends EventStreamStore {
7
+ constructor(firestore, serializer, eventBus) {
8
+ super(new FirestoreEventStreamStorageLayer(firestore), serializer, eventBus);
9
+ }
10
+ };
11
+
12
+ //#endregion
13
+ export { FirestoreEventStreamStore };
@@ -0,0 +1,13 @@
1
+ import { Cursor, type IEsEvent, type IFact, type ISavedChange, type ISerializer, ProjectedStream, ProjectedStreamReader } from "@ddd-ts/core";
2
+ import { Firestore } from "firebase-admin/firestore";
3
+ import { FirestoreProjectedStreamStorageLayer } from "./firestore.projected-stream.storage-layer";
4
+ export declare class FirestoreProjectedStreamReader<Event extends IEsEvent> extends ProjectedStreamReader<Event> {
5
+ storage: FirestoreProjectedStreamStorageLayer;
6
+ serializer: ISerializer<Event>;
7
+ constructor(firestore: Firestore, serializer: ISerializer<Event>);
8
+ getCursor(savedChange: ISavedChange<Event>): Promise<Cursor | undefined>;
9
+ get(cursor: Cursor): Promise<IFact<Event> | undefined>;
10
+ slice(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor, limit?: number): Promise<any>;
11
+ read(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor): AsyncGenerator<IFact<Event>, void, unknown>;
12
+ }
13
+ //# sourceMappingURL=firestore.projected-stream.reader.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"firestore.projected-stream.reader.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.reader.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,EACN,KAAK,QAAQ,EACb,KAAK,KAAK,EACV,KAAK,YAAY,EAEjB,KAAK,WAAW,EAChB,eAAe,EACf,qBAAqB,EACtB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AACrD,OAAO,EAAE,oCAAoC,EAAE,MAAM,4CAA4C,CAAC;AAElG,qBAAa,8BAA8B,CACzC,KAAK,SAAS,QAAQ,CACtB,SAAQ,qBAAqB,CAAC,KAAK,CAAC;IACpC,OAAO,EAAE,oCAAoC,CAAC;IAC9C,UAAU,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;gBACnB,SAAS,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,CAAC,KAAK,CAAC;IAO1D,SAAS,CAAC,WAAW,EAAE,YAAY,CAAC,KAAK,CAAC;IAK1C,GAAG,CAAC,MAAM,EAAE,MAAM;IAUlB,KAAK,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,MAAM;IAcT,IAAI,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM;CAWjB"}
@@ -0,0 +1,35 @@
1
+ const require_runtime = require('./_virtual/_rolldown/runtime.js');
2
+ const require_firestore_projected_stream_storage_layer = require('./firestore.projected-stream.storage-layer.js');
3
+ let _ddd_ts_core = require("@ddd-ts/core");
4
+ let firebase_admin_firestore = require("firebase-admin/firestore");
5
+
6
+ //#region src/firestore.projected-stream.reader.ts
7
+ var FirestoreProjectedStreamReader = class extends _ddd_ts_core.ProjectedStreamReader {
8
+ storage;
9
+ serializer;
10
+ constructor(firestore, serializer) {
11
+ const storage = new require_firestore_projected_stream_storage_layer.FirestoreProjectedStreamStorageLayer(firestore);
12
+ super(storage, serializer);
13
+ this.storage = storage;
14
+ this.serializer = serializer;
15
+ }
16
+ async getCursor(savedChange) {
17
+ const serialized = await this.serializer.serialize(savedChange);
18
+ return this.storage.getCursor(serialized);
19
+ }
20
+ async get(cursor) {
21
+ const serialized = await this.storage.get(cursor);
22
+ if (!serialized) return;
23
+ return this.serializer.deserialize(serialized);
24
+ }
25
+ async slice(projectedStream, shard, startAfter, endAt, limit) {
26
+ const serialized = await this.storage.slice(projectedStream, shard, startAfter, endAt, limit);
27
+ return Promise.all(serialized.map((s) => this.serializer.deserialize(s)));
28
+ }
29
+ async *read(projectedStream, shard, startAfter, endAt) {
30
+ for await (const serialized of this.storage.read(projectedStream, shard, startAfter, endAt)) yield this.serializer.deserialize(serialized);
31
+ }
32
+ };
33
+
34
+ //#endregion
35
+ exports.FirestoreProjectedStreamReader = FirestoreProjectedStreamReader;
@@ -0,0 +1,34 @@
1
+ import { FirestoreProjectedStreamStorageLayer } from "./firestore.projected-stream.storage-layer.mjs";
2
+ import { Cursor, ProjectedStreamReader } from "@ddd-ts/core";
3
+ import "firebase-admin/firestore";
4
+
5
+ //#region src/firestore.projected-stream.reader.ts
6
+ var FirestoreProjectedStreamReader = class extends ProjectedStreamReader {
7
+ storage;
8
+ serializer;
9
+ constructor(firestore, serializer) {
10
+ const storage = new FirestoreProjectedStreamStorageLayer(firestore);
11
+ super(storage, serializer);
12
+ this.storage = storage;
13
+ this.serializer = serializer;
14
+ }
15
+ async getCursor(savedChange) {
16
+ const serialized = await this.serializer.serialize(savedChange);
17
+ return this.storage.getCursor(serialized);
18
+ }
19
+ async get(cursor) {
20
+ const serialized = await this.storage.get(cursor);
21
+ if (!serialized) return;
22
+ return this.serializer.deserialize(serialized);
23
+ }
24
+ async slice(projectedStream, shard, startAfter, endAt, limit) {
25
+ const serialized = await this.storage.slice(projectedStream, shard, startAfter, endAt, limit);
26
+ return Promise.all(serialized.map((s) => this.serializer.deserialize(s)));
27
+ }
28
+ async *read(projectedStream, shard, startAfter, endAt) {
29
+ for await (const serialized of this.storage.read(projectedStream, shard, startAfter, endAt)) yield this.serializer.deserialize(serialized);
30
+ }
31
+ };
32
+
33
+ //#endregion
34
+ export { FirestoreProjectedStreamReader };
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=firestore.projected-stream.reader.spec.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"firestore.projected-stream.reader.spec.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.reader.spec.ts"],"names":[],"mappings":""}
@@ -0,0 +1,31 @@
1
+ import { type ISerializedFact, type ISerializedSavedChange, LakeSource, ProjectedStream, type ProjectedStreamStorageLayer, StreamSource } from "@ddd-ts/core";
2
+ import { DefaultConverter } from "@ddd-ts/store-firestore";
3
+ import { Filter, Firestore, Timestamp } from "firebase-admin/firestore";
4
+ import { MicrosecondTimestamp } from "@ddd-ts/shape";
5
+ import { Cursor } from "@ddd-ts/core/dist/components/cursor";
6
+ export declare class FirestoreLakeSourceFilter {
7
+ filter(shard: string, lakeSource: LakeSource): Filter;
8
+ }
9
+ export declare class FirestoreStreamSourceFilter {
10
+ filter(shard: string, streamSource: StreamSource): Filter;
11
+ }
12
+ export declare class FirestoreProjectedStreamStorageLayer implements ProjectedStreamStorageLayer {
13
+ private readonly firestore;
14
+ readonly converter: DefaultConverter<FirebaseFirestore.DocumentData>;
15
+ constructor(firestore: Firestore, converter?: DefaultConverter<FirebaseFirestore.DocumentData>);
16
+ read(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor): AsyncGenerator<{
17
+ id: any;
18
+ ref: string;
19
+ revision: any;
20
+ name: any;
21
+ $name: any;
22
+ payload: any;
23
+ occurredAt: any;
24
+ version: any;
25
+ }, void, unknown>;
26
+ microsecondToTimestamp(microseconds: MicrosecondTimestamp): Timestamp;
27
+ get(cursor: Cursor): Promise<ISerializedFact | undefined>;
28
+ getCursor(savedChange: ISerializedSavedChange): Promise<Cursor | undefined>;
29
+ slice(projectedStream: ProjectedStream, shard: string, startAfter?: Cursor, endAt?: Cursor, limit?: number): Promise<ISerializedFact[]>;
30
+ }
31
+ //# sourceMappingURL=firestore.projected-stream.storage-layer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"firestore.projected-stream.storage-layer.d.ts","sourceRoot":"","sources":["../src/firestore.projected-stream.storage-layer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,sBAAsB,EAC3B,UAAU,EACV,eAAe,EACf,KAAK,2BAA2B,EAChC,YAAY,EACb,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAC3D,OAAO,EACL,MAAM,EACN,SAAS,EAET,SAAS,EACV,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AACrD,OAAO,EAAE,MAAM,EAAE,MAAM,qCAAqC,CAAC;AAE7D,qBAAa,yBAAyB;IACpC,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,UAAU;CAM7C;AAED,qBAAa,2BAA2B;IACtC,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,YAAY,EAAE,YAAY;CAOjD;AAED,qBAAa,oCACX,YAAW,2BAA2B;IAGpC,OAAO,CAAC,QAAQ,CAAC,SAAS;aACV,SAAS;gBADR,SAAS,EAAE,SAAS,EACrB,SAAS,mDAAyB;IAG7C,IAAI,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM;;;;;;;;;;IA2CT,sBAAsB,CAAC,YAAY,EAAE,oBAAoB;IAM1D,GAAG,CAAC,MAAM,EAAE,MAAM;IAkBlB,SAAS,CACb,WAAW,EAAE,sBAAsB,GAClC,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;IAcxB,KAAK,CACT,eAAe,EAAE,eAAe,EAChC,KAAK,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,KAAK,CAAC,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,MAAM;CAiDjB"}
@@ -0,0 +1,121 @@
1
+ const require_runtime = require('./_virtual/_rolldown/runtime.js');
2
+ let _ddd_ts_core = require("@ddd-ts/core");
3
+ let _ddd_ts_store_firestore = require("@ddd-ts/store-firestore");
4
+ let firebase_admin_firestore = require("firebase-admin/firestore");
5
+ let _ddd_ts_shape = require("@ddd-ts/shape");
6
+ let _ddd_ts_core_dist_components_cursor = require("@ddd-ts/core/dist/components/cursor");
7
+
8
+ //#region src/firestore.projected-stream.storage-layer.ts
9
+ var FirestoreLakeSourceFilter = class {
10
+ filter(shard, lakeSource) {
11
+ return firebase_admin_firestore.Filter.and(firebase_admin_firestore.Filter.where(`payload.${lakeSource.shardKey}`, "==", shard), firebase_admin_firestore.Filter.where("name", "in", lakeSource.events));
12
+ }
13
+ };
14
+ var FirestoreStreamSourceFilter = class {
15
+ filter(shard, streamSource) {
16
+ return firebase_admin_firestore.Filter.and(firebase_admin_firestore.Filter.where("aggregateType", "==", streamSource.aggregateType), firebase_admin_firestore.Filter.where(`payload.${streamSource.shardKey}`, "==", shard), firebase_admin_firestore.Filter.where("name", "in", streamSource.events));
17
+ }
18
+ };
19
+ var FirestoreProjectedStreamStorageLayer = class {
20
+ constructor(firestore, converter = new _ddd_ts_store_firestore.DefaultConverter()) {
21
+ this.firestore = firestore;
22
+ this.converter = converter;
23
+ }
24
+ async *read(projectedStream, shard, startAfter, endAt) {
25
+ let query = this.firestore.collectionGroup("events").orderBy("occurredAt").orderBy("revision");
26
+ const filters = projectedStream.sources.map((source) => {
27
+ if (source instanceof _ddd_ts_core.LakeSource) return new FirestoreLakeSourceFilter().filter(shard, source);
28
+ if (source instanceof _ddd_ts_core.StreamSource) return new FirestoreStreamSourceFilter().filter(shard, source);
29
+ throw new Error("Unknown source type");
30
+ });
31
+ query = query.where(firebase_admin_firestore.Filter.or(...filters));
32
+ if (startAfter) {
33
+ const ts = this.microsecondToTimestamp(startAfter.occurredAt);
34
+ query = query.startAfter(ts, startAfter.revision);
35
+ }
36
+ if (endAt) {
37
+ const ts = this.microsecondToTimestamp(endAt.occurredAt);
38
+ query = query.endAt(ts, endAt.revision);
39
+ }
40
+ for await (const doc of query.stream()) {
41
+ const data = this.converter.fromFirestore(doc);
42
+ yield {
43
+ id: data.eventId,
44
+ ref: doc.ref.path,
45
+ revision: data.revision,
46
+ name: data.name,
47
+ $name: data.name,
48
+ payload: data.payload,
49
+ occurredAt: data.occurredAt,
50
+ version: data.version ?? 1
51
+ };
52
+ }
53
+ }
54
+ microsecondToTimestamp(microseconds) {
55
+ const seconds = BigInt(microseconds.micros) / 1000000n;
56
+ const nanoseconds = BigInt(microseconds.micros) % 1000000n * 1000n;
57
+ return new firebase_admin_firestore.Timestamp(Number(seconds), Number(nanoseconds));
58
+ }
59
+ async get(cursor) {
60
+ const doc = await this.firestore.doc(cursor.ref).get();
61
+ if (!doc.exists) return;
62
+ const data = this.converter.fromFirestoreSnapshot(doc);
63
+ return {
64
+ id: data.eventId,
65
+ ref: doc.ref.path,
66
+ revision: data.revision,
67
+ name: data.name,
68
+ $name: data.name,
69
+ payload: data.payload,
70
+ occurredAt: data.occurredAt,
71
+ version: data.version ?? 1
72
+ };
73
+ }
74
+ async getCursor(savedChange) {
75
+ const doc = await this.firestore.doc(savedChange.ref).get();
76
+ if (!doc.exists) return;
77
+ const data = this.converter.fromFirestoreSnapshot(doc);
78
+ return _ddd_ts_core_dist_components_cursor.Cursor.deserialize({
79
+ eventId: data.eventId,
80
+ ref: doc.ref.path,
81
+ occurredAt: data.occurredAt,
82
+ revision: data.revision
83
+ });
84
+ }
85
+ async slice(projectedStream, shard, startAfter, endAt, limit) {
86
+ let query = this.firestore.collectionGroup("events").orderBy("occurredAt").orderBy("revision");
87
+ const filters = projectedStream.sources.map((source) => {
88
+ if (source instanceof _ddd_ts_core.LakeSource) return new FirestoreLakeSourceFilter().filter(shard, source);
89
+ if (source instanceof _ddd_ts_core.StreamSource) return new FirestoreStreamSourceFilter().filter(shard, source);
90
+ throw new Error("Unknown source type");
91
+ });
92
+ query = query.where(firebase_admin_firestore.Filter.or(...filters));
93
+ if (startAfter) {
94
+ const ts = this.microsecondToTimestamp(startAfter.occurredAt);
95
+ query = query.startAfter(ts, startAfter.revision);
96
+ }
97
+ if (endAt) {
98
+ const ts = this.microsecondToTimestamp(endAt.occurredAt);
99
+ query = query.endAt(ts, endAt.revision);
100
+ }
101
+ if (limit) query = query.limit(limit);
102
+ return (await query.get()).docs.map((doc) => {
103
+ const data = this.converter.fromFirestore(doc);
104
+ return {
105
+ id: data.eventId,
106
+ ref: doc.ref.path,
107
+ revision: data.revision,
108
+ name: data.name,
109
+ $name: data.name,
110
+ payload: data.payload,
111
+ occurredAt: data.occurredAt,
112
+ version: data.version ?? 1
113
+ };
114
+ });
115
+ }
116
+ };
117
+
118
+ //#endregion
119
+ exports.FirestoreLakeSourceFilter = FirestoreLakeSourceFilter;
120
+ exports.FirestoreProjectedStreamStorageLayer = FirestoreProjectedStreamStorageLayer;
121
+ exports.FirestoreStreamSourceFilter = FirestoreStreamSourceFilter;
@@ -0,0 +1,118 @@
1
+ import { LakeSource, StreamSource } from "@ddd-ts/core";
2
+ import { DefaultConverter } from "@ddd-ts/store-firestore";
3
+ import { Filter, Timestamp } from "firebase-admin/firestore";
4
+ import { MicrosecondTimestamp } from "@ddd-ts/shape";
5
+ import { Cursor as Cursor$1 } from "@ddd-ts/core/dist/components/cursor";
6
+
7
+ //#region src/firestore.projected-stream.storage-layer.ts
8
+ var FirestoreLakeSourceFilter = class {
9
+ filter(shard, lakeSource) {
10
+ return Filter.and(Filter.where(`payload.${lakeSource.shardKey}`, "==", shard), Filter.where("name", "in", lakeSource.events));
11
+ }
12
+ };
13
+ var FirestoreStreamSourceFilter = class {
14
+ filter(shard, streamSource) {
15
+ return Filter.and(Filter.where("aggregateType", "==", streamSource.aggregateType), Filter.where(`payload.${streamSource.shardKey}`, "==", shard), Filter.where("name", "in", streamSource.events));
16
+ }
17
+ };
18
+ var FirestoreProjectedStreamStorageLayer = class {
19
+ constructor(firestore, converter = new DefaultConverter()) {
20
+ this.firestore = firestore;
21
+ this.converter = converter;
22
+ }
23
+ async *read(projectedStream, shard, startAfter, endAt) {
24
+ let query = this.firestore.collectionGroup("events").orderBy("occurredAt").orderBy("revision");
25
+ const filters = projectedStream.sources.map((source) => {
26
+ if (source instanceof LakeSource) return new FirestoreLakeSourceFilter().filter(shard, source);
27
+ if (source instanceof StreamSource) return new FirestoreStreamSourceFilter().filter(shard, source);
28
+ throw new Error("Unknown source type");
29
+ });
30
+ query = query.where(Filter.or(...filters));
31
+ if (startAfter) {
32
+ const ts = this.microsecondToTimestamp(startAfter.occurredAt);
33
+ query = query.startAfter(ts, startAfter.revision);
34
+ }
35
+ if (endAt) {
36
+ const ts = this.microsecondToTimestamp(endAt.occurredAt);
37
+ query = query.endAt(ts, endAt.revision);
38
+ }
39
+ for await (const doc of query.stream()) {
40
+ const data = this.converter.fromFirestore(doc);
41
+ yield {
42
+ id: data.eventId,
43
+ ref: doc.ref.path,
44
+ revision: data.revision,
45
+ name: data.name,
46
+ $name: data.name,
47
+ payload: data.payload,
48
+ occurredAt: data.occurredAt,
49
+ version: data.version ?? 1
50
+ };
51
+ }
52
+ }
53
+ microsecondToTimestamp(microseconds) {
54
+ const seconds = BigInt(microseconds.micros) / 1000000n;
55
+ const nanoseconds = BigInt(microseconds.micros) % 1000000n * 1000n;
56
+ return new Timestamp(Number(seconds), Number(nanoseconds));
57
+ }
58
+ async get(cursor) {
59
+ const doc = await this.firestore.doc(cursor.ref).get();
60
+ if (!doc.exists) return;
61
+ const data = this.converter.fromFirestoreSnapshot(doc);
62
+ return {
63
+ id: data.eventId,
64
+ ref: doc.ref.path,
65
+ revision: data.revision,
66
+ name: data.name,
67
+ $name: data.name,
68
+ payload: data.payload,
69
+ occurredAt: data.occurredAt,
70
+ version: data.version ?? 1
71
+ };
72
+ }
73
+ async getCursor(savedChange) {
74
+ const doc = await this.firestore.doc(savedChange.ref).get();
75
+ if (!doc.exists) return;
76
+ const data = this.converter.fromFirestoreSnapshot(doc);
77
+ return Cursor$1.deserialize({
78
+ eventId: data.eventId,
79
+ ref: doc.ref.path,
80
+ occurredAt: data.occurredAt,
81
+ revision: data.revision
82
+ });
83
+ }
84
+ async slice(projectedStream, shard, startAfter, endAt, limit) {
85
+ let query = this.firestore.collectionGroup("events").orderBy("occurredAt").orderBy("revision");
86
+ const filters = projectedStream.sources.map((source) => {
87
+ if (source instanceof LakeSource) return new FirestoreLakeSourceFilter().filter(shard, source);
88
+ if (source instanceof StreamSource) return new FirestoreStreamSourceFilter().filter(shard, source);
89
+ throw new Error("Unknown source type");
90
+ });
91
+ query = query.where(Filter.or(...filters));
92
+ if (startAfter) {
93
+ const ts = this.microsecondToTimestamp(startAfter.occurredAt);
94
+ query = query.startAfter(ts, startAfter.revision);
95
+ }
96
+ if (endAt) {
97
+ const ts = this.microsecondToTimestamp(endAt.occurredAt);
98
+ query = query.endAt(ts, endAt.revision);
99
+ }
100
+ if (limit) query = query.limit(limit);
101
+ return (await query.get()).docs.map((doc) => {
102
+ const data = this.converter.fromFirestore(doc);
103
+ return {
104
+ id: data.eventId,
105
+ ref: doc.ref.path,
106
+ revision: data.revision,
107
+ name: data.name,
108
+ $name: data.name,
109
+ payload: data.payload,
110
+ occurredAt: data.occurredAt,
111
+ version: data.version ?? 1
112
+ };
113
+ });
114
+ }
115
+ };
116
+
117
+ //#endregion
118
+ export { FirestoreLakeSourceFilter, FirestoreProjectedStreamStorageLayer, FirestoreStreamSourceFilter };
@@ -0,0 +1,6 @@
1
+ import { type IEventSourced, type IIdentifiable, type ISerializer } from "@ddd-ts/core";
2
+ import { FirestoreStore } from "@ddd-ts/store-firestore";
3
+ export declare class FirestoreSnapshotter<A extends IEventSourced & IIdentifiable> extends FirestoreStore<A> {
4
+ constructor(aggregateType: string, database: FirebaseFirestore.Firestore, serializer: ISerializer<A>);
5
+ }
6
+ //# sourceMappingURL=firestore.snapshotter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"firestore.snapshotter.d.ts","sourceRoot":"","sources":["../src/firestore.snapshotter.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,aAAa,EAClB,KAAK,aAAa,EAClB,KAAK,WAAW,EACjB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,cAAc,EAAE,MAAM,yBAAyB,CAAC;AA4BzD,qBAAa,oBAAoB,CAC/B,CAAC,SAAS,aAAa,GAAG,aAAa,CACvC,SAAQ,cAAc,CAAC,CAAC,CAAC;gBAEvB,aAAa,EAAE,MAAM,EACrB,QAAQ,EAAE,iBAAiB,CAAC,SAAS,EACrC,UAAU,EAAE,WAAW,CAAC,CAAC,CAAC;CAY7B"}
@@ -0,0 +1,36 @@
1
+ const require_runtime = require('./_virtual/_rolldown/runtime.js');
2
+ let _ddd_ts_core = require("@ddd-ts/core");
3
+ let _ddd_ts_store_firestore = require("@ddd-ts/store-firestore");
4
+
5
+ //#region src/firestore.snapshotter.ts
6
+ var SnapshotSerializer = class {
7
+ constructor(serializer, aggregateType) {
8
+ this.serializer = serializer;
9
+ this.aggregateType = aggregateType;
10
+ }
11
+ async serialize(instance) {
12
+ return {
13
+ ...await this.serializer.serialize(instance),
14
+ $name: this.aggregateType,
15
+ revision: instance.acknowledgedRevision
16
+ };
17
+ }
18
+ async deserialize(serialized) {
19
+ const { revision, ...content } = serialized;
20
+ const instance = await this.serializer.deserialize({
21
+ $name: this.aggregateType,
22
+ ...content
23
+ });
24
+ instance.acknowledgedRevision = Number(revision);
25
+ return instance;
26
+ }
27
+ };
28
+ var FirestoreSnapshotter = class extends _ddd_ts_store_firestore.FirestoreStore {
29
+ constructor(aggregateType, database, serializer) {
30
+ const collection = database.collection("event-store").doc(aggregateType).collection("streams");
31
+ super(collection, new SnapshotSerializer(serializer, aggregateType), aggregateType);
32
+ }
33
+ };
34
+
35
+ //#endregion
36
+ exports.FirestoreSnapshotter = FirestoreSnapshotter;
@@ -0,0 +1,35 @@
1
+ import "@ddd-ts/core";
2
+ import { FirestoreStore } from "@ddd-ts/store-firestore";
3
+
4
+ //#region src/firestore.snapshotter.ts
5
+ var SnapshotSerializer = class {
6
+ constructor(serializer, aggregateType) {
7
+ this.serializer = serializer;
8
+ this.aggregateType = aggregateType;
9
+ }
10
+ async serialize(instance) {
11
+ return {
12
+ ...await this.serializer.serialize(instance),
13
+ $name: this.aggregateType,
14
+ revision: instance.acknowledgedRevision
15
+ };
16
+ }
17
+ async deserialize(serialized) {
18
+ const { revision, ...content } = serialized;
19
+ const instance = await this.serializer.deserialize({
20
+ $name: this.aggregateType,
21
+ ...content
22
+ });
23
+ instance.acknowledgedRevision = Number(revision);
24
+ return instance;
25
+ }
26
+ };
27
+ var FirestoreSnapshotter = class extends FirestoreStore {
28
+ constructor(aggregateType, database, serializer) {
29
+ const collection = database.collection("event-store").doc(aggregateType).collection("streams");
30
+ super(collection, new SnapshotSerializer(serializer, aggregateType), aggregateType);
31
+ }
32
+ };
33
+
34
+ //#endregion
35
+ export { FirestoreSnapshotter };
@@ -0,0 +1,11 @@
1
+ export { FirestoreEventLakeStorageLayer } from "./firestore.event-lake.storage-layer";
2
+ export { FirestoreEventLakeStore } from "./firestore.event-lake.store";
3
+ export { FirestoreEventStreamStorageLayer } from "./firestore.event-stream.storage-layer";
4
+ export { FirestoreEventStreamStore } from "./firestore.event-stream.store";
5
+ export { FirestoreEventStreamAggregateStore, MakeFirestoreEventStreamAggregateStore, } from "./firestore.event-stream.aggregate-store";
6
+ export { FirestoreEventLakeAggregateStore, MakeFirestoreEventLakeAggregateStore, } from "./firestore.event-lake.aggregate-store";
7
+ export { FirestoreProjectedStreamStorageLayer, FirestoreLakeSourceFilter, FirestoreStreamSourceFilter, } from "./firestore.projected-stream.storage-layer";
8
+ export { FirestoreProjectedStreamReader } from "./firestore.projected-stream.reader";
9
+ export { FirestoreSnapshotter } from "./firestore.snapshotter";
10
+ export { FirestoreProjector, FirestoreQueueStore, Task, AlreadyEnqueuedError, ClaimerId, } from "./projection/firestore.projector";
11
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,8BAA8B,EAAE,MAAM,sCAAsC,CAAC;AACtF,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAC;AACvE,OAAO,EAAE,gCAAgC,EAAE,MAAM,wCAAwC,CAAC;AAC1F,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAE3E,OAAO,EACL,kCAAkC,EAClC,sCAAsC,GACvC,MAAM,0CAA0C,CAAC;AAElD,OAAO,EACL,gCAAgC,EAChC,oCAAoC,GACrC,MAAM,wCAAwC,CAAC;AAEhD,OAAO,EACL,oCAAoC,EACpC,yBAAyB,EACzB,2BAA2B,GAC5B,MAAM,4CAA4C,CAAC;AAEpD,OAAO,EAAE,8BAA8B,EAAE,MAAM,qCAAqC,CAAC;AAErF,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAE/D,OAAO,EACL,kBAAkB,EAClB,mBAAmB,EACnB,IAAI,EACJ,oBAAoB,EACpB,SAAS,GACV,MAAM,kCAAkC,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,30 @@
1
+ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
2
+ const require_firestore_event_lake_storage_layer = require('./firestore.event-lake.storage-layer.js');
3
+ const require_firestore_event_lake_store = require('./firestore.event-lake.store.js');
4
+ const require_firestore_event_stream_storage_layer = require('./firestore.event-stream.storage-layer.js');
5
+ const require_firestore_event_stream_store = require('./firestore.event-stream.store.js');
6
+ const require_firestore_snapshotter = require('./firestore.snapshotter.js');
7
+ const require_firestore_event_stream_aggregate_store = require('./firestore.event-stream.aggregate-store.js');
8
+ const require_firestore_event_lake_aggregate_store = require('./firestore.event-lake.aggregate-store.js');
9
+ const require_firestore_projected_stream_storage_layer = require('./firestore.projected-stream.storage-layer.js');
10
+ const require_firestore_projected_stream_reader = require('./firestore.projected-stream.reader.js');
11
+ const require_firestore_projector = require('./projection/firestore.projector.js');
12
+
13
+ exports.AlreadyEnqueuedError = require_firestore_projector.AlreadyEnqueuedError;
14
+ exports.ClaimerId = require_firestore_projector.ClaimerId;
15
+ exports.FirestoreEventLakeAggregateStore = require_firestore_event_lake_aggregate_store.FirestoreEventLakeAggregateStore;
16
+ exports.FirestoreEventLakeStorageLayer = require_firestore_event_lake_storage_layer.FirestoreEventLakeStorageLayer;
17
+ exports.FirestoreEventLakeStore = require_firestore_event_lake_store.FirestoreEventLakeStore;
18
+ exports.FirestoreEventStreamAggregateStore = require_firestore_event_stream_aggregate_store.FirestoreEventStreamAggregateStore;
19
+ exports.FirestoreEventStreamStorageLayer = require_firestore_event_stream_storage_layer.FirestoreEventStreamStorageLayer;
20
+ exports.FirestoreEventStreamStore = require_firestore_event_stream_store.FirestoreEventStreamStore;
21
+ exports.FirestoreLakeSourceFilter = require_firestore_projected_stream_storage_layer.FirestoreLakeSourceFilter;
22
+ exports.FirestoreProjectedStreamReader = require_firestore_projected_stream_reader.FirestoreProjectedStreamReader;
23
+ exports.FirestoreProjectedStreamStorageLayer = require_firestore_projected_stream_storage_layer.FirestoreProjectedStreamStorageLayer;
24
+ exports.FirestoreProjector = require_firestore_projector.FirestoreProjector;
25
+ exports.FirestoreQueueStore = require_firestore_projector.FirestoreQueueStore;
26
+ exports.FirestoreSnapshotter = require_firestore_snapshotter.FirestoreSnapshotter;
27
+ exports.FirestoreStreamSourceFilter = require_firestore_projected_stream_storage_layer.FirestoreStreamSourceFilter;
28
+ exports.MakeFirestoreEventLakeAggregateStore = require_firestore_event_lake_aggregate_store.MakeFirestoreEventLakeAggregateStore;
29
+ exports.MakeFirestoreEventStreamAggregateStore = require_firestore_event_stream_aggregate_store.MakeFirestoreEventStreamAggregateStore;
30
+ exports.Task = require_firestore_projector.Task;
package/dist/index.mjs ADDED
@@ -0,0 +1,12 @@
1
+ import { FirestoreEventLakeStorageLayer } from "./firestore.event-lake.storage-layer.mjs";
2
+ import { FirestoreEventLakeStore } from "./firestore.event-lake.store.mjs";
3
+ import { FirestoreEventStreamStorageLayer } from "./firestore.event-stream.storage-layer.mjs";
4
+ import { FirestoreEventStreamStore } from "./firestore.event-stream.store.mjs";
5
+ import { FirestoreSnapshotter } from "./firestore.snapshotter.mjs";
6
+ import { FirestoreEventStreamAggregateStore, MakeFirestoreEventStreamAggregateStore } from "./firestore.event-stream.aggregate-store.mjs";
7
+ import { FirestoreEventLakeAggregateStore, MakeFirestoreEventLakeAggregateStore } from "./firestore.event-lake.aggregate-store.mjs";
8
+ import { FirestoreLakeSourceFilter, FirestoreProjectedStreamStorageLayer, FirestoreStreamSourceFilter } from "./firestore.projected-stream.storage-layer.mjs";
9
+ import { FirestoreProjectedStreamReader } from "./firestore.projected-stream.reader.mjs";
10
+ import { AlreadyEnqueuedError, ClaimerId, FirestoreProjector, FirestoreQueueStore, Task } from "./projection/firestore.projector.mjs";
11
+
12
+ export { AlreadyEnqueuedError, ClaimerId, FirestoreEventLakeAggregateStore, FirestoreEventLakeStorageLayer, FirestoreEventLakeStore, FirestoreEventStreamAggregateStore, FirestoreEventStreamStorageLayer, FirestoreEventStreamStore, FirestoreLakeSourceFilter, FirestoreProjectedStreamReader, FirestoreProjectedStreamStorageLayer, FirestoreProjector, FirestoreQueueStore, FirestoreSnapshotter, FirestoreStreamSourceFilter, MakeFirestoreEventLakeAggregateStore, MakeFirestoreEventStreamAggregateStore, Task };
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=attempts.spec.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"attempts.spec.d.ts","sourceRoot":"","sources":["../../../src/projection/cases/attempts.spec.ts"],"names":[],"mappings":""}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=batchlast.spec.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"batchlast.spec.d.ts","sourceRoot":"","sources":["../../../src/projection/cases/batchlast.spec.ts"],"names":[],"mappings":""}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=bigshuffle.spec.d.ts.map