@wataruoguchi/emmett-event-store-kysely 1.1.2 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +76 -164
- package/dist/event-store/consumers.d.ts +23 -0
- package/dist/event-store/consumers.d.ts.map +1 -0
- package/dist/event-store/consumers.js +155 -0
- package/dist/event-store/kysely-event-store.d.ts +42 -0
- package/dist/event-store/kysely-event-store.d.ts.map +1 -0
- package/dist/event-store/kysely-event-store.js +256 -0
- package/dist/index.cjs +584 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/projections/runner.d.ts +3 -2
- package/dist/projections/runner.d.ts.map +1 -1
- package/dist/projections/snapshot-projection.d.ts +120 -0
- package/dist/projections/snapshot-projection.d.ts.map +1 -0
- package/dist/projections/snapshot-projection.js +125 -0
- package/dist/types.d.ts +39 -11
- package/dist/types.d.ts.map +1 -1
- package/package.json +9 -14
- package/dist/event-store/aggregate-stream.d.ts +0 -10
- package/dist/event-store/aggregate-stream.d.ts.map +0 -1
- package/dist/event-store/aggregate-stream.js +0 -18
- package/dist/event-store/append-to-stream.d.ts +0 -7
- package/dist/event-store/append-to-stream.d.ts.map +0 -1
- package/dist/event-store/append-to-stream.js +0 -143
- package/dist/event-store/index.cjs +0 -291
- package/dist/event-store/index.d.ts +0 -13
- package/dist/event-store/index.d.ts.map +0 -1
- package/dist/event-store/index.js +0 -10
- package/dist/event-store/read-stream.d.ts +0 -14
- package/dist/event-store/read-stream.d.ts.map +0 -1
- package/dist/event-store/read-stream.js +0 -88
- package/dist/projections/index.cjs +0 -124
- package/dist/projections/index.d.ts +0 -4
- package/dist/projections/index.d.ts.map +0 -1
- package/dist/projections/index.js +0 -2
package/README.md CHANGED
@@ -4,210 +4,122 @@ A Kysely-based event store implementation for [Emmett](https://github.com/event-

 ## Features

-- **Event Store
-- **Projections
+- **Event Store** - Full event sourcing with Kysely and PostgreSQL
+- **Snapshot Projections** - Recommended approach for read models
+- **Event Consumer** - Continuous background event processing
+- **Type Safety** - Full TypeScript support with discriminated unions
+- **Multi-Tenancy** - Built-in partition support

 ## Installation

 ```bash
-npm install @wataruoguchi/emmett-event-store-kysely @event-driven-io/emmett kysely
+npm install @wataruoguchi/emmett-event-store-kysely @event-driven-io/emmett kysely pg
 ```

-##
+## Quick Start

-
+### 1. Database Setup

-
+Set up the required PostgreSQL tables using [our migration example](./database/migrations/1758758113676_event_sourcing_migration_example.ts):

-
+```typescript
+import { Kysely } from "kysely";
+
+// Required tables: messages, streams, subscriptions
+// See docs/database-setup.md for details
+```

-###
+### 2. Create Event Store

 ```typescript
-import {
+import { getKyselyEventStore } from "@wataruoguchi/emmett-event-store-kysely";
 import { Kysely, PostgresDialect } from "kysely";
-import { Pool } from "pg";

-
-const db = new Kysely<YourDatabaseSchema>({
+const db = new Kysely({
   dialect: new PostgresDialect({
-    pool: new Pool({
-      connectionString: process.env.DATABASE_URL,
-    }),
+    pool: new Pool({ connectionString: process.env.DATABASE_URL }),
   }),
 });

-
-
+const eventStore = getKyselyEventStore({
+  db,
+  logger: console,
+});
 ```

-###
+### 3. Write Events & Commands & Business Logic & State

-
-import { DeciderCommandHandler } from "@event-driven-io/emmett";
-import type { EventStore } from "@wataruoguchi/emmett-event-store-kysely";
-
-// Define your domain events and commands
-type CreateCartCommand = {
-  type: "CreateCart";
-  data: { tenantId: string; cartId: string; currency: string };
-};
-
-type CartCreatedEvent = {
-  type: "CartCreated";
-  data: { tenantId: string; cartId: string; currency: string };
-};
-
-// Create your event handler
-export function cartEventHandler({
-  eventStore,
-  getContext,
-}: {
-  eventStore: EventStore;
-  getContext: () => AppContext;
-}) {
-  const handler = DeciderCommandHandler({
-    decide: createDecide(getContext),
-    evolve: createEvolve(),
-    initialState,
-  });
-
-  return {
-    create: (cartId: string, data: CreateCartCommand["data"]) =>
-      handler(
-        eventStore,
-        cartId,
-        { type: "CreateCart", data },
-        { partition: data.tenantId, streamType: "cart" }
-      ),
-  };
-}
-
-// Use in your service
-const cartService = createCartService({
-  eventStore,
-  getContext,
-});
-```
+Please read <https://event-driven-io.github.io/emmett/getting-started.html>

-
+- [Events](https://event-driven-io.github.io/emmett/getting-started.html#events)
+- [Commands](https://event-driven-io.github.io/emmett/getting-started.html#commands)
+- [Business logic and decisions](https://event-driven-io.github.io/emmett/getting-started.html#business-logic-and-decisions)
+- [Building state from events](https://event-driven-io.github.io/emmett/getting-started.html#building-state-from-events)

-###
+### 4. Build Read Models
+
+This package supports "Snapshot Projections".

 ```typescript
-import
-
-  ProjectionRegistry,
+import {
+  createSnapshotProjectionRegistry
 } from "@wataruoguchi/emmett-event-store-kysely/projections";

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      .updateTable("carts")
-      .set({
-        items: JSON.stringify([...existingItems, event.data.item]),
-        total: newTotal,
-        last_stream_position: event.metadata.streamPosition,
-      })
-      .where("stream_id", "=", event.metadata.streamId)
-      .execute();
-  },
-};
-}
+// Reuse your write model's evolve function!
+const registry = createSnapshotProjectionRegistry(
+  ["CartCreated", "ItemAdded", "CartCheckedOut"],
+  {
+    tableName: "carts",
+    primaryKeys: ["tenant_id", "cart_id", "partition"],
+    extractKeys: (event, partition) => ({
+      tenant_id: event.data.eventMeta.tenantId,
+      cart_id: event.data.eventMeta.cartId,
+      partition,
+    }),
+    evolve: domainEvolve, // Reuse from write model!
+    initialState,
+    mapToColumns: (state) => ({ // Optional: denormalize for queries
+      currency: state.currency,
+      total: state.status === "checkedOut" ? state.total : null,
+    }),
+  }
+);
 ```

-###
+### 5. Process Events and Update Read Model

 ```typescript
-import {
-
-
-
-
-
-// Set up projection runner
-const readStream = createReadStream({ db, logger });
-const registry = createProjectionRegistry(cartsProjection());
-const runner = createProjectionRunner({
-  db,
-  readStream,
+import { createProjectionRunner } from "@wataruoguchi/emmett-event-store-kysely/projections";
+
+const runner = createProjectionRunner({
+  db,
+  readStream: eventStore.readStream,
   registry,
 });

-
-
-  partition: "tenant-456",
-  batchSize: 100,
+await runner.projectEvents("subscription-id", "cart-123", {
+  partition: "tenant-456"
 });
 ```

-
+See [Snapshot Projections documentation](./docs/snapshot-projections.md) for details.

-
+## Documentation

-
-#!/usr/bin/env node
-import { createReadStream } from "@wataruoguchi/emmett-event-store-kysely";
-import {
-  createProjectionRegistry,
-  createProjectionRunner,
-} from "@wataruoguchi/emmett-event-store-kysely/projections";
+📚 **[Complete Documentation](./docs/README.md)**

-
-const db = getDb();
-const readStream = createReadStream({ db, logger });
-const registry = createProjectionRegistry(cartsProjection());
-const runner = createProjectionRunner({
-  db,
-  readStream,
-  registry,
-});
-
-const subscriptionId = "carts-read-model";
-const batchSize = 200;
-const pollIntervalMs = 1000;
-
-while (true) {
-  // Get streams for this partition
-  const streams = await db
-    .selectFrom("streams")
-    .select(["stream_id"])
-    .where("is_archived", "=", false)
-    .where("partition", "=", partition)
-    .where("stream_type", "=", "cart")
-    .execute();
-
-  // Process each stream
-  for (const stream of streams) {
-    await runner.projectEvents(subscriptionId, stream.stream_id, {
-      partition,
-      batchSize,
-    });
-  }
-
-  await new Promise((r) => setTimeout(r, pollIntervalMs));
-}
-}
+### Core Guides

-
-
-
+- [Database Setup](./docs/database-setup.md) - PostgreSQL schema and requirements
+- [Event Store](./docs/event-store.md) - Core event store API
+- [Snapshot Projections](./docs/snapshot-projections.md) - Build read models (recommended) ⭐
+- [Event Consumer](./docs/consumer.md) - Continuous background processing
+- [Projection Runner](./docs/projection-runner.md) - On-demand processing for tests
+
+### Examples
+
+- [Working Example](../example/) - Complete application with carts and generators
+- [Migration Example](./database/migrations/1758758113676_event_sourcing_migration_example.ts) - Database setup

 ## License

@@ -215,4 +127,4 @@ MIT

 ## Contributing

-Contributions are welcome! Please
+Contributions are welcome! Please see our [GitHub repository](https://github.com/wataruoguchi/poc-emmett) for issues and PRs.
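The new quick start defers command handling to the Emmett getting-started guide, so the append/read path is never shown end to end. A minimal write-side sketch against the 2.0.0 store, assuming the `{ db, logger }` dependency shape from the README above; the `partition`/`streamType` append options are carried over from the removed 1.x example and should be checked against the 2.0.0 `ExtendedOptions` type before relying on them:

```typescript
import { getKyselyEventStore } from "@wataruoguchi/emmett-event-store-kysely";
import { Kysely, PostgresDialect } from "kysely";
import { Pool } from "pg";

type CartCreated = {
  type: "CartCreated";
  data: { tenantId: string; cartId: string; currency: string };
};

const db = new Kysely({
  dialect: new PostgresDialect({
    pool: new Pool({ connectionString: process.env.DATABASE_URL }),
  }),
});
const eventStore = getKyselyEventStore({ db, logger: console });

// Append one event to a cart stream. The option names below mirror the
// removed 1.x example ({ partition, streamType }) and are assumptions here.
const appended = await eventStore.appendToStream<CartCreated>(
  "cart-123",
  [{ type: "CartCreated", data: { tenantId: "tenant-456", cartId: "cart-123", currency: "USD" } }],
  { partition: "tenant-456", streamType: "cart" },
);
console.log(appended);
```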
package/dist/event-store/consumers.d.ts ADDED
@@ -0,0 +1,23 @@
+import type { Event, ReadEvent, ReadEventMetadataWithGlobalPosition } from "@event-driven-io/emmett";
+import type { Dependencies } from "../types.js";
+export type KyselyEventStoreConsumerConfig = {
+  /** Consumer name for tracking subscription state */
+  consumerName?: string;
+  /** Batch size for processing events */
+  batchSize?: number;
+  /** Polling interval in milliseconds */
+  pollingInterval?: number;
+};
+export type KyselyEventStoreConsumer = {
+  /** Start consuming events */
+  start(): Promise<void>;
+  /** Stop consuming events */
+  stop(): Promise<void>;
+  /** Subscribe to specific event types */
+  subscribe<EventType extends Event>(handler: (event: ReadEvent<EventType, ReadEventMetadataWithGlobalPosition>) => Promise<void> | void, eventType: string): void;
+  /** Subscribe to all events */
+  subscribeToAll(handler: (event: ReadEvent<Event, ReadEventMetadataWithGlobalPosition>) => Promise<void> | void): void;
+};
+export declare function createKyselyEventStoreConsumer({ db, logger, consumerName, batchSize, pollingInterval, }: Dependencies & KyselyEventStoreConsumerConfig): KyselyEventStoreConsumer;
+export declare function createKyselyEventStoreConsumerWithDefaults(deps: Dependencies, config?: KyselyEventStoreConsumerConfig): KyselyEventStoreConsumer;
+//# sourceMappingURL=consumers.d.ts.map
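These declarations define the background consumer added in 2.0.0. A usage sketch under two assumptions: the factory is re-exported from the package root via the new `dist/index.*` entry point (otherwise import it from the event-store module), and the event shape is illustrative:

```typescript
import { createKyselyEventStoreConsumer } from "@wataruoguchi/emmett-event-store-kysely";
import { Kysely, PostgresDialect } from "kysely";
import { Pool } from "pg";

type CartCreated = {
  type: "CartCreated";
  data: { tenantId: string; cartId: string; currency: string };
};

const db = new Kysely({
  dialect: new PostgresDialect({
    pool: new Pool({ connectionString: process.env.DATABASE_URL }),
  }),
});

const consumer = createKyselyEventStoreConsumer({
  db,
  logger: console,
  consumerName: "cart-read-model", // progress is tracked per consumer_name in `subscriptions`
  batchSize: 100,                  // defaults shown in the declaration above
  pollingInterval: 1000,
});

// Handler for a single event type.
consumer.subscribe<CartCreated>(async (event) => {
  console.log(event.metadata.globalPosition, event.data.cartId);
}, "CartCreated");

// Handler for every event, regardless of type.
consumer.subscribeToAll((event) => console.log("seen", event.type));

await consumer.start();
// ... on shutdown:
await consumer.stop();
```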
package/dist/event-store/consumers.d.ts.map ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"consumers.d.ts","sourceRoot":"","sources":["../../src/event-store/consumers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,KAAK,EACL,SAAS,EACT,mCAAmC,EACpC,MAAM,yBAAyB,CAAC;AACjC,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,MAAM,8BAA8B,GAAG;IAC3C,oDAAoD;IACpD,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,uCAAuC;IACvC,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,uCAAuC;IACvC,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,CAAC;AAEF,MAAM,MAAM,wBAAwB,GAAG;IACrC,6BAA6B;IAC7B,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACvB,4BAA4B;IAC5B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACtB,wCAAwC;IACxC,SAAS,CAAC,SAAS,SAAS,KAAK,EAC/B,OAAO,EAAE,CACP,KAAK,EAAE,SAAS,CAAC,SAAS,EAAE,mCAAmC,CAAC,KAC7D,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,EACzB,SAAS,EAAE,MAAM,GAChB,IAAI,CAAC;IACR,8BAA8B;IAC9B,cAAc,CACZ,OAAO,EAAE,CACP,KAAK,EAAE,SAAS,CAAC,KAAK,EAAE,mCAAmC,CAAC,KACzD,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,GACxB,IAAI,CAAC;CACT,CAAC;AAEF,wBAAgB,8BAA8B,CAAC,EAC7C,EAAE,EACF,MAAM,EACN,YAAiC,EACjC,SAAe,EACf,eAAsB,GACvB,EAAE,YAAY,GAAG,8BAA8B,GAAG,wBAAwB,CAkM1E;AAGD,wBAAgB,0CAA0C,CACxD,IAAI,EAAE,YAAY,EAClB,MAAM,GAAE,8BAAmC,GAC1C,wBAAwB,CAK1B"}
package/dist/event-store/consumers.js ADDED
@@ -0,0 +1,155 @@
+export function createKyselyEventStoreConsumer({ db, logger, consumerName = "default-consumer", batchSize = 100, pollingInterval = 1000, }) {
+  let isRunning = false;
+  let lastProcessedPosition = 0n;
+  const eventHandlers = new Map();
+  const allEventHandlers = [];
+  let pollingTimer = null;
+  const processEvents = async () => {
+    if (!isRunning)
+      return;
+    try {
+      // Get events from the last processed position
+      const events = await db
+        .selectFrom("messages")
+        .select([
+          "message_type",
+          "message_data",
+          "message_metadata",
+          "stream_position",
+          "global_position",
+          "message_id",
+          "stream_id",
+        ])
+        .where("global_position", ">", lastProcessedPosition)
+        .where("is_archived", "=", false)
+        .orderBy("global_position")
+        .limit(batchSize)
+        .execute();
+      if (events.length === 0) {
+        return;
+      }
+      // Process each event
+      for (const row of events) {
+        const event = {
+          kind: "Event",
+          type: row.message_type,
+          data: row.message_data,
+          metadata: {
+            ...row.message_metadata,
+            messageId: row.message_id,
+            streamName: row.stream_id,
+            streamPosition: BigInt(String(row.stream_position)),
+            globalPosition: BigInt(String(row.global_position)),
+          },
+        };
+        // Call type-specific handlers
+        const typeHandlers = eventHandlers.get(row.message_type) || [];
+        for (const handler of typeHandlers) {
+          try {
+            await handler(event);
+          }
+          catch (error) {
+            logger.error({ error, event }, `Error processing event ${row.message_type}`);
+          }
+        }
+        // Call all-event handlers
+        for (const handler of allEventHandlers) {
+          try {
+            await handler(event);
+          }
+          catch (error) {
+            logger.error({ error, event }, "Error processing event in all-event handler");
+          }
+        }
+        // Update last processed position
+        const globalPos = row.global_position;
+        if (globalPos !== null) {
+          lastProcessedPosition = BigInt(String(globalPos));
+        }
+      }
+      // Update subscription tracking
+      await updateSubscriptionPosition();
+    }
+    catch (error) {
+      logger.error({ error }, "Error processing events");
+    }
+  };
+  const updateSubscriptionPosition = async () => {
+    try {
+      await db
+        .insertInto("subscriptions")
+        .values({
+          consumer_name: consumerName,
+          last_processed_position: lastProcessedPosition,
+          last_processed_transaction_id: lastProcessedPosition,
+          created_at: new Date(),
+          updated_at: new Date(),
+        })
+        .onConflict((oc) => oc.column("consumer_name").doUpdateSet({
+          last_processed_position: lastProcessedPosition,
+          last_processed_transaction_id: lastProcessedPosition,
+          updated_at: new Date(),
+        }))
+        .execute();
+    }
+    catch (error) {
+      logger.error({ error }, "Error updating subscription position");
+    }
+  };
+  const loadLastProcessedPosition = async () => {
+    try {
+      const subscription = await db
+        .selectFrom("subscriptions")
+        .select(["last_processed_position"])
+        .where("consumer_name", "=", consumerName)
+        .executeTakeFirst();
+      if (subscription) {
+        lastProcessedPosition = BigInt(String(subscription.last_processed_position));
+      }
+    }
+    catch (error) {
+      logger.error({ error }, "Error loading last processed position");
+    }
+  };
+  return {
+    async start() {
+      if (isRunning)
+        return;
+      isRunning = true;
+      await loadLastProcessedPosition();
+      logger.info({ consumerName }, "Starting event store consumer");
+      pollingTimer = setInterval(processEvents, pollingInterval);
+    },
+    async stop() {
+      if (!isRunning)
+        return;
+      isRunning = false;
+      if (pollingTimer) {
+        clearInterval(pollingTimer);
+        pollingTimer = null;
+      }
+      logger.info({ consumerName }, "Stopped event store consumer");
+    },
+    subscribe(handler, eventType) {
+      if (!eventHandlers.has(eventType)) {
+        eventHandlers.set(eventType, []);
+      }
+      const handlers = eventHandlers.get(eventType);
+      if (handlers) {
+        // Type assertion needed because we're storing handlers for specific event types
+        // in a generic Map that accepts Event handlers
+        handlers.push(handler);
+      }
+    },
+    subscribeToAll(handler) {
+      allEventHandlers.push(handler);
+    },
+  };
+}
+// Helper function to create consumer with default options
+export function createKyselyEventStoreConsumerWithDefaults(deps, config = {}) {
+  return createKyselyEventStoreConsumer({
+    ...deps,
+    ...config,
+  });
+}
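Note that the consumer polls `messages` by `global_position` and upserts its progress into `subscriptions` keyed by `consumer_name`, so that table must exist (with `consumer_name` unique) before `start()` is called. A Kysely migration sketch covering only the columns this file touches; column types are assumptions, and the packaged migration example under `database/migrations/` remains the authoritative schema:

```typescript
import { Kysely, sql } from "kysely";

// Columns inferred from consumers.js above; types are assumptions.
export async function up(db: Kysely<unknown>): Promise<void> {
  await db.schema
    .createTable("subscriptions")
    .addColumn("consumer_name", "text", (col) => col.primaryKey()) // unique key required by the onConflict upsert
    .addColumn("last_processed_position", "bigint", (col) => col.notNull())
    .addColumn("last_processed_transaction_id", "bigint", (col) => col.notNull())
    .addColumn("created_at", "timestamptz", (col) => col.notNull().defaultTo(sql`now()`))
    .addColumn("updated_at", "timestamptz", (col) => col.notNull().defaultTo(sql`now()`))
    .execute();
}

export async function down(db: Kysely<unknown>): Promise<void> {
  await db.schema.dropTable("subscriptions").execute();
}
```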
package/dist/event-store/kysely-event-store.d.ts ADDED
@@ -0,0 +1,42 @@
+import { type AppendToStreamOptions, type AppendToStreamResultWithGlobalPosition, type Event, type EventStore, type EventStoreSessionFactory, type ReadEventMetadataWithGlobalPosition, type ReadStreamOptions, type ReadStreamResult } from "@event-driven-io/emmett";
+import type { Kysely } from "kysely";
+import { type Dependencies, type ExtendedOptions } from "../types.js";
+type KyselyReadEventMetadata = ReadEventMetadataWithGlobalPosition;
+type ExtendedAppendToStreamOptions = AppendToStreamOptions & ExtendedOptions;
+export type ProjectionReadStreamOptions = {
+  from?: bigint;
+  to?: bigint;
+  partition?: string;
+  maxCount?: bigint;
+};
+export interface KyselyEventStore extends EventStore<KyselyReadEventMetadata>, EventStoreSessionFactory<KyselyEventStore> {
+  readStream<EventType extends Event>(streamName: string, options?: ReadStreamOptions<bigint> | ProjectionReadStreamOptions): Promise<ReadStreamResult<EventType, KyselyReadEventMetadata>>;
+  appendToStream<EventType extends Event>(streamName: string, events: EventType[], options?: ExtendedAppendToStreamOptions): Promise<AppendToStreamResultWithGlobalPosition>;
+  close(): Promise<void>;
+  schema: {
+    sql(): string;
+    print(): void;
+    migrate(): Promise<void>;
+  };
+}
+export type KyselyEventStoreOptions = {
+  /** Database connection options */
+  connectionOptions?: {
+    /** Custom database executor (Kysely instance) */
+    db?: Kysely<unknown>;
+  };
+  /** Schema management options */
+  schema?: {
+    /** Auto-migration strategy */
+    autoMigration?: "CreateOrUpdate" | "None";
+  };
+  /** Hooks for lifecycle events */
+  hooks?: {
+    /** Called after schema is created */
+    onAfterSchemaCreated?: () => Promise<void> | void;
+  };
+};
+export declare const defaultKyselyOptions: KyselyEventStoreOptions;
+export declare const getKyselyEventStore: (deps: Dependencies) => KyselyEventStore;
+export {};
+//# sourceMappingURL=kysely-event-store.d.ts.map
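The `readStream` signature above accepts either Emmett's standard `ReadStreamOptions` or the projection-oriented `ProjectionReadStreamOptions` (position range plus partition). A read-side sketch against that interface; the stream and partition names are illustrative:

```typescript
import { getKyselyEventStore } from "@wataruoguchi/emmett-event-store-kysely";
import { Kysely, PostgresDialect } from "kysely";
import { Pool } from "pg";

const db = new Kysely({
  dialect: new PostgresDialect({
    pool: new Pool({ connectionString: process.env.DATABASE_URL }),
  }),
});
const eventStore = getKyselyEventStore({ db, logger: console });

// Range read: positions 1..500 of the stream, capped at 100 events.
const slice = await eventStore.readStream("cart-123", {
  from: 1n,
  to: 500n,
  maxCount: 100n,
});

// Same call scoped to a tenant partition (ProjectionReadStreamOptions).
const partitioned = await eventStore.readStream("cart-123", {
  from: 1n,
  partition: "tenant-456",
});

console.log(slice.events.length, partitioned.events.length);

// Release the underlying connection when done.
await eventStore.close();
```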
package/dist/event-store/kysely-event-store.d.ts.map ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"kysely-event-store.d.ts","sourceRoot":"","sources":["../../src/event-store/kysely-event-store.ts"],"names":[],"mappings":"AACA,OAAO,EAIL,KAAK,qBAAqB,EAC1B,KAAK,sCAAsC,EAC3C,KAAK,KAAK,EACV,KAAK,UAAU,EAEf,KAAK,wBAAwB,EAE7B,KAAK,mCAAmC,EACxC,KAAK,iBAAiB,EACtB,KAAK,gBAAgB,EACtB,MAAM,yBAAyB,CAAC;AACjC,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AACrC,OAAO,EAGL,KAAK,YAAY,EACjB,KAAK,eAAe,EACrB,MAAM,aAAa,CAAC;AAErB,KAAK,uBAAuB,GAAG,mCAAmC,CAAC;AACnE,KAAK,6BAA6B,GAAG,qBAAqB,GAAG,eAAe,CAAC;AAI7E,MAAM,MAAM,2BAA2B,GAAG;IACxC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,MAAM,WAAW,gBACf,SAAQ,UAAU,CAAC,uBAAuB,CAAC,EACzC,wBAAwB,CAAC,gBAAgB,CAAC;IAE5C,UAAU,CAAC,SAAS,SAAS,KAAK,EAChC,UAAU,EAAE,MAAM,EAClB,OAAO,CAAC,EAAE,iBAAiB,CAAC,MAAM,CAAC,GAAG,2BAA2B,GAChE,OAAO,CAAC,gBAAgB,CAAC,SAAS,EAAE,uBAAuB,CAAC,CAAC,CAAC;IACjE,cAAc,CAAC,SAAS,SAAS,KAAK,EACpC,UAAU,EAAE,MAAM,EAClB,MAAM,EAAE,SAAS,EAAE,EACnB,OAAO,CAAC,EAAE,6BAA6B,GACtC,OAAO,CAAC,sCAAsC,CAAC,CAAC;IACnD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACvB,MAAM,EAAE;QACN,GAAG,IAAI,MAAM,CAAC;QACd,KAAK,IAAI,IAAI,CAAC;QACd,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;KAC1B,CAAC;CACH;AAED,MAAM,MAAM,uBAAuB,GAAG;IACpC,kCAAkC;IAClC,iBAAiB,CAAC,EAAE;QAClB,iDAAiD;QACjD,EAAE,CAAC,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KACtB,CAAC;IACF,gCAAgC;IAChC,MAAM,CAAC,EAAE;QACP,8BAA8B;QAC9B,aAAa,CAAC,EAAE,gBAAgB,GAAG,MAAM,CAAC;KAC3C,CAAC;IACF,iCAAiC;IACjC,KAAK,CAAC,EAAE;QACN,qCAAqC;QACrC,oBAAoB,CAAC,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC;KACnD,CAAC;CACH,CAAC;AAEF,eAAO,MAAM,oBAAoB,EAAE,uBAIlC,CAAC;AAEF,eAAO,MAAM,mBAAmB,GAAI,MAAM,YAAY,KAAG,gBAiKxD,CAAC"}