@durable-streams/state 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +654 -0
- package/STATE-PROTOCOL.md +502 -0
- package/dist/index.cjs +558 -0
- package/dist/index.d.cts +284 -0
- package/dist/index.d.ts +284 -0
- package/dist/index.js +530 -0
- package/package.json +48 -0
- package/src/index.ts +33 -0
- package/src/materialized-state.ts +93 -0
- package/src/stream-db.ts +934 -0
- package/src/types.ts +80 -0
- package/state-protocol.schema.json +186 -0
package/dist/index.js
ADDED
@@ -0,0 +1,530 @@
import { createCollection, createOptimisticAction } from "@tanstack/db";
import { DurableStream } from "@durable-streams/client";

//#region src/types.ts
/**
 * Type guard to check if an event is a change event
 */
function isChangeEvent(event) {
  return event != null && `operation` in event.headers;
}
/**
 * Type guard to check if an event is a control event
 */
function isControlEvent(event) {
  return event != null && `control` in event.headers;
}

//#endregion
//#region src/materialized-state.ts
/**
 * MaterializedState maintains an in-memory view of state from change events.
 *
 * It organizes data by type, where each type contains a map of key -> value.
 * This supports multi-type streams where different entity types can coexist.
 */
var MaterializedState = class {
  data;
  constructor() {
    this.data = new Map();
  }
  /**
   * Apply a single change event to update the materialized state
   */
  apply(event) {
    const { type, key, value, headers } = event;
    let typeMap = this.data.get(type);
    if (!typeMap) {
      typeMap = new Map();
      this.data.set(type, typeMap);
    }
    switch (headers.operation) {
      case `insert`:
        typeMap.set(key, value);
        break;
      case `update`:
        typeMap.set(key, value);
        break;
      case `upsert`:
        typeMap.set(key, value);
        break;
      case `delete`:
        typeMap.delete(key);
        break;
    }
  }
  /**
   * Apply a batch of change events
   */
  applyBatch(events) {
    for (const event of events) this.apply(event);
  }
  /**
   * Get a specific value by type and key
   */
  get(type, key) {
    const typeMap = this.data.get(type);
    if (!typeMap) return void 0;
    return typeMap.get(key);
  }
  /**
   * Get all entries for a specific type
   */
  getType(type) {
    return this.data.get(type) || new Map();
  }
  /**
   * Clear all state
   */
  clear() {
    this.data.clear();
  }
  /**
   * Get the number of types in the state
   */
  get typeCount() {
    return this.data.size;
  }
  /**
   * Get all type names
   */
  get types() {
    return Array.from(this.data.keys());
  }
};

//#endregion
//#region src/stream-db.ts
/**
 * Internal event dispatcher that routes stream events to collection handlers
 */
var EventDispatcher = class {
  /** Map from event type to collection handler */
  handlers = new Map();
  /** Handlers that have pending writes (need commit) */
  pendingHandlers = new Set();
  /** Whether we've received the initial up-to-date signal */
  isUpToDate = false;
  /** Resolvers and rejecters for preload promises */
  preloadResolvers = [];
  preloadRejecters = [];
  /** Set of all txids that have been seen and committed */
  seenTxids = new Set();
  /** Txids collected during current batch (before commit) */
  pendingTxids = new Set();
  /** Resolvers waiting for specific txids */
  txidResolvers = new Map();
  /** Track existing keys per collection for upsert logic */
  existingKeys = new Map();
  /**
   * Register a handler for a specific event type
   */
  registerHandler(eventType, handler) {
    this.handlers.set(eventType, handler);
    if (!this.existingKeys.has(eventType)) this.existingKeys.set(eventType, new Set());
  }
  /**
   * Dispatch a change event to the appropriate collection.
   * Writes are buffered until commit() is called via markUpToDate().
   */
  dispatchChange(event) {
    if (!isChangeEvent(event)) return;
    if (event.headers.txid && typeof event.headers.txid === `string`) this.pendingTxids.add(event.headers.txid);
    const handler = this.handlers.get(event.type);
    if (!handler) return;
    let operation = event.headers.operation;
    if (operation !== `delete`) {
      if (typeof event.value !== `object` || event.value === null) throw new Error(`StreamDB collections require object values; got ${typeof event.value} for type=${event.type}, key=${event.key}`);
    }
    const originalValue = event.value ?? {};
    const value = { ...originalValue };
    value[handler.primaryKey] = event.key;
    if (!this.pendingHandlers.has(handler)) {
      handler.begin();
      this.pendingHandlers.add(handler);
    }
    if (operation === `upsert`) {
      const keys$1 = this.existingKeys.get(event.type);
      const existing = keys$1?.has(event.key);
      operation = existing ? `update` : `insert`;
    }
    const keys = this.existingKeys.get(event.type);
    if (operation === `insert` || operation === `update`) keys?.add(event.key);
    else keys?.delete(event.key);
    try {
      handler.write(value, operation);
    } catch (error) {
      console.error(`[StreamDB] Error in handler.write():`, error);
      console.error(`[StreamDB] Event that caused error:`, {
        type: event.type,
        key: event.key,
        operation
      });
      throw error;
    }
  }
  /**
   * Handle control events from the stream JSON items
   */
  dispatchControl(event) {
    if (!isControlEvent(event)) return;
    switch (event.headers.control) {
      case `reset`:
        for (const handler of this.handlers.values()) handler.truncate();
        for (const keys of this.existingKeys.values()) keys.clear();
        this.pendingHandlers.clear();
        this.isUpToDate = false;
        break;
      case `snapshot-start`:
      case `snapshot-end`: break;
    }
  }
  /**
   * Commit all pending writes and handle up-to-date signal
   */
  markUpToDate() {
    for (const handler of this.pendingHandlers) try {
      handler.commit();
    } catch (error) {
      console.error(`[StreamDB] Error in handler.commit():`, error);
      if (error instanceof Error && error.message.includes(`already exists in the collection`) && error.message.includes(`live-query`)) {
        console.warn(`[StreamDB] Known TanStack DB groupBy bug detected - continuing despite error`);
        console.warn(`[StreamDB] Queries with groupBy may show stale data until fixed`);
        continue;
      }
      throw error;
    }
    this.pendingHandlers.clear();
    for (const txid of this.pendingTxids) {
      this.seenTxids.add(txid);
      const resolvers = this.txidResolvers.get(txid);
      if (resolvers) {
        for (const { resolve, timeoutId } of resolvers) {
          clearTimeout(timeoutId);
          resolve();
        }
        this.txidResolvers.delete(txid);
      }
    }
    this.pendingTxids.clear();
    if (!this.isUpToDate) {
      this.isUpToDate = true;
      for (const handler of this.handlers.values()) handler.markReady();
      for (const resolve of this.preloadResolvers) resolve();
      this.preloadResolvers = [];
    }
  }
  /**
   * Wait for the stream to reach up-to-date state
   */
  waitForUpToDate() {
    if (this.isUpToDate) return Promise.resolve();
    return new Promise((resolve, reject) => {
      this.preloadResolvers.push(resolve);
      this.preloadRejecters.push(reject);
    });
  }
  /**
   * Reject all waiting preload promises with an error
   */
  rejectAll(error) {
    for (const reject of this.preloadRejecters) reject(error);
    this.preloadResolvers = [];
    this.preloadRejecters = [];
    for (const resolvers of this.txidResolvers.values()) for (const { reject, timeoutId } of resolvers) {
      clearTimeout(timeoutId);
      reject(error);
    }
    this.txidResolvers.clear();
  }
  /**
   * Check if we've received up-to-date
   */
  get ready() {
    return this.isUpToDate;
  }
  /**
   * Wait for a specific txid to be seen in the stream
   */
  awaitTxId(txid, timeout = 5e3) {
    if (this.seenTxids.has(txid)) return Promise.resolve();
    return new Promise((resolve, reject) => {
      const timeoutId = setTimeout(() => {
        const resolvers = this.txidResolvers.get(txid);
        if (resolvers) {
          const index = resolvers.findIndex((r) => r.timeoutId === timeoutId);
          if (index !== -1) resolvers.splice(index, 1);
          if (resolvers.length === 0) this.txidResolvers.delete(txid);
        }
        reject(new Error(`Timeout waiting for txid: ${txid}`));
      }, timeout);
      if (!this.txidResolvers.has(txid)) this.txidResolvers.set(txid, []);
      this.txidResolvers.get(txid).push({
        resolve,
        reject,
        timeoutId
      });
    });
  }
};
/**
 * Create a sync config for a stream-backed collection
 */
function createStreamSyncConfig(eventType, dispatcher, primaryKey) {
  return { sync: ({ begin, write, commit, markReady, truncate }) => {
    dispatcher.registerHandler(eventType, {
      begin,
      write: (value, type) => {
        write({
          value,
          type
        });
      },
      commit,
      markReady,
      truncate,
      primaryKey
    });
    if (dispatcher.ready) markReady();
    return () => {};
  } };
}
/**
 * Reserved collection names that would collide with StreamDB properties
 * (collections are now namespaced, but we still prevent internal name collisions)
 */
const RESERVED_COLLECTION_NAMES = new Set([
  `collections`,
  `preload`,
  `close`,
  `utils`,
  `actions`
]);
/**
 * Create helper functions for a collection
 */
function createCollectionHelpers(eventType, primaryKey, schema) {
  return {
    insert: ({ key, value, headers }) => {
      const result = schema[`~standard`].validate(value);
      if (`issues` in result) throw new Error(`Validation failed for ${eventType} insert: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
      const derived = value[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} insert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
      return {
        type: eventType,
        key: finalKey,
        value,
        headers: {
          ...headers,
          operation: `insert`
        }
      };
    },
    update: ({ key, value, oldValue, headers }) => {
      const result = schema[`~standard`].validate(value);
      if (`issues` in result) throw new Error(`Validation failed for ${eventType} update: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
      if (oldValue !== void 0) {
        const oldResult = schema[`~standard`].validate(oldValue);
        if (`issues` in oldResult) throw new Error(`Validation failed for ${eventType} update (oldValue): ${oldResult.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
      }
      const derived = value[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} update event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
      return {
        type: eventType,
        key: finalKey,
        value,
        old_value: oldValue,
        headers: {
          ...headers,
          operation: `update`
        }
      };
    },
    delete: ({ key, oldValue, headers }) => {
      if (oldValue !== void 0) {
        const result = schema[`~standard`].validate(oldValue);
        if (`issues` in result) throw new Error(`Validation failed for ${eventType} delete (oldValue): ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
      }
      const finalKey = key ?? (oldValue ? String(oldValue[primaryKey]) : void 0);
      if (!finalKey) throw new Error(`Cannot create ${eventType} delete event: must provide either 'key' or 'oldValue' with a ${primaryKey} field`);
      return {
        type: eventType,
        key: finalKey,
        old_value: oldValue,
        headers: {
          ...headers,
          operation: `delete`
        }
      };
    },
    upsert: ({ key, value, headers }) => {
      const result = schema[`~standard`].validate(value);
      if (`issues` in result) throw new Error(`Validation failed for ${eventType} upsert: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
      const derived = value[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} upsert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
      return {
        type: eventType,
        key: finalKey,
        value,
        headers: {
          ...headers,
          operation: `upsert`
        }
      };
    }
  };
}
/**
 * Create a state schema definition with typed collections and event helpers
 */
function createStateSchema(collections) {
  for (const name of Object.keys(collections)) if (RESERVED_COLLECTION_NAMES.has(name)) throw new Error(`Reserved collection name "${name}" - this would collide with StreamDB properties (${Array.from(RESERVED_COLLECTION_NAMES).join(`, `)})`);
  const typeToCollection = new Map();
  for (const [collectionName, def] of Object.entries(collections)) {
    const existing = typeToCollection.get(def.type);
    if (existing) throw new Error(`Duplicate event type "${def.type}" - used by both "${existing}" and "${collectionName}" collections`);
    typeToCollection.set(def.type, collectionName);
  }
  const enhancedCollections = {};
  for (const [name, collectionDef] of Object.entries(collections)) enhancedCollections[name] = {
    ...collectionDef,
    ...createCollectionHelpers(collectionDef.type, collectionDef.primaryKey, collectionDef.schema)
  };
  return enhancedCollections;
}
/**
 * Create a stream-backed database with TanStack DB collections
 *
 * This function is synchronous - it creates the stream handle and collections
 * but does not start the stream connection. Call `db.preload()` to connect
 * and sync initial data.
 *
 * @example
 * ```typescript
 * const stateSchema = createStateSchema({
 *   users: { schema: userSchema, type: "user", primaryKey: "id" },
 *   messages: { schema: messageSchema, type: "message", primaryKey: "id" },
 * })
 *
 * // Create a stream DB (synchronous - stream is created lazily on preload)
 * const db = createStreamDB({
 *   streamOptions: {
 *     url: "https://api.example.com/streams/my-stream",
 *     contentType: "application/json",
 *   },
 *   state: stateSchema,
 * })
 *
 * // preload() creates the stream and loads initial data
 * await db.preload()
 * const user = await db.collections.users.get("123")
 * ```
 */
function createStreamDB(options) {
  const { streamOptions, state, actions: actionsFactory } = options;
  const stream = new DurableStream(streamOptions);
  const dispatcher = new EventDispatcher();
  const collectionInstances = {};
  for (const [name, definition] of Object.entries(state)) {
    const collection = createCollection({
      id: `stream-db:${name}`,
      schema: definition.schema,
      getKey: (item) => String(item[definition.primaryKey]),
      sync: createStreamSyncConfig(definition.type, dispatcher, definition.primaryKey),
      startSync: true,
      gcTime: 0
    });
    console.log(`[StreamDB] Created collection "${name}":`, {
      type: typeof collection,
      constructor: collection.constructor.name,
      isCollection: collection instanceof Object,
      hasSize: `size` in collection
    });
    collectionInstances[name] = collection;
  }
  let streamResponse = null;
  const abortController = new AbortController();
  let consumerStarted = false;
  /**
   * Start the stream consumer (called lazily on first preload)
   */
  const startConsumer = async () => {
    if (consumerStarted) return;
    consumerStarted = true;
    streamResponse = await stream.stream({
      live: `auto`,
      signal: abortController.signal
    });
    let batchCount = 0;
    let lastBatchTime = Date.now();
    streamResponse.subscribeJson(async (batch) => {
      try {
        batchCount++;
        lastBatchTime = Date.now();
        if (batch.items.length > 0) console.log(`[StreamDB] Processing batch #${batchCount}: ${batch.items.length} items, upToDate=${batch.upToDate}`);
        for (const event of batch.items) if (isChangeEvent(event)) dispatcher.dispatchChange(event);
        else if (isControlEvent(event)) dispatcher.dispatchControl(event);
        if (batch.upToDate) {
          console.log(`[StreamDB] Marking up-to-date after batch #${batchCount}`);
          dispatcher.markUpToDate();
          console.log(`[StreamDB] Successfully marked up-to-date`);
        }
        if (batch.items.length > 0) console.log(`[StreamDB] Successfully processed batch #${batchCount}`);
      } catch (error) {
        console.error(`[StreamDB] Error processing batch:`, error);
        console.error(`[StreamDB] Failed batch:`, batch);
        dispatcher.rejectAll(error);
        abortController.abort();
      }
    });
    const healthCheck = setInterval(() => {
      const timeSinceLastBatch = Date.now() - lastBatchTime;
      console.log(`[StreamDB] Health: ${batchCount} batches processed, last batch ${(timeSinceLastBatch / 1e3).toFixed(1)}s ago`);
    }, 15e3);
    abortController.signal.addEventListener(`abort`, () => {
      clearInterval(healthCheck);
      console.log(`[StreamDB] Aborted - cleaning up health check`);
    });
  };
  const dbMethods = {
    stream,
    preload: async () => {
      await startConsumer();
      await dispatcher.waitForUpToDate();
    },
    close: () => {
      dispatcher.rejectAll(new Error(`StreamDB closed`));
      abortController.abort();
    },
    utils: { awaitTxId: (txid, timeout) => dispatcher.awaitTxId(txid, timeout) }
  };
  console.log(`[StreamDB] Creating db object with collections:`, Object.keys(collectionInstances));
  const db = {
    collections: collectionInstances,
    ...dbMethods
  };
  console.log(`[StreamDB] db.collections:`, Object.keys(db.collections));
  console.log(`[StreamDB] db.collections.events:`, db.collections.events);
  if (actionsFactory) {
    const actionDefs = actionsFactory({
      db,
      stream
    });
    const wrappedActions = {};
    for (const [name, def] of Object.entries(actionDefs)) wrappedActions[name] = createOptimisticAction({
      onMutate: def.onMutate,
      mutationFn: def.mutationFn
    });
    return {
      ...db,
      actions: wrappedActions
    };
  }
  return db;
}

//#endregion
export { MaterializedState, createStateSchema, createStreamDB, isChangeEvent, isControlEvent };
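The inline @example above only covers the read path. The write path goes through the actions factory (each definition is wrapped in TanStack DB's createOptimisticAction) and the txid bookkeeping in EventDispatcher. Below is a minimal sketch of that path; the Todo type, todoSchema, the stream URL, and the stream.append(...) call are illustrative assumptions, not verified APIs of this package or its client.

```typescript
import { createStateSchema, createStreamDB } from "@durable-streams/state"

// Hypothetical entity type and Standard Schema (e.g. built with Zod).
type Todo = { id: string; title: string; done: boolean }
declare const todoSchema: any

const state = createStateSchema({
  todos: { schema: todoSchema, type: "todo", primaryKey: "id" },
})

const db = createStreamDB({
  streamOptions: {
    url: "https://api.example.com/streams/todos", // illustrative URL
    contentType: "application/json",
  },
  state,
  // Each definition is wrapped in createOptimisticAction: onMutate applies
  // the optimistic local write, mutationFn persists the change event.
  actions: ({ db, stream }) => ({
    addTodo: {
      onMutate: (todo: Todo) => {
        db.collections.todos.insert(todo) // optimistic; rolled back on failure
      },
      mutationFn: async (todo: Todo) => {
        const txid = crypto.randomUUID()
        // The helper validates against todoSchema, derives the key from
        // todo.id, and stamps headers.operation = "insert". The txid header
        // flows back through the stream, where markUpToDate() resolves
        // awaitTxId waiters.
        const event = state.todos.insert({ value: todo, headers: { txid } })
        await stream.append(JSON.stringify(event)) // assumed client write API
        await db.utils.awaitTxId(txid) // rejects after 5s by default
      },
    },
  }),
})

await db.preload()
db.actions.addTodo({ id: "1", title: "write docs", done: false })
```

Because the event helper runs the schema's Standard Schema validate() before building the event, a malformed value throws locally before anything reaches the stream.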
package/package.json
ADDED
@@ -0,0 +1,48 @@
{
  "name": "@durable-streams/state",
  "description": "State change event protocol for Durable Streams",
  "version": "0.1.0",
  "author": "Durable Stream contributors",
  "license": "Apache-2.0",
  "type": "module",
  "exports": {
    "./package.json": "./package.json",
    ".": {
      "import": {
        "types": "./dist/index.d.ts",
        "default": "./dist/index.js"
      },
      "require": {
        "types": "./dist/index.d.cts",
        "default": "./dist/index.cjs"
      }
    }
  },
  "files": [
    "dist",
    "src",
    "state-protocol.schema.json",
    "STATE-PROTOCOL.md"
  ],
  "main": "./dist/index.cjs",
  "module": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "sideEffects": false,
  "dependencies": {
    "@durable-streams/client": "workspace:*",
    "@standard-schema/spec": "^1.0.0",
    "@tanstack/db": "latest"
  },
  "devDependencies": {
    "@durable-streams/server": "workspace:*",
    "tsdown": "^0.9.0"
  },
  "scripts": {
    "build": "tsdown",
    "dev": "tsdown --watch",
    "typecheck": "tsc --noEmit"
  },
  "engines": {
    "node": ">=18.0.0"
  }
}
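The exports map ships a dual build: the "import" condition resolves to ./dist/index.js with ./dist/index.d.ts, while "require" resolves to ./dist/index.cjs with ./dist/index.d.cts; the top-level main, module, and types fields mirror this for older resolvers. A quick consumer-side sketch, assuming only the published package name:

```typescript
// ESM consumers (the package itself is "type": "module"):
import { createStreamDB, createStateSchema, MaterializedState } from "@durable-streams/state"

console.log(typeof createStreamDB, typeof createStateSchema, typeof MaterializedState)

// CJS consumers resolve ./dist/index.cjs and ./dist/index.d.cts instead:
//   const { createStreamDB } = require("@durable-streams/state")
```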
package/src/index.ts
ADDED
@@ -0,0 +1,33 @@
// Types
export type {
  Operation,
  Value,
  Row,
  ChangeHeaders,
  ChangeEvent,
  ControlEvent,
  StateEvent,
} from "./types"

export { isChangeEvent, isControlEvent } from "./types"

// Classes
export { MaterializedState } from "./materialized-state"

// Stream DB
export { createStreamDB, createStateSchema } from "./stream-db"
export type {
  CollectionDefinition,
  CollectionEventHelpers,
  CollectionWithHelpers,
  StreamStateDefinition,
  StateSchema,
  CreateStreamDBOptions,
  StreamDB,
  StreamDBMethods,
  StreamDBUtils,
  StreamDBWithActions,
  ActionFactory,
  ActionMap,
  ActionDefinition,
} from "./stream-db"
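These re-exports define the public surface; the event builders that createStateSchema attaches to each collection are the main way to produce protocol events. A short sketch of the event shapes they emit, assuming a hypothetical userSchema (the shapes follow createCollectionHelpers in dist/index.js above):

```typescript
import { createStateSchema, isChangeEvent } from "@durable-streams/state"

// Hypothetical Standard Schema for { id: string; name: string }
declare const userSchema: any

const state = createStateSchema({
  users: { schema: userSchema, type: "user", primaryKey: "id" },
})

// The key is derived from value.id when not passed explicitly.
const ins = state.users.insert({ value: { id: "u1", name: "Ada" } })
// -> { type: "user", key: "u1", value: {...}, headers: { operation: "insert" } }

const upd = state.users.update({
  value: { id: "u1", name: "Ada Lovelace" },
  oldValue: { id: "u1", name: "Ada" }, // optional; validated when present
})
// -> carries old_value alongside value, headers.operation === "update"

const del = state.users.delete({ key: "u1" })
// -> { type: "user", key: "u1", headers: { operation: "delete" } }

console.log([ins, upd, del].every(isChangeEvent)) // true: all have headers.operation
```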
package/src/materialized-state.ts
ADDED

@@ -0,0 +1,93 @@
import type { ChangeEvent } from "./types"

/**
 * MaterializedState maintains an in-memory view of state from change events.
 *
 * It organizes data by type, where each type contains a map of key -> value.
 * This supports multi-type streams where different entity types can coexist.
 */
export class MaterializedState {
  private data: Map<string, Map<string, unknown>>

  constructor() {
    this.data = new Map()
  }

  /**
   * Apply a single change event to update the materialized state
   */
  apply(event: ChangeEvent): void {
    const { type, key, value, headers } = event

    // Get or create the type map
    let typeMap = this.data.get(type)
    if (!typeMap) {
      typeMap = new Map()
      this.data.set(type, typeMap)
    }

    // Apply the operation
    switch (headers.operation) {
      case `insert`:
        typeMap.set(key, value)
        break
      case `update`:
        typeMap.set(key, value)
        break
      case `upsert`:
        typeMap.set(key, value)
        break
      case `delete`:
        typeMap.delete(key)
        break
    }
  }

  /**
   * Apply a batch of change events
   */
  applyBatch(events: Array<ChangeEvent>): void {
    for (const event of events) {
      this.apply(event)
    }
  }

  /**
   * Get a specific value by type and key
   */
  get<T = unknown>(type: string, key: string): T | undefined {
    const typeMap = this.data.get(type)
    if (!typeMap) {
      return undefined
    }
    return typeMap.get(key) as T | undefined
  }

  /**
   * Get all entries for a specific type
   */
  getType(type: string): Map<string, unknown> {
    return this.data.get(type) || new Map()
  }

  /**
   * Clear all state
   */
  clear(): void {
    this.data.clear()
  }

  /**
   * Get the number of types in the state
   */
  get typeCount(): number {
    return this.data.size
  }

  /**
   * Get all type names
   */
  get types(): Array<string> {
    return Array.from(this.data.keys())
  }
}
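For consumers that want a plain in-memory view rather than TanStack DB collections, MaterializedState can be fed decoded change events directly. A minimal sketch; the event literals mirror the shapes the event helpers produce, and the cast sidesteps the exact ChangeEvent field requirements, which live in types.ts and are not part of this diff:

```typescript
import { MaterializedState, type ChangeEvent } from "@durable-streams/state"

const view = new MaterializedState()
view.applyBatch([
  { type: "user", key: "u1", value: { name: "Ada" }, headers: { operation: "insert" } },
  { type: "user", key: "u2", value: { name: "Alan" }, headers: { operation: "upsert" } },
  { type: "user", key: "u2", headers: { operation: "delete" } },
] as Array<ChangeEvent>)

console.log(view.get<{ name: string }>("user", "u1")) // { name: "Ada" }
console.log(view.getType("user").size)                // 1 (u2 was deleted)
console.log(view.types, view.typeCount)               // ["user"], 1
```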