@durable-streams/state 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,558 @@
1
+ "use strict";
2
+ //#region rolldown:runtime
3
+ var __create = Object.create;
4
+ var __defProp = Object.defineProperty;
5
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
6
+ var __getOwnPropNames = Object.getOwnPropertyNames;
7
+ var __getProtoOf = Object.getPrototypeOf;
8
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
9
+ var __copyProps = (to, from, except, desc) => {
10
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
11
+ key = keys[i];
12
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
13
+ get: ((k) => from[k]).bind(null, key),
14
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
15
+ });
16
+ }
17
+ return to;
18
+ };
19
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
20
+ value: mod,
21
+ enumerable: true
22
+ }) : target, mod));
23
+
24
+ //#endregion
25
+ const __tanstack_db = __toESM(require("@tanstack/db"));
26
+ const __durable_streams_client = __toESM(require("@durable-streams/client"));
27
+
28
+ //#region src/types.ts
29
/**
 * Type guard to check if an event is a change event (its headers carry an
 * `operation` field).
 *
 * Guarded against malformed events: previously `"operation" in event.headers`
 * threw a TypeError when `headers` was missing or not an object; a type guard
 * must return false instead of throwing.
 */
function isChangeEvent(event) {
	return event != null && event.headers != null && typeof event.headers === `object` && `operation` in event.headers;
}
35
/**
 * Type guard to check if an event is a control event (its headers carry a
 * `control` field).
 *
 * Guarded against malformed events: previously `"control" in event.headers`
 * threw a TypeError when `headers` was missing or not an object; a type guard
 * must return false instead of throwing.
 */
function isControlEvent(event) {
	return event != null && event.headers != null && typeof event.headers === `object` && `control` in event.headers;
}
41
+
42
+ //#endregion
43
+ //#region src/materialized-state.ts
44
/**
 * MaterializedState maintains an in-memory view of state from change events.
 *
 * Data is organized per entity type: an outer Map keyed by type name, each
 * holding an inner Map of key -> value. This supports multi-type streams
 * where different entity types can coexist.
 */
var MaterializedState = class {
	data;
	constructor() {
		this.data = new Map();
	}
	/**
	 * Apply a single change event to update the materialized state.
	 * `insert`, `update` and `upsert` all overwrite the keyed entry;
	 * `delete` removes it. The per-type bucket is created eagerly so a
	 * delete for an unseen type still registers the type.
	 */
	apply(event) {
		const { type, key, value, headers } = event;
		let bucket = this.data.get(type);
		if (bucket === undefined) {
			bucket = new Map();
			this.data.set(type, bucket);
		}
		const op = headers.operation;
		if (op === `delete`) {
			bucket.delete(key);
		} else if (op === `insert` || op === `update` || op === `upsert`) {
			bucket.set(key, value);
		}
	}
	/**
	 * Apply a batch of change events in order.
	 */
	applyBatch(events) {
		events.forEach((event) => this.apply(event));
	}
	/**
	 * Get a specific value by type and key; `undefined` when absent.
	 */
	get(type, key) {
		return this.data.get(type)?.get(key);
	}
	/**
	 * Get all entries for a specific type (a fresh empty Map when unknown).
	 */
	getType(type) {
		return this.data.get(type) ?? new Map();
	}
	/**
	 * Clear all state.
	 */
	clear() {
		this.data.clear();
	}
	/**
	 * Number of distinct types currently held.
	 */
	get typeCount() {
		return this.data.size;
	}
	/**
	 * All type names currently held.
	 */
	get types() {
		return [...this.data.keys()];
	}
};
119
+
120
+ //#endregion
121
+ //#region src/stream-db.ts
122
/**
 * Internal event dispatcher that routes stream events to collection handlers.
 *
 * Writes are buffered per batch: dispatchChange() opens a transaction on a
 * handler (begin) the first time it touches it, and markUpToDate() commits
 * every touched handler at the batch boundary. It also tracks txids so
 * callers can await a specific transaction becoming visible.
 */
var EventDispatcher = class {
	/** Map from event type to collection handler */
	handlers = new Map();
	/** Handlers that have pending writes (need commit) */
	pendingHandlers = new Set();
	/** Whether we've received the initial up-to-date signal */
	isUpToDate = false;
	/** Resolvers and rejecters for preload promises */
	preloadResolvers = [];
	preloadRejecters = [];
	/** Set of all txids that have been seen and committed */
	seenTxids = new Set();
	/** Txids collected during current batch (before commit) */
	pendingTxids = new Set();
	/** Resolvers waiting for specific txids */
	txidResolvers = new Map();
	/** Track existing keys per collection for upsert logic */
	existingKeys = new Map();
	/**
	 * Register a handler for a specific event type.
	 * Also seeds the per-type key set used to resolve upserts, unless one
	 * already exists (re-registration keeps the known keys).
	 */
	registerHandler(eventType, handler) {
		this.handlers.set(eventType, handler);
		if (!this.existingKeys.has(eventType)) this.existingKeys.set(eventType, new Set());
	}
	/**
	 * Dispatch a change event to the appropriate collection.
	 * Writes are buffered until commit() is called via markUpToDate().
	 *
	 * Throws if a non-delete event carries a non-object value; events with
	 * no registered handler for their type are silently dropped (their txid
	 * is still recorded first, so awaitTxId works for unhandled types).
	 */
	dispatchChange(event) {
		if (!isChangeEvent(event)) return;
		// Record the txid before any handler lookup so it resolves even when
		// no collection is registered for this event type.
		if (event.headers.txid && typeof event.headers.txid === `string`) this.pendingTxids.add(event.headers.txid);
		const handler = this.handlers.get(event.type);
		if (!handler) return;
		let operation = event.headers.operation;
		if (operation !== `delete`) {
			if (typeof event.value !== `object` || event.value === null) throw new Error(`StreamDB collections require object values; got ${typeof event.value} for type=${event.type}, key=${event.key}`);
		}
		// Copy the value and stamp the stream key into the primary-key field
		// so the collection's getKey() resolves to the stream key.
		const originalValue = event.value ?? {};
		const value = { ...originalValue };
		value[handler.primaryKey] = event.key;
		// Open a transaction on this handler once per batch.
		if (!this.pendingHandlers.has(handler)) {
			handler.begin();
			this.pendingHandlers.add(handler);
		}
		// Resolve `upsert` into insert/update based on whether the key has
		// been seen for this type.
		if (operation === `upsert`) {
			const keys$1 = this.existingKeys.get(event.type);
			const existing = keys$1?.has(event.key);
			operation = existing ? `update` : `insert`;
		}
		// Keep the known-key set in sync with the resolved operation.
		const keys = this.existingKeys.get(event.type);
		if (operation === `insert` || operation === `update`) keys?.add(event.key);
		else keys?.delete(event.key);
		try {
			handler.write(value, operation);
		} catch (error) {
			console.error(`[StreamDB] Error in handler.write():`, error);
			console.error(`[StreamDB] Event that caused error:`, {
				type: event.type,
				key: event.key,
				operation
			});
			throw error;
		}
	}
	/**
	 * Handle control events from the stream JSON items.
	 * `reset` truncates every collection, forgets known keys, drops pending
	 * writes, and clears the up-to-date flag so preload waits again.
	 * Snapshot markers are currently no-ops.
	 */
	dispatchControl(event) {
		if (!isControlEvent(event)) return;
		switch (event.headers.control) {
			case `reset`:
				for (const handler of this.handlers.values()) handler.truncate();
				for (const keys of this.existingKeys.values()) keys.clear();
				this.pendingHandlers.clear();
				this.isUpToDate = false;
				break;
			case `snapshot-start`:
			case `snapshot-end`: break;
		}
	}
	/**
	 * Commit all pending writes and handle up-to-date signal.
	 * After committing, pending txids become "seen" and their waiters are
	 * resolved; on the first up-to-date, handlers are marked ready and
	 * preload promises resolve.
	 */
	markUpToDate() {
		for (const handler of this.pendingHandlers) try {
			handler.commit();
		} catch (error) {
			console.error(`[StreamDB] Error in handler.commit():`, error);
			// Work around a known upstream TanStack DB groupBy bug: keep
			// committing the remaining handlers instead of aborting the batch.
			if (error instanceof Error && error.message.includes(`already exists in the collection`) && error.message.includes(`live-query`)) {
				console.warn(`[StreamDB] Known TanStack DB groupBy bug detected - continuing despite error`);
				console.warn(`[StreamDB] Queries with groupBy may show stale data until fixed`);
				continue;
			}
			throw error;
		}
		this.pendingHandlers.clear();
		// Promote batch txids to "seen" and wake any awaitTxId() callers.
		for (const txid of this.pendingTxids) {
			this.seenTxids.add(txid);
			const resolvers = this.txidResolvers.get(txid);
			if (resolvers) {
				for (const { resolve, timeoutId } of resolvers) {
					clearTimeout(timeoutId);
					resolve();
				}
				this.txidResolvers.delete(txid);
			}
		}
		this.pendingTxids.clear();
		if (!this.isUpToDate) {
			this.isUpToDate = true;
			for (const handler of this.handlers.values()) handler.markReady();
			for (const resolve of this.preloadResolvers) resolve();
			this.preloadResolvers = [];
		}
	}
	/**
	 * Wait for the stream to reach up-to-date state.
	 * Resolves immediately if already up to date; otherwise the promise is
	 * settled by markUpToDate() or rejectAll().
	 */
	waitForUpToDate() {
		if (this.isUpToDate) return Promise.resolve();
		return new Promise((resolve, reject) => {
			this.preloadResolvers.push(resolve);
			this.preloadRejecters.push(reject);
		});
	}
	/**
	 * Reject all waiting preload promises and txid waiters with an error,
	 * clearing their timeouts. Used on stream failure or close.
	 */
	rejectAll(error) {
		for (const reject of this.preloadRejecters) reject(error);
		this.preloadResolvers = [];
		this.preloadRejecters = [];
		for (const resolvers of this.txidResolvers.values()) for (const { reject, timeoutId } of resolvers) {
			clearTimeout(timeoutId);
			reject(error);
		}
		this.txidResolvers.clear();
	}
	/**
	 * Check if we've received up-to-date.
	 */
	get ready() {
		return this.isUpToDate;
	}
	/**
	 * Wait for a specific txid to be seen in the stream.
	 * Resolves immediately for already-seen txids; otherwise registers a
	 * resolver that markUpToDate() settles, and rejects after `timeout` ms
	 * (default 5000), removing itself from the resolver list.
	 */
	awaitTxId(txid, timeout = 5e3) {
		if (this.seenTxids.has(txid)) return Promise.resolve();
		return new Promise((resolve, reject) => {
			const timeoutId = setTimeout(() => {
				// Drop this waiter so a late arrival doesn't call a dead resolver.
				const resolvers = this.txidResolvers.get(txid);
				if (resolvers) {
					const index = resolvers.findIndex((r) => r.timeoutId === timeoutId);
					if (index !== -1) resolvers.splice(index, 1);
					if (resolvers.length === 0) this.txidResolvers.delete(txid);
				}
				reject(new Error(`Timeout waiting for txid: ${txid}`));
			}, timeout);
			if (!this.txidResolvers.has(txid)) this.txidResolvers.set(txid, []);
			this.txidResolvers.get(txid).push({
				resolve,
				reject,
				timeoutId
			});
		});
	}
};
294
/**
 * Create a sync config for a stream-backed collection.
 *
 * The returned `sync` callback registers this collection's write hooks with
 * the dispatcher under its event type, adapting the dispatcher's
 * `(value, type)` write signature to the collection's message shape. If the
 * dispatcher is already up to date at registration time, the collection is
 * marked ready immediately. The cleanup function is a no-op.
 */
function createStreamSyncConfig(eventType, dispatcher, primaryKey) {
	const sync = ({ begin, write, commit, markReady, truncate }) => {
		const handler = {
			begin,
			write: (value, type) => write({ value, type }),
			commit,
			markReady,
			truncate,
			primaryKey
		};
		dispatcher.registerHandler(eventType, handler);
		if (dispatcher.ready) markReady();
		return () => {};
	};
	return { sync };
}
316
/**
 * Reserved collection names that would collide with StreamDB properties
 * (collections are now namespaced under `db.collections`, but these names
 * are still rejected to avoid clashing with the db object's own members:
 * `collections`, `preload`, `close`, `utils`, `actions`).
 */
const RESERVED_COLLECTION_NAMES = new Set([
	`collections`,
	`preload`,
	`close`,
	`utils`,
	`actions`
]);
327
+ /**
328
+ * Create helper functions for a collection
329
+ */
330
+ function createCollectionHelpers(eventType, primaryKey, schema) {
331
+ return {
332
+ insert: ({ key, value, headers }) => {
333
+ const result = schema[`~standard`].validate(value);
334
+ if (`issues` in result) throw new Error(`Validation failed for ${eventType} insert: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
335
+ const derived = value[primaryKey];
336
+ const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
337
+ if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} insert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
338
+ return {
339
+ type: eventType,
340
+ key: finalKey,
341
+ value,
342
+ headers: {
343
+ ...headers,
344
+ operation: `insert`
345
+ }
346
+ };
347
+ },
348
+ update: ({ key, value, oldValue, headers }) => {
349
+ const result = schema[`~standard`].validate(value);
350
+ if (`issues` in result) throw new Error(`Validation failed for ${eventType} update: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
351
+ if (oldValue !== void 0) {
352
+ const oldResult = schema[`~standard`].validate(oldValue);
353
+ if (`issues` in oldResult) throw new Error(`Validation failed for ${eventType} update (oldValue): ${oldResult.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
354
+ }
355
+ const derived = value[primaryKey];
356
+ const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
357
+ if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} update event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
358
+ return {
359
+ type: eventType,
360
+ key: finalKey,
361
+ value,
362
+ old_value: oldValue,
363
+ headers: {
364
+ ...headers,
365
+ operation: `update`
366
+ }
367
+ };
368
+ },
369
+ delete: ({ key, oldValue, headers }) => {
370
+ if (oldValue !== void 0) {
371
+ const result = schema[`~standard`].validate(oldValue);
372
+ if (`issues` in result) throw new Error(`Validation failed for ${eventType} delete (oldValue): ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
373
+ }
374
+ const finalKey = key ?? (oldValue ? String(oldValue[primaryKey]) : void 0);
375
+ if (!finalKey) throw new Error(`Cannot create ${eventType} delete event: must provide either 'key' or 'oldValue' with a ${primaryKey} field`);
376
+ return {
377
+ type: eventType,
378
+ key: finalKey,
379
+ old_value: oldValue,
380
+ headers: {
381
+ ...headers,
382
+ operation: `delete`
383
+ }
384
+ };
385
+ },
386
+ upsert: ({ key, value, headers }) => {
387
+ const result = schema[`~standard`].validate(value);
388
+ if (`issues` in result) throw new Error(`Validation failed for ${eventType} upsert: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
389
+ const derived = value[primaryKey];
390
+ const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
391
+ if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} upsert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
392
+ return {
393
+ type: eventType,
394
+ key: finalKey,
395
+ value,
396
+ headers: {
397
+ ...headers,
398
+ operation: `upsert`
399
+ }
400
+ };
401
+ }
402
+ };
403
+ }
404
/**
 * Create a state schema definition with typed collections and event helpers.
 *
 * Rejects collection names that collide with StreamDB's own properties,
 * rejects duplicate event types across collections, and returns a copy of
 * each definition enriched with its insert/update/delete/upsert helpers.
 */
function createStateSchema(collections) {
	const entries = Object.entries(collections);
	// Guard against names that would shadow StreamDB members.
	for (const [name] of entries) {
		if (RESERVED_COLLECTION_NAMES.has(name)) throw new Error(`Reserved collection name "${name}" - this would collide with StreamDB properties (${Array.from(RESERVED_COLLECTION_NAMES).join(`, `)})`);
	}
	// Each event type may back at most one collection.
	const typeToCollection = new Map();
	for (const [collectionName, def] of entries) {
		const existing = typeToCollection.get(def.type);
		if (existing) throw new Error(`Duplicate event type "${def.type}" - used by both "${existing}" and "${collectionName}" collections`);
		typeToCollection.set(def.type, collectionName);
	}
	const enhancedCollections = {};
	for (const [name, def] of entries) {
		enhancedCollections[name] = {
			...def,
			...createCollectionHelpers(def.type, def.primaryKey, def.schema)
		};
	}
	return enhancedCollections;
}
422
/**
 * Create a stream-backed database with TanStack DB collections
 *
 * This function is synchronous - it creates the stream handle and collections
 * but does not start the stream connection. Call `db.preload()` to connect
 * and sync initial data.
 *
 * @example
 * ```typescript
 * const stateSchema = createStateSchema({
 *   users: { schema: userSchema, type: "user", primaryKey: "id" },
 *   messages: { schema: messageSchema, type: "message", primaryKey: "id" },
 * })
 *
 * // Create a stream DB (synchronous - stream is created lazily on preload)
 * const db = createStreamDB({
 *   streamOptions: {
 *     url: "https://api.example.com/streams/my-stream",
 *     contentType: "application/json",
 *   },
 *   state: stateSchema,
 * })
 *
 * // preload() creates the stream and loads initial data
 * await db.preload()
 * const user = await db.collections.users.get("123")
 * ```
 *
 * Fix: removed leftover development logging (per-collection creation dumps,
 * per-batch progress logs, a 15s health-check interval that only logged, and
 * a `db.collections.events` log hard-coded to one app's collection name).
 * Error reporting in the batch handler is preserved.
 */
function createStreamDB(options) {
	const { streamOptions, state, actions: actionsFactory } = options;
	const stream = new __durable_streams_client.DurableStream(streamOptions);
	const dispatcher = new EventDispatcher();
	// One TanStack DB collection per schema entry, synced via the dispatcher.
	const collectionInstances = {};
	for (const [name, definition] of Object.entries(state)) {
		collectionInstances[name] = (0, __tanstack_db.createCollection)({
			id: `stream-db:${name}`,
			schema: definition.schema,
			getKey: (item) => String(item[definition.primaryKey]),
			sync: createStreamSyncConfig(definition.type, dispatcher, definition.primaryKey),
			startSync: true,
			gcTime: 0
		});
	}
	let streamResponse = null;
	const abortController = new AbortController();
	let consumerStarted = false;
	/**
	 * Start the stream consumer (called lazily on first preload).
	 * Routes each batch item through the dispatcher and commits at the
	 * batch's up-to-date boundary; any processing error rejects all
	 * waiters and aborts the stream.
	 */
	const startConsumer = async () => {
		if (consumerStarted) return;
		consumerStarted = true;
		streamResponse = await stream.stream({
			live: `auto`,
			signal: abortController.signal
		});
		streamResponse.subscribeJson(async (batch) => {
			try {
				for (const event of batch.items) {
					if (isChangeEvent(event)) dispatcher.dispatchChange(event);
					else if (isControlEvent(event)) dispatcher.dispatchControl(event);
				}
				if (batch.upToDate) dispatcher.markUpToDate();
			} catch (error) {
				console.error(`[StreamDB] Error processing batch:`, error);
				console.error(`[StreamDB] Failed batch:`, batch);
				dispatcher.rejectAll(error);
				abortController.abort();
			}
		});
	};
	const dbMethods = {
		stream,
		preload: async () => {
			await startConsumer();
			await dispatcher.waitForUpToDate();
		},
		close: () => {
			dispatcher.rejectAll(new Error(`StreamDB closed`));
			abortController.abort();
		},
		utils: { awaitTxId: (txid, timeout) => dispatcher.awaitTxId(txid, timeout) }
	};
	const db = {
		collections: collectionInstances,
		...dbMethods
	};
	// Optionally wrap user-supplied action definitions as optimistic actions.
	if (actionsFactory) {
		const actionDefs = actionsFactory({
			db,
			stream
		});
		const wrappedActions = {};
		for (const [name, def] of Object.entries(actionDefs)) wrappedActions[name] = (0, __tanstack_db.createOptimisticAction)({
			onMutate: def.onMutate,
			mutationFn: def.mutationFn
		});
		return {
			...db,
			actions: wrappedActions
		};
	}
	return db;
}
552
+
553
+ //#endregion
554
// Public CommonJS API surface. (Fix: terminate each statement with an
// explicit semicolon instead of relying on ASI.)
exports.MaterializedState = MaterializedState;
exports.createStateSchema = createStateSchema;
exports.createStreamDB = createStreamDB;
exports.isChangeEvent = isChangeEvent;
exports.isControlEvent = isControlEvent;