@durable-streams/server 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1592 @@
1
+ import { createServer } from "node:http";
2
+ import { deflateSync, gzipSync } from "node:zlib";
3
+ import * as fs from "node:fs";
4
+ import * as path$1 from "node:path";
5
+ import * as path from "node:path";
6
+ import { createHash, randomBytes } from "node:crypto";
7
+ import { open } from "lmdb";
8
+ import { SieveCache } from "@neophi/sieve-cache";
9
+ import * as fs$1 from "node:fs/promises";
10
+ import { DurableStream } from "@durable-streams/client";
11
+ import { createStateSchema } from "@durable-streams/state";
12
+
13
+ //#region src/store.ts
14
+ /**
15
+ * Normalize content-type by extracting the media type (before any semicolon).
16
+ * Handles cases like "application/json; charset=utf-8".
17
+ */
18
+ function normalizeContentType(contentType) {
19
+ if (!contentType) return ``;
20
+ return contentType.split(`;`)[0].trim().toLowerCase();
21
+ }
22
+ /**
23
+ * Process JSON data for append in JSON mode.
24
+ * - Validates JSON
25
+ * - Extracts array elements if data is an array
26
+ * - Always appends trailing comma for easy concatenation
27
+ * @param isInitialCreate - If true, empty arrays are allowed (creates empty stream)
28
+ * @throws Error if JSON is invalid or array is empty (for non-create operations)
29
+ */
30
+ function processJsonAppend(data, isInitialCreate = false) {
31
+ const text = new TextDecoder().decode(data);
32
+ let parsed;
33
+ try {
34
+ parsed = JSON.parse(text);
35
+ } catch {
36
+ throw new Error(`Invalid JSON`);
37
+ }
38
+ let result;
39
+ if (Array.isArray(parsed)) {
40
+ if (parsed.length === 0) {
41
+ if (isInitialCreate) return new Uint8Array(0);
42
+ throw new Error(`Empty arrays are not allowed`);
43
+ }
44
+ const elements = parsed.map((item) => JSON.stringify(item));
45
+ result = elements.join(`,`) + `,`;
46
+ } else result = JSON.stringify(parsed) + `,`;
47
+ return new TextEncoder().encode(result);
48
+ }
49
+ /**
50
+ * Format JSON mode response by wrapping in array brackets.
51
+ * Strips trailing comma before wrapping.
52
+ */
53
+ function formatJsonResponse(data) {
54
+ if (data.length === 0) return new TextEncoder().encode(`[]`);
55
+ let text = new TextDecoder().decode(data);
56
+ text = text.trimEnd();
57
+ if (text.endsWith(`,`)) text = text.slice(0, -1);
58
+ const wrapped = `[${text}]`;
59
+ return new TextEncoder().encode(wrapped);
60
+ }
61
/**
 * In-memory store for durable streams.
 *
 * Holds every stream's full message history in a Map keyed by stream path,
 * and tracks outstanding long-poll waiters so appends can wake them.
 * Offsets are strings of the form "<readSeq>_<byteOffset>", each part
 * zero-padded to 16 digits so offsets order correctly under plain string
 * comparison.
 */
var StreamStore = class {
	// path -> stream record ({ path, contentType, messages, currentOffset, ... })
	streams = new Map();
	// Long-poll waiters across all streams; entries remove themselves on
	// resolve or timeout.
	pendingLongPolls = [];
	/**
	 * Create a new stream.
	 * @throws Error if stream already exists with different config
	 * @returns existing stream if config matches (idempotent)
	 */
	create(path$2, options = {}) {
		const existing = this.streams.get(path$2);
		if (existing) {
			// Compare normalized media types; a missing content-type counts as
			// the default `application/octet-stream`.
			const contentTypeMatches = (normalizeContentType(options.contentType) || `application/octet-stream`) === (normalizeContentType(existing.contentType) || `application/octet-stream`);
			const ttlMatches = options.ttlSeconds === existing.ttlSeconds;
			const expiresMatches = options.expiresAt === existing.expiresAt;
			if (contentTypeMatches && ttlMatches && expiresMatches) return existing;
			else throw new Error(`Stream already exists with different configuration: ${path$2}`);
		}
		const stream = {
			path: path$2,
			contentType: options.contentType,
			messages: [],
			// Offset format: "<readSeq>_<byteOffset>", 16 digits each.
			currentOffset: `0000000000000000_0000000000000000`,
			ttlSeconds: options.ttlSeconds,
			expiresAt: options.expiresAt,
			createdAt: Date.now()
		};
		// Seed before registering; isInitialCreate=true lets an empty JSON
		// array create an empty stream instead of throwing.
		if (options.initialData && options.initialData.length > 0) this.appendToStream(stream, options.initialData, true);
		this.streams.set(path$2, stream);
		return stream;
	}
	/**
	 * Get a stream by path.
	 */
	get(path$2) {
		return this.streams.get(path$2);
	}
	/**
	 * Check if a stream exists.
	 */
	has(path$2) {
		return this.streams.has(path$2);
	}
	/**
	 * Delete a stream. Outstanding long-polls for it resolve with an empty
	 * message batch first.
	 */
	delete(path$2) {
		this.cancelLongPollsForStream(path$2);
		return this.streams.delete(path$2);
	}
	/**
	 * Append data to a stream.
	 * @throws Error if stream doesn't exist
	 * @throws Error if seq is lower than lastSeq
	 * @throws Error if JSON mode and array is empty
	 */
	append(path$2, data, options = {}) {
		const stream = this.streams.get(path$2);
		if (!stream) throw new Error(`Stream not found: ${path$2}`);
		if (options.contentType && stream.contentType) {
			const providedType = normalizeContentType(options.contentType);
			const streamType = normalizeContentType(stream.contentType);
			if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${stream.contentType}, got ${options.contentType}`);
		}
		if (options.seq !== void 0) {
			// Writer-supplied sequence numbers must be strictly increasing.
			if (stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${stream.lastSeq}`);
			stream.lastSeq = options.seq;
		}
		const message = this.appendToStream(stream, data);
		// Wake any long-pollers waiting on this stream.
		this.notifyLongPolls(path$2);
		return message;
	}
	/**
	 * Read messages from a stream starting at the given offset.
	 * A missing offset or "-1" means "from the beginning".
	 */
	read(path$2, offset) {
		const stream = this.streams.get(path$2);
		if (!stream) throw new Error(`Stream not found: ${path$2}`);
		if (!offset || offset === `-1`) return {
			messages: [...stream.messages],
			upToDate: true
		};
		const offsetIndex = this.findOffsetIndex(stream, offset);
		if (offsetIndex === -1) return {
			messages: [],
			upToDate: true
		};
		return {
			messages: stream.messages.slice(offsetIndex),
			upToDate: true
		};
	}
	/**
	 * Format messages for response.
	 * For JSON mode, wraps concatenated data in array brackets.
	 */
	formatResponse(path$2, messages) {
		const stream = this.streams.get(path$2);
		if (!stream) throw new Error(`Stream not found: ${path$2}`);
		// Concatenate all message payloads into one contiguous buffer.
		const totalSize = messages.reduce((sum, m) => sum + m.data.length, 0);
		const concatenated = new Uint8Array(totalSize);
		let offset = 0;
		for (const msg of messages) {
			concatenated.set(msg.data, offset);
			offset += msg.data.length;
		}
		if (normalizeContentType(stream.contentType) === `application/json`) return formatJsonResponse(concatenated);
		return concatenated;
	}
	/**
	 * Wait for new messages (long-poll).
	 * Resolves immediately if data already exists past `offset`; otherwise
	 * parks until an append arrives or `timeoutMs` elapses.
	 */
	async waitForMessages(path$2, offset, timeoutMs) {
		const stream = this.streams.get(path$2);
		if (!stream) throw new Error(`Stream not found: ${path$2}`);
		const { messages } = this.read(path$2, offset);
		if (messages.length > 0) return {
			messages,
			timedOut: false
		};
		return new Promise((resolve) => {
			// `pending` is referenced inside the timeout callback before its
			// declaration below; that is safe because the callback only runs
			// after this synchronous block completes.
			const timeoutId = setTimeout(() => {
				this.removePendingLongPoll(pending);
				resolve({
					messages: [],
					timedOut: true
				});
			}, timeoutMs);
			const pending = {
				path: path$2,
				offset,
				resolve: (msgs) => {
					clearTimeout(timeoutId);
					this.removePendingLongPoll(pending);
					resolve({
						messages: msgs,
						timedOut: false
					});
				},
				timeoutId
			};
			this.pendingLongPolls.push(pending);
		});
	}
	/**
	 * Get the current offset for a stream (undefined if it doesn't exist).
	 */
	getCurrentOffset(path$2) {
		return this.streams.get(path$2)?.currentOffset;
	}
	/**
	 * Clear all streams. Pending long-polls resolve with an empty batch.
	 */
	clear() {
		for (const pending of this.pendingLongPolls) {
			clearTimeout(pending.timeoutId);
			pending.resolve([]);
		}
		this.pendingLongPolls = [];
		this.streams.clear();
	}
	/**
	 * Cancel all pending long-polls (used during shutdown).
	 */
	cancelAllWaits() {
		for (const pending of this.pendingLongPolls) {
			clearTimeout(pending.timeoutId);
			pending.resolve([]);
		}
		this.pendingLongPolls = [];
	}
	/**
	 * Get all stream paths.
	 */
	list() {
		return Array.from(this.streams.keys());
	}
	// Core append path: processes JSON payloads, advances the byte offset,
	// and records the message. Returns null when a JSON payload produced no
	// data (empty array on initial create).
	appendToStream(stream, data, isInitialCreate = false) {
		let processedData = data;
		if (normalizeContentType(stream.contentType) === `application/json`) {
			processedData = processJsonAppend(data, isInitialCreate);
			if (processedData.length === 0) return null;
		}
		const parts = stream.currentOffset.split(`_`).map(Number);
		const readSeq = parts[0];
		const byteOffset = parts[1];
		const newByteOffset = byteOffset + processedData.length;
		const newOffset = `${String(readSeq).padStart(16, `0`)}_${String(newByteOffset).padStart(16, `0`)}`;
		const message = {
			data: processedData,
			offset: newOffset,
			timestamp: Date.now()
		};
		stream.messages.push(message);
		stream.currentOffset = newOffset;
		return message;
	}
	// Index of the first message strictly past `offset`, or -1 when the
	// caller is already up to date. Relies on zero-padded offsets ordering
	// correctly as strings.
	findOffsetIndex(stream, offset) {
		for (let i = 0; i < stream.messages.length; i++) if (stream.messages[i].offset > offset) return i;
		return -1;
	}
	// Resolve any long-polls that now have data available past their offset.
	notifyLongPolls(path$2) {
		const toNotify = this.pendingLongPolls.filter((p) => p.path === path$2);
		for (const pending of toNotify) {
			const { messages } = this.read(path$2, pending.offset);
			if (messages.length > 0) pending.resolve(messages);
		}
	}
	// Resolve (with no messages) and drop every long-poll for one stream.
	cancelLongPollsForStream(path$2) {
		const toCancel = this.pendingLongPolls.filter((p) => p.path === path$2);
		for (const pending of toCancel) {
			clearTimeout(pending.timeoutId);
			pending.resolve([]);
		}
		this.pendingLongPolls = this.pendingLongPolls.filter((p) => p.path !== path$2);
	}
	// Detach a waiter from the pending list (idempotent).
	removePendingLongPoll(pending) {
		const index = this.pendingLongPolls.indexOf(pending);
		if (index !== -1) this.pendingLongPolls.splice(index, 1);
	}
};
284
+
285
+ //#endregion
286
+ //#region src/path-encoding.ts
287
+ const MAX_ENCODED_LENGTH = 200;
288
+ /**
289
+ * Encode a stream path to a filesystem-safe directory name using base64url encoding.
290
+ * Long paths (>200 chars) are hashed to keep directory names manageable.
291
+ *
292
+ * @example
293
+ * encodeStreamPath("/stream/users:created") → "L3N0cmVhbS91c2VyczpjcmVhdGVk"
294
+ */
295
+ function encodeStreamPath(path$2) {
296
+ const base64 = Buffer.from(path$2, `utf-8`).toString(`base64`).replace(/\+/g, `-`).replace(/\//g, `_`).replace(/=/g, ``);
297
+ if (base64.length > MAX_ENCODED_LENGTH) {
298
+ const hash = createHash(`sha256`).update(path$2).digest(`hex`).slice(0, 16);
299
+ return `${base64.slice(0, 180)}~${hash}`;
300
+ }
301
+ return base64;
302
+ }
303
+ /**
304
+ * Decode a filesystem-safe directory name back to the original stream path.
305
+ *
306
+ * @example
307
+ * decodeStreamPath("L3N0cmVhbS91c2VyczpjcmVhdGVk") → "/stream/users:created"
308
+ */
309
+ function decodeStreamPath(encoded) {
310
+ let base = encoded;
311
+ const tildeIndex = encoded.lastIndexOf(`~`);
312
+ if (tildeIndex !== -1) {
313
+ const possibleHash = encoded.slice(tildeIndex + 1);
314
+ if (possibleHash.length === 16 && /^[0-9a-f]+$/.test(possibleHash)) base = encoded.slice(0, tildeIndex);
315
+ }
316
+ const normalized = base.replace(/-/g, `+`).replace(/_/g, `/`);
317
+ const padded = normalized + `=`.repeat((4 - normalized.length % 4) % 4);
318
+ return Buffer.from(padded, `base64`).toString(`utf-8`);
319
+ }
320
+
321
+ //#endregion
322
+ //#region src/file-manager.ts
323
+ var StreamFileManager = class {
324
+ constructor(streamsDir) {
325
+ this.streamsDir = streamsDir;
326
+ }
327
+ /**
328
+ * Create a directory for a new stream and initialize the first segment file.
329
+ * Returns the absolute path to the stream directory.
330
+ */
331
+ async createStreamDirectory(streamPath) {
332
+ const encoded = encodeStreamPath(streamPath);
333
+ const dir = path$1.join(this.streamsDir, encoded);
334
+ await fs$1.mkdir(dir, { recursive: true });
335
+ const segmentPath = path$1.join(dir, `segment_00000.log`);
336
+ await fs$1.writeFile(segmentPath, ``);
337
+ return dir;
338
+ }
339
+ /**
340
+ * Delete a stream directory and all its contents.
341
+ */
342
+ async deleteStreamDirectory(streamPath) {
343
+ const encoded = encodeStreamPath(streamPath);
344
+ const dir = path$1.join(this.streamsDir, encoded);
345
+ await fs$1.rm(dir, {
346
+ recursive: true,
347
+ force: true
348
+ });
349
+ }
350
+ /**
351
+ * Delete a directory by its exact name (used for unique directory names).
352
+ */
353
+ async deleteDirectoryByName(directoryName) {
354
+ const dir = path$1.join(this.streamsDir, directoryName);
355
+ await fs$1.rm(dir, {
356
+ recursive: true,
357
+ force: true
358
+ });
359
+ }
360
+ /**
361
+ * Get the absolute path to a stream's directory.
362
+ * Returns null if the directory doesn't exist.
363
+ */
364
+ async getStreamDirectory(streamPath) {
365
+ const encoded = encodeStreamPath(streamPath);
366
+ const dir = path$1.join(this.streamsDir, encoded);
367
+ try {
368
+ await fs$1.access(dir);
369
+ return dir;
370
+ } catch {
371
+ return null;
372
+ }
373
+ }
374
+ /**
375
+ * List all stream paths by scanning the streams directory.
376
+ */
377
+ async listStreamPaths() {
378
+ try {
379
+ const entries = await fs$1.readdir(this.streamsDir, { withFileTypes: true });
380
+ return entries.filter((e) => e.isDirectory()).map((e) => decodeStreamPath(e.name));
381
+ } catch {
382
+ return [];
383
+ }
384
+ }
385
+ /**
386
+ * Get the path to a segment file within a stream directory.
387
+ *
388
+ * @param streamDir - Absolute path to the stream directory
389
+ * @param index - Segment index (0-based)
390
+ */
391
+ getSegmentPath(streamDir, index) {
392
+ const paddedIndex = String(index).padStart(5, `0`);
393
+ return path$1.join(streamDir, `segment_${paddedIndex}.log`);
394
+ }
395
+ };
396
+
397
+ //#endregion
398
+ //#region src/file-store.ts
399
/**
 * Bounded pool of append-mode write streams, keyed by file path.
 * Backed by a SieveCache so that when the pool exceeds `maxSize`, the
 * evicted handle is closed asynchronously via the evict hook.
 */
var FileHandlePool = class {
	// SieveCache<filePath, { stream: fs.WriteStream }>
	cache;
	constructor(maxSize) {
		// evictHook fires when SieveCache evicts an entry; close the stream
		// best-effort (errors are logged, not propagated).
		this.cache = new SieveCache(maxSize, { evictHook: (_key, handle) => {
			this.closeHandle(handle).catch((err) => {
				console.error(`[FileHandlePool] Error closing evicted handle:`, err);
			});
		} });
	}
	/**
	 * Get (or lazily create) an append-mode write stream for `filePath`.
	 * New streams are opened with flag "a" so existing content is preserved.
	 */
	getWriteStream(filePath) {
		let handle = this.cache.get(filePath);
		if (!handle) {
			const stream = fs.createWriteStream(filePath, { flags: `a` });
			handle = { stream };
			this.cache.set(filePath, handle);
		}
		return handle.stream;
	}
	/**
	 * Flush a specific file to disk immediately (fdatasync).
	 * This is called after each append to ensure durability.
	 * No-op when the file has no pooled handle.
	 */
	async fsyncFile(filePath) {
		const handle = this.cache.get(filePath);
		if (!handle) return;
		return new Promise((resolve, reject) => {
			const fd = handle.stream.fd;
			if (typeof fd !== `number`) {
				// The stream hasn't finished opening yet, so there is no fd to
				// sync. Wait for either "open" (then sync) or "error" (then
				// reject), detaching the other listener to avoid leaks.
				const onOpen = (openedFd) => {
					handle.stream.off(`error`, onError);
					fs.fdatasync(openedFd, (err) => {
						if (err) reject(err);
						else resolve();
					});
				};
				const onError = (err) => {
					handle.stream.off(`open`, onOpen);
					reject(err);
				};
				handle.stream.once(`open`, onOpen);
				handle.stream.once(`error`, onError);
				return;
			}
			// Stream already open: sync its descriptor directly.
			fs.fdatasync(fd, (err) => {
				if (err) reject(err);
				else resolve();
			});
		});
	}
	/**
	 * Close every pooled handle in parallel and empty the cache.
	 */
	async closeAll() {
		const promises = [];
		for (const [_key, handle] of this.cache.entries()) promises.push(this.closeHandle(handle));
		await Promise.all(promises);
		this.cache.clear();
	}
	/**
	 * Close a specific file handle if it exists in the cache.
	 * Useful for cleanup before deleting files.
	 */
	async closeFileHandle(filePath) {
		const handle = this.cache.get(filePath);
		if (handle) {
			await this.closeHandle(handle);
			this.cache.delete(filePath);
		}
	}
	// End the write stream and resolve once its internal buffer is flushed.
	async closeHandle(handle) {
		return new Promise((resolve) => {
			handle.stream.end(() => resolve());
		});
	}
};
471
+ /**
472
+ * Generate a unique directory name for a stream.
473
+ * Format: {encoded_path}~{timestamp}~{random_hex}
474
+ * This allows safe async deletion and immediate reuse of stream paths.
475
+ */
476
+ function generateUniqueDirectoryName(streamPath) {
477
+ const encoded = encodeStreamPath(streamPath);
478
+ const timestamp = Date.now().toString(36);
479
+ const random = randomBytes(4).toString(`hex`);
480
+ return `${encoded}~${timestamp}~${random}`;
481
+ }
482
/**
 * File-backed implementation of StreamStore.
 * Maintains the same interface as the in-memory StreamStore for drop-in
 * compatibility.
 *
 * Layout:
 * - Stream metadata lives in LMDB under keys "stream:<path>".
 * - Message payloads live in per-stream segment files, framed as
 *   [4-byte big-endian length][payload][newline].
 * Durability: each append is written and fdatasync'd before metadata is
 * updated, so on crash the file is the source of truth and recovery
 * reconciles LMDB offsets against the file contents.
 */
var FileBackedStreamStore = class {
	// LMDB database handle (metadata only).
	db;
	// Directory-layout helper for stream segment files.
	fileManager;
	// Bounded pool of append-mode write streams.
	fileHandlePool;
	// Long-poll waiters across all streams.
	pendingLongPolls = [];
	// Root data directory containing metadata.lmdb and streams/.
	dataDir;
	constructor(options) {
		this.dataDir = options.dataDir;
		this.db = open({
			path: path.join(this.dataDir, `metadata.lmdb`),
			compression: true
		});
		this.fileManager = new StreamFileManager(path.join(this.dataDir, `streams`));
		const maxFileHandles = options.maxFileHandles ?? 100;
		this.fileHandlePool = new FileHandlePool(maxFileHandles);
		this.recover();
	}
	/**
	 * Recover streams from disk on startup.
	 * Validates that LMDB metadata matches actual file contents and
	 * reconciles any mismatches (the file wins).
	 */
	recover() {
		console.log(`[FileBackedStreamStore] Starting recovery...`);
		let recovered = 0;
		let reconciled = 0;
		let errors = 0;
		const range = this.db.getRange({
			start: `stream:`,
			end: `stream:\xFF`
		});
		const entries = Array.from(range);
		for (const { key, value } of entries) {
			try {
				if (typeof key !== `string`) continue;
				const streamMeta = value;
				const streamPath = key.replace(`stream:`, ``);
				const segmentPath = path.join(this.dataDir, `streams`, streamMeta.directoryName, `segment_00000.log`);
				if (!fs.existsSync(segmentPath)) {
					// Metadata without a backing file: drop the orphaned entry.
					console.warn(`[FileBackedStreamStore] Recovery: Stream file missing for ${streamPath}, removing from LMDB`);
					this.db.removeSync(key);
					errors++;
					continue;
				}
				const trueOffset = this.scanFileForTrueOffset(segmentPath);
				if (trueOffset !== streamMeta.currentOffset) {
					// Crash between file write and metadata update: trust the file.
					console.warn(`[FileBackedStreamStore] Recovery: Offset mismatch for ${streamPath}: LMDB says ${streamMeta.currentOffset}, file says ${trueOffset}. Reconciling to file.`);
					const reconciledMeta = {
						...streamMeta,
						currentOffset: trueOffset
					};
					this.db.putSync(key, reconciledMeta);
					reconciled++;
				}
				recovered++;
			} catch (err) {
				console.error(`[FileBackedStreamStore] Error recovering stream:`, err);
				errors++;
			}
		}
		console.log(`[FileBackedStreamStore] Recovery complete: ${recovered} streams, ${reconciled} reconciled, ${errors} errors`);
	}
	/**
	 * Scan a segment file to compute the true last offset.
	 * Handles partial/truncated messages at the end (they are ignored).
	 */
	scanFileForTrueOffset(segmentPath) {
		try {
			const fileContent = fs.readFileSync(segmentPath);
			let filePos = 0;
			let currentDataOffset = 0;
			while (filePos < fileContent.length) {
				// Frame: 4-byte length prefix, payload, newline separator.
				if (filePos + 4 > fileContent.length) break;
				const messageLength = fileContent.readUInt32BE(filePos);
				filePos += 4;
				if (filePos + messageLength > fileContent.length) break;
				filePos += messageLength;
				// Skip the newline unless the file ends exactly at the payload.
				if (filePos < fileContent.length) filePos += 1;
				currentDataOffset += messageLength;
			}
			return `0000000000000000_${String(currentDataOffset).padStart(16, `0`)}`;
		} catch (err) {
			console.error(`[FileBackedStreamStore] Error scanning file ${segmentPath}:`, err);
			return `0000000000000000_0000000000000000`;
		}
	}
	/**
	 * Convert LMDB metadata to a Stream object (messages are not loaded).
	 */
	streamMetaToStream(meta) {
		return {
			path: meta.path,
			contentType: meta.contentType,
			messages: [],
			currentOffset: meta.currentOffset,
			lastSeq: meta.lastSeq,
			ttlSeconds: meta.ttlSeconds,
			expiresAt: meta.expiresAt,
			createdAt: meta.createdAt
		};
	}
	/**
	 * Close the store, closing all file handles and database.
	 * All data is already fsynced on each append, so no final flush needed.
	 */
	async close() {
		await this.fileHandlePool.closeAll();
		await this.db.close();
	}
	/**
	 * Create a new stream.
	 * @throws Error if stream already exists with different config
	 * @returns existing stream if config matches (idempotent)
	 */
	async create(streamPath, options = {}) {
		const key = `stream:${streamPath}`;
		const existing = this.db.get(key);
		if (existing) {
			// FIX: use normalizeContentType so media-type parameters such as
			// "; charset=utf-8" are ignored during the idempotent-create
			// comparison, matching the in-memory StreamStore.create behavior.
			// The previous version only lowercased, so re-creating with
			// "application/json; charset=utf-8" against "application/json"
			// incorrectly threw a configuration-mismatch error.
			const normalizeMimeType = (ct) => normalizeContentType(ct) || `application/octet-stream`;
			const contentTypeMatches = normalizeMimeType(options.contentType) === normalizeMimeType(existing.contentType);
			const ttlMatches = options.ttlSeconds === existing.ttlSeconds;
			const expiresMatches = options.expiresAt === existing.expiresAt;
			if (contentTypeMatches && ttlMatches && expiresMatches) return this.streamMetaToStream(existing);
			else throw new Error(`Stream already exists with different configuration: ${streamPath}`);
		}
		const streamMeta = {
			path: streamPath,
			contentType: options.contentType,
			currentOffset: `0000000000000000_0000000000000000`,
			lastSeq: void 0,
			ttlSeconds: options.ttlSeconds,
			expiresAt: options.expiresAt,
			createdAt: Date.now(),
			segmentCount: 1,
			totalBytes: 0,
			// Unique suffix lets async deletion of an old directory overlap
			// with immediate reuse of the same stream path.
			directoryName: generateUniqueDirectoryName(streamPath)
		};
		const streamDir = path.join(this.dataDir, `streams`, streamMeta.directoryName);
		try {
			fs.mkdirSync(streamDir, { recursive: true });
			const segmentPath = path.join(streamDir, `segment_00000.log`);
			fs.writeFileSync(segmentPath, ``);
		} catch (err) {
			console.error(`[FileBackedStreamStore] Error creating stream directory:`, err);
			throw err;
		}
		this.db.putSync(key, streamMeta);
		if (options.initialData && options.initialData.length > 0) {
			await this.append(streamPath, options.initialData, {
				contentType: options.contentType,
				isInitialCreate: true
			});
			// Re-read so the returned stream reflects the seeded offset.
			const updated = this.db.get(key);
			return this.streamMetaToStream(updated);
		}
		return this.streamMetaToStream(streamMeta);
	}
	/**
	 * Get a stream by path (undefined if it doesn't exist).
	 */
	get(streamPath) {
		const key = `stream:${streamPath}`;
		const meta = this.db.get(key);
		return meta ? this.streamMetaToStream(meta) : void 0;
	}
	/**
	 * Check if a stream exists.
	 */
	has(streamPath) {
		const key = `stream:${streamPath}`;
		return this.db.get(key) !== void 0;
	}
	/**
	 * Delete a stream. Metadata is removed synchronously; the file handle
	 * close and directory removal happen asynchronously best-effort.
	 */
	delete(streamPath) {
		const key = `stream:${streamPath}`;
		const streamMeta = this.db.get(key);
		if (!streamMeta) return false;
		this.cancelLongPollsForStream(streamPath);
		const segmentPath = path.join(this.dataDir, `streams`, streamMeta.directoryName, `segment_00000.log`);
		this.fileHandlePool.closeFileHandle(segmentPath).catch((err) => {
			console.error(`[FileBackedStreamStore] Error closing file handle:`, err);
		});
		this.db.removeSync(key);
		this.fileManager.deleteDirectoryByName(streamMeta.directoryName).catch((err) => {
			console.error(`[FileBackedStreamStore] Error deleting stream directory:`, err);
		});
		return true;
	}
	/**
	 * Append data to a stream: frame it, write + fdatasync the segment file,
	 * then update LMDB metadata and wake long-pollers.
	 * @throws Error if stream doesn't exist, content-type mismatches, or seq
	 *   is not strictly increasing
	 */
	async append(streamPath, data, options = {}) {
		const key = `stream:${streamPath}`;
		const streamMeta = this.db.get(key);
		if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
		if (options.contentType && streamMeta.contentType) {
			const providedType = normalizeContentType(options.contentType);
			const streamType = normalizeContentType(streamMeta.contentType);
			if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${streamMeta.contentType}, got ${options.contentType}`);
		}
		if (options.seq !== void 0) {
			if (streamMeta.lastSeq !== void 0 && options.seq <= streamMeta.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${streamMeta.lastSeq}`);
		}
		let processedData = data;
		if (normalizeContentType(streamMeta.contentType) === `application/json`) {
			processedData = processJsonAppend(data, options.isInitialCreate ?? false);
			if (processedData.length === 0) return null;
		}
		const parts = streamMeta.currentOffset.split(`_`).map(Number);
		const readSeq = parts[0];
		const byteOffset = parts[1];
		const newByteOffset = byteOffset + processedData.length;
		const newOffset = `${String(readSeq).padStart(16, `0`)}_${String(newByteOffset).padStart(16, `0`)}`;
		const streamDir = path.join(this.dataDir, `streams`, streamMeta.directoryName);
		const segmentPath = path.join(streamDir, `segment_00000.log`);
		const stream = this.fileHandlePool.getWriteStream(segmentPath);
		// Frame: [4-byte BE length][payload][newline].
		const lengthBuf = Buffer.allocUnsafe(4);
		lengthBuf.writeUInt32BE(processedData.length, 0);
		const frameBuf = Buffer.concat([
			lengthBuf,
			processedData,
			Buffer.from(`\n`)
		]);
		await new Promise((resolve, reject) => {
			stream.write(frameBuf, (err) => {
				if (err) reject(err);
				else resolve();
			});
		});
		const message = {
			data: processedData,
			offset: newOffset,
			timestamp: Date.now()
		};
		// Durability point: data is on disk before metadata is advanced.
		await this.fileHandlePool.fsyncFile(segmentPath);
		const updatedMeta = {
			...streamMeta,
			currentOffset: newOffset,
			lastSeq: options.seq ?? streamMeta.lastSeq,
			// +5 accounts for framing: 4-byte length prefix + newline.
			totalBytes: streamMeta.totalBytes + processedData.length + 5
		};
		this.db.putSync(key, updatedMeta);
		this.notifyLongPolls(streamPath);
		return message;
	}
	/**
	 * Read messages from a stream starting at the given offset by scanning
	 * the segment file. Offsets are recomputed from frame lengths, so
	 * per-message timestamps are not preserved (reported as 0).
	 */
	read(streamPath, offset) {
		const key = `stream:${streamPath}`;
		const streamMeta = this.db.get(key);
		if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
		const startOffset = offset ?? `0000000000000000_0000000000000000`;
		const startParts = startOffset.split(`_`).map(Number);
		const startByte = startParts[1] ?? 0;
		const currentParts = streamMeta.currentOffset.split(`_`).map(Number);
		const currentSeq = currentParts[0] ?? 0;
		const currentByte = currentParts[1] ?? 0;
		// Empty stream or caller already caught up: nothing to scan.
		if (streamMeta.currentOffset === `0000000000000000_0000000000000000`) return {
			messages: [],
			upToDate: true
		};
		if (startByte >= currentByte) return {
			messages: [],
			upToDate: true
		};
		const streamDir = path.join(this.dataDir, `streams`, streamMeta.directoryName);
		const segmentPath = path.join(streamDir, `segment_00000.log`);
		if (!fs.existsSync(segmentPath)) return {
			messages: [],
			upToDate: true
		};
		const messages = [];
		try {
			const fileContent = fs.readFileSync(segmentPath);
			let filePos = 0;
			let currentDataOffset = 0;
			while (filePos < fileContent.length) {
				if (filePos + 4 > fileContent.length) break;
				const messageLength = fileContent.readUInt32BE(filePos);
				filePos += 4;
				if (filePos + messageLength > fileContent.length) break;
				const messageData = fileContent.subarray(filePos, filePos + messageLength);
				filePos += messageLength;
				filePos += 1;
				// A message's offset is the cumulative data length *after* it.
				const messageOffset = currentDataOffset + messageLength;
				if (messageOffset > startByte) messages.push({
					data: new Uint8Array(messageData),
					offset: `${String(currentSeq).padStart(16, `0`)}_${String(messageOffset).padStart(16, `0`)}`,
					timestamp: 0
				});
				currentDataOffset = messageOffset;
			}
		} catch (err) {
			console.error(`[FileBackedStreamStore] Error reading file:`, err);
		}
		return {
			messages,
			upToDate: true
		};
	}
	/**
	 * Wait for new messages (long-poll).
	 * Resolves immediately if data already exists past `offset`; otherwise
	 * parks until an append arrives or `timeoutMs` elapses.
	 */
	async waitForMessages(streamPath, offset, timeoutMs) {
		const key = `stream:${streamPath}`;
		const streamMeta = this.db.get(key);
		if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
		const { messages } = this.read(streamPath, offset);
		if (messages.length > 0) return {
			messages,
			timedOut: false
		};
		return new Promise((resolve) => {
			// `pending` is referenced inside the timeout callback before its
			// declaration below; safe because the callback runs later.
			const timeoutId = setTimeout(() => {
				this.removePendingLongPoll(pending);
				resolve({
					messages: [],
					timedOut: true
				});
			}, timeoutMs);
			const pending = {
				path: streamPath,
				offset,
				resolve: (msgs) => {
					clearTimeout(timeoutId);
					this.removePendingLongPoll(pending);
					resolve({
						messages: msgs,
						timedOut: false
					});
				},
				timeoutId
			};
			this.pendingLongPolls.push(pending);
		});
	}
	/**
	 * Format messages for response.
	 * For JSON mode, wraps concatenated data in array brackets.
	 */
	formatResponse(streamPath, messages) {
		const key = `stream:${streamPath}`;
		const streamMeta = this.db.get(key);
		if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
		const totalSize = messages.reduce((sum, m) => sum + m.data.length, 0);
		const concatenated = new Uint8Array(totalSize);
		let offset = 0;
		for (const msg of messages) {
			concatenated.set(msg.data, offset);
			offset += msg.data.length;
		}
		if (normalizeContentType(streamMeta.contentType) === `application/json`) return formatJsonResponse(concatenated);
		return concatenated;
	}
	/**
	 * Get the current offset for a stream (undefined if it doesn't exist).
	 */
	getCurrentOffset(streamPath) {
		const key = `stream:${streamPath}`;
		const streamMeta = this.db.get(key);
		return streamMeta?.currentOffset;
	}
	/**
	 * Clear all streams: resolve pending long-polls, remove all metadata,
	 * and close pooled file handles (best-effort).
	 */
	clear() {
		for (const pending of this.pendingLongPolls) {
			clearTimeout(pending.timeoutId);
			pending.resolve([]);
		}
		this.pendingLongPolls = [];
		const range = this.db.getRange({
			start: `stream:`,
			end: `stream:\xFF`
		});
		const entries = Array.from(range);
		for (const { key } of entries) this.db.removeSync(key);
		this.fileHandlePool.closeAll().catch((err) => {
			console.error(`[FileBackedStreamStore] Error closing handles:`, err);
		});
	}
	/**
	 * Cancel all pending long-polls (used during shutdown).
	 */
	cancelAllWaits() {
		for (const pending of this.pendingLongPolls) {
			clearTimeout(pending.timeoutId);
			pending.resolve([]);
		}
		this.pendingLongPolls = [];
	}
	/**
	 * Get all stream paths from LMDB.
	 */
	list() {
		const paths = [];
		const range = this.db.getRange({
			start: `stream:`,
			end: `stream:\xFF`
		});
		const entries = Array.from(range);
		for (const { key } of entries) if (typeof key === `string`) paths.push(key.replace(`stream:`, ``));
		return paths;
	}
	// Resolve any long-polls that now have data available past their offset.
	notifyLongPolls(streamPath) {
		const toNotify = this.pendingLongPolls.filter((p) => p.path === streamPath);
		for (const pending of toNotify) {
			const { messages } = this.read(streamPath, pending.offset);
			if (messages.length > 0) pending.resolve(messages);
		}
	}
	// Resolve (with no messages) and drop every long-poll for one stream.
	cancelLongPollsForStream(streamPath) {
		const toCancel = this.pendingLongPolls.filter((p) => p.path === streamPath);
		for (const pending of toCancel) {
			clearTimeout(pending.timeoutId);
			pending.resolve([]);
		}
		this.pendingLongPolls = this.pendingLongPolls.filter((p) => p.path !== streamPath);
	}
	// Detach a waiter from the pending list (idempotent).
	removePendingLongPoll(pending) {
		const index = this.pendingLongPolls.indexOf(pending);
		if (index !== -1) this.pendingLongPolls.splice(index, 1);
	}
};
878
+
879
+ //#endregion
880
//#region src/cursor.ts
/**
 * Stream cursor calculation for CDN cache collapsing.
 *
 * This module implements interval-based cursor generation to prevent
 * infinite CDN cache loops while enabling request collapsing.
 *
 * The mechanism works by:
 * 1. Dividing time into fixed intervals (default 20 seconds)
 * 2. Computing interval number from an epoch (October 9, 2024)
 * 3. Returning cursor values that change at interval boundaries
 * 4. Ensuring monotonic cursor progression (never going backwards)
 */
/**
 * Default epoch for cursor calculation: October 9, 2024 00:00:00 UTC.
 * This is the reference point from which intervals are counted.
 * Using a past date ensures cursors are always positive.
 */
const DEFAULT_CURSOR_EPOCH = new Date(`2024-10-09T00:00:00.000Z`);
/**
 * Default interval duration in seconds. Cursors advance once per interval.
 */
const DEFAULT_CURSOR_INTERVAL_SECONDS = 20;
/**
 * Maximum jitter in seconds to add on collision.
 * Per protocol spec: random value between 1-3600 seconds.
 */
const MAX_JITTER_SECONDS = 3600;
/**
 * Minimum jitter in seconds (lower bound of the collision jitter range).
 */
const MIN_JITTER_SECONDS = 1;
912
/**
 * Calculate the current cursor value based on time intervals.
 *
 * The cursor is the number of whole intervals elapsed since the epoch,
 * rendered as a decimal string.
 *
 * @param options - Configuration for cursor calculation
 * @returns The current cursor value as a string
 */
function calculateCursor(options = {}) {
  const {
    intervalSeconds = DEFAULT_CURSOR_INTERVAL_SECONDS,
    epoch = DEFAULT_CURSOR_EPOCH
  } = options;
  const elapsedMs = Date.now() - epoch.getTime();
  const intervalNumber = Math.floor(elapsedMs / (intervalSeconds * 1e3));
  return String(intervalNumber);
}
927
/**
 * Generate a random jitter value in intervals.
 *
 * Picks a uniform random number of seconds in [MIN_JITTER_SECONDS,
 * MAX_JITTER_SECONDS] and converts it to a whole number of intervals,
 * never returning less than one interval.
 *
 * @param intervalSeconds - The interval duration in seconds
 * @returns Number of intervals to add as jitter
 */
function generateJitterIntervals(intervalSeconds) {
  const range = MAX_JITTER_SECONDS - MIN_JITTER_SECONDS + 1;
  const jitterSeconds = MIN_JITTER_SECONDS + Math.floor(Math.random() * range);
  const intervals = Math.ceil(jitterSeconds / intervalSeconds);
  return intervals < 1 ? 1 : intervals;
}
937
/**
 * Generate a cursor for a response, ensuring monotonic progression.
 *
 * This function ensures the returned cursor is always greater than or equal
 * to the current time interval, and strictly greater than any client-provided
 * cursor. This prevents cache loops where a client could cycle between
 * cursor values.
 *
 * Algorithm:
 * - If no client cursor: return current interval
 * - If client cursor is malformed or < current interval: return current interval
 * - If client cursor >= current interval: return client cursor + jitter
 *
 * This guarantees monotonic cursor progression and prevents A→B→A cycles.
 *
 * @param clientCursor - The cursor provided by the client (if any)
 * @param options - Configuration for cursor calculation
 * @returns The cursor value to include in the response
 */
function generateResponseCursor(clientCursor, options = {}) {
  const intervalSeconds = options.intervalSeconds ?? DEFAULT_CURSOR_INTERVAL_SECONDS;
  const currentCursor = calculateCursor(options);
  const currentInterval = Number.parseInt(currentCursor, 10);
  if (!clientCursor) return currentCursor;
  const clientInterval = Number.parseInt(clientCursor, 10);
  // Number.isNaN avoids the coercing global isNaN; a non-numeric client
  // cursor falls back to the current interval rather than polluting arithmetic.
  if (Number.isNaN(clientInterval) || clientInterval < currentInterval) return currentCursor;
  // Client is at or ahead of the current interval: add jitter so the
  // response cursor is strictly greater and caches cannot collapse into a loop.
  const jitterIntervals = generateJitterIntervals(intervalSeconds);
  return String(clientInterval + jitterIntervals);
}
966
/**
 * Handle cursor collision by adding random jitter.
 *
 * @deprecated Use generateResponseCursor instead, which handles all cases
 * including monotonicity guarantees.
 *
 * @param currentCursor - The newly calculated cursor value
 * @param previousCursor - The cursor provided by the client (if any)
 * @param options - Configuration for cursor calculation
 * @returns The cursor value to return, with jitter applied if there's a collision
 */
function handleCursorCollision(currentCursor, previousCursor, options = {}) {
  // currentCursor is intentionally ignored: the parameter is kept only for
  // backward compatibility with the old signature; the delegate recomputes it.
  return generateResponseCursor(previousCursor, options);
}
980
+
981
+ //#endregion
982
+ //#region src/server.ts
983
// Protocol header names, SSE control-event field names, and query-parameter
// names shared by all HTTP handlers below.
const STREAM_OFFSET_HEADER = `Stream-Next-Offset`;
const STREAM_CURSOR_HEADER = `Stream-Cursor`;
const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
const STREAM_SEQ_HEADER = `Stream-Seq`;
const STREAM_TTL_HEADER = `Stream-TTL`;
const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
const SSE_OFFSET_FIELD = `streamNextOffset`;
const SSE_CURSOR_FIELD = `streamCursor`;
const SSE_UP_TO_DATE_FIELD = `upToDate`;
const OFFSET_QUERY_PARAM = `offset`;
const LIVE_QUERY_PARAM = `live`;
const CURSOR_QUERY_PARAM = `cursor`;
995
/**
 * Encode data for SSE format.
 * Per SSE spec, each line in the payload needs its own "data:" prefix.
 * Newlines in the payload become separate data: lines, and the event is
 * terminated by a blank line.
 */
function encodeSSEData(payload) {
  const prefixed = [];
  for (const line of payload.split(`\n`)) prefixed.push(`data: ${line}`);
  return `${prefixed.join(`\n`)}\n\n`;
}
1004
/**
 * Minimum response size (in bytes) to consider for compression.
 * Responses smaller than this won't benefit from compression.
 */
const COMPRESSION_THRESHOLD = 1024;
1009
/**
 * Determine the best compression encoding from Accept-Encoding header.
 *
 * Prefers gzip over deflate. Entries carrying an explicit quality of zero
 * (e.g. `gzip;q=0`) are treated as refused, per RFC 9110 content negotiation;
 * a malformed q parameter is treated as q=1 so legacy clients keep working.
 *
 * @param acceptEncoding - Raw Accept-Encoding header value (may be undefined)
 * @returns 'gzip', 'deflate', or null if no compression should be used.
 */
function getCompressionEncoding(acceptEncoding) {
  if (!acceptEncoding) return null;
  const accepted = new Set();
  for (const token of acceptEncoding.toLowerCase().split(`,`)) {
    const [rawName, ...params] = token.trim().split(`;`);
    const name = rawName?.trim();
    if (!name) continue;
    // Look for a quality parameter; q=0 means "do not use this encoding".
    const qParam = params.map((p) => p.trim()).find((p) => p.startsWith(`q=`));
    let quality = 1;
    if (qParam) {
      const parsed = Number.parseFloat(qParam.slice(2));
      if (!Number.isNaN(parsed)) quality = parsed;
    }
    if (quality > 0) accepted.add(name);
  }
  if (accepted.has(`gzip`)) return `gzip`;
  if (accepted.has(`deflate`)) return `deflate`;
  return null;
}
1026
/**
 * Compress data using the specified encoding.
 * `gzip` uses gzipSync; anything else falls through to deflateSync.
 */
function compressData(data, encoding) {
  return encoding === `gzip` ? gzipSync(data) : deflateSync(data);
}
1033
var DurableStreamTestServer = class {
  // Backing store: FileBackedStreamStore when options.dataDir is set, else in-memory.
  store;
  // node:http server instance; null until start() succeeds.
  server = null;
  options;
  // Cached base URL; set in start(), cleared in stop().
  _url = null;
  // Open SSE responses, tracked so stop() can terminate them promptly.
  activeSSEResponses = new Set();
  isShuttingDown = false;
  /** Injected errors for testing retry/resilience */
  injectedErrors = new Map();
  /**
   * @param options - port, host, longPollTimeout (ms), dataDir, lifecycle
   *   hooks (onStreamCreated/onStreamDeleted), compression flag, and cursor
   *   interval/epoch overrides.
   */
  constructor(options = {}) {
    if (options.dataDir) this.store = new FileBackedStreamStore({ dataDir: options.dataDir });
    else this.store = new StreamStore();
    this.options = {
      port: options.port ?? 4437,
      host: options.host ?? `127.0.0.1`,
      longPollTimeout: options.longPollTimeout ?? 3e4,
      dataDir: options.dataDir,
      onStreamCreated: options.onStreamCreated,
      onStreamDeleted: options.onStreamDeleted,
      compression: options.compression ?? true,
      cursorOptions: {
        intervalSeconds: options.cursorIntervalSeconds,
        epoch: options.cursorEpoch
      }
    };
  }
  /**
   * Start the server.
   * Resolves with the server's base URL once it is listening.
   * Throws if the server was already started.
   */
  async start() {
    if (this.server) throw new Error(`Server already started`);
    return new Promise((resolve, reject) => {
      this.server = createServer((req, res) => {
        // All requests flow through handleRequest; unexpected failures get a 500
        // unless headers were already written (e.g. mid-SSE).
        this.handleRequest(req, res).catch((err) => {
          console.error(`Request error:`, err);
          if (!res.headersSent) {
            res.writeHead(500, { "content-type": `text/plain` });
            res.end(`Internal server error`);
          }
        });
      });
      this.server.on(`error`, reject);
      this.server.listen(this.options.port, this.options.host, () => {
        const addr = this.server.address();
        // address() returns a string for pipes/sockets, an object for TCP.
        if (typeof addr === `string`) this._url = addr;
        else if (addr) this._url = `http://${this.options.host}:${addr.port}`;
        resolve(this._url);
      });
    });
  }
  /**
   * Stop the server.
   * Cancels long-poll waiters and ends SSE responses first so close() is not
   * held open by idle connections, then closes the store (file-backed only).
   */
  async stop() {
    if (!this.server) return;
    this.isShuttingDown = true;
    if (`cancelAllWaits` in this.store) this.store.cancelAllWaits();
    for (const res of this.activeSSEResponses) res.end();
    this.activeSSEResponses.clear();
    return new Promise((resolve, reject) => {
      this.server.close(async (err) => {
        if (err) {
          reject(err);
          return;
        }
        try {
          if (this.store instanceof FileBackedStreamStore) await this.store.close();
          this.server = null;
          this._url = null;
          this.isShuttingDown = false;
          resolve();
        } catch (closeErr) {
          reject(closeErr);
        }
      });
    });
  }
  /**
   * Get the server URL. Throws if the server has not been started.
   */
  get url() {
    if (!this._url) throw new Error(`Server not started`);
    return this._url;
  }
  /**
   * Clear all streams.
   */
  clear() {
    this.store.clear();
  }
  /**
   * Inject an error to be returned on the next N requests to a path.
   * Used for testing retry/resilience behavior.
   */
  injectError(path$2, status, count = 1, retryAfter) {
    this.injectedErrors.set(path$2, {
      status,
      count,
      retryAfter
    });
  }
  /**
   * Clear all injected errors.
   */
  clearInjectedErrors() {
    this.injectedErrors.clear();
  }
  /**
   * Check if there's an injected error for this path and consume it.
   * Returns the error config if one should be returned, null otherwise.
   */
  consumeInjectedError(path$2) {
    const error = this.injectedErrors.get(path$2);
    if (!error) return null;
    error.count--;
    if (error.count <= 0) this.injectedErrors.delete(path$2);
    return error;
  }
  // Central dispatcher: sets CORS headers, answers OPTIONS preflight, serves
  // the test error-injection endpoint, then routes by HTTP method.
  async handleRequest(req, res) {
    const url = new URL(req.url ?? `/`, `http://${req.headers.host}`);
    const path$2 = url.pathname;
    const method = req.method?.toUpperCase();
    res.setHeader(`access-control-allow-origin`, `*`);
    res.setHeader(`access-control-allow-methods`, `GET, POST, PUT, DELETE, HEAD, OPTIONS`);
    res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`);
    res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`);
    if (method === `OPTIONS`) {
      res.writeHead(204);
      res.end();
      return;
    }
    if (path$2 === `/_test/inject-error`) {
      await this.handleTestInjectError(method, req, res);
      return;
    }
    const injectedError = this.consumeInjectedError(path$2);
    if (injectedError) {
      const headers = { "content-type": `text/plain` };
      if (injectedError.retryAfter !== void 0) headers[`retry-after`] = injectedError.retryAfter.toString();
      res.writeHead(injectedError.status, headers);
      res.end(`Injected error for testing`);
      return;
    }
    try {
      switch (method) {
        case `PUT`:
          await this.handleCreate(path$2, req, res);
          break;
        case `HEAD`:
          this.handleHead(path$2, res);
          break;
        case `GET`:
          await this.handleRead(path$2, url, req, res);
          break;
        case `POST`:
          await this.handleAppend(path$2, req, res);
          break;
        case `DELETE`:
          await this.handleDelete(path$2, res);
          break;
        default:
          res.writeHead(405, { "content-type": `text/plain` });
          res.end(`Method not allowed`);
      }
    } catch (err) {
      // Map well-known store error messages onto protocol status codes;
      // anything unrecognized is rethrown to the 500 handler in start().
      if (err instanceof Error) if (err.message.includes(`not found`)) {
        res.writeHead(404, { "content-type": `text/plain` });
        res.end(`Stream not found`);
      } else if (err.message.includes(`already exists with different configuration`)) {
        res.writeHead(409, { "content-type": `text/plain` });
        res.end(`Stream already exists with different configuration`);
      } else if (err.message.includes(`Sequence conflict`)) {
        res.writeHead(409, { "content-type": `text/plain` });
        res.end(`Sequence conflict`);
      } else if (err.message.includes(`Content-type mismatch`)) {
        res.writeHead(409, { "content-type": `text/plain` });
        res.end(`Content-type mismatch`);
      } else if (err.message.includes(`Invalid JSON`)) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Invalid JSON`);
      } else if (err.message.includes(`Empty arrays are not allowed`)) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Empty arrays are not allowed`);
      } else throw err;
      else throw err;
    }
  }
  /**
   * Handle PUT - create stream
   * Validates Stream-TTL / Stream-Expires-At (mutually exclusive), creates or
   * re-creates the stream, fires onStreamCreated for new streams, and replies
   * 201 (new, with location) or 200 (already existed).
   */
  async handleCreate(path$2, req, res) {
    let contentType = req.headers[`content-type`];
    // Fall back to octet-stream when the header is missing or not a
    // plausible `type/subtype` media type.
    if (!contentType || contentType.trim() === `` || !/^[\w-]+\/[\w-]+/.test(contentType)) contentType = `application/octet-stream`;
    const ttlHeader = req.headers[STREAM_TTL_HEADER.toLowerCase()];
    const expiresAtHeader = req.headers[STREAM_EXPIRES_AT_HEADER.toLowerCase()];
    if (ttlHeader && expiresAtHeader) {
      res.writeHead(400, { "content-type": `text/plain` });
      res.end(`Cannot specify both Stream-TTL and Stream-Expires-At`);
      return;
    }
    let ttlSeconds;
    if (ttlHeader) {
      // Only a plain non-negative decimal integer is accepted (no sign,
      // leading zeros, or fractions).
      const ttlPattern = /^(0|[1-9]\d*)$/;
      if (!ttlPattern.test(ttlHeader)) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Invalid Stream-TTL value`);
        return;
      }
      ttlSeconds = parseInt(ttlHeader, 10);
      if (isNaN(ttlSeconds) || ttlSeconds < 0) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Invalid Stream-TTL value`);
        return;
      }
    }
    if (expiresAtHeader) {
      const timestamp = new Date(expiresAtHeader);
      if (isNaN(timestamp.getTime())) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Invalid Stream-Expires-At timestamp`);
        return;
      }
    }
    const body = await this.readBody(req);
    const isNew = !this.store.has(path$2);
    await Promise.resolve(this.store.create(path$2, {
      contentType,
      ttlSeconds,
      expiresAt: expiresAtHeader,
      initialData: body.length > 0 ? body : void 0
    }));
    const stream = this.store.get(path$2);
    // Lifecycle hook fires only for genuinely new streams.
    if (isNew && this.options.onStreamCreated) await Promise.resolve(this.options.onStreamCreated({
      type: `created`,
      path: path$2,
      contentType,
      timestamp: Date.now()
    }));
    const headers = {
      "content-type": contentType,
      [STREAM_OFFSET_HEADER]: stream.currentOffset
    };
    if (isNew) headers[`location`] = `${this._url}${path$2}`;
    res.writeHead(isNew ? 201 : 200, headers);
    res.end();
  }
  /**
   * Handle HEAD - get metadata
   * Replies with current offset, content-type, and a synthetic etag; no body.
   */
  handleHead(path$2, res) {
    const stream = this.store.get(path$2);
    if (!stream) {
      res.writeHead(404, { "content-type": `text/plain` });
      res.end();
      return;
    }
    const headers = { [STREAM_OFFSET_HEADER]: stream.currentOffset };
    if (stream.contentType) headers[`content-type`] = stream.contentType;
    // Etag shape mirrors handleRead: base64(path):startOffset:endOffset,
    // with -1 standing in for "from the beginning".
    headers[`etag`] = `"${Buffer.from(path$2).toString(`base64`)}:-1:${stream.currentOffset}"`;
    res.writeHead(200, headers);
    res.end();
  }
  /**
   * Handle GET - read data
   * Supports plain reads, `live=long-poll` (blocks when caught up, 204 on
   * timeout), and `live=sse` (delegated to handleSSE). Validates the offset
   * parameter, honors If-None-Match, and optionally compresses the body.
   */
  async handleRead(path$2, url, req, res) {
    const stream = this.store.get(path$2);
    if (!stream) {
      res.writeHead(404, { "content-type": `text/plain` });
      res.end(`Stream not found`);
      return;
    }
    const offset = url.searchParams.get(OFFSET_QUERY_PARAM) ?? void 0;
    const live = url.searchParams.get(LIVE_QUERY_PARAM);
    const cursor = url.searchParams.get(CURSOR_QUERY_PARAM) ?? void 0;
    if (offset !== void 0) {
      if (offset === ``) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Empty offset parameter`);
        return;
      }
      const allOffsets = url.searchParams.getAll(OFFSET_QUERY_PARAM);
      if (allOffsets.length > 1) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Multiple offset parameters not allowed`);
        return;
      }
      // Valid offsets are "-1" (from start) or "<n>_<n>" pairs.
      const validOffsetPattern = /^(-1|\d+_\d+)$/;
      if (!validOffsetPattern.test(offset)) {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Invalid offset format`);
        return;
      }
    }
    if ((live === `long-poll` || live === `sse`) && !offset) {
      res.writeHead(400, { "content-type": `text/plain` });
      res.end(`${live === `sse` ? `SSE` : `Long-poll`} requires offset parameter`);
      return;
    }
    if (live === `sse`) {
      await this.handleSSE(path$2, stream, offset, cursor, res);
      return;
    }
    let { messages, upToDate } = this.store.read(path$2, offset);
    // Block only when the client is exactly at the tail and the read was empty.
    const clientIsCaughtUp = offset && offset === stream.currentOffset;
    if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
      const result = await this.store.waitForMessages(path$2, offset, this.options.longPollTimeout);
      if (result.timedOut) {
        const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
        res.writeHead(204, {
          [STREAM_OFFSET_HEADER]: offset,
          [STREAM_UP_TO_DATE_HEADER]: `true`,
          [STREAM_CURSOR_HEADER]: responseCursor
        });
        res.end();
        return;
      }
      messages = result.messages;
      upToDate = true;
    }
    const headers = {};
    if (stream.contentType) headers[`content-type`] = stream.contentType;
    const lastMessage = messages[messages.length - 1];
    const responseOffset = lastMessage?.offset ?? stream.currentOffset;
    headers[STREAM_OFFSET_HEADER] = responseOffset;
    if (live === `long-poll`) headers[STREAM_CURSOR_HEADER] = generateResponseCursor(cursor, this.options.cursorOptions);
    if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
    const startOffset = offset ?? `-1`;
    const etag = `"${Buffer.from(path$2).toString(`base64`)}:${startOffset}:${responseOffset}"`;
    headers[`etag`] = etag;
    // NOTE(review): exact string comparison only — comma-separated
    // If-None-Match lists and weak validators are not handled.
    const ifNoneMatch = req.headers[`if-none-match`];
    if (ifNoneMatch && ifNoneMatch === etag) {
      res.writeHead(304, { etag });
      res.end();
      return;
    }
    const responseData = this.store.formatResponse(path$2, messages);
    let finalData = responseData;
    if (this.options.compression && responseData.length >= COMPRESSION_THRESHOLD) {
      const acceptEncoding = req.headers[`accept-encoding`];
      const encoding = getCompressionEncoding(acceptEncoding);
      if (encoding) {
        finalData = compressData(responseData, encoding);
        headers[`content-encoding`] = encoding;
        headers[`vary`] = `accept-encoding`;
      }
    }
    res.writeHead(200, headers);
    res.end(Buffer.from(finalData));
  }
  /**
   * Handle SSE (Server-Sent Events) mode
   * Loops until disconnect or shutdown: emits a `data` event per message, a
   * `control` event with offset/cursor after each batch, and keep-alive
   * control events when a wait for new messages times out.
   */
  async handleSSE(path$2, stream, initialOffset, cursor, res) {
    this.activeSSEResponses.add(res);
    res.writeHead(200, {
      "content-type": `text/event-stream`,
      "cache-control": `no-cache`,
      connection: `keep-alive`,
      "access-control-allow-origin": `*`
    });
    let currentOffset = initialOffset;
    let isConnected = true;
    const decoder = new TextDecoder();
    res.on(`close`, () => {
      isConnected = false;
      this.activeSSEResponses.delete(res);
    });
    const isJsonStream = stream?.contentType?.includes(`application/json`);
    while (isConnected && !this.isShuttingDown) {
      const { messages, upToDate } = this.store.read(path$2, currentOffset);
      for (const message of messages) {
        let dataPayload;
        if (isJsonStream) {
          // JSON streams are re-serialized per message via the store formatter.
          const jsonBytes = this.store.formatResponse(path$2, [message]);
          dataPayload = decoder.decode(jsonBytes);
        } else dataPayload = decoder.decode(message.data);
        res.write(`event: data\n`);
        res.write(encodeSSEData(dataPayload));
        currentOffset = message.offset;
      }
      const controlOffset = messages[messages.length - 1]?.offset ?? stream.currentOffset;
      const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
      const controlData = {
        [SSE_OFFSET_FIELD]: controlOffset,
        [SSE_CURSOR_FIELD]: responseCursor
      };
      if (upToDate) controlData[SSE_UP_TO_DATE_FIELD] = true;
      res.write(`event: control\n`);
      res.write(encodeSSEData(JSON.stringify(controlData)));
      currentOffset = controlOffset;
      if (upToDate) {
        const result = await this.store.waitForMessages(path$2, currentOffset, this.options.longPollTimeout);
        // Re-check liveness after the await: the connection may have closed
        // or shutdown may have begun while we were waiting.
        if (this.isShuttingDown || !isConnected) break;
        if (result.timedOut) {
          const keepAliveCursor = generateResponseCursor(cursor, this.options.cursorOptions);
          const keepAliveData = {
            [SSE_OFFSET_FIELD]: currentOffset,
            [SSE_CURSOR_FIELD]: keepAliveCursor,
            [SSE_UP_TO_DATE_FIELD]: true
          };
          res.write(`event: control\n`);
          res.write(encodeSSEData(JSON.stringify(keepAliveData)));
        }
      }
    }
    this.activeSSEResponses.delete(res);
    res.end();
  }
  /**
   * Handle POST - append data
   * Requires a non-empty body and a Content-Type header; replies with the
   * new message's offset.
   */
  async handleAppend(path$2, req, res) {
    const contentType = req.headers[`content-type`];
    const seq = req.headers[STREAM_SEQ_HEADER.toLowerCase()];
    const body = await this.readBody(req);
    if (body.length === 0) {
      res.writeHead(400, { "content-type": `text/plain` });
      res.end(`Empty body`);
      return;
    }
    if (!contentType) {
      res.writeHead(400, { "content-type": `text/plain` });
      res.end(`Content-Type header is required`);
      return;
    }
    const message = await Promise.resolve(this.store.append(path$2, body, {
      seq,
      contentType
    }));
    res.writeHead(200, { [STREAM_OFFSET_HEADER]: message.offset });
    res.end();
  }
  /**
   * Handle DELETE - delete stream
   * 404 when absent; otherwise deletes, fires onStreamDeleted, and replies 204.
   */
  async handleDelete(path$2, res) {
    if (!this.store.has(path$2)) {
      res.writeHead(404, { "content-type": `text/plain` });
      res.end(`Stream not found`);
      return;
    }
    this.store.delete(path$2);
    if (this.options.onStreamDeleted) await Promise.resolve(this.options.onStreamDeleted({
      type: `deleted`,
      path: path$2,
      timestamp: Date.now()
    }));
    res.writeHead(204);
    res.end();
  }
  /**
   * Handle test control endpoints for error injection.
   * POST /_test/inject-error - inject an error
   * DELETE /_test/inject-error - clear all injected errors
   */
  async handleTestInjectError(method, req, res) {
    if (method === `POST`) {
      const body = await this.readBody(req);
      try {
        const config = JSON.parse(new TextDecoder().decode(body));
        if (!config.path || !config.status) {
          res.writeHead(400, { "content-type": `text/plain` });
          res.end(`Missing required fields: path, status`);
          return;
        }
        this.injectError(config.path, config.status, config.count ?? 1, config.retryAfter);
        res.writeHead(200, { "content-type": `application/json` });
        res.end(JSON.stringify({ ok: true }));
      } catch {
        res.writeHead(400, { "content-type": `text/plain` });
        res.end(`Invalid JSON body`);
      }
    } else if (method === `DELETE`) {
      this.clearInjectedErrors();
      res.writeHead(200, { "content-type": `application/json` });
      res.end(JSON.stringify({ ok: true }));
    } else {
      res.writeHead(405, { "content-type": `text/plain` });
      res.end(`Method not allowed`);
    }
  }
  // Buffer the entire request body into a Uint8Array.
  // NOTE(review): no size cap — acceptable for a test server, but unbounded
  // bodies would be a DoS vector in production use.
  readBody(req) {
    return new Promise((resolve, reject) => {
      const chunks = [];
      req.on(`data`, (chunk) => {
        chunks.push(chunk);
      });
      req.on(`end`, () => {
        const body = Buffer.concat(chunks);
        resolve(new Uint8Array(body));
      });
      req.on(`error`, reject);
    });
  }
};
1529
+
1530
+ //#endregion
1531
+ //#region src/registry-hook.ts
1532
// Well-known path of the registry stream that records stream lifecycle events.
const REGISTRY_PATH = `/v1/stream/__registry__`;
// Standard-schema-compatible validator for registry entries:
// { path: non-empty string, contentType: non-empty string, createdAt: number }.
const streamMetadataSchema = { "~standard": {
  version: 1,
  vendor: `durable-streams`,
  validate: (value) => {
    const fail = (message) => ({ issues: [{ message }] });
    const isNonEmptyString = (s) => typeof s === `string` && s.length > 0;
    if (typeof value !== `object` || value === null) return fail(`value must be an object`);
    const data = value;
    if (!isNonEmptyString(data.path)) return fail(`path must be a non-empty string`);
    if (!isNonEmptyString(data.contentType)) return fail(`contentType must be a non-empty string`);
    if (typeof data.createdAt !== `number`) return fail(`createdAt must be a number`);
    return { value: data };
  }
} };
1545
// State schema for the registry: a single `streams` collection of
// stream-metadata records keyed by `path`, validated by streamMetadataSchema.
const registryStateSchema = createStateSchema({ streams: {
  schema: streamMetadataSchema,
  type: `stream`,
  primaryKey: `path`
} });
1550
/**
 * Creates lifecycle hooks that write to a __registry__ stream.
 * Any client can read this stream to discover all streams and their lifecycle events.
 *
 * @param store - the stream store, used only to check registry existence
 * @param serverUrl - base URL of the server hosting the registry stream
 * @returns { onStreamCreated, onStreamDeleted } hooks for the server options
 */
function createRegistryHooks(store, serverUrl) {
  const registryUrl = `${serverUrl}${REGISTRY_PATH}`;
  const registryStream = new DurableStream({
    url: registryUrl,
    contentType: `application/json`
  });
  // Lazily create the registry stream the first time a hook fires.
  const ensureRegistryExists = async () => {
    if (store.has(REGISTRY_PATH)) return;
    await DurableStream.create({
      url: registryUrl,
      contentType: `application/json`
    });
  };
  // Strip the protocol prefix so registry keys are bare stream names.
  const extractStreamName = (fullPath) => fullPath.replace(/^\/v1\/stream\//, ``);
  const onStreamCreated = async (event) => {
    await ensureRegistryExists();
    const streamName = extractStreamName(event.path);
    const changeEvent = registryStateSchema.streams.insert({
      key: streamName,
      value: {
        path: streamName,
        contentType: event.contentType || `application/octet-stream`,
        createdAt: event.timestamp
      }
    });
    await registryStream.append(changeEvent);
  };
  const onStreamDeleted = async (event) => {
    await ensureRegistryExists();
    const streamName = extractStreamName(event.path);
    await registryStream.append(registryStateSchema.streams.delete({ key: streamName }));
  };
  return { onStreamCreated, onStreamDeleted };
}
1590
+
1591
+ //#endregion
1592
+ export { DEFAULT_CURSOR_EPOCH, DEFAULT_CURSOR_INTERVAL_SECONDS, DurableStreamTestServer, FileBackedStreamStore, StreamStore, calculateCursor, createRegistryHooks, decodeStreamPath, encodeStreamPath, generateResponseCursor, handleCursorCollision };