@durable-streams/server 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +167 -0
- package/dist/index.cjs +1682 -0
- package/dist/index.d.cts +518 -0
- package/dist/index.d.ts +26 -2
- package/dist/index.js +83 -26
- package/package.json +4 -4
- package/src/file-store.ts +58 -16
- package/src/store.ts +59 -10
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1682 @@
|
|
|
1
|
+
"use strict";

//#region rolldown:runtime
// Bundler-generated CommonJS interop helpers (rolldown). Do not edit by hand.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and anything `to` already has, preserving each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
    key = keys[i];
    if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
      get: ((k) => from[k]).bind(null, key),
      enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
    });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module namespace:
// for non-ESM modules the original exports object is attached as `default`.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
  value: mod,
  enumerable: true
}) : target, mod));
|
|
23
|
+
|
|
24
|
+
//#endregion
|
|
25
|
+
const node_http = __toESM(require("node:http"));
|
|
26
|
+
const node_zlib = __toESM(require("node:zlib"));
|
|
27
|
+
const node_fs = __toESM(require("node:fs"));
|
|
28
|
+
const node_path = __toESM(require("node:path"));
|
|
29
|
+
const node_crypto = __toESM(require("node:crypto"));
|
|
30
|
+
const lmdb = __toESM(require("lmdb"));
|
|
31
|
+
const __neophi_sieve_cache = __toESM(require("@neophi/sieve-cache"));
|
|
32
|
+
const node_fs_promises = __toESM(require("node:fs/promises"));
|
|
33
|
+
const __durable_streams_client = __toESM(require("@durable-streams/client"));
|
|
34
|
+
const __durable_streams_state = __toESM(require("@durable-streams/state"));
|
|
35
|
+
|
|
36
|
+
//#region src/store.ts
|
|
37
|
+
/**
 * Normalize a content-type header to its bare media type: everything
 * before the first `;` (dropping parameters such as `charset`),
 * trimmed and lowercased. Handles "application/json; charset=utf-8".
 *
 * @param contentType - raw header value, possibly undefined
 * @returns the normalized media type, or `` when no value was given
 */
function normalizeContentType(contentType) {
  if (!contentType) {
    return ``;
  }
  const semicolonIndex = contentType.indexOf(`;`);
  const mediaType = semicolonIndex === -1 ? contentType : contentType.slice(0, semicolonIndex);
  return mediaType.trim().toLowerCase();
}
|
|
45
|
+
/**
 * Prepare a JSON payload for appending to a JSON-mode stream.
 * - Validates the bytes parse as JSON
 * - Arrays are flattened: each element is re-serialized and joined with commas
 * - A trailing comma is always appended so chunks concatenate cleanly
 *
 * @param data - UTF-8 encoded JSON bytes
 * @param isInitialCreate - when true, an empty array yields an empty payload
 *   (creates an empty stream) instead of throwing
 * @returns UTF-8 bytes of the comma-terminated serialized content
 * @throws Error if the JSON is invalid, or the array is empty on a
 *   non-create append
 */
function processJsonAppend(data, isInitialCreate = false) {
  let parsed;
  try {
    parsed = JSON.parse(new TextDecoder().decode(data));
  } catch {
    throw new Error(`Invalid JSON`);
  }
  if (!Array.isArray(parsed)) {
    return new TextEncoder().encode(JSON.stringify(parsed) + `,`);
  }
  if (parsed.length === 0) {
    if (isInitialCreate) {
      return new Uint8Array(0);
    }
    throw new Error(`Empty arrays are not allowed`);
  }
  const serialized = parsed.map((element) => JSON.stringify(element)).join(`,`);
  return new TextEncoder().encode(`${serialized},`);
}
|
|
72
|
+
/**
 * Format stored JSON-mode bytes for a response by wrapping them in array
 * brackets. The storage format keeps a trailing comma after every element,
 * which is stripped (after trailing whitespace) before wrapping.
 *
 * @param data - concatenated, comma-terminated element bytes
 * @returns UTF-8 bytes of a valid JSON array (`[]` for empty input)
 */
function formatJsonResponse(data) {
  const encoder = new TextEncoder();
  if (data.length === 0) {
    return encoder.encode(`[]`);
  }
  const body = new TextDecoder().decode(data).trimEnd().replace(/,$/, ``);
  return encoder.encode(`[${body}]`);
}
|
|
84
|
+
/**
 * In-memory store for durable streams.
 *
 * Streams are append-only message logs keyed by path. Expiry (TTL or
 * absolute Expires-At) is enforced lazily on access. Long-poll waiters are
 * tracked in `pendingLongPolls` and resolved when data arrives, when their
 * stream is deleted, or on shutdown.
 *
 * Fix vs. previous version: `clear()` and `cancelAllWaits()` iterated
 * `this.pendingLongPolls` directly while each `pending.resolve([])` spliced
 * the same array (via `removePendingLongPoll`), causing `for...of` to skip
 * entries — some waiters were never resolved and their timers never
 * cleared. Draining now always works on a snapshot (`drainLongPolls`).
 */
var StreamStore = class {
  streams = new Map();
  pendingLongPolls = [];
  /**
   * Check if a stream is expired based on TTL or Expires-At.
   * An unparseable Expires-At value counts as expired.
   */
  isExpired(stream) {
    const now = Date.now();
    if (stream.expiresAt) {
      const expiryTime = new Date(stream.expiresAt).getTime();
      if (!Number.isFinite(expiryTime) || now >= expiryTime) return true;
    }
    if (stream.ttlSeconds !== void 0) {
      const expiryTime = stream.createdAt + stream.ttlSeconds * 1e3;
      if (now >= expiryTime) return true;
    }
    return false;
  }
  /**
   * Get a stream, deleting it if expired (lazy expiry).
   * Returns undefined if stream doesn't exist or is expired.
   */
  getIfNotExpired(path) {
    const stream = this.streams.get(path);
    if (!stream) return void 0;
    if (this.isExpired(stream)) {
      this.delete(path);
      return void 0;
    }
    return stream;
  }
  /**
   * Create a new stream.
   * @throws Error if stream already exists with different config
   * @returns existing stream if config matches (idempotent)
   */
  create(path, options = {}) {
    const existing = this.getIfNotExpired(path);
    if (existing) {
      // Compare media types only; missing content type defaults to octet-stream.
      const contentTypeMatches = (normalizeContentType(options.contentType) || `application/octet-stream`) === (normalizeContentType(existing.contentType) || `application/octet-stream`);
      const ttlMatches = options.ttlSeconds === existing.ttlSeconds;
      const expiresMatches = options.expiresAt === existing.expiresAt;
      if (contentTypeMatches && ttlMatches && expiresMatches) return existing;
      else throw new Error(`Stream already exists with different configuration: ${path}`);
    }
    const stream = {
      path,
      contentType: options.contentType,
      messages: [],
      currentOffset: `0000000000000000_0000000000000000`,
      ttlSeconds: options.ttlSeconds,
      expiresAt: options.expiresAt,
      createdAt: Date.now()
    };
    if (options.initialData && options.initialData.length > 0) this.appendToStream(stream, options.initialData, true);
    this.streams.set(path, stream);
    return stream;
  }
  /**
   * Get a stream by path.
   * Returns undefined if stream doesn't exist or is expired.
   */
  get(path) {
    return this.getIfNotExpired(path);
  }
  /**
   * Check if a stream exists (and is not expired).
   */
  has(path) {
    return this.getIfNotExpired(path) !== void 0;
  }
  /**
   * Delete a stream, cancelling any long-polls waiting on it.
   * @returns true if the stream existed
   */
  delete(path) {
    this.cancelLongPollsForStream(path);
    return this.streams.delete(path);
  }
  /**
   * Append data to a stream and wake any long-poll waiters.
   * @throws Error if stream doesn't exist or is expired
   * @throws Error if seq is lower than or equal to lastSeq
   * @throws Error if JSON mode and array is empty
   * @returns the appended message, or null when JSON processing produced
   *   no bytes (empty-array initial create)
   */
  append(path, data, options = {}) {
    const stream = this.getIfNotExpired(path);
    if (!stream) throw new Error(`Stream not found: ${path}`);
    if (options.contentType && stream.contentType) {
      const providedType = normalizeContentType(options.contentType);
      const streamType = normalizeContentType(stream.contentType);
      if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${stream.contentType}, got ${options.contentType}`);
    }
    if (options.seq !== void 0) {
      if (stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${stream.lastSeq}`);
      stream.lastSeq = options.seq;
    }
    const message = this.appendToStream(stream, data);
    this.notifyLongPolls(path);
    return message;
  }
  /**
   * Read messages from a stream starting *after* the given offset.
   * Omitting the offset (or passing `-1`) returns all messages.
   * @throws Error if stream doesn't exist or is expired
   */
  read(path, offset) {
    const stream = this.getIfNotExpired(path);
    if (!stream) throw new Error(`Stream not found: ${path}`);
    if (!offset || offset === `-1`) return {
      messages: [...stream.messages],
      upToDate: true
    };
    const offsetIndex = this.findOffsetIndex(stream, offset);
    if (offsetIndex === -1) return {
      messages: [],
      upToDate: true
    };
    return {
      messages: stream.messages.slice(offsetIndex),
      upToDate: true
    };
  }
  /**
   * Format messages for response by concatenating their payloads.
   * For JSON mode, wraps the concatenated data in array brackets.
   * @throws Error if stream doesn't exist or is expired
   */
  formatResponse(path, messages) {
    const stream = this.getIfNotExpired(path);
    if (!stream) throw new Error(`Stream not found: ${path}`);
    const totalSize = messages.reduce((sum, m) => sum + m.data.length, 0);
    const concatenated = new Uint8Array(totalSize);
    let offset = 0;
    for (const msg of messages) {
      concatenated.set(msg.data, offset);
      offset += msg.data.length;
    }
    if (normalizeContentType(stream.contentType) === `application/json`) return formatJsonResponse(concatenated);
    return concatenated;
  }
  /**
   * Wait for new messages (long-poll). Resolves immediately when data past
   * `offset` already exists; otherwise registers a waiter that is resolved
   * by the next append, by stream deletion, or by the timeout.
   * @throws Error if stream doesn't exist or is expired
   */
  async waitForMessages(path, offset, timeoutMs) {
    const stream = this.getIfNotExpired(path);
    if (!stream) throw new Error(`Stream not found: ${path}`);
    const { messages } = this.read(path, offset);
    if (messages.length > 0) return {
      messages,
      timedOut: false
    };
    return new Promise((resolve) => {
      const timeoutId = setTimeout(() => {
        this.removePendingLongPoll(pending);
        resolve({
          messages: [],
          timedOut: true
        });
      }, timeoutMs);
      const pending = {
        path,
        offset,
        // Wrapper clears the timer and deregisters itself before resolving,
        // so a waiter can be safely resolved from any code path.
        resolve: (msgs) => {
          clearTimeout(timeoutId);
          this.removePendingLongPoll(pending);
          resolve({
            messages: msgs,
            timedOut: false
          });
        },
        timeoutId
      };
      this.pendingLongPolls.push(pending);
    });
  }
  /**
   * Get the current offset for a stream.
   * Returns undefined if stream doesn't exist or is expired.
   */
  getCurrentOffset(path) {
    return this.getIfNotExpired(path)?.currentOffset;
  }
  /**
   * Clear all streams and resolve any outstanding long-polls.
   */
  clear() {
    this.drainLongPolls(this.pendingLongPolls.slice());
    this.pendingLongPolls = [];
    this.streams.clear();
  }
  /**
   * Cancel all pending long-polls (used during shutdown).
   */
  cancelAllWaits() {
    this.drainLongPolls(this.pendingLongPolls.slice());
    this.pendingLongPolls = [];
  }
  /**
   * Get all stream paths (including any not-yet-collected expired ones).
   */
  list() {
    return Array.from(this.streams.keys());
  }
  /**
   * Resolve each long-poll in `polls` with an empty result and clear its
   * timer. Always pass a snapshot: resolving a waiter splices it out of
   * `pendingLongPolls`, which would skip entries if the live array were
   * iterated directly.
   */
  drainLongPolls(polls) {
    for (const pending of polls) {
      clearTimeout(pending.timeoutId);
      pending.resolve([]);
    }
  }
  // Core append: JSON-mode payloads are validated/flattened first; the
  // stream offset is `{readSeq}_{byteOffset}` with 16-digit zero padding.
  appendToStream(stream, data, isInitialCreate = false) {
    let processedData = data;
    if (normalizeContentType(stream.contentType) === `application/json`) {
      processedData = processJsonAppend(data, isInitialCreate);
      if (processedData.length === 0) return null;
    }
    const parts = stream.currentOffset.split(`_`).map(Number);
    const readSeq = parts[0];
    const byteOffset = parts[1];
    const newByteOffset = byteOffset + processedData.length;
    const newOffset = `${String(readSeq).padStart(16, `0`)}_${String(newByteOffset).padStart(16, `0`)}`;
    const message = {
      data: processedData,
      offset: newOffset,
      timestamp: Date.now()
    };
    stream.messages.push(message);
    stream.currentOffset = newOffset;
    return message;
  }
  // Index of the first message strictly past `offset`; -1 when caught up.
  // Offsets are fixed-width zero-padded, so string comparison is ordinal.
  findOffsetIndex(stream, offset) {
    for (let i = 0; i < stream.messages.length; i++) if (stream.messages[i].offset > offset) return i;
    return -1;
  }
  // Wake waiters on `path` that now have data available. Iterates a
  // filtered copy because resolving mutates `pendingLongPolls`.
  notifyLongPolls(path) {
    const toNotify = this.pendingLongPolls.filter((p) => p.path === path);
    for (const pending of toNotify) {
      const { messages } = this.read(path, pending.offset);
      if (messages.length > 0) pending.resolve(messages);
    }
  }
  // Resolve-and-remove every waiter attached to a deleted stream.
  cancelLongPollsForStream(path) {
    this.drainLongPolls(this.pendingLongPolls.filter((p) => p.path === path));
    this.pendingLongPolls = this.pendingLongPolls.filter((p) => p.path !== path);
  }
  removePendingLongPoll(pending) {
    const index = this.pendingLongPolls.indexOf(pending);
    if (index !== -1) this.pendingLongPolls.splice(index, 1);
  }
};
|
|
340
|
+
|
|
341
|
+
//#endregion
|
|
342
|
+
//#region src/path-encoding.ts
|
|
343
|
+
const MAX_ENCODED_LENGTH = 200;
/**
 * Encode a stream path to a filesystem-safe directory name using base64url
 * (unpadded, `-`/`_` alphabet). Long paths (encoding > 200 chars) are
 * truncated and suffixed with a 16-hex-digit sha256 prefix of the original
 * path to keep directory names short yet unique.
 *
 * @example
 * encodeStreamPath("/stream/users:created") → "L3N0cmVhbS91c2VyczpjcmVhdGVk"
 */
function encodeStreamPath(path) {
  // Node's `base64url` is exactly base64 with `+/` → `-_` and no padding.
  const encoded = Buffer.from(path, `utf-8`).toString(`base64url`);
  if (encoded.length <= MAX_ENCODED_LENGTH) {
    return encoded;
  }
  const hash = (0, node_crypto.createHash)(`sha256`).update(path).digest(`hex`).slice(0, 16);
  return `${encoded.slice(0, 180)}~${hash}`;
}
|
|
359
|
+
/**
 * Decode a filesystem-safe directory name back to the original stream path.
 * A `~{16 hex chars}` suffix (added by encodeStreamPath for over-long
 * paths) is stripped before decoding; note the truncated prefix of a
 * hashed name cannot round-trip to the full original path.
 *
 * @example
 * decodeStreamPath("L3N0cmVhbS91c2VyczpjcmVhdGVk") → "/stream/users:created"
 */
function decodeStreamPath(encoded) {
  let payload = encoded;
  const tildeIndex = encoded.lastIndexOf(`~`);
  if (tildeIndex !== -1 && /^[0-9a-f]{16}$/.test(encoded.slice(tildeIndex + 1))) {
    payload = encoded.slice(0, tildeIndex);
  }
  // `base64url` decoding accepts the unpadded `-`/`_` alphabet directly.
  return Buffer.from(payload, `base64url`).toString(`utf-8`);
}
|
|
376
|
+
|
|
377
|
+
//#endregion
|
|
378
|
+
//#region src/file-manager.ts
|
|
379
|
+
/**
 * Filesystem layout manager for stream data. Every stream gets its own
 * directory (named by encodeStreamPath) under `streamsDir`, containing
 * numbered segment log files.
 */
var StreamFileManager = class {
  constructor(streamsDir) {
    this.streamsDir = streamsDir;
  }
  /**
   * Create a directory for a new stream and initialize an empty first
   * segment file.
   * @returns the absolute path to the stream directory
   */
  async createStreamDirectory(streamPath) {
    const dir = node_path.join(this.streamsDir, encodeStreamPath(streamPath));
    await node_fs_promises.mkdir(dir, { recursive: true });
    await node_fs_promises.writeFile(node_path.join(dir, `segment_00000.log`), ``);
    return dir;
  }
  /**
   * Delete a stream directory and all its contents (no-op if absent).
   */
  async deleteStreamDirectory(streamPath) {
    const dir = node_path.join(this.streamsDir, encodeStreamPath(streamPath));
    await node_fs_promises.rm(dir, {
      recursive: true,
      force: true
    });
  }
  /**
   * Delete a directory by its exact name (used for unique directory names).
   */
  async deleteDirectoryByName(directoryName) {
    const dir = node_path.join(this.streamsDir, directoryName);
    await node_fs_promises.rm(dir, {
      recursive: true,
      force: true
    });
  }
  /**
   * Get the absolute path to a stream's directory, or null if it doesn't
   * exist (or isn't accessible).
   */
  async getStreamDirectory(streamPath) {
    const dir = node_path.join(this.streamsDir, encodeStreamPath(streamPath));
    try {
      await node_fs_promises.access(dir);
    } catch {
      return null;
    }
    return dir;
  }
  /**
   * List all stream paths by scanning the streams directory.
   * Returns [] when the directory is missing or unreadable.
   */
  async listStreamPaths() {
    try {
      const entries = await node_fs_promises.readdir(this.streamsDir, { withFileTypes: true });
      return entries.filter((entry) => entry.isDirectory()).map((entry) => decodeStreamPath(entry.name));
    } catch {
      return [];
    }
  }
  /**
   * Get the path to a segment file within a stream directory.
   *
   * @param streamDir - Absolute path to the stream directory
   * @param index - Segment index (0-based, zero-padded to 5 digits)
   */
  getSegmentPath(streamDir, index) {
    return node_path.join(streamDir, `segment_${String(index).padStart(5, `0`)}.log`);
  }
};
|
|
452
|
+
|
|
453
|
+
//#endregion
|
|
454
|
+
//#region src/file-store.ts
|
|
455
|
+
/**
 * Bounded pool of append-mode write streams keyed by file path.
 * A SIEVE cache bounds open fds; evicted handles are closed
 * asynchronously (best effort).
 */
var FileHandlePool = class {
  // Map-like SIEVE cache: filePath -> { stream }
  cache;
  constructor(maxSize) {
    this.cache = new __neophi_sieve_cache.SieveCache(maxSize, { evictHook: (_key, handle) => {
      // Eviction close is fire-and-forget; failures are only logged.
      this.closeHandle(handle).catch((err) => {
        console.error(`[FileHandlePool] Error closing evicted handle:`, err);
      });
    } });
  }
  // Return the cached append stream for `filePath`, opening one on a miss
  // (flag `a` appends and creates the file when absent).
  getWriteStream(filePath) {
    let handle = this.cache.get(filePath);
    if (!handle) {
      const stream = node_fs.createWriteStream(filePath, { flags: `a` });
      handle = { stream };
      this.cache.set(filePath, handle);
    }
    return handle.stream;
  }
  /**
   * Flush a specific file to disk immediately.
   * This is called after each append to ensure durability.
   * Resolves as a no-op when the path has no pooled handle.
   */
  async fsyncFile(filePath) {
    const handle = this.cache.get(filePath);
    if (!handle) return;
    return new Promise((resolve, reject) => {
      const fd = handle.stream.fd;
      if (typeof fd !== `number`) {
        // The stream hasn't finished opening yet: wait for `open` (or
        // `error`) before issuing the sync, detaching the other listener
        // so the promise settles exactly once.
        const onOpen = (openedFd) => {
          handle.stream.off(`error`, onError);
          node_fs.fdatasync(openedFd, (err) => {
            if (err) reject(err);
            else resolve();
          });
        };
        const onError = (err) => {
          handle.stream.off(`open`, onOpen);
          reject(err);
        };
        handle.stream.once(`open`, onOpen);
        handle.stream.once(`error`, onError);
        return;
      }
      // fdatasync: flushes file data without forcing a metadata flush.
      node_fs.fdatasync(fd, (err) => {
        if (err) reject(err);
        else resolve();
      });
    });
  }
  // Close every pooled handle in parallel and empty the cache (shutdown).
  async closeAll() {
    const promises = [];
    for (const [_key, handle] of this.cache.entries()) promises.push(this.closeHandle(handle));
    await Promise.all(promises);
    this.cache.clear();
  }
  /**
   * Close a specific file handle if it exists in the cache.
   * Useful for cleanup before deleting files.
   */
  async closeFileHandle(filePath) {
    const handle = this.cache.get(filePath);
    if (handle) {
      await this.closeHandle(handle);
      this.cache.delete(filePath);
    }
  }
  // end() flushes any buffered writes before the callback fires; close
  // errors are not surfaced here (the promise always resolves).
  async closeHandle(handle) {
    return new Promise((resolve) => {
      handle.stream.end(() => resolve());
    });
  }
};
|
|
527
|
+
/**
 * Generate a unique on-disk directory name for a stream.
 * Format: {encoded_path}~{timestamp_base36}~{random_hex}
 * The unique suffix lets a deleted stream's path be recreated immediately
 * while the old directory is still being removed asynchronously.
 */
function generateUniqueDirectoryName(streamPath) {
  const suffix = `${Date.now().toString(36)}~${(0, node_crypto.randomBytes)(4).toString(`hex`)}`;
  return `${encodeStreamPath(streamPath)}~${suffix}`;
}
|
|
538
|
+
/**
|
|
539
|
+
* File-backed implementation of StreamStore.
|
|
540
|
+
* Maintains the same interface as the in-memory StreamStore for drop-in compatibility.
|
|
541
|
+
*/
|
|
542
|
+
var FileBackedStreamStore = class {
|
|
543
|
+
/** LMDB database holding stream metadata (keys: `stream:{path}`). */
db;
/** Manages per-stream directories under {dataDir}/streams. */
fileManager;
/** Bounded cache of append-mode write streams for segment files. */
fileHandlePool;
/** Long-poll waiters awaiting new data. */
pendingLongPolls = [];
/** Root data directory containing metadata.lmdb and streams/. */
dataDir;
/**
 * @param options.dataDir - root directory for metadata and segment files
 * @param options.maxFileHandles - max pooled write handles (default 100)
 */
constructor(options) {
  this.dataDir = options.dataDir;
  this.db = (0, lmdb.open)({
    path: node_path.join(this.dataDir, `metadata.lmdb`),
    compression: true
  });
  this.fileManager = new StreamFileManager(node_path.join(this.dataDir, `streams`));
  const maxFileHandles = options.maxFileHandles ?? 100;
  this.fileHandlePool = new FileHandlePool(maxFileHandles);
  // Reconcile LMDB metadata with on-disk segment contents before serving.
  this.recover();
}
|
|
559
|
+
/**
 * Recover streams from disk on startup.
 * Validates that LMDB metadata matches actual file contents and reconciles
 * any mismatches: the segment file is treated as the source of truth for
 * the current offset, and metadata without a backing file is dropped.
 */
recover() {
  console.log(`[FileBackedStreamStore] Starting recovery...`);
  let recovered = 0;
  let reconciled = 0;
  let errors = 0;
  // Range scan over all `stream:*` keys; `\xFF` is the upper bound.
  const range = this.db.getRange({
    start: `stream:`,
    end: `stream:\xFF`
  });
  // Materialize first so putSync/removeSync below can't disturb the cursor.
  const entries = Array.from(range);
  for (const { key, value } of entries) try {
    if (typeof key !== `string`) continue;
    const streamMeta = value;
    const streamPath = key.replace(`stream:`, ``);
    const segmentPath = node_path.join(this.dataDir, `streams`, streamMeta.directoryName, `segment_00000.log`);
    // Metadata with no backing segment file is unrecoverable: drop it.
    if (!node_fs.existsSync(segmentPath)) {
      console.warn(`[FileBackedStreamStore] Recovery: Stream file missing for ${streamPath}, removing from LMDB`);
      this.db.removeSync(key);
      errors++;
      continue;
    }
    // Re-derive the offset from the file, which may be behind (crash before
    // the LMDB update) or ahead (torn write truncated by the scanner).
    const trueOffset = this.scanFileForTrueOffset(segmentPath);
    if (trueOffset !== streamMeta.currentOffset) {
      console.warn(`[FileBackedStreamStore] Recovery: Offset mismatch for ${streamPath}: LMDB says ${streamMeta.currentOffset}, file says ${trueOffset}. Reconciling to file.`);
      const reconciledMeta = {
        ...streamMeta,
        currentOffset: trueOffset
      };
      this.db.putSync(key, reconciledMeta);
      reconciled++;
    }
    recovered++;
  } catch (err) {
    // Per-stream failures are isolated so one bad entry can't abort startup.
    console.error(`[FileBackedStreamStore] Error recovering stream:`, err);
    errors++;
  }
  console.log(`[FileBackedStreamStore] Recovery complete: ${recovered} streams, ${reconciled} reconciled, ${errors} errors`);
}
|
|
601
|
+
/**
 * Scan a segment file to compute the true last offset.
 * Segment framing is [4-byte BE length][payload][1-byte separator]; the
 * scan stops at the first incomplete frame, so partial/truncated messages
 * at the end are ignored. Returns a `{readSeq}_{byteOffset}` offset string
 * counting payload bytes only (readSeq is always 0 here).
 */
scanFileForTrueOffset(segmentPath) {
  try {
    const fileContent = node_fs.readFileSync(segmentPath);
    let filePos = 0;
    let currentDataOffset = 0;
    while (filePos < fileContent.length) {
      // Truncated length prefix → stop.
      if (filePos + 4 > fileContent.length) break;
      const messageLength = fileContent.readUInt32BE(filePos);
      filePos += 4;
      // Truncated payload → stop (torn write).
      if (filePos + messageLength > fileContent.length) break;
      filePos += messageLength;
      // Skip the separator byte unless the file ends exactly at the payload.
      if (filePos < fileContent.length) filePos += 1;
      // Offsets count payload bytes, not framing bytes.
      currentDataOffset += messageLength;
    }
    return `0000000000000000_${String(currentDataOffset).padStart(16, `0`)}`;
  } catch (err) {
    // Unreadable file: fall back to the zero offset rather than failing.
    console.error(`[FileBackedStreamStore] Error scanning file ${segmentPath}:`, err);
    return `0000000000000000_0000000000000000`;
  }
}
|
|
625
|
+
/**
|
|
626
|
+
* Convert LMDB metadata to Stream object.
|
|
627
|
+
*/
|
|
628
|
+
streamMetaToStream(meta) {
|
|
629
|
+
return {
|
|
630
|
+
path: meta.path,
|
|
631
|
+
contentType: meta.contentType,
|
|
632
|
+
messages: [],
|
|
633
|
+
currentOffset: meta.currentOffset,
|
|
634
|
+
lastSeq: meta.lastSeq,
|
|
635
|
+
ttlSeconds: meta.ttlSeconds,
|
|
636
|
+
expiresAt: meta.expiresAt,
|
|
637
|
+
createdAt: meta.createdAt
|
|
638
|
+
};
|
|
639
|
+
}
|
|
640
|
+
/**
|
|
641
|
+
* Check if a stream is expired based on TTL or Expires-At.
|
|
642
|
+
*/
|
|
643
|
+
isExpired(meta) {
|
|
644
|
+
const now = Date.now();
|
|
645
|
+
if (meta.expiresAt) {
|
|
646
|
+
const expiryTime = new Date(meta.expiresAt).getTime();
|
|
647
|
+
if (!Number.isFinite(expiryTime) || now >= expiryTime) return true;
|
|
648
|
+
}
|
|
649
|
+
if (meta.ttlSeconds !== void 0) {
|
|
650
|
+
const expiryTime = meta.createdAt + meta.ttlSeconds * 1e3;
|
|
651
|
+
if (now >= expiryTime) return true;
|
|
652
|
+
}
|
|
653
|
+
return false;
|
|
654
|
+
}
|
|
655
|
+
/**
|
|
656
|
+
* Get stream metadata, deleting it if expired.
|
|
657
|
+
* Returns undefined if stream doesn't exist or is expired.
|
|
658
|
+
*/
|
|
659
|
+
getMetaIfNotExpired(streamPath) {
|
|
660
|
+
const key = `stream:${streamPath}`;
|
|
661
|
+
const meta = this.db.get(key);
|
|
662
|
+
if (!meta) return void 0;
|
|
663
|
+
if (this.isExpired(meta)) {
|
|
664
|
+
this.delete(streamPath);
|
|
665
|
+
return void 0;
|
|
666
|
+
}
|
|
667
|
+
return meta;
|
|
668
|
+
}
|
|
669
|
+
/**
 * Close the store, closing all file handles and database.
 * All data is already fsynced on each append, so no final flush needed.
 */
async close() {
  // Order matters: drain/close segment write streams before closing LMDB.
  await this.fileHandlePool.closeAll();
  await this.db.close();
}
|
|
677
|
+
async create(streamPath, options = {}) {
|
|
678
|
+
const existing = this.getMetaIfNotExpired(streamPath);
|
|
679
|
+
if (existing) {
|
|
680
|
+
const normalizeMimeType = (ct) => (ct ?? `application/octet-stream`).toLowerCase();
|
|
681
|
+
const contentTypeMatches = normalizeMimeType(options.contentType) === normalizeMimeType(existing.contentType);
|
|
682
|
+
const ttlMatches = options.ttlSeconds === existing.ttlSeconds;
|
|
683
|
+
const expiresMatches = options.expiresAt === existing.expiresAt;
|
|
684
|
+
if (contentTypeMatches && ttlMatches && expiresMatches) return this.streamMetaToStream(existing);
|
|
685
|
+
else throw new Error(`Stream already exists with different configuration: ${streamPath}`);
|
|
686
|
+
}
|
|
687
|
+
const key = `stream:${streamPath}`;
|
|
688
|
+
const streamMeta = {
|
|
689
|
+
path: streamPath,
|
|
690
|
+
contentType: options.contentType,
|
|
691
|
+
currentOffset: `0000000000000000_0000000000000000`,
|
|
692
|
+
lastSeq: void 0,
|
|
693
|
+
ttlSeconds: options.ttlSeconds,
|
|
694
|
+
expiresAt: options.expiresAt,
|
|
695
|
+
createdAt: Date.now(),
|
|
696
|
+
segmentCount: 1,
|
|
697
|
+
totalBytes: 0,
|
|
698
|
+
directoryName: generateUniqueDirectoryName(streamPath)
|
|
699
|
+
};
|
|
700
|
+
const streamDir = node_path.join(this.dataDir, `streams`, streamMeta.directoryName);
|
|
701
|
+
try {
|
|
702
|
+
node_fs.mkdirSync(streamDir, { recursive: true });
|
|
703
|
+
const segmentPath = node_path.join(streamDir, `segment_00000.log`);
|
|
704
|
+
node_fs.writeFileSync(segmentPath, ``);
|
|
705
|
+
} catch (err) {
|
|
706
|
+
console.error(`[FileBackedStreamStore] Error creating stream directory:`, err);
|
|
707
|
+
throw err;
|
|
708
|
+
}
|
|
709
|
+
this.db.putSync(key, streamMeta);
|
|
710
|
+
if (options.initialData && options.initialData.length > 0) {
|
|
711
|
+
await this.append(streamPath, options.initialData, {
|
|
712
|
+
contentType: options.contentType,
|
|
713
|
+
isInitialCreate: true
|
|
714
|
+
});
|
|
715
|
+
const updated = this.db.get(key);
|
|
716
|
+
return this.streamMetaToStream(updated);
|
|
717
|
+
}
|
|
718
|
+
return this.streamMetaToStream(streamMeta);
|
|
719
|
+
}
|
|
720
|
+
get(streamPath) {
|
|
721
|
+
const meta = this.getMetaIfNotExpired(streamPath);
|
|
722
|
+
return meta ? this.streamMetaToStream(meta) : void 0;
|
|
723
|
+
}
|
|
724
|
+
has(streamPath) {
|
|
725
|
+
return this.getMetaIfNotExpired(streamPath) !== void 0;
|
|
726
|
+
}
|
|
727
|
+
/**
 * Delete a stream: cancel its long-polls, drop its LMDB metadata, and
 * remove its on-disk directory.
 *
 * The handle close and directory removal run fire-and-forget; the unique
 * per-create directory name makes this safe, since an immediate re-create
 * of the same stream path uses a fresh directory.
 *
 * @returns true if the stream existed, false otherwise
 */
delete(streamPath) {
  const key = `stream:${streamPath}`;
  const streamMeta = this.db.get(key);
  if (!streamMeta) return false;
  this.cancelLongPollsForStream(streamPath);
  const segmentPath = node_path.join(this.dataDir, `streams`, streamMeta.directoryName, `segment_00000.log`);
  // Best-effort: close any pooled write handle before the file is unlinked.
  this.fileHandlePool.closeFileHandle(segmentPath).catch((err) => {
    console.error(`[FileBackedStreamStore] Error closing file handle:`, err);
  });
  this.db.removeSync(key);
  this.fileManager.deleteDirectoryByName(streamMeta.directoryName).catch((err) => {
    console.error(`[FileBackedStreamStore] Error deleting stream directory:`, err);
  });
  return true;
}
|
|
742
|
+
/**
 * Append a payload to a stream's segment file.
 *
 * On-disk frame layout: 4-byte big-endian length, payload bytes, then a
 * newline separator — 5 bytes of per-frame overhead (see totalBytes below).
 *
 * @param streamPath - Path identifying the stream
 * @param data - Payload bytes to append
 * @param options - Optional: contentType (must match the stream's),
 *   seq (must exceed the stream's lastSeq), isInitialCreate (JSON hint)
 * @returns The appended message, or null when JSON processing produced
 *   zero bytes to write
 * @throws Error when the stream is missing/expired, on content-type
 *   mismatch, or on a sequence conflict
 */
async append(streamPath, data, options = {}) {
const streamMeta = this.getMetaIfNotExpired(streamPath);
if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
// Reject appends whose declared content type differs (after normalization).
if (options.contentType && streamMeta.contentType) {
const providedType = normalizeContentType(options.contentType);
const streamType = normalizeContentType(streamMeta.contentType);
if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${streamMeta.contentType}, got ${options.contentType}`);
}
// Sequence numbers must be strictly increasing when provided.
if (options.seq !== void 0) {
if (streamMeta.lastSeq !== void 0 && options.seq <= streamMeta.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${streamMeta.lastSeq}`);
}
let processedData = data;
// JSON streams pre-process the payload (e.g. comma separation framing);
// an empty result means there is nothing to persist.
if (normalizeContentType(streamMeta.contentType) === `application/json`) {
processedData = processJsonAppend(data, options.isInitialCreate ?? false);
if (processedData.length === 0) return null;
}
// Offsets are "<seq>_<byte>" with zero-padded 16-digit components.
const parts = streamMeta.currentOffset.split(`_`).map(Number);
const readSeq = parts[0];
const byteOffset = parts[1];
const newByteOffset = byteOffset + processedData.length;
const newOffset = `${String(readSeq).padStart(16, `0`)}_${String(newByteOffset).padStart(16, `0`)}`;
const streamDir = node_path.join(this.dataDir, `streams`, streamMeta.directoryName);
const segmentPath = node_path.join(streamDir, `segment_00000.log`);
const stream = this.fileHandlePool.getWriteStream(segmentPath);
// Build the length-prefixed frame and write it in one chunk.
const lengthBuf = Buffer.allocUnsafe(4);
lengthBuf.writeUInt32BE(processedData.length, 0);
const frameBuf = Buffer.concat([
lengthBuf,
processedData,
Buffer.from(`\n`)
]);
await new Promise((resolve, reject) => {
stream.write(frameBuf, (err) => {
if (err) reject(err);
else resolve();
});
});
const message = {
data: processedData,
offset: newOffset,
timestamp: Date.now()
};
// Durability point: fsync before the metadata record advances the offset.
await this.fileHandlePool.fsyncFile(segmentPath);
const updatedMeta = {
...streamMeta,
currentOffset: newOffset,
lastSeq: options.seq ?? streamMeta.lastSeq,
// +5 accounts for the 4-byte length prefix plus the newline separator.
totalBytes: streamMeta.totalBytes + processedData.length + 5
};
const key = `stream:${streamPath}`;
this.db.putSync(key, updatedMeta);
// Wake any long-poll waiters now that new data is visible.
this.notifyLongPolls(streamPath);
return message;
}
|
|
796
|
+
/**
 * Read all messages recorded after the given offset.
 *
 * Parses the whole segment file (length-prefixed frames — see append) and
 * returns every frame whose end byte lies past the requested start byte.
 * NOTE(review): this rereads the entire segment on every call — presumably
 * acceptable for a test server; confirm before production use.
 *
 * @param streamPath - Path identifying the stream
 * @param offset - "<seq>_<byte>" offset to read after; undefined reads all
 * @returns { messages, upToDate } — upToDate is always true here
 * @throws Error when the stream is missing or expired
 */
read(streamPath, offset) {
const streamMeta = this.getMetaIfNotExpired(streamPath);
if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
const startOffset = offset ?? `0000000000000000_0000000000000000`;
const startParts = startOffset.split(`_`).map(Number);
const startByte = startParts[1] ?? 0;
const currentParts = streamMeta.currentOffset.split(`_`).map(Number);
const currentSeq = currentParts[0] ?? 0;
const currentByte = currentParts[1] ?? 0;
// Empty stream: nothing has ever been appended.
if (streamMeta.currentOffset === `0000000000000000_0000000000000000`) return {
messages: [],
upToDate: true
};
// Caller is already at (or past) the newest byte.
if (startByte >= currentByte) return {
messages: [],
upToDate: true
};
const streamDir = node_path.join(this.dataDir, `streams`, streamMeta.directoryName);
const segmentPath = node_path.join(streamDir, `segment_00000.log`);
if (!node_fs.existsSync(segmentPath)) return {
messages: [],
upToDate: true
};
const messages = [];
try {
const fileContent = node_fs.readFileSync(segmentPath);
let filePos = 0;
let currentDataOffset = 0;
// Walk frames: [4-byte BE length][payload][\n]; stop on a truncated frame.
while (filePos < fileContent.length) {
if (filePos + 4 > fileContent.length) break;
const messageLength = fileContent.readUInt32BE(filePos);
filePos += 4;
if (filePos + messageLength > fileContent.length) break;
const messageData = fileContent.subarray(filePos, filePos + messageLength);
filePos += messageLength;
filePos += 1;
// A frame's offset is the byte position at its END (data bytes only).
const messageOffset = currentDataOffset + messageLength;
if (messageOffset > startByte) messages.push({
data: new Uint8Array(messageData),
offset: `${String(currentSeq).padStart(16, `0`)}_${String(messageOffset).padStart(16, `0`)}`,
// Original append timestamps are not persisted in the frame format.
timestamp: 0
});
currentDataOffset = messageOffset;
}
} catch (err) {
// Best-effort read: a failed parse returns whatever was decoded so far.
console.error(`[FileBackedStreamStore] Error reading file:`, err);
}
return {
messages,
upToDate: true
};
}
|
|
848
|
+
/**
 * Long-poll for messages after the given offset.
 *
 * Returns immediately if data is already available; otherwise registers a
 * pending waiter that is resolved either by notifyLongPolls (new data) or
 * by the timeout. The waiter removes itself from pendingLongPolls on both
 * paths, so resolution is one-shot.
 *
 * @param streamPath - Path identifying the stream
 * @param offset - Offset to read after
 * @param timeoutMs - Max time to wait before resolving with timedOut: true
 * @returns { messages, timedOut }
 * @throws Error when the stream is missing or expired
 */
async waitForMessages(streamPath, offset, timeoutMs) {
const streamMeta = this.getMetaIfNotExpired(streamPath);
if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
// Fast path: data already exists past the offset.
const { messages } = this.read(streamPath, offset);
if (messages.length > 0) return {
messages,
timedOut: false
};
return new Promise((resolve) => {
const timeoutId = setTimeout(() => {
this.removePendingLongPoll(pending);
resolve({
messages: [],
timedOut: true
});
}, timeoutMs);
// `pending.resolve` wraps the promise's resolve so that the timeout is
// cleared and the waiter deregistered exactly once, whichever side wins.
const pending = {
path: streamPath,
offset,
resolve: (msgs) => {
clearTimeout(timeoutId);
this.removePendingLongPoll(pending);
resolve({
messages: msgs,
timedOut: false
});
},
timeoutId
};
this.pendingLongPolls.push(pending);
});
}
|
|
880
|
+
/**
|
|
881
|
+
* Format messages for response.
|
|
882
|
+
* For JSON mode, wraps concatenated data in array brackets.
|
|
883
|
+
* @throws Error if stream doesn't exist or is expired
|
|
884
|
+
*/
|
|
885
|
+
formatResponse(streamPath, messages) {
|
|
886
|
+
const streamMeta = this.getMetaIfNotExpired(streamPath);
|
|
887
|
+
if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
|
|
888
|
+
const totalSize = messages.reduce((sum, m) => sum + m.data.length, 0);
|
|
889
|
+
const concatenated = new Uint8Array(totalSize);
|
|
890
|
+
let offset = 0;
|
|
891
|
+
for (const msg of messages) {
|
|
892
|
+
concatenated.set(msg.data, offset);
|
|
893
|
+
offset += msg.data.length;
|
|
894
|
+
}
|
|
895
|
+
if (normalizeContentType(streamMeta.contentType) === `application/json`) return formatJsonResponse(concatenated);
|
|
896
|
+
return concatenated;
|
|
897
|
+
}
|
|
898
|
+
getCurrentOffset(streamPath) {
|
|
899
|
+
const streamMeta = this.getMetaIfNotExpired(streamPath);
|
|
900
|
+
return streamMeta?.currentOffset;
|
|
901
|
+
}
|
|
902
|
+
clear() {
|
|
903
|
+
for (const pending of this.pendingLongPolls) {
|
|
904
|
+
clearTimeout(pending.timeoutId);
|
|
905
|
+
pending.resolve([]);
|
|
906
|
+
}
|
|
907
|
+
this.pendingLongPolls = [];
|
|
908
|
+
const range = this.db.getRange({
|
|
909
|
+
start: `stream:`,
|
|
910
|
+
end: `stream:\xFF`
|
|
911
|
+
});
|
|
912
|
+
const entries = Array.from(range);
|
|
913
|
+
for (const { key } of entries) this.db.removeSync(key);
|
|
914
|
+
this.fileHandlePool.closeAll().catch((err) => {
|
|
915
|
+
console.error(`[FileBackedStreamStore] Error closing handles:`, err);
|
|
916
|
+
});
|
|
917
|
+
}
|
|
918
|
+
/**
|
|
919
|
+
* Cancel all pending long-polls (used during shutdown).
|
|
920
|
+
*/
|
|
921
|
+
cancelAllWaits() {
|
|
922
|
+
for (const pending of this.pendingLongPolls) {
|
|
923
|
+
clearTimeout(pending.timeoutId);
|
|
924
|
+
pending.resolve([]);
|
|
925
|
+
}
|
|
926
|
+
this.pendingLongPolls = [];
|
|
927
|
+
}
|
|
928
|
+
list() {
|
|
929
|
+
const paths = [];
|
|
930
|
+
const range = this.db.getRange({
|
|
931
|
+
start: `stream:`,
|
|
932
|
+
end: `stream:\xFF`
|
|
933
|
+
});
|
|
934
|
+
const entries = Array.from(range);
|
|
935
|
+
for (const { key } of entries) if (typeof key === `string`) paths.push(key.replace(`stream:`, ``));
|
|
936
|
+
return paths;
|
|
937
|
+
}
|
|
938
|
+
notifyLongPolls(streamPath) {
|
|
939
|
+
const toNotify = this.pendingLongPolls.filter((p) => p.path === streamPath);
|
|
940
|
+
for (const pending of toNotify) {
|
|
941
|
+
const { messages } = this.read(streamPath, pending.offset);
|
|
942
|
+
if (messages.length > 0) pending.resolve(messages);
|
|
943
|
+
}
|
|
944
|
+
}
|
|
945
|
+
cancelLongPollsForStream(streamPath) {
|
|
946
|
+
const toCancel = this.pendingLongPolls.filter((p) => p.path === streamPath);
|
|
947
|
+
for (const pending of toCancel) {
|
|
948
|
+
clearTimeout(pending.timeoutId);
|
|
949
|
+
pending.resolve([]);
|
|
950
|
+
}
|
|
951
|
+
this.pendingLongPolls = this.pendingLongPolls.filter((p) => p.path !== streamPath);
|
|
952
|
+
}
|
|
953
|
+
removePendingLongPoll(pending) {
|
|
954
|
+
const index = this.pendingLongPolls.indexOf(pending);
|
|
955
|
+
if (index !== -1) this.pendingLongPolls.splice(index, 1);
|
|
956
|
+
}
|
|
957
|
+
};
|
|
958
|
+
|
|
959
|
+
//#endregion
|
|
960
|
+
//#region src/cursor.ts
|
|
961
|
+
/**
|
|
962
|
+
* Stream cursor calculation for CDN cache collapsing.
|
|
963
|
+
*
|
|
964
|
+
* This module implements interval-based cursor generation to prevent
|
|
965
|
+
* infinite CDN cache loops while enabling request collapsing.
|
|
966
|
+
*
|
|
967
|
+
* The mechanism works by:
|
|
968
|
+
* 1. Dividing time into fixed intervals (default 20 seconds)
|
|
969
|
+
* 2. Computing interval number from an epoch (October 9, 2024)
|
|
970
|
+
* 3. Returning cursor values that change at interval boundaries
|
|
971
|
+
* 4. Ensuring monotonic cursor progression (never going backwards)
|
|
972
|
+
*/
|
|
973
|
+
/**
 * Default epoch for cursor calculation: October 9, 2024 00:00:00 UTC.
 * This is the reference point from which intervals are counted.
 * Using a past date ensures cursors are always positive.
 */
const DEFAULT_CURSOR_EPOCH = new Date(`2024-10-09T00:00:00.000Z`);
/**
 * Default interval duration in seconds.
 * Cursor values change once per interval (see calculateCursor).
 */
const DEFAULT_CURSOR_INTERVAL_SECONDS = 20;
/**
 * Maximum jitter in seconds to add on collision.
 * Per protocol spec: random value between 1-3600 seconds.
 */
const MAX_JITTER_SECONDS = 3600;
/**
 * Minimum jitter in seconds.
 * Lower bound of the random window used by generateJitterIntervals.
 */
const MIN_JITTER_SECONDS = 1;
|
|
992
|
+
/**
 * Calculate the current cursor value based on time intervals.
 *
 * The cursor is the index of the current time interval, counted from
 * the configured epoch (floor of elapsed-ms / interval-ms).
 *
 * @param options - Configuration for cursor calculation
 * @returns The current cursor value as a string
 */
function calculateCursor(options = {}) {
  const seconds = options.intervalSeconds ?? DEFAULT_CURSOR_INTERVAL_SECONDS;
  const epochDate = options.epoch ?? DEFAULT_CURSOR_EPOCH;
  const elapsedMs = Date.now() - epochDate.getTime();
  const interval = Math.floor(elapsedMs / (seconds * 1e3));
  return String(interval);
}
|
|
1007
|
+
/**
 * Generate a random jitter value measured in whole intervals.
 *
 * Picks a uniform random number of seconds in
 * [MIN_JITTER_SECONDS, MAX_JITTER_SECONDS] and converts it to a count of
 * intervals, never returning fewer than one.
 *
 * @param intervalSeconds - The interval duration in seconds
 * @returns Number of intervals to add as jitter
 */
function generateJitterIntervals(intervalSeconds) {
  const span = MAX_JITTER_SECONDS - MIN_JITTER_SECONDS + 1;
  const jitterSeconds = MIN_JITTER_SECONDS + Math.floor(Math.random() * span);
  const intervals = Math.ceil(jitterSeconds / intervalSeconds);
  return Math.max(1, intervals);
}
|
|
1017
|
+
/**
 * Generate a cursor for a response, ensuring monotonic progression.
 *
 * Guarantees the returned cursor is >= the current time interval and
 * strictly greater than any numeric client-provided cursor, preventing
 * CDN cache loops (A->B->A cycles):
 * - no client cursor, or an unparseable / stale one: current interval
 * - client cursor at or ahead of the current interval: client + jitter
 *
 * @param clientCursor - The cursor provided by the client (if any)
 * @param options - Configuration for cursor calculation
 * @returns The cursor value to include in the response
 */
function generateResponseCursor(clientCursor, options = {}) {
  const intervalSeconds = options.intervalSeconds ?? DEFAULT_CURSOR_INTERVAL_SECONDS;
  const currentCursor = calculateCursor(options);
  if (!clientCursor) return currentCursor;
  const currentInterval = parseInt(currentCursor, 10);
  const clientInterval = parseInt(clientCursor, 10);
  const clientIsBehind = isNaN(clientInterval) || clientInterval < currentInterval;
  if (clientIsBehind) return currentCursor;
  return String(clientInterval + generateJitterIntervals(intervalSeconds));
}
|
|
1046
|
+
/**
 * Handle cursor collision by adding random jitter.
 *
 * @deprecated Use generateResponseCursor instead, which handles all cases
 * including monotonicity guarantees. Kept as a thin delegating shim; the
 * first argument is intentionally unused.
 *
 * @param currentCursor - The newly calculated cursor value (ignored)
 * @param previousCursor - The cursor provided by the client (if any)
 * @param options - Configuration for cursor calculation
 * @returns The cursor value to return, with jitter applied on collision
 */
function handleCursorCollision(currentCursor, previousCursor, options = {}) {
  void currentCursor;
  return generateResponseCursor(previousCursor, options);
}
|
|
1060
|
+
|
|
1061
|
+
//#endregion
|
|
1062
|
+
//#region src/server.ts
|
|
1063
|
+
// HTTP header names used by the durable-streams protocol.
const STREAM_OFFSET_HEADER = `Stream-Next-Offset`;
const STREAM_CURSOR_HEADER = `Stream-Cursor`;
const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
const STREAM_SEQ_HEADER = `Stream-Seq`;
const STREAM_TTL_HEADER = `Stream-TTL`;
const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
// Field names carried in SSE control payloads.
const SSE_OFFSET_FIELD = `streamNextOffset`;
const SSE_CURSOR_FIELD = `streamCursor`;
const SSE_UP_TO_DATE_FIELD = `upToDate`;
// Query-string parameter names accepted on GET requests.
const OFFSET_QUERY_PARAM = `offset`;
const LIVE_QUERY_PARAM = `live`;
const CURSOR_QUERY_PARAM = `cursor`;
|
|
1075
|
+
/**
 * Encode data for SSE format.
 * Per the SSE spec, every line of the payload gets its own "data: "
 * prefix; the event is terminated by a blank line.
 */
function encodeSSEData(payload) {
  const prefixed = [];
  for (const line of payload.split(`\n`)) prefixed.push(`data: ${line}`);
  return `${prefixed.join(`\n`)}\n\n`;
}
|
|
1084
|
+
/**
 * Minimum response size (in bytes) to consider for compression.
 * Responses smaller than this won't benefit from compression.
 */
const COMPRESSION_THRESHOLD = 1024;
|
|
1089
|
+
/**
 * Determine the best compression encoding from an Accept-Encoding header.
 *
 * Preference order is gzip, then deflate. Codings the client explicitly
 * refuses with a zero quality value (e.g. `gzip;q=0`) are skipped, per
 * RFC 9110 section 12.4.2 — the original implementation ignored q-values
 * and would compress with a coding the client had ruled out.
 *
 * @param acceptEncoding - Raw Accept-Encoding header value, if any
 * @returns 'gzip', 'deflate', or null if no compression should be used
 */
function getCompressionEncoding(acceptEncoding) {
  if (!acceptEncoding) return null;
  const accepted = new Set();
  for (const entry of acceptEncoding.toLowerCase().split(`,`)) {
    const [name, ...params] = entry.trim().split(`;`);
    const coding = name?.trim();
    if (!coding) continue;
    // A quality of 0 means "not acceptable" — drop the coding entirely.
    const qParam = params.map((p) => p.trim()).find((p) => p.startsWith(`q=`));
    if (qParam) {
      const q = Number.parseFloat(qParam.slice(2));
      if (!Number.isNaN(q) && q <= 0) continue;
    }
    accepted.add(coding);
  }
  if (accepted.has(`gzip`)) return `gzip`;
  if (accepted.has(`deflate`)) return `deflate`;
  return null;
}
|
|
1106
|
+
/**
 * Compress data using the specified encoding ('gzip' or anything else
 * falls through to deflate, matching the caller's two-value contract).
 */
function compressData(data, encoding) {
  const compress = encoding === `gzip` ? node_zlib.gzipSync : node_zlib.deflateSync;
  return compress(data);
}
|
|
1113
|
+
var DurableStreamTestServer = class {
|
|
1114
|
+
// Storage backend; FileBackedStreamStore or in-memory StreamStore
// (selected in the constructor based on options.dataDir).
store;
// Node HTTP server instance; null until start() is called.
server = null;
// Resolved server options (defaults applied in the constructor).
options;
// Base URL of the listening server; null until start() resolves.
_url = null;
// Open SSE responses, tracked so stop() can terminate them.
activeSSEResponses = new Set();
// Set during stop(); long-running loops observe this flag.
isShuttingDown = false;
/** Injected errors for testing retry/resilience */
injectedErrors = new Map();
|
|
1122
|
+
/**
 * Build a test server.
 * A dataDir selects the file-backed store; otherwise streams live in
 * memory. All unspecified options receive defaults here.
 */
constructor(options = {}) {
if (options.dataDir) this.store = new FileBackedStreamStore({ dataDir: options.dataDir });
else this.store = new StreamStore();
this.options = {
port: options.port ?? 4437,
host: options.host ?? `127.0.0.1`,
// 30s default long-poll window.
longPollTimeout: options.longPollTimeout ?? 3e4,
dataDir: options.dataDir,
onStreamCreated: options.onStreamCreated,
onStreamDeleted: options.onStreamDeleted,
compression: options.compression ?? true,
// Undefined members fall back to module defaults in calculateCursor.
cursorOptions: {
intervalSeconds: options.cursorIntervalSeconds,
epoch: options.cursorEpoch
}
};
}
|
|
1139
|
+
/**
 * Start the server.
 * Resolves with the base URL once listening; rejects on listen errors.
 * @throws Error when called while a server is already running
 */
async start() {
if (this.server) throw new Error(`Server already started`);
return new Promise((resolve, reject) => {
this.server = (0, node_http.createServer)((req, res) => {
// Last-resort handler: surface unexpected errors as HTTP 500 when
// headers have not yet been written.
this.handleRequest(req, res).catch((err) => {
console.error(`Request error:`, err);
if (!res.headersSent) {
res.writeHead(500, { "content-type": `text/plain` });
res.end(`Internal server error`);
}
});
});
this.server.on(`error`, reject);
this.server.listen(this.options.port, this.options.host, () => {
const addr = this.server.address();
// address() returns a string for pipe/unix sockets, an object for TCP.
if (typeof addr === `string`) this._url = addr;
else if (addr) this._url = `http://${this.options.host}:${addr.port}`;
resolve(this._url);
});
});
}
|
|
1163
|
+
/**
 * Stop the server.
 * Cancels pending long-polls, ends active SSE responses, closes the HTTP
 * server, then (for file-backed stores) closes the store before clearing
 * internal state. No-op when the server is not running.
 */
async stop() {
if (!this.server) return;
this.isShuttingDown = true;
// In-memory stores may not implement cancelAllWaits; feature-detect.
if (`cancelAllWaits` in this.store) this.store.cancelAllWaits();
for (const res of this.activeSSEResponses) res.end();
this.activeSSEResponses.clear();
return new Promise((resolve, reject) => {
this.server.close(async (err) => {
if (err) {
reject(err);
return;
}
try {
// Only the file-backed store holds resources that need closing.
if (this.store instanceof FileBackedStreamStore) await this.store.close();
this.server = null;
this._url = null;
this.isShuttingDown = false;
resolve();
} catch (closeErr) {
reject(closeErr);
}
});
});
}
|
|
1190
|
+
/**
|
|
1191
|
+
* Get the server URL.
|
|
1192
|
+
*/
|
|
1193
|
+
get url() {
|
|
1194
|
+
if (!this._url) throw new Error(`Server not started`);
|
|
1195
|
+
return this._url;
|
|
1196
|
+
}
|
|
1197
|
+
/**
 * Clear all streams.
 * Delegates to the backing store's clear(), which also resolves any
 * pending long-polls it tracks.
 */
clear() {
this.store.clear();
}
|
|
1203
|
+
/**
|
|
1204
|
+
* Inject an error to be returned on the next N requests to a path.
|
|
1205
|
+
* Used for testing retry/resilience behavior.
|
|
1206
|
+
*/
|
|
1207
|
+
injectError(path, status, count = 1, retryAfter) {
|
|
1208
|
+
this.injectedErrors.set(path, {
|
|
1209
|
+
status,
|
|
1210
|
+
count,
|
|
1211
|
+
retryAfter
|
|
1212
|
+
});
|
|
1213
|
+
}
|
|
1214
|
+
/**
 * Clear all injected errors.
 * Removes every pending error registered via injectError.
 */
clearInjectedErrors() {
this.injectedErrors.clear();
}
|
|
1220
|
+
/**
|
|
1221
|
+
* Check if there's an injected error for this path and consume it.
|
|
1222
|
+
* Returns the error config if one should be returned, null otherwise.
|
|
1223
|
+
*/
|
|
1224
|
+
consumeInjectedError(path) {
|
|
1225
|
+
const error = this.injectedErrors.get(path);
|
|
1226
|
+
if (!error) return null;
|
|
1227
|
+
error.count--;
|
|
1228
|
+
if (error.count <= 0) this.injectedErrors.delete(path);
|
|
1229
|
+
return error;
|
|
1230
|
+
}
|
|
1231
|
+
/**
 * Top-level request router.
 * Applies CORS headers, serves the test error-injection endpoint, replays
 * injected errors, then dispatches by HTTP method. Known store errors are
 * mapped to 4xx status codes by message substring; anything else is
 * rethrown for the 500 handler installed in start().
 */
async handleRequest(req, res) {
const url = new URL(req.url ?? `/`, `http://${req.headers.host}`);
const path = url.pathname;
const method = req.method?.toUpperCase();
// CORS: permissive defaults suitable for a test server.
res.setHeader(`access-control-allow-origin`, `*`);
res.setHeader(`access-control-allow-methods`, `GET, POST, PUT, DELETE, HEAD, OPTIONS`);
res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`);
res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`);
if (method === `OPTIONS`) {
res.writeHead(204);
res.end();
return;
}
// Test-only control endpoint for registering injected errors.
if (path === `/_test/inject-error`) {
await this.handleTestInjectError(method, req, res);
return;
}
// Replay a previously injected error, if one is pending for this path.
const injectedError = this.consumeInjectedError(path);
if (injectedError) {
const headers = { "content-type": `text/plain` };
if (injectedError.retryAfter !== void 0) headers[`retry-after`] = injectedError.retryAfter.toString();
res.writeHead(injectedError.status, headers);
res.end(`Injected error for testing`);
return;
}
try {
switch (method) {
case `PUT`:
await this.handleCreate(path, req, res);
break;
case `HEAD`:
this.handleHead(path, res);
break;
case `GET`:
await this.handleRead(path, url, req, res);
break;
case `POST`:
await this.handleAppend(path, req, res);
break;
case `DELETE`:
await this.handleDelete(path, res);
break;
default:
res.writeHead(405, { "content-type": `text/plain` });
res.end(`Method not allowed`);
}
} catch (err) {
// Map known store error messages to protocol status codes.
if (err instanceof Error) if (err.message.includes(`not found`)) {
res.writeHead(404, { "content-type": `text/plain` });
res.end(`Stream not found`);
} else if (err.message.includes(`already exists with different configuration`)) {
res.writeHead(409, { "content-type": `text/plain` });
res.end(`Stream already exists with different configuration`);
} else if (err.message.includes(`Sequence conflict`)) {
res.writeHead(409, { "content-type": `text/plain` });
res.end(`Sequence conflict`);
} else if (err.message.includes(`Content-type mismatch`)) {
res.writeHead(409, { "content-type": `text/plain` });
res.end(`Content-type mismatch`);
} else if (err.message.includes(`Invalid JSON`)) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Invalid JSON`);
} else if (err.message.includes(`Empty arrays are not allowed`)) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Empty arrays are not allowed`);
} else throw err;
else throw err;
}
}
|
|
1300
|
+
/**
 * Handle PUT - create (or idempotently re-create) a stream.
 * Validates the content type and the mutually exclusive TTL /
 * expiry headers, then delegates to the store. Responds 201 with a
 * Location header for a new stream, 200 for an existing one.
 */
async handleCreate(path, req, res) {
let contentType = req.headers[`content-type`];
// Fall back to octet-stream when the header is missing or malformed.
if (!contentType || contentType.trim() === `` || !/^[\w-]+\/[\w-]+/.test(contentType)) contentType = `application/octet-stream`;
// Node lowercases incoming header names.
const ttlHeader = req.headers[STREAM_TTL_HEADER.toLowerCase()];
const expiresAtHeader = req.headers[STREAM_EXPIRES_AT_HEADER.toLowerCase()];
// TTL and absolute expiry are mutually exclusive.
if (ttlHeader && expiresAtHeader) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Cannot specify both Stream-TTL and Stream-Expires-At`);
return;
}
let ttlSeconds;
if (ttlHeader) {
// Only a canonical non-negative decimal integer is accepted.
const ttlPattern = /^(0|[1-9]\d*)$/;
if (!ttlPattern.test(ttlHeader)) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Invalid Stream-TTL value`);
return;
}
ttlSeconds = parseInt(ttlHeader, 10);
// Belt-and-braces re-check of the parsed value.
if (isNaN(ttlSeconds) || ttlSeconds < 0) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Invalid Stream-TTL value`);
return;
}
}
if (expiresAtHeader) {
const timestamp = new Date(expiresAtHeader);
if (isNaN(timestamp.getTime())) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Invalid Stream-Expires-At timestamp`);
return;
}
}
const body = await this.readBody(req);
// Capture novelty before create(), which makes the stream exist.
const isNew = !this.store.has(path);
// Promise.resolve tolerates both sync and async store implementations.
await Promise.resolve(this.store.create(path, {
contentType,
ttlSeconds,
expiresAt: expiresAtHeader,
initialData: body.length > 0 ? body : void 0
}));
const stream = this.store.get(path);
if (isNew && this.options.onStreamCreated) await Promise.resolve(this.options.onStreamCreated({
type: `created`,
path,
contentType,
timestamp: Date.now()
}));
const headers = {
"content-type": contentType,
[STREAM_OFFSET_HEADER]: stream.currentOffset
};
if (isNew) headers[`location`] = `${this._url}${path}`;
res.writeHead(isNew ? 201 : 200, headers);
res.end();
}
|
|
1359
|
+
/**
|
|
1360
|
+
* Handle HEAD - get metadata
|
|
1361
|
+
*/
|
|
1362
|
+
handleHead(path, res) {
|
|
1363
|
+
const stream = this.store.get(path);
|
|
1364
|
+
if (!stream) {
|
|
1365
|
+
res.writeHead(404, { "content-type": `text/plain` });
|
|
1366
|
+
res.end();
|
|
1367
|
+
return;
|
|
1368
|
+
}
|
|
1369
|
+
const headers = { [STREAM_OFFSET_HEADER]: stream.currentOffset };
|
|
1370
|
+
if (stream.contentType) headers[`content-type`] = stream.contentType;
|
|
1371
|
+
headers[`etag`] = `"${Buffer.from(path).toString(`base64`)}:-1:${stream.currentOffset}"`;
|
|
1372
|
+
res.writeHead(200, headers);
|
|
1373
|
+
res.end();
|
|
1374
|
+
}
|
|
1375
|
+
/**
 * Handle GET - read stream data.
 * Supports catch-up reads, long-poll (`live=long-poll`), and SSE
 * (`live=sse`, delegated to handleSSE). Validates the offset parameter,
 * honors If-None-Match via a path/offset ETag, and optionally compresses
 * the body based on Accept-Encoding.
 */
async handleRead(path, url, req, res) {
const stream = this.store.get(path);
if (!stream) {
res.writeHead(404, { "content-type": `text/plain` });
res.end(`Stream not found`);
return;
}
const offset = url.searchParams.get(OFFSET_QUERY_PARAM) ?? void 0;
const live = url.searchParams.get(LIVE_QUERY_PARAM);
const cursor = url.searchParams.get(CURSOR_QUERY_PARAM) ?? void 0;
// Offset validation: non-empty, single-valued, and well-formed.
if (offset !== void 0) {
if (offset === ``) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Empty offset parameter`);
return;
}
const allOffsets = url.searchParams.getAll(OFFSET_QUERY_PARAM);
if (allOffsets.length > 1) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Multiple offset parameters not allowed`);
return;
}
// Either the sentinel -1 or "<seq>_<byte>".
const validOffsetPattern = /^(-1|\d+_\d+)$/;
if (!validOffsetPattern.test(offset)) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`Invalid offset format`);
return;
}
}
// Live modes require an explicit starting offset.
if ((live === `long-poll` || live === `sse`) && !offset) {
res.writeHead(400, { "content-type": `text/plain` });
res.end(`${live === `sse` ? `SSE` : `Long-poll`} requires offset parameter`);
return;
}
if (live === `sse`) {
await this.handleSSE(path, stream, offset, cursor, res);
return;
}
let { messages, upToDate } = this.store.read(path, offset);
const clientIsCaughtUp = offset && offset === stream.currentOffset;
// Long-poll: block for new data only when the client is fully caught up.
if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
const result = await this.store.waitForMessages(path, offset, this.options.longPollTimeout);
if (result.timedOut) {
// 204 with an advanced cursor so CDN-collapsed clients re-poll.
const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
res.writeHead(204, {
[STREAM_OFFSET_HEADER]: offset,
[STREAM_UP_TO_DATE_HEADER]: `true`,
[STREAM_CURSOR_HEADER]: responseCursor
});
res.end();
return;
}
messages = result.messages;
upToDate = true;
}
const headers = {};
if (stream.contentType) headers[`content-type`] = stream.contentType;
const lastMessage = messages[messages.length - 1];
const responseOffset = lastMessage?.offset ?? stream.currentOffset;
headers[STREAM_OFFSET_HEADER] = responseOffset;
if (live === `long-poll`) headers[STREAM_CURSOR_HEADER] = generateResponseCursor(cursor, this.options.cursorOptions);
if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
// ETag covers base64(path) plus the [start, end] offset range served.
const startOffset = offset ?? `-1`;
const etag = `"${Buffer.from(path).toString(`base64`)}:${startOffset}:${responseOffset}"`;
headers[`etag`] = etag;
const ifNoneMatch = req.headers[`if-none-match`];
if (ifNoneMatch && ifNoneMatch === etag) {
res.writeHead(304, { etag });
res.end();
return;
}
const responseData = this.store.formatResponse(path, messages);
let finalData = responseData;
// Compress only bodies large enough to benefit (see COMPRESSION_THRESHOLD).
if (this.options.compression && responseData.length >= COMPRESSION_THRESHOLD) {
const acceptEncoding = req.headers[`accept-encoding`];
const encoding = getCompressionEncoding(acceptEncoding);
if (encoding) {
finalData = compressData(responseData, encoding);
headers[`content-encoding`] = encoding;
headers[`vary`] = `accept-encoding`;
}
}
res.writeHead(200, headers);
res.end(Buffer.from(finalData));
}
|
|
1463
|
+
/**
|
|
1464
|
+
* Handle SSE (Server-Sent Events) mode
|
|
1465
|
+
*/
|
|
1466
|
+
async handleSSE(path, stream, initialOffset, cursor, res) {
|
|
1467
|
+
this.activeSSEResponses.add(res);
|
|
1468
|
+
res.writeHead(200, {
|
|
1469
|
+
"content-type": `text/event-stream`,
|
|
1470
|
+
"cache-control": `no-cache`,
|
|
1471
|
+
connection: `keep-alive`,
|
|
1472
|
+
"access-control-allow-origin": `*`
|
|
1473
|
+
});
|
|
1474
|
+
let currentOffset = initialOffset;
|
|
1475
|
+
let isConnected = true;
|
|
1476
|
+
const decoder = new TextDecoder();
|
|
1477
|
+
res.on(`close`, () => {
|
|
1478
|
+
isConnected = false;
|
|
1479
|
+
this.activeSSEResponses.delete(res);
|
|
1480
|
+
});
|
|
1481
|
+
const isJsonStream = stream?.contentType?.includes(`application/json`);
|
|
1482
|
+
while (isConnected && !this.isShuttingDown) {
|
|
1483
|
+
const { messages, upToDate } = this.store.read(path, currentOffset);
|
|
1484
|
+
for (const message of messages) {
|
|
1485
|
+
let dataPayload;
|
|
1486
|
+
if (isJsonStream) {
|
|
1487
|
+
const jsonBytes = this.store.formatResponse(path, [message]);
|
|
1488
|
+
dataPayload = decoder.decode(jsonBytes);
|
|
1489
|
+
} else dataPayload = decoder.decode(message.data);
|
|
1490
|
+
res.write(`event: data\n`);
|
|
1491
|
+
res.write(encodeSSEData(dataPayload));
|
|
1492
|
+
currentOffset = message.offset;
|
|
1493
|
+
}
|
|
1494
|
+
const controlOffset = messages[messages.length - 1]?.offset ?? stream.currentOffset;
|
|
1495
|
+
const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
|
|
1496
|
+
const controlData = {
|
|
1497
|
+
[SSE_OFFSET_FIELD]: controlOffset,
|
|
1498
|
+
[SSE_CURSOR_FIELD]: responseCursor
|
|
1499
|
+
};
|
|
1500
|
+
if (upToDate) controlData[SSE_UP_TO_DATE_FIELD] = true;
|
|
1501
|
+
res.write(`event: control\n`);
|
|
1502
|
+
res.write(encodeSSEData(JSON.stringify(controlData)));
|
|
1503
|
+
currentOffset = controlOffset;
|
|
1504
|
+
if (upToDate) {
|
|
1505
|
+
const result = await this.store.waitForMessages(path, currentOffset, this.options.longPollTimeout);
|
|
1506
|
+
if (this.isShuttingDown || !isConnected) break;
|
|
1507
|
+
if (result.timedOut) {
|
|
1508
|
+
const keepAliveCursor = generateResponseCursor(cursor, this.options.cursorOptions);
|
|
1509
|
+
const keepAliveData = {
|
|
1510
|
+
[SSE_OFFSET_FIELD]: currentOffset,
|
|
1511
|
+
[SSE_CURSOR_FIELD]: keepAliveCursor,
|
|
1512
|
+
[SSE_UP_TO_DATE_FIELD]: true
|
|
1513
|
+
};
|
|
1514
|
+
res.write(`event: control\n`);
|
|
1515
|
+
res.write(encodeSSEData(JSON.stringify(keepAliveData)));
|
|
1516
|
+
}
|
|
1517
|
+
}
|
|
1518
|
+
}
|
|
1519
|
+
this.activeSSEResponses.delete(res);
|
|
1520
|
+
res.end();
|
|
1521
|
+
}
|
|
1522
|
+
/**
|
|
1523
|
+
* Handle POST - append data
|
|
1524
|
+
*/
|
|
1525
|
+
async handleAppend(path, req, res) {
|
|
1526
|
+
const contentType = req.headers[`content-type`];
|
|
1527
|
+
const seq = req.headers[STREAM_SEQ_HEADER.toLowerCase()];
|
|
1528
|
+
const body = await this.readBody(req);
|
|
1529
|
+
if (body.length === 0) {
|
|
1530
|
+
res.writeHead(400, { "content-type": `text/plain` });
|
|
1531
|
+
res.end(`Empty body`);
|
|
1532
|
+
return;
|
|
1533
|
+
}
|
|
1534
|
+
if (!contentType) {
|
|
1535
|
+
res.writeHead(400, { "content-type": `text/plain` });
|
|
1536
|
+
res.end(`Content-Type header is required`);
|
|
1537
|
+
return;
|
|
1538
|
+
}
|
|
1539
|
+
const message = await Promise.resolve(this.store.append(path, body, {
|
|
1540
|
+
seq,
|
|
1541
|
+
contentType
|
|
1542
|
+
}));
|
|
1543
|
+
res.writeHead(200, { [STREAM_OFFSET_HEADER]: message.offset });
|
|
1544
|
+
res.end();
|
|
1545
|
+
}
|
|
1546
|
+
/**
|
|
1547
|
+
* Handle DELETE - delete stream
|
|
1548
|
+
*/
|
|
1549
|
+
async handleDelete(path, res) {
|
|
1550
|
+
if (!this.store.has(path)) {
|
|
1551
|
+
res.writeHead(404, { "content-type": `text/plain` });
|
|
1552
|
+
res.end(`Stream not found`);
|
|
1553
|
+
return;
|
|
1554
|
+
}
|
|
1555
|
+
this.store.delete(path);
|
|
1556
|
+
if (this.options.onStreamDeleted) await Promise.resolve(this.options.onStreamDeleted({
|
|
1557
|
+
type: `deleted`,
|
|
1558
|
+
path,
|
|
1559
|
+
timestamp: Date.now()
|
|
1560
|
+
}));
|
|
1561
|
+
res.writeHead(204);
|
|
1562
|
+
res.end();
|
|
1563
|
+
}
|
|
1564
|
+
/**
|
|
1565
|
+
* Handle test control endpoints for error injection.
|
|
1566
|
+
* POST /_test/inject-error - inject an error
|
|
1567
|
+
* DELETE /_test/inject-error - clear all injected errors
|
|
1568
|
+
*/
|
|
1569
|
+
async handleTestInjectError(method, req, res) {
|
|
1570
|
+
if (method === `POST`) {
|
|
1571
|
+
const body = await this.readBody(req);
|
|
1572
|
+
try {
|
|
1573
|
+
const config = JSON.parse(new TextDecoder().decode(body));
|
|
1574
|
+
if (!config.path || !config.status) {
|
|
1575
|
+
res.writeHead(400, { "content-type": `text/plain` });
|
|
1576
|
+
res.end(`Missing required fields: path, status`);
|
|
1577
|
+
return;
|
|
1578
|
+
}
|
|
1579
|
+
this.injectError(config.path, config.status, config.count ?? 1, config.retryAfter);
|
|
1580
|
+
res.writeHead(200, { "content-type": `application/json` });
|
|
1581
|
+
res.end(JSON.stringify({ ok: true }));
|
|
1582
|
+
} catch {
|
|
1583
|
+
res.writeHead(400, { "content-type": `text/plain` });
|
|
1584
|
+
res.end(`Invalid JSON body`);
|
|
1585
|
+
}
|
|
1586
|
+
} else if (method === `DELETE`) {
|
|
1587
|
+
this.clearInjectedErrors();
|
|
1588
|
+
res.writeHead(200, { "content-type": `application/json` });
|
|
1589
|
+
res.end(JSON.stringify({ ok: true }));
|
|
1590
|
+
} else {
|
|
1591
|
+
res.writeHead(405, { "content-type": `text/plain` });
|
|
1592
|
+
res.end(`Method not allowed`);
|
|
1593
|
+
}
|
|
1594
|
+
}
|
|
1595
|
+
readBody(req) {
|
|
1596
|
+
return new Promise((resolve, reject) => {
|
|
1597
|
+
const chunks = [];
|
|
1598
|
+
req.on(`data`, (chunk) => {
|
|
1599
|
+
chunks.push(chunk);
|
|
1600
|
+
});
|
|
1601
|
+
req.on(`end`, () => {
|
|
1602
|
+
const body = Buffer.concat(chunks);
|
|
1603
|
+
resolve(new Uint8Array(body));
|
|
1604
|
+
});
|
|
1605
|
+
req.on(`error`, reject);
|
|
1606
|
+
});
|
|
1607
|
+
}
|
|
1608
|
+
};
|
|
1609
|
+
|
|
1610
|
+
//#endregion
|
|
1611
|
+
//#region src/registry-hook.ts
|
|
1612
|
+
const REGISTRY_PATH = `/v1/stream/__registry__`;
|
|
1613
|
+
const streamMetadataSchema = { "~standard": {
|
|
1614
|
+
version: 1,
|
|
1615
|
+
vendor: `durable-streams`,
|
|
1616
|
+
validate: (value) => {
|
|
1617
|
+
if (typeof value !== `object` || value === null) return { issues: [{ message: `value must be an object` }] };
|
|
1618
|
+
const data = value;
|
|
1619
|
+
if (typeof data.path !== `string` || data.path.length === 0) return { issues: [{ message: `path must be a non-empty string` }] };
|
|
1620
|
+
if (typeof data.contentType !== `string` || data.contentType.length === 0) return { issues: [{ message: `contentType must be a non-empty string` }] };
|
|
1621
|
+
if (typeof data.createdAt !== `number`) return { issues: [{ message: `createdAt must be a number` }] };
|
|
1622
|
+
return { value: data };
|
|
1623
|
+
}
|
|
1624
|
+
} };
|
|
1625
|
+
const registryStateSchema = (0, __durable_streams_state.createStateSchema)({ streams: {
|
|
1626
|
+
schema: streamMetadataSchema,
|
|
1627
|
+
type: `stream`,
|
|
1628
|
+
primaryKey: `path`
|
|
1629
|
+
} });
|
|
1630
|
+
/**
|
|
1631
|
+
* Creates lifecycle hooks that write to a __registry__ stream.
|
|
1632
|
+
* Any client can read this stream to discover all streams and their lifecycle events.
|
|
1633
|
+
*/
|
|
1634
|
+
function createRegistryHooks(store, serverUrl) {
|
|
1635
|
+
const registryStream = new __durable_streams_client.DurableStream({
|
|
1636
|
+
url: `${serverUrl}${REGISTRY_PATH}`,
|
|
1637
|
+
contentType: `application/json`
|
|
1638
|
+
});
|
|
1639
|
+
const ensureRegistryExists = async () => {
|
|
1640
|
+
if (!store.has(REGISTRY_PATH)) await __durable_streams_client.DurableStream.create({
|
|
1641
|
+
url: `${serverUrl}${REGISTRY_PATH}`,
|
|
1642
|
+
contentType: `application/json`
|
|
1643
|
+
});
|
|
1644
|
+
};
|
|
1645
|
+
const extractStreamName = (fullPath) => {
|
|
1646
|
+
return fullPath.replace(/^\/v1\/stream\//, ``);
|
|
1647
|
+
};
|
|
1648
|
+
return {
|
|
1649
|
+
onStreamCreated: async (event) => {
|
|
1650
|
+
await ensureRegistryExists();
|
|
1651
|
+
const streamName = extractStreamName(event.path);
|
|
1652
|
+
const changeEvent = registryStateSchema.streams.insert({
|
|
1653
|
+
key: streamName,
|
|
1654
|
+
value: {
|
|
1655
|
+
path: streamName,
|
|
1656
|
+
contentType: event.contentType || `application/octet-stream`,
|
|
1657
|
+
createdAt: event.timestamp
|
|
1658
|
+
}
|
|
1659
|
+
});
|
|
1660
|
+
await registryStream.append(changeEvent);
|
|
1661
|
+
},
|
|
1662
|
+
onStreamDeleted: async (event) => {
|
|
1663
|
+
await ensureRegistryExists();
|
|
1664
|
+
const streamName = extractStreamName(event.path);
|
|
1665
|
+
const changeEvent = registryStateSchema.streams.delete({ key: streamName });
|
|
1666
|
+
await registryStream.append(changeEvent);
|
|
1667
|
+
}
|
|
1668
|
+
};
|
|
1669
|
+
}
|
|
1670
|
+
|
|
1671
|
+
//#endregion
|
|
1672
|
+
exports.DEFAULT_CURSOR_EPOCH = DEFAULT_CURSOR_EPOCH
|
|
1673
|
+
exports.DEFAULT_CURSOR_INTERVAL_SECONDS = DEFAULT_CURSOR_INTERVAL_SECONDS
|
|
1674
|
+
exports.DurableStreamTestServer = DurableStreamTestServer
|
|
1675
|
+
exports.FileBackedStreamStore = FileBackedStreamStore
|
|
1676
|
+
exports.StreamStore = StreamStore
|
|
1677
|
+
exports.calculateCursor = calculateCursor
|
|
1678
|
+
exports.createRegistryHooks = createRegistryHooks
|
|
1679
|
+
exports.decodeStreamPath = decodeStreamPath
|
|
1680
|
+
exports.encodeStreamPath = encodeStreamPath
|
|
1681
|
+
exports.generateResponseCursor = generateResponseCursor
|
|
1682
|
+
exports.handleCursorCollision = handleCursorCollision
|