@durable-streams/client 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1875 -0
- package/dist/index.d.cts +1072 -0
- package/package.json +8 -8
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1875 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
//#region rolldown:runtime
|
|
3
|
+
var __create = Object.create;
|
|
4
|
+
var __defProp = Object.defineProperty;
|
|
5
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
6
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
7
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
8
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
9
|
+
// Copy every own property of `from` onto `to` as a live getter, skipping the
// `except` key and keys `to` already owns. Enumerability mirrors the source
// descriptor. Generated rolldown CJS-interop runtime; the exact descriptor
// semantics are interop-sensitive, so the code is left unchanged.
var __copyProps = (to, from, except, desc) => {
	if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
		key = keys[i];
		if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
			// bind(null, key) snapshots the key for this getter
			get: ((k) => from[k]).bind(null, key),
			enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
		});
	}
	return to;
};
|
|
19
|
+
// Wrap a CJS module for ESM `import` semantics: modules without __esModule
// (or when isNodeMode is set) get a `default` property pointing at the module
// object, and all named exports are re-exposed via __copyProps. Generated
// rolldown interop runtime.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
	value: mod,
	enumerable: true
}) : target, mod));
|
|
23
|
+
|
|
24
|
+
//#endregion
|
|
25
|
+
const fastq = __toESM(require("fastq"));
|
|
26
|
+
|
|
27
|
+
//#region src/constants.ts
|
|
28
|
+
/**
 * Durable Streams Protocol Constants
 *
 * Header and query parameter names following the Electric Durable Stream Protocol.
 */
/**
 * Response header containing the next offset to read from.
 * Offsets are opaque tokens - clients MUST NOT interpret the format.
 */
const STREAM_OFFSET_HEADER = `Stream-Next-Offset`;
/**
 * Response header for cursor (used for CDN collapsing).
 * Echo this value in subsequent long-poll requests.
 */
const STREAM_CURSOR_HEADER = `Stream-Cursor`;
/**
 * Presence header indicating response ends at current end of stream.
 * When present (any value), indicates up-to-date.
 */
const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
/**
 * Request header for writer coordination sequence.
 * Monotonic, lexicographic. If lower than last appended seq -> 409 Conflict.
 */
const STREAM_SEQ_HEADER = `Stream-Seq`;
/**
 * Request header for stream TTL in seconds (on create).
 */
const STREAM_TTL_HEADER = `Stream-TTL`;
/**
 * Request header for absolute stream expiry time (RFC3339, on create).
 */
const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
/**
 * Query parameter for starting offset.
 */
const OFFSET_QUERY_PARAM = `offset`;
/**
 * Query parameter for live mode.
 * Values: "long-poll", "sse"
 */
const LIVE_QUERY_PARAM = `live`;
/**
 * Query parameter for echoing cursor (CDN collapsing).
 */
const CURSOR_QUERY_PARAM = `cursor`;
/**
 * Content types that support SSE mode.
 * SSE is only valid for text/* or application/json streams.
 * NOTE(review): whether `text/` is prefix-matched or substring-matched is
 * decided by consumers of this list — confirm before reusing elsewhere.
 */
const SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
/**
 * Protocol query parameters that should not be set by users.
 */
const DURABLE_STREAM_PROTOCOL_QUERY_PARAMS = [
	OFFSET_QUERY_PARAM,
	LIVE_QUERY_PARAM,
	CURSOR_QUERY_PARAM
];
|
|
87
|
+
|
|
88
|
+
//#endregion
|
|
89
|
+
//#region src/error.ts
|
|
90
|
+
/**
 * Error thrown for transport/network errors.
 * Following the @electric-sql/client FetchError pattern.
 */
var FetchError = class FetchError extends Error {
	// HTTP response status code
	status;
	// Raw body text (set when the body was not parsed as JSON)
	text;
	// Body parsed as JSON (set when content-type indicated JSON and parsing succeeded)
	json;
	// Response headers as a plain object (keys as produced by Headers.entries())
	headers;
	/**
	 * @param status - HTTP status code
	 * @param text - raw body text (mutually exclusive with `json`)
	 * @param json - parsed JSON body (mutually exclusive with `text`)
	 * @param headers - response headers as a plain object
	 * @param url - request URL; also assigned to `this.url`
	 * @param message - optional message override; defaults to a summary built
	 *   from status/url/body
	 */
	constructor(status, text, json, headers, url, message) {
		super(message || `HTTP Error ${status} at ${url}: ${text ?? JSON.stringify(json)}`);
		this.url = url;
		this.name = `FetchError`;
		this.status = status;
		this.text = text;
		this.json = json;
		this.headers = headers;
	}
	/**
	 * Build a FetchError from a fetch Response, capturing status, headers and
	 * (if the body is still unread) the body as JSON or text.
	 */
	static async fromResponse(response, url) {
		const status = response.status;
		const headers = Object.fromEntries([...response.headers.entries()]);
		let text = void 0;
		let json = void 0;
		const contentType = response.headers.get(`content-type`);
		// Only touch the body if nothing has consumed it yet.
		// NOTE(review): if response.json() consumes the body before rejecting,
		// the fallback text() call may itself reject — confirm against the
		// supported runtimes.
		if (!response.bodyUsed) if (contentType && contentType.includes(`application/json`)) try {
			json = await response.json();
		} catch {
			text = await response.text();
		}
		else text = await response.text();
		return new FetchError(status, text, json, headers, url);
	}
};
|
|
123
|
+
/**
 * Error thrown when a fetch operation is aborted during backoff.
 */
var FetchBackoffAbortError = class extends Error {
	// Class field runs right after super(), matching the original
	// constructor assignment.
	name = `FetchBackoffAbortError`;
	constructor() {
		super(`Fetch with backoff aborted`);
	}
};
|
|
132
|
+
/**
 * Protocol-level error for Durable Streams operations.
 * Provides structured error handling with error codes.
 */
var DurableStreamError = class DurableStreamError extends Error {
	/**
	 * HTTP status code, if applicable.
	 */
	status;
	/**
	 * Structured error code for programmatic handling.
	 */
	code;
	/**
	 * Additional error details (e.g., raw response body).
	 */
	details;
	/**
	 * @param message - human-readable description
	 * @param code - structured error code (see statusToCode)
	 * @param status - optional HTTP status
	 * @param details - optional raw body / extra context
	 */
	constructor(message, code, status, details) {
		super(message);
		this.name = `DurableStreamError`;
		this.code = code;
		this.status = status;
		this.details = details;
	}
	/**
	 * Create a DurableStreamError from an HTTP response.
	 * Captures the body (JSON when possible, text otherwise) into `details`.
	 */
	static async fromResponse(response, url) {
		const status = response.status;
		let details;
		const contentType = response.headers.get(`content-type`);
		// Only read the body if it has not been consumed yet.
		// NOTE(review): if response.json() consumes the body before rejecting,
		// the fallback text() call may itself reject — confirm against the
		// supported runtimes.
		if (!response.bodyUsed) if (contentType && contentType.includes(`application/json`)) try {
			details = await response.json();
		} catch {
			details = await response.text();
		}
		else details = await response.text();
		const code = statusToCode(status);
		const message = `Durable stream error at ${url}: ${response.statusText || status}`;
		return new DurableStreamError(message, code, status, details);
	}
	/**
	 * Create a DurableStreamError from a FetchError.
	 * Prefers the parsed JSON body over raw text for `details`.
	 */
	static fromFetchError(error) {
		const code = statusToCode(error.status);
		return new DurableStreamError(error.message, code, error.status, error.json ?? error.text);
	}
};
|
|
181
|
+
/**
 * Map HTTP status codes to DurableStreamErrorCode.
 * Unrecognized statuses map to `UNKNOWN`.
 */
function statusToCode(status) {
	// Map.get uses SameValueZero, so only numeric statuses match —
	// identical strictness to the original switch.
	const codeByStatus = new Map([
		[400, `BAD_REQUEST`],
		[401, `UNAUTHORIZED`],
		[403, `FORBIDDEN`],
		[404, `NOT_FOUND`],
		[409, `CONFLICT_SEQ`],
		[429, `RATE_LIMITED`],
		[503, `BUSY`]
	]);
	return codeByStatus.get(status) ?? `UNKNOWN`;
}
|
|
196
|
+
/**
 * Error thrown when stream URL is missing.
 */
var MissingStreamUrlError = class extends Error {
	name = `MissingStreamUrlError`;
	constructor() {
		super(`Invalid stream options: missing required url parameter`);
	}
};
|
|
205
|
+
/**
 * Error thrown when signal option is invalid.
 */
var InvalidSignalError = class extends Error {
	name = `InvalidSignalError`;
	constructor() {
		super(`Invalid signal option. It must be an instance of AbortSignal.`);
	}
};
|
|
214
|
+
|
|
215
|
+
//#endregion
|
|
216
|
+
//#region src/fetch.ts
|
|
217
|
+
/**
 * HTTP status codes that should be retried.
 * 429 Too Many Requests and 503 Service Unavailable.
 */
const HTTP_RETRY_STATUS_CODES = [429, 503];
/**
 * Default backoff options.
 */
const BackoffDefaults = {
	initialDelay: 100, // first backoff window, in ms
	maxDelay: 6e4, // cap on the backoff window (60s)
	multiplier: 1.3, // geometric growth factor applied after each attempt
	maxRetries: Infinity // retry forever by default
};
|
|
230
|
+
/**
 * Parse Retry-After header value and return delay in milliseconds.
 * Supports both delta-seconds format and HTTP-date format (RFC 9110).
 * Returns 0 if the header is absent, invalid, zero, negative, or a date
 * in the past. HTTP-date delays are capped at one hour (36e5 ms).
 */
function parseRetryAfterHeader(retryAfter) {
	if (!retryAfter) return 0;
	const retryAfterSec = Number(retryAfter);
	// Short-circuit ALL finite numeric values here (including 0 and
	// negatives) so numeric strings never reach Date.parse below —
	// engines parse bare digits like "0" as a year, which would make the
	// result engine-dependent.
	if (Number.isFinite(retryAfterSec)) return retryAfterSec > 0 ? retryAfterSec * 1e3 : 0;
	const retryDate = Date.parse(retryAfter);
	if (!isNaN(retryDate)) {
		const deltaMs = retryDate - Date.now();
		// Clamp to [0, 1h]: ignore past dates and absurd server values.
		return Math.max(0, Math.min(deltaMs, 36e5));
	}
	return 0;
}
|
|
246
|
+
/**
 * Creates a fetch client that retries failed requests with exponential backoff.
 *
 * Retry policy:
 * - non-ok responses are converted to FetchError and retried, EXCEPT 4xx
 *   statuses outside HTTP_RETRY_STATUS_CODES (429/503), which are rethrown;
 * - the wait is max(server Retry-After, random jitter in [0, delay))
 *   ("full jitter"), with `delay` growing geometrically up to maxDelay;
 * - aborting via options.signal throws FetchBackoffAbortError.
 *
 * @param fetchClient - The base fetch client to wrap
 * @param backoffOptions - Options for retry behavior
 * @returns A fetch function with automatic retry
 */
function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
	const { initialDelay, maxDelay, multiplier, debug = false, onFailedAttempt, maxRetries = Infinity } = backoffOptions;
	return async (...args) => {
		const url = args[0];
		const options = args[1];
		let delay = initialDelay;
		let attempt = 0;
		while (true) try {
			const result = await fetchClient(...args);
			if (result.ok) return result;
			// Non-ok responses are surfaced as FetchError so the catch block
			// can decide between retry and rethrow.
			const err = await FetchError.fromResponse(result, url.toString());
			throw err;
		} catch (e) {
			// Order matters: notify, then honor abort, then classify the error.
			onFailedAttempt?.();
			if (options?.signal?.aborted) throw new FetchBackoffAbortError();
			else if (e instanceof FetchError && !HTTP_RETRY_STATUS_CODES.includes(e.status) && e.status >= 400 && e.status < 500) throw e;
			else {
				attempt++;
				if (attempt > maxRetries) {
					if (debug) console.log(`Max retries reached (${attempt}/${maxRetries}), giving up`);
					throw e;
				}
				// Server-mandated minimum wait (Retry-After), 0 when absent.
				const serverMinimumMs = e instanceof FetchError ? parseRetryAfterHeader(e.headers[`retry-after`]) : 0;
				// Full-jitter client backoff: uniform in [0, delay), capped.
				const jitter = Math.random() * delay;
				const clientBackoffMs = Math.min(jitter, maxDelay);
				const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
				if (debug) {
					const source = serverMinimumMs > 0 ? `server+client` : `client`;
					console.log(`Retry attempt #${attempt} after ${waitMs}ms (${source}, serverMin=${serverMinimumMs}ms, clientBackoff=${clientBackoffMs}ms)`);
				}
				await new Promise((resolve) => setTimeout(resolve, waitMs));
				// Grow the jitter window geometrically for the next attempt.
				delay = Math.min(delay * multiplier, maxDelay);
			}
		}
	};
}
|
|
289
|
+
/**
 * Status codes where we shouldn't try to read the body.
 * 201 Created, 204 No Content, 205 Reset Content.
 * NOTE(review): 304 Not Modified is also bodiless per RFC 9110 but is not
 * listed, while 201 responses MAY carry a body — confirm both choices are
 * intentional for this protocol.
 */
const NO_BODY_STATUS_CODES = [
	201,
	204,
	205
];
|
|
297
|
+
/**
 * Creates a fetch client that ensures the response body is fully consumed.
 * This prevents issues with connection pooling when bodies aren't read.
 *
 * Uses arrayBuffer() instead of text() to preserve binary data integrity.
 *
 * @param fetchClient - The base fetch client to wrap
 * @returns A fetch function that consumes response bodies
 */
function createFetchWithConsumedBody(fetchClient) {
	return async (...fetchArgs) => {
		const requestUrl = fetchArgs[0];
		const response = await fetchClient(...fetchArgs);
		try {
			// 1xx and known bodiless statuses: hand the response back untouched.
			const skipBody = response.status < 200 || NO_BODY_STATUS_CODES.includes(response.status);
			if (skipBody) return response;
			// Drain the body fully, then rebuild an equivalent Response around
			// the buffered bytes.
			const bodyBuffer = await response.arrayBuffer();
			return new Response(bodyBuffer, {
				status: response.status,
				statusText: response.statusText,
				headers: response.headers
			});
		} catch (readError) {
			if (fetchArgs[1]?.signal?.aborted) throw new FetchBackoffAbortError();
			const reason = readError instanceof Error ? readError.message : typeof readError === `string` ? readError : `failed to read body`;
			throw new FetchError(response.status, void 0, void 0, Object.fromEntries([...response.headers.entries()]), requestUrl.toString(), reason);
		}
	};
}
|
|
324
|
+
|
|
325
|
+
//#endregion
|
|
326
|
+
//#region src/asyncIterableReadableStream.ts
|
|
327
|
+
/**
 * Check if a value has Symbol.asyncIterator defined.
 */
function hasAsyncIterator(stream$1) {
	if (typeof Symbol === `undefined`) return false;
	if (typeof Symbol.asyncIterator !== `symbol`) return false;
	return typeof stream$1[Symbol.asyncIterator] === `function`;
}
|
|
333
|
+
/**
 * Define [Symbol.asyncIterator] and .values() on a ReadableStream instance.
 *
 * Uses getReader().read() to implement spec-consistent iteration.
 * On completion or early exit (break/return/throw), releases lock and cancels as appropriate.
 *
 * **Iterator behavior notes:**
 * - `return(value?)` accepts an optional cancellation reason passed to `reader.cancel()`
 * - `return()` always resolves with `{ done: true, value: undefined }` regardless of the
 *   input value. This matches `for await...of` semantics where the return value is ignored.
 *   Manual iteration users should be aware of this behavior.
 */
function defineAsyncIterator(stream$1) {
	// Environment lacks async-iterator support: nothing to do.
	if (typeof Symbol === `undefined` || typeof Symbol.asyncIterator !== `symbol`) return;
	// Already iterable (native or polyfilled): leave it alone.
	if (typeof stream$1[Symbol.asyncIterator] === `function`) return;
	// Plain function: `this` is the stream the iterator is invoked on.
	const createIterator = function() {
		const reader = this.getReader();
		let finished = false;
		// Guards return()/throw() against closing the reader mid-read.
		let pendingReads = 0;
		const iterator = {
			async next() {
				if (finished) return {
					done: true,
					value: void 0
				};
				pendingReads++;
				try {
					const { value, done } = await reader.read();
					if (done) {
						// Stream exhausted: release the lock so the stream is usable again.
						finished = true;
						reader.releaseLock();
						return {
							done: true,
							value: void 0
						};
					}
					return {
						done: false,
						value
					};
				} catch (err) {
					// Read failed: mark finished and best-effort release before rethrowing.
					finished = true;
					try {
						reader.releaseLock();
					} catch {}
					throw err;
				} finally {
					pendingReads--;
				}
			},
			// Early exit (e.g. `break` in for-await): cancel then release.
			async return(value) {
				if (pendingReads > 0) throw new TypeError(`Cannot close a readable stream reader when it has pending read requests`);
				finished = true;
				// cancel() first, releaseLock() immediately after, then await —
				// releasing before the cancel settles is permitted by the spec.
				const cancelPromise = reader.cancel(value);
				reader.releaseLock();
				await cancelPromise;
				return {
					done: true,
					value: void 0
				};
			},
			// Abnormal exit: cancel with the error as reason, then rethrow it.
			async throw(err) {
				if (pendingReads > 0) throw new TypeError(`Cannot close a readable stream reader when it has pending read requests`);
				finished = true;
				const cancelPromise = reader.cancel(err);
				reader.releaseLock();
				await cancelPromise;
				throw err;
			},
			[Symbol.asyncIterator]() {
				return this;
			}
		};
		return iterator;
	};
	try {
		Object.defineProperty(stream$1, Symbol.asyncIterator, {
			configurable: true,
			writable: true,
			value: createIterator
		});
	} catch {
		// Instance is non-extensible/frozen: skip `values` too.
		return;
	}
	try {
		// Mirror the spec's ReadableStream.prototype.values alias; best effort.
		Object.defineProperty(stream$1, `values`, {
			configurable: true,
			writable: true,
			value: createIterator
		});
	} catch {}
}
|
|
425
|
+
/**
 * Ensure a ReadableStream is async-iterable.
 *
 * Streams that already expose [Symbol.asyncIterator] (native or polyfilled)
 * pass through untouched; otherwise the iterator is defined directly on the
 * instance (not the prototype). The same instance is always returned, so
 * `instanceof ReadableStream` and native branding/internal slots are
 * preserved.
 *
 * @example
 * ```typescript
 * const stream = someApiReturningReadableStream();
 * const iterableStream = asAsyncIterableReadableStream(stream);
 *
 * // Now works on Safari/iOS:
 * for await (const chunk of iterableStream) {
 *   console.log(chunk);
 * }
 * ```
 */
function asAsyncIterableReadableStream(stream$1) {
	if (hasAsyncIterator(stream$1)) return stream$1;
	defineAsyncIterator(stream$1);
	return stream$1;
}
|
|
451
|
+
|
|
452
|
+
//#endregion
|
|
453
|
+
//#region src/sse.ts
|
|
454
|
+
/**
 * Parse SSE events from a ReadableStream<Uint8Array>.
 * Yields parsed events as they arrive.
 *
 * Accepts both LF and CRLF line terminators (the SSE specification permits
 * CR LF; the previous implementation only split on "\n", so CRLF streams
 * never hit the blank-line dispatch).
 *
 * Event types:
 * - `data`: payload lines joined with "\n", yielded verbatim
 * - `control`: payload parsed as JSON carrying streamNextOffset /
 *   streamCursor / upToDate; malformed control JSON is silently dropped
 */
async function* parseSSEStream(stream$1, signal) {
	const reader = stream$1.getReader();
	const decoder = new TextDecoder();
	let buffer = ``;
	let currentEvent = { data: [] };
	// Build the event object for the accumulated field lines, or null when
	// the pending event is empty or of an unrecognized type. Shared by the
	// main dispatch loop and the end-of-stream flush (previously duplicated).
	const buildEvent = () => {
		if (!currentEvent.type || currentEvent.data.length === 0) return null;
		const dataStr = currentEvent.data.join(`\n`);
		if (currentEvent.type === `data`) return {
			type: `data`,
			data: dataStr
		};
		if (currentEvent.type === `control`) try {
			const control = JSON.parse(dataStr);
			return {
				type: `control`,
				streamNextOffset: control.streamNextOffset,
				streamCursor: control.streamCursor,
				upToDate: control.upToDate
			};
		} catch {}
		return null;
	};
	try {
		while (true) {
			if (signal?.aborted) break;
			const { done, value } = await reader.read();
			if (done) break;
			buffer += decoder.decode(value, { stream: true });
			const rawLines = buffer.split(`\n`);
			// Last element may be a partial line; keep it for the next chunk.
			buffer = rawLines.pop() ?? ``;
			for (const rawLine of rawLines) {
				// Strip a trailing CR so CRLF-terminated streams parse the same
				// as LF-terminated ones.
				const line = rawLine.endsWith(`\r`) ? rawLine.slice(0, -1) : rawLine;
				if (line === ``) {
					// Blank line terminates the pending event: dispatch it.
					const event = buildEvent();
					if (event) yield event;
					currentEvent = { data: [] };
				} else if (line.startsWith(`event:`)) currentEvent.type = line.slice(6).trim();
				else if (line.startsWith(`data:`)) {
					// Per spec, a single leading space after the colon is not payload.
					const content = line.slice(5);
					currentEvent.data.push(content.startsWith(` `) ? content.slice(1) : content);
				}
			}
		}
		// Flush any bytes still buffered in the decoder, then dispatch a final
		// unterminated event if one is pending.
		const remaining = decoder.decode();
		if (remaining) buffer += remaining;
		if (buffer) {
			const event = buildEvent();
			if (event) yield event;
		}
	} finally {
		reader.releaseLock();
	}
}
|
|
517
|
+
|
|
518
|
+
//#endregion
|
|
519
|
+
//#region src/response.ts
|
|
520
|
+
/**
|
|
521
|
+
* Implementation of the StreamResponse interface.
|
|
522
|
+
*/
|
|
523
|
+
var StreamResponseImpl = class {
|
|
524
|
+
url;
|
|
525
|
+
contentType;
|
|
526
|
+
live;
|
|
527
|
+
startOffset;
|
|
528
|
+
#headers;
|
|
529
|
+
#status;
|
|
530
|
+
#statusText;
|
|
531
|
+
#ok;
|
|
532
|
+
#isLoading;
|
|
533
|
+
offset;
|
|
534
|
+
cursor;
|
|
535
|
+
upToDate;
|
|
536
|
+
#isJsonMode;
|
|
537
|
+
#abortController;
|
|
538
|
+
#fetchNext;
|
|
539
|
+
#startSSE;
|
|
540
|
+
#closedResolve;
|
|
541
|
+
#closedReject;
|
|
542
|
+
#closed;
|
|
543
|
+
#stopAfterUpToDate = false;
|
|
544
|
+
#consumptionMethod = null;
|
|
545
|
+
#sseResilience;
|
|
546
|
+
#lastSSEConnectionStartTime;
|
|
547
|
+
#consecutiveShortSSEConnections = 0;
|
|
548
|
+
#sseFallbackToLongPoll = false;
|
|
549
|
+
#responseStream;
|
|
550
|
+
constructor(config) {
|
|
551
|
+
this.url = config.url;
|
|
552
|
+
this.contentType = config.contentType;
|
|
553
|
+
this.live = config.live;
|
|
554
|
+
this.startOffset = config.startOffset;
|
|
555
|
+
this.offset = config.initialOffset;
|
|
556
|
+
this.cursor = config.initialCursor;
|
|
557
|
+
this.upToDate = config.initialUpToDate;
|
|
558
|
+
this.#headers = config.firstResponse.headers;
|
|
559
|
+
this.#status = config.firstResponse.status;
|
|
560
|
+
this.#statusText = config.firstResponse.statusText;
|
|
561
|
+
this.#ok = config.firstResponse.ok;
|
|
562
|
+
this.#isLoading = false;
|
|
563
|
+
this.#isJsonMode = config.isJsonMode;
|
|
564
|
+
this.#abortController = config.abortController;
|
|
565
|
+
this.#fetchNext = config.fetchNext;
|
|
566
|
+
this.#startSSE = config.startSSE;
|
|
567
|
+
this.#sseResilience = {
|
|
568
|
+
minConnectionDuration: config.sseResilience?.minConnectionDuration ?? 1e3,
|
|
569
|
+
maxShortConnections: config.sseResilience?.maxShortConnections ?? 3,
|
|
570
|
+
backoffBaseDelay: config.sseResilience?.backoffBaseDelay ?? 100,
|
|
571
|
+
backoffMaxDelay: config.sseResilience?.backoffMaxDelay ?? 5e3,
|
|
572
|
+
logWarnings: config.sseResilience?.logWarnings ?? true
|
|
573
|
+
};
|
|
574
|
+
this.#closed = new Promise((resolve, reject) => {
|
|
575
|
+
this.#closedResolve = resolve;
|
|
576
|
+
this.#closedReject = reject;
|
|
577
|
+
});
|
|
578
|
+
this.#responseStream = this.#createResponseStream(config.firstResponse);
|
|
579
|
+
}
|
|
580
|
+
get headers() {
|
|
581
|
+
return this.#headers;
|
|
582
|
+
}
|
|
583
|
+
get status() {
|
|
584
|
+
return this.#status;
|
|
585
|
+
}
|
|
586
|
+
get statusText() {
|
|
587
|
+
return this.#statusText;
|
|
588
|
+
}
|
|
589
|
+
get ok() {
|
|
590
|
+
return this.#ok;
|
|
591
|
+
}
|
|
592
|
+
get isLoading() {
|
|
593
|
+
return this.#isLoading;
|
|
594
|
+
}
|
|
595
|
+
#ensureJsonMode() {
|
|
596
|
+
if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
|
|
597
|
+
}
|
|
598
|
+
#markClosed() {
|
|
599
|
+
this.#closedResolve();
|
|
600
|
+
}
|
|
601
|
+
#markError(err) {
|
|
602
|
+
this.#closedReject(err);
|
|
603
|
+
}
|
|
604
|
+
/**
|
|
605
|
+
* Ensure only one consumption method is used per StreamResponse.
|
|
606
|
+
* Throws if any consumption method was already called.
|
|
607
|
+
*/
|
|
608
|
+
#ensureNoConsumption(method) {
|
|
609
|
+
if (this.#consumptionMethod !== null) throw new DurableStreamError(`Cannot call ${method}() - this StreamResponse is already being consumed via ${this.#consumptionMethod}()`, `ALREADY_CONSUMED`);
|
|
610
|
+
this.#consumptionMethod = method;
|
|
611
|
+
}
|
|
612
|
+
/**
|
|
613
|
+
* Determine if we should continue with live updates based on live mode
|
|
614
|
+
* and whether we've received upToDate.
|
|
615
|
+
*/
|
|
616
|
+
#shouldContinueLive() {
|
|
617
|
+
if (this.#stopAfterUpToDate && this.upToDate) return false;
|
|
618
|
+
if (this.live === false) return false;
|
|
619
|
+
return true;
|
|
620
|
+
}
|
|
621
|
+
/**
|
|
622
|
+
* Update state from response headers.
|
|
623
|
+
*/
|
|
624
|
+
#updateStateFromResponse(response) {
|
|
625
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER);
|
|
626
|
+
if (offset) this.offset = offset;
|
|
627
|
+
const cursor = response.headers.get(STREAM_CURSOR_HEADER);
|
|
628
|
+
if (cursor) this.cursor = cursor;
|
|
629
|
+
this.upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
630
|
+
this.#headers = response.headers;
|
|
631
|
+
this.#status = response.status;
|
|
632
|
+
this.#statusText = response.statusText;
|
|
633
|
+
this.#ok = response.ok;
|
|
634
|
+
}
|
|
635
|
+
/**
|
|
636
|
+
* Extract stream metadata from Response headers.
|
|
637
|
+
* Used by subscriber APIs to get the correct offset/cursor/upToDate for each
|
|
638
|
+
* specific Response, rather than reading from `this` which may be stale due to
|
|
639
|
+
* ReadableStream prefetching or timing issues.
|
|
640
|
+
*/
|
|
641
|
+
#getMetadataFromResponse(response) {
|
|
642
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER);
|
|
643
|
+
const cursor = response.headers.get(STREAM_CURSOR_HEADER);
|
|
644
|
+
const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
645
|
+
return {
|
|
646
|
+
offset: offset ?? this.offset,
|
|
647
|
+
cursor: cursor ?? this.cursor,
|
|
648
|
+
upToDate
|
|
649
|
+
};
|
|
650
|
+
}
|
|
651
|
+
/**
|
|
652
|
+
* Create a synthetic Response from SSE data with proper headers.
|
|
653
|
+
* Includes offset/cursor/upToDate in headers so subscribers can read them.
|
|
654
|
+
*/
|
|
655
|
+
#createSSESyntheticResponse(data, offset, cursor, upToDate) {
|
|
656
|
+
const headers = {
|
|
657
|
+
"content-type": this.contentType ?? `application/json`,
|
|
658
|
+
[STREAM_OFFSET_HEADER]: String(offset)
|
|
659
|
+
};
|
|
660
|
+
if (cursor) headers[STREAM_CURSOR_HEADER] = cursor;
|
|
661
|
+
if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
|
|
662
|
+
return new Response(data, {
|
|
663
|
+
status: 200,
|
|
664
|
+
headers
|
|
665
|
+
});
|
|
666
|
+
}
|
|
667
|
+
/**
|
|
668
|
+
* Update instance state from an SSE control event.
|
|
669
|
+
*/
|
|
670
|
+
#updateStateFromSSEControl(controlEvent) {
|
|
671
|
+
this.offset = controlEvent.streamNextOffset;
|
|
672
|
+
if (controlEvent.streamCursor) this.cursor = controlEvent.streamCursor;
|
|
673
|
+
if (controlEvent.upToDate !== void 0) this.upToDate = controlEvent.upToDate;
|
|
674
|
+
}
|
|
675
|
+
/**
|
|
676
|
+
* Mark the start of an SSE connection for duration tracking.
|
|
677
|
+
*/
|
|
678
|
+
#markSSEConnectionStart() {
|
|
679
|
+
this.#lastSSEConnectionStartTime = Date.now();
|
|
680
|
+
}
|
|
681
|
+
/**
|
|
682
|
+
* Handle SSE connection end - check duration and manage fallback state.
|
|
683
|
+
* Returns a delay to wait before reconnecting, or null if should not reconnect.
|
|
684
|
+
*/
|
|
685
|
+
async #handleSSEConnectionEnd() {
|
|
686
|
+
if (this.#lastSSEConnectionStartTime === void 0) return 0;
|
|
687
|
+
const connectionDuration = Date.now() - this.#lastSSEConnectionStartTime;
|
|
688
|
+
const wasAborted = this.#abortController.signal.aborted;
|
|
689
|
+
if (connectionDuration < this.#sseResilience.minConnectionDuration && !wasAborted) {
|
|
690
|
+
this.#consecutiveShortSSEConnections++;
|
|
691
|
+
if (this.#consecutiveShortSSEConnections >= this.#sseResilience.maxShortConnections) {
|
|
692
|
+
this.#sseFallbackToLongPoll = true;
|
|
693
|
+
if (this.#sseResilience.logWarnings) console.warn("[Durable Streams] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). Falling back to long polling. Your proxy must support streaming SSE responses (not buffer the complete response). Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy.");
|
|
694
|
+
return null;
|
|
695
|
+
} else {
|
|
696
|
+
const maxDelay = Math.min(this.#sseResilience.backoffMaxDelay, this.#sseResilience.backoffBaseDelay * Math.pow(2, this.#consecutiveShortSSEConnections));
|
|
697
|
+
const delayMs = Math.floor(Math.random() * maxDelay);
|
|
698
|
+
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
699
|
+
return delayMs;
|
|
700
|
+
}
|
|
701
|
+
} else if (connectionDuration >= this.#sseResilience.minConnectionDuration) this.#consecutiveShortSSEConnections = 0;
|
|
702
|
+
return 0;
|
|
703
|
+
}
|
|
704
|
+
/**
|
|
705
|
+
* Try to reconnect SSE and return the new iterator, or null if reconnection
|
|
706
|
+
* is not possible or fails.
|
|
707
|
+
*/
|
|
708
|
+
async #trySSEReconnect() {
|
|
709
|
+
if (this.#sseFallbackToLongPoll) return null;
|
|
710
|
+
if (!this.#shouldContinueLive() || !this.#startSSE) return null;
|
|
711
|
+
const delayOrNull = await this.#handleSSEConnectionEnd();
|
|
712
|
+
if (delayOrNull === null) return null;
|
|
713
|
+
this.#markSSEConnectionStart();
|
|
714
|
+
const newSSEResponse = await this.#startSSE(this.offset, this.cursor, this.#abortController.signal);
|
|
715
|
+
if (newSSEResponse.body) return parseSSEStream(newSSEResponse.body, this.#abortController.signal);
|
|
716
|
+
return null;
|
|
717
|
+
}
|
|
718
|
+
/**
|
|
719
|
+
* Process SSE events from the iterator.
|
|
720
|
+
* Returns an object indicating the result:
|
|
721
|
+
* - { type: 'response', response, newIterator? } - yield this response
|
|
722
|
+
* - { type: 'closed' } - stream should be closed
|
|
723
|
+
* - { type: 'error', error } - an error occurred
|
|
724
|
+
* - { type: 'continue', newIterator? } - continue processing (control-only event)
|
|
725
|
+
*/
|
|
726
|
+
async #processSSEEvents(sseEventIterator) {
|
|
727
|
+
const { done, value: event } = await sseEventIterator.next();
|
|
728
|
+
if (done) {
|
|
729
|
+
try {
|
|
730
|
+
const newIterator = await this.#trySSEReconnect();
|
|
731
|
+
if (newIterator) return {
|
|
732
|
+
type: `continue`,
|
|
733
|
+
newIterator
|
|
734
|
+
};
|
|
735
|
+
} catch (err) {
|
|
736
|
+
return {
|
|
737
|
+
type: `error`,
|
|
738
|
+
error: err instanceof Error ? err : new Error(`SSE reconnection failed`)
|
|
739
|
+
};
|
|
740
|
+
}
|
|
741
|
+
return { type: `closed` };
|
|
742
|
+
}
|
|
743
|
+
if (event.type === `data`) return this.#processSSEDataEvent(event.data, sseEventIterator);
|
|
744
|
+
this.#updateStateFromSSEControl(event);
|
|
745
|
+
return { type: `continue` };
|
|
746
|
+
}
|
|
747
|
+
/**
 * Process an SSE data event by waiting for its corresponding control event.
 * In SSE protocol, control events come AFTER data events.
 * Multiple data events may arrive before a single control event - we buffer
 * (concatenate) them into a single synthetic response payload.
 */
async #processSSEDataEvent(pendingData, sseEventIterator) {
  let bufferedData = pendingData;
  while (true) {
    const { done: controlDone, value: controlEvent } = await sseEventIterator.next();
    if (controlDone) {
      // Connection dropped before the control event arrived: emit what we
      // buffered using the last-known offset/cursor state, then try to
      // reconnect so the caller can keep pulling.
      const response = this.#createSSESyntheticResponse(bufferedData, this.offset, this.cursor, this.upToDate);
      try {
        const newIterator = await this.#trySSEReconnect();
        return {
          type: `response`,
          response,
          newIterator: newIterator ?? void 0
        };
      } catch (err) {
        return {
          type: `error`,
          error: err instanceof Error ? err : new Error(`SSE reconnection failed`)
        };
      }
    }
    if (controlEvent.type === `control`) {
      // The control event carries the authoritative next offset/cursor for
      // the buffered data; update state first, then synthesize the response.
      this.#updateStateFromSSEControl(controlEvent);
      const response = this.#createSSESyntheticResponse(bufferedData, controlEvent.streamNextOffset, controlEvent.streamCursor, controlEvent.upToDate ?? false);
      return {
        type: `response`,
        response
      };
    }
    // Another data event before the control event — append and keep waiting.
    bufferedData += controlEvent.data;
  }
}
|
|
783
|
+
/**
 * Create the core ReadableStream<Response> that yields responses.
 * This is consumed once - all consumption methods use this same stream.
 *
 * For long-poll mode: yields actual Response objects.
 * For SSE mode: yields synthetic Response objects created from SSE data events.
 *
 * The pull callback is re-entered by the runtime whenever the consumer wants
 * more data, so every branch must either enqueue/close/error exactly once or
 * return to be called again.
 */
#createResponseStream(firstResponse) {
  let firstResponseYielded = false;
  let sseEventIterator = null;
  return new ReadableStream({
    pull: async (controller) => {
      try {
        if (!firstResponseYielded) {
          firstResponseYielded = true;
          // The server signals SSE mode via the content-type of the first response.
          const isSSE = firstResponse.headers.get(`content-type`)?.includes(`text/event-stream`) ?? false;
          if (isSSE && firstResponse.body) {
            this.#markSSEConnectionStart();
            sseEventIterator = parseSSEStream(firstResponse.body, this.#abortController.signal);
          } else {
            controller.enqueue(firstResponse);
            // Non-live catch-up that is already up to date finishes immediately.
            if (this.upToDate && !this.#shouldContinueLive()) {
              this.#markClosed();
              controller.close();
              return;
            }
            return;
          }
        }
        // SSE mode: loop until an event produces a response, a close, or an error.
        if (sseEventIterator) while (true) {
          const result = await this.#processSSEEvents(sseEventIterator);
          switch (result.type) {
            case `response`:
              // A reconnect may have produced a fresh iterator alongside the response.
              if (result.newIterator) sseEventIterator = result.newIterator;
              controller.enqueue(result.response);
              return;
            case `closed`:
              this.#markClosed();
              controller.close();
              return;
            case `error`:
              this.#markError(result.error);
              controller.error(result.error);
              return;
            case `continue`:
              if (result.newIterator) sseEventIterator = result.newIterator;
              continue;
          }
        }
        // Long-poll mode: issue the next request from the current offset/cursor.
        if (this.#shouldContinueLive()) {
          if (this.#abortController.signal.aborted) {
            this.#markClosed();
            controller.close();
            return;
          }
          const response = await this.#fetchNext(this.offset, this.cursor, this.#abortController.signal);
          this.#updateStateFromResponse(response);
          controller.enqueue(response);
          return;
        }
        this.#markClosed();
        controller.close();
      } catch (err) {
        // Abort is a deliberate shutdown, not an error condition.
        if (this.#abortController.signal.aborted) {
          this.#markClosed();
          controller.close();
        } else {
          this.#markError(err instanceof Error ? err : new Error(String(err)));
          controller.error(err);
        }
      }
    },
    cancel: () => {
      this.#abortController.abort();
      this.#markClosed();
    }
  });
}
|
|
861
|
+
/**
|
|
862
|
+
* Get the response stream reader. Can only be called once.
|
|
863
|
+
*/
|
|
864
|
+
#getResponseReader() {
|
|
865
|
+
return this.#responseStream.getReader();
|
|
866
|
+
}
|
|
867
|
+
async body() {
|
|
868
|
+
this.#ensureNoConsumption(`body`);
|
|
869
|
+
this.#stopAfterUpToDate = true;
|
|
870
|
+
const reader = this.#getResponseReader();
|
|
871
|
+
const blobs = [];
|
|
872
|
+
try {
|
|
873
|
+
let result = await reader.read();
|
|
874
|
+
while (!result.done) {
|
|
875
|
+
const wasUpToDate = this.upToDate;
|
|
876
|
+
const blob = await result.value.blob();
|
|
877
|
+
if (blob.size > 0) blobs.push(blob);
|
|
878
|
+
if (wasUpToDate) break;
|
|
879
|
+
result = await reader.read();
|
|
880
|
+
}
|
|
881
|
+
} finally {
|
|
882
|
+
reader.releaseLock();
|
|
883
|
+
}
|
|
884
|
+
this.#markClosed();
|
|
885
|
+
if (blobs.length === 0) return new Uint8Array(0);
|
|
886
|
+
if (blobs.length === 1) return new Uint8Array(await blobs[0].arrayBuffer());
|
|
887
|
+
const combined = new Blob(blobs);
|
|
888
|
+
return new Uint8Array(await combined.arrayBuffer());
|
|
889
|
+
}
|
|
890
|
+
async json() {
|
|
891
|
+
this.#ensureNoConsumption(`json`);
|
|
892
|
+
this.#ensureJsonMode();
|
|
893
|
+
this.#stopAfterUpToDate = true;
|
|
894
|
+
const reader = this.#getResponseReader();
|
|
895
|
+
const items = [];
|
|
896
|
+
try {
|
|
897
|
+
let result = await reader.read();
|
|
898
|
+
while (!result.done) {
|
|
899
|
+
const wasUpToDate = this.upToDate;
|
|
900
|
+
const text = await result.value.text();
|
|
901
|
+
const content = text.trim() || `[]`;
|
|
902
|
+
const parsed = JSON.parse(content);
|
|
903
|
+
if (Array.isArray(parsed)) items.push(...parsed);
|
|
904
|
+
else items.push(parsed);
|
|
905
|
+
if (wasUpToDate) break;
|
|
906
|
+
result = await reader.read();
|
|
907
|
+
}
|
|
908
|
+
} finally {
|
|
909
|
+
reader.releaseLock();
|
|
910
|
+
}
|
|
911
|
+
this.#markClosed();
|
|
912
|
+
return items;
|
|
913
|
+
}
|
|
914
|
+
async text() {
|
|
915
|
+
this.#ensureNoConsumption(`text`);
|
|
916
|
+
this.#stopAfterUpToDate = true;
|
|
917
|
+
const reader = this.#getResponseReader();
|
|
918
|
+
const parts = [];
|
|
919
|
+
try {
|
|
920
|
+
let result = await reader.read();
|
|
921
|
+
while (!result.done) {
|
|
922
|
+
const wasUpToDate = this.upToDate;
|
|
923
|
+
const text = await result.value.text();
|
|
924
|
+
if (text) parts.push(text);
|
|
925
|
+
if (wasUpToDate) break;
|
|
926
|
+
result = await reader.read();
|
|
927
|
+
}
|
|
928
|
+
} finally {
|
|
929
|
+
reader.releaseLock();
|
|
930
|
+
}
|
|
931
|
+
this.#markClosed();
|
|
932
|
+
return parts.join(``);
|
|
933
|
+
}
|
|
934
|
+
/**
 * Internal helper to create the body stream without consumption check.
 * Used by both bodyStream() and textStream().
 *
 * Pipes each response body into one side of a TransformStream and returns
 * the readable side; the pump runs as a detached async task whose errors
 * are routed into the writable (abort) or swallowed on deliberate abort.
 */
#createBodyStreamInternal() {
  const { readable, writable } = new TransformStream();
  const reader = this.#getResponseReader();
  const pipeBodyStream = async () => {
    try {
      let result = await reader.read();
      while (!result.done) {
        // Snapshot before piping: upToDate may change while the body drains.
        const wasUpToDate = this.upToDate;
        const body = result.value.body;
        // prevent* flags keep the shared writable/reader alive across responses.
        if (body) await body.pipeTo(writable, {
          preventClose: true,
          preventAbort: true,
          preventCancel: true
        });
        if (wasUpToDate && !this.#shouldContinueLive()) break;
        result = await reader.read();
      }
      await writable.close();
      this.#markClosed();
    } catch (err) {
      if (this.#abortController.signal.aborted) {
        // Deliberate cancellation: close the output cleanly if possible.
        try {
          await writable.close();
        } catch {}
        this.#markClosed();
      } else {
        // Real failure: propagate it downstream via abort, then record it.
        try {
          await writable.abort(err);
        } catch {}
        this.#markError(err instanceof Error ? err : new Error(String(err)));
      }
    } finally {
      reader.releaseLock();
    }
  };
  // Detached pump: errors are handled inside, so the floating promise is safe.
  pipeBodyStream();
  return readable;
}
|
|
976
|
+
bodyStream() {
|
|
977
|
+
this.#ensureNoConsumption(`bodyStream`);
|
|
978
|
+
return asAsyncIterableReadableStream(this.#createBodyStreamInternal());
|
|
979
|
+
}
|
|
980
|
+
/**
 * Stream parsed JSON items one at a time as an async-iterable ReadableStream.
 * Each underlying response body is parsed as a batch; items are buffered in
 * pendingItems and handed out one per pull so consumer backpressure applies
 * per item rather than per batch.
 * Consumes the response stream; may only be called once.
 */
jsonStream() {
  this.#ensureNoConsumption(`jsonStream`);
  this.#ensureJsonMode();
  const reader = this.#getResponseReader();
  let pendingItems = [];
  const stream$1 = new ReadableStream({
    pull: async (controller) => {
      // Drain buffered items from the previous batch first.
      if (pendingItems.length > 0) {
        controller.enqueue(pendingItems.shift());
        return;
      }
      const { done, value: response } = await reader.read();
      if (done) {
        this.#markClosed();
        controller.close();
        return;
      }
      const text = await response.text();
      // Empty bodies parse as an empty batch.
      const content = text.trim() || `[]`;
      const parsed = JSON.parse(content);
      pendingItems = Array.isArray(parsed) ? parsed : [parsed];
      // Empty batch: enqueue nothing — the runtime will call pull again.
      if (pendingItems.length > 0) controller.enqueue(pendingItems.shift());
    },
    cancel: () => {
      reader.releaseLock();
      this.cancel();
    }
  });
  return asAsyncIterableReadableStream(stream$1);
}
|
|
1010
|
+
textStream() {
|
|
1011
|
+
this.#ensureNoConsumption(`textStream`);
|
|
1012
|
+
const decoder = new TextDecoder();
|
|
1013
|
+
const stream$1 = this.#createBodyStreamInternal().pipeThrough(new TransformStream({
|
|
1014
|
+
transform(chunk, controller) {
|
|
1015
|
+
controller.enqueue(decoder.decode(chunk, { stream: true }));
|
|
1016
|
+
},
|
|
1017
|
+
flush(controller) {
|
|
1018
|
+
const remaining = decoder.decode();
|
|
1019
|
+
if (remaining) controller.enqueue(remaining);
|
|
1020
|
+
}
|
|
1021
|
+
}));
|
|
1022
|
+
return asAsyncIterableReadableStream(stream$1);
|
|
1023
|
+
}
|
|
1024
|
+
subscribeJson(subscriber) {
|
|
1025
|
+
this.#ensureNoConsumption(`subscribeJson`);
|
|
1026
|
+
this.#ensureJsonMode();
|
|
1027
|
+
const abortController = new AbortController();
|
|
1028
|
+
const reader = this.#getResponseReader();
|
|
1029
|
+
const consumeJsonSubscription = async () => {
|
|
1030
|
+
try {
|
|
1031
|
+
let result = await reader.read();
|
|
1032
|
+
while (!result.done) {
|
|
1033
|
+
if (abortController.signal.aborted) break;
|
|
1034
|
+
const response = result.value;
|
|
1035
|
+
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1036
|
+
const text = await response.text();
|
|
1037
|
+
const content = text.trim() || `[]`;
|
|
1038
|
+
const parsed = JSON.parse(content);
|
|
1039
|
+
const items = Array.isArray(parsed) ? parsed : [parsed];
|
|
1040
|
+
await subscriber({
|
|
1041
|
+
items,
|
|
1042
|
+
offset,
|
|
1043
|
+
cursor,
|
|
1044
|
+
upToDate
|
|
1045
|
+
});
|
|
1046
|
+
result = await reader.read();
|
|
1047
|
+
}
|
|
1048
|
+
this.#markClosed();
|
|
1049
|
+
} catch (e) {
|
|
1050
|
+
const isAborted = abortController.signal.aborted;
|
|
1051
|
+
const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
|
|
1052
|
+
if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
|
|
1053
|
+
else this.#markClosed();
|
|
1054
|
+
} finally {
|
|
1055
|
+
reader.releaseLock();
|
|
1056
|
+
}
|
|
1057
|
+
};
|
|
1058
|
+
consumeJsonSubscription();
|
|
1059
|
+
return () => {
|
|
1060
|
+
abortController.abort();
|
|
1061
|
+
this.cancel();
|
|
1062
|
+
};
|
|
1063
|
+
}
|
|
1064
|
+
subscribeBytes(subscriber) {
|
|
1065
|
+
this.#ensureNoConsumption(`subscribeBytes`);
|
|
1066
|
+
const abortController = new AbortController();
|
|
1067
|
+
const reader = this.#getResponseReader();
|
|
1068
|
+
const consumeBytesSubscription = async () => {
|
|
1069
|
+
try {
|
|
1070
|
+
let result = await reader.read();
|
|
1071
|
+
while (!result.done) {
|
|
1072
|
+
if (abortController.signal.aborted) break;
|
|
1073
|
+
const response = result.value;
|
|
1074
|
+
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1075
|
+
const buffer = await response.arrayBuffer();
|
|
1076
|
+
await subscriber({
|
|
1077
|
+
data: new Uint8Array(buffer),
|
|
1078
|
+
offset,
|
|
1079
|
+
cursor,
|
|
1080
|
+
upToDate
|
|
1081
|
+
});
|
|
1082
|
+
result = await reader.read();
|
|
1083
|
+
}
|
|
1084
|
+
this.#markClosed();
|
|
1085
|
+
} catch (e) {
|
|
1086
|
+
const isAborted = abortController.signal.aborted;
|
|
1087
|
+
const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
|
|
1088
|
+
if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
|
|
1089
|
+
else this.#markClosed();
|
|
1090
|
+
} finally {
|
|
1091
|
+
reader.releaseLock();
|
|
1092
|
+
}
|
|
1093
|
+
};
|
|
1094
|
+
consumeBytesSubscription();
|
|
1095
|
+
return () => {
|
|
1096
|
+
abortController.abort();
|
|
1097
|
+
this.cancel();
|
|
1098
|
+
};
|
|
1099
|
+
}
|
|
1100
|
+
subscribeText(subscriber) {
|
|
1101
|
+
this.#ensureNoConsumption(`subscribeText`);
|
|
1102
|
+
const abortController = new AbortController();
|
|
1103
|
+
const reader = this.#getResponseReader();
|
|
1104
|
+
const consumeTextSubscription = async () => {
|
|
1105
|
+
try {
|
|
1106
|
+
let result = await reader.read();
|
|
1107
|
+
while (!result.done) {
|
|
1108
|
+
if (abortController.signal.aborted) break;
|
|
1109
|
+
const response = result.value;
|
|
1110
|
+
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1111
|
+
const text = await response.text();
|
|
1112
|
+
await subscriber({
|
|
1113
|
+
text,
|
|
1114
|
+
offset,
|
|
1115
|
+
cursor,
|
|
1116
|
+
upToDate
|
|
1117
|
+
});
|
|
1118
|
+
result = await reader.read();
|
|
1119
|
+
}
|
|
1120
|
+
this.#markClosed();
|
|
1121
|
+
} catch (e) {
|
|
1122
|
+
const isAborted = abortController.signal.aborted;
|
|
1123
|
+
const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
|
|
1124
|
+
if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
|
|
1125
|
+
else this.#markClosed();
|
|
1126
|
+
} finally {
|
|
1127
|
+
reader.releaseLock();
|
|
1128
|
+
}
|
|
1129
|
+
};
|
|
1130
|
+
consumeTextSubscription();
|
|
1131
|
+
return () => {
|
|
1132
|
+
abortController.abort();
|
|
1133
|
+
this.cancel();
|
|
1134
|
+
};
|
|
1135
|
+
}
|
|
1136
|
+
cancel(reason) {
|
|
1137
|
+
this.#abortController.abort(reason);
|
|
1138
|
+
this.#markClosed();
|
|
1139
|
+
}
|
|
1140
|
+
/**
 * Accessor for the #closed backing field.
 * NOTE(review): #closed is declared outside this view — presumably set by
 * #markClosed/#markError; confirm its type (flag vs promise) in the class header.
 */
get closed() {
  return this.#closed;
}
|
|
1143
|
+
};
|
|
1144
|
+
|
|
1145
|
+
//#endregion
|
|
1146
|
+
//#region src/utils.ts
|
|
1147
|
+
/**
 * Resolve headers from HeadersRecord (supports async functions).
 * Unified implementation used by both stream() and DurableStream.
 * Each value may be a plain string or a (possibly async) function producing one.
 * @param headers - record of header name to value or value-producing function; may be undefined.
 * @returns a plain record of resolved header values.
 */
async function resolveHeaders(headers) {
  const out = {};
  if (!headers) return out;
  for (const [name, raw] of Object.entries(headers)) {
    out[name] = typeof raw === `function` ? await raw() : raw;
  }
  return out;
}
|
|
1158
|
+
/**
 * Handle error responses from the server.
 * Throws appropriate DurableStreamError based on status code:
 * 404 -> NOT_FOUND; 409 -> CONFLICT_EXISTS (create) or CONFLICT_SEQ;
 * 400 -> BAD_REQUEST; anything else is derived from the response body.
 * @param response - the non-ok fetch Response.
 * @param url - stream URL, used in error messages.
 * @param context - optional { operation } used to disambiguate 409 conflicts.
 */
async function handleErrorResponse(response, url, context) {
  switch (response.status) {
    case 404:
      throw new DurableStreamError(`Stream not found: ${url}`, `NOT_FOUND`, 404);
    case 409: {
      const isCreate = context?.operation === `create`;
      throw new DurableStreamError(
        isCreate ? `Stream already exists: ${url}` : `Sequence conflict: seq is lower than last appended`,
        isCreate ? `CONFLICT_EXISTS` : `CONFLICT_SEQ`,
        409
      );
    }
    case 400:
      throw new DurableStreamError(`Bad request (possibly content-type mismatch)`, `BAD_REQUEST`, 400);
    default:
      throw await DurableStreamError.fromResponse(response, url);
  }
}
|
|
1173
|
+
/**
 * Resolve params from ParamsRecord (supports async functions).
 * Undefined values are skipped; function values (sync or async) are invoked.
 * @param params - record of param name to value or value-producing function; may be undefined.
 * @returns a plain record of resolved param values.
 */
async function resolveParams(params) {
  const out = {};
  if (!params) return out;
  for (const [name, raw] of Object.entries(params)) {
    if (raw === void 0) continue;
    out[name] = typeof raw === `function` ? await raw() : raw;
  }
  return out;
}
|
|
1183
|
+
|
|
1184
|
+
//#endregion
|
|
1185
|
+
//#region src/stream-api.ts
|
|
1186
|
+
/**
 * Create a streaming session to read from a durable stream.
 *
 * This is a fetch-like API:
 * - The promise resolves after the first network request succeeds
 * - It rejects for auth/404/other protocol errors
 * - Returns a StreamResponse for consuming the data
 *
 * If `options.onError` is provided, it is consulted on failure: returning
 * undefined rethrows, while returning `{ params?, headers? }` merges those
 * into the request and retries.
 *
 * @example
 * ```typescript
 * // Catch-up JSON:
 * const res = await stream<{ message: string }>({ url, auth, offset: "0", live: false })
 * const items = await res.json()
 *
 * // Live JSON:
 * const live = await stream<{ message: string }>({ url, auth, offset: savedOffset, live: "auto" })
 * live.subscribeJson(async (batch) => {
 *   for (const item of batch.items) handle(item)
 * })
 * ```
 */
async function stream(options) {
  if (!options.url) throw new DurableStreamError(`Invalid stream options: missing required url parameter`, `BAD_REQUEST`);
  let headers = options.headers;
  let params = options.params;
  for (;;) {
    try {
      return await streamInternal({
        ...options,
        headers,
        params
      });
    } catch (err) {
      if (!options.onError) throw err;
      const error = err instanceof Error ? err : new Error(String(err));
      const retry = await options.onError(error);
      // undefined means the handler declined: propagate the original error.
      if (retry === void 0) throw err;
      if (retry.params) params = { ...params, ...retry.params };
      if (retry.headers) headers = { ...headers, ...retry.headers };
    }
  }
}
|
|
1246
|
+
/**
 * Internal implementation of stream that doesn't handle onError retries.
 *
 * Performs the first GET (which determines content type, initial offset/cursor
 * and up-to-date state), then wires up `fetchNext` (long-poll continuation)
 * and, in SSE mode, `startSSE` (reconnection) closures for the returned
 * StreamResponseImpl. Params and headers are re-resolved on every follow-up
 * request so async token providers stay fresh.
 */
async function streamInternal(options) {
  const url = options.url instanceof URL ? options.url.toString() : options.url;
  const fetchUrl = new URL(url);
  // `-1` means "tail": start from the current end of the stream.
  const startOffset = options.offset ?? `-1`;
  fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset);
  const live = options.live ?? `auto`;
  // `auto` omits the live param on the first request (plain catch-up read).
  if (live === `long-poll` || live === `sse`) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live);
  const params = await resolveParams(options.params);
  for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
  const headers = await resolveHeaders(options.headers);
  const abortController = new AbortController();
  // Chain the caller's signal into our own controller so either can abort.
  if (options.signal) options.signal.addEventListener(`abort`, () => abortController.abort(options.signal?.reason), { once: true });
  const baseFetchClient = options.fetch ?? ((...args) => fetch(...args));
  const backoffOptions = options.backoffOptions ?? BackoffDefaults;
  const fetchClient = createFetchWithBackoff(baseFetchClient, backoffOptions);
  let firstResponse;
  try {
    firstResponse = await fetchClient(fetchUrl.toString(), {
      method: `GET`,
      headers,
      signal: abortController.signal
    });
  } catch (err) {
    if (err instanceof FetchBackoffAbortError) throw new DurableStreamError(`Stream request was aborted`, `UNKNOWN`);
    throw err;
  }
  // Initial state comes from the first response's protocol headers.
  const contentType = firstResponse.headers.get(`content-type`) ?? void 0;
  const initialOffset = firstResponse.headers.get(STREAM_OFFSET_HEADER) ?? startOffset;
  const initialCursor = firstResponse.headers.get(STREAM_CURSOR_HEADER) ?? void 0;
  const initialUpToDate = firstResponse.headers.has(STREAM_UP_TO_DATE_HEADER);
  const isJsonMode = options.json === true || (contentType?.includes(`application/json`) ?? false);
  // Long-poll continuation: re-resolves params/headers each call.
  const fetchNext = async (offset, cursor, signal) => {
    const nextUrl = new URL(url);
    nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
    if (live === `auto` || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
    else if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
    if (cursor) nextUrl.searchParams.set(`cursor`, cursor);
    const nextParams = await resolveParams(options.params);
    for (const [key, value] of Object.entries(nextParams)) nextUrl.searchParams.set(key, value);
    const nextHeaders = await resolveHeaders(options.headers);
    const response = await fetchClient(nextUrl.toString(), {
      method: `GET`,
      headers: nextHeaders,
      signal
    });
    if (!response.ok) await handleErrorResponse(response, url);
    return response;
  };
  // SSE reconnection closure; only provided when the caller requested SSE.
  const startSSE = live === `sse` ? async (offset, cursor, signal) => {
    const sseUrl = new URL(url);
    sseUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
    sseUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
    if (cursor) sseUrl.searchParams.set(`cursor`, cursor);
    const sseParams = await resolveParams(options.params);
    for (const [key, value] of Object.entries(sseParams)) sseUrl.searchParams.set(key, value);
    const sseHeaders = await resolveHeaders(options.headers);
    const response = await fetchClient(sseUrl.toString(), {
      method: `GET`,
      headers: sseHeaders,
      signal
    });
    if (!response.ok) await handleErrorResponse(response, url);
    return response;
  } : void 0;
  return new StreamResponseImpl({
    url,
    contentType,
    live,
    startOffset,
    isJsonMode,
    initialOffset,
    initialCursor,
    initialUpToDate,
    firstResponse,
    abortController,
    fetchNext,
    startSSE,
    sseResilience: options.sseResilience
  });
}
|
|
1329
|
+
|
|
1330
|
+
//#endregion
|
|
1331
|
+
//#region src/stream.ts
|
|
1332
|
+
/**
 * Normalize content-type by extracting the media type (before any semicolon).
 * Handles cases like "application/json; charset=utf-8".
 * @param contentType - raw content-type header value; may be undefined/empty.
 * @returns the lowercased media type, or an empty string when absent.
 */
function normalizeContentType(contentType) {
  if (!contentType) return ``;
  const [mediaType] = contentType.split(`;`);
  return mediaType.trim().toLowerCase();
}
|
|
1340
|
+
/**
 * Check if a value is a Promise or Promise-like (thenable).
 * Per the Promises/A+ spec a thenable is an object OR a function with a
 * callable `then` method — the previous implementation rejected callable
 * thenables by only accepting `typeof value === "object"`.
 * @param value - any value.
 * @returns true when `value` exposes a callable `then`.
 */
function isPromiseLike(value) {
  if (value === null) return false;
  const valueType = typeof value;
  if (valueType !== `object` && valueType !== `function`) return false;
  return `then` in value && typeof value.then === `function`;
}
|
|
1346
|
+
/**
|
|
1347
|
+
* A handle to a remote durable stream for read/write operations.
|
|
1348
|
+
*
|
|
1349
|
+
* This is a lightweight, reusable handle - not a persistent connection.
|
|
1350
|
+
* It does not automatically start reading or listening.
|
|
1351
|
+
* Create sessions as needed via stream().
|
|
1352
|
+
*
|
|
1353
|
+
* @example
|
|
1354
|
+
* ```typescript
|
|
1355
|
+
* // Create a new stream
|
|
1356
|
+
* const stream = await DurableStream.create({
|
|
1357
|
+
* url: "https://streams.example.com/my-stream",
|
|
1358
|
+
* headers: { Authorization: "Bearer my-token" },
|
|
1359
|
+
* contentType: "application/json"
|
|
1360
|
+
* });
|
|
1361
|
+
*
|
|
1362
|
+
* // Write data
|
|
1363
|
+
* await stream.append({ message: "hello" });
|
|
1364
|
+
*
|
|
1365
|
+
* // Read with the new API
|
|
1366
|
+
* const res = await stream.stream<{ message: string }>();
|
|
1367
|
+
* res.subscribeJson(async (batch) => {
|
|
1368
|
+
* for (const item of batch.items) {
|
|
1369
|
+
* console.log(item.message);
|
|
1370
|
+
* }
|
|
1371
|
+
* });
|
|
1372
|
+
* ```
|
|
1373
|
+
*/
|
|
1374
|
+
var DurableStream = class DurableStream {
|
|
1375
|
+
/**
|
|
1376
|
+
* The URL of the durable stream.
|
|
1377
|
+
*/
|
|
1378
|
+
url;
|
|
1379
|
+
/**
|
|
1380
|
+
* The content type of the stream (populated after connect/head/read).
|
|
1381
|
+
*/
|
|
1382
|
+
contentType;
|
|
1383
|
+
#options;
|
|
1384
|
+
#fetchClient;
|
|
1385
|
+
#onError;
|
|
1386
|
+
#batchingEnabled;
|
|
1387
|
+
#queue;
|
|
1388
|
+
#buffer = [];
|
|
1389
|
+
/**
 * Create a cold handle to a stream.
 * No network IO is performed by the constructor.
 *
 * Sets up: normalized URL, stored options, optional fastq-backed append
 * batching queue (concurrency 1), and a fetch client wrapped with backoff
 * and consumed-body handling.
 */
constructor(opts) {
  validateOptions(opts);
  const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
  this.url = urlStr;
  this.#options = {
    ...opts,
    url: urlStr
  };
  this.#onError = opts.onError;
  // Batching defaults to on; only an explicit `batching: false` disables it.
  this.#batchingEnabled = opts.batching !== false;
  // Serial (concurrency 1) queue so batched appends preserve order.
  if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
  const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
  const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
  const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
  this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
}
|
|
1409
|
+
/**
|
|
1410
|
+
* Create a new stream (create-only PUT) and return a handle.
|
|
1411
|
+
* Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
|
|
1412
|
+
*/
|
|
1413
|
+
static async create(opts) {
|
|
1414
|
+
const stream$1 = new DurableStream(opts);
|
|
1415
|
+
await stream$1.create({
|
|
1416
|
+
contentType: opts.contentType,
|
|
1417
|
+
ttlSeconds: opts.ttlSeconds,
|
|
1418
|
+
expiresAt: opts.expiresAt,
|
|
1419
|
+
body: opts.body
|
|
1420
|
+
});
|
|
1421
|
+
return stream$1;
|
|
1422
|
+
}
|
|
1423
|
+
/**
|
|
1424
|
+
* Validate that a stream exists and fetch metadata via HEAD.
|
|
1425
|
+
* Returns a handle with contentType populated (if sent by server).
|
|
1426
|
+
*
|
|
1427
|
+
* **Important**: This only performs a HEAD request for validation - it does
|
|
1428
|
+
* NOT open a session or start reading data. To read from the stream, call
|
|
1429
|
+
* `stream()` on the returned handle.
|
|
1430
|
+
*
|
|
1431
|
+
* @example
|
|
1432
|
+
* ```typescript
|
|
1433
|
+
* // Validate stream exists before reading
|
|
1434
|
+
* const handle = await DurableStream.connect({ url })
|
|
1435
|
+
* const res = await handle.stream() // Now actually read
|
|
1436
|
+
* ```
|
|
1437
|
+
*/
|
|
1438
|
+
static async connect(opts) {
|
|
1439
|
+
const stream$1 = new DurableStream(opts);
|
|
1440
|
+
await stream$1.head();
|
|
1441
|
+
return stream$1;
|
|
1442
|
+
}
|
|
1443
|
+
/**
|
|
1444
|
+
* HEAD metadata for a stream without creating a handle.
|
|
1445
|
+
*/
|
|
1446
|
+
static async head(opts) {
|
|
1447
|
+
const stream$1 = new DurableStream(opts);
|
|
1448
|
+
return stream$1.head();
|
|
1449
|
+
}
|
|
1450
|
+
/**
|
|
1451
|
+
* Delete a stream without creating a handle.
|
|
1452
|
+
*/
|
|
1453
|
+
static async delete(opts) {
|
|
1454
|
+
const stream$1 = new DurableStream(opts);
|
|
1455
|
+
return stream$1.delete();
|
|
1456
|
+
}
|
|
1457
|
+
/**
|
|
1458
|
+
* HEAD metadata for this stream.
|
|
1459
|
+
*/
|
|
1460
|
+
async head(opts) {
|
|
1461
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1462
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1463
|
+
method: `HEAD`,
|
|
1464
|
+
headers: requestHeaders,
|
|
1465
|
+
signal: opts?.signal ?? this.#options.signal
|
|
1466
|
+
});
|
|
1467
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1468
|
+
const contentType = response.headers.get(`content-type`) ?? void 0;
|
|
1469
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
1470
|
+
const etag = response.headers.get(`etag`) ?? void 0;
|
|
1471
|
+
const cacheControl = response.headers.get(`cache-control`) ?? void 0;
|
|
1472
|
+
if (contentType) this.contentType = contentType;
|
|
1473
|
+
return {
|
|
1474
|
+
exists: true,
|
|
1475
|
+
contentType,
|
|
1476
|
+
offset,
|
|
1477
|
+
etag,
|
|
1478
|
+
cacheControl
|
|
1479
|
+
};
|
|
1480
|
+
}
|
|
1481
|
+
/**
|
|
1482
|
+
* Create this stream (create-only PUT) using the URL/auth from the handle.
|
|
1483
|
+
*/
|
|
1484
|
+
async create(opts) {
|
|
1485
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1486
|
+
const contentType = opts?.contentType ?? this.#options.contentType;
|
|
1487
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1488
|
+
if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
|
|
1489
|
+
if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
|
|
1490
|
+
const body = encodeBody(opts?.body);
|
|
1491
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1492
|
+
method: `PUT`,
|
|
1493
|
+
headers: requestHeaders,
|
|
1494
|
+
body,
|
|
1495
|
+
signal: this.#options.signal
|
|
1496
|
+
});
|
|
1497
|
+
if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
|
|
1498
|
+
const responseContentType = response.headers.get(`content-type`);
|
|
1499
|
+
if (responseContentType) this.contentType = responseContentType;
|
|
1500
|
+
else if (contentType) this.contentType = contentType;
|
|
1501
|
+
return this;
|
|
1502
|
+
}
|
|
1503
|
+
/**
|
|
1504
|
+
* Delete this stream.
|
|
1505
|
+
*/
|
|
1506
|
+
async delete(opts) {
|
|
1507
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1508
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1509
|
+
method: `DELETE`,
|
|
1510
|
+
headers: requestHeaders,
|
|
1511
|
+
signal: opts?.signal ?? this.#options.signal
|
|
1512
|
+
});
|
|
1513
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1514
|
+
}
|
|
1515
|
+
/**
|
|
1516
|
+
* Append a single payload to the stream.
|
|
1517
|
+
*
|
|
1518
|
+
* When batching is enabled (default), multiple append() calls made while
|
|
1519
|
+
* a POST is in-flight will be batched together into a single request.
|
|
1520
|
+
* This significantly improves throughput for high-frequency writes.
|
|
1521
|
+
*
|
|
1522
|
+
* - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
|
|
1523
|
+
* - `body` may also be a Promise that resolves to any of the above types.
|
|
1524
|
+
* - Strings are encoded as UTF-8.
|
|
1525
|
+
* - `seq` (if provided) is sent as stream-seq (writer coordination).
|
|
1526
|
+
*
|
|
1527
|
+
* @example
|
|
1528
|
+
* ```typescript
|
|
1529
|
+
* // Direct value
|
|
1530
|
+
* await stream.append({ message: "hello" });
|
|
1531
|
+
*
|
|
1532
|
+
* // Promise value - awaited before buffering
|
|
1533
|
+
* await stream.append(fetchData());
|
|
1534
|
+
* await stream.append(Promise.all([a, b, c]));
|
|
1535
|
+
* ```
|
|
1536
|
+
*/
|
|
1537
|
+
async append(body, opts) {
|
|
1538
|
+
const resolvedBody = isPromiseLike(body) ? await body : body;
|
|
1539
|
+
if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
|
|
1540
|
+
return this.#appendDirect(resolvedBody, opts);
|
|
1541
|
+
}
|
|
1542
|
+
/**
|
|
1543
|
+
* Direct append without batching (used when batching is disabled).
|
|
1544
|
+
*/
|
|
1545
|
+
async #appendDirect(body, opts) {
|
|
1546
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1547
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1548
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1549
|
+
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
1550
|
+
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
1551
|
+
const bodyToEncode = isJson ? [body] : body;
|
|
1552
|
+
const encodedBody = encodeBody(bodyToEncode);
|
|
1553
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1554
|
+
method: `POST`,
|
|
1555
|
+
headers: requestHeaders,
|
|
1556
|
+
body: encodedBody,
|
|
1557
|
+
signal: opts?.signal ?? this.#options.signal
|
|
1558
|
+
});
|
|
1559
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1560
|
+
}
|
|
1561
|
+
/**
 * Append with batching - buffers messages and sends them in batches.
 * Each caller receives its own promise, which settles once the batch
 * containing its message has been POSTed (or has failed).
 */
async #appendWithBatching(body, opts) {
	return new Promise((resolve, reject) => {
		// Queue the message together with its per-call options and its
		// promise callbacks, so #batchWorker/#sendBatch can settle it.
		this.#buffer.push({
			data: body,
			seq: opts?.seq,
			contentType: opts?.contentType,
			signal: opts?.signal,
			resolve,
			reject
		});
		// If no batch is currently in flight, drain the buffer immediately.
		// Otherwise #batchWorker picks up the buffered messages when the
		// in-flight POST completes.
		if (this.#queue.idle()) {
			const batch = this.#buffer.splice(0);
			this.#queue.push(batch).catch((err) => {
				// The queue's push promise rejects when the worker threw; fail
				// every message of this batch. (Rejecting a message that
				// #batchWorker already rejected is a harmless no-op.)
				for (const msg of batch) msg.reject(err);
			});
		}
	});
}
|
|
1582
|
+
/**
 * Batch worker - processes batches of messages.
 * On success it settles every message in the batch and immediately chains
 * the messages that accumulated while the POST was in flight; on failure
 * it rejects both the in-flight batch and everything still buffered.
 */
async #batchWorker(batch) {
	try {
		await this.#sendBatch(batch);
		// The POST succeeded: settle every message in this batch.
		for (const msg of batch) msg.resolve();
		// Anything buffered during the in-flight POST becomes the next batch.
		if (this.#buffer.length > 0) {
			const nextBatch = this.#buffer.splice(0);
			this.#queue.push(nextBatch).catch((err) => {
				for (const msg of nextBatch) msg.reject(err);
			});
		}
	} catch (error) {
		// Fail the in-flight batch AND everything still buffered, then reset
		// the buffer so later appends start from a clean state.
		for (const msg of batch) msg.reject(error);
		for (const msg of this.#buffer) msg.reject(error);
		this.#buffer = [];
		// Re-throw so the queue's push() promise also rejects (see the
		// .catch handlers attached at push sites).
		throw error;
	}
}
|
|
1602
|
+
/**
|
|
1603
|
+
* Send a batch of messages as a single POST request.
|
|
1604
|
+
*/
|
|
1605
|
+
async #sendBatch(batch) {
|
|
1606
|
+
if (batch.length === 0) return;
|
|
1607
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1608
|
+
const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1609
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1610
|
+
let highestSeq;
|
|
1611
|
+
for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
|
|
1612
|
+
highestSeq = batch[i].seq;
|
|
1613
|
+
break;
|
|
1614
|
+
}
|
|
1615
|
+
if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
|
|
1616
|
+
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
1617
|
+
let batchedBody;
|
|
1618
|
+
if (isJson) {
|
|
1619
|
+
const values = batch.map((m) => m.data);
|
|
1620
|
+
batchedBody = JSON.stringify(values);
|
|
1621
|
+
} else {
|
|
1622
|
+
const totalSize = batch.reduce((sum, m) => {
|
|
1623
|
+
const size = typeof m.data === `string` ? new TextEncoder().encode(m.data).length : m.data.length;
|
|
1624
|
+
return sum + size;
|
|
1625
|
+
}, 0);
|
|
1626
|
+
const concatenated = new Uint8Array(totalSize);
|
|
1627
|
+
let offset = 0;
|
|
1628
|
+
for (const msg of batch) {
|
|
1629
|
+
const bytes = typeof msg.data === `string` ? new TextEncoder().encode(msg.data) : msg.data;
|
|
1630
|
+
concatenated.set(bytes, offset);
|
|
1631
|
+
offset += bytes.length;
|
|
1632
|
+
}
|
|
1633
|
+
batchedBody = concatenated;
|
|
1634
|
+
}
|
|
1635
|
+
const signals = [];
|
|
1636
|
+
if (this.#options.signal) signals.push(this.#options.signal);
|
|
1637
|
+
for (const msg of batch) if (msg.signal) signals.push(msg.signal);
|
|
1638
|
+
const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
|
|
1639
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1640
|
+
method: `POST`,
|
|
1641
|
+
headers: requestHeaders,
|
|
1642
|
+
body: batchedBody,
|
|
1643
|
+
signal: combinedSignal
|
|
1644
|
+
});
|
|
1645
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1646
|
+
}
|
|
1647
|
+
/**
|
|
1648
|
+
* Append a streaming body to the stream.
|
|
1649
|
+
*
|
|
1650
|
+
* Supports piping from any ReadableStream or async iterable:
|
|
1651
|
+
* - `source` yields Uint8Array or string chunks.
|
|
1652
|
+
* - Strings are encoded as UTF-8; no delimiters are added.
|
|
1653
|
+
* - Internally uses chunked transfer or HTTP/2 streaming.
|
|
1654
|
+
*
|
|
1655
|
+
* @example
|
|
1656
|
+
* ```typescript
|
|
1657
|
+
* // Pipe from a ReadableStream
|
|
1658
|
+
* const readable = new ReadableStream({
|
|
1659
|
+
* start(controller) {
|
|
1660
|
+
* controller.enqueue("chunk 1");
|
|
1661
|
+
* controller.enqueue("chunk 2");
|
|
1662
|
+
* controller.close();
|
|
1663
|
+
* }
|
|
1664
|
+
* });
|
|
1665
|
+
* await stream.appendStream(readable);
|
|
1666
|
+
*
|
|
1667
|
+
* // Pipe from an async generator
|
|
1668
|
+
* async function* generate() {
|
|
1669
|
+
* yield "line 1\n";
|
|
1670
|
+
* yield "line 2\n";
|
|
1671
|
+
* }
|
|
1672
|
+
* await stream.appendStream(generate());
|
|
1673
|
+
*
|
|
1674
|
+
* // Pipe from fetch response body
|
|
1675
|
+
* const response = await fetch("https://example.com/data");
|
|
1676
|
+
* await stream.appendStream(response.body!);
|
|
1677
|
+
* ```
|
|
1678
|
+
*/
|
|
1679
|
+
async appendStream(source, opts) {
|
|
1680
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1681
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1682
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1683
|
+
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
1684
|
+
const body = toReadableStream(source);
|
|
1685
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1686
|
+
method: `POST`,
|
|
1687
|
+
headers: requestHeaders,
|
|
1688
|
+
body,
|
|
1689
|
+
duplex: `half`,
|
|
1690
|
+
signal: opts?.signal ?? this.#options.signal
|
|
1691
|
+
});
|
|
1692
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1693
|
+
}
|
|
1694
|
+
/**
 * Create a writable stream that pipes data to this durable stream.
 *
 * Returns a WritableStream that can be used with `pipeTo()` or
 * `pipeThrough()` from any ReadableStream source.
 *
 * NOTE(review): all written chunks are buffered in memory and only POSTed
 * (via appendStream) when the writer is closed — nothing is sent while
 * writing, so an unbounded source is held fully in memory. Confirm this
 * is intended before piping large streams.
 *
 * @example
 * ```typescript
 * // Pipe from fetch response
 * const response = await fetch("https://example.com/data");
 * await response.body!.pipeTo(stream.writable());
 *
 * // Pipe through a transform
 * const readable = someStream.pipeThrough(new TextEncoderStream());
 * await readable.pipeTo(stream.writable());
 * ```
 */
writable(opts) {
	// Accumulates every written chunk until close().
	const chunks = [];
	const stream$1 = this;
	return new WritableStream({
		write(chunk) {
			chunks.push(chunk);
		},
		async close() {
			// Closing with no writes is a no-op — no empty POST is issued.
			if (chunks.length > 0) {
				// Replay the buffered chunks through a ReadableStream so the
				// single upload goes through appendStream.
				const readable = new ReadableStream({ start(controller) {
					for (const chunk of chunks) controller.enqueue(chunk);
					controller.close();
				} });
				await stream$1.appendStream(readable, opts);
			}
		},
		abort(reason) {
			// On abort the buffered chunks are discarded; the reason is only
			// logged, not rethrown.
			console.error(`WritableStream aborted:`, reason);
		}
	});
}
|
|
1732
|
+
/**
|
|
1733
|
+
* Start a fetch-like streaming session against this handle's URL/headers/params.
|
|
1734
|
+
* The first request is made inside this method; it resolves when we have
|
|
1735
|
+
* a valid first response, or rejects on errors.
|
|
1736
|
+
*
|
|
1737
|
+
* Call-specific headers and params are merged with handle-level ones,
|
|
1738
|
+
* with call-specific values taking precedence.
|
|
1739
|
+
*
|
|
1740
|
+
* @example
|
|
1741
|
+
* ```typescript
|
|
1742
|
+
* const handle = await DurableStream.connect({
|
|
1743
|
+
* url,
|
|
1744
|
+
* headers: { Authorization: `Bearer ${token}` }
|
|
1745
|
+
* });
|
|
1746
|
+
* const res = await handle.stream<{ message: string }>();
|
|
1747
|
+
*
|
|
1748
|
+
* // Accumulate all JSON items
|
|
1749
|
+
* const items = await res.json();
|
|
1750
|
+
*
|
|
1751
|
+
* // Or stream live with ReadableStream
|
|
1752
|
+
* const reader = res.jsonStream().getReader();
|
|
1753
|
+
* let result = await reader.read();
|
|
1754
|
+
* while (!result.done) {
|
|
1755
|
+
* console.log(result.value);
|
|
1756
|
+
* result = await reader.read();
|
|
1757
|
+
* }
|
|
1758
|
+
*
|
|
1759
|
+
* // Or use subscriber for backpressure-aware consumption
|
|
1760
|
+
* res.subscribeJson(async (batch) => {
|
|
1761
|
+
* for (const item of batch.items) {
|
|
1762
|
+
* console.log(item);
|
|
1763
|
+
* }
|
|
1764
|
+
* });
|
|
1765
|
+
* ```
|
|
1766
|
+
*/
|
|
1767
|
+
async stream(options) {
|
|
1768
|
+
if (options?.live === `sse` && this.contentType) {
|
|
1769
|
+
const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => this.contentType.startsWith(prefix));
|
|
1770
|
+
if (!isSSECompatible) throw new DurableStreamError(`SSE is not supported for content-type: ${this.contentType}`, `SSE_NOT_SUPPORTED`, 400);
|
|
1771
|
+
}
|
|
1772
|
+
const mergedHeaders = {
|
|
1773
|
+
...this.#options.headers,
|
|
1774
|
+
...options?.headers
|
|
1775
|
+
};
|
|
1776
|
+
const mergedParams = {
|
|
1777
|
+
...this.#options.params,
|
|
1778
|
+
...options?.params
|
|
1779
|
+
};
|
|
1780
|
+
return stream({
|
|
1781
|
+
url: this.url,
|
|
1782
|
+
headers: mergedHeaders,
|
|
1783
|
+
params: mergedParams,
|
|
1784
|
+
signal: options?.signal ?? this.#options.signal,
|
|
1785
|
+
fetch: this.#options.fetch,
|
|
1786
|
+
backoffOptions: this.#options.backoffOptions,
|
|
1787
|
+
offset: options?.offset,
|
|
1788
|
+
live: options?.live,
|
|
1789
|
+
json: options?.json,
|
|
1790
|
+
onError: options?.onError ?? this.#onError
|
|
1791
|
+
});
|
|
1792
|
+
}
|
|
1793
|
+
/**
|
|
1794
|
+
* Build request headers and URL.
|
|
1795
|
+
*/
|
|
1796
|
+
async #buildRequest() {
|
|
1797
|
+
const requestHeaders = await resolveHeaders(this.#options.headers);
|
|
1798
|
+
const fetchUrl = new URL(this.url);
|
|
1799
|
+
const params = await resolveParams(this.#options.params);
|
|
1800
|
+
for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
|
|
1801
|
+
return {
|
|
1802
|
+
requestHeaders,
|
|
1803
|
+
fetchUrl
|
|
1804
|
+
};
|
|
1805
|
+
}
|
|
1806
|
+
};
|
|
1807
|
+
/**
 * Encode a body value for transmission.
 * - `undefined` passes through untouched.
 * - Strings are UTF-8 encoded.
 * - Uint8Array and other fetch-native bodies (Blob, FormData,
 *   ReadableStream, ArrayBuffer, typed-array views) pass through as-is.
 * - Anything else is JSON-serialized, then UTF-8 encoded.
 */
function encodeBody(body) {
	if (body === void 0) return void 0;
	const utf8 = (text) => new TextEncoder().encode(text);
	if (typeof body === `string`) return utf8(body);
	if (body instanceof Uint8Array) return body;
	const isNativeBody = body instanceof Blob || body instanceof FormData || body instanceof ReadableStream || body instanceof ArrayBuffer || ArrayBuffer.isView(body);
	return isNativeBody ? body : utf8(JSON.stringify(body));
}
|
|
1819
|
+
/**
 * Normalize a chunk source into a ReadableStream of byte chunks.
 * Accepts either a ReadableStream (string chunks are UTF-8 encoded in
 * place) or any async iterable yielding strings / Uint8Arrays.
 * Cancelling the resulting stream finishes the underlying iterator.
 */
function toReadableStream(source) {
	if (source instanceof ReadableStream) {
		return source.pipeThrough(new TransformStream({
			transform(chunk, controller) {
				controller.enqueue(typeof chunk === `string` ? new TextEncoder().encode(chunk) : chunk);
			}
		}));
	}
	const encoder = new TextEncoder();
	const iterator = source[Symbol.asyncIterator]();
	return new ReadableStream({
		async pull(controller) {
			try {
				const { done, value } = await iterator.next();
				if (done) {
					controller.close();
					return;
				}
				controller.enqueue(typeof value === `string` ? encoder.encode(value) : value);
			} catch (e) {
				controller.error(e);
			}
		},
		cancel() {
			iterator.return?.();
		}
	});
}
|
|
1845
|
+
/**
 * Validate stream options.
 * Throws MissingStreamUrlError when no URL is configured, and
 * InvalidSignalError when a signal is present but not an AbortSignal.
 */
function validateOptions(options) {
	const { url, signal } = options;
	if (!url) throw new MissingStreamUrlError();
	if (signal && !(signal instanceof AbortSignal)) throw new InvalidSignalError();
}

//#endregion
// Public CommonJS surface of the bundle: protocol constants, error
// classes, the DurableStream handle, and the fetch/stream helpers
// defined earlier in this file.
exports.BackoffDefaults = BackoffDefaults
exports.CURSOR_QUERY_PARAM = CURSOR_QUERY_PARAM
exports.DURABLE_STREAM_PROTOCOL_QUERY_PARAMS = DURABLE_STREAM_PROTOCOL_QUERY_PARAMS
exports.DurableStream = DurableStream
exports.DurableStreamError = DurableStreamError
exports.FetchBackoffAbortError = FetchBackoffAbortError
exports.FetchError = FetchError
exports.InvalidSignalError = InvalidSignalError
exports.LIVE_QUERY_PARAM = LIVE_QUERY_PARAM
exports.MissingStreamUrlError = MissingStreamUrlError
exports.OFFSET_QUERY_PARAM = OFFSET_QUERY_PARAM
exports.SSE_COMPATIBLE_CONTENT_TYPES = SSE_COMPATIBLE_CONTENT_TYPES
exports.STREAM_CURSOR_HEADER = STREAM_CURSOR_HEADER
exports.STREAM_EXPIRES_AT_HEADER = STREAM_EXPIRES_AT_HEADER
exports.STREAM_OFFSET_HEADER = STREAM_OFFSET_HEADER
exports.STREAM_SEQ_HEADER = STREAM_SEQ_HEADER
exports.STREAM_TTL_HEADER = STREAM_TTL_HEADER
exports.STREAM_UP_TO_DATE_HEADER = STREAM_UP_TO_DATE_HEADER
exports.asAsyncIterableReadableStream = asAsyncIterableReadableStream
exports.createFetchWithBackoff = createFetchWithBackoff
exports.createFetchWithConsumedBody = createFetchWithConsumedBody
exports.stream = stream
|