@durable-streams/client 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -1,1172 +1,1875 @@
1
1
  "use strict";
2
+ //#region rolldown:runtime
3
+ var __create = Object.create;
2
4
  var __defProp = Object.defineProperty;
3
5
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
6
  var __getOwnPropNames = Object.getOwnPropertyNames;
7
+ var __getProtoOf = Object.getPrototypeOf;
5
8
  var __hasOwnProp = Object.prototype.hasOwnProperty;
6
- var __typeError = (msg) => {
7
- throw TypeError(msg);
8
- };
9
- var __export = (target, all) => {
10
- for (var name in all)
11
- __defProp(target, name, { get: all[name], enumerable: true });
12
- };
13
9
  var __copyProps = (to, from, except, desc) => {
14
- if (from && typeof from === "object" || typeof from === "function") {
15
- for (let key of __getOwnPropNames(from))
16
- if (!__hasOwnProp.call(to, key) && key !== except)
17
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
18
- }
19
- return to;
10
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
11
+ key = keys[i];
12
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
13
+ get: ((k) => from[k]).bind(null, key),
14
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
15
+ });
16
+ }
17
+ return to;
20
18
  };
21
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
22
- var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
23
- var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
24
- var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
25
- var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
26
- var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
19
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
20
+ value: mod,
21
+ enumerable: true
22
+ }) : target, mod));
27
23
 
28
- // src/index.ts
29
- var index_exports = {};
30
- __export(index_exports, {
31
- BackoffDefaults: () => BackoffDefaults,
32
- CURSOR_QUERY_PARAM: () => CURSOR_QUERY_PARAM,
33
- DURABLE_STREAM_PROTOCOL_QUERY_PARAMS: () => DURABLE_STREAM_PROTOCOL_QUERY_PARAMS,
34
- DurableStream: () => DurableStream,
35
- DurableStreamError: () => DurableStreamError,
36
- FetchBackoffAbortError: () => FetchBackoffAbortError,
37
- FetchError: () => FetchError,
38
- InvalidSignalError: () => InvalidSignalError,
39
- LIVE_QUERY_PARAM: () => LIVE_QUERY_PARAM,
40
- MissingStreamUrlError: () => MissingStreamUrlError,
41
- OFFSET_QUERY_PARAM: () => OFFSET_QUERY_PARAM,
42
- SSE_COMPATIBLE_CONTENT_TYPES: () => SSE_COMPATIBLE_CONTENT_TYPES,
43
- STREAM_CURSOR_HEADER: () => STREAM_CURSOR_HEADER,
44
- STREAM_EXPIRES_AT_HEADER: () => STREAM_EXPIRES_AT_HEADER,
45
- STREAM_OFFSET_HEADER: () => STREAM_OFFSET_HEADER,
46
- STREAM_SEQ_HEADER: () => STREAM_SEQ_HEADER,
47
- STREAM_TTL_HEADER: () => STREAM_TTL_HEADER,
48
- STREAM_UP_TO_DATE_HEADER: () => STREAM_UP_TO_DATE_HEADER,
49
- createFetchWithBackoff: () => createFetchWithBackoff,
50
- createFetchWithConsumedBody: () => createFetchWithConsumedBody
51
- });
52
- module.exports = __toCommonJS(index_exports);
24
+ //#endregion
25
+ const fastq = __toESM(require("fastq"));
53
26
 
54
- // src/stream.ts
55
- var import_fetch_event_source = require("@microsoft/fetch-event-source");
27
+ //#region src/constants.ts
28
+ /**
29
+ * Durable Streams Protocol Constants
30
+ *
31
+ * Header and query parameter names following the Electric Durable Stream Protocol.
32
+ */
33
+ /**
34
+ * Response header containing the next offset to read from.
35
+ * Offsets are opaque tokens - clients MUST NOT interpret the format.
36
+ */
37
+ const STREAM_OFFSET_HEADER = `Stream-Next-Offset`;
38
+ /**
39
+ * Response header for cursor (used for CDN collapsing).
40
+ * Echo this value in subsequent long-poll requests.
41
+ */
42
+ const STREAM_CURSOR_HEADER = `Stream-Cursor`;
43
+ /**
44
+ * Presence header indicating response ends at current end of stream.
45
+ * When present (any value), indicates up-to-date.
46
+ */
47
+ const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
48
+ /**
49
+ * Request header for writer coordination sequence.
50
+ * Monotonic, lexicographic. If lower than last appended seq -> 409 Conflict.
51
+ */
52
+ const STREAM_SEQ_HEADER = `Stream-Seq`;
53
+ /**
54
+ * Request header for stream TTL in seconds (on create).
55
+ */
56
+ const STREAM_TTL_HEADER = `Stream-TTL`;
57
+ /**
58
+ * Request header for absolute stream expiry time (RFC3339, on create).
59
+ */
60
+ const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
61
+ /**
62
+ * Query parameter for starting offset.
63
+ */
64
+ const OFFSET_QUERY_PARAM = `offset`;
65
+ /**
66
+ * Query parameter for live mode.
67
+ * Values: "long-poll", "sse"
68
+ */
69
+ const LIVE_QUERY_PARAM = `live`;
70
+ /**
71
+ * Query parameter for echoing cursor (CDN collapsing).
72
+ */
73
+ const CURSOR_QUERY_PARAM = `cursor`;
74
+ /**
75
+ * Content types that support SSE mode.
76
+ * SSE is only valid for text/* or application/json streams.
77
+ */
78
+ const SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
79
+ /**
80
+ * Protocol query parameters that should not be set by users.
81
+ */
82
+ const DURABLE_STREAM_PROTOCOL_QUERY_PARAMS = [
83
+ OFFSET_QUERY_PARAM,
84
+ LIVE_QUERY_PARAM,
85
+ CURSOR_QUERY_PARAM
86
+ ];
56
87
 
57
- // src/error.ts
58
- var FetchError = class _FetchError extends Error {
59
- constructor(status, text, json, headers, url, message) {
60
- super(
61
- message || `HTTP Error ${status} at ${url}: ${text ?? JSON.stringify(json)}`
62
- );
63
- this.url = url;
64
- this.name = `FetchError`;
65
- this.status = status;
66
- this.text = text;
67
- this.json = json;
68
- this.headers = headers;
69
- }
70
- static async fromResponse(response, url) {
71
- const status = response.status;
72
- const headers = Object.fromEntries([...response.headers.entries()]);
73
- let text = void 0;
74
- let json = void 0;
75
- const contentType = response.headers.get(`content-type`);
76
- if (!response.bodyUsed) {
77
- if (contentType && contentType.includes(`application/json`)) {
78
- try {
79
- json = await response.json();
80
- } catch {
81
- text = await response.text();
82
- }
83
- } else {
84
- text = await response.text();
85
- }
86
- }
87
- return new _FetchError(status, text, json, headers, url);
88
- }
88
+ //#endregion
89
+ //#region src/error.ts
90
+ /**
91
+ * Error thrown for transport/network errors.
92
+ * Following the @electric-sql/client FetchError pattern.
93
+ */
94
+ var FetchError = class FetchError extends Error {
95
+ status;
96
+ text;
97
+ json;
98
+ headers;
99
+ constructor(status, text, json, headers, url, message) {
100
+ super(message || `HTTP Error ${status} at ${url}: ${text ?? JSON.stringify(json)}`);
101
+ this.url = url;
102
+ this.name = `FetchError`;
103
+ this.status = status;
104
+ this.text = text;
105
+ this.json = json;
106
+ this.headers = headers;
107
+ }
108
+ static async fromResponse(response, url) {
109
+ const status = response.status;
110
+ const headers = Object.fromEntries([...response.headers.entries()]);
111
+ let text = void 0;
112
+ let json = void 0;
113
+ const contentType = response.headers.get(`content-type`);
114
+ if (!response.bodyUsed) if (contentType && contentType.includes(`application/json`)) try {
115
+ json = await response.json();
116
+ } catch {
117
+ text = await response.text();
118
+ }
119
+ else text = await response.text();
120
+ return new FetchError(status, text, json, headers, url);
121
+ }
89
122
  };
123
+ /**
124
+ * Error thrown when a fetch operation is aborted during backoff.
125
+ */
90
126
  var FetchBackoffAbortError = class extends Error {
91
- constructor() {
92
- super(`Fetch with backoff aborted`);
93
- this.name = `FetchBackoffAbortError`;
94
- }
127
+ constructor() {
128
+ super(`Fetch with backoff aborted`);
129
+ this.name = `FetchBackoffAbortError`;
130
+ }
95
131
  };
96
- var DurableStreamError = class _DurableStreamError extends Error {
97
- constructor(message, code, status, details) {
98
- super(message);
99
- this.name = `DurableStreamError`;
100
- this.code = code;
101
- this.status = status;
102
- this.details = details;
103
- }
104
- /**
105
- * Create a DurableStreamError from an HTTP response.
106
- */
107
- static async fromResponse(response, url) {
108
- const status = response.status;
109
- let details;
110
- const contentType = response.headers.get(`content-type`);
111
- if (!response.bodyUsed) {
112
- if (contentType && contentType.includes(`application/json`)) {
113
- try {
114
- details = await response.json();
115
- } catch {
116
- details = await response.text();
117
- }
118
- } else {
119
- details = await response.text();
120
- }
121
- }
122
- const code = statusToCode(status);
123
- const message = `Durable stream error at ${url}: ${response.statusText || status}`;
124
- return new _DurableStreamError(message, code, status, details);
125
- }
126
- /**
127
- * Create a DurableStreamError from a FetchError.
128
- */
129
- static fromFetchError(error) {
130
- const code = statusToCode(error.status);
131
- return new _DurableStreamError(
132
- error.message,
133
- code,
134
- error.status,
135
- error.json ?? error.text
136
- );
137
- }
132
+ /**
133
+ * Protocol-level error for Durable Streams operations.
134
+ * Provides structured error handling with error codes.
135
+ */
136
+ var DurableStreamError = class DurableStreamError extends Error {
137
+ /**
138
+ * HTTP status code, if applicable.
139
+ */
140
+ status;
141
+ /**
142
+ * Structured error code for programmatic handling.
143
+ */
144
+ code;
145
+ /**
146
+ * Additional error details (e.g., raw response body).
147
+ */
148
+ details;
149
+ constructor(message, code, status, details) {
150
+ super(message);
151
+ this.name = `DurableStreamError`;
152
+ this.code = code;
153
+ this.status = status;
154
+ this.details = details;
155
+ }
156
+ /**
157
+ * Create a DurableStreamError from an HTTP response.
158
+ */
159
+ static async fromResponse(response, url) {
160
+ const status = response.status;
161
+ let details;
162
+ const contentType = response.headers.get(`content-type`);
163
+ if (!response.bodyUsed) if (contentType && contentType.includes(`application/json`)) try {
164
+ details = await response.json();
165
+ } catch {
166
+ details = await response.text();
167
+ }
168
+ else details = await response.text();
169
+ const code = statusToCode(status);
170
+ const message = `Durable stream error at ${url}: ${response.statusText || status}`;
171
+ return new DurableStreamError(message, code, status, details);
172
+ }
173
+ /**
174
+ * Create a DurableStreamError from a FetchError.
175
+ */
176
+ static fromFetchError(error) {
177
+ const code = statusToCode(error.status);
178
+ return new DurableStreamError(error.message, code, error.status, error.json ?? error.text);
179
+ }
138
180
  };
181
+ /**
182
+ * Map HTTP status codes to DurableStreamErrorCode.
183
+ */
139
184
  function statusToCode(status) {
140
- switch (status) {
141
- case 400:
142
- return `BAD_REQUEST`;
143
- case 401:
144
- return `UNAUTHORIZED`;
145
- case 403:
146
- return `FORBIDDEN`;
147
- case 404:
148
- return `NOT_FOUND`;
149
- case 409:
150
- return `CONFLICT_SEQ`;
151
- case 429:
152
- return `RATE_LIMITED`;
153
- case 503:
154
- return `BUSY`;
155
- default:
156
- return `UNKNOWN`;
157
- }
185
+ switch (status) {
186
+ case 400: return `BAD_REQUEST`;
187
+ case 401: return `UNAUTHORIZED`;
188
+ case 403: return `FORBIDDEN`;
189
+ case 404: return `NOT_FOUND`;
190
+ case 409: return `CONFLICT_SEQ`;
191
+ case 429: return `RATE_LIMITED`;
192
+ case 503: return `BUSY`;
193
+ default: return `UNKNOWN`;
194
+ }
158
195
  }
196
+ /**
197
+ * Error thrown when stream URL is missing.
198
+ */
159
199
  var MissingStreamUrlError = class extends Error {
160
- constructor() {
161
- super(`Invalid stream options: missing required url parameter`);
162
- this.name = `MissingStreamUrlError`;
163
- }
200
+ constructor() {
201
+ super(`Invalid stream options: missing required url parameter`);
202
+ this.name = `MissingStreamUrlError`;
203
+ }
164
204
  };
205
+ /**
206
+ * Error thrown when signal option is invalid.
207
+ */
165
208
  var InvalidSignalError = class extends Error {
166
- constructor() {
167
- super(`Invalid signal option. It must be an instance of AbortSignal.`);
168
- this.name = `InvalidSignalError`;
169
- }
209
+ constructor() {
210
+ super(`Invalid signal option. It must be an instance of AbortSignal.`);
211
+ this.name = `InvalidSignalError`;
212
+ }
170
213
  };
171
214
 
172
- // src/constants.ts
173
- var STREAM_OFFSET_HEADER = `stream-offset`;
174
- var STREAM_CURSOR_HEADER = `stream-cursor`;
175
- var STREAM_UP_TO_DATE_HEADER = `stream-up-to-date`;
176
- var STREAM_SEQ_HEADER = `stream-seq`;
177
- var STREAM_TTL_HEADER = `stream-ttl`;
178
- var STREAM_EXPIRES_AT_HEADER = `stream-expires-at`;
179
- var OFFSET_QUERY_PARAM = `offset`;
180
- var LIVE_QUERY_PARAM = `live`;
181
- var CURSOR_QUERY_PARAM = `cursor`;
182
- var SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
183
- var DURABLE_STREAM_PROTOCOL_QUERY_PARAMS = [
184
- OFFSET_QUERY_PARAM,
185
- LIVE_QUERY_PARAM,
186
- CURSOR_QUERY_PARAM
187
- ];
188
-
189
- // src/fetch.ts
190
- var HTTP_RETRY_STATUS_CODES = [429, 503];
191
- var BackoffDefaults = {
192
- initialDelay: 100,
193
- maxDelay: 6e4,
194
- // Cap at 60s
195
- multiplier: 1.3,
196
- maxRetries: Infinity
197
- // Retry forever by default
215
+ //#endregion
216
+ //#region src/fetch.ts
217
+ /**
218
+ * HTTP status codes that should be retried.
219
+ */
220
+ const HTTP_RETRY_STATUS_CODES = [429, 503];
221
+ /**
222
+ * Default backoff options.
223
+ */
224
+ const BackoffDefaults = {
225
+ initialDelay: 100,
226
+ maxDelay: 6e4,
227
+ multiplier: 1.3,
228
+ maxRetries: Infinity
198
229
  };
230
+ /**
231
+ * Parse Retry-After header value and return delay in milliseconds.
232
+ * Supports both delta-seconds format and HTTP-date format.
233
+ * Returns 0 if header is not present or invalid.
234
+ */
199
235
  function parseRetryAfterHeader(retryAfter) {
200
- if (!retryAfter) return 0;
201
- const retryAfterSec = Number(retryAfter);
202
- if (Number.isFinite(retryAfterSec) && retryAfterSec > 0) {
203
- return retryAfterSec * 1e3;
204
- }
205
- const retryDate = Date.parse(retryAfter);
206
- if (!isNaN(retryDate)) {
207
- const deltaMs = retryDate - Date.now();
208
- return Math.max(0, Math.min(deltaMs, 36e5));
209
- }
210
- return 0;
236
+ if (!retryAfter) return 0;
237
+ const retryAfterSec = Number(retryAfter);
238
+ if (Number.isFinite(retryAfterSec) && retryAfterSec > 0) return retryAfterSec * 1e3;
239
+ const retryDate = Date.parse(retryAfter);
240
+ if (!isNaN(retryDate)) {
241
+ const deltaMs = retryDate - Date.now();
242
+ return Math.max(0, Math.min(deltaMs, 36e5));
243
+ }
244
+ return 0;
211
245
  }
246
+ /**
247
+ * Creates a fetch client that retries failed requests with exponential backoff.
248
+ *
249
+ * @param fetchClient - The base fetch client to wrap
250
+ * @param backoffOptions - Options for retry behavior
251
+ * @returns A fetch function with automatic retry
252
+ */
212
253
  function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
213
- const {
214
- initialDelay,
215
- maxDelay,
216
- multiplier,
217
- debug = false,
218
- onFailedAttempt,
219
- maxRetries = Infinity
220
- } = backoffOptions;
221
- return async (...args) => {
222
- const url = args[0];
223
- const options = args[1];
224
- let delay = initialDelay;
225
- let attempt = 0;
226
- while (true) {
227
- try {
228
- const result = await fetchClient(...args);
229
- if (result.ok) {
230
- return result;
231
- }
232
- const err = await FetchError.fromResponse(result, url.toString());
233
- throw err;
234
- } catch (e) {
235
- onFailedAttempt?.();
236
- if (options?.signal?.aborted) {
237
- throw new FetchBackoffAbortError();
238
- } else if (e instanceof FetchError && !HTTP_RETRY_STATUS_CODES.includes(e.status) && e.status >= 400 && e.status < 500) {
239
- throw e;
240
- } else {
241
- attempt++;
242
- if (attempt > maxRetries) {
243
- if (debug) {
244
- console.log(
245
- `Max retries reached (${attempt}/${maxRetries}), giving up`
246
- );
247
- }
248
- throw e;
249
- }
250
- const serverMinimumMs = e instanceof FetchError ? parseRetryAfterHeader(e.headers[`retry-after`]) : 0;
251
- const jitter = Math.random() * delay;
252
- const clientBackoffMs = Math.min(jitter, maxDelay);
253
- const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
254
- if (debug) {
255
- const source = serverMinimumMs > 0 ? `server+client` : `client`;
256
- console.log(
257
- `Retry attempt #${attempt} after ${waitMs}ms (${source}, serverMin=${serverMinimumMs}ms, clientBackoff=${clientBackoffMs}ms)`
258
- );
259
- }
260
- await new Promise((resolve) => setTimeout(resolve, waitMs));
261
- delay = Math.min(delay * multiplier, maxDelay);
262
- }
263
- }
264
- }
265
- };
254
+ const { initialDelay, maxDelay, multiplier, debug = false, onFailedAttempt, maxRetries = Infinity } = backoffOptions;
255
+ return async (...args) => {
256
+ const url = args[0];
257
+ const options = args[1];
258
+ let delay = initialDelay;
259
+ let attempt = 0;
260
+ while (true) try {
261
+ const result = await fetchClient(...args);
262
+ if (result.ok) return result;
263
+ const err = await FetchError.fromResponse(result, url.toString());
264
+ throw err;
265
+ } catch (e) {
266
+ onFailedAttempt?.();
267
+ if (options?.signal?.aborted) throw new FetchBackoffAbortError();
268
+ else if (e instanceof FetchError && !HTTP_RETRY_STATUS_CODES.includes(e.status) && e.status >= 400 && e.status < 500) throw e;
269
+ else {
270
+ attempt++;
271
+ if (attempt > maxRetries) {
272
+ if (debug) console.log(`Max retries reached (${attempt}/${maxRetries}), giving up`);
273
+ throw e;
274
+ }
275
+ const serverMinimumMs = e instanceof FetchError ? parseRetryAfterHeader(e.headers[`retry-after`]) : 0;
276
+ const jitter = Math.random() * delay;
277
+ const clientBackoffMs = Math.min(jitter, maxDelay);
278
+ const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
279
+ if (debug) {
280
+ const source = serverMinimumMs > 0 ? `server+client` : `client`;
281
+ console.log(`Retry attempt #${attempt} after ${waitMs}ms (${source}, serverMin=${serverMinimumMs}ms, clientBackoff=${clientBackoffMs}ms)`);
282
+ }
283
+ await new Promise((resolve) => setTimeout(resolve, waitMs));
284
+ delay = Math.min(delay * multiplier, maxDelay);
285
+ }
286
+ }
287
+ };
266
288
  }
267
- var NO_BODY_STATUS_CODES = [201, 204, 205];
289
+ /**
290
+ * Status codes where we shouldn't try to read the body.
291
+ */
292
+ const NO_BODY_STATUS_CODES = [
293
+ 201,
294
+ 204,
295
+ 205
296
+ ];
297
+ /**
298
+ * Creates a fetch client that ensures the response body is fully consumed.
299
+ * This prevents issues with connection pooling when bodies aren't read.
300
+ *
301
+ * Uses arrayBuffer() instead of text() to preserve binary data integrity.
302
+ *
303
+ * @param fetchClient - The base fetch client to wrap
304
+ * @returns A fetch function that consumes response bodies
305
+ */
268
306
  function createFetchWithConsumedBody(fetchClient) {
269
- return async (...args) => {
270
- const url = args[0];
271
- const res = await fetchClient(...args);
272
- try {
273
- if (res.status < 200 || NO_BODY_STATUS_CODES.includes(res.status)) {
274
- return res;
275
- }
276
- const buf = await res.arrayBuffer();
277
- return new Response(buf, {
278
- status: res.status,
279
- statusText: res.statusText,
280
- headers: res.headers
281
- });
282
- } catch (err) {
283
- if (args[1]?.signal?.aborted) {
284
- throw new FetchBackoffAbortError();
285
- }
286
- throw new FetchError(
287
- res.status,
288
- void 0,
289
- void 0,
290
- Object.fromEntries([...res.headers.entries()]),
291
- url.toString(),
292
- err instanceof Error ? err.message : typeof err === `string` ? err : `failed to read body`
293
- );
294
- }
295
- };
307
+ return async (...args) => {
308
+ const url = args[0];
309
+ const res = await fetchClient(...args);
310
+ try {
311
+ if (res.status < 200 || NO_BODY_STATUS_CODES.includes(res.status)) return res;
312
+ const buf = await res.arrayBuffer();
313
+ return new Response(buf, {
314
+ status: res.status,
315
+ statusText: res.statusText,
316
+ headers: res.headers
317
+ });
318
+ } catch (err) {
319
+ if (args[1]?.signal?.aborted) throw new FetchBackoffAbortError();
320
+ throw new FetchError(res.status, void 0, void 0, Object.fromEntries([...res.headers.entries()]), url.toString(), err instanceof Error ? err.message : typeof err === `string` ? err : `failed to read body`);
321
+ }
322
+ };
323
+ }
324
+
325
+ //#endregion
326
+ //#region src/asyncIterableReadableStream.ts
327
+ /**
328
+ * Check if a value has Symbol.asyncIterator defined.
329
+ */
330
+ function hasAsyncIterator(stream$1) {
331
+ return typeof Symbol !== `undefined` && typeof Symbol.asyncIterator === `symbol` && typeof stream$1[Symbol.asyncIterator] === `function`;
296
332
  }
297
- function chainAborter(aborter, sourceSignal) {
298
- let cleanup = noop;
299
- if (!sourceSignal) {
300
- } else if (sourceSignal.aborted) {
301
- aborter.abort(sourceSignal.reason);
302
- } else {
303
- const abortParent = () => aborter.abort(sourceSignal.reason);
304
- sourceSignal.addEventListener(`abort`, abortParent, {
305
- once: true,
306
- signal: aborter.signal
307
- });
308
- cleanup = () => sourceSignal.removeEventListener(`abort`, abortParent);
309
- }
310
- return {
311
- signal: aborter.signal,
312
- cleanup
313
- };
333
+ /**
334
+ * Define [Symbol.asyncIterator] and .values() on a ReadableStream instance.
335
+ *
336
+ * Uses getReader().read() to implement spec-consistent iteration.
337
+ * On completion or early exit (break/return/throw), releases lock and cancels as appropriate.
338
+ *
339
+ * **Iterator behavior notes:**
340
+ * - `return(value?)` accepts an optional cancellation reason passed to `reader.cancel()`
341
+ * - `return()` always resolves with `{ done: true, value: undefined }` regardless of the
342
+ * input value. This matches `for await...of` semantics where the return value is ignored.
343
+ * Manual iteration users should be aware of this behavior.
344
+ */
345
+ function defineAsyncIterator(stream$1) {
346
+ if (typeof Symbol === `undefined` || typeof Symbol.asyncIterator !== `symbol`) return;
347
+ if (typeof stream$1[Symbol.asyncIterator] === `function`) return;
348
+ const createIterator = function() {
349
+ const reader = this.getReader();
350
+ let finished = false;
351
+ let pendingReads = 0;
352
+ const iterator = {
353
+ async next() {
354
+ if (finished) return {
355
+ done: true,
356
+ value: void 0
357
+ };
358
+ pendingReads++;
359
+ try {
360
+ const { value, done } = await reader.read();
361
+ if (done) {
362
+ finished = true;
363
+ reader.releaseLock();
364
+ return {
365
+ done: true,
366
+ value: void 0
367
+ };
368
+ }
369
+ return {
370
+ done: false,
371
+ value
372
+ };
373
+ } catch (err) {
374
+ finished = true;
375
+ try {
376
+ reader.releaseLock();
377
+ } catch {}
378
+ throw err;
379
+ } finally {
380
+ pendingReads--;
381
+ }
382
+ },
383
+ async return(value) {
384
+ if (pendingReads > 0) throw new TypeError(`Cannot close a readable stream reader when it has pending read requests`);
385
+ finished = true;
386
+ const cancelPromise = reader.cancel(value);
387
+ reader.releaseLock();
388
+ await cancelPromise;
389
+ return {
390
+ done: true,
391
+ value: void 0
392
+ };
393
+ },
394
+ async throw(err) {
395
+ if (pendingReads > 0) throw new TypeError(`Cannot close a readable stream reader when it has pending read requests`);
396
+ finished = true;
397
+ const cancelPromise = reader.cancel(err);
398
+ reader.releaseLock();
399
+ await cancelPromise;
400
+ throw err;
401
+ },
402
+ [Symbol.asyncIterator]() {
403
+ return this;
404
+ }
405
+ };
406
+ return iterator;
407
+ };
408
+ try {
409
+ Object.defineProperty(stream$1, Symbol.asyncIterator, {
410
+ configurable: true,
411
+ writable: true,
412
+ value: createIterator
413
+ });
414
+ } catch {
415
+ return;
416
+ }
417
+ try {
418
+ Object.defineProperty(stream$1, `values`, {
419
+ configurable: true,
420
+ writable: true,
421
+ value: createIterator
422
+ });
423
+ } catch {}
314
424
  }
315
- function noop() {
425
+ /**
426
+ * Ensure a ReadableStream is async-iterable.
427
+ *
428
+ * If the stream already has [Symbol.asyncIterator] defined (native or polyfilled),
429
+ * it is returned as-is. Otherwise, [Symbol.asyncIterator] is defined on the
430
+ * stream instance (not the prototype).
431
+ *
432
+ * The returned value is the same ReadableStream instance, so:
433
+ * - `stream instanceof ReadableStream` remains true
434
+ * - Any code relying on native branding/internal slots continues to work
435
+ *
436
+ * @example
437
+ * ```typescript
438
+ * const stream = someApiReturningReadableStream();
439
+ * const iterableStream = asAsyncIterableReadableStream(stream);
440
+ *
441
+ * // Now works on Safari/iOS:
442
+ * for await (const chunk of iterableStream) {
443
+ * console.log(chunk);
444
+ * }
445
+ * ```
446
+ */
447
+ function asAsyncIterableReadableStream(stream$1) {
448
+ if (!hasAsyncIterator(stream$1)) defineAsyncIterator(stream$1);
449
+ return stream$1;
316
450
  }
317
451
 
318
- // src/stream.ts
319
- var _options, _fetchClient, _sseFetchClient, _onError, _DurableStream_instances, buildRequest_fn, resolveHeaders_fn, parseReadResponse_fn, isSSECompatible_fn, createSSEIterator_fn, parseSSEData_fn;
320
- var _DurableStream = class _DurableStream {
321
- /**
322
- * Create a cold handle to a stream.
323
- * No network IO is performed by the constructor.
324
- */
325
- constructor(opts) {
326
- __privateAdd(this, _DurableStream_instances);
327
- __privateAdd(this, _options);
328
- __privateAdd(this, _fetchClient);
329
- __privateAdd(this, _sseFetchClient);
330
- __privateAdd(this, _onError);
331
- validateOptions(opts);
332
- this.url = opts.url;
333
- __privateSet(this, _options, opts);
334
- __privateSet(this, _onError, opts.onError);
335
- const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
336
- const backOffOpts = {
337
- ...opts.backoffOptions ?? BackoffDefaults
338
- };
339
- const fetchWithBackoffClient = createFetchWithBackoff(
340
- baseFetchClient,
341
- backOffOpts
342
- );
343
- __privateSet(this, _sseFetchClient, fetchWithBackoffClient);
344
- __privateSet(this, _fetchClient, createFetchWithConsumedBody(fetchWithBackoffClient));
345
- }
346
- // ============================================================================
347
- // Static convenience methods
348
- // ============================================================================
349
- /**
350
- * Create a new stream (create-only PUT) and return a handle.
351
- * Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
352
- */
353
- static async create(opts) {
354
- const stream = new _DurableStream(opts);
355
- await stream.create({
356
- contentType: opts.contentType,
357
- ttlSeconds: opts.ttlSeconds,
358
- expiresAt: opts.expiresAt,
359
- body: opts.body
360
- });
361
- return stream;
362
- }
363
- /**
364
- * Validate that a stream exists and fetch metadata via HEAD.
365
- * Returns a handle with contentType populated (if sent by server).
366
- */
367
- static async connect(opts) {
368
- const stream = new _DurableStream(opts);
369
- await stream.head();
370
- return stream;
371
- }
372
- /**
373
- * HEAD metadata for a stream without creating a handle.
374
- */
375
- static async head(opts) {
376
- const stream = new _DurableStream(opts);
377
- return stream.head();
378
- }
379
- /**
380
- * Delete a stream without creating a handle.
381
- */
382
- static async delete(opts) {
383
- const stream = new _DurableStream(opts);
384
- return stream.delete();
385
- }
386
- // ============================================================================
387
- // Instance methods
388
- // ============================================================================
389
- /**
390
- * HEAD metadata for this stream.
391
- */
392
- async head(opts) {
393
- const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
394
- const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
395
- method: `HEAD`,
396
- headers: requestHeaders,
397
- signal: opts?.signal ?? __privateGet(this, _options).signal
398
- });
399
- if (!response.ok) {
400
- if (response.status === 404) {
401
- throw new DurableStreamError(
402
- `Stream not found: ${this.url}`,
403
- `NOT_FOUND`,
404
- 404
405
- );
406
- }
407
- throw await DurableStreamError.fromResponse(response, this.url);
408
- }
409
- const contentType = response.headers.get(`content-type`) ?? void 0;
410
- const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
411
- const etag = response.headers.get(`etag`) ?? void 0;
412
- const cacheControl = response.headers.get(`cache-control`) ?? void 0;
413
- if (contentType) {
414
- this.contentType = contentType;
415
- }
416
- return {
417
- exists: true,
418
- contentType,
419
- offset,
420
- etag,
421
- cacheControl
422
- };
423
- }
424
- /**
425
- * Create this stream (create-only PUT) using the URL/auth from the handle.
426
- */
427
- async create(opts) {
428
- const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
429
- if (opts?.contentType) {
430
- requestHeaders[`content-type`] = opts.contentType;
431
- }
432
- if (opts?.ttlSeconds !== void 0) {
433
- requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
434
- }
435
- if (opts?.expiresAt) {
436
- requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
437
- }
438
- const body = encodeBody(opts?.body);
439
- const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
440
- method: `PUT`,
441
- headers: requestHeaders,
442
- body,
443
- signal: __privateGet(this, _options).signal
444
- });
445
- if (!response.ok) {
446
- if (response.status === 409) {
447
- throw new DurableStreamError(
448
- `Stream already exists: ${this.url}`,
449
- `CONFLICT_EXISTS`,
450
- 409
451
- );
452
- }
453
- throw await DurableStreamError.fromResponse(response, this.url);
454
- }
455
- const responseContentType = response.headers.get(`content-type`);
456
- if (responseContentType) {
457
- this.contentType = responseContentType;
458
- } else if (opts?.contentType) {
459
- this.contentType = opts.contentType;
460
- }
461
- return this;
462
- }
463
- /**
464
- * Delete this stream.
465
- */
466
- async delete(opts) {
467
- const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
468
- const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
469
- method: `DELETE`,
470
- headers: requestHeaders,
471
- signal: opts?.signal ?? __privateGet(this, _options).signal
472
- });
473
- if (!response.ok) {
474
- if (response.status === 404) {
475
- throw new DurableStreamError(
476
- `Stream not found: ${this.url}`,
477
- `NOT_FOUND`,
478
- 404
479
- );
480
- }
481
- throw await DurableStreamError.fromResponse(response, this.url);
482
- }
483
- }
484
- /**
485
- * Append a single payload to the stream.
486
- *
487
- * - `body` may be Uint8Array, string, or any Fetch BodyInit.
488
- * - Strings are encoded as UTF-8.
489
- * - `seq` (if provided) is sent as stream-seq (writer coordination).
490
- */
491
- async append(body, opts) {
492
- const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
493
- if (opts?.contentType) {
494
- requestHeaders[`content-type`] = opts.contentType;
495
- } else if (this.contentType) {
496
- requestHeaders[`content-type`] = this.contentType;
497
- }
498
- if (opts?.seq) {
499
- requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
500
- }
501
- const encodedBody = encodeBody(body);
502
- const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
503
- method: `POST`,
504
- headers: requestHeaders,
505
- body: encodedBody,
506
- signal: opts?.signal ?? __privateGet(this, _options).signal
507
- });
508
- if (!response.ok) {
509
- if (response.status === 404) {
510
- throw new DurableStreamError(
511
- `Stream not found: ${this.url}`,
512
- `NOT_FOUND`,
513
- 404
514
- );
515
- }
516
- if (response.status === 409) {
517
- throw new DurableStreamError(
518
- `Sequence conflict: seq is lower than last appended`,
519
- `CONFLICT_SEQ`,
520
- 409
521
- );
522
- }
523
- if (response.status === 400) {
524
- throw new DurableStreamError(
525
- `Bad request (possibly content-type mismatch)`,
526
- `BAD_REQUEST`,
527
- 400
528
- );
529
- }
530
- throw await DurableStreamError.fromResponse(response, this.url);
531
- }
532
- }
533
- /**
534
- * Append a streaming body to the stream.
535
- *
536
- * - `source` yields Uint8Array or string chunks.
537
- * - Strings are encoded as UTF-8; no delimiters are added.
538
- * - Internally uses chunked transfer or HTTP/2 streaming.
539
- */
540
- async appendStream(source, opts) {
541
- const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
542
- if (opts?.contentType) {
543
- requestHeaders[`content-type`] = opts.contentType;
544
- } else if (this.contentType) {
545
- requestHeaders[`content-type`] = this.contentType;
546
- }
547
- if (opts?.seq) {
548
- requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
549
- }
550
- const body = toReadableStream(source);
551
- const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
552
- method: `POST`,
553
- headers: requestHeaders,
554
- body,
555
- // @ts-expect-error - duplex is needed for streaming but not in types
556
- duplex: `half`,
557
- signal: opts?.signal ?? __privateGet(this, _options).signal
558
- });
559
- if (!response.ok) {
560
- if (response.status === 404) {
561
- throw new DurableStreamError(
562
- `Stream not found: ${this.url}`,
563
- `NOT_FOUND`,
564
- 404
565
- );
566
- }
567
- if (response.status === 409) {
568
- throw new DurableStreamError(
569
- `Sequence conflict: seq is lower than last appended`,
570
- `CONFLICT_SEQ`,
571
- 409
572
- );
573
- }
574
- throw await DurableStreamError.fromResponse(response, this.url);
575
- }
576
- }
577
- /**
578
- * One-shot read.
579
- *
580
- * Performs a single GET from the specified offset/mode and returns a chunk.
581
- * Caller is responsible for persisting the returned offset if they want to resume.
582
- */
583
- async read(opts) {
584
- const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this, opts);
585
- const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
586
- method: `GET`,
587
- headers: requestHeaders,
588
- signal: opts?.signal ?? __privateGet(this, _options).signal
589
- });
590
- if (!response.ok) {
591
- if (response.status === 404) {
592
- throw new DurableStreamError(
593
- `Stream not found: ${this.url}`,
594
- `NOT_FOUND`,
595
- 404
596
- );
597
- }
598
- if (response.status === 204) {
599
- const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? opts?.offset ?? ``;
600
- return {
601
- data: new Uint8Array(0),
602
- offset,
603
- upToDate: true,
604
- contentType: this.contentType
605
- };
606
- }
607
- throw await DurableStreamError.fromResponse(response, this.url);
608
- }
609
- return __privateMethod(this, _DurableStream_instances, parseReadResponse_fn).call(this, response);
610
- }
611
- /**
612
- * Follow the stream as an AsyncIterable of chunks.
613
- *
614
- * Default behaviour:
615
- * - From `offset` (or start if omitted), repeatedly perform catch-up reads
616
- * until a chunk with upToDate=true.
617
- * - Then switch to live mode:
618
- * - SSE if content-type is text/* or application/json;
619
- * - otherwise long-poll.
620
- *
621
- * Explicit live override:
622
- * - live="catchup": only catch-up, stop at upToDate.
623
- * - live="long-poll": start long-polling immediately from offset.
624
- * - live="sse": start SSE immediately (throws if SSE not supported).
625
- */
626
- follow(opts) {
627
- const stream = this;
628
- const liveMode = opts?.live;
629
- let currentOffset = opts?.offset;
630
- let currentCursor = opts?.cursor;
631
- let isUpToDate = false;
632
- const aborter = new AbortController();
633
- const { signal, cleanup } = chainAborter(
634
- aborter,
635
- opts?.signal ?? __privateGet(stream, _options).signal
636
- );
637
- let sseIterator = null;
638
- return {
639
- [Symbol.asyncIterator]() {
640
- return {
641
- async next() {
642
- var _a, _b, _c, _d;
643
- try {
644
- if (signal.aborted) {
645
- cleanup();
646
- return { done: true, value: void 0 };
647
- }
648
- if (sseIterator) {
649
- const result = await sseIterator.next();
650
- if (result.done) {
651
- cleanup();
652
- }
653
- return result;
654
- }
655
- if (liveMode === `catchup`) {
656
- if (isUpToDate) {
657
- cleanup();
658
- return { done: true, value: void 0 };
659
- }
660
- const chunk = await stream.read({
661
- offset: currentOffset,
662
- cursor: currentCursor,
663
- signal
664
- });
665
- currentOffset = chunk.offset;
666
- currentCursor = chunk.cursor;
667
- isUpToDate = chunk.upToDate;
668
- return { done: false, value: chunk };
669
- }
670
- if (liveMode === `sse`) {
671
- sseIterator = __privateMethod(_a = stream, _DurableStream_instances, createSSEIterator_fn).call(_a, currentOffset, currentCursor, signal);
672
- return sseIterator.next();
673
- }
674
- if (liveMode === `long-poll`) {
675
- const chunk = await stream.read({
676
- offset: currentOffset,
677
- cursor: currentCursor,
678
- live: `long-poll`,
679
- signal
680
- });
681
- currentOffset = chunk.offset;
682
- currentCursor = chunk.cursor;
683
- return { done: false, value: chunk };
684
- }
685
- if (!isUpToDate) {
686
- const chunk = await stream.read({
687
- offset: currentOffset,
688
- cursor: currentCursor,
689
- signal
690
- });
691
- currentOffset = chunk.offset;
692
- currentCursor = chunk.cursor;
693
- isUpToDate = chunk.upToDate;
694
- if (chunk.contentType && !stream.contentType) {
695
- stream.contentType = chunk.contentType;
696
- }
697
- return { done: false, value: chunk };
698
- }
699
- if (__privateMethod(_b = stream, _DurableStream_instances, isSSECompatible_fn).call(_b)) {
700
- sseIterator = __privateMethod(_c = stream, _DurableStream_instances, createSSEIterator_fn).call(_c, currentOffset, currentCursor, signal);
701
- return sseIterator.next();
702
- } else {
703
- const chunk = await stream.read({
704
- offset: currentOffset,
705
- cursor: currentCursor,
706
- live: `long-poll`,
707
- signal
708
- });
709
- currentOffset = chunk.offset;
710
- currentCursor = chunk.cursor;
711
- return { done: false, value: chunk };
712
- }
713
- } catch (e) {
714
- if (e instanceof FetchBackoffAbortError) {
715
- cleanup();
716
- return { done: true, value: void 0 };
717
- }
718
- if (__privateGet(stream, _onError) && e instanceof Error) {
719
- const retryOpts = await __privateGet(_d = stream, _onError).call(_d, e);
720
- if (retryOpts && typeof retryOpts === `object`) {
721
- if (retryOpts.params) {
722
- __privateGet(stream, _options).params = {
723
- ...__privateGet(stream, _options).params ?? {},
724
- ...retryOpts.params
725
- };
726
- }
727
- if (retryOpts.headers) {
728
- __privateGet(stream, _options).headers = {
729
- ...__privateGet(stream, _options).headers ?? {},
730
- ...retryOpts.headers
731
- };
732
- }
733
- return this.next();
734
- }
735
- }
736
- cleanup();
737
- throw e;
738
- }
739
- },
740
- async return() {
741
- if (sseIterator?.return) {
742
- await sseIterator.return();
743
- }
744
- cleanup();
745
- aborter.abort();
746
- return { done: true, value: void 0 };
747
- }
748
- };
749
- }
750
- };
751
- }
752
- /**
753
- * Wrap follow() in a Web ReadableStream for piping.
754
- *
755
- * Backpressure:
756
- * - One chunk is pulled from follow() per pull() call, so standard
757
- * Web Streams backpressure semantics apply.
758
- *
759
- * Cancellation:
760
- * - rs.cancel() will stop follow() and abort any in-flight request.
761
- */
762
- toReadableStream(opts) {
763
- const iterator = this.follow(opts)[Symbol.asyncIterator]();
764
- return new ReadableStream({
765
- async pull(controller) {
766
- try {
767
- const { done, value } = await iterator.next();
768
- if (done) {
769
- controller.close();
770
- } else {
771
- controller.enqueue(value);
772
- }
773
- } catch (e) {
774
- controller.error(e);
775
- }
776
- },
777
- cancel() {
778
- iterator.return?.();
779
- }
780
- });
781
- }
782
- /**
783
- * Wrap follow() in a Web ReadableStream<Uint8Array> for piping raw bytes.
784
- *
785
- * This is the native format for many web stream APIs.
786
- */
787
- toByteStream(opts) {
788
- const iterator = this.follow(opts)[Symbol.asyncIterator]();
789
- return new ReadableStream({
790
- async pull(controller) {
791
- try {
792
- const { done, value } = await iterator.next();
793
- if (done) {
794
- controller.close();
795
- } else {
796
- controller.enqueue(value.data);
797
- }
798
- } catch (e) {
799
- controller.error(e);
800
- }
801
- },
802
- cancel() {
803
- iterator.return?.();
804
- }
805
- });
806
- }
807
- /**
808
- * Convenience: interpret data as JSON messages.
809
- * Parses each chunk's data as JSON and yields the parsed values.
810
- */
811
- async *json(opts) {
812
- const decoder = new TextDecoder();
813
- for await (const chunk of this.follow(opts)) {
814
- if (chunk.data.length > 0) {
815
- const text = decoder.decode(chunk.data);
816
- const lines = text.split(`
817
- `).filter((l) => l.trim());
818
- for (const line of lines) {
819
- yield JSON.parse(line);
820
- }
821
- }
822
- }
823
- }
824
- /**
825
- * Convenience: interpret data as text (UTF-8).
826
- */
827
- async *text(opts) {
828
- const decoder = opts?.decoder ?? new TextDecoder();
829
- for await (const chunk of this.follow(opts)) {
830
- if (chunk.data.length > 0) {
831
- yield decoder.decode(chunk.data, { stream: true });
832
- }
833
- }
834
- }
835
- };
836
- _options = new WeakMap();
837
- _fetchClient = new WeakMap();
838
- _sseFetchClient = new WeakMap();
839
- _onError = new WeakMap();
840
- _DurableStream_instances = new WeakSet();
841
- buildRequest_fn = async function(readOpts) {
842
- const requestHeaders = await __privateMethod(this, _DurableStream_instances, resolveHeaders_fn).call(this);
843
- const fetchUrl = new URL(this.url);
844
- const params = __privateGet(this, _options).params;
845
- if (params) {
846
- for (const [key, value] of Object.entries(params)) {
847
- if (value !== void 0) {
848
- const resolved = await resolveValue(value);
849
- fetchUrl.searchParams.set(key, resolved);
850
- }
851
- }
852
- }
853
- if (readOpts) {
854
- if (readOpts.offset) {
855
- fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, readOpts.offset);
856
- }
857
- if (readOpts.live) {
858
- fetchUrl.searchParams.set(LIVE_QUERY_PARAM, readOpts.live);
859
- }
860
- if (readOpts.cursor) {
861
- fetchUrl.searchParams.set(CURSOR_QUERY_PARAM, readOpts.cursor);
862
- }
863
- }
864
- return { requestHeaders, fetchUrl };
865
- };
866
- resolveHeaders_fn = async function() {
867
- const headers = {};
868
- const auth = __privateGet(this, _options).auth;
869
- if (auth) {
870
- if (`token` in auth) {
871
- const headerName = auth.headerName ?? `authorization`;
872
- headers[headerName] = `Bearer ${auth.token}`;
873
- } else if (`headers` in auth) {
874
- Object.assign(headers, auth.headers);
875
- } else if (`getHeaders` in auth) {
876
- const authHeaders = await auth.getHeaders();
877
- Object.assign(headers, authHeaders);
878
- }
879
- }
880
- const headersOpt = __privateGet(this, _options).headers;
881
- if (headersOpt) {
882
- for (const [key, value] of Object.entries(headersOpt)) {
883
- headers[key] = await resolveValue(value);
884
- }
885
- }
886
- return headers;
887
- };
888
- parseReadResponse_fn = async function(response) {
889
- const data = new Uint8Array(await response.arrayBuffer());
890
- const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
891
- const cursor = response.headers.get(STREAM_CURSOR_HEADER) ?? void 0;
892
- const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
893
- const etag = response.headers.get(`etag`) ?? void 0;
894
- const contentType = response.headers.get(`content-type`) ?? void 0;
895
- if (contentType && !this.contentType) {
896
- this.contentType = contentType;
897
- }
898
- return {
899
- data,
900
- offset,
901
- cursor,
902
- upToDate,
903
- etag,
904
- contentType
905
- };
906
- };
452
+ //#endregion
453
+ //#region src/sse.ts
907
454
  /**
908
- * Check if the stream's content type is compatible with SSE.
909
- */
910
- isSSECompatible_fn = function() {
911
- if (!this.contentType) return false;
912
- return SSE_COMPATIBLE_CONTENT_TYPES.some(
913
- (prefix) => this.contentType.startsWith(prefix)
914
- );
915
- };
455
+ * Parse SSE events from a ReadableStream<Uint8Array>.
456
+ * Yields parsed events as they arrive.
457
+ */
458
+ async function* parseSSEStream(stream$1, signal) {
459
+ const reader = stream$1.getReader();
460
+ const decoder = new TextDecoder();
461
+ let buffer = ``;
462
+ let currentEvent = { data: [] };
463
+ try {
464
+ while (true) {
465
+ if (signal?.aborted) break;
466
+ const { done, value } = await reader.read();
467
+ if (done) break;
468
+ buffer += decoder.decode(value, { stream: true });
469
+ const lines = buffer.split(`\n`);
470
+ buffer = lines.pop() ?? ``;
471
+ for (const line of lines) if (line === ``) {
472
+ if (currentEvent.type && currentEvent.data.length > 0) {
473
+ const dataStr = currentEvent.data.join(`\n`);
474
+ if (currentEvent.type === `data`) yield {
475
+ type: `data`,
476
+ data: dataStr
477
+ };
478
+ else if (currentEvent.type === `control`) try {
479
+ const control = JSON.parse(dataStr);
480
+ yield {
481
+ type: `control`,
482
+ streamNextOffset: control.streamNextOffset,
483
+ streamCursor: control.streamCursor,
484
+ upToDate: control.upToDate
485
+ };
486
+ } catch {}
487
+ }
488
+ currentEvent = { data: [] };
489
+ } else if (line.startsWith(`event:`)) currentEvent.type = line.slice(6).trim();
490
+ else if (line.startsWith(`data:`)) {
491
+ const content = line.slice(5);
492
+ currentEvent.data.push(content.startsWith(` `) ? content.slice(1) : content);
493
+ }
494
+ }
495
+ const remaining = decoder.decode();
496
+ if (remaining) buffer += remaining;
497
+ if (buffer && currentEvent.type && currentEvent.data.length > 0) {
498
+ const dataStr = currentEvent.data.join(`\n`);
499
+ if (currentEvent.type === `data`) yield {
500
+ type: `data`,
501
+ data: dataStr
502
+ };
503
+ else if (currentEvent.type === `control`) try {
504
+ const control = JSON.parse(dataStr);
505
+ yield {
506
+ type: `control`,
507
+ streamNextOffset: control.streamNextOffset,
508
+ streamCursor: control.streamCursor,
509
+ upToDate: control.upToDate
510
+ };
511
+ } catch {}
512
+ }
513
+ } finally {
514
+ reader.releaseLock();
515
+ }
516
+ }
517
+
518
+ //#endregion
519
+ //#region src/response.ts
916
520
  /**
917
- * Create an SSE connection that maintains a persistent connection with an internal queue.
918
- * Returns an AsyncIterator that yields chunks as they arrive.
919
- *
920
- * Follows the Electric client pattern:
921
- * - Buffer data events until control event (up-to-date)
922
- * - Flush buffer on control event
923
- * - Use promise chain for sequential processing
924
- */
925
- createSSEIterator_fn = function(initialOffset, initialCursor, signal) {
926
- if (!__privateMethod(this, _DurableStream_instances, isSSECompatible_fn).call(this)) {
927
- throw new DurableStreamError(
928
- `SSE is not supported for content-type: ${this.contentType}`,
929
- `SSE_NOT_SUPPORTED`,
930
- 400
931
- );
932
- }
933
- const chunkQueue = [];
934
- let pendingResolve = null;
935
- let currentOffset = initialOffset;
936
- let currentCursor = initialCursor;
937
- let connectionClosed = false;
938
- let connectionError = null;
939
- const connectionAbort = new AbortController();
940
- let dataBuffer = [];
941
- const stream = this;
942
- const startConnection = async () => {
943
- var _a;
944
- const { requestHeaders, fetchUrl } = await __privateMethod(_a = stream, _DurableStream_instances, buildRequest_fn).call(_a, {
945
- offset: currentOffset,
946
- cursor: currentCursor,
947
- live: `sse`
948
- });
949
- try {
950
- await (0, import_fetch_event_source.fetchEventSource)(fetchUrl.toString(), {
951
- headers: requestHeaders,
952
- fetch: __privateGet(stream, _sseFetchClient),
953
- signal: signal.aborted ? signal : connectionAbort.signal,
954
- onopen: async (response) => {
955
- if (!response.ok) {
956
- throw await DurableStreamError.fromResponse(response, stream.url);
957
- }
958
- const contentType = response.headers.get(`content-type`);
959
- if (contentType && !stream.contentType) {
960
- stream.contentType = contentType;
961
- }
962
- },
963
- onmessage: (event) => {
964
- var _a2;
965
- if (event.event === `data` && event.data) {
966
- const data = __privateMethod(_a2 = stream, _DurableStream_instances, parseSSEData_fn).call(_a2, event.data);
967
- dataBuffer.push(data);
968
- } else if (event.event === `control` && event.data) {
969
- try {
970
- const control = JSON.parse(event.data);
971
- const newOffset = control[STREAM_OFFSET_HEADER];
972
- const newCursor = control[STREAM_CURSOR_HEADER];
973
- const totalSize = dataBuffer.reduce(
974
- (sum, buf) => sum + buf.length,
975
- 0
976
- );
977
- const combinedData = new Uint8Array(totalSize);
978
- let offset = 0;
979
- for (const buf of dataBuffer) {
980
- combinedData.set(buf, offset);
981
- offset += buf.length;
982
- }
983
- const chunk = {
984
- data: combinedData,
985
- offset: newOffset ?? currentOffset ?? ``,
986
- cursor: newCursor,
987
- upToDate: true,
988
- contentType: stream.contentType
989
- };
990
- currentOffset = chunk.offset;
991
- currentCursor = chunk.cursor;
992
- dataBuffer = [];
993
- if (pendingResolve) {
994
- const resolve = pendingResolve;
995
- pendingResolve = null;
996
- resolve({ done: false, value: chunk });
997
- } else {
998
- chunkQueue.push(chunk);
999
- }
1000
- } catch {
1001
- }
1002
- }
1003
- },
1004
- onerror: (error) => {
1005
- throw error;
1006
- }
1007
- });
1008
- } catch (error) {
1009
- if (connectionAbort.signal.aborted || signal.aborted) {
1010
- throw new FetchBackoffAbortError();
1011
- }
1012
- throw error;
1013
- }
1014
- };
1015
- const connectionPromise = startConnection().catch((e) => {
1016
- if (e instanceof FetchBackoffAbortError) {
1017
- connectionClosed = true;
1018
- } else {
1019
- connectionError = e;
1020
- connectionClosed = true;
1021
- }
1022
- if (pendingResolve) {
1023
- const resolve = pendingResolve;
1024
- pendingResolve = null;
1025
- resolve({ done: true, value: void 0 });
1026
- }
1027
- });
1028
- const abortHandler = () => {
1029
- connectionAbort.abort();
1030
- connectionClosed = true;
1031
- if (pendingResolve) {
1032
- const resolve = pendingResolve;
1033
- pendingResolve = null;
1034
- resolve({ done: true, value: void 0 });
1035
- }
1036
- };
1037
- signal.addEventListener(`abort`, abortHandler, { once: true });
1038
- return {
1039
- async next() {
1040
- if (chunkQueue.length > 0) {
1041
- return { done: false, value: chunkQueue.shift() };
1042
- }
1043
- if (connectionError) {
1044
- throw connectionError;
1045
- }
1046
- if (connectionClosed || signal.aborted) {
1047
- return { done: true, value: void 0 };
1048
- }
1049
- return new Promise((resolve) => {
1050
- pendingResolve = resolve;
1051
- });
1052
- },
1053
- async return() {
1054
- signal.removeEventListener(`abort`, abortHandler);
1055
- connectionAbort.abort();
1056
- connectionClosed = true;
1057
- await connectionPromise.catch(() => {
1058
- });
1059
- return { done: true, value: void 0 };
1060
- }
1061
- };
521
+ * Implementation of the StreamResponse interface.
522
+ */
523
+ var StreamResponseImpl = class {
524
+ url;
525
+ contentType;
526
+ live;
527
+ startOffset;
528
+ #headers;
529
+ #status;
530
+ #statusText;
531
+ #ok;
532
+ #isLoading;
533
+ offset;
534
+ cursor;
535
+ upToDate;
536
+ #isJsonMode;
537
+ #abortController;
538
+ #fetchNext;
539
+ #startSSE;
540
+ #closedResolve;
541
+ #closedReject;
542
+ #closed;
543
+ #stopAfterUpToDate = false;
544
+ #consumptionMethod = null;
545
+ #sseResilience;
546
+ #lastSSEConnectionStartTime;
547
+ #consecutiveShortSSEConnections = 0;
548
+ #sseFallbackToLongPoll = false;
549
+ #responseStream;
550
+ constructor(config) {
551
+ this.url = config.url;
552
+ this.contentType = config.contentType;
553
+ this.live = config.live;
554
+ this.startOffset = config.startOffset;
555
+ this.offset = config.initialOffset;
556
+ this.cursor = config.initialCursor;
557
+ this.upToDate = config.initialUpToDate;
558
+ this.#headers = config.firstResponse.headers;
559
+ this.#status = config.firstResponse.status;
560
+ this.#statusText = config.firstResponse.statusText;
561
+ this.#ok = config.firstResponse.ok;
562
+ this.#isLoading = false;
563
+ this.#isJsonMode = config.isJsonMode;
564
+ this.#abortController = config.abortController;
565
+ this.#fetchNext = config.fetchNext;
566
+ this.#startSSE = config.startSSE;
567
+ this.#sseResilience = {
568
+ minConnectionDuration: config.sseResilience?.minConnectionDuration ?? 1e3,
569
+ maxShortConnections: config.sseResilience?.maxShortConnections ?? 3,
570
+ backoffBaseDelay: config.sseResilience?.backoffBaseDelay ?? 100,
571
+ backoffMaxDelay: config.sseResilience?.backoffMaxDelay ?? 5e3,
572
+ logWarnings: config.sseResilience?.logWarnings ?? true
573
+ };
574
+ this.#closed = new Promise((resolve, reject) => {
575
+ this.#closedResolve = resolve;
576
+ this.#closedReject = reject;
577
+ });
578
+ this.#responseStream = this.#createResponseStream(config.firstResponse);
579
+ }
580
+ get headers() {
581
+ return this.#headers;
582
+ }
583
+ get status() {
584
+ return this.#status;
585
+ }
586
+ get statusText() {
587
+ return this.#statusText;
588
+ }
589
+ get ok() {
590
+ return this.#ok;
591
+ }
592
+ get isLoading() {
593
+ return this.#isLoading;
594
+ }
595
+ #ensureJsonMode() {
596
+ if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
597
+ }
598
+ #markClosed() {
599
+ this.#closedResolve();
600
+ }
601
+ #markError(err) {
602
+ this.#closedReject(err);
603
+ }
604
+ /**
605
+ * Ensure only one consumption method is used per StreamResponse.
606
+ * Throws if any consumption method was already called.
607
+ */
608
+ #ensureNoConsumption(method) {
609
+ if (this.#consumptionMethod !== null) throw new DurableStreamError(`Cannot call ${method}() - this StreamResponse is already being consumed via ${this.#consumptionMethod}()`, `ALREADY_CONSUMED`);
610
+ this.#consumptionMethod = method;
611
+ }
612
+ /**
613
+ * Determine if we should continue with live updates based on live mode
614
+ * and whether we've received upToDate.
615
+ */
616
+ #shouldContinueLive() {
617
+ if (this.#stopAfterUpToDate && this.upToDate) return false;
618
+ if (this.live === false) return false;
619
+ return true;
620
+ }
621
+ /**
622
+ * Update state from response headers.
623
+ */
624
+ #updateStateFromResponse(response) {
625
+ const offset = response.headers.get(STREAM_OFFSET_HEADER);
626
+ if (offset) this.offset = offset;
627
+ const cursor = response.headers.get(STREAM_CURSOR_HEADER);
628
+ if (cursor) this.cursor = cursor;
629
+ this.upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
630
+ this.#headers = response.headers;
631
+ this.#status = response.status;
632
+ this.#statusText = response.statusText;
633
+ this.#ok = response.ok;
634
+ }
635
+ /**
636
+ * Extract stream metadata from Response headers.
637
+ * Used by subscriber APIs to get the correct offset/cursor/upToDate for each
638
+ * specific Response, rather than reading from `this` which may be stale due to
639
+ * ReadableStream prefetching or timing issues.
640
+ */
641
+ #getMetadataFromResponse(response) {
642
+ const offset = response.headers.get(STREAM_OFFSET_HEADER);
643
+ const cursor = response.headers.get(STREAM_CURSOR_HEADER);
644
+ const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
645
+ return {
646
+ offset: offset ?? this.offset,
647
+ cursor: cursor ?? this.cursor,
648
+ upToDate
649
+ };
650
+ }
651
+ /**
652
+ * Create a synthetic Response from SSE data with proper headers.
653
+ * Includes offset/cursor/upToDate in headers so subscribers can read them.
654
+ */
655
+ #createSSESyntheticResponse(data, offset, cursor, upToDate) {
656
+ const headers = {
657
+ "content-type": this.contentType ?? `application/json`,
658
+ [STREAM_OFFSET_HEADER]: String(offset)
659
+ };
660
+ if (cursor) headers[STREAM_CURSOR_HEADER] = cursor;
661
+ if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
662
+ return new Response(data, {
663
+ status: 200,
664
+ headers
665
+ });
666
+ }
667
+ /**
668
+ * Update instance state from an SSE control event.
669
+ */
670
+ #updateStateFromSSEControl(controlEvent) {
671
+ this.offset = controlEvent.streamNextOffset;
672
+ if (controlEvent.streamCursor) this.cursor = controlEvent.streamCursor;
673
+ if (controlEvent.upToDate !== void 0) this.upToDate = controlEvent.upToDate;
674
+ }
675
+ /**
676
+ * Mark the start of an SSE connection for duration tracking.
677
+ */
678
+ #markSSEConnectionStart() {
679
+ this.#lastSSEConnectionStartTime = Date.now();
680
+ }
681
+ /**
682
+ * Handle SSE connection end - check duration and manage fallback state.
683
+ * Returns a delay to wait before reconnecting, or null if should not reconnect.
684
+ */
685
+ async #handleSSEConnectionEnd() {
686
+ if (this.#lastSSEConnectionStartTime === void 0) return 0;
687
+ const connectionDuration = Date.now() - this.#lastSSEConnectionStartTime;
688
+ const wasAborted = this.#abortController.signal.aborted;
689
+ if (connectionDuration < this.#sseResilience.minConnectionDuration && !wasAborted) {
690
+ this.#consecutiveShortSSEConnections++;
691
+ if (this.#consecutiveShortSSEConnections >= this.#sseResilience.maxShortConnections) {
692
+ this.#sseFallbackToLongPoll = true;
693
+ if (this.#sseResilience.logWarnings) console.warn("[Durable Streams] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). Falling back to long polling. Your proxy must support streaming SSE responses (not buffer the complete response). Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy.");
694
+ return null;
695
+ } else {
696
+ const maxDelay = Math.min(this.#sseResilience.backoffMaxDelay, this.#sseResilience.backoffBaseDelay * Math.pow(2, this.#consecutiveShortSSEConnections));
697
+ const delayMs = Math.floor(Math.random() * maxDelay);
698
+ await new Promise((resolve) => setTimeout(resolve, delayMs));
699
+ return delayMs;
700
+ }
701
+ } else if (connectionDuration >= this.#sseResilience.minConnectionDuration) this.#consecutiveShortSSEConnections = 0;
702
+ return 0;
703
+ }
704
+ /**
705
+ * Try to reconnect SSE and return the new iterator, or null if reconnection
706
+ * is not possible or fails.
707
+ */
708
+ async #trySSEReconnect() {
709
+ if (this.#sseFallbackToLongPoll) return null;
710
+ if (!this.#shouldContinueLive() || !this.#startSSE) return null;
711
+ const delayOrNull = await this.#handleSSEConnectionEnd();
712
+ if (delayOrNull === null) return null;
713
+ this.#markSSEConnectionStart();
714
+ const newSSEResponse = await this.#startSSE(this.offset, this.cursor, this.#abortController.signal);
715
+ if (newSSEResponse.body) return parseSSEStream(newSSEResponse.body, this.#abortController.signal);
716
+ return null;
717
+ }
718
+ /**
719
+ * Process SSE events from the iterator.
720
+ * Returns an object indicating the result:
721
+ * - { type: 'response', response, newIterator? } - yield this response
722
+ * - { type: 'closed' } - stream should be closed
723
+ * - { type: 'error', error } - an error occurred
724
+ * - { type: 'continue', newIterator? } - continue processing (control-only event)
725
+ */
726
+ async #processSSEEvents(sseEventIterator) {
727
+ const { done, value: event } = await sseEventIterator.next();
728
+ if (done) {
729
+ try {
730
+ const newIterator = await this.#trySSEReconnect();
731
+ if (newIterator) return {
732
+ type: `continue`,
733
+ newIterator
734
+ };
735
+ } catch (err) {
736
+ return {
737
+ type: `error`,
738
+ error: err instanceof Error ? err : new Error(`SSE reconnection failed`)
739
+ };
740
+ }
741
+ return { type: `closed` };
742
+ }
743
+ if (event.type === `data`) return this.#processSSEDataEvent(event.data, sseEventIterator);
744
+ this.#updateStateFromSSEControl(event);
745
+ return { type: `continue` };
746
+ }
747
+ /**
748
+ * Process an SSE data event by waiting for its corresponding control event.
749
+ * In SSE protocol, control events come AFTER data events.
750
+ * Multiple data events may arrive before a single control event - we buffer them.
751
+ */
752
+ async #processSSEDataEvent(pendingData, sseEventIterator) {
753
+ let bufferedData = pendingData;
754
+ while (true) {
755
+ const { done: controlDone, value: controlEvent } = await sseEventIterator.next();
756
+ if (controlDone) {
757
+ const response = this.#createSSESyntheticResponse(bufferedData, this.offset, this.cursor, this.upToDate);
758
+ try {
759
+ const newIterator = await this.#trySSEReconnect();
760
+ return {
761
+ type: `response`,
762
+ response,
763
+ newIterator: newIterator ?? void 0
764
+ };
765
+ } catch (err) {
766
+ return {
767
+ type: `error`,
768
+ error: err instanceof Error ? err : new Error(`SSE reconnection failed`)
769
+ };
770
+ }
771
+ }
772
+ if (controlEvent.type === `control`) {
773
+ this.#updateStateFromSSEControl(controlEvent);
774
+ const response = this.#createSSESyntheticResponse(bufferedData, controlEvent.streamNextOffset, controlEvent.streamCursor, controlEvent.upToDate ?? false);
775
+ return {
776
+ type: `response`,
777
+ response
778
+ };
779
+ }
780
+ bufferedData += controlEvent.data;
781
+ }
782
+ }
783
+ /**
784
+ * Create the core ReadableStream<Response> that yields responses.
785
+ * This is consumed once - all consumption methods use this same stream.
786
+ *
787
+ * For long-poll mode: yields actual Response objects.
788
+ * For SSE mode: yields synthetic Response objects created from SSE data events.
789
+ */
790
+ #createResponseStream(firstResponse) {
791
+ let firstResponseYielded = false;
792
+ let sseEventIterator = null;
793
+ return new ReadableStream({
794
+ pull: async (controller) => {
795
+ try {
796
+ if (!firstResponseYielded) {
797
+ firstResponseYielded = true;
798
+ const isSSE = firstResponse.headers.get(`content-type`)?.includes(`text/event-stream`) ?? false;
799
+ if (isSSE && firstResponse.body) {
800
+ this.#markSSEConnectionStart();
801
+ sseEventIterator = parseSSEStream(firstResponse.body, this.#abortController.signal);
802
+ } else {
803
+ controller.enqueue(firstResponse);
804
+ if (this.upToDate && !this.#shouldContinueLive()) {
805
+ this.#markClosed();
806
+ controller.close();
807
+ return;
808
+ }
809
+ return;
810
+ }
811
+ }
812
+ if (sseEventIterator) while (true) {
813
+ const result = await this.#processSSEEvents(sseEventIterator);
814
+ switch (result.type) {
815
+ case `response`:
816
+ if (result.newIterator) sseEventIterator = result.newIterator;
817
+ controller.enqueue(result.response);
818
+ return;
819
+ case `closed`:
820
+ this.#markClosed();
821
+ controller.close();
822
+ return;
823
+ case `error`:
824
+ this.#markError(result.error);
825
+ controller.error(result.error);
826
+ return;
827
+ case `continue`:
828
+ if (result.newIterator) sseEventIterator = result.newIterator;
829
+ continue;
830
+ }
831
+ }
832
+ if (this.#shouldContinueLive()) {
833
+ if (this.#abortController.signal.aborted) {
834
+ this.#markClosed();
835
+ controller.close();
836
+ return;
837
+ }
838
+ const response = await this.#fetchNext(this.offset, this.cursor, this.#abortController.signal);
839
+ this.#updateStateFromResponse(response);
840
+ controller.enqueue(response);
841
+ return;
842
+ }
843
+ this.#markClosed();
844
+ controller.close();
845
+ } catch (err) {
846
+ if (this.#abortController.signal.aborted) {
847
+ this.#markClosed();
848
+ controller.close();
849
+ } else {
850
+ this.#markError(err instanceof Error ? err : new Error(String(err)));
851
+ controller.error(err);
852
+ }
853
+ }
854
+ },
855
+ cancel: () => {
856
+ this.#abortController.abort();
857
+ this.#markClosed();
858
+ }
859
+ });
860
+ }
861
+ /**
862
+ * Get the response stream reader. Can only be called once.
863
+ */
864
+ #getResponseReader() {
865
+ return this.#responseStream.getReader();
866
+ }
867
+ async body() {
868
+ this.#ensureNoConsumption(`body`);
869
+ this.#stopAfterUpToDate = true;
870
+ const reader = this.#getResponseReader();
871
+ const blobs = [];
872
+ try {
873
+ let result = await reader.read();
874
+ while (!result.done) {
875
+ const wasUpToDate = this.upToDate;
876
+ const blob = await result.value.blob();
877
+ if (blob.size > 0) blobs.push(blob);
878
+ if (wasUpToDate) break;
879
+ result = await reader.read();
880
+ }
881
+ } finally {
882
+ reader.releaseLock();
883
+ }
884
+ this.#markClosed();
885
+ if (blobs.length === 0) return new Uint8Array(0);
886
+ if (blobs.length === 1) return new Uint8Array(await blobs[0].arrayBuffer());
887
+ const combined = new Blob(blobs);
888
+ return new Uint8Array(await combined.arrayBuffer());
889
+ }
890
+ async json() {
891
+ this.#ensureNoConsumption(`json`);
892
+ this.#ensureJsonMode();
893
+ this.#stopAfterUpToDate = true;
894
+ const reader = this.#getResponseReader();
895
+ const items = [];
896
+ try {
897
+ let result = await reader.read();
898
+ while (!result.done) {
899
+ const wasUpToDate = this.upToDate;
900
+ const text = await result.value.text();
901
+ const content = text.trim() || `[]`;
902
+ const parsed = JSON.parse(content);
903
+ if (Array.isArray(parsed)) items.push(...parsed);
904
+ else items.push(parsed);
905
+ if (wasUpToDate) break;
906
+ result = await reader.read();
907
+ }
908
+ } finally {
909
+ reader.releaseLock();
910
+ }
911
+ this.#markClosed();
912
+ return items;
913
+ }
914
+ async text() {
915
+ this.#ensureNoConsumption(`text`);
916
+ this.#stopAfterUpToDate = true;
917
+ const reader = this.#getResponseReader();
918
+ const parts = [];
919
+ try {
920
+ let result = await reader.read();
921
+ while (!result.done) {
922
+ const wasUpToDate = this.upToDate;
923
+ const text = await result.value.text();
924
+ if (text) parts.push(text);
925
+ if (wasUpToDate) break;
926
+ result = await reader.read();
927
+ }
928
+ } finally {
929
+ reader.releaseLock();
930
+ }
931
+ this.#markClosed();
932
+ return parts.join(``);
933
+ }
934
+ /**
935
+ * Internal helper to create the body stream without consumption check.
936
+ * Used by both bodyStream() and textStream().
937
+ */
938
+ #createBodyStreamInternal() {
939
+ const { readable, writable } = new TransformStream();
940
+ const reader = this.#getResponseReader();
941
+ const pipeBodyStream = async () => {
942
+ try {
943
+ let result = await reader.read();
944
+ while (!result.done) {
945
+ const wasUpToDate = this.upToDate;
946
+ const body = result.value.body;
947
+ if (body) await body.pipeTo(writable, {
948
+ preventClose: true,
949
+ preventAbort: true,
950
+ preventCancel: true
951
+ });
952
+ if (wasUpToDate && !this.#shouldContinueLive()) break;
953
+ result = await reader.read();
954
+ }
955
+ await writable.close();
956
+ this.#markClosed();
957
+ } catch (err) {
958
+ if (this.#abortController.signal.aborted) {
959
+ try {
960
+ await writable.close();
961
+ } catch {}
962
+ this.#markClosed();
963
+ } else {
964
+ try {
965
+ await writable.abort(err);
966
+ } catch {}
967
+ this.#markError(err instanceof Error ? err : new Error(String(err)));
968
+ }
969
+ } finally {
970
+ reader.releaseLock();
971
+ }
972
+ };
973
+ pipeBodyStream();
974
+ return readable;
975
+ }
976
+ bodyStream() {
977
+ this.#ensureNoConsumption(`bodyStream`);
978
+ return asAsyncIterableReadableStream(this.#createBodyStreamInternal());
979
+ }
980
+ jsonStream() {
981
+ this.#ensureNoConsumption(`jsonStream`);
982
+ this.#ensureJsonMode();
983
+ const reader = this.#getResponseReader();
984
+ let pendingItems = [];
985
+ const stream$1 = new ReadableStream({
986
+ pull: async (controller) => {
987
+ if (pendingItems.length > 0) {
988
+ controller.enqueue(pendingItems.shift());
989
+ return;
990
+ }
991
+ const { done, value: response } = await reader.read();
992
+ if (done) {
993
+ this.#markClosed();
994
+ controller.close();
995
+ return;
996
+ }
997
+ const text = await response.text();
998
+ const content = text.trim() || `[]`;
999
+ const parsed = JSON.parse(content);
1000
+ pendingItems = Array.isArray(parsed) ? parsed : [parsed];
1001
+ if (pendingItems.length > 0) controller.enqueue(pendingItems.shift());
1002
+ },
1003
+ cancel: () => {
1004
+ reader.releaseLock();
1005
+ this.cancel();
1006
+ }
1007
+ });
1008
+ return asAsyncIterableReadableStream(stream$1);
1009
+ }
1010
+ textStream() {
1011
+ this.#ensureNoConsumption(`textStream`);
1012
+ const decoder = new TextDecoder();
1013
+ const stream$1 = this.#createBodyStreamInternal().pipeThrough(new TransformStream({
1014
+ transform(chunk, controller) {
1015
+ controller.enqueue(decoder.decode(chunk, { stream: true }));
1016
+ },
1017
+ flush(controller) {
1018
+ const remaining = decoder.decode();
1019
+ if (remaining) controller.enqueue(remaining);
1020
+ }
1021
+ }));
1022
+ return asAsyncIterableReadableStream(stream$1);
1023
+ }
1024
+ subscribeJson(subscriber) {
1025
+ this.#ensureNoConsumption(`subscribeJson`);
1026
+ this.#ensureJsonMode();
1027
+ const abortController = new AbortController();
1028
+ const reader = this.#getResponseReader();
1029
+ const consumeJsonSubscription = async () => {
1030
+ try {
1031
+ let result = await reader.read();
1032
+ while (!result.done) {
1033
+ if (abortController.signal.aborted) break;
1034
+ const response = result.value;
1035
+ const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
1036
+ const text = await response.text();
1037
+ const content = text.trim() || `[]`;
1038
+ const parsed = JSON.parse(content);
1039
+ const items = Array.isArray(parsed) ? parsed : [parsed];
1040
+ await subscriber({
1041
+ items,
1042
+ offset,
1043
+ cursor,
1044
+ upToDate
1045
+ });
1046
+ result = await reader.read();
1047
+ }
1048
+ this.#markClosed();
1049
+ } catch (e) {
1050
+ const isAborted = abortController.signal.aborted;
1051
+ const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
1052
+ if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
1053
+ else this.#markClosed();
1054
+ } finally {
1055
+ reader.releaseLock();
1056
+ }
1057
+ };
1058
+ consumeJsonSubscription();
1059
+ return () => {
1060
+ abortController.abort();
1061
+ this.cancel();
1062
+ };
1063
+ }
1064
+ subscribeBytes(subscriber) {
1065
+ this.#ensureNoConsumption(`subscribeBytes`);
1066
+ const abortController = new AbortController();
1067
+ const reader = this.#getResponseReader();
1068
+ const consumeBytesSubscription = async () => {
1069
+ try {
1070
+ let result = await reader.read();
1071
+ while (!result.done) {
1072
+ if (abortController.signal.aborted) break;
1073
+ const response = result.value;
1074
+ const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
1075
+ const buffer = await response.arrayBuffer();
1076
+ await subscriber({
1077
+ data: new Uint8Array(buffer),
1078
+ offset,
1079
+ cursor,
1080
+ upToDate
1081
+ });
1082
+ result = await reader.read();
1083
+ }
1084
+ this.#markClosed();
1085
+ } catch (e) {
1086
+ const isAborted = abortController.signal.aborted;
1087
+ const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
1088
+ if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
1089
+ else this.#markClosed();
1090
+ } finally {
1091
+ reader.releaseLock();
1092
+ }
1093
+ };
1094
+ consumeBytesSubscription();
1095
+ return () => {
1096
+ abortController.abort();
1097
+ this.cancel();
1098
+ };
1099
+ }
1100
+ subscribeText(subscriber) {
1101
+ this.#ensureNoConsumption(`subscribeText`);
1102
+ const abortController = new AbortController();
1103
+ const reader = this.#getResponseReader();
1104
+ const consumeTextSubscription = async () => {
1105
+ try {
1106
+ let result = await reader.read();
1107
+ while (!result.done) {
1108
+ if (abortController.signal.aborted) break;
1109
+ const response = result.value;
1110
+ const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
1111
+ const text = await response.text();
1112
+ await subscriber({
1113
+ text,
1114
+ offset,
1115
+ cursor,
1116
+ upToDate
1117
+ });
1118
+ result = await reader.read();
1119
+ }
1120
+ this.#markClosed();
1121
+ } catch (e) {
1122
+ const isAborted = abortController.signal.aborted;
1123
+ const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
1124
+ if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
1125
+ else this.#markClosed();
1126
+ } finally {
1127
+ reader.releaseLock();
1128
+ }
1129
+ };
1130
+ consumeTextSubscription();
1131
+ return () => {
1132
+ abortController.abort();
1133
+ this.cancel();
1134
+ };
1135
+ }
1136
+ cancel(reason) {
1137
+ this.#abortController.abort(reason);
1138
+ this.#markClosed();
1139
+ }
1140
+ get closed() {
1141
+ return this.#closed;
1142
+ }
1062
1143
  };
1144
+
1145
+ //#endregion
1146
+ //#region src/utils.ts
1063
1147
  /**
1064
- * Parse SSE data payload.
1065
- * For application/json, data is wrapped in [ and ], so we unwrap it.
1066
- */
1067
- parseSSEData_fn = function(data) {
1068
- const lines = data.split(`
1069
- `);
1070
- const content = lines.map((line) => {
1071
- if (line.startsWith(`data: `)) {
1072
- return line.slice(6);
1073
- }
1074
- return line;
1075
- }).join(`
1076
- `);
1077
- let text = content.trim();
1078
- if (this.contentType?.includes(`application/json`) && text.startsWith(`[`) && text.endsWith(`]`)) {
1079
- text = text.slice(1, -1).trim();
1080
- if (text.endsWith(`,`)) {
1081
- text = text.slice(0, -1);
1082
- }
1083
- }
1084
- return new TextEncoder().encode(text);
1085
- };
1086
- var DurableStream = _DurableStream;
1087
- async function resolveValue(value) {
1088
- if (typeof value === `function`) {
1089
- return value();
1090
- }
1091
- return value;
1148
+ * Resolve headers from HeadersRecord (supports async functions).
1149
+ * Unified implementation used by both stream() and DurableStream.
1150
+ */
1151
+ async function resolveHeaders(headers) {
1152
+ const resolved = {};
1153
+ if (!headers) return resolved;
1154
+ for (const [key, value] of Object.entries(headers)) if (typeof value === `function`) resolved[key] = await value();
1155
+ else resolved[key] = value;
1156
+ return resolved;
1157
+ }
1158
+ /**
1159
+ * Handle error responses from the server.
1160
+ * Throws appropriate DurableStreamError based on status code.
1161
+ */
1162
+ async function handleErrorResponse(response, url, context) {
1163
+ const status = response.status;
1164
+ if (status === 404) throw new DurableStreamError(`Stream not found: ${url}`, `NOT_FOUND`, 404);
1165
+ if (status === 409) {
1166
+ const message = context?.operation === `create` ? `Stream already exists: ${url}` : `Sequence conflict: seq is lower than last appended`;
1167
+ const code = context?.operation === `create` ? `CONFLICT_EXISTS` : `CONFLICT_SEQ`;
1168
+ throw new DurableStreamError(message, code, 409);
1169
+ }
1170
+ if (status === 400) throw new DurableStreamError(`Bad request (possibly content-type mismatch)`, `BAD_REQUEST`, 400);
1171
+ throw await DurableStreamError.fromResponse(response, url);
1172
+ }
1173
+ /**
1174
+ * Resolve params from ParamsRecord (supports async functions).
1175
+ */
1176
+ async function resolveParams(params) {
1177
+ const resolved = {};
1178
+ if (!params) return resolved;
1179
+ for (const [key, value] of Object.entries(params)) if (value !== void 0) if (typeof value === `function`) resolved[key] = await value();
1180
+ else resolved[key] = value;
1181
+ return resolved;
1092
1182
  }
1183
+
1184
+ //#endregion
1185
+ //#region src/stream-api.ts
1186
+ /**
1187
+ * Create a streaming session to read from a durable stream.
1188
+ *
1189
+ * This is a fetch-like API:
1190
+ * - The promise resolves after the first network request succeeds
1191
+ * - It rejects for auth/404/other protocol errors
1192
+ * - Returns a StreamResponse for consuming the data
1193
+ *
1194
+ * @example
1195
+ * ```typescript
1196
+ * // Catch-up JSON:
1197
+ * const res = await stream<{ message: string }>({
1198
+ * url,
1199
+ * auth,
1200
+ * offset: "0",
1201
+ * live: false,
1202
+ * })
1203
+ * const items = await res.json()
1204
+ *
1205
+ * // Live JSON:
1206
+ * const live = await stream<{ message: string }>({
1207
+ * url,
1208
+ * auth,
1209
+ * offset: savedOffset,
1210
+ * live: "auto",
1211
+ * })
1212
+ * live.subscribeJson(async (batch) => {
1213
+ * for (const item of batch.items) {
1214
+ * handle(item)
1215
+ * }
1216
+ * })
1217
+ * ```
1218
+ */
1219
+ async function stream(options) {
1220
+ if (!options.url) throw new DurableStreamError(`Invalid stream options: missing required url parameter`, `BAD_REQUEST`);
1221
+ let currentHeaders = options.headers;
1222
+ let currentParams = options.params;
1223
+ while (true) try {
1224
+ return await streamInternal({
1225
+ ...options,
1226
+ headers: currentHeaders,
1227
+ params: currentParams
1228
+ });
1229
+ } catch (err) {
1230
+ if (options.onError) {
1231
+ const retryOpts = await options.onError(err instanceof Error ? err : new Error(String(err)));
1232
+ if (retryOpts === void 0) throw err;
1233
+ if (retryOpts.params) currentParams = {
1234
+ ...currentParams,
1235
+ ...retryOpts.params
1236
+ };
1237
+ if (retryOpts.headers) currentHeaders = {
1238
+ ...currentHeaders,
1239
+ ...retryOpts.headers
1240
+ };
1241
+ continue;
1242
+ }
1243
+ throw err;
1244
+ }
1245
+ }
1246
+ /**
1247
+ * Internal implementation of stream that doesn't handle onError retries.
1248
+ */
1249
+ async function streamInternal(options) {
1250
+ const url = options.url instanceof URL ? options.url.toString() : options.url;
1251
+ const fetchUrl = new URL(url);
1252
+ const startOffset = options.offset ?? `-1`;
1253
+ fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset);
1254
+ const live = options.live ?? `auto`;
1255
+ if (live === `long-poll` || live === `sse`) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live);
1256
+ const params = await resolveParams(options.params);
1257
+ for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
1258
+ const headers = await resolveHeaders(options.headers);
1259
+ const abortController = new AbortController();
1260
+ if (options.signal) options.signal.addEventListener(`abort`, () => abortController.abort(options.signal?.reason), { once: true });
1261
+ const baseFetchClient = options.fetch ?? ((...args) => fetch(...args));
1262
+ const backoffOptions = options.backoffOptions ?? BackoffDefaults;
1263
+ const fetchClient = createFetchWithBackoff(baseFetchClient, backoffOptions);
1264
+ let firstResponse;
1265
+ try {
1266
+ firstResponse = await fetchClient(fetchUrl.toString(), {
1267
+ method: `GET`,
1268
+ headers,
1269
+ signal: abortController.signal
1270
+ });
1271
+ } catch (err) {
1272
+ if (err instanceof FetchBackoffAbortError) throw new DurableStreamError(`Stream request was aborted`, `UNKNOWN`);
1273
+ throw err;
1274
+ }
1275
+ const contentType = firstResponse.headers.get(`content-type`) ?? void 0;
1276
+ const initialOffset = firstResponse.headers.get(STREAM_OFFSET_HEADER) ?? startOffset;
1277
+ const initialCursor = firstResponse.headers.get(STREAM_CURSOR_HEADER) ?? void 0;
1278
+ const initialUpToDate = firstResponse.headers.has(STREAM_UP_TO_DATE_HEADER);
1279
+ const isJsonMode = options.json === true || (contentType?.includes(`application/json`) ?? false);
1280
+ const fetchNext = async (offset, cursor, signal) => {
1281
+ const nextUrl = new URL(url);
1282
+ nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
1283
+ if (live === `auto` || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
1284
+ else if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
1285
+ if (cursor) nextUrl.searchParams.set(`cursor`, cursor);
1286
+ const nextParams = await resolveParams(options.params);
1287
+ for (const [key, value] of Object.entries(nextParams)) nextUrl.searchParams.set(key, value);
1288
+ const nextHeaders = await resolveHeaders(options.headers);
1289
+ const response = await fetchClient(nextUrl.toString(), {
1290
+ method: `GET`,
1291
+ headers: nextHeaders,
1292
+ signal
1293
+ });
1294
+ if (!response.ok) await handleErrorResponse(response, url);
1295
+ return response;
1296
+ };
1297
+ const startSSE = live === `sse` ? async (offset, cursor, signal) => {
1298
+ const sseUrl = new URL(url);
1299
+ sseUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
1300
+ sseUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
1301
+ if (cursor) sseUrl.searchParams.set(`cursor`, cursor);
1302
+ const sseParams = await resolveParams(options.params);
1303
+ for (const [key, value] of Object.entries(sseParams)) sseUrl.searchParams.set(key, value);
1304
+ const sseHeaders = await resolveHeaders(options.headers);
1305
+ const response = await fetchClient(sseUrl.toString(), {
1306
+ method: `GET`,
1307
+ headers: sseHeaders,
1308
+ signal
1309
+ });
1310
+ if (!response.ok) await handleErrorResponse(response, url);
1311
+ return response;
1312
+ } : void 0;
1313
+ return new StreamResponseImpl({
1314
+ url,
1315
+ contentType,
1316
+ live,
1317
+ startOffset,
1318
+ isJsonMode,
1319
+ initialOffset,
1320
+ initialCursor,
1321
+ initialUpToDate,
1322
+ firstResponse,
1323
+ abortController,
1324
+ fetchNext,
1325
+ startSSE,
1326
+ sseResilience: options.sseResilience
1327
+ });
1328
+ }
1329
+
1330
+ //#endregion
1331
+ //#region src/stream.ts
1332
+ /**
1333
+ * Normalize content-type by extracting the media type (before any semicolon).
1334
+ * Handles cases like "application/json; charset=utf-8".
1335
+ */
1336
+ function normalizeContentType(contentType) {
1337
+ if (!contentType) return ``;
1338
+ return contentType.split(`;`)[0].trim().toLowerCase();
1339
+ }
1340
+ /**
1341
+ * Check if a value is a Promise or Promise-like (thenable).
1342
+ */
1343
+ function isPromiseLike(value) {
1344
+ return value !== null && typeof value === `object` && `then` in value && typeof value.then === `function`;
1345
+ }
1346
+ /**
1347
+ * A handle to a remote durable stream for read/write operations.
1348
+ *
1349
+ * This is a lightweight, reusable handle - not a persistent connection.
1350
+ * It does not automatically start reading or listening.
1351
+ * Create sessions as needed via stream().
1352
+ *
1353
+ * @example
1354
+ * ```typescript
1355
+ * // Create a new stream
1356
+ * const stream = await DurableStream.create({
1357
+ * url: "https://streams.example.com/my-stream",
1358
+ * headers: { Authorization: "Bearer my-token" },
1359
+ * contentType: "application/json"
1360
+ * });
1361
+ *
1362
+ * // Write data
1363
+ * await stream.append({ message: "hello" });
1364
+ *
1365
+ * // Read with the new API
1366
+ * const res = await stream.stream<{ message: string }>();
1367
+ * res.subscribeJson(async (batch) => {
1368
+ * for (const item of batch.items) {
1369
+ * console.log(item.message);
1370
+ * }
1371
+ * });
1372
+ * ```
1373
+ */
1374
+ var DurableStream = class DurableStream {
1375
+ /**
1376
+ * The URL of the durable stream.
1377
+ */
1378
+ url;
1379
+ /**
1380
+ * The content type of the stream (populated after connect/head/read).
1381
+ */
1382
+ contentType;
1383
+ #options;
1384
+ #fetchClient;
1385
+ #onError;
1386
+ #batchingEnabled;
1387
+ #queue;
1388
+ #buffer = [];
1389
+ /**
1390
+ * Create a cold handle to a stream.
1391
+ * No network IO is performed by the constructor.
1392
+ */
1393
+ constructor(opts) {
1394
+ validateOptions(opts);
1395
+ const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
1396
+ this.url = urlStr;
1397
+ this.#options = {
1398
+ ...opts,
1399
+ url: urlStr
1400
+ };
1401
+ this.#onError = opts.onError;
1402
+ this.#batchingEnabled = opts.batching !== false;
1403
+ if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
1404
+ const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
1405
+ const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
1406
+ const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
1407
+ this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
1408
+ }
1409
+ /**
1410
+ * Create a new stream (create-only PUT) and return a handle.
1411
+ * Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
1412
+ */
1413
+ static async create(opts) {
1414
+ const stream$1 = new DurableStream(opts);
1415
+ await stream$1.create({
1416
+ contentType: opts.contentType,
1417
+ ttlSeconds: opts.ttlSeconds,
1418
+ expiresAt: opts.expiresAt,
1419
+ body: opts.body
1420
+ });
1421
+ return stream$1;
1422
+ }
1423
+ /**
1424
+ * Validate that a stream exists and fetch metadata via HEAD.
1425
+ * Returns a handle with contentType populated (if sent by server).
1426
+ *
1427
+ * **Important**: This only performs a HEAD request for validation - it does
1428
+ * NOT open a session or start reading data. To read from the stream, call
1429
+ * `stream()` on the returned handle.
1430
+ *
1431
+ * @example
1432
+ * ```typescript
1433
+ * // Validate stream exists before reading
1434
+ * const handle = await DurableStream.connect({ url })
1435
+ * const res = await handle.stream() // Now actually read
1436
+ * ```
1437
+ */
1438
+ static async connect(opts) {
1439
+ const stream$1 = new DurableStream(opts);
1440
+ await stream$1.head();
1441
+ return stream$1;
1442
+ }
1443
+ /**
1444
+ * HEAD metadata for a stream without creating a handle.
1445
+ */
1446
+ static async head(opts) {
1447
+ const stream$1 = new DurableStream(opts);
1448
+ return stream$1.head();
1449
+ }
1450
+ /**
1451
+ * Delete a stream without creating a handle.
1452
+ */
1453
+ static async delete(opts) {
1454
+ const stream$1 = new DurableStream(opts);
1455
+ return stream$1.delete();
1456
+ }
1457
+ /**
1458
+ * HEAD metadata for this stream.
1459
+ */
1460
+ async head(opts) {
1461
+ const { requestHeaders, fetchUrl } = await this.#buildRequest();
1462
+ const response = await this.#fetchClient(fetchUrl.toString(), {
1463
+ method: `HEAD`,
1464
+ headers: requestHeaders,
1465
+ signal: opts?.signal ?? this.#options.signal
1466
+ });
1467
+ if (!response.ok) await handleErrorResponse(response, this.url);
1468
+ const contentType = response.headers.get(`content-type`) ?? void 0;
1469
+ const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
1470
+ const etag = response.headers.get(`etag`) ?? void 0;
1471
+ const cacheControl = response.headers.get(`cache-control`) ?? void 0;
1472
+ if (contentType) this.contentType = contentType;
1473
+ return {
1474
+ exists: true,
1475
+ contentType,
1476
+ offset,
1477
+ etag,
1478
+ cacheControl
1479
+ };
1480
+ }
1481
+ /**
1482
+ * Create this stream (create-only PUT) using the URL/auth from the handle.
1483
+ */
1484
+ async create(opts) {
1485
+ const { requestHeaders, fetchUrl } = await this.#buildRequest();
1486
+ const contentType = opts?.contentType ?? this.#options.contentType;
1487
+ if (contentType) requestHeaders[`content-type`] = contentType;
1488
+ if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
1489
+ if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
1490
+ const body = encodeBody(opts?.body);
1491
+ const response = await this.#fetchClient(fetchUrl.toString(), {
1492
+ method: `PUT`,
1493
+ headers: requestHeaders,
1494
+ body,
1495
+ signal: this.#options.signal
1496
+ });
1497
+ if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
1498
+ const responseContentType = response.headers.get(`content-type`);
1499
+ if (responseContentType) this.contentType = responseContentType;
1500
+ else if (contentType) this.contentType = contentType;
1501
+ return this;
1502
+ }
1503
+ /**
1504
+ * Delete this stream.
1505
+ */
1506
+ async delete(opts) {
1507
+ const { requestHeaders, fetchUrl } = await this.#buildRequest();
1508
+ const response = await this.#fetchClient(fetchUrl.toString(), {
1509
+ method: `DELETE`,
1510
+ headers: requestHeaders,
1511
+ signal: opts?.signal ?? this.#options.signal
1512
+ });
1513
+ if (!response.ok) await handleErrorResponse(response, this.url);
1514
+ }
1515
+ /**
1516
+ * Append a single payload to the stream.
1517
+ *
1518
+ * When batching is enabled (default), multiple append() calls made while
1519
+ * a POST is in-flight will be batched together into a single request.
1520
+ * This significantly improves throughput for high-frequency writes.
1521
+ *
1522
+ * - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
1523
+ * - `body` may also be a Promise that resolves to any of the above types.
1524
+ * - Strings are encoded as UTF-8.
1525
+ * - `seq` (if provided) is sent as stream-seq (writer coordination).
1526
+ *
1527
+ * @example
1528
+ * ```typescript
1529
+ * // Direct value
1530
+ * await stream.append({ message: "hello" });
1531
+ *
1532
+ * // Promise value - awaited before buffering
1533
+ * await stream.append(fetchData());
1534
+ * await stream.append(Promise.all([a, b, c]));
1535
+ * ```
1536
+ */
1537
+ async append(body, opts) {
1538
+ const resolvedBody = isPromiseLike(body) ? await body : body;
1539
+ if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
1540
+ return this.#appendDirect(resolvedBody, opts);
1541
+ }
1542
+ /**
1543
+ * Direct append without batching (used when batching is disabled).
1544
+ */
1545
+ async #appendDirect(body, opts) {
1546
+ const { requestHeaders, fetchUrl } = await this.#buildRequest();
1547
+ const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
1548
+ if (contentType) requestHeaders[`content-type`] = contentType;
1549
+ if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
1550
+ const isJson = normalizeContentType(contentType) === `application/json`;
1551
+ const bodyToEncode = isJson ? [body] : body;
1552
+ const encodedBody = encodeBody(bodyToEncode);
1553
+ const response = await this.#fetchClient(fetchUrl.toString(), {
1554
+ method: `POST`,
1555
+ headers: requestHeaders,
1556
+ body: encodedBody,
1557
+ signal: opts?.signal ?? this.#options.signal
1558
+ });
1559
+ if (!response.ok) await handleErrorResponse(response, this.url);
1560
+ }
1561
+ /**
1562
+ * Append with batching - buffers messages and sends them in batches.
1563
+ */
1564
+ async #appendWithBatching(body, opts) {
1565
+ return new Promise((resolve, reject) => {
1566
+ this.#buffer.push({
1567
+ data: body,
1568
+ seq: opts?.seq,
1569
+ contentType: opts?.contentType,
1570
+ signal: opts?.signal,
1571
+ resolve,
1572
+ reject
1573
+ });
1574
+ if (this.#queue.idle()) {
1575
+ const batch = this.#buffer.splice(0);
1576
+ this.#queue.push(batch).catch((err) => {
1577
+ for (const msg of batch) msg.reject(err);
1578
+ });
1579
+ }
1580
+ });
1581
+ }
1582
+ /**
1583
+ * Batch worker - processes batches of messages.
1584
+ */
1585
+ async #batchWorker(batch) {
1586
+ try {
1587
+ await this.#sendBatch(batch);
1588
+ for (const msg of batch) msg.resolve();
1589
+ if (this.#buffer.length > 0) {
1590
+ const nextBatch = this.#buffer.splice(0);
1591
+ this.#queue.push(nextBatch).catch((err) => {
1592
+ for (const msg of nextBatch) msg.reject(err);
1593
+ });
1594
+ }
1595
+ } catch (error) {
1596
+ for (const msg of batch) msg.reject(error);
1597
+ for (const msg of this.#buffer) msg.reject(error);
1598
+ this.#buffer = [];
1599
+ throw error;
1600
+ }
1601
+ }
1602
+ /**
1603
+ * Send a batch of messages as a single POST request.
1604
+ */
1605
+ async #sendBatch(batch) {
1606
+ if (batch.length === 0) return;
1607
+ const { requestHeaders, fetchUrl } = await this.#buildRequest();
1608
+ const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
1609
+ if (contentType) requestHeaders[`content-type`] = contentType;
1610
+ let highestSeq;
1611
+ for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
1612
+ highestSeq = batch[i].seq;
1613
+ break;
1614
+ }
1615
+ if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
1616
+ const isJson = normalizeContentType(contentType) === `application/json`;
1617
+ let batchedBody;
1618
+ if (isJson) {
1619
+ const values = batch.map((m) => m.data);
1620
+ batchedBody = JSON.stringify(values);
1621
+ } else {
1622
+ const totalSize = batch.reduce((sum, m) => {
1623
+ const size = typeof m.data === `string` ? new TextEncoder().encode(m.data).length : m.data.length;
1624
+ return sum + size;
1625
+ }, 0);
1626
+ const concatenated = new Uint8Array(totalSize);
1627
+ let offset = 0;
1628
+ for (const msg of batch) {
1629
+ const bytes = typeof msg.data === `string` ? new TextEncoder().encode(msg.data) : msg.data;
1630
+ concatenated.set(bytes, offset);
1631
+ offset += bytes.length;
1632
+ }
1633
+ batchedBody = concatenated;
1634
+ }
1635
+ const signals = [];
1636
+ if (this.#options.signal) signals.push(this.#options.signal);
1637
+ for (const msg of batch) if (msg.signal) signals.push(msg.signal);
1638
+ const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
1639
+ const response = await this.#fetchClient(fetchUrl.toString(), {
1640
+ method: `POST`,
1641
+ headers: requestHeaders,
1642
+ body: batchedBody,
1643
+ signal: combinedSignal
1644
+ });
1645
+ if (!response.ok) await handleErrorResponse(response, this.url);
1646
+ }
1647
+ /**
1648
+ * Append a streaming body to the stream.
1649
+ *
1650
+ * Supports piping from any ReadableStream or async iterable:
1651
+ * - `source` yields Uint8Array or string chunks.
1652
+ * - Strings are encoded as UTF-8; no delimiters are added.
1653
+ * - Internally uses chunked transfer or HTTP/2 streaming.
1654
+ *
1655
+ * @example
1656
+ * ```typescript
1657
+ * // Pipe from a ReadableStream
1658
+ * const readable = new ReadableStream({
1659
+ * start(controller) {
1660
+ * controller.enqueue("chunk 1");
1661
+ * controller.enqueue("chunk 2");
1662
+ * controller.close();
1663
+ * }
1664
+ * });
1665
+ * await stream.appendStream(readable);
1666
+ *
1667
+ * // Pipe from an async generator
1668
+ * async function* generate() {
1669
+ * yield "line 1\n";
1670
+ * yield "line 2\n";
1671
+ * }
1672
+ * await stream.appendStream(generate());
1673
+ *
1674
+ * // Pipe from fetch response body
1675
+ * const response = await fetch("https://example.com/data");
1676
+ * await stream.appendStream(response.body!);
1677
+ * ```
1678
+ */
1679
+ async appendStream(source, opts) {
1680
+ const { requestHeaders, fetchUrl } = await this.#buildRequest();
1681
+ const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
1682
+ if (contentType) requestHeaders[`content-type`] = contentType;
1683
+ if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
1684
+ const body = toReadableStream(source);
1685
+ const response = await this.#fetchClient(fetchUrl.toString(), {
1686
+ method: `POST`,
1687
+ headers: requestHeaders,
1688
+ body,
1689
+ duplex: `half`,
1690
+ signal: opts?.signal ?? this.#options.signal
1691
+ });
1692
+ if (!response.ok) await handleErrorResponse(response, this.url);
1693
+ }
1694
+ /**
1695
+ * Create a writable stream that pipes data to this durable stream.
1696
+ *
1697
+ * Returns a WritableStream that can be used with `pipeTo()` or
1698
+ * `pipeThrough()` from any ReadableStream source.
1699
+ *
1700
+ * @example
1701
+ * ```typescript
1702
+ * // Pipe from fetch response
1703
+ * const response = await fetch("https://example.com/data");
1704
+ * await response.body!.pipeTo(stream.writable());
1705
+ *
1706
+ * // Pipe through a transform
1707
+ * const readable = someStream.pipeThrough(new TextEncoderStream());
1708
+ * await readable.pipeTo(stream.writable());
1709
+ * ```
1710
+ */
1711
+ writable(opts) {
1712
+ const chunks = [];
1713
+ const stream$1 = this;
1714
+ return new WritableStream({
1715
+ write(chunk) {
1716
+ chunks.push(chunk);
1717
+ },
1718
+ async close() {
1719
+ if (chunks.length > 0) {
1720
+ const readable = new ReadableStream({ start(controller) {
1721
+ for (const chunk of chunks) controller.enqueue(chunk);
1722
+ controller.close();
1723
+ } });
1724
+ await stream$1.appendStream(readable, opts);
1725
+ }
1726
+ },
1727
+ abort(reason) {
1728
+ console.error(`WritableStream aborted:`, reason);
1729
+ }
1730
+ });
1731
+ }
1732
+ /**
1733
+ * Start a fetch-like streaming session against this handle's URL/headers/params.
1734
+ * The first request is made inside this method; it resolves when we have
1735
+ * a valid first response, or rejects on errors.
1736
+ *
1737
+ * Call-specific headers and params are merged with handle-level ones,
1738
+ * with call-specific values taking precedence.
1739
+ *
1740
+ * @example
1741
+ * ```typescript
1742
+ * const handle = await DurableStream.connect({
1743
+ * url,
1744
+ * headers: { Authorization: `Bearer ${token}` }
1745
+ * });
1746
+ * const res = await handle.stream<{ message: string }>();
1747
+ *
1748
+ * // Accumulate all JSON items
1749
+ * const items = await res.json();
1750
+ *
1751
+ * // Or stream live with ReadableStream
1752
+ * const reader = res.jsonStream().getReader();
1753
+ * let result = await reader.read();
1754
+ * while (!result.done) {
1755
+ * console.log(result.value);
1756
+ * result = await reader.read();
1757
+ * }
1758
+ *
1759
+ * // Or use subscriber for backpressure-aware consumption
1760
+ * res.subscribeJson(async (batch) => {
1761
+ * for (const item of batch.items) {
1762
+ * console.log(item);
1763
+ * }
1764
+ * });
1765
+ * ```
1766
+ */
1767
+ async stream(options) {
1768
+ if (options?.live === `sse` && this.contentType) {
1769
+ const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => this.contentType.startsWith(prefix));
1770
+ if (!isSSECompatible) throw new DurableStreamError(`SSE is not supported for content-type: ${this.contentType}`, `SSE_NOT_SUPPORTED`, 400);
1771
+ }
1772
+ const mergedHeaders = {
1773
+ ...this.#options.headers,
1774
+ ...options?.headers
1775
+ };
1776
+ const mergedParams = {
1777
+ ...this.#options.params,
1778
+ ...options?.params
1779
+ };
1780
+ return stream({
1781
+ url: this.url,
1782
+ headers: mergedHeaders,
1783
+ params: mergedParams,
1784
+ signal: options?.signal ?? this.#options.signal,
1785
+ fetch: this.#options.fetch,
1786
+ backoffOptions: this.#options.backoffOptions,
1787
+ offset: options?.offset,
1788
+ live: options?.live,
1789
+ json: options?.json,
1790
+ onError: options?.onError ?? this.#onError
1791
+ });
1792
+ }
1793
+ /**
1794
+ * Build request headers and URL.
1795
+ */
1796
+ async #buildRequest() {
1797
+ const requestHeaders = await resolveHeaders(this.#options.headers);
1798
+ const fetchUrl = new URL(this.url);
1799
+ const params = await resolveParams(this.#options.params);
1800
+ for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
1801
+ return {
1802
+ requestHeaders,
1803
+ fetchUrl
1804
+ };
1805
+ }
1806
+ };
1807
/**
 * Encode an append body into something fetch can send.
 * - `undefined` passes through (no body)
 * - strings become UTF-8 bytes
 * - binary/stream-like values (Uint8Array, Blob, FormData, ReadableStream,
 *   ArrayBuffer, any ArrayBuffer view) pass through untouched
 * - everything else is JSON-serialized, then UTF-8 encoded
 */
function encodeBody(body) {
	if (body === undefined) return undefined;
	if (typeof body === `string`) {
		return new TextEncoder().encode(body);
	}
	// Values fetch can already consume directly are forwarded as-is.
	const passThrough =
		body instanceof Uint8Array ||
		body instanceof Blob ||
		body instanceof FormData ||
		body instanceof ReadableStream ||
		body instanceof ArrayBuffer ||
		ArrayBuffer.isView(body);
	if (passThrough) return body;
	return new TextEncoder().encode(JSON.stringify(body));
}
1819
/**
 * Normalize a chunk source into a ReadableStream of Uint8Array.
 *
 * Accepts a ReadableStream, an async iterable, or a plain sync iterable
 * (array, sync generator). String chunks are UTF-8 encoded; all other
 * chunks are forwarded unchanged.
 *
 * Fixes vs. previous version:
 * - plain sync iterables no longer throw (`Symbol.asyncIterator` was called
 *   unconditionally); we fall back to `Symbol.iterator`, and the `await` on
 *   `iterator.next()` handles both protocol shapes.
 * - a single TextEncoder is reused instead of allocating one per string
 *   chunk on the ReadableStream path.
 *
 * @param source ReadableStream, async iterable, or sync iterable of
 *   string/Uint8Array chunks
 * @returns ReadableStream emitting Uint8Array chunks
 */
function toReadableStream(source) {
	const encoder = new TextEncoder();
	const toBytes = (chunk) => (typeof chunk === `string` ? encoder.encode(chunk) : chunk);
	if (source instanceof ReadableStream) {
		return source.pipeThrough(
			new TransformStream({
				transform(chunk, controller) {
					controller.enqueue(toBytes(chunk));
				}
			})
		);
	}
	// Prefer the async iteration protocol; fall back to sync iterables.
	const iterator = source[Symbol.asyncIterator]?.() ?? source[Symbol.iterator]();
	return new ReadableStream({
		async pull(controller) {
			try {
				const { done, value } = await iterator.next();
				if (done) controller.close();
				else controller.enqueue(toBytes(value));
			} catch (e) {
				controller.error(e);
			}
		},
		cancel(reason) {
			// Forward the reason so generator sources can observe it in finally/return.
			iterator.return?.(reason);
		}
	});
}
1845
/**
 * Validate common stream options before issuing any request.
 *
 * @throws MissingStreamUrlError when `options.url` is absent or empty
 * @throws InvalidSignalError when a truthy `signal` is provided that is
 *   not an AbortSignal instance
 */
function validateOptions(options) {
	const { url, signal } = options;
	if (!url) {
		throw new MissingStreamUrlError();
	}
	if (signal && !(signal instanceof AbortSignal)) {
		throw new InvalidSignalError();
	}
}
1149
//#endregion
// Public CommonJS surface of @durable-streams/client.
Object.assign(exports, {
	BackoffDefaults,
	CURSOR_QUERY_PARAM,
	DURABLE_STREAM_PROTOCOL_QUERY_PARAMS,
	DurableStream,
	DurableStreamError,
	FetchBackoffAbortError,
	FetchError,
	InvalidSignalError,
	LIVE_QUERY_PARAM,
	MissingStreamUrlError,
	OFFSET_QUERY_PARAM,
	SSE_COMPATIBLE_CONTENT_TYPES,
	STREAM_CURSOR_HEADER,
	STREAM_EXPIRES_AT_HEADER,
	STREAM_OFFSET_HEADER,
	STREAM_SEQ_HEADER,
	STREAM_TTL_HEADER,
	STREAM_UP_TO_DATE_HEADER,
	asAsyncIterableReadableStream,
	createFetchWithBackoff,
	createFetchWithConsumedBody,
	stream
});