@durable-streams/client 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +799 -0
- package/dist/index.cjs +1172 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +627 -0
- package/dist/index.d.ts +1072 -0
- package/dist/index.js +1830 -0
- package/dist/index.js.map +1 -0
- package/package.json +46 -0
- package/src/asyncIterableReadableStream.ts +220 -0
- package/src/constants.ts +105 -0
- package/src/error.ts +189 -0
- package/src/fetch.ts +267 -0
- package/src/index.ts +103 -0
- package/src/response.ts +1053 -0
- package/src/sse.ts +130 -0
- package/src/stream-api.ts +284 -0
- package/src/stream.ts +867 -0
- package/src/types.ts +737 -0
- package/src/utils.ts +104 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1172 @@
|
|
|
1
|
+
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Throw a TypeError with the given message; used by the private-member guards.
var __typeError = (msg) => {
  throw TypeError(msg);
};
// Define every key of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and keys already present; source-descriptor enumerability is preserved.
var __copyProps = (to, from, except, desc) => {
  if ((from && typeof from === "object") || typeof from === "function") {
    for (let key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !desc || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Wrap a module namespace object as a CommonJS exports object with __esModule set.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Guard: ensure `obj` actually holds the private member before access.
var __accessCheck = (obj, member, msg) => {
  if (!member.has(obj)) __typeError("Cannot " + msg);
  return true;
};
// Read a private field (or invoke its getter, when one is supplied).
var __privateGet = (obj, member, getter) => {
  __accessCheck(obj, member, "read from private field");
  return getter ? getter.call(obj) : member.get(obj);
};
// Register a private member on `obj`; WeakSet backs private methods,
// WeakMap backs private fields.
var __privateAdd = (obj, member, value) => {
  if (member.has(obj)) __typeError("Cannot add the same private member more than once");
  return member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
};
// Write a private field (or invoke its setter) and return the written value.
var __privateSet = (obj, member, value, setter) => {
  __accessCheck(obj, member, "write to private field");
  if (setter) setter.call(obj, value);
  else member.set(obj, value);
  return value;
};
// Access a private method after verifying membership.
var __privateMethod = (obj, member, method) => {
  __accessCheck(obj, member, "access private method");
  return method;
};
|
|
27
|
+
|
|
28
|
+
// src/index.ts
// Public export surface of the bundle. `__export` installs each name as a
// lazy getter, so the values below may be defined later in the file without
// any ordering hazard; `module.exports` receives the assembled namespace.
var index_exports = {};
__export(index_exports, {
  BackoffDefaults: () => BackoffDefaults,
  CURSOR_QUERY_PARAM: () => CURSOR_QUERY_PARAM,
  DURABLE_STREAM_PROTOCOL_QUERY_PARAMS: () => DURABLE_STREAM_PROTOCOL_QUERY_PARAMS,
  DurableStream: () => DurableStream,
  DurableStreamError: () => DurableStreamError,
  FetchBackoffAbortError: () => FetchBackoffAbortError,
  FetchError: () => FetchError,
  InvalidSignalError: () => InvalidSignalError,
  LIVE_QUERY_PARAM: () => LIVE_QUERY_PARAM,
  MissingStreamUrlError: () => MissingStreamUrlError,
  OFFSET_QUERY_PARAM: () => OFFSET_QUERY_PARAM,
  SSE_COMPATIBLE_CONTENT_TYPES: () => SSE_COMPATIBLE_CONTENT_TYPES,
  STREAM_CURSOR_HEADER: () => STREAM_CURSOR_HEADER,
  STREAM_EXPIRES_AT_HEADER: () => STREAM_EXPIRES_AT_HEADER,
  STREAM_OFFSET_HEADER: () => STREAM_OFFSET_HEADER,
  STREAM_SEQ_HEADER: () => STREAM_SEQ_HEADER,
  STREAM_TTL_HEADER: () => STREAM_TTL_HEADER,
  STREAM_UP_TO_DATE_HEADER: () => STREAM_UP_TO_DATE_HEADER,
  createFetchWithBackoff: () => createFetchWithBackoff,
  createFetchWithConsumedBody: () => createFetchWithConsumedBody
});
module.exports = __toCommonJS(index_exports);
|
|
53
|
+
|
|
54
|
+
// src/stream.ts
|
|
55
|
+
var import_fetch_event_source = require("@microsoft/fetch-event-source");
|
|
56
|
+
|
|
57
|
+
// src/error.ts
/**
 * Error describing a failed HTTP response: status, response body (raw text
 * or parsed JSON), headers as a plain object, and the request URL.
 */
var FetchError = class _FetchError extends Error {
  /**
   * @param status  - HTTP status code of the failed response.
   * @param text    - raw body text, when the body was not parsed as JSON.
   * @param json    - parsed JSON body, when available.
   * @param headers - response headers as a plain key/value object.
   * @param url     - the request URL.
   * @param message - optional override for the generated error message.
   */
  constructor(status, text, json, headers, url, message) {
    const generated = `HTTP Error ${status} at ${url}: ${text ?? JSON.stringify(json)}`;
    super(message || generated);
    this.name = `FetchError`;
    this.url = url;
    this.status = status;
    this.text = text;
    this.json = json;
    this.headers = headers;
  }
  /**
   * Build a FetchError from a Response, consuming its body if still unread.
   * JSON-typed bodies are parsed into `json`; anything else lands in `text`.
   */
  static async fromResponse(response, url) {
    const headers = Object.fromEntries([...response.headers.entries()]);
    let text;
    let json;
    if (!response.bodyUsed) {
      const contentType = response.headers.get(`content-type`);
      if (contentType !== null && contentType.includes(`application/json`)) {
        try {
          json = await response.json();
        } catch {
          // Body claimed JSON but did not parse; capture it as text instead.
          text = await response.text();
        }
      } else {
        text = await response.text();
      }
    }
    return new _FetchError(response.status, text, json, headers, url);
  }
};
|
|
90
|
+
// Thrown when a backoff-wrapped fetch stops because its AbortSignal fired.
// Consumers (e.g. follow() iterators) treat this as a clean cancellation
// rather than a failure.
var FetchBackoffAbortError = class extends Error {
  constructor() {
    super(`Fetch with backoff aborted`);
    this.name = `FetchBackoffAbortError`;
  }
};
|
|
96
|
+
/**
 * Protocol-level error for durable stream operations. Carries a stable
 * string `code` (e.g. `NOT_FOUND`, `CONFLICT_SEQ`) alongside the HTTP
 * `status` and any response body captured as `details`.
 */
var DurableStreamError = class _DurableStreamError extends Error {
  /**
   * @param message - human-readable description.
   * @param code    - stable protocol error code.
   * @param status  - HTTP status that produced this error.
   * @param details - parsed JSON or raw text body, when available.
   */
  constructor(message, code, status, details) {
    super(message);
    this.name = `DurableStreamError`;
    this.code = code;
    this.status = status;
    this.details = details;
  }
  /**
   * Create a DurableStreamError from an HTTP response.
   */
  static async fromResponse(response, url) {
    const { status } = response;
    let details;
    if (!response.bodyUsed) {
      const contentType = response.headers.get(`content-type`);
      const looksLikeJson = contentType !== null && contentType.includes(`application/json`);
      if (looksLikeJson) {
        try {
          details = await response.json();
        } catch {
          // JSON parse failed; keep whatever text we can recover.
          details = await response.text();
        }
      } else {
        details = await response.text();
      }
    }
    const message = `Durable stream error at ${url}: ${response.statusText || status}`;
    return new _DurableStreamError(message, statusToCode(status), status, details);
  }
  /**
   * Create a DurableStreamError from a FetchError.
   */
  static fromFetchError(error) {
    return new _DurableStreamError(
      error.message,
      statusToCode(error.status),
      error.status,
      error.json ?? error.text
    );
  }
};
|
|
139
|
+
/**
 * Map an HTTP status code to the protocol's stable error-code string.
 * Unrecognized statuses map to `UNKNOWN`.
 */
function statusToCode(status) {
  // Map lookup uses SameValueZero, so matching is as strict as the original
  // switch: only the exact numeric statuses match (a string "400" does not).
  const byStatus = new Map([
    [400, `BAD_REQUEST`],
    [401, `UNAUTHORIZED`],
    [403, `FORBIDDEN`],
    [404, `NOT_FOUND`],
    [409, `CONFLICT_SEQ`],
    [429, `RATE_LIMITED`],
    [503, `BUSY`]
  ]);
  return byStatus.get(status) ?? `UNKNOWN`;
}
|
|
159
|
+
// Thrown when stream options are validated without the required `url` field.
var MissingStreamUrlError = class extends Error {
  constructor() {
    super(`Invalid stream options: missing required url parameter`);
    this.name = `MissingStreamUrlError`;
  }
};
|
|
165
|
+
// Thrown when the `signal` option is present but is not an AbortSignal.
var InvalidSignalError = class extends Error {
  constructor() {
    super(`Invalid signal option. It must be an instance of AbortSignal.`);
    this.name = `InvalidSignalError`;
  }
};
|
|
171
|
+
|
|
172
|
+
// src/constants.ts
// Protocol header names exchanged with the durable-stream server.
var STREAM_OFFSET_HEADER = `stream-offset`;
var STREAM_CURSOR_HEADER = `stream-cursor`;
var STREAM_UP_TO_DATE_HEADER = `stream-up-to-date`;
var STREAM_SEQ_HEADER = `stream-seq`;
var STREAM_TTL_HEADER = `stream-ttl`;
var STREAM_EXPIRES_AT_HEADER = `stream-expires-at`;
// Query parameters used by the read protocol.
var OFFSET_QUERY_PARAM = `offset`;
var LIVE_QUERY_PARAM = `live`;
var CURSOR_QUERY_PARAM = `cursor`;
// Content-type prefixes/values for which SSE live mode is usable.
var SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
// All query params reserved by the protocol (kept distinct from user params).
var DURABLE_STREAM_PROTOCOL_QUERY_PARAMS = [
  OFFSET_QUERY_PARAM,
  LIVE_QUERY_PARAM,
  CURSOR_QUERY_PARAM
];
|
|
188
|
+
|
|
189
|
+
// src/fetch.ts
// HTTP statuses that are retried despite being 4xx/5xx client-visible errors.
var HTTP_RETRY_STATUS_CODES = [429, 503];
// Default retry/backoff configuration for createFetchWithBackoff.
var BackoffDefaults = {
  initialDelay: 100,
  maxDelay: 6e4,
  // Cap at 60s
  multiplier: 1.3,
  maxRetries: Infinity
  // Retry forever by default
};
|
|
199
|
+
/**
 * Parse an HTTP `Retry-After` header value into a wait time in milliseconds.
 *
 * Accepts both header forms: a delay in seconds, or an HTTP date. Date-based
 * waits are clamped to [0, 1 hour]; anything unparseable yields 0.
 */
function parseRetryAfterHeader(retryAfter) {
  if (!retryAfter) {
    return 0;
  }
  // Numeric form: "Retry-After: 120" (seconds).
  const seconds = Number(retryAfter);
  if (Number.isFinite(seconds) && seconds > 0) {
    return seconds * 1e3;
  }
  // Date form: "Retry-After: Wed, 21 Oct 2015 07:28:00 GMT".
  const dateMs = Date.parse(retryAfter);
  if (Number.isNaN(dateMs)) {
    return 0;
  }
  const delta = dateMs - Date.now();
  // Clamp: never negative, never more than one hour.
  return Math.max(0, Math.min(delta, 36e5));
}
|
|
212
|
+
/**
 * Wrap a fetch-compatible client with retry-and-backoff behavior.
 *
 * Retries on network errors and on retryable HTTP statuses (429/503 plus all
 * non-4xx failures); non-retryable 4xx responses are surfaced immediately as
 * FetchError. An aborted request signal surfaces as FetchBackoffAbortError.
 *
 * Wait time per retry is the larger of the server's Retry-After minimum and
 * a client-side backoff drawn uniformly from [0, delay) ("full jitter" —
 * see the Math.random() * delay line), where `delay` grows geometrically by
 * `multiplier` up to `maxDelay`.
 */
function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
  const {
    initialDelay,
    maxDelay,
    multiplier,
    debug = false,
    onFailedAttempt,
    maxRetries = Infinity
  } = backoffOptions;
  return async (...args) => {
    const url = args[0];
    const options = args[1];
    let delay = initialDelay;
    let attempt = 0;
    while (true) {
      try {
        const result = await fetchClient(...args);
        if (result.ok) {
          return result;
        }
        // Non-ok response: convert to a FetchError (consumes the body) so the
        // catch block below can classify it.
        const err = await FetchError.fromResponse(result, url.toString());
        throw err;
      } catch (e) {
        onFailedAttempt?.();
        if (options?.signal?.aborted) {
          // Caller cancelled: stop retrying regardless of the error cause.
          throw new FetchBackoffAbortError();
        } else if (e instanceof FetchError && !HTTP_RETRY_STATUS_CODES.includes(e.status) && e.status >= 400 && e.status < 500) {
          // Non-retryable client error (4xx other than 429): fail fast.
          throw e;
        } else {
          attempt++;
          if (attempt > maxRetries) {
            if (debug) {
              console.log(
                `Max retries reached (${attempt}/${maxRetries}), giving up`
              );
            }
            throw e;
          }
          // Server-imposed floor from Retry-After (0 for network errors).
          const serverMinimumMs = e instanceof FetchError ? parseRetryAfterHeader(e.headers[`retry-after`]) : 0;
          // Full jitter: actual client wait is uniform in [0, delay).
          const jitter = Math.random() * delay;
          const clientBackoffMs = Math.min(jitter, maxDelay);
          const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
          if (debug) {
            const source = serverMinimumMs > 0 ? `server+client` : `client`;
            console.log(
              `Retry attempt #${attempt} after ${waitMs}ms (${source}, serverMin=${serverMinimumMs}ms, clientBackoff=${clientBackoffMs}ms)`
            );
          }
          await new Promise((resolve) => setTimeout(resolve, waitMs));
          // Grow the backoff window geometrically, capped at maxDelay.
          delay = Math.min(delay * multiplier, maxDelay);
        }
      }
    }
  };
}
|
|
267
|
+
// Statuses whose responses are passed through without buffering the body.
var NO_BODY_STATUS_CODES = [201, 204, 205];
/**
 * Wrap a fetch-compatible client so that the response body is fully read
 * into memory before the Response is returned. Body-read failures are
 * converted to FetchError (or FetchBackoffAbortError when the request's
 * signal aborted mid-read).
 */
function createFetchWithConsumedBody(fetchClient) {
  return async (...args) => {
    const response = await fetchClient(...args);
    try {
      const skipBody = response.status < 200 || NO_BODY_STATUS_CODES.includes(response.status);
      if (skipBody) {
        return response;
      }
      const payload = await response.arrayBuffer();
      // Re-wrap the buffered bytes so callers get a fresh, unconsumed body.
      return new Response(payload, {
        status: response.status,
        statusText: response.statusText,
        headers: response.headers
      });
    } catch (cause) {
      if (args[1]?.signal?.aborted) {
        throw new FetchBackoffAbortError();
      }
      const reason = cause instanceof Error ? cause.message : typeof cause === `string` ? cause : `failed to read body`;
      throw new FetchError(
        response.status,
        void 0,
        void 0,
        Object.fromEntries([...response.headers.entries()]),
        args[0].toString(),
        reason
      );
    }
  };
}
|
|
297
|
+
/**
 * Link an AbortController to an optional upstream AbortSignal so that
 * aborting the upstream signal aborts `aborter` with the same reason.
 * Returns the chained signal plus a `cleanup` that detaches the listener.
 */
function chainAborter(aborter, sourceSignal) {
  if (!sourceSignal) {
    // No upstream signal: nothing to forward, cleanup is a no-op.
    return { signal: aborter.signal, cleanup: noop };
  }
  if (sourceSignal.aborted) {
    // Upstream already aborted: propagate immediately; nothing to detach.
    aborter.abort(sourceSignal.reason);
    return { signal: aborter.signal, cleanup: noop };
  }
  const forwardAbort = () => aborter.abort(sourceSignal.reason);
  // `signal: aborter.signal` auto-removes this listener once the chained
  // controller itself aborts, avoiding a leak on long-lived source signals.
  sourceSignal.addEventListener(`abort`, forwardAbort, {
    once: true,
    signal: aborter.signal
  });
  return {
    signal: aborter.signal,
    cleanup: () => sourceSignal.removeEventListener(`abort`, forwardAbort)
  };
}
|
|
315
|
+
// Shared no-op, used as the default `cleanup` callback in `chainAborter`.
function noop() {
}
|
|
317
|
+
|
|
318
|
+
// src/stream.ts
|
|
319
|
+
var _options, _fetchClient, _sseFetchClient, _onError, _DurableStream_instances, buildRequest_fn, resolveHeaders_fn, parseReadResponse_fn, isSSECompatible_fn, createSSEIterator_fn, parseSSEData_fn;
|
|
320
|
+
var _DurableStream = class _DurableStream {
|
|
321
|
+
/**
|
|
322
|
+
* Create a cold handle to a stream.
|
|
323
|
+
* No network IO is performed by the constructor.
|
|
324
|
+
*/
|
|
325
|
+
constructor(opts) {
|
|
326
|
+
__privateAdd(this, _DurableStream_instances);
|
|
327
|
+
__privateAdd(this, _options);
|
|
328
|
+
__privateAdd(this, _fetchClient);
|
|
329
|
+
__privateAdd(this, _sseFetchClient);
|
|
330
|
+
__privateAdd(this, _onError);
|
|
331
|
+
validateOptions(opts);
|
|
332
|
+
this.url = opts.url;
|
|
333
|
+
__privateSet(this, _options, opts);
|
|
334
|
+
__privateSet(this, _onError, opts.onError);
|
|
335
|
+
const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
|
|
336
|
+
const backOffOpts = {
|
|
337
|
+
...opts.backoffOptions ?? BackoffDefaults
|
|
338
|
+
};
|
|
339
|
+
const fetchWithBackoffClient = createFetchWithBackoff(
|
|
340
|
+
baseFetchClient,
|
|
341
|
+
backOffOpts
|
|
342
|
+
);
|
|
343
|
+
__privateSet(this, _sseFetchClient, fetchWithBackoffClient);
|
|
344
|
+
__privateSet(this, _fetchClient, createFetchWithConsumedBody(fetchWithBackoffClient));
|
|
345
|
+
}
|
|
346
|
+
// ============================================================================
|
|
347
|
+
// Static convenience methods
|
|
348
|
+
// ============================================================================
|
|
349
|
+
/**
|
|
350
|
+
* Create a new stream (create-only PUT) and return a handle.
|
|
351
|
+
* Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
|
|
352
|
+
*/
|
|
353
|
+
static async create(opts) {
|
|
354
|
+
const stream = new _DurableStream(opts);
|
|
355
|
+
await stream.create({
|
|
356
|
+
contentType: opts.contentType,
|
|
357
|
+
ttlSeconds: opts.ttlSeconds,
|
|
358
|
+
expiresAt: opts.expiresAt,
|
|
359
|
+
body: opts.body
|
|
360
|
+
});
|
|
361
|
+
return stream;
|
|
362
|
+
}
|
|
363
|
+
/**
|
|
364
|
+
* Validate that a stream exists and fetch metadata via HEAD.
|
|
365
|
+
* Returns a handle with contentType populated (if sent by server).
|
|
366
|
+
*/
|
|
367
|
+
static async connect(opts) {
|
|
368
|
+
const stream = new _DurableStream(opts);
|
|
369
|
+
await stream.head();
|
|
370
|
+
return stream;
|
|
371
|
+
}
|
|
372
|
+
/**
|
|
373
|
+
* HEAD metadata for a stream without creating a handle.
|
|
374
|
+
*/
|
|
375
|
+
static async head(opts) {
|
|
376
|
+
const stream = new _DurableStream(opts);
|
|
377
|
+
return stream.head();
|
|
378
|
+
}
|
|
379
|
+
/**
|
|
380
|
+
* Delete a stream without creating a handle.
|
|
381
|
+
*/
|
|
382
|
+
static async delete(opts) {
|
|
383
|
+
const stream = new _DurableStream(opts);
|
|
384
|
+
return stream.delete();
|
|
385
|
+
}
|
|
386
|
+
// ============================================================================
|
|
387
|
+
// Instance methods
|
|
388
|
+
// ============================================================================
|
|
389
|
+
/**
|
|
390
|
+
* HEAD metadata for this stream.
|
|
391
|
+
*/
|
|
392
|
+
async head(opts) {
|
|
393
|
+
const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
|
|
394
|
+
const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
|
|
395
|
+
method: `HEAD`,
|
|
396
|
+
headers: requestHeaders,
|
|
397
|
+
signal: opts?.signal ?? __privateGet(this, _options).signal
|
|
398
|
+
});
|
|
399
|
+
if (!response.ok) {
|
|
400
|
+
if (response.status === 404) {
|
|
401
|
+
throw new DurableStreamError(
|
|
402
|
+
`Stream not found: ${this.url}`,
|
|
403
|
+
`NOT_FOUND`,
|
|
404
|
+
404
|
|
405
|
+
);
|
|
406
|
+
}
|
|
407
|
+
throw await DurableStreamError.fromResponse(response, this.url);
|
|
408
|
+
}
|
|
409
|
+
const contentType = response.headers.get(`content-type`) ?? void 0;
|
|
410
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
411
|
+
const etag = response.headers.get(`etag`) ?? void 0;
|
|
412
|
+
const cacheControl = response.headers.get(`cache-control`) ?? void 0;
|
|
413
|
+
if (contentType) {
|
|
414
|
+
this.contentType = contentType;
|
|
415
|
+
}
|
|
416
|
+
return {
|
|
417
|
+
exists: true,
|
|
418
|
+
contentType,
|
|
419
|
+
offset,
|
|
420
|
+
etag,
|
|
421
|
+
cacheControl
|
|
422
|
+
};
|
|
423
|
+
}
|
|
424
|
+
/**
|
|
425
|
+
* Create this stream (create-only PUT) using the URL/auth from the handle.
|
|
426
|
+
*/
|
|
427
|
+
async create(opts) {
|
|
428
|
+
const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
|
|
429
|
+
if (opts?.contentType) {
|
|
430
|
+
requestHeaders[`content-type`] = opts.contentType;
|
|
431
|
+
}
|
|
432
|
+
if (opts?.ttlSeconds !== void 0) {
|
|
433
|
+
requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
|
|
434
|
+
}
|
|
435
|
+
if (opts?.expiresAt) {
|
|
436
|
+
requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
|
|
437
|
+
}
|
|
438
|
+
const body = encodeBody(opts?.body);
|
|
439
|
+
const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
|
|
440
|
+
method: `PUT`,
|
|
441
|
+
headers: requestHeaders,
|
|
442
|
+
body,
|
|
443
|
+
signal: __privateGet(this, _options).signal
|
|
444
|
+
});
|
|
445
|
+
if (!response.ok) {
|
|
446
|
+
if (response.status === 409) {
|
|
447
|
+
throw new DurableStreamError(
|
|
448
|
+
`Stream already exists: ${this.url}`,
|
|
449
|
+
`CONFLICT_EXISTS`,
|
|
450
|
+
409
|
|
451
|
+
);
|
|
452
|
+
}
|
|
453
|
+
throw await DurableStreamError.fromResponse(response, this.url);
|
|
454
|
+
}
|
|
455
|
+
const responseContentType = response.headers.get(`content-type`);
|
|
456
|
+
if (responseContentType) {
|
|
457
|
+
this.contentType = responseContentType;
|
|
458
|
+
} else if (opts?.contentType) {
|
|
459
|
+
this.contentType = opts.contentType;
|
|
460
|
+
}
|
|
461
|
+
return this;
|
|
462
|
+
}
|
|
463
|
+
/**
|
|
464
|
+
* Delete this stream.
|
|
465
|
+
*/
|
|
466
|
+
async delete(opts) {
|
|
467
|
+
const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
|
|
468
|
+
const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
|
|
469
|
+
method: `DELETE`,
|
|
470
|
+
headers: requestHeaders,
|
|
471
|
+
signal: opts?.signal ?? __privateGet(this, _options).signal
|
|
472
|
+
});
|
|
473
|
+
if (!response.ok) {
|
|
474
|
+
if (response.status === 404) {
|
|
475
|
+
throw new DurableStreamError(
|
|
476
|
+
`Stream not found: ${this.url}`,
|
|
477
|
+
`NOT_FOUND`,
|
|
478
|
+
404
|
|
479
|
+
);
|
|
480
|
+
}
|
|
481
|
+
throw await DurableStreamError.fromResponse(response, this.url);
|
|
482
|
+
}
|
|
483
|
+
}
|
|
484
|
+
/**
|
|
485
|
+
* Append a single payload to the stream.
|
|
486
|
+
*
|
|
487
|
+
* - `body` may be Uint8Array, string, or any Fetch BodyInit.
|
|
488
|
+
* - Strings are encoded as UTF-8.
|
|
489
|
+
* - `seq` (if provided) is sent as stream-seq (writer coordination).
|
|
490
|
+
*/
|
|
491
|
+
async append(body, opts) {
|
|
492
|
+
const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
|
|
493
|
+
if (opts?.contentType) {
|
|
494
|
+
requestHeaders[`content-type`] = opts.contentType;
|
|
495
|
+
} else if (this.contentType) {
|
|
496
|
+
requestHeaders[`content-type`] = this.contentType;
|
|
497
|
+
}
|
|
498
|
+
if (opts?.seq) {
|
|
499
|
+
requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
500
|
+
}
|
|
501
|
+
const encodedBody = encodeBody(body);
|
|
502
|
+
const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
|
|
503
|
+
method: `POST`,
|
|
504
|
+
headers: requestHeaders,
|
|
505
|
+
body: encodedBody,
|
|
506
|
+
signal: opts?.signal ?? __privateGet(this, _options).signal
|
|
507
|
+
});
|
|
508
|
+
if (!response.ok) {
|
|
509
|
+
if (response.status === 404) {
|
|
510
|
+
throw new DurableStreamError(
|
|
511
|
+
`Stream not found: ${this.url}`,
|
|
512
|
+
`NOT_FOUND`,
|
|
513
|
+
404
|
|
514
|
+
);
|
|
515
|
+
}
|
|
516
|
+
if (response.status === 409) {
|
|
517
|
+
throw new DurableStreamError(
|
|
518
|
+
`Sequence conflict: seq is lower than last appended`,
|
|
519
|
+
`CONFLICT_SEQ`,
|
|
520
|
+
409
|
|
521
|
+
);
|
|
522
|
+
}
|
|
523
|
+
if (response.status === 400) {
|
|
524
|
+
throw new DurableStreamError(
|
|
525
|
+
`Bad request (possibly content-type mismatch)`,
|
|
526
|
+
`BAD_REQUEST`,
|
|
527
|
+
400
|
|
528
|
+
);
|
|
529
|
+
}
|
|
530
|
+
throw await DurableStreamError.fromResponse(response, this.url);
|
|
531
|
+
}
|
|
532
|
+
}
|
|
533
|
+
/**
|
|
534
|
+
* Append a streaming body to the stream.
|
|
535
|
+
*
|
|
536
|
+
* - `source` yields Uint8Array or string chunks.
|
|
537
|
+
* - Strings are encoded as UTF-8; no delimiters are added.
|
|
538
|
+
* - Internally uses chunked transfer or HTTP/2 streaming.
|
|
539
|
+
*/
|
|
540
|
+
async appendStream(source, opts) {
|
|
541
|
+
const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this);
|
|
542
|
+
if (opts?.contentType) {
|
|
543
|
+
requestHeaders[`content-type`] = opts.contentType;
|
|
544
|
+
} else if (this.contentType) {
|
|
545
|
+
requestHeaders[`content-type`] = this.contentType;
|
|
546
|
+
}
|
|
547
|
+
if (opts?.seq) {
|
|
548
|
+
requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
549
|
+
}
|
|
550
|
+
const body = toReadableStream(source);
|
|
551
|
+
const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
|
|
552
|
+
method: `POST`,
|
|
553
|
+
headers: requestHeaders,
|
|
554
|
+
body,
|
|
555
|
+
// @ts-expect-error - duplex is needed for streaming but not in types
|
|
556
|
+
duplex: `half`,
|
|
557
|
+
signal: opts?.signal ?? __privateGet(this, _options).signal
|
|
558
|
+
});
|
|
559
|
+
if (!response.ok) {
|
|
560
|
+
if (response.status === 404) {
|
|
561
|
+
throw new DurableStreamError(
|
|
562
|
+
`Stream not found: ${this.url}`,
|
|
563
|
+
`NOT_FOUND`,
|
|
564
|
+
404
|
|
565
|
+
);
|
|
566
|
+
}
|
|
567
|
+
if (response.status === 409) {
|
|
568
|
+
throw new DurableStreamError(
|
|
569
|
+
`Sequence conflict: seq is lower than last appended`,
|
|
570
|
+
`CONFLICT_SEQ`,
|
|
571
|
+
409
|
|
572
|
+
);
|
|
573
|
+
}
|
|
574
|
+
throw await DurableStreamError.fromResponse(response, this.url);
|
|
575
|
+
}
|
|
576
|
+
}
|
|
577
|
+
/**
|
|
578
|
+
* One-shot read.
|
|
579
|
+
*
|
|
580
|
+
* Performs a single GET from the specified offset/mode and returns a chunk.
|
|
581
|
+
* Caller is responsible for persisting the returned offset if they want to resume.
|
|
582
|
+
*/
|
|
583
|
+
async read(opts) {
|
|
584
|
+
const { requestHeaders, fetchUrl } = await __privateMethod(this, _DurableStream_instances, buildRequest_fn).call(this, opts);
|
|
585
|
+
const response = await __privateGet(this, _fetchClient).call(this, fetchUrl.toString(), {
|
|
586
|
+
method: `GET`,
|
|
587
|
+
headers: requestHeaders,
|
|
588
|
+
signal: opts?.signal ?? __privateGet(this, _options).signal
|
|
589
|
+
});
|
|
590
|
+
if (!response.ok) {
|
|
591
|
+
if (response.status === 404) {
|
|
592
|
+
throw new DurableStreamError(
|
|
593
|
+
`Stream not found: ${this.url}`,
|
|
594
|
+
`NOT_FOUND`,
|
|
595
|
+
404
|
|
596
|
+
);
|
|
597
|
+
}
|
|
598
|
+
if (response.status === 204) {
|
|
599
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? opts?.offset ?? ``;
|
|
600
|
+
return {
|
|
601
|
+
data: new Uint8Array(0),
|
|
602
|
+
offset,
|
|
603
|
+
upToDate: true,
|
|
604
|
+
contentType: this.contentType
|
|
605
|
+
};
|
|
606
|
+
}
|
|
607
|
+
throw await DurableStreamError.fromResponse(response, this.url);
|
|
608
|
+
}
|
|
609
|
+
return __privateMethod(this, _DurableStream_instances, parseReadResponse_fn).call(this, response);
|
|
610
|
+
}
|
|
611
|
+
/**
|
|
612
|
+
* Follow the stream as an AsyncIterable of chunks.
|
|
613
|
+
*
|
|
614
|
+
* Default behaviour:
|
|
615
|
+
* - From `offset` (or start if omitted), repeatedly perform catch-up reads
|
|
616
|
+
* until a chunk with upToDate=true.
|
|
617
|
+
* - Then switch to live mode:
|
|
618
|
+
* - SSE if content-type is text/* or application/json;
|
|
619
|
+
* - otherwise long-poll.
|
|
620
|
+
*
|
|
621
|
+
* Explicit live override:
|
|
622
|
+
* - live="catchup": only catch-up, stop at upToDate.
|
|
623
|
+
* - live="long-poll": start long-polling immediately from offset.
|
|
624
|
+
* - live="sse": start SSE immediately (throws if SSE not supported).
|
|
625
|
+
*/
|
|
626
|
+
follow(opts) {
|
|
627
|
+
const stream = this;
|
|
628
|
+
const liveMode = opts?.live;
|
|
629
|
+
let currentOffset = opts?.offset;
|
|
630
|
+
let currentCursor = opts?.cursor;
|
|
631
|
+
let isUpToDate = false;
|
|
632
|
+
const aborter = new AbortController();
|
|
633
|
+
const { signal, cleanup } = chainAborter(
|
|
634
|
+
aborter,
|
|
635
|
+
opts?.signal ?? __privateGet(stream, _options).signal
|
|
636
|
+
);
|
|
637
|
+
let sseIterator = null;
|
|
638
|
+
return {
|
|
639
|
+
[Symbol.asyncIterator]() {
|
|
640
|
+
return {
|
|
641
|
+
async next() {
|
|
642
|
+
var _a, _b, _c, _d;
|
|
643
|
+
try {
|
|
644
|
+
if (signal.aborted) {
|
|
645
|
+
cleanup();
|
|
646
|
+
return { done: true, value: void 0 };
|
|
647
|
+
}
|
|
648
|
+
if (sseIterator) {
|
|
649
|
+
const result = await sseIterator.next();
|
|
650
|
+
if (result.done) {
|
|
651
|
+
cleanup();
|
|
652
|
+
}
|
|
653
|
+
return result;
|
|
654
|
+
}
|
|
655
|
+
if (liveMode === `catchup`) {
|
|
656
|
+
if (isUpToDate) {
|
|
657
|
+
cleanup();
|
|
658
|
+
return { done: true, value: void 0 };
|
|
659
|
+
}
|
|
660
|
+
const chunk = await stream.read({
|
|
661
|
+
offset: currentOffset,
|
|
662
|
+
cursor: currentCursor,
|
|
663
|
+
signal
|
|
664
|
+
});
|
|
665
|
+
currentOffset = chunk.offset;
|
|
666
|
+
currentCursor = chunk.cursor;
|
|
667
|
+
isUpToDate = chunk.upToDate;
|
|
668
|
+
return { done: false, value: chunk };
|
|
669
|
+
}
|
|
670
|
+
if (liveMode === `sse`) {
|
|
671
|
+
sseIterator = __privateMethod(_a = stream, _DurableStream_instances, createSSEIterator_fn).call(_a, currentOffset, currentCursor, signal);
|
|
672
|
+
return sseIterator.next();
|
|
673
|
+
}
|
|
674
|
+
if (liveMode === `long-poll`) {
|
|
675
|
+
const chunk = await stream.read({
|
|
676
|
+
offset: currentOffset,
|
|
677
|
+
cursor: currentCursor,
|
|
678
|
+
live: `long-poll`,
|
|
679
|
+
signal
|
|
680
|
+
});
|
|
681
|
+
currentOffset = chunk.offset;
|
|
682
|
+
currentCursor = chunk.cursor;
|
|
683
|
+
return { done: false, value: chunk };
|
|
684
|
+
}
|
|
685
|
+
if (!isUpToDate) {
|
|
686
|
+
const chunk = await stream.read({
|
|
687
|
+
offset: currentOffset,
|
|
688
|
+
cursor: currentCursor,
|
|
689
|
+
signal
|
|
690
|
+
});
|
|
691
|
+
currentOffset = chunk.offset;
|
|
692
|
+
currentCursor = chunk.cursor;
|
|
693
|
+
isUpToDate = chunk.upToDate;
|
|
694
|
+
if (chunk.contentType && !stream.contentType) {
|
|
695
|
+
stream.contentType = chunk.contentType;
|
|
696
|
+
}
|
|
697
|
+
return { done: false, value: chunk };
|
|
698
|
+
}
|
|
699
|
+
if (__privateMethod(_b = stream, _DurableStream_instances, isSSECompatible_fn).call(_b)) {
|
|
700
|
+
sseIterator = __privateMethod(_c = stream, _DurableStream_instances, createSSEIterator_fn).call(_c, currentOffset, currentCursor, signal);
|
|
701
|
+
return sseIterator.next();
|
|
702
|
+
} else {
|
|
703
|
+
const chunk = await stream.read({
|
|
704
|
+
offset: currentOffset,
|
|
705
|
+
cursor: currentCursor,
|
|
706
|
+
live: `long-poll`,
|
|
707
|
+
signal
|
|
708
|
+
});
|
|
709
|
+
currentOffset = chunk.offset;
|
|
710
|
+
currentCursor = chunk.cursor;
|
|
711
|
+
return { done: false, value: chunk };
|
|
712
|
+
}
|
|
713
|
+
} catch (e) {
|
|
714
|
+
if (e instanceof FetchBackoffAbortError) {
|
|
715
|
+
cleanup();
|
|
716
|
+
return { done: true, value: void 0 };
|
|
717
|
+
}
|
|
718
|
+
if (__privateGet(stream, _onError) && e instanceof Error) {
|
|
719
|
+
const retryOpts = await __privateGet(_d = stream, _onError).call(_d, e);
|
|
720
|
+
if (retryOpts && typeof retryOpts === `object`) {
|
|
721
|
+
if (retryOpts.params) {
|
|
722
|
+
__privateGet(stream, _options).params = {
|
|
723
|
+
...__privateGet(stream, _options).params ?? {},
|
|
724
|
+
...retryOpts.params
|
|
725
|
+
};
|
|
726
|
+
}
|
|
727
|
+
if (retryOpts.headers) {
|
|
728
|
+
__privateGet(stream, _options).headers = {
|
|
729
|
+
...__privateGet(stream, _options).headers ?? {},
|
|
730
|
+
...retryOpts.headers
|
|
731
|
+
};
|
|
732
|
+
}
|
|
733
|
+
return this.next();
|
|
734
|
+
}
|
|
735
|
+
}
|
|
736
|
+
cleanup();
|
|
737
|
+
throw e;
|
|
738
|
+
}
|
|
739
|
+
},
|
|
740
|
+
async return() {
|
|
741
|
+
if (sseIterator?.return) {
|
|
742
|
+
await sseIterator.return();
|
|
743
|
+
}
|
|
744
|
+
cleanup();
|
|
745
|
+
aborter.abort();
|
|
746
|
+
return { done: true, value: void 0 };
|
|
747
|
+
}
|
|
748
|
+
};
|
|
749
|
+
}
|
|
750
|
+
};
|
|
751
|
+
}
|
|
752
|
+
/**
|
|
753
|
+
* Wrap follow() in a Web ReadableStream for piping.
|
|
754
|
+
*
|
|
755
|
+
* Backpressure:
|
|
756
|
+
* - One chunk is pulled from follow() per pull() call, so standard
|
|
757
|
+
* Web Streams backpressure semantics apply.
|
|
758
|
+
*
|
|
759
|
+
* Cancellation:
|
|
760
|
+
* - rs.cancel() will stop follow() and abort any in-flight request.
|
|
761
|
+
*/
|
|
762
|
+
toReadableStream(opts) {
|
|
763
|
+
const iterator = this.follow(opts)[Symbol.asyncIterator]();
|
|
764
|
+
return new ReadableStream({
|
|
765
|
+
async pull(controller) {
|
|
766
|
+
try {
|
|
767
|
+
const { done, value } = await iterator.next();
|
|
768
|
+
if (done) {
|
|
769
|
+
controller.close();
|
|
770
|
+
} else {
|
|
771
|
+
controller.enqueue(value);
|
|
772
|
+
}
|
|
773
|
+
} catch (e) {
|
|
774
|
+
controller.error(e);
|
|
775
|
+
}
|
|
776
|
+
},
|
|
777
|
+
cancel() {
|
|
778
|
+
iterator.return?.();
|
|
779
|
+
}
|
|
780
|
+
});
|
|
781
|
+
}
|
|
782
|
+
/**
|
|
783
|
+
* Wrap follow() in a Web ReadableStream<Uint8Array> for piping raw bytes.
|
|
784
|
+
*
|
|
785
|
+
* This is the native format for many web stream APIs.
|
|
786
|
+
*/
|
|
787
|
+
toByteStream(opts) {
|
|
788
|
+
const iterator = this.follow(opts)[Symbol.asyncIterator]();
|
|
789
|
+
return new ReadableStream({
|
|
790
|
+
async pull(controller) {
|
|
791
|
+
try {
|
|
792
|
+
const { done, value } = await iterator.next();
|
|
793
|
+
if (done) {
|
|
794
|
+
controller.close();
|
|
795
|
+
} else {
|
|
796
|
+
controller.enqueue(value.data);
|
|
797
|
+
}
|
|
798
|
+
} catch (e) {
|
|
799
|
+
controller.error(e);
|
|
800
|
+
}
|
|
801
|
+
},
|
|
802
|
+
cancel() {
|
|
803
|
+
iterator.return?.();
|
|
804
|
+
}
|
|
805
|
+
});
|
|
806
|
+
}
|
|
807
|
+
/**
|
|
808
|
+
* Convenience: interpret data as JSON messages.
|
|
809
|
+
* Parses each chunk's data as JSON and yields the parsed values.
|
|
810
|
+
*/
|
|
811
|
+
async *json(opts) {
|
|
812
|
+
const decoder = new TextDecoder();
|
|
813
|
+
for await (const chunk of this.follow(opts)) {
|
|
814
|
+
if (chunk.data.length > 0) {
|
|
815
|
+
const text = decoder.decode(chunk.data);
|
|
816
|
+
const lines = text.split(`
|
|
817
|
+
`).filter((l) => l.trim());
|
|
818
|
+
for (const line of lines) {
|
|
819
|
+
yield JSON.parse(line);
|
|
820
|
+
}
|
|
821
|
+
}
|
|
822
|
+
}
|
|
823
|
+
}
|
|
824
|
+
/**
|
|
825
|
+
* Convenience: interpret data as text (UTF-8).
|
|
826
|
+
*/
|
|
827
|
+
async *text(opts) {
|
|
828
|
+
const decoder = opts?.decoder ?? new TextDecoder();
|
|
829
|
+
for await (const chunk of this.follow(opts)) {
|
|
830
|
+
if (chunk.data.length > 0) {
|
|
831
|
+
yield decoder.decode(chunk.data, { stream: true });
|
|
832
|
+
}
|
|
833
|
+
}
|
|
834
|
+
}
|
|
835
|
+
};
|
|
836
|
+
// WeakMap/WeakSet backing stores for DurableStream's lowered `#private`
// members — the bundler rewrites native `#field` access into
// __privateGet/__privateMethod lookups keyed on these.
_options = new WeakMap();
_fetchClient = new WeakMap();
_sseFetchClient = new WeakMap();
_onError = new WeakMap();
// Brand set marking instances that may call the private methods below.
_DurableStream_instances = new WeakSet();
|
|
841
|
+
// Private: assemble the headers and fully-parameterized URL for a read
// request. User-configured params are applied first; protocol params
// (offset/live/cursor) are set last so they always win.
buildRequest_fn = async function(readOpts) {
  const requestHeaders = await __privateMethod(this, _DurableStream_instances, resolveHeaders_fn).call(this);
  const fetchUrl = new URL(this.url);
  const params = __privateGet(this, _options).params;
  if (params) {
    for (const [name, raw] of Object.entries(params)) {
      if (raw === void 0) continue;
      // Param values may be lazy (functions); resolve before encoding.
      fetchUrl.searchParams.set(name, await resolveValue(raw));
    }
  }
  if (readOpts) {
    if (readOpts.offset) {
      fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, readOpts.offset);
    }
    if (readOpts.live) {
      fetchUrl.searchParams.set(LIVE_QUERY_PARAM, readOpts.live);
    }
    if (readOpts.cursor) {
      fetchUrl.searchParams.set(CURSOR_QUERY_PARAM, readOpts.cursor);
    }
  }
  return { requestHeaders, fetchUrl };
};
|
|
866
|
+
// Private: build the outgoing header map. Auth-derived headers are
// applied first; explicitly configured headers are applied afterwards
// and therefore override them on key collision.
resolveHeaders_fn = async function() {
  const headers = {};
  const auth = __privateGet(this, _options).auth;
  if (auth) {
    if (`token` in auth) {
      // Static bearer token, optionally under a custom header name.
      headers[auth.headerName ?? `authorization`] = `Bearer ${auth.token}`;
    } else if (`headers` in auth) {
      Object.assign(headers, auth.headers);
    } else if (`getHeaders` in auth) {
      // Dynamic auth: fetch fresh headers on every request build.
      Object.assign(headers, await auth.getHeaders());
    }
  }
  const headersOpt = __privateGet(this, _options).headers;
  if (headersOpt) {
    for (const [name, raw] of Object.entries(headersOpt)) {
      // Header values may also be lazy (functions).
      headers[name] = await resolveValue(raw);
    }
  }
  return headers;
};
|
|
888
|
+
// Private: turn an HTTP read response into a chunk record, pulling the
// protocol metadata out of response headers. Also latches the stream's
// content type the first time the server reports one.
parseReadResponse_fn = async function(response) {
  const body = await response.arrayBuffer();
  const responseHeaders = response.headers;
  const contentType = responseHeaders.get(`content-type`) ?? void 0;
  if (contentType && !this.contentType) {
    this.contentType = contentType;
  }
  return {
    data: new Uint8Array(body),
    offset: responseHeaders.get(STREAM_OFFSET_HEADER) ?? ``,
    cursor: responseHeaders.get(STREAM_CURSOR_HEADER) ?? void 0,
    // Presence of the header (any value) marks the reader as caught up.
    upToDate: responseHeaders.has(STREAM_UP_TO_DATE_HEADER),
    etag: responseHeaders.get(`etag`) ?? void 0,
    contentType
  };
};
|
|
907
|
+
/**
 * Check whether the stream's (known) content type is compatible with
 * SSE delivery. Unknown content type counts as incompatible.
 */
isSSECompatible_fn = function() {
  const ct = this.contentType;
  return Boolean(ct) && SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => ct.startsWith(prefix));
};
|
|
916
|
+
/**
 * Create an SSE connection that maintains a persistent connection with an internal queue.
 * Returns an AsyncIterator that yields chunks as they arrive.
 *
 * Follows the Electric client pattern:
 * - Buffer data events until control event (up-to-date)
 * - Flush buffer on control event
 * - Use promise chain for sequential processing
 *
 * Concurrency model: a single consumer is assumed — at most one next()
 * call may be pending at a time (a second concurrent next() would
 * overwrite pendingResolve and strand the first caller).
 */
createSSEIterator_fn = function(initialOffset, initialCursor, signal) {
  // SSE is text-framed; refuse content types we cannot reassemble.
  if (!__privateMethod(this, _DurableStream_instances, isSSECompatible_fn).call(this)) {
    throw new DurableStreamError(
      `SSE is not supported for content-type: ${this.contentType}`,
      `SSE_NOT_SUPPORTED`,
      400
    );
  }
  // Chunks that arrived while no next() call was waiting.
  const chunkQueue = [];
  // Resolver for the (single) next() call currently awaiting a chunk.
  let pendingResolve = null;
  // Read position, advanced as control events are processed.
  let currentOffset = initialOffset;
  let currentCursor = initialCursor;
  let connectionClosed = false;
  let connectionError = null;
  // Local abort for tearing down the SSE request from return()/abort.
  const connectionAbort = new AbortController();
  // Data-event payloads buffered until the next control event flushes them.
  let dataBuffer = [];
  const stream = this;
  const startConnection = async () => {
    var _a;
    const { requestHeaders, fetchUrl } = await __privateMethod(_a = stream, _DurableStream_instances, buildRequest_fn).call(_a, {
      offset: currentOffset,
      cursor: currentCursor,
      live: `sse`
    });
    try {
      await (0, import_fetch_event_source.fetchEventSource)(fetchUrl.toString(), {
        headers: requestHeaders,
        fetch: __privateGet(stream, _sseFetchClient),
        // If the caller already aborted, hand over their signal so the
        // request dies immediately; otherwise use our local abort.
        signal: signal.aborted ? signal : connectionAbort.signal,
        onopen: async (response) => {
          if (!response.ok) {
            throw await DurableStreamError.fromResponse(response, stream.url);
          }
          // Latch the content type the first time the server reports one.
          const contentType = response.headers.get(`content-type`);
          if (contentType && !stream.contentType) {
            stream.contentType = contentType;
          }
        },
        onmessage: (event) => {
          var _a2;
          if (event.event === `data` && event.data) {
            // Buffer decoded payload bytes until a control event arrives.
            const data = __privateMethod(_a2 = stream, _DurableStream_instances, parseSSEData_fn).call(_a2, event.data);
            dataBuffer.push(data);
          } else if (event.event === `control` && event.data) {
            try {
              // Control event carries the new offset/cursor and marks the
              // buffered data as a complete, up-to-date chunk.
              const control = JSON.parse(event.data);
              const newOffset = control[STREAM_OFFSET_HEADER];
              const newCursor = control[STREAM_CURSOR_HEADER];
              // Concatenate all buffered payloads into one byte array.
              const totalSize = dataBuffer.reduce(
                (sum, buf) => sum + buf.length,
                0
              );
              const combinedData = new Uint8Array(totalSize);
              let offset = 0;
              for (const buf of dataBuffer) {
                combinedData.set(buf, offset);
                offset += buf.length;
              }
              const chunk = {
                data: combinedData,
                offset: newOffset ?? currentOffset ?? ``,
                cursor: newCursor,
                upToDate: true,
                contentType: stream.contentType
              };
              currentOffset = chunk.offset;
              currentCursor = chunk.cursor;
              dataBuffer = [];
              // Hand the chunk directly to a waiting next(), or queue it.
              if (pendingResolve) {
                const resolve = pendingResolve;
                pendingResolve = null;
                resolve({ done: false, value: chunk });
              } else {
                chunkQueue.push(chunk);
              }
            } catch {
              // Malformed control payload: ignore it and keep the
              // connection (and any buffered data) alive.
            }
          }
        },
        onerror: (error) => {
          // Rethrow so fetchEventSource does not auto-retry; retries are
          // handled by the caller via startConnection's catch below.
          throw error;
        }
      });
    } catch (error) {
      // Normalize caller/local aborts into the sentinel abort error.
      if (connectionAbort.signal.aborted || signal.aborted) {
        throw new FetchBackoffAbortError();
      }
      throw error;
    }
  };
  // Kick off the connection; record failure state and wake any waiting
  // next() so it can observe the closed/errored connection.
  const connectionPromise = startConnection().catch((e) => {
    if (e instanceof FetchBackoffAbortError) {
      // Deliberate abort: close quietly, no error surfaced.
      connectionClosed = true;
    } else {
      connectionError = e;
      connectionClosed = true;
    }
    if (pendingResolve) {
      const resolve = pendingResolve;
      pendingResolve = null;
      resolve({ done: true, value: void 0 });
    }
  });
  // Caller-initiated abort: tear down the request and release any waiter.
  const abortHandler = () => {
    connectionAbort.abort();
    connectionClosed = true;
    if (pendingResolve) {
      const resolve = pendingResolve;
      pendingResolve = null;
      resolve({ done: true, value: void 0 });
    }
  };
  signal.addEventListener(`abort`, abortHandler, { once: true });
  return {
    async next() {
      // Drain queued chunks before consulting connection state.
      if (chunkQueue.length > 0) {
        return { done: false, value: chunkQueue.shift() };
      }
      if (connectionError) {
        throw connectionError;
      }
      if (connectionClosed || signal.aborted) {
        return { done: true, value: void 0 };
      }
      // Nothing buffered: park until onmessage/abort/close resolves us.
      return new Promise((resolve) => {
        pendingResolve = resolve;
      });
    },
    async return() {
      // Graceful shutdown: detach, abort, and wait out the connection.
      signal.removeEventListener(`abort`, abortHandler);
      connectionAbort.abort();
      connectionClosed = true;
      await connectionPromise.catch(() => {
      });
      return { done: true, value: void 0 };
    }
  };
};
|
|
1063
|
+
/**
 * Decode an SSE data payload into bytes.
 * Strips any per-line `data: ` prefixes, and for application/json
 * unwraps the surrounding [ ... ] (and a trailing comma) used to
 * frame JSON batches.
 */
parseSSEData_fn = function(data) {
  const unprefixed = data
    .split(`\n`)
    .map((line) => line.startsWith(`data: `) ? line.slice(6) : line)
    .join(`\n`);
  let text = unprefixed.trim();
  if (this.contentType?.includes(`application/json`) && text.startsWith(`[`) && text.endsWith(`]`)) {
    // Drop the array brackets and any trailing comma left by framing.
    text = text.slice(1, -1).trim();
    if (text.endsWith(`,`)) {
      text = text.slice(0, -1);
    }
  }
  return new TextEncoder().encode(text);
};
|
|
1086
|
+
// Public alias for the class expression `_DurableStream`; this is the
// name the package exports.
var DurableStream = _DurableStream;
|
|
1087
|
+
/**
 * Resolve a possibly-lazy option value: functions are invoked (and, via
 * the async return, awaited if they yield a promise); anything else is
 * returned unchanged.
 */
async function resolveValue(value) {
  return typeof value === `function` ? value() : value;
}
|
|
1093
|
+
/**
 * Normalize a request body for transport: strings are UTF-8 encoded,
 * undefined stays undefined, and every other body type (Uint8Array,
 * streams, ...) passes through untouched.
 */
function encodeBody(body) {
  if (body === void 0) {
    return void 0;
  }
  return typeof body === `string` ? new TextEncoder().encode(body) : body;
}
|
|
1105
|
+
/**
 * Coerce a body source into a ReadableStream<Uint8Array>.
 * A ReadableStream input is passed through with string chunks encoded
 * to bytes in flight; any other async iterable is driven one chunk per
 * pull (standard backpressure), with strings UTF-8 encoded.
 */
function toReadableStream(source) {
  const asBytes = (chunk) => typeof chunk === `string` ? new TextEncoder().encode(chunk) : chunk;
  if (source instanceof ReadableStream) {
    return source.pipeThrough(
      new TransformStream({
        transform(chunk, controller) {
          controller.enqueue(asBytes(chunk));
        }
      })
    );
  }
  const iterator = source[Symbol.asyncIterator]();
  return new ReadableStream({
    async pull(controller) {
      try {
        const step = await iterator.next();
        if (step.done) {
          controller.close();
          return;
        }
        controller.enqueue(asBytes(step.value));
      } catch (err) {
        controller.error(err);
      }
    },
    cancel() {
      // Give the source iterator a chance to clean up.
      iterator.return?.();
    }
  });
}
|
|
1141
|
+
/**
 * Validate client options before use.
 * Throws MissingStreamUrlError when url is absent/empty, and
 * InvalidSignalError when a truthy non-AbortSignal is passed as signal.
 */
function validateOptions(options) {
  const { url, signal } = options;
  if (!url) {
    throw new MissingStreamUrlError();
  }
  if (signal && !(signal instanceof AbortSignal)) {
    throw new InvalidSignalError();
  }
}
|
|
1149
|
+
// Annotate the CommonJS export names for ESM import in node:
// (dead code — the `0 &&` prefix means this never executes; the bundler
// emits it so tooling can statically detect the named exports.)
0 && (module.exports = {
  BackoffDefaults,
  CURSOR_QUERY_PARAM,
  DURABLE_STREAM_PROTOCOL_QUERY_PARAMS,
  DurableStream,
  DurableStreamError,
  FetchBackoffAbortError,
  FetchError,
  InvalidSignalError,
  LIVE_QUERY_PARAM,
  MissingStreamUrlError,
  OFFSET_QUERY_PARAM,
  SSE_COMPATIBLE_CONTENT_TYPES,
  STREAM_CURSOR_HEADER,
  STREAM_EXPIRES_AT_HEADER,
  STREAM_OFFSET_HEADER,
  STREAM_SEQ_HEADER,
  STREAM_TTL_HEADER,
  STREAM_UP_TO_DATE_HEADER,
  createFetchWithBackoff,
  createFetchWithConsumedBody
});
|
|
1172
|
+
//# sourceMappingURL=index.cjs.map
|