@durable-streams/client 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +799 -0
- package/dist/index.cjs +1172 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +627 -0
- package/dist/index.d.ts +1072 -0
- package/dist/index.js +1830 -0
- package/dist/index.js.map +1 -0
- package/package.json +46 -0
- package/src/asyncIterableReadableStream.ts +220 -0
- package/src/constants.ts +105 -0
- package/src/error.ts +189 -0
- package/src/fetch.ts +267 -0
- package/src/index.ts +103 -0
- package/src/response.ts +1053 -0
- package/src/sse.ts +130 -0
- package/src/stream-api.ts +284 -0
- package/src/stream.ts +867 -0
- package/src/types.ts +737 -0
- package/src/utils.ts +104 -0
package/src/stream.ts
ADDED
|
@@ -0,0 +1,867 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* DurableStream - A handle to a remote durable stream for read/write operations.
|
|
3
|
+
*
|
|
4
|
+
* Following the Electric Durable Stream Protocol specification.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import fastq from "fastq"
|
|
8
|
+
|
|
9
|
+
import {
|
|
10
|
+
DurableStreamError,
|
|
11
|
+
InvalidSignalError,
|
|
12
|
+
MissingStreamUrlError,
|
|
13
|
+
} from "./error"
|
|
14
|
+
import {
|
|
15
|
+
SSE_COMPATIBLE_CONTENT_TYPES,
|
|
16
|
+
STREAM_EXPIRES_AT_HEADER,
|
|
17
|
+
STREAM_OFFSET_HEADER,
|
|
18
|
+
STREAM_SEQ_HEADER,
|
|
19
|
+
STREAM_TTL_HEADER,
|
|
20
|
+
} from "./constants"
|
|
21
|
+
import {
|
|
22
|
+
BackoffDefaults,
|
|
23
|
+
createFetchWithBackoff,
|
|
24
|
+
createFetchWithConsumedBody,
|
|
25
|
+
} from "./fetch"
|
|
26
|
+
import { stream as streamFn } from "./stream-api"
|
|
27
|
+
import { handleErrorResponse, resolveHeaders, resolveParams } from "./utils"
|
|
28
|
+
import type { BackoffOptions } from "./fetch"
|
|
29
|
+
import type { queueAsPromised } from "fastq"
|
|
30
|
+
import type {
|
|
31
|
+
AppendOptions,
|
|
32
|
+
CreateOptions,
|
|
33
|
+
HeadResult,
|
|
34
|
+
HeadersRecord,
|
|
35
|
+
MaybePromise,
|
|
36
|
+
ParamsRecord,
|
|
37
|
+
StreamErrorHandler,
|
|
38
|
+
StreamHandleOptions,
|
|
39
|
+
StreamOptions,
|
|
40
|
+
StreamResponse,
|
|
41
|
+
} from "./types"
|
|
42
|
+
|
|
43
|
+
/**
 * A single append() call captured for batching.
 *
 * Each entry carries the caller's payload plus the promise callbacks that
 * settle the caller's append() promise once the batch POST completes.
 */
interface QueuedMessage {
  // Payload passed to append(); already awaited if the caller passed a promise.
  data: unknown
  // Optional writer-coordination sequence (sent as the stream-seq header).
  seq?: string
  // Per-call content-type override for this message.
  contentType?: string
  // Per-call abort signal; combined with the handle-level signal when sending.
  signal?: AbortSignal
  // Settles the caller's append() promise after a successful batch POST.
  resolve: () => void
  // Rejects the caller's append() promise if the batch POST fails.
  reject: (error: Error) => void
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Normalize content-type by extracting the media type (before any semicolon).
|
|
57
|
+
* Handles cases like "application/json; charset=utf-8".
|
|
58
|
+
*/
|
|
59
|
+
function normalizeContentType(contentType: string | undefined): string {
|
|
60
|
+
if (!contentType) return ``
|
|
61
|
+
return contentType.split(`;`)[0]!.trim().toLowerCase()
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Check if a value is a Promise or Promise-like (thenable).
|
|
66
|
+
*/
|
|
67
|
+
function isPromiseLike(value: unknown): value is PromiseLike<unknown> {
|
|
68
|
+
return (
|
|
69
|
+
value !== null &&
|
|
70
|
+
typeof value === `object` &&
|
|
71
|
+
`then` in value &&
|
|
72
|
+
typeof (value as PromiseLike<unknown>).then === `function`
|
|
73
|
+
)
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
 * Options for DurableStream constructor.
 */
export interface DurableStreamOptions extends StreamHandleOptions {
  /**
   * Additional query parameters to include in requests.
   * Values may be static strings or (possibly async) factory functions;
   * factories are resolved before each request is sent.
   */
  params?: {
    [key: string]: string | (() => MaybePromise<string>) | undefined
  }

  /**
   * Backoff options for retry behavior.
   * Defaults to BackoffDefaults when omitted.
   */
  backoffOptions?: BackoffOptions

  /**
   * Enable automatic batching for append() calls.
   * When true, multiple append() calls made while a POST is in-flight
   * will be batched together into a single request.
   *
   * @default true
   */
  batching?: boolean
}
|
|
101
|
+
|
|
102
|
+
/**
 * A handle to a remote durable stream for read/write operations.
 *
 * This is a lightweight, reusable handle - not a persistent connection.
 * It does not automatically start reading or listening.
 * Create sessions as needed via stream().
 *
 * @example
 * ```typescript
 * // Create a new stream
 * const stream = await DurableStream.create({
 *   url: "https://streams.example.com/my-stream",
 *   headers: { Authorization: "Bearer my-token" },
 *   contentType: "application/json"
 * });
 *
 * // Write data
 * await stream.append({ message: "hello" });
 *
 * // Read with the new API
 * const res = await stream.stream<{ message: string }>();
 * res.subscribeJson(async (batch) => {
 *   for (const item of batch.items) {
 *     console.log(item.message);
 *   }
 * });
 * ```
 */
export class DurableStream {
  /**
   * The URL of the durable stream.
   */
  readonly url: string

  /**
   * The content type of the stream (populated after connect/head/read).
   */
  contentType?: string

  // Handle-level options, with url normalized to a string.
  #options: DurableStreamOptions
  // Base fetch wrapped by createFetchWithBackoff and createFetchWithConsumedBody
  // (see constructor).
  readonly #fetchClient: typeof fetch
  // Default error handler forwarded to stream() sessions.
  #onError?: StreamErrorHandler

  // Batching infrastructure
  #batchingEnabled: boolean
  // Single-concurrency fastq queue: at most one batch POST in flight at a time.
  #queue?: queueAsPromised<Array<QueuedMessage>>
  // Messages accumulated while a POST is in flight, awaiting the next batch.
  #buffer: Array<QueuedMessage> = []

  /**
   * Create a cold handle to a stream.
   * No network IO is performed by the constructor.
   */
  constructor(opts: DurableStreamOptions) {
    validateOptions(opts)
    const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url
    this.url = urlStr
    this.#options = { ...opts, url: urlStr }
    this.#onError = opts.onError

    // Batching is enabled by default
    this.#batchingEnabled = opts.batching !== false

    if (this.#batchingEnabled) {
      // Concurrency of 1 serializes batch POSTs.
      this.#queue = fastq.promise(this.#batchWorker.bind(this), 1)
    }

    const baseFetchClient =
      opts.fetch ?? ((...args: Parameters<typeof fetch>) => fetch(...args))

    const backOffOpts = {
      ...(opts.backoffOptions ?? BackoffDefaults),
    }

    const fetchWithBackoffClient = createFetchWithBackoff(
      baseFetchClient,
      backOffOpts
    )

    this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient)
  }

  // ============================================================================
  // Static convenience methods
  // ============================================================================

  /**
   * Create a new stream (create-only PUT) and return a handle.
   * Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
   */
  static async create(opts: CreateOptions): Promise<DurableStream> {
    const stream = new DurableStream(opts)
    await stream.create({
      contentType: opts.contentType,
      ttlSeconds: opts.ttlSeconds,
      expiresAt: opts.expiresAt,
      body: opts.body,
    })
    return stream
  }

  /**
   * Validate that a stream exists and fetch metadata via HEAD.
   * Returns a handle with contentType populated (if sent by server).
   *
   * **Important**: This only performs a HEAD request for validation - it does
   * NOT open a session or start reading data. To read from the stream, call
   * `stream()` on the returned handle.
   *
   * @example
   * ```typescript
   * // Validate stream exists before reading
   * const handle = await DurableStream.connect({ url })
   * const res = await handle.stream() // Now actually read
   * ```
   */
  static async connect(opts: DurableStreamOptions): Promise<DurableStream> {
    const stream = new DurableStream(opts)
    await stream.head()
    return stream
  }

  /**
   * HEAD metadata for a stream without creating a handle.
   */
  static async head(opts: DurableStreamOptions): Promise<HeadResult> {
    const stream = new DurableStream(opts)
    return stream.head()
  }

  /**
   * Delete a stream without creating a handle.
   */
  static async delete(opts: DurableStreamOptions): Promise<void> {
    const stream = new DurableStream(opts)
    return stream.delete()
  }

  // ============================================================================
  // Instance methods
  // ============================================================================

  /**
   * HEAD metadata for this stream.
   *
   * Also updates this handle's `contentType` when the server reports one.
   * Note: a non-ok response is routed to handleErrorResponse (which is
   * expected to throw), so the returned `exists: true` is only reached for
   * ok responses.
   */
  async head(opts?: { signal?: AbortSignal }): Promise<HeadResult> {
    const { requestHeaders, fetchUrl } = await this.#buildRequest()

    const response = await this.#fetchClient(fetchUrl.toString(), {
      method: `HEAD`,
      headers: requestHeaders,
      signal: opts?.signal ?? this.#options.signal,
    })

    if (!response.ok) {
      await handleErrorResponse(response, this.url)
    }

    const contentType = response.headers.get(`content-type`) ?? undefined
    const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? undefined
    const etag = response.headers.get(`etag`) ?? undefined
    const cacheControl = response.headers.get(`cache-control`) ?? undefined

    // Update instance contentType
    if (contentType) {
      this.contentType = contentType
    }

    return {
      exists: true,
      contentType,
      offset,
      etag,
      cacheControl,
    }
  }

  /**
   * Create this stream (create-only PUT) using the URL/auth from the handle.
   *
   * @param opts - Optional content type, TTL/expiry headers, and initial body.
   * @returns this, for chaining; `contentType` is updated from the response
   *          (or from the provided/handle content type as a fallback).
   */
  async create(opts?: Omit<CreateOptions, keyof StreamOptions>): Promise<this> {
    const { requestHeaders, fetchUrl } = await this.#buildRequest()

    const contentType = opts?.contentType ?? this.#options.contentType
    if (contentType) {
      requestHeaders[`content-type`] = contentType
    }
    if (opts?.ttlSeconds !== undefined) {
      requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds)
    }
    if (opts?.expiresAt) {
      requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt
    }

    const body = encodeBody(opts?.body)

    const response = await this.#fetchClient(fetchUrl.toString(), {
      method: `PUT`,
      headers: requestHeaders,
      body,
      signal: this.#options.signal,
    })

    if (!response.ok) {
      await handleErrorResponse(response, this.url, { operation: `create` })
    }

    // Update content type from response or options
    const responseContentType = response.headers.get(`content-type`)
    if (responseContentType) {
      this.contentType = responseContentType
    } else if (contentType) {
      this.contentType = contentType
    }

    return this
  }

  /**
   * Delete this stream.
   */
  async delete(opts?: { signal?: AbortSignal }): Promise<void> {
    const { requestHeaders, fetchUrl } = await this.#buildRequest()

    const response = await this.#fetchClient(fetchUrl.toString(), {
      method: `DELETE`,
      headers: requestHeaders,
      signal: opts?.signal ?? this.#options.signal,
    })

    if (!response.ok) {
      await handleErrorResponse(response, this.url)
    }
  }

  /**
   * Append a single payload to the stream.
   *
   * When batching is enabled (default), multiple append() calls made while
   * a POST is in-flight will be batched together into a single request.
   * This significantly improves throughput for high-frequency writes.
   *
   * - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
   * - `body` may also be a Promise that resolves to any of the above types.
   * - Strings are encoded as UTF-8.
   * - `seq` (if provided) is sent as stream-seq (writer coordination).
   *
   * @example
   * ```typescript
   * // Direct value
   * await stream.append({ message: "hello" });
   *
   * // Promise value - awaited before buffering
   * await stream.append(fetchData());
   * await stream.append(Promise.all([a, b, c]));
   * ```
   */
  async append(
    body: BodyInit | Uint8Array | string | unknown,
    opts?: AppendOptions
  ): Promise<void> {
    // Await promises before buffering
    const resolvedBody = isPromiseLike(body) ? await body : body

    if (this.#batchingEnabled && this.#queue) {
      return this.#appendWithBatching(resolvedBody, opts)
    }
    return this.#appendDirect(resolvedBody, opts)
  }

  /**
   * Direct append without batching (used when batching is disabled).
   */
  async #appendDirect(
    body: BodyInit | Uint8Array | string | unknown,
    opts?: AppendOptions
  ): Promise<void> {
    const { requestHeaders, fetchUrl } = await this.#buildRequest()

    const contentType =
      opts?.contentType ?? this.#options.contentType ?? this.contentType
    if (contentType) {
      requestHeaders[`content-type`] = contentType
    }

    if (opts?.seq) {
      requestHeaders[STREAM_SEQ_HEADER] = opts.seq
    }

    // For JSON mode, wrap body in array to match protocol (server flattens one level)
    const isJson = normalizeContentType(contentType) === `application/json`
    const bodyToEncode = isJson ? [body] : body
    const encodedBody = encodeBody(bodyToEncode)

    const response = await this.#fetchClient(fetchUrl.toString(), {
      method: `POST`,
      headers: requestHeaders,
      body: encodedBody,
      signal: opts?.signal ?? this.#options.signal,
    })

    if (!response.ok) {
      await handleErrorResponse(response, this.url)
    }
  }

  /**
   * Append with batching - buffers messages and sends them in batches.
   *
   * The returned promise settles when the batch containing this message
   * is POSTed (resolve) or fails (reject) - see #batchWorker.
   */
  async #appendWithBatching(
    body: unknown,
    opts?: AppendOptions
  ): Promise<void> {
    return new Promise<void>((resolve, reject) => {
      this.#buffer.push({
        data: body,
        seq: opts?.seq,
        contentType: opts?.contentType,
        signal: opts?.signal,
        resolve,
        reject,
      })

      // If no POST in flight, send immediately
      // (otherwise #batchWorker flushes the buffer when the current POST ends).
      if (this.#queue!.idle()) {
        const batch = this.#buffer.splice(0)
        this.#queue!.push(batch).catch((err) => {
          for (const msg of batch) msg.reject(err)
        })
      }
    })
  }

  /**
   * Batch worker - processes batches of messages.
   *
   * On success, resolves every message in the batch, then re-enqueues any
   * messages that accumulated in #buffer while this POST was in flight.
   * On failure, rejects both the current batch and any buffered messages.
   */
  async #batchWorker(batch: Array<QueuedMessage>): Promise<void> {
    try {
      await this.#sendBatch(batch)

      // Resolve all messages in the batch
      for (const msg of batch) {
        msg.resolve()
      }

      // Send accumulated batch if any
      if (this.#buffer.length > 0) {
        const nextBatch = this.#buffer.splice(0)
        this.#queue!.push(nextBatch).catch((err) => {
          for (const msg of nextBatch) msg.reject(err)
        })
      }
    } catch (error) {
      // Reject current batch
      for (const msg of batch) {
        msg.reject(error as Error)
      }
      // Also reject buffered messages (don't leave promises hanging)
      for (const msg of this.#buffer) {
        msg.reject(error as Error)
      }
      this.#buffer = []
      throw error
    }
  }

  /**
   * Send a batch of messages as a single POST request.
   */
  async #sendBatch(batch: Array<QueuedMessage>): Promise<void> {
    if (batch.length === 0) return

    const { requestHeaders, fetchUrl } = await this.#buildRequest()

    // Get content type - prefer from options, then from messages, then from stream
    // NOTE(review): only the first message's contentType is consulted; later
    // per-message overrides in the same batch are ignored - confirm intended.
    const contentType =
      batch[0]?.contentType ?? this.#options.contentType ?? this.contentType

    if (contentType) {
      requestHeaders[`content-type`] = contentType
    }

    // Get last non-undefined seq (queue preserves append order)
    let highestSeq: string | undefined
    for (let i = batch.length - 1; i >= 0; i--) {
      if (batch[i]!.seq !== undefined) {
        highestSeq = batch[i]!.seq
        break
      }
    }

    if (highestSeq) {
      requestHeaders[STREAM_SEQ_HEADER] = highestSeq
    }

    const isJson = normalizeContentType(contentType) === `application/json`

    // Batch data based on content type
    let batchedBody: BodyInit
    if (isJson) {
      // For JSON mode: always send as array (server flattens one level)
      // Single append: [value] → server stores value
      // Multiple appends: [val1, val2] → server stores val1, val2
      const values = batch.map((m) => m.data)
      batchedBody = JSON.stringify(values)
    } else {
      // For byte mode: concatenate all chunks
      // NOTE(review): this path assumes every queued payload is a string or
      // Uint8Array (see the casts below); other body types accepted by
      // #appendDirect would mis-serialize here - confirm callers.
      const totalSize = batch.reduce((sum, m) => {
        const size =
          typeof m.data === `string`
            ? new TextEncoder().encode(m.data).length
            : (m.data as Uint8Array).length
        return sum + size
      }, 0)

      const concatenated = new Uint8Array(totalSize)
      let offset = 0
      for (const msg of batch) {
        const bytes =
          typeof msg.data === `string`
            ? new TextEncoder().encode(msg.data)
            : (msg.data as Uint8Array)
        concatenated.set(bytes, offset)
        offset += bytes.length
      }
      batchedBody = concatenated
    }

    // Combine signals: stream-level signal + any per-message signals
    const signals: Array<AbortSignal> = []
    if (this.#options.signal) {
      signals.push(this.#options.signal)
    }
    for (const msg of batch) {
      if (msg.signal) {
        signals.push(msg.signal)
      }
    }
    const combinedSignal =
      signals.length > 0 ? AbortSignal.any(signals) : undefined

    const response = await this.#fetchClient(fetchUrl.toString(), {
      method: `POST`,
      headers: requestHeaders,
      body: batchedBody,
      signal: combinedSignal,
    })

    if (!response.ok) {
      await handleErrorResponse(response, this.url)
    }
  }

  /**
   * Append a streaming body to the stream.
   *
   * Supports piping from any ReadableStream or async iterable:
   * - `source` yields Uint8Array or string chunks.
   * - Strings are encoded as UTF-8; no delimiters are added.
   * - Internally uses chunked transfer or HTTP/2 streaming.
   *
   * @example
   * ```typescript
   * // Pipe from a ReadableStream
   * const readable = new ReadableStream({
   *   start(controller) {
   *     controller.enqueue("chunk 1");
   *     controller.enqueue("chunk 2");
   *     controller.close();
   *   }
   * });
   * await stream.appendStream(readable);
   *
   * // Pipe from an async generator
   * async function* generate() {
   *   yield "line 1\n";
   *   yield "line 2\n";
   * }
   * await stream.appendStream(generate());
   *
   * // Pipe from fetch response body
   * const response = await fetch("https://example.com/data");
   * await stream.appendStream(response.body!);
   * ```
   */
  async appendStream(
    source:
      | ReadableStream<Uint8Array | string>
      | AsyncIterable<Uint8Array | string>,
    opts?: AppendOptions
  ): Promise<void> {
    const { requestHeaders, fetchUrl } = await this.#buildRequest()

    const contentType =
      opts?.contentType ?? this.#options.contentType ?? this.contentType
    if (contentType) {
      requestHeaders[`content-type`] = contentType
    }

    if (opts?.seq) {
      requestHeaders[STREAM_SEQ_HEADER] = opts.seq
    }

    // Convert to ReadableStream<Uint8Array> for the body
    const body = toReadableStream(source)

    const response = await this.#fetchClient(fetchUrl.toString(), {
      method: `POST`,
      headers: requestHeaders,
      body,
      // @ts-expect-error - duplex is needed for streaming but not in types
      duplex: `half`,
      signal: opts?.signal ?? this.#options.signal,
    })

    if (!response.ok) {
      await handleErrorResponse(response, this.url)
    }
  }

  /**
   * Create a writable stream that pipes data to this durable stream.
   *
   * Returns a WritableStream that can be used with `pipeTo()` or
   * `pipeThrough()` from any ReadableStream source.
   *
   * NOTE: chunks are buffered in memory and sent as a single appendStream()
   * call when the writable is closed - nothing is transmitted mid-stream.
   *
   * @example
   * ```typescript
   * // Pipe from fetch response
   * const response = await fetch("https://example.com/data");
   * await response.body!.pipeTo(stream.writable());
   *
   * // Pipe through a transform
   * const readable = someStream.pipeThrough(new TextEncoderStream());
   * await readable.pipeTo(stream.writable());
   * ```
   */
  writable(opts?: AppendOptions): WritableStream<Uint8Array | string> {
    const chunks: Array<Uint8Array | string> = []
    const stream = this

    return new WritableStream<Uint8Array | string>({
      write(chunk) {
        chunks.push(chunk)
      },
      async close() {
        if (chunks.length > 0) {
          // Create a ReadableStream from collected chunks
          const readable = new ReadableStream<Uint8Array | string>({
            start(controller) {
              for (const chunk of chunks) {
                controller.enqueue(chunk)
              }
              controller.close()
            },
          })
          await stream.appendStream(readable, opts)
        }
      },
      abort(reason) {
        // Buffered chunks are discarded; the abort is only logged.
        console.error(`WritableStream aborted:`, reason)
      },
    })
  }

  // ============================================================================
  // Read session factory (new API)
  // ============================================================================

  /**
   * Start a fetch-like streaming session against this handle's URL/headers/params.
   * The first request is made inside this method; it resolves when we have
   * a valid first response, or rejects on errors.
   *
   * Call-specific headers and params are merged with handle-level ones,
   * with call-specific values taking precedence.
   *
   * @example
   * ```typescript
   * const handle = await DurableStream.connect({
   *   url,
   *   headers: { Authorization: `Bearer ${token}` }
   * });
   * const res = await handle.stream<{ message: string }>();
   *
   * // Accumulate all JSON items
   * const items = await res.json();
   *
   * // Or stream live with ReadableStream
   * const reader = res.jsonStream().getReader();
   * let result = await reader.read();
   * while (!result.done) {
   *   console.log(result.value);
   *   result = await reader.read();
   * }
   *
   * // Or use subscriber for backpressure-aware consumption
   * res.subscribeJson(async (batch) => {
   *   for (const item of batch.items) {
   *     console.log(item);
   *   }
   * });
   * ```
   */
  async stream<TJson = unknown>(
    options?: Omit<StreamOptions, `url`>
  ): Promise<StreamResponse<TJson>> {
    // Check SSE compatibility if SSE mode is requested
    // (only possible when contentType is known, i.e. after connect/head/read).
    if (options?.live === `sse` && this.contentType) {
      const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) =>
        this.contentType!.startsWith(prefix)
      )
      if (!isSSECompatible) {
        throw new DurableStreamError(
          `SSE is not supported for content-type: ${this.contentType}`,
          `SSE_NOT_SUPPORTED`,
          400
        )
      }
    }

    // Merge handle-level and call-specific headers
    const mergedHeaders: HeadersRecord = {
      ...this.#options.headers,
      ...options?.headers,
    }

    // Merge handle-level and call-specific params
    const mergedParams: ParamsRecord = {
      ...this.#options.params,
      ...options?.params,
    }

    return streamFn<TJson>({
      url: this.url,
      headers: mergedHeaders,
      params: mergedParams,
      signal: options?.signal ?? this.#options.signal,
      fetch: this.#options.fetch,
      backoffOptions: this.#options.backoffOptions,
      offset: options?.offset,
      live: options?.live,
      json: options?.json,
      onError: options?.onError ?? this.#onError,
    })
  }

  // ============================================================================
  // Private methods
  // ============================================================================

  /**
   * Build request headers and URL.
   *
   * Resolves any header/param factory functions and applies the params as
   * query-string entries on a fresh URL each call.
   */
  async #buildRequest(): Promise<{
    requestHeaders: Record<string, string>
    fetchUrl: URL
  }> {
    const requestHeaders = await resolveHeaders(this.#options.headers)
    const fetchUrl = new URL(this.url)

    // Add params
    const params = await resolveParams(this.#options.params)
    for (const [key, value] of Object.entries(params)) {
      fetchUrl.searchParams.set(key, value)
    }

    return { requestHeaders, fetchUrl }
  }
}
|
|
771
|
+
|
|
772
|
+
// ============================================================================
|
|
773
|
+
// Utility functions
|
|
774
|
+
// ============================================================================
|
|
775
|
+
|
|
776
|
+
/**
|
|
777
|
+
* Encode a body value to the appropriate format.
|
|
778
|
+
* Strings are encoded as UTF-8.
|
|
779
|
+
* Objects are JSON-serialized.
|
|
780
|
+
*/
|
|
781
|
+
function encodeBody(
|
|
782
|
+
body: BodyInit | Uint8Array | string | unknown | undefined
|
|
783
|
+
): BodyInit | undefined {
|
|
784
|
+
if (body === undefined) {
|
|
785
|
+
return undefined
|
|
786
|
+
}
|
|
787
|
+
if (typeof body === `string`) {
|
|
788
|
+
return new TextEncoder().encode(body)
|
|
789
|
+
}
|
|
790
|
+
if (body instanceof Uint8Array) {
|
|
791
|
+
// Cast to ensure compatible BodyInit type
|
|
792
|
+
return body as unknown as BodyInit
|
|
793
|
+
}
|
|
794
|
+
// Check for BodyInit types (Blob, FormData, ReadableStream, ArrayBuffer, etc.)
|
|
795
|
+
if (
|
|
796
|
+
body instanceof Blob ||
|
|
797
|
+
body instanceof FormData ||
|
|
798
|
+
body instanceof ReadableStream ||
|
|
799
|
+
body instanceof ArrayBuffer ||
|
|
800
|
+
ArrayBuffer.isView(body)
|
|
801
|
+
) {
|
|
802
|
+
return body as BodyInit
|
|
803
|
+
}
|
|
804
|
+
// For other types (objects, arrays, numbers, etc.), JSON-serialize
|
|
805
|
+
return new TextEncoder().encode(JSON.stringify(body))
|
|
806
|
+
}
|
|
807
|
+
|
|
808
|
+
/**
|
|
809
|
+
* Convert an async iterable to a ReadableStream.
|
|
810
|
+
*/
|
|
811
|
+
function toReadableStream(
|
|
812
|
+
source:
|
|
813
|
+
| ReadableStream<Uint8Array | string>
|
|
814
|
+
| AsyncIterable<Uint8Array | string>
|
|
815
|
+
): ReadableStream<Uint8Array> {
|
|
816
|
+
// If it's already a ReadableStream, transform it
|
|
817
|
+
if (source instanceof ReadableStream) {
|
|
818
|
+
return source.pipeThrough(
|
|
819
|
+
new TransformStream<Uint8Array | string, Uint8Array>({
|
|
820
|
+
transform(chunk, controller) {
|
|
821
|
+
if (typeof chunk === `string`) {
|
|
822
|
+
controller.enqueue(new TextEncoder().encode(chunk))
|
|
823
|
+
} else {
|
|
824
|
+
controller.enqueue(chunk)
|
|
825
|
+
}
|
|
826
|
+
},
|
|
827
|
+
})
|
|
828
|
+
)
|
|
829
|
+
}
|
|
830
|
+
|
|
831
|
+
// Convert async iterable to ReadableStream
|
|
832
|
+
const encoder = new TextEncoder()
|
|
833
|
+
const iterator = source[Symbol.asyncIterator]()
|
|
834
|
+
|
|
835
|
+
return new ReadableStream<Uint8Array>({
|
|
836
|
+
async pull(controller) {
|
|
837
|
+
try {
|
|
838
|
+
const { done, value } = await iterator.next()
|
|
839
|
+
if (done) {
|
|
840
|
+
controller.close()
|
|
841
|
+
} else if (typeof value === `string`) {
|
|
842
|
+
controller.enqueue(encoder.encode(value))
|
|
843
|
+
} else {
|
|
844
|
+
controller.enqueue(value)
|
|
845
|
+
}
|
|
846
|
+
} catch (e) {
|
|
847
|
+
controller.error(e)
|
|
848
|
+
}
|
|
849
|
+
},
|
|
850
|
+
|
|
851
|
+
cancel() {
|
|
852
|
+
iterator.return?.()
|
|
853
|
+
},
|
|
854
|
+
})
|
|
855
|
+
}
|
|
856
|
+
|
|
857
|
+
/**
|
|
858
|
+
* Validate stream options.
|
|
859
|
+
*/
|
|
860
|
+
function validateOptions(options: Partial<DurableStreamOptions>): void {
|
|
861
|
+
if (!options.url) {
|
|
862
|
+
throw new MissingStreamUrlError()
|
|
863
|
+
}
|
|
864
|
+
if (options.signal && !(options.signal instanceof AbortSignal)) {
|
|
865
|
+
throw new InvalidSignalError()
|
|
866
|
+
}
|
|
867
|
+
}
|