@durable-streams/client 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +799 -0
- package/dist/index.cjs +1172 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +627 -0
- package/dist/index.d.ts +1072 -0
- package/dist/index.js +1830 -0
- package/dist/index.js.map +1 -0
- package/package.json +46 -0
- package/src/asyncIterableReadableStream.ts +220 -0
- package/src/constants.ts +105 -0
- package/src/error.ts +189 -0
- package/src/fetch.ts +267 -0
- package/src/index.ts +103 -0
- package/src/response.ts +1053 -0
- package/src/sse.ts +130 -0
- package/src/stream-api.ts +284 -0
- package/src/stream.ts +867 -0
- package/src/types.ts +737 -0
- package/src/utils.ts +104 -0
package/src/response.ts
ADDED
|
@@ -0,0 +1,1053 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* StreamResponse - A streaming session for reading from a durable stream.
|
|
3
|
+
*
|
|
4
|
+
* Represents a live session with fixed `url`, `offset`, and `live` parameters.
|
|
5
|
+
* Supports multiple consumption styles: Promise helpers, ReadableStreams, and Subscribers.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { asAsyncIterableReadableStream } from "./asyncIterableReadableStream"
|
|
9
|
+
import {
|
|
10
|
+
STREAM_CURSOR_HEADER,
|
|
11
|
+
STREAM_OFFSET_HEADER,
|
|
12
|
+
STREAM_UP_TO_DATE_HEADER,
|
|
13
|
+
} from "./constants"
|
|
14
|
+
import { DurableStreamError } from "./error"
|
|
15
|
+
import { parseSSEStream } from "./sse"
|
|
16
|
+
import type { ReadableStreamAsyncIterable } from "./asyncIterableReadableStream"
|
|
17
|
+
import type { SSEControlEvent, SSEEvent } from "./sse"
|
|
18
|
+
import type {
|
|
19
|
+
ByteChunk,
|
|
20
|
+
StreamResponse as IStreamResponse,
|
|
21
|
+
JsonBatch,
|
|
22
|
+
LiveMode,
|
|
23
|
+
Offset,
|
|
24
|
+
SSEResilienceOptions,
|
|
25
|
+
TextChunk,
|
|
26
|
+
} from "./types"
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Internal configuration for creating a StreamResponse.
|
|
30
|
+
*/
|
|
31
|
+
export interface StreamResponseConfig {
  /** The stream URL this session reads from. */
  url: string
  /** Content type taken from the first response (may be absent). */
  contentType?: string
  /** Live mode for this session. */
  live: LiveMode
  /** Offset the session was started from. */
  startOffset: Offset
  /** Whether to treat payloads as JSON (explicit hint or content-type). */
  isJsonMode: boolean
  /** Initial offset read from the first response headers. */
  initialOffset: Offset
  /** Initial cursor read from the first response headers. */
  initialCursor?: string
  /** Initial up-to-date flag read from the first response headers. */
  initialUpToDate: boolean
  /** The held first Response object (its body has not been consumed yet). */
  firstResponse: Response
  /** Abort controller governing the whole session. */
  abortController: AbortController
  /** Fetches the next chunk (long-poll mode). */
  fetchNext: (
    offset: Offset,
    cursor: string | undefined,
    signal: AbortSignal
  ) => Promise<Response>
  /** Starts an SSE connection and resolves with a Response whose body is the SSE stream. */
  startSSE?: (
    offset: Offset,
    cursor: string | undefined,
    signal: AbortSignal
  ) => Promise<Response>
  /** SSE resilience tuning; unset fields fall back to defaults in the constructor. */
  sseResilience?: SSEResilienceOptions
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* Implementation of the StreamResponse interface.
|
|
70
|
+
*/
|
|
71
|
+
export class StreamResponseImpl<
|
|
72
|
+
TJson = unknown,
|
|
73
|
+
> implements IStreamResponse<TJson> {
|
|
74
|
+
// --- Static session info ---
|
|
75
|
+
readonly url: string
|
|
76
|
+
readonly contentType?: string
|
|
77
|
+
readonly live: LiveMode
|
|
78
|
+
readonly startOffset: Offset
|
|
79
|
+
|
|
80
|
+
// --- Response metadata (updated on each response) ---
|
|
81
|
+
#headers: Headers
|
|
82
|
+
#status: number
|
|
83
|
+
#statusText: string
|
|
84
|
+
#ok: boolean
|
|
85
|
+
#isLoading: boolean
|
|
86
|
+
|
|
87
|
+
// --- Evolving state ---
|
|
88
|
+
offset: Offset
|
|
89
|
+
cursor?: string
|
|
90
|
+
upToDate: boolean
|
|
91
|
+
|
|
92
|
+
// --- Internal state ---
|
|
93
|
+
#isJsonMode: boolean
|
|
94
|
+
#abortController: AbortController
|
|
95
|
+
#fetchNext: StreamResponseConfig[`fetchNext`]
|
|
96
|
+
#startSSE?: StreamResponseConfig[`startSSE`]
|
|
97
|
+
#closedResolve!: () => void
|
|
98
|
+
#closedReject!: (err: Error) => void
|
|
99
|
+
#closed: Promise<void>
|
|
100
|
+
#stopAfterUpToDate = false
|
|
101
|
+
#consumptionMethod: string | null = null
|
|
102
|
+
|
|
103
|
+
// --- SSE Resilience State ---
|
|
104
|
+
#sseResilience: Required<SSEResilienceOptions>
|
|
105
|
+
#lastSSEConnectionStartTime?: number
|
|
106
|
+
#consecutiveShortSSEConnections = 0
|
|
107
|
+
#sseFallbackToLongPoll = false
|
|
108
|
+
|
|
109
|
+
// Core primitive: a ReadableStream of Response objects
|
|
110
|
+
#responseStream: ReadableStream<Response>
|
|
111
|
+
|
|
112
|
+
constructor(config: StreamResponseConfig) {
|
|
113
|
+
this.url = config.url
|
|
114
|
+
this.contentType = config.contentType
|
|
115
|
+
this.live = config.live
|
|
116
|
+
this.startOffset = config.startOffset
|
|
117
|
+
this.offset = config.initialOffset
|
|
118
|
+
this.cursor = config.initialCursor
|
|
119
|
+
this.upToDate = config.initialUpToDate
|
|
120
|
+
|
|
121
|
+
// Initialize response metadata from first response
|
|
122
|
+
this.#headers = config.firstResponse.headers
|
|
123
|
+
this.#status = config.firstResponse.status
|
|
124
|
+
this.#statusText = config.firstResponse.statusText
|
|
125
|
+
this.#ok = config.firstResponse.ok
|
|
126
|
+
// isLoading is false because stream() already awaited the first response
|
|
127
|
+
// before creating this StreamResponse. By the time user has this object,
|
|
128
|
+
// the initial request has completed.
|
|
129
|
+
this.#isLoading = false
|
|
130
|
+
|
|
131
|
+
this.#isJsonMode = config.isJsonMode
|
|
132
|
+
this.#abortController = config.abortController
|
|
133
|
+
this.#fetchNext = config.fetchNext
|
|
134
|
+
this.#startSSE = config.startSSE
|
|
135
|
+
|
|
136
|
+
// Initialize SSE resilience options with defaults
|
|
137
|
+
this.#sseResilience = {
|
|
138
|
+
minConnectionDuration:
|
|
139
|
+
config.sseResilience?.minConnectionDuration ?? 1000,
|
|
140
|
+
maxShortConnections: config.sseResilience?.maxShortConnections ?? 3,
|
|
141
|
+
backoffBaseDelay: config.sseResilience?.backoffBaseDelay ?? 100,
|
|
142
|
+
backoffMaxDelay: config.sseResilience?.backoffMaxDelay ?? 5000,
|
|
143
|
+
logWarnings: config.sseResilience?.logWarnings ?? true,
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
this.#closed = new Promise((resolve, reject) => {
|
|
147
|
+
this.#closedResolve = resolve
|
|
148
|
+
this.#closedReject = reject
|
|
149
|
+
})
|
|
150
|
+
|
|
151
|
+
// Create the core response stream
|
|
152
|
+
this.#responseStream = this.#createResponseStream(config.firstResponse)
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
  // --- Response metadata getters ---
  // These mirror the most recent server response; #updateStateFromResponse
  // refreshes the backing fields on every long-poll response.

  /** Headers of the latest response. */
  get headers(): Headers {
    return this.#headers
  }

  /** HTTP status code of the latest response. */
  get status(): number {
    return this.#status
  }

  /** HTTP status text of the latest response. */
  get statusText(): string {
    return this.#statusText
  }

  /** Whether the latest response had a 2xx status. */
  get ok(): boolean {
    return this.#ok
  }

  /** Always false once the object exists — the initial request has completed. */
  get isLoading(): boolean {
    return this.#isLoading
  }
|
|
176
|
+
|
|
177
|
+
// =================================
|
|
178
|
+
// Internal helpers
|
|
179
|
+
// =================================
|
|
180
|
+
|
|
181
|
+
#ensureJsonMode(): void {
|
|
182
|
+
if (!this.#isJsonMode) {
|
|
183
|
+
throw new DurableStreamError(
|
|
184
|
+
`JSON methods are only valid for JSON-mode streams. ` +
|
|
185
|
+
`Content-Type is "${this.contentType}" and json hint was not set.`,
|
|
186
|
+
`BAD_REQUEST`
|
|
187
|
+
)
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
  /** Settle the session's `closed` promise successfully (idempotent: resolving
   * an already-settled promise is a no-op). */
  #markClosed(): void {
    this.#closedResolve()
  }

  /** Settle the session's `closed` promise with an error (no-op if the promise
   * has already settled). */
  #markError(err: Error): void {
    this.#closedReject(err)
  }
|
|
198
|
+
|
|
199
|
+
/**
|
|
200
|
+
* Ensure only one consumption method is used per StreamResponse.
|
|
201
|
+
* Throws if any consumption method was already called.
|
|
202
|
+
*/
|
|
203
|
+
#ensureNoConsumption(method: string): void {
|
|
204
|
+
if (this.#consumptionMethod !== null) {
|
|
205
|
+
throw new DurableStreamError(
|
|
206
|
+
`Cannot call ${method}() - this StreamResponse is already being consumed via ${this.#consumptionMethod}()`,
|
|
207
|
+
`ALREADY_CONSUMED`
|
|
208
|
+
)
|
|
209
|
+
}
|
|
210
|
+
this.#consumptionMethod = method
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
/**
|
|
214
|
+
* Determine if we should continue with live updates based on live mode
|
|
215
|
+
* and whether we've received upToDate.
|
|
216
|
+
*/
|
|
217
|
+
#shouldContinueLive(): boolean {
|
|
218
|
+
// Stop if we've received upToDate and a consumption method wants to stop after upToDate
|
|
219
|
+
if (this.#stopAfterUpToDate && this.upToDate) return false
|
|
220
|
+
// Stop if live mode is explicitly disabled
|
|
221
|
+
if (this.live === false) return false
|
|
222
|
+
return true
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
/**
|
|
226
|
+
* Update state from response headers.
|
|
227
|
+
*/
|
|
228
|
+
#updateStateFromResponse(response: Response): void {
|
|
229
|
+
// Update stream-specific state
|
|
230
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER)
|
|
231
|
+
if (offset) this.offset = offset
|
|
232
|
+
const cursor = response.headers.get(STREAM_CURSOR_HEADER)
|
|
233
|
+
if (cursor) this.cursor = cursor
|
|
234
|
+
this.upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER)
|
|
235
|
+
|
|
236
|
+
// Update response metadata to reflect latest server response
|
|
237
|
+
this.#headers = response.headers
|
|
238
|
+
this.#status = response.status
|
|
239
|
+
this.#statusText = response.statusText
|
|
240
|
+
this.#ok = response.ok
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
/**
|
|
244
|
+
* Extract stream metadata from Response headers.
|
|
245
|
+
* Used by subscriber APIs to get the correct offset/cursor/upToDate for each
|
|
246
|
+
* specific Response, rather than reading from `this` which may be stale due to
|
|
247
|
+
* ReadableStream prefetching or timing issues.
|
|
248
|
+
*/
|
|
249
|
+
#getMetadataFromResponse(response: Response): {
|
|
250
|
+
offset: Offset
|
|
251
|
+
cursor: string | undefined
|
|
252
|
+
upToDate: boolean
|
|
253
|
+
} {
|
|
254
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER)
|
|
255
|
+
const cursor = response.headers.get(STREAM_CURSOR_HEADER)
|
|
256
|
+
const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER)
|
|
257
|
+
return {
|
|
258
|
+
offset: offset ?? this.offset, // Fall back to instance state if no header
|
|
259
|
+
cursor: cursor ?? this.cursor,
|
|
260
|
+
upToDate,
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
/**
|
|
265
|
+
* Create a synthetic Response from SSE data with proper headers.
|
|
266
|
+
* Includes offset/cursor/upToDate in headers so subscribers can read them.
|
|
267
|
+
*/
|
|
268
|
+
#createSSESyntheticResponse(
|
|
269
|
+
data: string,
|
|
270
|
+
offset: Offset,
|
|
271
|
+
cursor: string | undefined,
|
|
272
|
+
upToDate: boolean
|
|
273
|
+
): Response {
|
|
274
|
+
const headers: Record<string, string> = {
|
|
275
|
+
"content-type": this.contentType ?? `application/json`,
|
|
276
|
+
[STREAM_OFFSET_HEADER]: String(offset),
|
|
277
|
+
}
|
|
278
|
+
if (cursor) {
|
|
279
|
+
headers[STREAM_CURSOR_HEADER] = cursor
|
|
280
|
+
}
|
|
281
|
+
if (upToDate) {
|
|
282
|
+
headers[STREAM_UP_TO_DATE_HEADER] = `true`
|
|
283
|
+
}
|
|
284
|
+
return new Response(data, { status: 200, headers })
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
/**
|
|
288
|
+
* Update instance state from an SSE control event.
|
|
289
|
+
*/
|
|
290
|
+
#updateStateFromSSEControl(controlEvent: SSEControlEvent): void {
|
|
291
|
+
this.offset = controlEvent.streamNextOffset
|
|
292
|
+
if (controlEvent.streamCursor) {
|
|
293
|
+
this.cursor = controlEvent.streamCursor
|
|
294
|
+
}
|
|
295
|
+
if (controlEvent.upToDate !== undefined) {
|
|
296
|
+
this.upToDate = controlEvent.upToDate
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
  /**
   * Mark the start of an SSE connection for duration tracking.
   * #handleSSEConnectionEnd compares against this timestamp to detect
   * connections that die too quickly (e.g. a buffering proxy).
   */
  #markSSEConnectionStart(): void {
    this.#lastSSEConnectionStartTime = Date.now()
  }
|
|
306
|
+
|
|
307
|
+
  /**
   * Handle SSE connection end - check duration and manage fallback state.
   * Returns a delay (ms) already waited before the caller may reconnect, or
   * null if SSE should not be retried (fallback to long polling triggered).
   *
   * Short-connection accounting: a connection shorter than
   * minConnectionDuration that was not deliberately aborted increments a
   * counter; once the counter reaches maxShortConnections, SSE is abandoned
   * for this session. Healthy (long enough) connections reset the counter.
   */
  async #handleSSEConnectionEnd(): Promise<number | null> {
    if (this.#lastSSEConnectionStartTime === undefined) {
      return 0 // No tracking, allow immediate reconnect
    }

    const connectionDuration = Date.now() - this.#lastSSEConnectionStartTime
    const wasAborted = this.#abortController.signal.aborted

    if (
      connectionDuration < this.#sseResilience.minConnectionDuration &&
      !wasAborted
    ) {
      // Connection was too short - likely proxy buffering or misconfiguration
      this.#consecutiveShortSSEConnections++

      if (
        this.#consecutiveShortSSEConnections >=
        this.#sseResilience.maxShortConnections
      ) {
        // Too many short connections - fall back to long polling
        this.#sseFallbackToLongPoll = true

        if (this.#sseResilience.logWarnings) {
          console.warn(
            `[Durable Streams] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). ` +
              `Falling back to long polling. ` +
              `Your proxy must support streaming SSE responses (not buffer the complete response). ` +
              `Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy.`
          )
        }
        return null // Signal to not reconnect SSE
      } else {
        // Add exponential backoff with full jitter to prevent tight infinite loop
        // Formula: random(0, min(cap, base * 2^attempt))
        const maxDelay = Math.min(
          this.#sseResilience.backoffMaxDelay,
          this.#sseResilience.backoffBaseDelay *
            Math.pow(2, this.#consecutiveShortSSEConnections)
        )
        const delayMs = Math.floor(Math.random() * maxDelay)
        // The wait happens here, inside this method; callers treat a non-null
        // return as "safe to reconnect now".
        await new Promise((resolve) => setTimeout(resolve, delayMs))
        return delayMs
      }
    } else if (
      connectionDuration >= this.#sseResilience.minConnectionDuration
    ) {
      // Connection was healthy - reset counter
      this.#consecutiveShortSSEConnections = 0
    }
    // NOTE(review): an aborted-but-short connection falls through here without
    // resetting the counter — presumably intentional, since abort is user-driven.

    return 0 // Allow immediate reconnect
  }
|
|
363
|
+
|
|
364
|
+
  /**
   * Try to reconnect SSE and return the new event iterator, or null if
   * reconnection is not possible (fallback active, live phase over, no SSE
   * starter configured, short-connection limit reached, or the new response
   * has no body).
   *
   * May wait (backoff inside #handleSSEConnectionEnd) before reconnecting.
   */
  async #trySSEReconnect(): Promise<AsyncGenerator<
    SSEEvent,
    void,
    undefined
  > | null> {
    // Check if we should fall back to long-poll due to repeated short connections
    if (this.#sseFallbackToLongPoll) {
      return null // Will cause fallback to long-poll
    }

    if (!this.#shouldContinueLive() || !this.#startSSE) {
      return null
    }

    // Handle short connection detection and backoff (may sleep)
    const delayOrNull = await this.#handleSSEConnectionEnd()
    if (delayOrNull === null) {
      return null // Fallback to long-poll was triggered
    }

    // Track new connection start for the next duration check
    this.#markSSEConnectionStart()

    // Resume from the current offset/cursor so no events are skipped.
    const newSSEResponse = await this.#startSSE(
      this.offset,
      this.cursor,
      this.#abortController.signal
    )
    if (newSSEResponse.body) {
      return parseSSEStream(newSSEResponse.body, this.#abortController.signal)
    }
    return null
  }
|
|
401
|
+
|
|
402
|
+
  /**
   * Pull one step of progress out of the SSE event iterator.
   * Returns an object indicating the result:
   * - { type: 'response', response, newIterator? } - yield this response
   * - { type: 'closed' } - stream should be closed
   * - { type: 'error', error } - an error occurred
   * - { type: 'continue', newIterator? } - continue processing (control-only
   *   event, or a successful reconnect after the old stream ended)
   */
  async #processSSEEvents(
    sseEventIterator: AsyncGenerator<SSEEvent, void, undefined>
  ): Promise<
    | {
        type: `response`
        response: Response
        newIterator?: AsyncGenerator<SSEEvent, void, undefined>
      }
    | { type: `closed` }
    | { type: `error`; error: Error }
    | {
        type: `continue`
        newIterator?: AsyncGenerator<SSEEvent, void, undefined>
      }
  > {
    const { done, value: event } = await sseEventIterator.next()

    if (done) {
      // SSE stream ended - try to reconnect before giving up
      try {
        const newIterator = await this.#trySSEReconnect()
        if (newIterator) {
          return { type: `continue`, newIterator }
        }
      } catch (err) {
        return {
          type: `error`,
          error:
            err instanceof Error ? err : new Error(`SSE reconnection failed`),
        }
      }
      return { type: `closed` }
    }

    if (event.type === `data`) {
      // Wait for the subsequent control event to get correct offset/cursor/upToDate
      return this.#processSSEDataEvent(event.data, sseEventIterator)
    }

    // Control event without preceding data - update state and continue
    this.#updateStateFromSSEControl(event)
    return { type: `continue` }
  }
|
|
453
|
+
|
|
454
|
+
  /**
   * Process an SSE data event by waiting for its corresponding control event.
   * In the SSE protocol used here, control events come AFTER data events, and
   * multiple data events may arrive before a single control event - we buffer
   * (concatenate) them until the control event supplies the authoritative
   * offset/cursor/upToDate for the batch.
   */
  async #processSSEDataEvent(
    pendingData: string,
    sseEventIterator: AsyncGenerator<SSEEvent, void, undefined>
  ): Promise<
    | {
        type: `response`
        response: Response
        newIterator?: AsyncGenerator<SSEEvent, void, undefined>
      }
    | { type: `error`; error: Error }
  > {
    // Buffer to accumulate data from multiple consecutive data events
    let bufferedData = pendingData

    // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
    while (true) {
      const { done: controlDone, value: controlEvent } =
        await sseEventIterator.next()

      if (controlDone) {
        // Stream ended without a control event - yield buffered data with the
        // current (possibly slightly stale) instance state.
        const response = this.#createSSESyntheticResponse(
          bufferedData,
          this.offset,
          this.cursor,
          this.upToDate
        )

        // Try to reconnect; the caller swaps in newIterator when present.
        try {
          const newIterator = await this.#trySSEReconnect()
          return {
            type: `response`,
            response,
            newIterator: newIterator ?? undefined,
          }
        } catch (err) {
          return {
            type: `error`,
            error:
              err instanceof Error ? err : new Error(`SSE reconnection failed`),
          }
        }
      }

      if (controlEvent.type === `control`) {
        // Update state and create response with the control event's metadata
        // (authoritative for this batch, unlike `this.*` which may lag).
        this.#updateStateFromSSEControl(controlEvent)
        const response = this.#createSSESyntheticResponse(
          bufferedData,
          controlEvent.streamNextOffset,
          controlEvent.streamCursor,
          controlEvent.upToDate ?? false
        )
        return { type: `response`, response }
      }

      // Got another data event before control - buffer it.
      // Server sends multiple data events followed by one control event.
      bufferedData += controlEvent.data
    }
  }
|
|
521
|
+
|
|
522
|
+
  /**
   * Create the core ReadableStream<Response> that yields responses.
   * This is consumed once - all consumption methods use this same stream.
   *
   * For long-poll mode: yields actual Response objects.
   * For SSE mode: yields synthetic Response objects created from SSE data
   * events (the first response's body IS the SSE stream, so it is parsed
   * rather than enqueued).
   *
   * The pull() callback is invoked lazily by the consumer, so each pull
   * produces at most one Response; closing/erroring settles the `closed`
   * promise via #markClosed/#markError.
   */
  #createResponseStream(firstResponse: Response): ReadableStream<Response> {
    let firstResponseYielded = false
    let sseEventIterator: AsyncGenerator<SSEEvent, void, undefined> | null =
      null

    return new ReadableStream<Response>({
      pull: async (controller) => {
        try {
          // First, yield the held first response (for non-SSE modes)
          // For SSE mode, the first response IS the SSE stream, so we start parsing it
          if (!firstResponseYielded) {
            firstResponseYielded = true

            // Check if this is an SSE response
            const isSSE =
              firstResponse.headers
                .get(`content-type`)
                ?.includes(`text/event-stream`) ?? false

            if (isSSE && firstResponse.body) {
              // Track SSE connection start for resilience monitoring
              this.#markSSEConnectionStart()
              // Start parsing SSE events
              sseEventIterator = parseSSEStream(
                firstResponse.body,
                this.#abortController.signal
              )
              // Fall through to SSE processing below
            } else {
              // Regular response - enqueue it
              controller.enqueue(firstResponse)

              // If upToDate and not continuing live, we're done
              if (this.upToDate && !this.#shouldContinueLive()) {
                this.#markClosed()
                controller.close()
                return
              }
              return
            }
          }

          // SSE mode: process events from the SSE stream
          if (sseEventIterator) {
            // Keep reading events until we get data or stream ends
            // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
            while (true) {
              const result = await this.#processSSEEvents(sseEventIterator)

              switch (result.type) {
                case `response`:
                  // Swap in the reconnected iterator before enqueueing
                  if (result.newIterator) {
                    sseEventIterator = result.newIterator
                  }
                  controller.enqueue(result.response)
                  return

                case `closed`:
                  this.#markClosed()
                  controller.close()
                  return

                case `error`:
                  this.#markError(result.error)
                  controller.error(result.error)
                  return

                case `continue`:
                  if (result.newIterator) {
                    sseEventIterator = result.newIterator
                  }
                  continue
              }
            }
          }

          // Long-poll mode: continue with live updates if needed
          if (this.#shouldContinueLive()) {
            if (this.#abortController.signal.aborted) {
              this.#markClosed()
              controller.close()
              return
            }

            const response = await this.#fetchNext(
              this.offset,
              this.cursor,
              this.#abortController.signal
            )

            this.#updateStateFromResponse(response)
            controller.enqueue(response)
            // Let the next pull() decide whether to close based on upToDate
            return
          }

          // No more data
          this.#markClosed()
          controller.close()
        } catch (err) {
          // An abort is a normal shutdown, not an error.
          if (this.#abortController.signal.aborted) {
            this.#markClosed()
            controller.close()
          } else {
            this.#markError(err instanceof Error ? err : new Error(String(err)))
            controller.error(err)
          }
        }
      },

      cancel: () => {
        // Consumer cancelled: abort in-flight work and settle `closed`.
        this.#abortController.abort()
        this.#markClosed()
      },
    })
  }
|
|
645
|
+
|
|
646
|
+
  /**
   * Get the response stream reader. Can only be called once per session:
   * getReader() locks #responseStream, so a second call throws - this backs
   * the single-consumer contract enforced by #ensureNoConsumption.
   */
  #getResponseReader(): ReadableStreamDefaultReader<Response> {
    return this.#responseStream.getReader()
  }
|
|
652
|
+
|
|
653
|
+
// =================================
|
|
654
|
+
// 1) Accumulating helpers (Promise)
|
|
655
|
+
// =================================
|
|
656
|
+
|
|
657
|
+
async body(): Promise<Uint8Array> {
|
|
658
|
+
this.#ensureNoConsumption(`body`)
|
|
659
|
+
this.#stopAfterUpToDate = true
|
|
660
|
+
const reader = this.#getResponseReader()
|
|
661
|
+
const blobs: Array<Blob> = []
|
|
662
|
+
|
|
663
|
+
try {
|
|
664
|
+
let result = await reader.read()
|
|
665
|
+
while (!result.done) {
|
|
666
|
+
// Capture upToDate BEFORE consuming body (to avoid race with prefetch)
|
|
667
|
+
const wasUpToDate = this.upToDate
|
|
668
|
+
const blob = await result.value.blob()
|
|
669
|
+
if (blob.size > 0) {
|
|
670
|
+
blobs.push(blob)
|
|
671
|
+
}
|
|
672
|
+
if (wasUpToDate) break
|
|
673
|
+
result = await reader.read()
|
|
674
|
+
}
|
|
675
|
+
} finally {
|
|
676
|
+
reader.releaseLock()
|
|
677
|
+
}
|
|
678
|
+
|
|
679
|
+
this.#markClosed()
|
|
680
|
+
|
|
681
|
+
if (blobs.length === 0) {
|
|
682
|
+
return new Uint8Array(0)
|
|
683
|
+
}
|
|
684
|
+
if (blobs.length === 1) {
|
|
685
|
+
return new Uint8Array(await blobs[0]!.arrayBuffer())
|
|
686
|
+
}
|
|
687
|
+
|
|
688
|
+
const combined = new Blob(blobs)
|
|
689
|
+
return new Uint8Array(await combined.arrayBuffer())
|
|
690
|
+
}
|
|
691
|
+
|
|
692
|
+
async json<T = TJson>(): Promise<Array<T>> {
|
|
693
|
+
this.#ensureNoConsumption(`json`)
|
|
694
|
+
this.#ensureJsonMode()
|
|
695
|
+
this.#stopAfterUpToDate = true
|
|
696
|
+
const reader = this.#getResponseReader()
|
|
697
|
+
const items: Array<T> = []
|
|
698
|
+
|
|
699
|
+
try {
|
|
700
|
+
let result = await reader.read()
|
|
701
|
+
while (!result.done) {
|
|
702
|
+
// Capture upToDate BEFORE parsing (to avoid race with prefetch)
|
|
703
|
+
const wasUpToDate = this.upToDate
|
|
704
|
+
// Get response text first (handles empty responses gracefully)
|
|
705
|
+
const text = await result.value.text()
|
|
706
|
+
const content = text.trim() || `[]` // Default to empty array if no content or whitespace
|
|
707
|
+
const parsed = JSON.parse(content) as T | Array<T>
|
|
708
|
+
if (Array.isArray(parsed)) {
|
|
709
|
+
items.push(...parsed)
|
|
710
|
+
} else {
|
|
711
|
+
items.push(parsed)
|
|
712
|
+
}
|
|
713
|
+
// Check if THIS response had upToDate set when we started reading it
|
|
714
|
+
if (wasUpToDate) break
|
|
715
|
+
result = await reader.read()
|
|
716
|
+
}
|
|
717
|
+
} finally {
|
|
718
|
+
reader.releaseLock()
|
|
719
|
+
}
|
|
720
|
+
|
|
721
|
+
this.#markClosed()
|
|
722
|
+
return items
|
|
723
|
+
}
|
|
724
|
+
|
|
725
|
+
async text(): Promise<string> {
|
|
726
|
+
this.#ensureNoConsumption(`text`)
|
|
727
|
+
this.#stopAfterUpToDate = true
|
|
728
|
+
const reader = this.#getResponseReader()
|
|
729
|
+
const parts: Array<string> = []
|
|
730
|
+
|
|
731
|
+
try {
|
|
732
|
+
let result = await reader.read()
|
|
733
|
+
while (!result.done) {
|
|
734
|
+
// Capture upToDate BEFORE consuming text (to avoid race with prefetch)
|
|
735
|
+
const wasUpToDate = this.upToDate
|
|
736
|
+
const text = await result.value.text()
|
|
737
|
+
if (text) {
|
|
738
|
+
parts.push(text)
|
|
739
|
+
}
|
|
740
|
+
if (wasUpToDate) break
|
|
741
|
+
result = await reader.read()
|
|
742
|
+
}
|
|
743
|
+
} finally {
|
|
744
|
+
reader.releaseLock()
|
|
745
|
+
}
|
|
746
|
+
|
|
747
|
+
this.#markClosed()
|
|
748
|
+
return parts.join(``)
|
|
749
|
+
}
|
|
750
|
+
|
|
751
|
+
// =====================
|
|
752
|
+
// 2) ReadableStreams
|
|
753
|
+
// =====================
|
|
754
|
+
|
|
755
|
+
  /**
   * Internal helper to create the body stream without consumption check.
   * Used by both bodyStream() and textStream().
   *
   * Pipes each Response body into one TransformStream so the consumer sees a
   * single continuous byte stream. The pump runs as a deliberately detached
   * async task (see note below) that settles `closed` when it finishes.
   */
  #createBodyStreamInternal(): ReadableStream<Uint8Array> {
    const { readable, writable } = new TransformStream<Uint8Array, Uint8Array>()
    const reader = this.#getResponseReader()

    const pipeBodyStream = async (): Promise<void> => {
      try {
        let result = await reader.read()
        while (!result.done) {
          // Capture upToDate BEFORE consuming body (to avoid race with prefetch)
          const wasUpToDate = this.upToDate
          const body = result.value.body
          if (body) {
            // prevent* options keep `writable` open and under our control
            // across the bodies of multiple consecutive responses.
            await body.pipeTo(writable, {
              preventClose: true,
              preventAbort: true,
              preventCancel: true,
            })
          }

          if (wasUpToDate && !this.#shouldContinueLive()) {
            break
          }
          result = await reader.read()
        }
        await writable.close()
        this.#markClosed()
      } catch (err) {
        if (this.#abortController.signal.aborted) {
          // Abort is a normal shutdown: close the sink and settle `closed`.
          try {
            await writable.close()
          } catch {
            // Ignore close errors on abort
          }
          this.#markClosed()
        } else {
          // Real failure: propagate to the consumer via the writable side.
          try {
            await writable.abort(err)
          } catch {
            // Ignore abort errors
          }
          this.#markError(err instanceof Error ? err : new Error(String(err)))
        }
      } finally {
        reader.releaseLock()
      }
    }

    // Intentionally not awaited: the pump handles all of its own errors above,
    // so this floating promise can never reject.
    pipeBodyStream()

    return readable
  }
|
|
810
|
+
|
|
811
|
+
bodyStream(): ReadableStreamAsyncIterable<Uint8Array> {
|
|
812
|
+
this.#ensureNoConsumption(`bodyStream`)
|
|
813
|
+
return asAsyncIterableReadableStream(this.#createBodyStreamInternal())
|
|
814
|
+
}
|
|
815
|
+
|
|
816
|
+
jsonStream(): ReadableStreamAsyncIterable<TJson> {
|
|
817
|
+
this.#ensureNoConsumption(`jsonStream`)
|
|
818
|
+
this.#ensureJsonMode()
|
|
819
|
+
const reader = this.#getResponseReader()
|
|
820
|
+
let pendingItems: Array<TJson> = []
|
|
821
|
+
|
|
822
|
+
const stream = new ReadableStream<TJson>({
|
|
823
|
+
pull: async (controller) => {
|
|
824
|
+
// Drain pending items first
|
|
825
|
+
if (pendingItems.length > 0) {
|
|
826
|
+
controller.enqueue(pendingItems.shift())
|
|
827
|
+
return
|
|
828
|
+
}
|
|
829
|
+
|
|
830
|
+
// Get next response
|
|
831
|
+
const { done, value: response } = await reader.read()
|
|
832
|
+
if (done) {
|
|
833
|
+
this.#markClosed()
|
|
834
|
+
controller.close()
|
|
835
|
+
return
|
|
836
|
+
}
|
|
837
|
+
|
|
838
|
+
// Parse JSON and flatten arrays (handle empty responses gracefully)
|
|
839
|
+
const text = await response.text()
|
|
840
|
+
const content = text.trim() || `[]` // Default to empty array if no content or whitespace
|
|
841
|
+
const parsed = JSON.parse(content) as TJson | Array<TJson>
|
|
842
|
+
pendingItems = Array.isArray(parsed) ? parsed : [parsed]
|
|
843
|
+
|
|
844
|
+
// Enqueue first item
|
|
845
|
+
if (pendingItems.length > 0) {
|
|
846
|
+
controller.enqueue(pendingItems.shift())
|
|
847
|
+
}
|
|
848
|
+
},
|
|
849
|
+
|
|
850
|
+
cancel: () => {
|
|
851
|
+
reader.releaseLock()
|
|
852
|
+
this.cancel()
|
|
853
|
+
},
|
|
854
|
+
})
|
|
855
|
+
|
|
856
|
+
return asAsyncIterableReadableStream(stream)
|
|
857
|
+
}
|
|
858
|
+
|
|
859
|
+
textStream(): ReadableStreamAsyncIterable<string> {
|
|
860
|
+
this.#ensureNoConsumption(`textStream`)
|
|
861
|
+
const decoder = new TextDecoder()
|
|
862
|
+
|
|
863
|
+
const stream = this.#createBodyStreamInternal().pipeThrough(
|
|
864
|
+
new TransformStream<Uint8Array, string>({
|
|
865
|
+
transform(chunk, controller) {
|
|
866
|
+
controller.enqueue(decoder.decode(chunk, { stream: true }))
|
|
867
|
+
},
|
|
868
|
+
flush(controller) {
|
|
869
|
+
const remaining = decoder.decode()
|
|
870
|
+
if (remaining) {
|
|
871
|
+
controller.enqueue(remaining)
|
|
872
|
+
}
|
|
873
|
+
},
|
|
874
|
+
})
|
|
875
|
+
)
|
|
876
|
+
|
|
877
|
+
return asAsyncIterableReadableStream(stream)
|
|
878
|
+
}
|
|
879
|
+
|
|
880
|
+
// =====================
|
|
881
|
+
// 3) Subscriber APIs
|
|
882
|
+
// =====================
|
|
883
|
+
|
|
884
|
+
subscribeJson<T = TJson>(
|
|
885
|
+
subscriber: (batch: JsonBatch<T>) => Promise<void>
|
|
886
|
+
): () => void {
|
|
887
|
+
this.#ensureNoConsumption(`subscribeJson`)
|
|
888
|
+
this.#ensureJsonMode()
|
|
889
|
+
const abortController = new AbortController()
|
|
890
|
+
const reader = this.#getResponseReader()
|
|
891
|
+
|
|
892
|
+
const consumeJsonSubscription = async (): Promise<void> => {
|
|
893
|
+
try {
|
|
894
|
+
let result = await reader.read()
|
|
895
|
+
while (!result.done) {
|
|
896
|
+
if (abortController.signal.aborted) break
|
|
897
|
+
|
|
898
|
+
// Get metadata from Response headers (not from `this` which may be stale)
|
|
899
|
+
const response = result.value
|
|
900
|
+
const { offset, cursor, upToDate } =
|
|
901
|
+
this.#getMetadataFromResponse(response)
|
|
902
|
+
|
|
903
|
+
// Get response text first (handles empty responses gracefully)
|
|
904
|
+
const text = await response.text()
|
|
905
|
+
const content = text.trim() || `[]` // Default to empty array if no content or whitespace
|
|
906
|
+
const parsed = JSON.parse(content) as T | Array<T>
|
|
907
|
+
const items = Array.isArray(parsed) ? parsed : [parsed]
|
|
908
|
+
|
|
909
|
+
await subscriber({
|
|
910
|
+
items,
|
|
911
|
+
offset,
|
|
912
|
+
cursor,
|
|
913
|
+
upToDate,
|
|
914
|
+
})
|
|
915
|
+
|
|
916
|
+
result = await reader.read()
|
|
917
|
+
}
|
|
918
|
+
this.#markClosed()
|
|
919
|
+
} catch (e) {
|
|
920
|
+
// Ignore abort-related and body-consumed errors
|
|
921
|
+
const isAborted = abortController.signal.aborted
|
|
922
|
+
const isBodyError = e instanceof TypeError && String(e).includes(`Body`)
|
|
923
|
+
if (!isAborted && !isBodyError) {
|
|
924
|
+
this.#markError(e instanceof Error ? e : new Error(String(e)))
|
|
925
|
+
} else {
|
|
926
|
+
this.#markClosed()
|
|
927
|
+
}
|
|
928
|
+
} finally {
|
|
929
|
+
reader.releaseLock()
|
|
930
|
+
}
|
|
931
|
+
}
|
|
932
|
+
|
|
933
|
+
consumeJsonSubscription()
|
|
934
|
+
|
|
935
|
+
return () => {
|
|
936
|
+
abortController.abort()
|
|
937
|
+
this.cancel()
|
|
938
|
+
}
|
|
939
|
+
}
|
|
940
|
+
|
|
941
|
+
subscribeBytes(subscriber: (chunk: ByteChunk) => Promise<void>): () => void {
|
|
942
|
+
this.#ensureNoConsumption(`subscribeBytes`)
|
|
943
|
+
const abortController = new AbortController()
|
|
944
|
+
const reader = this.#getResponseReader()
|
|
945
|
+
|
|
946
|
+
const consumeBytesSubscription = async (): Promise<void> => {
|
|
947
|
+
try {
|
|
948
|
+
let result = await reader.read()
|
|
949
|
+
while (!result.done) {
|
|
950
|
+
if (abortController.signal.aborted) break
|
|
951
|
+
|
|
952
|
+
// Get metadata from Response headers (not from `this` which may be stale)
|
|
953
|
+
const response = result.value
|
|
954
|
+
const { offset, cursor, upToDate } =
|
|
955
|
+
this.#getMetadataFromResponse(response)
|
|
956
|
+
|
|
957
|
+
const buffer = await response.arrayBuffer()
|
|
958
|
+
|
|
959
|
+
await subscriber({
|
|
960
|
+
data: new Uint8Array(buffer),
|
|
961
|
+
offset,
|
|
962
|
+
cursor,
|
|
963
|
+
upToDate,
|
|
964
|
+
})
|
|
965
|
+
|
|
966
|
+
result = await reader.read()
|
|
967
|
+
}
|
|
968
|
+
this.#markClosed()
|
|
969
|
+
} catch (e) {
|
|
970
|
+
// Ignore abort-related and body-consumed errors
|
|
971
|
+
const isAborted = abortController.signal.aborted
|
|
972
|
+
const isBodyError = e instanceof TypeError && String(e).includes(`Body`)
|
|
973
|
+
if (!isAborted && !isBodyError) {
|
|
974
|
+
this.#markError(e instanceof Error ? e : new Error(String(e)))
|
|
975
|
+
} else {
|
|
976
|
+
this.#markClosed()
|
|
977
|
+
}
|
|
978
|
+
} finally {
|
|
979
|
+
reader.releaseLock()
|
|
980
|
+
}
|
|
981
|
+
}
|
|
982
|
+
|
|
983
|
+
consumeBytesSubscription()
|
|
984
|
+
|
|
985
|
+
return () => {
|
|
986
|
+
abortController.abort()
|
|
987
|
+
this.cancel()
|
|
988
|
+
}
|
|
989
|
+
}
|
|
990
|
+
|
|
991
|
+
subscribeText(subscriber: (chunk: TextChunk) => Promise<void>): () => void {
|
|
992
|
+
this.#ensureNoConsumption(`subscribeText`)
|
|
993
|
+
const abortController = new AbortController()
|
|
994
|
+
const reader = this.#getResponseReader()
|
|
995
|
+
|
|
996
|
+
const consumeTextSubscription = async (): Promise<void> => {
|
|
997
|
+
try {
|
|
998
|
+
let result = await reader.read()
|
|
999
|
+
while (!result.done) {
|
|
1000
|
+
if (abortController.signal.aborted) break
|
|
1001
|
+
|
|
1002
|
+
// Get metadata from Response headers (not from `this` which may be stale)
|
|
1003
|
+
const response = result.value
|
|
1004
|
+
const { offset, cursor, upToDate } =
|
|
1005
|
+
this.#getMetadataFromResponse(response)
|
|
1006
|
+
|
|
1007
|
+
const text = await response.text()
|
|
1008
|
+
|
|
1009
|
+
await subscriber({
|
|
1010
|
+
text,
|
|
1011
|
+
offset,
|
|
1012
|
+
cursor,
|
|
1013
|
+
upToDate,
|
|
1014
|
+
})
|
|
1015
|
+
|
|
1016
|
+
result = await reader.read()
|
|
1017
|
+
}
|
|
1018
|
+
this.#markClosed()
|
|
1019
|
+
} catch (e) {
|
|
1020
|
+
// Ignore abort-related and body-consumed errors
|
|
1021
|
+
const isAborted = abortController.signal.aborted
|
|
1022
|
+
const isBodyError = e instanceof TypeError && String(e).includes(`Body`)
|
|
1023
|
+
if (!isAborted && !isBodyError) {
|
|
1024
|
+
this.#markError(e instanceof Error ? e : new Error(String(e)))
|
|
1025
|
+
} else {
|
|
1026
|
+
this.#markClosed()
|
|
1027
|
+
}
|
|
1028
|
+
} finally {
|
|
1029
|
+
reader.releaseLock()
|
|
1030
|
+
}
|
|
1031
|
+
}
|
|
1032
|
+
|
|
1033
|
+
consumeTextSubscription()
|
|
1034
|
+
|
|
1035
|
+
return () => {
|
|
1036
|
+
abortController.abort()
|
|
1037
|
+
this.cancel()
|
|
1038
|
+
}
|
|
1039
|
+
}
|
|
1040
|
+
|
|
1041
|
+
// =====================
|
|
1042
|
+
// 4) Lifecycle
|
|
1043
|
+
// =====================
|
|
1044
|
+
|
|
1045
|
+
/**
 * Cancel the session.
 *
 * Aborts the session-wide AbortController (tearing down any in-flight
 * request or piping) and then marks the session closed.
 *
 * @param reason - Optional abort reason forwarded to the AbortController.
 */
cancel(reason?: unknown): void {
  // Abort first so consumers observe the abort before the closed state.
  this.#abortController.abort(reason)
  this.#markClosed()
}
|
|
1049
|
+
|
|
1050
|
+
/**
 * Promise that settles when the session ends — presumably resolved by
 * `#markClosed` and rejected by `#markError` (their definitions are not
 * visible in this chunk; confirm against the class constructor).
 */
get closed(): Promise<void> {
  return this.#closed
}
|
|
1053
|
+
}
|